From 656886474bf7c6c212b29c6ebc35c002f15698de Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 1 Oct 2015 16:21:59 -0400 Subject: [PATCH 001/630] initial empty commit From 1b1dcf9c21e9915c0226bca2fb51818fb5395562 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 1 Oct 2015 16:21:59 -0400 Subject: [PATCH 002/630] MXNet.jl generated files. license: MIT authors: Chiyuan Zhang years: 2015 user: pluskid Julia Version 0.4.0-rc2 [fa52609*] --- .gitignore | 3 +++ .travis.yml | 14 ++++++++++++++ LICENSE.md | 22 ++++++++++++++++++++++ README.md | 3 +++ REQUIRE | 1 + appveyor.yml | 34 ++++++++++++++++++++++++++++++++++ src/MXNet.jl | 5 +++++ test/runtests.jl | 5 +++++ 8 files changed, 87 insertions(+) create mode 100644 .gitignore create mode 100644 .travis.yml create mode 100644 LICENSE.md create mode 100644 README.md create mode 100644 REQUIRE create mode 100644 appveyor.yml create mode 100644 src/MXNet.jl create mode 100644 test/runtests.jl diff --git a/.gitignore b/.gitignore new file mode 100644 index 000000000000..8c960ec808d9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +*.jl.cov +*.jl.*.cov +*.jl.mem diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 000000000000..00656a7e0f7d --- /dev/null +++ b/.travis.yml @@ -0,0 +1,14 @@ +# Documentation: http://docs.travis-ci.com/user/languages/julia/ +language: julia +os: + - linux + - osx +julia: + - release + - nightly +notifications: + email: false +# uncomment the following lines to override the default test script +#script: +# - if [[ -a .git/shallow ]]; then git fetch --unshallow; fi +# - julia -e 'Pkg.clone(pwd()); Pkg.build("MXNet"); Pkg.test("MXNet"; coverage=true)' diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 000000000000..c578b73f5fdf --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,22 @@ +The MXNet.jl package is licensed under the MIT "Expat" License: + +> Copyright (c) 2015: Chiyuan Zhang. 
+> +> Permission is hereby granted, free of charge, to any person obtaining +> a copy of this software and associated documentation files (the +> "Software"), to deal in the Software without restriction, including +> without limitation the rights to use, copy, modify, merge, publish, +> distribute, sublicense, and/or sell copies of the Software, and to +> permit persons to whom the Software is furnished to do so, subject to +> the following conditions: +> +> The above copyright notice and this permission notice shall be +> included in all copies or substantial portions of the Software. +> +> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +> EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +> MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +> IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +> CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +> TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +> SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/README.md b/README.md new file mode 100644 index 000000000000..731c12553095 --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# MXNet + +[![Build Status](https://travis-ci.org/pluskid/MXNet.jl.svg?branch=master)](https://travis-ci.org/pluskid/MXNet.jl) diff --git a/REQUIRE b/REQUIRE new file mode 100644 index 000000000000..2c4ef82cb1ab --- /dev/null +++ b/REQUIRE @@ -0,0 +1 @@ +julia 0.3 diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 000000000000..8b7699447712 --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,34 @@ +environment: + matrix: + - JULIAVERSION: "julialang/bin/winnt/x86/0.3/julia-0.3-latest-win32.exe" + - JULIAVERSION: "julialang/bin/winnt/x64/0.3/julia-0.3-latest-win64.exe" + - JULIAVERSION: "julianightlies/bin/winnt/x86/julia-latest-win32.exe" + - JULIAVERSION: "julianightlies/bin/winnt/x64/julia-latest-win64.exe" + +branches: + only: + - master + - /release-.*/ + +notifications: + - provider: Email + on_build_success: false + on_build_failure: false + on_build_status_changed: false + +install: +# Download most recent Julia Windows binary + - ps: (new-object net.webclient).DownloadFile( + $("http://s3.amazonaws.com/"+$env:JULIAVERSION), + "C:\projects\julia-binary.exe") +# Run installer silently, output to C:\projects\julia + - C:\projects\julia-binary.exe /S /D=C:\projects\julia + +build_script: +# Need to convert from shallow to complete for Pkg.clone to work + - IF EXIST .git\shallow (git fetch --unshallow) + - C:\projects\julia\bin\julia -e "versioninfo(); + Pkg.clone(pwd(), \"MXNet\"); Pkg.build(\"MXNet\")" + +test_script: + - C:\projects\julia\bin\julia --check-bounds=yes -e "Pkg.test(\"MXNet\")" diff --git a/src/MXNet.jl b/src/MXNet.jl new file mode 100644 index 000000000000..c45f25e4610a --- /dev/null +++ b/src/MXNet.jl @@ -0,0 +1,5 @@ +module MXNet + +# package code goes here + +end # module diff --git a/test/runtests.jl b/test/runtests.jl new file mode 100644 index 000000000000..d6005fcf699d --- /dev/null +++ 
b/test/runtests.jl @@ -0,0 +1,5 @@ +using MXNet +using Base.Test + +# write your own tests here +@test 1 == 1 From 3e0abcef744ec0082e372baa4f81f83fad441e8c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 1 Oct 2015 16:23:07 -0400 Subject: [PATCH 003/630] MXNet.jl regenerated files. license: ASL authors: Chiyuan Zhang years: 2015 user: pluskid Julia Version 0.4.0-rc2 [fa52609*] --- LICENSE.md | 199 +++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 178 insertions(+), 21 deletions(-) diff --git a/LICENSE.md b/LICENSE.md index c578b73f5fdf..a34c4a0f662e 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,22 +1,179 @@ -The MXNet.jl package is licensed under the MIT "Expat" License: +The MXNet.jl package is licensed under version 2.0 of the Apache License: -> Copyright (c) 2015: Chiyuan Zhang. -> -> Permission is hereby granted, free of charge, to any person obtaining -> a copy of this software and associated documentation files (the -> "Software"), to deal in the Software without restriction, including -> without limitation the rights to use, copy, modify, merge, publish, -> distribute, sublicense, and/or sell copies of the Software, and to -> permit persons to whom the Software is furnished to do so, subject to -> the following conditions: -> -> The above copyright notice and this permission notice shall be -> included in all copies or substantial portions of the Software. -> -> THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -> EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -> MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -> IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -> CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -> TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -> SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+> Copyright (c) 2015: +> * Chiyuan Zhang +> +> Apache License +> Version 2.0, January 2004 +> http://www.apache.org/licenses/ +> +> TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +> +> 1. Definitions. +> +> "License" shall mean the terms and conditions for use, reproduction, +> and distribution as defined by Sections 1 through 9 of this document. +> +> "Licensor" shall mean the copyright owner or entity authorized by +> the copyright owner that is granting the License. +> +> "Legal Entity" shall mean the union of the acting entity and all +> other entities that control, are controlled by, or are under common +> control with that entity. For the purposes of this definition, +> "control" means (i) the power, direct or indirect, to cause the +> direction or management of such entity, whether by contract or +> otherwise, or (ii) ownership of fifty percent (50%) or more of the +> outstanding shares, or (iii) beneficial ownership of such entity. +> +> "You" (or "Your") shall mean an individual or Legal Entity +> exercising permissions granted by this License. +> +> "Source" form shall mean the preferred form for making modifications, +> including but not limited to software source code, documentation +> source, and configuration files. +> +> "Object" form shall mean any form resulting from mechanical +> transformation or translation of a Source form, including but +> not limited to compiled object code, generated documentation, +> and conversions to other media types. +> +> "Work" shall mean the work of authorship, whether in Source or +> Object form, made available under the License, as indicated by a +> copyright notice that is included in or attached to the work +> (an example is provided in the Appendix below). 
+> +> "Derivative Works" shall mean any work, whether in Source or Object +> form, that is based on (or derived from) the Work and for which the +> editorial revisions, annotations, elaborations, or other modifications +> represent, as a whole, an original work of authorship. For the purposes +> of this License, Derivative Works shall not include works that remain +> separable from, or merely link (or bind by name) to the interfaces of, +> the Work and Derivative Works thereof. +> +> "Contribution" shall mean any work of authorship, including +> the original version of the Work and any modifications or additions +> to that Work or Derivative Works thereof, that is intentionally +> submitted to Licensor for inclusion in the Work by the copyright owner +> or by an individual or Legal Entity authorized to submit on behalf of +> the copyright owner. For the purposes of this definition, "submitted" +> means any form of electronic, verbal, or written communication sent +> to the Licensor or its representatives, including but not limited to +> communication on electronic mailing lists, source code control systems, +> and issue tracking systems that are managed by, or on behalf of, the +> Licensor for the purpose of discussing and improving the Work, but +> excluding communication that is conspicuously marked or otherwise +> designated in writing by the copyright owner as "Not a Contribution." +> +> "Contributor" shall mean Licensor and any individual or Legal Entity +> on behalf of whom a Contribution has been received by Licensor and +> subsequently incorporated within the Work. +> +> 2. Grant of Copyright License. 
Subject to the terms and conditions of +> this License, each Contributor hereby grants to You a perpetual, +> worldwide, non-exclusive, no-charge, royalty-free, irrevocable +> copyright license to reproduce, prepare Derivative Works of, +> publicly display, publicly perform, sublicense, and distribute the +> Work and such Derivative Works in Source or Object form. +> +> 3. Grant of Patent License. Subject to the terms and conditions of +> this License, each Contributor hereby grants to You a perpetual, +> worldwide, non-exclusive, no-charge, royalty-free, irrevocable +> (except as stated in this section) patent license to make, have made, +> use, offer to sell, sell, import, and otherwise transfer the Work, +> where such license applies only to those patent claims licensable +> by such Contributor that are necessarily infringed by their +> Contribution(s) alone or by combination of their Contribution(s) +> with the Work to which such Contribution(s) was submitted. If You +> institute patent litigation against any entity (including a +> cross-claim or counterclaim in a lawsuit) alleging that the Work +> or a Contribution incorporated within the Work constitutes direct +> or contributory patent infringement, then any patent licenses +> granted to You under this License for that Work shall terminate +> as of the date such litigation is filed. +> +> 4. Redistribution. 
You may reproduce and distribute copies of the +> Work or Derivative Works thereof in any medium, with or without +> modifications, and in Source or Object form, provided that You +> meet the following conditions: +> +> (a) You must give any other recipients of the Work or +> Derivative Works a copy of this License; and +> +> (b) You must cause any modified files to carry prominent notices +> stating that You changed the files; and +> +> (c) You must retain, in the Source form of any Derivative Works +> that You distribute, all copyright, patent, trademark, and +> attribution notices from the Source form of the Work, +> excluding those notices that do not pertain to any part of +> the Derivative Works; and +> +> (d) If the Work includes a "NOTICE" text file as part of its +> distribution, then any Derivative Works that You distribute must +> include a readable copy of the attribution notices contained +> within such NOTICE file, excluding those notices that do not +> pertain to any part of the Derivative Works, in at least one +> of the following places: within a NOTICE text file distributed +> as part of the Derivative Works; within the Source form or +> documentation, if provided along with the Derivative Works; or, +> within a display generated by the Derivative Works, if and +> wherever such third-party notices normally appear. The contents +> of the NOTICE file are for informational purposes only and +> do not modify the License. You may add Your own attribution +> notices within Derivative Works that You distribute, alongside +> or as an addendum to the NOTICE text from the Work, provided +> that such additional attribution notices cannot be construed +> as modifying the License. 
+> +> You may add Your own copyright statement to Your modifications and +> may provide additional or different license terms and conditions +> for use, reproduction, or distribution of Your modifications, or +> for any such Derivative Works as a whole, provided Your use, +> reproduction, and distribution of the Work otherwise complies with +> the conditions stated in this License. +> +> 5. Submission of Contributions. Unless You explicitly state otherwise, +> any Contribution intentionally submitted for inclusion in the Work +> by You to the Licensor shall be under the terms and conditions of +> this License, without any additional terms or conditions. +> Notwithstanding the above, nothing herein shall supersede or modify +> the terms of any separate license agreement you may have executed +> with Licensor regarding such Contributions. +> +> 6. Trademarks. This License does not grant permission to use the trade +> names, trademarks, service marks, or product names of the Licensor, +> except as required for reasonable and customary use in describing the +> origin of the Work and reproducing the content of the NOTICE file. +> +> 7. Disclaimer of Warranty. Unless required by applicable law or +> agreed to in writing, Licensor provides the Work (and each +> Contributor provides its Contributions) on an "AS IS" BASIS, +> WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +> implied, including, without limitation, any warranties or conditions +> of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +> PARTICULAR PURPOSE. You are solely responsible for determining the +> appropriateness of using or redistributing the Work and assume any +> risks associated with Your exercise of permissions under this License. +> +> 8. Limitation of Liability. 
In no event and under no legal theory, +> whether in tort (including negligence), contract, or otherwise, +> unless required by applicable law (such as deliberate and grossly +> negligent acts) or agreed to in writing, shall any Contributor be +> liable to You for damages, including any direct, indirect, special, +> incidental, or consequential damages of any character arising as a +> result of this License or out of the use or inability to use the +> Work (including but not limited to damages for loss of goodwill, +> work stoppage, computer failure or malfunction, or any and all +> other commercial damages or losses), even if such Contributor +> has been advised of the possibility of such damages. +> +> 9. Accepting Warranty or Additional Liability. While redistributing +> the Work or Derivative Works thereof, You may choose to offer, +> and charge a fee for, acceptance of support, warranty, indemnity, +> or other liability obligations and/or rights consistent with this +> License. However, in accepting such obligations, You may act only +> on Your own behalf and on Your sole responsibility, not on behalf +> of any other Contributor, and only if You agree to indemnify, +> defend, and hold each Contributor harmless for any liability +> incurred by, or claims asserted against, such Contributor by reason +> of your accepting any such warranty or additional liability. 
From e25bf7e1c1121b50b7e3a298857825b797f6f60f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 2 Oct 2015 00:01:50 -0400 Subject: [PATCH 004/630] basic API testing, create NDArray --- src/MXNet.jl | 4 +++- src/context.jl | 14 +++++++++++ src/init.jl | 65 ++++++++++++++++++++++++++++++++++++++++++++++++++ src/ndarray.jl | 40 +++++++++++++++++++++++++++++++ 4 files changed, 122 insertions(+), 1 deletion(-) create mode 100644 src/context.jl create mode 100644 src/init.jl create mode 100644 src/ndarray.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index c45f25e4610a..b1fcedb35e16 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -1,5 +1,7 @@ module MXNet -# package code goes here +include("init.jl") +include("context.jl") +include("ndarray.jl") end # module diff --git a/src/context.jl b/src/context.jl new file mode 100644 index 000000000000..af8cfd725a49 --- /dev/null +++ b/src/context.jl @@ -0,0 +1,14 @@ +@enum CONTEXT_TYPE CPU=1 GPU=2 + +type Context + device_type :: CONTEXT_TYPE + device_id :: Cint + + old_ctx :: Nullable{Context} +end +Context(dev_type :: CONTEXT_TYPE, dev_id = 0) = + Context(dev_type, dev_id, Nullable{Context}()) + + +# global default context +DEFAULT_CONTEXT = Context(CPU) diff --git a/src/init.jl b/src/init.jl new file mode 100644 index 000000000000..41fbbbcd3210 --- /dev/null +++ b/src/init.jl @@ -0,0 +1,65 @@ +export MXError + +"Exception thrown when an error occurred calling MXNet API." 
+immutable MXError <: Exception + msg :: AbstractString +end + +################################################################################ +# Common types used in MXNet API +################################################################################ +typealias MX_uint Cuint +typealias MX_float Cfloat + +macro mx_define_handle_t(name) + name = esc(name) + quote + type $name + value :: Ptr{Void} + end + $name() = $name(C_NULL) + function Base.cconvert(::Type{Ptr{Void}}, obj::$name) + obj.value + end + function Base.isnull(obj::$name) obj.value == C_NULL end + function Base.reset(obj::$name) obj.value = C_NULL end + end +end + +@mx_define_handle_t(MX_NDArrayHandle) +@mx_define_handle_t(MX_FunctionHandle) + +################################################################################ +# Initialization and library API entrance +################################################################################ +const MXNET_LIB = Libdl.find_library(["libmxnet.so"], ["/Users/chiyuan/work/mxnet/mxnet/lib"]) + +function __init__() + atexit() do + # notify libmxnet we are shutting down + ccall( ("MXNotifyShutdown", MXNET_LIB), Cint, () ) + end +end + +function mx_get_last_error() + msg = ccall( ("MXGetLastError", MXNET_LIB), Ptr{UInt8}, () ) + if msg == C_NULL + throw(MXError("Failed to get last error message")) + end + return bytestring(msg) +end + +"Utility macro to call MXNet API functions" +macro mxcall(fv, argtypes, args...) + f = eval(fv) + args = map(esc, args) + quote + _mxret = ccall( ($(Meta.quot(f)), $MXNET_LIB), + Cint, $argtypes, $(args...) 
) + if _mxret != 0 + err_msg = mx_get_last_error() + throw(MXError(err_msg)) + end + end +end + diff --git a/src/ndarray.jl b/src/ndarray.jl new file mode 100644 index 000000000000..ea26504559a7 --- /dev/null +++ b/src/ndarray.jl @@ -0,0 +1,40 @@ +export NDArray +export delete + +function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) + h_ref = Ref{Ptr{Void}}(0) + shape = MX_uint[shape...] + @mxcall(:MXNDArrayCreate, (Ptr{MX_uint}, MX_uint, Cint, Cint, Cint, Ref{Ptr{Void}}), + shape, length(shape), ctx.device_type, ctx.device_id, delay_alloc, h_ref) + handle = MX_NDArrayHandle(h_ref[]) + return handle +end + +type NDArray + handle :: MX_NDArrayHandle + writable :: Bool + + function NDArray(handle, writable=true) + obj = new(handle, writable) + + # TODO: there is currently no good way of automatically managing external resources + # using finalizers is said to slow down the GC significantly + finalizer(obj, delete) + obj + end +end + +function delete(obj :: NDArray) + if !isnull(obj.handle) + @mxcall(:MXNDArrayFree, (Ptr{Void},), obj.handle) + reset(obj.handle) + end +end + +function empty{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) + NDArray(_ndarray_alloc(shape, ctx, false)) +end +function empty(shape :: Int...) + empty(shape) +end + From 11029be0876b73f563b556787e5184b2668c35a1 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 2 Oct 2015 00:11:50 -0400 Subject: [PATCH 005/630] a placeholder README --- README.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 731c12553095..e9ce3583fcae 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ # MXNet -[![Build Status](https://travis-ci.org/pluskid/MXNet.jl.svg?branch=master)](https://travis-ci.org/pluskid/MXNet.jl) +[![Build Status](https://travis-ci.org/dmlc/MXNet.jl.svg?branch=master)](https://travis-ci.org/dmlc/MXNet.jl) + +Julia wrapper of [MXNet](https://github.com/dmlc/mxnet). 
From d7f011c7f499d3076bc3fe43abd7d824bc216247 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 10:20:37 -0400 Subject: [PATCH 006/630] move gc to handle finalizer --- src/MXNet.jl | 9 +++++++- src/init.jl | 59 +++++++++++++++++++++++++++++++++++--------------- src/ndarray.jl | 14 ++++++------ 3 files changed, 56 insertions(+), 26 deletions(-) diff --git a/src/MXNet.jl b/src/MXNet.jl index b1fcedb35e16..efdf1c76c0c9 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -1,7 +1,14 @@ module MXNet +# we put everything in the namespace mx, because there are a lot of +# functions with the same names as built-in utilities like "zeros", etc. +export mx +module mx + include("init.jl") include("context.jl") include("ndarray.jl") -end # module +end # mx + +end # module MXNet diff --git a/src/init.jl b/src/init.jl index 41fbbbcd3210..f062b52f07f5 100644 --- a/src/init.jl +++ b/src/init.jl @@ -11,24 +11,6 @@ end typealias MX_uint Cuint typealias MX_float Cfloat -macro mx_define_handle_t(name) - name = esc(name) - quote - type $name - value :: Ptr{Void} - end - $name() = $name(C_NULL) - function Base.cconvert(::Type{Ptr{Void}}, obj::$name) - obj.value - end - function Base.isnull(obj::$name) obj.value == C_NULL end - function Base.reset(obj::$name) obj.value = C_NULL end - end -end - -@mx_define_handle_t(MX_NDArrayHandle) -@mx_define_handle_t(MX_FunctionHandle) - ################################################################################ # Initialization and library API entrance ################################################################################ @@ -63,3 +45,44 @@ macro mxcall(fv, argtypes, args...) 
end end +################################################################################ +# Handle types +################################################################################ +macro mx_define_handle_t(name, destructor) + name = esc(name) + quote + type $name + value :: Ptr{Void} + + function $name(value = C_NULL) + hdr = new(value) + + $(if destructor != :nop + :(finalizer(hdr, delete!)) + end) + + return hdr + end + end + + $(if finalizer != :nop + quote + function delete!(h :: $name) + if h.value != C_NULL + @mxcall($(Meta.quot(destructor)), (Ptr{Void},), h.value) + h.value = C_NULL + end + end + end + end) + + function Base.cconvert(::Type{Ptr{Void}}, obj::$name) + obj.value + end + function Base.isnull(obj::$name) obj.value == C_NULL end + end +end + +@mx_define_handle_t(MX_NDArrayHandle, MXNDArrayFree) +@mx_define_handle_t(MX_FunctionHandle, nop) + diff --git a/src/ndarray.jl b/src/ndarray.jl index ea26504559a7..9cb458a991c9 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -19,17 +19,17 @@ type NDArray # TODO: there is currently no good way of automatically managing external resources # using finalizers is said to slow down the GC significantly - finalizer(obj, delete) + #finalizer(obj, delete) obj end end -function delete(obj :: NDArray) - if !isnull(obj.handle) - @mxcall(:MXNDArrayFree, (Ptr{Void},), obj.handle) - reset(obj.handle) - end -end +#function delete(obj :: NDArray) +# if !isnull(obj.handle) +# @mxcall(:MXNDArrayFree, (Ptr{Void},), obj.handle) +# reset(obj.handle) +# end +#end function empty{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) NDArray(_ndarray_alloc(shape, ctx, false)) From 28531c8679ccd56ba1bed13e159e1260357fd913 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 12:30:55 -0400 Subject: [PATCH 007/630] define binary dn array functions --- src/init.jl | 13 ++++-- src/ndarray.jl | 122 +++++++++++++++++++++++++++++++++++++++++++------ 2 files changed, 116 insertions(+), 19 deletions(-) diff 
--git a/src/init.jl b/src/init.jl index f062b52f07f5..96aa6d7f0d69 100644 --- a/src/init.jl +++ b/src/init.jl @@ -10,6 +10,10 @@ end ################################################################################ typealias MX_uint Cuint typealias MX_float Cfloat +typealias MX_handle Ptr{Void} + +typealias char_p Ptr{UInt8} +typealias char_pp Ptr{char_p} ################################################################################ # Initialization and library API entrance @@ -17,6 +21,7 @@ typealias MX_float Cfloat const MXNET_LIB = Libdl.find_library(["libmxnet.so"], ["/Users/chiyuan/work/mxnet/mxnet/lib"]) function __init__() + _import_ndarray_functions() atexit() do # notify libmxnet we are shutting down ccall( ("MXNotifyShutdown", MXNET_LIB), Cint, () ) @@ -24,7 +29,7 @@ function __init__() end function mx_get_last_error() - msg = ccall( ("MXGetLastError", MXNET_LIB), Ptr{UInt8}, () ) + msg = ccall( ("MXGetLastError", MXNET_LIB), char_p, () ) if msg == C_NULL throw(MXError("Failed to get last error message")) end @@ -52,7 +57,7 @@ macro mx_define_handle_t(name, destructor) name = esc(name) quote type $name - value :: Ptr{Void} + value :: MX_handle function $name(value = C_NULL) hdr = new(value) @@ -69,14 +74,14 @@ macro mx_define_handle_t(name, destructor) quote function delete!(h :: $name) if h.value != C_NULL - @mxcall($(Meta.quot(destructor)), (Ptr{Void},), h.value) + @mxcall($(Meta.quot(destructor)), (MX_handle,), h.value) h.value = C_NULL end end end end) - function Base.cconvert(::Type{Ptr{Void}}, obj::$name) + function Base.convert(::Type{MX_handle}, obj::$name) obj.value end function Base.isnull(obj::$name) obj.value == C_NULL end diff --git a/src/ndarray.jl b/src/ndarray.jl index 9cb458a991c9..0c20a56e6e87 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,36 +1,34 @@ export NDArray export delete +# create a NDArray handle of specific shape function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) - h_ref = 
Ref{Ptr{Void}}(0) + h_ref = Ref{MX_handle}(0) shape = MX_uint[shape...] - @mxcall(:MXNDArrayCreate, (Ptr{MX_uint}, MX_uint, Cint, Cint, Cint, Ref{Ptr{Void}}), + @mxcall(:MXNDArrayCreate, (Ptr{MX_uint}, MX_uint, Cint, Cint, Cint, Ref{MX_handle}), shape, length(shape), ctx.device_type, ctx.device_id, delay_alloc, h_ref) handle = MX_NDArrayHandle(h_ref[]) return handle end +# create a handle to an empty NDArray, this handle can be used to hold +# results returned by libmx API calls +function _ndarray_alloc() + h_ref = Ref{MX_handle}(0) + @mxcall(:MXNDArrayCreateNone, (Ref{MX_handle},), h_ref) + return MX_NDArrayHandle(h_ref[]) +end + + type NDArray handle :: MX_NDArrayHandle writable :: Bool function NDArray(handle, writable=true) - obj = new(handle, writable) - - # TODO: there is currently no good way of automatically managing external resources - # using finalizers is said to slow down the GC significantly - #finalizer(obj, delete) - obj + new(handle, writable) end end -#function delete(obj :: NDArray) -# if !isnull(obj.handle) -# @mxcall(:MXNDArrayFree, (Ptr{Void},), obj.handle) -# reset(obj.handle) -# end -#end - function empty{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) NDArray(_ndarray_alloc(shape, ctx, false)) end @@ -38,3 +36,97 @@ function empty(shape :: Int...) 
empty(shape) end +module _lib +# this module is used to hold functions automatically imported +# from libmxnet +end +function _register_function(lib::Module, name::Symbol, func::Function) + eval(lib, quote + $name = $func + end) +end + +@enum(LIBMX_FUNC_TYPE_MASK, + NDARRAY_ARG_BEFORE_SCALAR = 1, + ACCEPT_EMPTY_MUTATE_TARGET = (1 << 2) +) + +function _import_ndarray_functions() + n_ref = Ref{MX_uint}(0) + h_ref = Ref{Ptr{MX_handle}}(0) + @mxcall(:MXListFunctions, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) + + n_funcs = n_ref[] + h_funcs = pointer_to_array(h_ref[], n_funcs) + + for i = 1:n_funcs + func_handle = h_funcs[i] + + #---------------------------------------- + # get function information (human readable) + ref_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_narg = Ref{MX_uint}(0) + + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) + + @mxcall(:MXFuncGetInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), + func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) + func_name = symbol(bytestring(ref_name[])) + + #---------------------------------------- + # get function specification + ref_n_use_vars = Ref{MX_uint}(0) + ref_n_scalars = Ref{MX_uint}(0) + ref_n_mut_vars = Ref{MX_uint}(0) + ref_type_mask = Ref{Cint}(0) + @mxcall(:MXFuncDescribe, + (MX_handle, Ref{MX_uint}, Ref{MX_uint}, Ref{MX_uint}, Ref{Cint}), + func_handle, ref_n_use_vars, ref_n_scalars, ref_n_mut_vars, ref_type_mask) + + #---------------------------------------- + # prepare function definition + n_used_vars = ref_n_use_vars[] + n_scalars = ref_n_scalars[] + n_mutate_vars = ref_n_mut_vars[] + type_mask = ref_type_mask[] + accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 + if (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 + use_vars_range = 1:n_used_vars + scalar_range = 
n_used_vars+1:n_used_vars+n_scalars + else + scalar_range = 1:n_scalars + use_vars_range = n_scalars+1:n_scalars+n_used_vars + end + + if n_mutate_vars == 1 && n_used_vars == 2 && n_scalars == 0 + println("defining $func_name") + # binary ndarray function + function binary_ndarray_function(lhs::NDArray, rhs::NDArray, out::NDArray) + @assert(out.writable) + use_vars = MX_handle[lhs.handle, rhs.handle] + scalars = MX_float[] + mut_vars = MX_handle[out.handle] + @mxcall(:MXFuncInvoke, + (MX_handle, Ptr{MX_handle}, Ptr{MX_float}, Ptr{MX_handle}), + func_handle, use_vars, scalars, mut_vars) + return out + end + if accept_empty_mutate + function binary_ndarray_function(lhs::NDArray, rhs::NDArray) + out = NDArray(_ndarray_alloc()) + binary_ndarray_function(lhs, rhs, out) + end + end + + # add methods to the module + eval(_lib, quote + $func_name = $binary_ndarray_function + end) + end + end +end + From e426493be0b5abfabb033e8be1bed3fd91e5d9ff Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 12:56:07 -0400 Subject: [PATCH 008/630] copy ndarray to cpu for debuggin --- src/init.jl | 5 ++++- src/ndarray.jl | 27 +++++++++++++++++++++++++-- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/src/init.jl b/src/init.jl index 96aa6d7f0d69..2a9844b0cd2f 100644 --- a/src/init.jl +++ b/src/init.jl @@ -81,9 +81,12 @@ macro mx_define_handle_t(name, destructor) end end) - function Base.convert(::Type{MX_handle}, obj::$name) + function Base.unsafe_convert(::Type{MX_handle}, obj::$name) obj.value end + Base.convert(t::Type{MX_handle}, obj::$name) = Base.unsafe_convert(t, obj) + Base.cconvert(t::Type{MX_handle}, obj::$name) = Base.unsafe_convert(t, obj) + function Base.isnull(obj::$name) obj.value == C_NULL end end end diff --git a/src/ndarray.jl b/src/ndarray.jl index 0c20a56e6e87..7825b0e2a331 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,5 +1,5 @@ export NDArray -export delete +export empty # create a NDArray handle of specific shape function 
_ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) @@ -19,7 +19,9 @@ function _ndarray_alloc() return MX_NDArrayHandle(h_ref[]) end - +################################################################################ +# NDArray Type +################################################################################ type NDArray handle :: MX_NDArrayHandle writable :: Bool @@ -29,6 +31,9 @@ type NDArray end end +################################################################################ +# NDArray functions exported to the users +################################################################################ function empty{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) NDArray(_ndarray_alloc(shape, ctx, false)) end @@ -36,6 +41,24 @@ function empty(shape :: Int...) empty(shape) end +function Base.size(arr :: NDArray) + ref_ndim = Ref{MX_uint}(0) + ref_shape = Ref{Ptr{MX_uint}}(0) + @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), + arr.handle, ref_ndim, ref_shape) + tuple(map(Int, pointer_to_array(ref_shape[], ref_ndim[]))...) 
+end + +function to_array(arr :: NDArray) + out = Array(MX_float, size(arr)) + @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{MX_float}, Csize_t), + arr.handle, pointer(out), length(out)) + return out +end + +################################################################################ +# NDArray functions dynamically exported from libmx +################################################################################ module _lib # this module is used to hold functions automatically imported # from libmxnet From 0062c9db226b05439c20c7372616bf96f4505251 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 13:29:35 -0400 Subject: [PATCH 009/630] define unary ndarray functions --- src/ndarray.jl | 46 +++++++++++++++++++++++++++++++--------------- 1 file changed, 31 insertions(+), 15 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 7825b0e2a331..80a611a51bfc 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -63,10 +63,10 @@ module _lib # this module is used to hold functions automatically imported # from libmxnet end -function _register_function(lib::Module, name::Symbol, func::Function) - eval(lib, quote - $name = $func - end) +function _invoke_mxfunction(func_handle::MX_handle, use_vars, scalars, mut_vars) + @mxcall(:MXFuncInvoke, + (MX_handle, Ptr{MX_handle}, Ptr{MX_float}, Ptr{MX_handle}), + func_handle, use_vars, scalars, mut_vars) end @enum(LIBMX_FUNC_TYPE_MASK, @@ -126,29 +126,45 @@ function _import_ndarray_functions() end if n_mutate_vars == 1 && n_used_vars == 2 && n_scalars == 0 - println("defining $func_name") + println("defining binary $func_name") # binary ndarray function - function binary_ndarray_function(lhs::NDArray, rhs::NDArray, out::NDArray) + binary_func = (lhs::NDArray, rhs::NDArray, out::NDArray) -> begin @assert(out.writable) use_vars = MX_handle[lhs.handle, rhs.handle] scalars = MX_float[] mut_vars = MX_handle[out.handle] - @mxcall(:MXFuncInvoke, - (MX_handle, Ptr{MX_handle}, Ptr{MX_float}, Ptr{MX_handle}), - 
func_handle, use_vars, scalars, mut_vars) + _invoke_mxfunction(use_vars, scalars, mut_vars) return out end + eval(_lib, :(function $func_name(lhs, rhs, out) $binary_func(lhs, rhs, out) end)) + if accept_empty_mutate - function binary_ndarray_function(lhs::NDArray, rhs::NDArray) + binary_func_2 = (lhs::NDArray, rhs::NDArray) -> begin out = NDArray(_ndarray_alloc()) - binary_ndarray_function(lhs, rhs, out) + binary_func(lhs, rhs, out) end + eval(_lib, :(function $func_name(lhs, rhs) $binary_func_2(lhs, rhs) end)) + end + elseif n_mutate_vars == 1 && n_used_vars == 1 && n_scalars == 0 + println("defining unary $func_name") + # unary ndarray function + unary_func = (src::NDArray, out::NDArray) -> begin + @assert(out.writable) + use_vars = MX_handle[src.handle] + scalars = MX_float[] + mut_vars = MX_handle[out.handle] + _invoke_mxfunction(use_vars, scalars, mut_vars) + return out end + eval(_lib, :(function $func_name(src, out) $unary_func(src, out) end)) - # add methods to the module - eval(_lib, quote - $func_name = $binary_ndarray_function - end) + if accept_empty_mutate + unary_func_2 = (src::NDArray) -> begin + out = NDArray(_ndarray_alloc()) + unary_func(src, out) + end + eval(_lib, :(function $func_name(src) $unary_func_2(src) end)) + end end end end From 0ee41beca36e9f9b6311a2b469eaa34cf2ab2eb4 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 14:44:35 -0400 Subject: [PATCH 010/630] use a prefix instead of submodule for dynamically imported functions --- src/ndarray.jl | 71 +++++++++++++++++++++++++++----------------------- 1 file changed, 38 insertions(+), 33 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 80a611a51bfc..eae969c9d2af 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -59,10 +59,6 @@ end ################################################################################ # NDArray functions dynamically exported from libmx ################################################################################ -module _lib -# 
this module is used to hold functions automatically imported -# from libmxnet -end function _invoke_mxfunction(func_handle::MX_handle, use_vars, scalars, mut_vars) @mxcall(:MXFuncInvoke, (MX_handle, Ptr{MX_handle}, Ptr{MX_float}, Ptr{MX_handle}), @@ -98,7 +94,14 @@ function _import_ndarray_functions() @mxcall(:MXFuncGetInfo, (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) - func_name = symbol(bytestring(ref_name[])) + + # We attach the symbol ℵ (\aleph) to those functions to indicate that they are + # dynamically imported from libmxnet + # + # A first attempt was to collect all those functions in a submodule _lib. But working + # with submodules in Julia is really painful, especially when macros (@mxcall) are + # involved in a function that is to be dynamically generated via eval. + func_name = symbol("ℵ" * bytestring(ref_name[])) #---------------------------------------- # get function specification @@ -128,42 +131,44 @@ function _import_ndarray_functions() if n_mutate_vars == 1 && n_used_vars == 2 && n_scalars == 0 println("defining binary $func_name") # binary ndarray function - binary_func = (lhs::NDArray, rhs::NDArray, out::NDArray) -> begin - @assert(out.writable) - use_vars = MX_handle[lhs.handle, rhs.handle] - scalars = MX_float[] - mut_vars = MX_handle[out.handle] - _invoke_mxfunction(use_vars, scalars, mut_vars) - return out - end - eval(_lib, :(function $func_name(lhs, rhs, out) $binary_func(lhs, rhs, out) end)) + eval(mx, quote + function $func_name(lhs::NDArray, rhs::NDArray, out::NDArray) + @assert(out.writable) + use_vars = MX_handle[lhs.handle, rhs.handle] + scalars = MX_float[] + mut_vars = MX_handle[out.handle] + _invoke_mxfunction($func_handle, use_vars, scalars, mut_vars) + return out + end + end) if accept_empty_mutate - binary_func_2 = (lhs::NDArray, rhs::NDArray) -> begin - out = NDArray(_ndarray_alloc()) - 
binary_func(lhs, rhs, out) - end - eval(_lib, :(function $func_name(lhs, rhs) $binary_func_2(lhs, rhs) end)) + eval(mx, quote + function $func_name(lhs::NDArray, rhs::NDArray) + $func_name(lhs, rhs, NDArray(_ndarray_alloc())) + end + end) end elseif n_mutate_vars == 1 && n_used_vars == 1 && n_scalars == 0 println("defining unary $func_name") # unary ndarray function - unary_func = (src::NDArray, out::NDArray) -> begin - @assert(out.writable) - use_vars = MX_handle[src.handle] - scalars = MX_float[] - mut_vars = MX_handle[out.handle] - _invoke_mxfunction(use_vars, scalars, mut_vars) - return out - end - eval(_lib, :(function $func_name(src, out) $unary_func(src, out) end)) + eval(mx, quote + function $func_name(src::NDArray, out::NDArray) + @assert(out.writable) + use_vars = MX_handle[src.handle] + scalars = MX_float[] + mut_vars = MX_handle[out.handle] + _invoke_mxfunction($func_handle, use_vars, scalars, mut_vars) + return out + end + end) if accept_empty_mutate - unary_func_2 = (src::NDArray) -> begin - out = NDArray(_ndarray_alloc()) - unary_func(src, out) - end - eval(_lib, :(function $func_name(src) $unary_func_2(src) end)) + eval(mx, quote + function $func_name(src::NDArray) + $func_name(NDArray(_ndarray_alloc())) + end + end) end end end From f3184ad031b6496c8d1077471ac1e4a5bc91428f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 15:44:01 -0400 Subject: [PATCH 011/630] a more general way of defining mx imported functions --- src/ndarray.jl | 59 +++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 49 insertions(+), 10 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index eae969c9d2af..09b47acaf445 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -31,6 +31,12 @@ type NDArray end end +function Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) + Base.unsafe_convert(MX_handle, obj.handle) +end +Base.convert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, 
obj::NDArray) = Base.unsafe_convert(t, obj) + ################################################################################ # NDArray functions exported to the users ################################################################################ @@ -45,14 +51,14 @@ function Base.size(arr :: NDArray) ref_ndim = Ref{MX_uint}(0) ref_shape = Ref{Ptr{MX_uint}}(0) @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), - arr.handle, ref_ndim, ref_shape) + arr, ref_ndim, ref_shape) tuple(map(Int, pointer_to_array(ref_shape[], ref_ndim[]))...) end function to_array(arr :: NDArray) out = Array(MX_float, size(arr)) @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{MX_float}, Csize_t), - arr.handle, pointer(out), length(out)) + arr, pointer(out), length(out)) return out end @@ -131,19 +137,19 @@ function _import_ndarray_functions() if n_mutate_vars == 1 && n_used_vars == 2 && n_scalars == 0 println("defining binary $func_name") # binary ndarray function - eval(mx, quote + eval(quote function $func_name(lhs::NDArray, rhs::NDArray, out::NDArray) @assert(out.writable) - use_vars = MX_handle[lhs.handle, rhs.handle] + use_vars = MX_handle[lhs, rhs] scalars = MX_float[] - mut_vars = MX_handle[out.handle] + mut_vars = MX_handle[out] _invoke_mxfunction($func_handle, use_vars, scalars, mut_vars) return out end end) if accept_empty_mutate - eval(mx, quote + eval(quote function $func_name(lhs::NDArray, rhs::NDArray) $func_name(lhs, rhs, NDArray(_ndarray_alloc())) end @@ -152,24 +158,57 @@ function _import_ndarray_functions() elseif n_mutate_vars == 1 && n_used_vars == 1 && n_scalars == 0 println("defining unary $func_name") # unary ndarray function - eval(mx, quote + eval(quote function $func_name(src::NDArray, out::NDArray) @assert(out.writable) - use_vars = MX_handle[src.handle] + use_vars = MX_handle[src] scalars = MX_float[] - mut_vars = MX_handle[out.handle] + mut_vars = MX_handle[out] _invoke_mxfunction($func_handle, use_vars, scalars, mut_vars) return out end 
end) if accept_empty_mutate - eval(mx, quote + eval(quote function $func_name(src::NDArray) $func_name(NDArray(_ndarray_alloc())) end end) end + else + println("defining generic $func_name") + # general ndarray function + args = vcat([Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), symbol("sca$i"), AbstractFloat) for i=1:n_scalars], + [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) + _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) + _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) + stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) + if n_mutate_vars == 1 + stmt_ret = :(return out1) + else + stmt_ret = Expr(:return, Expr(:tuple, [symbol("out$i") for i=1:n_mutate_vars]...)) + end + + func_body = Expr(:block, stmt_call, stmt_ret) + func_head = Expr(:call, func_name, args...) + + func_def = Expr(:function, func_head, func_body) + eval(func_def) + + if accept_empty_mutate + args0 = args[1:n_used_vars+n_scalars] + func_head0 = Expr(:call, func_name, args0...) + _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] + stmt_call0 = Expr(:call, func_name, args0..., _mut_vars0...) + func_body0 = Expr(:block, stmt_call0) + func_head0 = Expr(:call, func_name, args0...) 
+ + func_def0 = Expr(:function, func_head0, func_body0) + eval(func_def0) + end end end end From e3a41197c9560397c86b2c48262b2ea99f5411ba Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 15:50:27 -0400 Subject: [PATCH 012/630] only the generic way of defining function is enough --- src/ndarray.jl | 115 ++++++++++++++++--------------------------------- 1 file changed, 36 insertions(+), 79 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 09b47acaf445..4ddd7ed6afdf 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -126,89 +126,46 @@ function _import_ndarray_functions() n_mutate_vars = ref_n_mut_vars[] type_mask = ref_type_mask[] accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 - if (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 - use_vars_range = 1:n_used_vars - scalar_range = n_used_vars+1:n_used_vars+n_scalars - else - scalar_range = 1:n_scalars - use_vars_range = n_scalars+1:n_scalars+n_used_vars - end + arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 - if n_mutate_vars == 1 && n_used_vars == 2 && n_scalars == 0 - println("defining binary $func_name") - # binary ndarray function - eval(quote - function $func_name(lhs::NDArray, rhs::NDArray, out::NDArray) - @assert(out.writable) - use_vars = MX_handle[lhs, rhs] - scalars = MX_float[] - mut_vars = MX_handle[out] - _invoke_mxfunction($func_handle, use_vars, scalars, mut_vars) - return out - end - end) - - if accept_empty_mutate - eval(quote - function $func_name(lhs::NDArray, rhs::NDArray) - $func_name(lhs, rhs, NDArray(_ndarray_alloc())) - end - end) - end - elseif n_mutate_vars == 1 && n_used_vars == 1 && n_scalars == 0 - println("defining unary $func_name") - # unary ndarray function - eval(quote - function $func_name(src::NDArray, out::NDArray) - @assert(out.writable) - use_vars = MX_handle[src] - scalars = MX_float[] - mut_vars = MX_handle[out] - _invoke_mxfunction($func_handle, use_vars, scalars, 
mut_vars) - return out - end - end) - - if accept_empty_mutate - eval(quote - function $func_name(src::NDArray) - $func_name(NDArray(_ndarray_alloc())) - end - end) - end - else - println("defining generic $func_name") - # general ndarray function + println("defining generic $func_name") + # general ndarray function + if arg_before_scalar args = vcat([Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], [Expr(:(::), symbol("sca$i"), AbstractFloat) for i=1:n_scalars], [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) - _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) - _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) - stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) - if n_mutate_vars == 1 - stmt_ret = :(return out1) - else - stmt_ret = Expr(:return, Expr(:tuple, [symbol("out$i") for i=1:n_mutate_vars]...)) - end - - func_body = Expr(:block, stmt_call, stmt_ret) - func_head = Expr(:call, func_name, args...) - - func_def = Expr(:function, func_head, func_body) - eval(func_def) - - if accept_empty_mutate - args0 = args[1:n_used_vars+n_scalars] - func_head0 = Expr(:call, func_name, args0...) - _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] - stmt_call0 = Expr(:call, func_name, args0..., _mut_vars0...) - func_body0 = Expr(:block, stmt_call0) - func_head0 = Expr(:call, func_name, args0...) - - func_def0 = Expr(:function, func_head0, func_body0) - eval(func_def0) - end + else + args = vcat([Expr(:(::), symbol("sca$i"), AbstractFloat) for i=1:n_scalars], + [Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + end + + _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) + _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) 
+ _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) + stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) + if n_mutate_vars == 1 + stmt_ret = :(return out1) + else + stmt_ret = Expr(:return, Expr(:tuple, [symbol("out$i") for i=1:n_mutate_vars]...)) + end + + func_body = Expr(:block, stmt_call, stmt_ret) + func_head = Expr(:call, func_name, args...) + + func_def = Expr(:function, func_head, func_body) + eval(func_def) + + if accept_empty_mutate + args0 = args[1:n_used_vars+n_scalars] + func_head0 = Expr(:call, func_name, args0...) + _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] + stmt_call0 = Expr(:call, func_name, args0..., _mut_vars0...) + func_body0 = Expr(:block, stmt_call0) + func_head0 = Expr(:call, func_name, args0...) + + func_def0 = Expr(:function, func_head0, func_body0) + eval(func_def0) end end end From 5ed258f609c28223a85072a8b8c90b5642743ae0 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 22:39:50 -0400 Subject: [PATCH 013/630] common array-like interface functions --- src/ndarray.jl | 86 ++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 80 insertions(+), 6 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 4ddd7ed6afdf..57fc8f1ee24f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -47,6 +47,9 @@ function empty(shape :: Int...) empty(shape) end +#------------------------------------------------------------ +# Interface functions similar to Julia Arrays +#------------------------------------------------------------ function Base.size(arr :: NDArray) ref_ndim = Ref{MX_uint}(0) ref_shape = Ref{Ptr{MX_uint}}(0) @@ -54,14 +57,85 @@ function Base.size(arr :: NDArray) arr, ref_ndim, ref_shape) tuple(map(Int, pointer_to_array(ref_shape[], ref_ndim[]))...) 
end +function Base.size(arr :: NDArray, dim :: Int) + size(arr)[dim] +end +function Base.length(arr :: NDArray) + prod(size(arr)) +end +function Base.ndims(arr :: NDArray) + length(size(arr)) +end +function Base.eltype(arr :: NDArray) + MX_float +end + +"Create zero-ed NDArray of specific shape" +function zeros{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) + arr = empty(shape, ctx) + arr[:] = 0 + return arr +end +function zeros(shape :: Int...) + zeros(shape) +end + +"Assign all elements of an NDArray to a scalar" +function Base.setindex!(arr :: NDArray, val :: Real, ::Colon) + ℵ_set_value(val, arr) + return arr +end + +#------------------------------------------------------------ +# Copying functions +#------------------------------------------------------------ +"Copy data between NDArrays" +function Base.copy!(dst :: NDArray, src :: NDArray) + if dst.handle == src.handle + warn("Copying an NDArray to itself") + return + end -function to_array(arr :: NDArray) - out = Array(MX_float, size(arr)) + ℵ_copy_to(src, dst) + return dst +end + +"Copy data from NDArray to Julia Array" +function Base.copy!(dst :: Array{MX_float}, src :: NDArray) + @assert size(dst) == size(src) @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{MX_float}, Csize_t), - arr, pointer(out), length(out)) - return out + src, pointer(dst), length(dst)) + return dst +end + +"Copy data from Julia Array to NDArray" +function Base.copy!{T<:Real}(dst :: NDArray, src :: Array{T}) + @assert size(dst) == size(src) + src = convert(Array{MX_float}, src) # this might involve copying + @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), + dst.handle, pointer(src), length(src)) + return dst +end + +"Create copy: NDArray -> Julia Array" +function Base.copy(arr :: NDArray) + j_arr = Array(MX_float, size(arr)) + Base.copy!(j_arr, arr) end +"Create copy: NDArray -> NDArray in a given context" +function Base.copy(arr :: NDArray, ctx :: Context) + dst = 
NDArray(_ndarray_alloc(size(arr), ctx, true)) + Base.copy!(dst, arr) +end + +"Create copy: Julia Array -> NDArray in a given context" +function Base.copy{T<:Real}(arr :: Array{T}, ctx :: Context) + dst = NDArray(_ndarray_alloc(size(arr), ctx, true)) + Base.copy!(dst, arr) +end + + ################################################################################ # NDArray functions dynamically exported from libmx ################################################################################ @@ -132,10 +206,10 @@ function _import_ndarray_functions() # general ndarray function if arg_before_scalar args = vcat([Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), symbol("sca$i"), AbstractFloat) for i=1:n_scalars], + [Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) else - args = vcat([Expr(:(::), symbol("sca$i"), AbstractFloat) for i=1:n_scalars], + args = vcat([Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], [Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) end From 59dcb7ead324b50206077e48f8ff6f6da71c7ac3 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 3 Oct 2015 23:43:57 -0400 Subject: [PATCH 014/630] inplace operator for + --- src/context.jl | 4 ++-- src/ndarray.jl | 57 +++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 58 insertions(+), 3 deletions(-) diff --git a/src/context.jl b/src/context.jl index af8cfd725a49..3dbf7e6e482a 100644 --- a/src/context.jl +++ b/src/context.jl @@ -6,8 +6,8 @@ type Context old_ctx :: Nullable{Context} end -Context(dev_type :: CONTEXT_TYPE, dev_id = 0) = - Context(dev_type, dev_id, Nullable{Context}()) +Context(dev_type :: Union{CONTEXT_TYPE, Integer}, dev_id :: Integer = 0) = + Context(convert(CONTEXT_TYPE, dev_type), convert(Cint, dev_id), Nullable{Context}()) # global default context diff --git a/src/ndarray.jl b/src/ndarray.jl index 
57fc8f1ee24f..1a05aabc9e69 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -40,6 +40,14 @@ Base.cconvert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) ################################################################################ # NDArray functions exported to the users ################################################################################ +function context(arr :: NDArray) + ref_typeid = Ref{Cint}(0) + ref_devid = Ref{Cint}(0) + @mxcall(:MXNDArrayGetContext, (MX_handle, Ref{Cint}, Ref{Cint}), + arr, ref_typeid, ref_devid) + return Context(ref_typeid[], ref_devid[]) +end + function empty{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) NDArray(_ndarray_alloc(shape, ctx, false)) end @@ -96,7 +104,7 @@ function Base.copy!(dst :: NDArray, src :: NDArray) return end - ℵ_copy_to(src, dst) + ℵ_copyto(src, dst) return dst end @@ -136,6 +144,53 @@ function Base.copy{T<:Real}(arr :: Array{T}, ctx :: Context) end +#------------------------------------------------------------ +# Basic arithmetics +#------------------------------------------------------------ +""" +Julia does not support re-definiton of += operator (like __iadd__ in python), +When one write a += b, it gets translated to a = a+b. a+b will allocate new +memory for the results, and the newly allocated NDArray object is then assigned +back to a, while the original contents in a is discarded. This is very inefficient +when we want to do inplace update. + +This macro is a simple utility to implement this behavior. Write + + @mx.inplace a += b + +will translate into + + mx.add!(a, b) + +which will do inplace adding of the contents of b into a. +""" +macro inplace(stmt) + if stmt.head == :+= + Expr(:call, :add!, esc(stmt.args[1]), esc(stmt.args[2])) + else + error("unsupported inplace translation for $stmt") + end +end + +function add!(dst :: NDArray, args :: Union{Real, NDArray}...) 
+ for arg in args + if isa(arg, Real) + ℵ_plus_scalar(dst, arg, dst) + else + ℵ_plus(dst, arg, dst) + end + end + return dst +end + +# We fix the first arg to be NDArray to avoid ambiguity +import Base.+ +function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) + ret = copy(arg0, context(arg0)) + add!(ret, args...) +end + + ################################################################################ # NDArray functions dynamically exported from libmx ################################################################################ From 678f6f2541c3468da4ab3b0e8553873b8553527e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 5 Oct 2015 01:53:14 -0400 Subject: [PATCH 015/630] basic test for ndarray --- src/ndarray.jl | 52 +++++++++++++++++++--------------------- test/runtests.jl | 3 +-- test/unittest/ndarray.jl | 37 ++++++++++++++++++++++++++++ 3 files changed, 62 insertions(+), 30 deletions(-) create mode 100644 test/unittest/ndarray.jl diff --git a/src/ndarray.jl b/src/ndarray.jl index 1a05aabc9e69..e582b949375f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -58,23 +58,24 @@ end #------------------------------------------------------------ # Interface functions similar to Julia Arrays #------------------------------------------------------------ -function Base.size(arr :: NDArray) +import Base: size, length, ndims, eltype +function size(arr :: NDArray) ref_ndim = Ref{MX_uint}(0) ref_shape = Ref{Ptr{MX_uint}}(0) @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), arr, ref_ndim, ref_shape) tuple(map(Int, pointer_to_array(ref_shape[], ref_ndim[]))...) 
end -function Base.size(arr :: NDArray, dim :: Int) +function size(arr :: NDArray, dim :: Int) size(arr)[dim] end -function Base.length(arr :: NDArray) +function length(arr :: NDArray) prod(size(arr)) end -function Base.ndims(arr :: NDArray) +function ndims(arr :: NDArray) length(size(arr)) end -function Base.eltype(arr :: NDArray) +function eltype(arr :: NDArray) MX_float end @@ -88,28 +89,30 @@ function zeros(shape :: Int...) zeros(shape) end +import Base: setindex! "Assign all elements of an NDArray to a scalar" -function Base.setindex!(arr :: NDArray, val :: Real, ::Colon) - ℵ_set_value(val, arr) +function setindex!(arr :: NDArray, val :: Real, ::Colon) + _set_value(val, arr) return arr end #------------------------------------------------------------ # Copying functions #------------------------------------------------------------ +import Base: copy!, copy "Copy data between NDArrays" -function Base.copy!(dst :: NDArray, src :: NDArray) +function copy!(dst :: NDArray, src :: NDArray) if dst.handle == src.handle warn("Copying an NDArray to itself") return end - ℵ_copyto(src, dst) + _copyto(src, dst) return dst end "Copy data from NDArray to Julia Array" -function Base.copy!(dst :: Array{MX_float}, src :: NDArray) +function copy!(dst :: Array{MX_float}, src :: NDArray) @assert size(dst) == size(src) @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{MX_float}, Csize_t), src, pointer(dst), length(dst)) @@ -117,7 +120,7 @@ function Base.copy!(dst :: Array{MX_float}, src :: NDArray) end "Copy data from Julia Array to NDArray" -function Base.copy!{T<:Real}(dst :: NDArray, src :: Array{T}) +function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) @assert size(dst) == size(src) src = convert(Array{MX_float}, src) # this might involve copying @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), @@ -126,21 +129,21 @@ function Base.copy!{T<:Real}(dst :: NDArray, src :: Array{T}) end "Create copy: NDArray -> Julia Array" -function Base.copy(arr :: 
NDArray) +function copy(arr :: NDArray) j_arr = Array(MX_float, size(arr)) - Base.copy!(j_arr, arr) + copy!(j_arr, arr) end "Create copy: NDArray -> NDArray in a given context" -function Base.copy(arr :: NDArray, ctx :: Context) +function copy(arr :: NDArray, ctx :: Context) dst = NDArray(_ndarray_alloc(size(arr), ctx, true)) - Base.copy!(dst, arr) + copy!(dst, arr) end "Create copy: Julia Array -> NDArray in a given context" -function Base.copy{T<:Real}(arr :: Array{T}, ctx :: Context) - dst = NDArray(_ndarray_alloc(size(arr), ctx, true)) - Base.copy!(dst, arr) +function copy{T<:Real}(arr :: Array{T}, ctx :: Context) + dst = NDArray(_ndarray_alloc(size(arr), ctx, false)) + copy!(dst, arr) end @@ -175,9 +178,9 @@ end function add!(dst :: NDArray, args :: Union{Real, NDArray}...) for arg in args if isa(arg, Real) - ℵ_plus_scalar(dst, arg, dst) + _plus_scalar(dst, arg, dst) else - ℵ_plus(dst, arg, dst) + _plus(dst, arg, dst) end end return dst @@ -230,13 +233,7 @@ function _import_ndarray_functions() (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) - # We attach the symbol ℵ (\aleph) to those functions to indicate that they are - # dynamically imported from libmxnet - # - # A first attempt was to collect all those functions in a submodule _lib. But working - # with submodules in Julia is really painful, especially when macros (@mxcall) are - # involved in a function that is to be dynamically generated via eval. 
- func_name = symbol("ℵ" * bytestring(ref_name[])) + func_name = symbol(bytestring(ref_name[])) #---------------------------------------- # get function specification @@ -257,7 +254,6 @@ function _import_ndarray_functions() accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 - println("defining generic $func_name") # general ndarray function if arg_before_scalar args = vcat([Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], diff --git a/test/runtests.jl b/test/runtests.jl index d6005fcf699d..d1aa01edbc29 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,5 +1,4 @@ using MXNet using Base.Test -# write your own tests here -@test 1 == 1 +include("unittest/ndarray.jl") diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl new file mode 100644 index 000000000000..7ead256fcb66 --- /dev/null +++ b/test/unittest/ndarray.jl @@ -0,0 +1,37 @@ +module TestNDArray +using MXNet +using Base.Test + +################################################################################ +# Test Implementations +################################################################################ +function reldiff(a, b) + diff = sum(abs(a - b)) + norm = sum(abs(a)) + return diff / norm +end + +function test_copy() + dims = tuple(rand(1:10, rand(1:6))...) 
+ tensor = rand(mx.MX_float, dims) + + info("NDArray::copy::dims = $dims") + + # copy to NDArray and back + array = copy(tensor, mx.DEFAULT_CONTEXT) + tensor2 = copy(array) + @test reldiff(tensor, tensor2) < 1e-6 + + # copy between NDArray + array2 = copy(array, mx.DEFAULT_CONTEXT) + tensor2 = copy(array2) + @test reldiff(tensor, tensor2) < 1e-6 +end + + +################################################################################ +# Run tests +################################################################################ +test_copy() + +end From 525c5b246bbad97629deaed96b0bbb60bf43571d Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 5 Oct 2015 01:55:39 -0400 Subject: [PATCH 016/630] comment out release testing until v0.4 is released --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 00656a7e0f7d..90bb9e569aea 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,7 @@ os: - linux - osx julia: - - release + #- release - nightly notifications: email: false From e0a0bbe17191503414fcbeb762f2bc0b57ef3bfd Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 5 Oct 2015 10:55:05 -0400 Subject: [PATCH 017/630] fix copy from and to CPU --- src/ndarray.jl | 2 +- test/runtests.jl | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index e582b949375f..13ddf983050a 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -142,7 +142,7 @@ end "Create copy: Julia Array -> NDArray in a given context" function copy{T<:Real}(arr :: Array{T}, ctx :: Context) - dst = NDArray(_ndarray_alloc(size(arr), ctx, false)) + dst = empty(size(arr), ctx) copy!(dst, arr) end diff --git a/test/runtests.jl b/test/runtests.jl index d1aa01edbc29..73cc6eae494d 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -2,3 +2,4 @@ using MXNet using Base.Test include("unittest/ndarray.jl") + From 6294c93623d20619e0a75e1860ca289ed71a6e98 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: 
Tue, 6 Oct 2015 00:15:48 -0400 Subject: [PATCH 018/630] ndarray plus and minus unit-tests --- src/ndarray.jl | 27 ++++++++++++++-- test/unittest/ndarray.jl | 67 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 91 insertions(+), 3 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 13ddf983050a..32ae66e1cf80 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -168,8 +168,10 @@ will translate into which will do inplace adding of the contents of b into a. """ macro inplace(stmt) - if stmt.head == :+= + if stmt.head == :+= || stmt.head == :.+= Expr(:call, :add!, esc(stmt.args[1]), esc(stmt.args[2])) + elseif stmt.head == :-= || stmt.head == :.-= + Expr(:call, :sub!, esc(stmt.args[1]), esc(stmt.args[2])) else error("unsupported inplace translation for $stmt") end @@ -187,12 +189,33 @@ function add!(dst :: NDArray, args :: Union{Real, NDArray}...) end # We fix the first arg to be NDArray to avoid ambiguity -import Base.+ +import Base: +, .+ function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) ret = copy(arg0, context(arg0)) add!(ret, args...) end +function .+(arg0 :: NDArray, args :: Union{Real, NDArray}...) + +(arg0, args...) 
+end +function sub!(dst :: NDArray, arg :: Union{Real, NDArray}) + if isa(arg, Real) + _minus_scalar(dst, arg, dst) + else + _minus(dst, arg, dst) + end +end +import Base: -, .- +function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) + ret = copy(arg0, context(arg0)) + sub!(ret, arg1) +end +function .-(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) + -(arg0, arg1) +end +function -(arg0 :: NDArray) + _mul_scalar(arg0, -1.0) +end ################################################################################ # NDArray functions dynamically exported from libmx diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 7ead256fcb66..c066745f79ca 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -11,8 +11,17 @@ function reldiff(a, b) return diff / norm end +function rand_dims() + tuple(rand(1:10, rand(1:6))...) +end +function rand_tensors{N}(dims::NTuple{N, Int}) + tensor = rand(mx.MX_float, dims) + array = copy(tensor, mx.DEFAULT_CONTEXT) + return (tensor, array) +end + function test_copy() - dims = tuple(rand(1:10, rand(1:6))...) 
+ dims = rand_dims() tensor = rand(mx.MX_float, dims) info("NDArray::copy::dims = $dims") @@ -28,10 +37,66 @@ function test_copy() @test reldiff(tensor, tensor2) < 1e-6 end +function test_plus() + dims = rand_dims() + t1, a1 = rand_tensors(dims) + t2, a2 = rand_tensors(dims) + t3, a3 = rand_tensors(dims) + + info("NDArray::plus::dims = $dims") + + @test reldiff(t1+t2, copy(a1+a2)) < 1e-6 + @test reldiff(t1.+t2, copy(a1.+a2)) < 1e-6 + + @test reldiff(t1+t2+t3, copy(a1+a2+a3)) < 1e-6 + + # test inplace += operation + a0 = a1 # keep a reference to a1 + @mx.inplace a1 += a2 # perform inplace += + @test a0 == a1 # make sure they are still the same object + @test reldiff(copy(a0), copy(a1)) < 1e-6 + @test reldiff(copy(a1), t1+t2) < 1e-6 + + # test scalar + scalar = rand() + @test reldiff(t3 + scalar, copy(a3 + scalar)) < 1e-6 + @test reldiff(t2+scalar+t3, copy(a2+scalar+a3)) < 1e-6 +end + +function test_minus() + dims = rand_dims() + t1, a1 = rand_tensors(dims) + t2, a2 = rand_tensors(dims) + + info("NDArray::minus::dims = $dims") + + @test reldiff(t1-t2, copy(a1-a2)) < 1e-6 + @test reldiff(t1.-t2, copy(a1.-a2)) < 1e-6 + + @test reldiff(-t1, copy(-a1)) < 1e-6 + + # make sure the negation is not in-place, so a1 is not changed after previous + # statement is executed + @test reldiff(t1, copy(a1)) < 1e-6 + + # test inplace -= operation + a0 = a1 # keep a reference to a1 + @mx.inplace a1 -= a2 # perform inplace -= + @test a0 == a1 # make sure they are still the same object + @test reldiff(copy(a0), copy(a1)) < 1e-6 + @test reldiff(copy(a1), t1-t2) < 1e-6 + + # test scalar + scalar = rand() + @test reldiff(t2 - scalar, copy(a2 - scalar)) < 1e-6 +end + ################################################################################ # Run tests ################################################################################ test_copy() +test_plus() +test_minus() end From d6a13671ae6db393b7dfbca97729b6e653bfc059 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 
09:09:14 -0400 Subject: [PATCH 019/630] use more meaningful name for sub! and add! --- src/ndarray.jl | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 32ae66e1cf80..482adcb4f154 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -163,21 +163,21 @@ This macro is a simple utility to implement this behavior. Write will translate into - mx.add!(a, b) + mx.add_to!(a, b) which will do inplace adding of the contents of b into a. """ macro inplace(stmt) if stmt.head == :+= || stmt.head == :.+= - Expr(:call, :add!, esc(stmt.args[1]), esc(stmt.args[2])) + Expr(:call, :add_to!, esc(stmt.args[1]), esc(stmt.args[2])) elseif stmt.head == :-= || stmt.head == :.-= - Expr(:call, :sub!, esc(stmt.args[1]), esc(stmt.args[2])) + Expr(:call, :sub_from!, esc(stmt.args[1]), esc(stmt.args[2])) else error("unsupported inplace translation for $stmt") end end -function add!(dst :: NDArray, args :: Union{Real, NDArray}...) +function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) for arg in args if isa(arg, Real) _plus_scalar(dst, arg, dst) @@ -192,13 +192,13 @@ end import Base: +, .+ function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) ret = copy(arg0, context(arg0)) - add!(ret, args...) + add_to!(ret, args...) end function .+(arg0 :: NDArray, args :: Union{Real, NDArray}...) +(arg0, args...) 
end -function sub!(dst :: NDArray, arg :: Union{Real, NDArray}) +function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) if isa(arg, Real) _minus_scalar(dst, arg, dst) else @@ -208,7 +208,7 @@ end import Base: -, .- function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) - sub!(ret, arg1) + sub_from!(ret, arg1) end function .-(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) -(arg0, arg1) From d33dff8c214696924a78a2bdcbf981206224f717 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 09:16:23 -0400 Subject: [PATCH 020/630] assignment operator for NDArray --- src/ndarray.jl | 6 ++++++ test/unittest/ndarray.jl | 27 ++++++++++++++++++++++++++- 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 482adcb4f154..b1d9e2790204 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -95,6 +95,12 @@ function setindex!(arr :: NDArray, val :: Real, ::Colon) _set_value(val, arr) return arr end +function setindex!{T<:Real}(arr :: NDArray, val :: Array{T}, ::Colon) + copy!(arr, val) +end +function setindex!(arr :: NDArray, val :: NDArray, ::Colon) + copy!(arr, val) +end #------------------------------------------------------------ # Copying functions diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index c066745f79ca..084545bd3514 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -8,7 +8,7 @@ using Base.Test function reldiff(a, b) diff = sum(abs(a - b)) norm = sum(abs(a)) - return diff / norm + return diff / (norm + 1e-10) end function rand_dims() @@ -37,6 +37,30 @@ function test_copy() @test reldiff(tensor, tensor2) < 1e-6 end +function test_assign() + dims = rand_dims() + tensor = rand(mx.MX_float, dims) + + info("NDArray::assign::dims = $dims") + + # Julia Array -> NDArray assignment + array = mx.empty(size(tensor)) + array[:]= tensor + @test reldiff(tensor, copy(array)) < 1e-6 + + array2 = mx.zeros(size(tensor)) + @test 
reldiff(zeros(size(tensor)), copy(array2)) < 1e-6 + + # scalar -> NDArray assignment + scalar = rand() + array2[:] = scalar + @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < 1e-6 + + # NDArray -> NDArray assignment + array[:] = array2 + @test reldiff(zeros(size(tensor))+scalar, copy(array)) < 1e-6 +end + function test_plus() dims = rand_dims() t1, a1 = rand_tensors(dims) @@ -96,6 +120,7 @@ end # Run tests ################################################################################ test_copy() +test_assign() test_plus() test_minus() From b2acdc719fb0b0ac1c8264b29b8ccbd53cd5d1cf Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 09:31:38 -0400 Subject: [PATCH 021/630] test ndarray elem-wise mul --- src/ndarray.jl | 31 +++++++++++++++++++++++++++++++ test/unittest/ndarray.jl | 22 ++++++++++++++++++++++ 2 files changed, 53 insertions(+) diff --git a/src/ndarray.jl b/src/ndarray.jl index b1d9e2790204..48385cd31d26 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -178,6 +178,10 @@ macro inplace(stmt) Expr(:call, :add_to!, esc(stmt.args[1]), esc(stmt.args[2])) elseif stmt.head == :-= || stmt.head == :.-= Expr(:call, :sub_from!, esc(stmt.args[1]), esc(stmt.args[2])) + elseif stmt.head == :.*= + Expr(:call, :mul_to!, esc(stmt.args[1]), esc(stmt.args[2])) + elseif stmt.head == :./= + Expr(:call, :div_from!, esc(stmt.args[1]), esc(stmt.args[2])) else error("unsupported inplace translation for $stmt") end @@ -223,6 +227,33 @@ function -(arg0 :: NDArray) _mul_scalar(arg0, -1.0) end +function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) + if isa(arg, Real) + _mul_scalar(dst, arg, dst) + else + _mul(dst, arg, dst) + end + return dst +end +import Base: .*, * +function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) + ret = copy(arg0, context(arg0)) + mul_to!(ret, arg) +end + +function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + if isa(arg, Real) + _div_scalar(dst, arg, dst) + else + _div(dst, arg, dst) + end +end +import Base: 
./ +function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) + ret = copy(arg0, context(arg0)) + div_from!(ret, arg) +end + ################################################################################ # NDArray functions dynamically exported from libmx ################################################################################ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 084545bd3514..ddf2084e1ed3 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -115,6 +115,27 @@ function test_minus() @test reldiff(t2 - scalar, copy(a2 - scalar)) < 1e-6 end +function test_mul() + dims = rand_dims() + t1, a1 = rand_tensors(dims) + t2, a2 = rand_tensors(dims) + t3, a3 = rand_tensors(dims) + + info("NDArray::mul::dims = $dims") + + @test reldiff(t1.*t2, copy(a1.*a2)) < 1e-6 + + # test inplace .*= operation + a0 = a1 # keep a reference to a1 + @mx.inplace a1 .*= a2 # perform inplace .*= + @test reldiff(copy(a0), copy(a1)) < 1e-6 + @test reldiff(copy(a1), t1.*t2) < 1e-6 + + # test scalar + scalar = rand() + @test reldiff(t3 * scalar, copy(a3 .* scalar)) < 1e-6 +end + ################################################################################ # Run tests @@ -123,5 +144,6 @@ test_copy() test_assign() test_plus() test_minus() +test_mul() end From 46675836e9cf0b600e8aca0582f1d72a20005172 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 09:36:38 -0400 Subject: [PATCH 022/630] unittest ndarray div --- test/unittest/ndarray.jl | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index ddf2084e1ed3..15a3fea334ed 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -128,6 +128,7 @@ function test_mul() # test inplace .*= operation a0 = a1 # keep a reference to a1 @mx.inplace a1 .*= a2 # perform inplace .*= + @test a0 == a1 # make sure they are still the same object @test reldiff(copy(a0), copy(a1)) < 1e-6 @test 
reldiff(copy(a1), t1.*t2) < 1e-6 @@ -136,6 +137,29 @@ function test_mul() @test reldiff(t3 * scalar, copy(a3 .* scalar)) < 1e-6 end +function test_div() + dims = rand_dims() + t1, a1 = rand_tensors(dims) + t2, a2 = rand_tensors(dims) + + info("NDArray::div::dims = $dims") + t2 .+= 2 # avoid numerical instability + @mx.inplace a2 .+= 2 + + @test reldiff(t1 ./ t2, copy(a1 ./ a2)) < 1e-6 + + # test inplace -= operation + a0 = a1 # keep a reference to a2 + @mx.inplace a1 ./= a2 # perform inplace ./= + @test a0 == a1 # make sure they are still the same object + @test reldiff(copy(a0), copy(a1)) < 1e-6 + @test reldiff(copy(a1), t1 ./ t2) < 1e-6 + + # test scalar + scalar = rand() + 2 + @test reldiff(t2./scalar, copy(a2./scalar)) < 1e-6 +end + ################################################################################ # Run tests @@ -145,5 +169,6 @@ test_assign() test_plus() test_minus() test_mul() +test_div() end From 12ca85a6161fabc85a7941f654a1dad5b51d98e2 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 10:09:47 -0400 Subject: [PATCH 023/630] require julia v0.4 to run --- REQUIRE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/REQUIRE b/REQUIRE index 2c4ef82cb1ab..d5d646713dcf 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1 +1 @@ -julia 0.3 +julia 0.4 From 65cc788a9c161c0626a0508043b264373b12551f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 10:39:57 -0400 Subject: [PATCH 024/630] basic symbol type --- src/MXNet.jl | 1 + src/init.jl | 1 + src/ndarray.jl | 3 +-- src/symbol.jl | 19 +++++++++++++++++++ 4 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 src/symbol.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index efdf1c76c0c9..63748b278d15 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -8,6 +8,7 @@ module mx include("init.jl") include("context.jl") include("ndarray.jl") +include("symbol.jl") end # mx diff --git a/src/init.jl b/src/init.jl index 2a9844b0cd2f..6baa894cdd4f 100644 --- a/src/init.jl +++ 
b/src/init.jl @@ -93,4 +93,5 @@ end @mx_define_handle_t(MX_NDArrayHandle, MXNDArrayFree) @mx_define_handle_t(MX_FunctionHandle, nop) +@mx_define_handle_t(MX_SymbolHandle, MXSymbolFree) diff --git a/src/ndarray.jl b/src/ndarray.jl index 48385cd31d26..9845edd7eedd 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,5 +1,4 @@ export NDArray -export empty # create a NDArray handle of specific shape function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) @@ -255,7 +254,7 @@ function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) end ################################################################################ -# NDArray functions dynamically exported from libmx +# NDArray functions dynamically imported from libmxnet ################################################################################ function _invoke_mxfunction(func_handle::MX_handle, use_vars, scalars, mut_vars) @mxcall(:MXFuncInvoke, diff --git a/src/symbol.jl b/src/symbol.jl new file mode 100644 index 000000000000..e15d69859a72 --- /dev/null +++ b/src/symbol.jl @@ -0,0 +1,19 @@ +export Symbol + + +################################################################################ +# Symbol Type +################################################################################ +type Symbol + handle :: MX_SymbolHandle +end + +function variable(name :: Union{Base.Symbol, AbstractString}) + hdr_ref = Ref{MX_handle} + @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) + Symbol(MX_SymbolHandle(hdr_ref[])) +end + +################################################################################ +# Atomic Symbol functions dynamically exported from libmxnet +################################################################################ From 4170e376119534f52b4c851aeba637579a5c171b Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 11:06:25 -0400 Subject: [PATCH 025/630] possibility to add dynamic doc string to imported functions --- 
src/ndarray.jl | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/src/ndarray.jl b/src/ndarray.jl index 9845edd7eedd..4d6bd5407eaa 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -267,6 +267,25 @@ end ACCEPT_EMPTY_MUTATE_TARGET = (1 << 2) ) +""" +Import dynamic functions for NDArrays. The arguments to the functions are typically ordered +as + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) + +unless NDARRAY_ARG_BEFORE_SCALAR is not set. In this case, the scalars are put before the input arguments: + + func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) + +If ACCEPT_EMPTY_MUTATE_TARGET is set. An overloaded function without the output arguments will also be defined: + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) + +Upon calling, the output arguments will be automatically initialized with empty NDArrays. + +Those functions always return the output arguments. If there is only one output (the typical situation), that +object (NDArray) is returned. Otherwise, a tuple containing all the outputs will be returned. 
+""" function _import_ndarray_functions() n_ref = Ref{MX_uint}(0) h_ref = Ref{Ptr{MX_handle}}(0) @@ -351,6 +370,9 @@ function _import_ndarray_functions() func_def0 = Expr(:function, func_head0, func_body0) eval(func_def0) end + + # TODO: add doc string + # eval(:(@doc($doc_str, $func_name))) end end From 6490928f4ee776e6527b070baacde3b4453acbc4 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 12:20:36 -0400 Subject: [PATCH 026/630] try setting up travis CI --- .travis.yml | 25 ++++++++++++++++++++++++- src/init.jl | 2 +- test/travis/build_mxnet.sh | 12 ++++++++++++ test/travis/setup_env.sh | 1 + 4 files changed, 38 insertions(+), 2 deletions(-) create mode 100755 test/travis/build_mxnet.sh create mode 100755 test/travis/setup_env.sh diff --git a/.travis.yml b/.travis.yml index 90bb9e569aea..687835d65a97 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,10 +2,33 @@ language: julia os: - linux - - osx + #- osx julia: #- release - nightly + +# dependent apt packages +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - doxygen + - wget + - git + - libcurl4-openssl-dev + - unzip + - libatlas-dev + - libopencv-dev + - gcc-4.8 + - g++-4.8 + - clang + +before_install: + - export TRAVIS=tests/travis + - source $TRAVIS/setup_env.sh + - source $TRAVIS/build_mxnet.sh + notifications: email: false # uncomment the following lines to override the default test script diff --git a/src/init.jl b/src/init.jl index 6baa894cdd4f..cef8e54c03e0 100644 --- a/src/init.jl +++ b/src/init.jl @@ -18,7 +18,7 @@ typealias char_pp Ptr{char_p} ################################################################################ # Initialization and library API entrance ################################################################################ -const MXNET_LIB = Libdl.find_library(["libmxnet.so"], ["/Users/chiyuan/work/mxnet/mxnet/lib"]) +const MXNET_LIB = Libdl.find_library(["libmxnet.so"], ["$(get(ENV,"MXNET_HOME",""))/lib"]) function __init__() 
_import_ndarray_functions() diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh new file mode 100755 index 000000000000..e6ff20221a39 --- /dev/null +++ b/test/travis/build_mxnet.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +git clone --recursive https://github.com/dmlc/mxnet +cd mxnet + +if [ ! -f config.mk ]; then + echo "Use the default config.m" + cp make/config.mk config.mk +fi + +make -j4 +export MXNET_HOME=$PWD diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh new file mode 100755 index 000000000000..a9bf588e2f88 --- /dev/null +++ b/test/travis/setup_env.sh @@ -0,0 +1 @@ +#!/bin/bash From 573eedd2129da5ad3167328badaa09531139bd1b Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 12:23:00 -0400 Subject: [PATCH 027/630] fix travis script --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 687835d65a97..8bbbd4101b76 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,5 @@ # Documentation: http://docs.travis-ci.com/user/languages/julia/ +sudo: false language: julia os: - linux @@ -25,7 +26,7 @@ addons: - clang before_install: - - export TRAVIS=tests/travis + - export TRAVIS=test/travis - source $TRAVIS/setup_env.sh - source $TRAVIS/build_mxnet.sh From b1ca0625d9244ca286acd02bce9f632d20d746cf Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 12:30:25 -0400 Subject: [PATCH 028/630] more fix for travis script --- .travis.yml | 8 +++----- src/symbol.jl | 3 ++- test/travis/build_mxnet.sh | 7 ++++--- test/travis/run_test.sh | 4 ++++ test/travis/setup_env.sh | 5 +++++ 5 files changed, 18 insertions(+), 9 deletions(-) create mode 100755 test/travis/run_test.sh diff --git a/.travis.yml b/.travis.yml index 8bbbd4101b76..dc09f6b4252d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -23,7 +23,6 @@ addons: - libopencv-dev - gcc-4.8 - g++-4.8 - - clang before_install: - export TRAVIS=test/travis @@ -32,7 +31,6 @@ before_install: notifications: email: false -# 
uncomment the following lines to override the default test script -#script: -# - if [[ -a .git/shallow ]]; then git fetch --unshallow; fi -# - julia -e 'Pkg.clone(pwd()); Pkg.build("MXNet"); Pkg.test("MXNet"; coverage=true)' + +script: + - source $TRAVIS/run_test.sh diff --git a/src/symbol.jl b/src/symbol.jl index e15d69859a72..9ff835703091 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -14,6 +14,7 @@ function variable(name :: Union{Base.Symbol, AbstractString}) Symbol(MX_SymbolHandle(hdr_ref[])) end + ################################################################################ -# Atomic Symbol functions dynamically exported from libmxnet +# Atomic Symbol functions dynamically imported from libmxnet ################################################################################ diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index e6ff20221a39..4e95ad32590d 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -1,12 +1,13 @@ #!/bin/bash -git clone --recursive https://github.com/dmlc/mxnet -cd mxnet +git clone --recursive https://github.com/dmlc/mxnet __mxnet_build +cd __mxnet_build if [ ! 
-f config.mk ]; then echo "Use the default config.m" cp make/config.mk config.mk fi -make -j4 +make -j4 || exit 1 + export MXNET_HOME=$PWD diff --git a/test/travis/run_test.sh b/test/travis/run_test.sh new file mode 100755 index 000000000000..a576c69f6979 --- /dev/null +++ b/test/travis/run_test.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +if [[ -a .git/shallow ]]; then git fetch --unshallow; fi +julia -e 'Pkg.clone(pwd()); Pkg.build("MXNet"); Pkg.test("MXNet"; coverage=true)' diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh index a9bf588e2f88..73d3b52acc55 100755 --- a/test/travis/setup_env.sh +++ b/test/travis/setup_env.sh @@ -1 +1,6 @@ #!/bin/bash + +if [ ${TRAVIS_OS_NAME} == "linux" ]; then + export CXX="g++-4.8" + export CC="gcc-4.8" +fi From 7a66ef4a723ddad0a24da2dcb201d1b38ce92389 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 12:35:23 -0400 Subject: [PATCH 029/630] enable osx build on travis --- .travis.yml | 2 +- test/travis/setup_env.sh | 15 +++++++++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index dc09f6b4252d..cae2bfbb7a22 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,7 @@ sudo: false language: julia os: - linux - #- osx + - osx julia: #- release - nightly diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh index 73d3b52acc55..b71b30761359 100755 --- a/test/travis/setup_env.sh +++ b/test/travis/setup_env.sh @@ -1,6 +1,17 @@ #!/bin/bash +echo "##########################" +echo $TRAVIS_OS_NAME + if [ ${TRAVIS_OS_NAME} == "linux" ]; then - export CXX="g++-4.8" - export CC="gcc-4.8" + alias g++="g++-4.8" + alias gcc="gcc-4.8" +fi + +if [ ${TRAVIS_OS_NAME} == "osx" ]; then + brew update + brew tap homebrew/science + brew info opencv + brew install graphviz + brew install opencv fi From b22c83f62eec779cf055cea46cfafa94ae52e3e4 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 12:42:07 -0400 Subject: [PATCH 030/630] fix build script --- 
test/travis/build_mxnet.sh | 14 ++++++++++++++ test/travis/setup_env.sh | 5 ----- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index 4e95ad32590d..549afb66e37e 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -6,6 +6,20 @@ cd __mxnet_build if [ ! -f config.mk ]; then echo "Use the default config.m" cp make/config.mk config.mk + + if [ ${TRAVIS_OS_NAME} == "linux" ]; then + sed -i 's/export CC = gcc/export CC = gcc-4.8/g' config.mk + sed -i 's/export CXX = g++/export CXX = g++4.8/g' config.mk + echo "==============================" + gcc --version + gcc-4.8 --version + fi + + if [ ${TRAVIS_OS_NAME} == "osx" ]; then + sed -i 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk + fi + + cat config.mk fi make -j4 || exit 1 diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh index b71b30761359..c654b1d95533 100755 --- a/test/travis/setup_env.sh +++ b/test/travis/setup_env.sh @@ -3,11 +3,6 @@ echo "##########################" echo $TRAVIS_OS_NAME -if [ ${TRAVIS_OS_NAME} == "linux" ]; then - alias g++="g++-4.8" - alias gcc="gcc-4.8" -fi - if [ ${TRAVIS_OS_NAME} == "osx" ]; then brew update brew tap homebrew/science From f9b88b0b81de116d2e1152b07dce8b815351e6eb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 12:44:49 -0400 Subject: [PATCH 031/630] fix typo --- test/travis/build_mxnet.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index 549afb66e37e..6d58db9a7461 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -9,7 +9,7 @@ if [ ! 
-f config.mk ]; then if [ ${TRAVIS_OS_NAME} == "linux" ]; then sed -i 's/export CC = gcc/export CC = gcc-4.8/g' config.mk - sed -i 's/export CXX = g++/export CXX = g++4.8/g' config.mk + sed -i 's/export CXX = g++/export CXX = g++-4.8/g' config.mk echo "==============================" gcc --version gcc-4.8 --version From bdd798ae0857b45f4fc922264c77f365c1a903d6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 12:48:32 -0400 Subject: [PATCH 032/630] properly switch directory --- test/travis/build_mxnet.sh | 1 + test/travis/setup_env.sh | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index 6d58db9a7461..35a064a8528b 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -25,3 +25,4 @@ fi make -j4 || exit 1 export MXNET_HOME=$PWD +cd .. diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh index c654b1d95533..242e034120b2 100755 --- a/test/travis/setup_env.sh +++ b/test/travis/setup_env.sh @@ -4,7 +4,7 @@ echo "##########################" echo $TRAVIS_OS_NAME if [ ${TRAVIS_OS_NAME} == "osx" ]; then - brew update + brew update >/dev/null 2>&1 brew tap homebrew/science brew info opencv brew install graphviz From 7713589761802ca5d22f5154b2ae286013fe346e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 12:50:05 -0400 Subject: [PATCH 033/630] fix sed option --- test/travis/build_mxnet.sh | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index 35a064a8528b..28073f0b05a5 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -10,13 +10,10 @@ if [ ! 
-f config.mk ]; then if [ ${TRAVIS_OS_NAME} == "linux" ]; then sed -i 's/export CC = gcc/export CC = gcc-4.8/g' config.mk sed -i 's/export CXX = g++/export CXX = g++-4.8/g' config.mk - echo "==============================" - gcc --version - gcc-4.8 --version fi if [ ${TRAVIS_OS_NAME} == "osx" ]; then - sed -i 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk + sed -i -s 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk fi cat config.mk From 8950df511562a57b1225899b24bb9d3a8fc0528e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 14:37:24 -0400 Subject: [PATCH 034/630] disable openmp for osx build --- test/travis/build_mxnet.sh | 3 +++ 1 file changed, 3 insertions(+) diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index 28073f0b05a5..5c2baf6da17e 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -13,7 +13,10 @@ if [ ! 
-f config.mk ]; then fi if [ ${TRAVIS_OS_NAME} == "osx" ]; then + # add built-in blas header file to path sed -i -s 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk + # disable openmp + sed -i -s 's%USE_OPENMP = 1%USE_OPENMP = 0%g' config.mk fi cat config.mk From 694f0bb4dce82afdfa2c244da01753dbd515378c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Oct 2015 15:21:53 -0400 Subject: [PATCH 035/630] add badges --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index e9ce3583fcae..85cd8cc114e8 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,8 @@ # MXNet [![Build Status](https://travis-ci.org/dmlc/MXNet.jl.svg?branch=master)](https://travis-ci.org/dmlc/MXNet.jl) +[![Coverage Status](https://img.shields.io/coveralls/dmlc/MXNet.jl.svg?style=flat)](https://coveralls.io/r/dmlc/MXNet.jl?branch=master) +[![License](https://img.shields.io/github/license/dmlc/MXNet.jl.svg?style=flat)](LICENSE.md) + Julia wrapper of [MXNet](https://github.com/dmlc/mxnet). From efb0b90739611b3ea05299a8baa806dfc3ae8657 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 8 Oct 2015 00:22:24 -0400 Subject: [PATCH 036/630] clean up built script --- test/travis/build_mxnet.sh | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index 5c2baf6da17e..7b7f421d3cdb 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -4,22 +4,20 @@ git clone --recursive https://github.com/dmlc/mxnet __mxnet_build cd __mxnet_build if [ ! 
-f config.mk ]; then - echo "Use the default config.m" - cp make/config.mk config.mk + echo "Use the default config.m" + cp make/config.mk config.mk - if [ ${TRAVIS_OS_NAME} == "linux" ]; then - sed -i 's/export CC = gcc/export CC = gcc-4.8/g' config.mk - sed -i 's/export CXX = g++/export CXX = g++-4.8/g' config.mk - fi + if [ ${TRAVIS_OS_NAME} == "linux" ]; then + sed -i 's/export CC = gcc/export CC = gcc-4.8/g' config.mk + sed -i 's/export CXX = g++/export CXX = g++-4.8/g' config.mk + fi - if [ ${TRAVIS_OS_NAME} == "osx" ]; then - # add built-in blas header file to path - sed -i -s 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk - # disable openmp - sed -i -s 's%USE_OPENMP = 1%USE_OPENMP = 0%g' config.mk - fi - - cat config.mk + if [ ${TRAVIS_OS_NAME} == "osx" ]; then + # add built-in blas header file to path + sed -i -s 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk + # disable openmp + sed -i -s 's%USE_OPENMP = 1%USE_OPENMP = 0%g' config.mk + fi fi make -j4 || exit 1 From 7626fe7e3168359fee11e2c21733efe8ad2dfadd Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 8 Oct 2015 00:57:24 -0400 Subject: [PATCH 037/630] list symbol creator functions --- src/init.jl | 2 ++ src/symbol.jl | 33 ++++++++++++++++++++++++++++++++- 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/src/init.jl b/src/init.jl index cef8e54c03e0..31ab5436c6cc 100644 --- a/src/init.jl +++ b/src/init.jl @@ -22,6 +22,8 @@ const MXNET_LIB = Libdl.find_library(["libmxnet.so"], ["$(get(ENV,"MXNET_HOME"," function __init__() _import_ndarray_functions() + _import_atomic_symbol_creators() + atexit() do # notify libmxnet we are shutting down ccall( ("MXNotifyShutdown", MXNET_LIB), Cint, () ) diff --git a/src/symbol.jl b/src/symbol.jl index 9ff835703091..0f14ce346007 
100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -1,6 +1,5 @@ export Symbol - ################################################################################ # Symbol Type ################################################################################ @@ -18,3 +17,35 @@ end ################################################################################ # Atomic Symbol functions dynamically imported from libmxnet ################################################################################ +function _define_atomic_symbol_creator(hdr :: MX_handle) + ref_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_kv_nargs = Ref{char_p}(0) + ref_nargs = Ref{MX_uint}(0) + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) + + @mxcall(:MXSymbolGetAtomicSymbolInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, + Ref{char_pp}, Ref{char_p}), + hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, ref_kv_nargs) + + func_name = symbol(bytestring(ref_name[])) + kv_nargs = symbol(bytestring(ref_kv_nargs[])) + info("defining $func_name, kv_nargs = ($kv_nargs)") +end + +function _import_atomic_symbol_creators() + n_ref = Ref{MX_uint}(0) + h_ref = Ref{Ptr{MX_handle}}(0) + @mxcall(:MXSymbolListAtomicSymbolCreators, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) + + n_creators = n_ref[] + h_creators = pointer_to_array(h_ref[], n_creators) + + for i = 1:n_creators + creator_hdr = h_creators[i] + _define_atomic_symbol_creator(creator_hdr) + end +end From 627a8e5d571fc299391a2cf134e72f25e5d1f6db Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 8 Oct 2015 09:54:58 -0400 Subject: [PATCH 038/630] name manager --- src/MXNet.jl | 2 ++ src/name.jl | 46 +++++++++++++++++++++++++++++++++++++++++++ test/runtests.jl | 1 + test/unittest/name.jl | 31 +++++++++++++++++++++++++++++ 4 files changed, 80 insertions(+) create mode 100644 src/name.jl create mode 100644 
test/unittest/name.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index 63748b278d15..02cda42fbd25 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -7,7 +7,9 @@ module mx include("init.jl") include("context.jl") + include("ndarray.jl") +include("name.jl") include("symbol.jl") end # mx diff --git a/src/name.jl b/src/name.jl new file mode 100644 index 000000000000..5644809c79f9 --- /dev/null +++ b/src/name.jl @@ -0,0 +1,46 @@ +abstract AbstractNameManager +typealias NameType Union{Base.Symbol, AbstractString} +typealias NameCounter Dict{Base.Symbol, Int} + +import Base: get! + +"""Default implementation for generating a name for a symbol. + +When a name is specified by the user, it will be used. Otherwise, a name +is automatically generated based on the hint string. +""" +function _default_get_name!(counter :: NameCounter, name :: NameType, hint :: NameType) + if isa(name, Base.Symbol) || !isempty(name) + return symbol(name) + end + + hint = symbol(hint) + if !haskey(counter, hint) + counter[hint] = 0 + end + name = symbol("$hint$(counter[hint])") + counter[hint] += 1 + return name +end + +type BasicNameManager <: AbstractNameManager + counter :: NameCounter +end +BasicNameManager() = BasicNameManager(NameCounter()) + +function get!(manager :: BasicNameManager, name :: NameType, hint :: NameType) + _default_get_name!(manager.counter, name, hint) +end + +type PrefixNameManager <: AbstractNameManager + prefix :: Base.Symbol + counter :: NameCounter +end +PrefixNameManager(prefix :: NameType) = PrefixNameManager(symbol(prefix), NameCounter()) + +function get!(manager :: PrefixNameManager, name :: NameType, hint :: NameType) + name = _default_get_name!(manager.counter, name, hint) + return symbol("$(manager.prefix)$name") +end + +DEFAULT_NAME_MANAGER = BasicNameManager() diff --git a/test/runtests.jl b/test/runtests.jl index 73cc6eae494d..6b9e4e82384d 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -2,4 +2,5 @@ using MXNet using Base.Test 
include("unittest/ndarray.jl") +include("unittest/name.jl") diff --git a/test/unittest/name.jl b/test/unittest/name.jl new file mode 100644 index 000000000000..a7dd6f7bfc35 --- /dev/null +++ b/test/unittest/name.jl @@ -0,0 +1,31 @@ +module TestNameManager +using MXNet +using Base.Test + +function test_default() + info("NameManager::default") + + name = :_____aaaaa_____ + @test get!(mx.DEFAULT_NAME_MANAGER, name, "") == name + @test get!(mx.DEFAULT_NAME_MANAGER, string(name), "") == name + + hint = name + @test get!(mx.DEFAULT_NAME_MANAGER, "", hint) == symbol("$(hint)0") + @test get!(mx.DEFAULT_NAME_MANAGER, "", string(hint)) == symbol("$(hint)1") +end + +function test_prefix() + info("NameManager::prefix") + + name = :_____bbbbb_____ + prefix = :_____foobar_____ + + prefix_manager = mx.PrefixNameManager(prefix) + @test get!(prefix_manager, name, "") == symbol("$prefix$name") + @test get!(prefix_manager, "", name) == symbol("$prefix$(name)0") +end + +test_default() +test_prefix() + +end From 5fea0040a1a6001355f3b985a82fb904de0af1ab Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 8 Oct 2015 13:00:17 -0400 Subject: [PATCH 039/630] symbol constructor --- src/symbol.jl | 57 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/src/symbol.jl b/src/symbol.jl index 0f14ce346007..2fef2055a5dd 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -34,6 +34,63 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) func_name = symbol(bytestring(ref_name[])) kv_nargs = symbol(bytestring(ref_kv_nargs[])) info("defining $func_name, kv_nargs = ($kv_nargs)") + + # function $func_name(args...; kwargs...) 
+ func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) + func_body = quote + idx = findfirst(x -> x[1] == :name, kwargs) + if idx > 0 + name = kwargs[idx][2] + else + name = "" + end + + param_keys = AbstractString[] + param_vals = AbstractString[] + symbol_kws = Dict{Base.Symbol, Symbol}() + + if $kv_nargs != symbol("") && !in($kv_nargs, param_keys) + push!(param_keys, string($kv_nargs)) + push!(param_vals, string(length(args))) + end + + for (k,v) in kwargs + if k == :name; continue; end + if isa(v, Symbol) + symbol_kws[k] = v + else + push!(param_keys, string(k)) + push!(param_vals, string(v)) + end + end + + if length(args) != 0 && length(symbol_kws) != 0 + @assert(false, "$func_name only accepts Symbols either as positional or keyword arguments, not both.") + end + if $kv_nargs != symbol("") && length(symbol_kws) + @assert(false, "$func_name takes variable number of Symbol arguments, please pass input Symbols " * + "via positional arguments, instead of keyword arguments.") + end + + # create the symbol + ref_sym_hdr = Ref{MX_handle}() + @mxcall(:MXSymbolCreateAtomicSymbol, + (MX_handle, MX_unit, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), + hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) + sym_hdr = ref_sym_hdr[] + + sym = Symbol(MX_SymbolHandle(sym_hdr)) + hint = lowercase(string($func_name)) + name = get!(DEFAULT_NAME_MANAGER, name, hint) + + return sym + end + + func_def = Expr(:function, func_head, Expr(:block, func_body)) + eval(func_def) + + # TODO: add doc string + # eval(:(@doc($doc_str, $func_name))) end function _import_atomic_symbol_creators() From ce68e59351be81bb8d8beae3efe54cdcd3649f0c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 8 Oct 2015 19:40:03 -0400 Subject: [PATCH 040/630] symbol basic test --- src/symbol.jl | 116 ++++++++++++++++++++++++++++++++++++++++++----- test/runtests.jl | 3 ++ 2 files changed, 107 insertions(+), 12 deletions(-) diff --git a/src/symbol.jl 
b/src/symbol.jl index 2fef2055a5dd..ee3b773bc031 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -6,13 +6,96 @@ export Symbol type Symbol handle :: MX_SymbolHandle end +function Base.unsafe_convert(::Type{MX_handle}, obj::Symbol) + Base.unsafe_convert(MX_handle, obj.handle) +end +Base.convert(t::Type{MX_handle}, obj::Symbol) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, obj::Symbol) = Base.unsafe_convert(t, obj) + +function Base.deepcopy(self :: Symbol) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolCopy, (MX_handle, Ref{MX_handle}), self, ref_hdr) + return Symbol(MX_SymbolHandle(ref_hdr[])) +end +function Base.copy(self :: Symbol) + Base.deepcopy(self) +end + +function Base.call(self :: Symbol, args :: Symbol...) + s = deepcopy(self) + _compose!(s, args...) +end +function Base.call(self :: Symbol; kwargs...) + s = deepcopy(self) + _compose!(s; kwargs...) +end + +macro _list_symbol_info(self, func_name) + quote + ref_sz = Ref{MX_uint}(0) + ref_names = Ref{char_pp}(0) + @mxcall($func_name, (MX_handle, Ref{MX_uint}, Ref{char_pp}), + $self, ref_sz, ref_names) + narg = ref_sz[] + names = pointer_to_array(ref_names[], narg) + names = [symbol(bytestring(x)) for x in names] + return names + end +end +function list_arguments(self :: Symbol) + @_list_symbol_info(self, :MXSymbolListArguments) +end +function list_outputs(self :: Symbol) + @_list_symbol_info(self, :MXSymbolListOutputs) +end +"""List all auxiliary states in the symbool. + +Auxiliary states are special states of symbols that do not corresponds to an argument, +and do not have gradient. But still be useful for the specific operations. +A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. +Most operators do not have Auxiliary states. 
+""" +function list_auxiliary_states(self :: Symbol) + @_list_symbol_info(self, :MXSymbolListAuxiliaryStates) +end function variable(name :: Union{Base.Symbol, AbstractString}) - hdr_ref = Ref{MX_handle} + hdr_ref = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) Symbol(MX_SymbolHandle(hdr_ref[])) end +"Compose symbol on inputs" +function _compose!(sym :: Symbol; kwargs...) + name = char_p(0) + arg_keys = AbstractString[] + arg_vals = MX_handle[] + + for (k,v) in kwargs + if k == :name + name = string(v) + else + @assert(isa(v, Symbol), "Compose expect `Symbol` as arguments") + push!(arg_keys, string(k)) + push!(arg_vals, v) + end + end + + @mxcall(:MXSymbolCompose, + (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), + sym, name, length(arg_keys), arg_keys, arg_vals) + return sym +end +function _compose!(sym :: Symbol, args::Symbol...) + name = char_p(0) + arg_keys = Ptr{char_p}(0) + arg_vals = MX_handle[args...] + + @mxcall(:MXSymbolCompose, + (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), + sym, name, length(arg_keys), arg_keys, arg_vals) + return sym +end ################################################################################ # Atomic Symbol functions dynamically imported from libmxnet @@ -33,7 +116,6 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) func_name = symbol(bytestring(ref_name[])) kv_nargs = symbol(bytestring(ref_kv_nargs[])) - info("defining $func_name, kv_nargs = ($kv_nargs)") # function $func_name(args...; kwargs...) 
func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) @@ -49,10 +131,14 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) param_vals = AbstractString[] symbol_kws = Dict{Base.Symbol, Symbol}() - if $kv_nargs != symbol("") && !in($kv_nargs, param_keys) - push!(param_keys, string($kv_nargs)) - push!(param_vals, string(length(args))) - end + $(if kv_nargs != symbol("") + quote + if !in("$kv_narg", param_keys) + push!(param_keys, string("$kv_nargs")) + push!(param_vals, string(length(args))) + end + end + end) for (k,v) in kwargs if k == :name; continue; end @@ -67,22 +153,28 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) if length(args) != 0 && length(symbol_kws) != 0 @assert(false, "$func_name only accepts Symbols either as positional or keyword arguments, not both.") end - if $kv_nargs != symbol("") && length(symbol_kws) - @assert(false, "$func_name takes variable number of Symbol arguments, please pass input Symbols " * - "via positional arguments, instead of keyword arguments.") - end + $(if kv_nargs != symbol("") + quote + if length(symbol_kws) > 0 + @assert(false, "$func_name takes variable number of Symbol arguments, please pass input Symbols " * + "via positional arguments, instead of keyword arguments.") + end + end + end) # create the symbol ref_sym_hdr = Ref{MX_handle}() @mxcall(:MXSymbolCreateAtomicSymbol, - (MX_handle, MX_unit, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), - hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) + (MX_handle, MX_uint, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), + $hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) sym_hdr = ref_sym_hdr[] sym = Symbol(MX_SymbolHandle(sym_hdr)) hint = lowercase(string($func_name)) name = get!(DEFAULT_NAME_MANAGER, name, hint) + _compose!(sym; name=name, symbol_kws...) 
+ return sym end diff --git a/test/runtests.jl b/test/runtests.jl index 6b9e4e82384d..d3c8684cbee8 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,6 +1,9 @@ using MXNet using Base.Test +include("unittest/common.jl") + include("unittest/ndarray.jl") include("unittest/name.jl") +include("unittest/symbol.jl") From 8701e0ae02c208711600bff62abe7f1ecd14caac Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 8 Oct 2015 20:01:52 -0400 Subject: [PATCH 041/630] symbol test_internal --- src/symbol.jl | 21 +++++++++++++++++++++ test/unittest/common.jl | 10 ++++++++++ test/unittest/symbol.jl | 39 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 70 insertions(+) create mode 100644 test/unittest/common.jl create mode 100644 test/unittest/symbol.jl diff --git a/src/symbol.jl b/src/symbol.jl index ee3b773bc031..191398d9de25 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -59,12 +59,33 @@ function list_auxiliary_states(self :: Symbol) @_list_symbol_info(self, :MXSymbolListAuxiliaryStates) end +"Get a new grouped symbol whose output contains all the internal outputs of this symbol." 
+function get_internals(self :: Symbol) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolGetInternals, (MX_handle, Ref{MX_handle}), self, ref_hdr) + return Symbol(MX_SymbolHandle(ref_hdr[])) +end + function variable(name :: Union{Base.Symbol, AbstractString}) hdr_ref = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) Symbol(MX_SymbolHandle(hdr_ref[])) end +function Base.getindex(self :: Symbol, idx :: Union{Base.Symbol, AbstractString}) + idx = symbol(idx) + i_idx = find(idx .== list_outputs(self)) + @assert(length(i_idx) > 0, "Cannot find output with name '$idx'") + @assert(length(i_idx) < 2, "Found duplicated output with name '$idx'") + Base.getindex(self, i_idx[1]) +end +function Base.getindex(self :: Symbol, idx :: Int) + ref_hdr = Ref{MX_handle}(0) + # note Julia is 1-based, while MXNet is 0-based + @mxcall(:MXSymbolGetOutput, (MX_handle, MX_uint, Ref{MX_handle}), self, idx-1, ref_hdr) + return Symbol(MX_SymbolHandle(ref_hdr[])) +end + "Compose symbol on inputs" function _compose!(sym :: Symbol; kwargs...) 
name = char_p(0) diff --git a/test/unittest/common.jl b/test/unittest/common.jl new file mode 100644 index 000000000000..6c6d8fdb1fd7 --- /dev/null +++ b/test/unittest/common.jl @@ -0,0 +1,10 @@ +################################################################################ +# Common models used in testing +################################################################################ +function mlp2() + data = mx.variable(:data) + out = mx.FullyConnected(data=data, name=:fc1, num_hidden=1000) + out = mx.Activation(data=out, act_type=:relu) + out = mx.FullyConnected(data=out, name=:fc2, num_hidden=10) + return out +end diff --git a/test/unittest/symbol.jl b/test/unittest/symbol.jl new file mode 100644 index 000000000000..5ea8ae7e766b --- /dev/null +++ b/test/unittest/symbol.jl @@ -0,0 +1,39 @@ +module TestSymbol +using MXNet +using Base.Test + +using ..Main: mlp2 + +################################################################################ +# Test Implementations +################################################################################ +function test_basic() + info("Symbol::basic") + + model = mlp2() + @test mx.list_arguments(model) == [:data,:fc1_weight,:fc1_bias,:fc2_weight,:fc2_bias] + @test mx.list_outputs(model) == [:fc2_output] + @test mx.list_auxiliary_states(model) == Symbol[] +end + +function test_internal() + info("Symbol::internal") + + data = mx.variable(:data) + oldfc = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) + net1 = mx.FullyConnected(data=oldfc, name=:fc2, num_hidden=100) + + @test mx.list_arguments(net1) == [:data,:fc1_weight,:fc1_bias,:fc2_weight,:fc2_bias] + + internal = mx.get_internals(net1) + fc1 = internal[:fc1_output] + @test mx.list_arguments(fc1) == mx.list_arguments(oldfc) +end + +################################################################################ +# Run tests +################################################################################ +test_basic() +test_internal() + +end From 
320706fe7a86c8303c3dafc21eadad0d16abe513 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 8 Oct 2015 20:10:37 -0400 Subject: [PATCH 042/630] symbol test_compose --- src/symbol.jl | 10 ++++++++++ test/unittest/symbol.jl | 17 +++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/src/symbol.jl b/src/symbol.jl index 191398d9de25..878c472f7ff5 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -66,12 +66,22 @@ function get_internals(self :: Symbol) return Symbol(MX_SymbolHandle(ref_hdr[])) end +"Create a symbolic variable with the given name" function variable(name :: Union{Base.Symbol, AbstractString}) hdr_ref = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) Symbol(MX_SymbolHandle(hdr_ref[])) end +"Create a symbol that groups symbols together" +function group(symbols :: Symbol...) + handles = MX_handle[symbols...] + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolCreateGroup, (MX_uint, Ptr{MX_handle}, Ref{MX_handle}), + length(handles), handles, ref_hdr) + Symbol(MX_SymbolHandle(ref_hdr[])) +end + function Base.getindex(self :: Symbol, idx :: Union{Base.Symbol, AbstractString}) idx = symbol(idx) i_idx = find(idx .== list_outputs(self)) diff --git a/test/unittest/symbol.jl b/test/unittest/symbol.jl index 5ea8ae7e766b..9506b559a170 100644 --- a/test/unittest/symbol.jl +++ b/test/unittest/symbol.jl @@ -30,10 +30,27 @@ function test_internal() @test mx.list_arguments(fc1) == mx.list_arguments(oldfc) end +function test_compose() + info("Symbol::compose") + + data = mx.variable(:data) + net1 = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) + net1 = mx.FullyConnected(data=net1, name=:fc2, num_hidden=100) + + net2 = mx.FullyConnected(name=:fc3, num_hidden=10) + net2 = mx.Activation(data=net2, act_type=:relu) + net2 = mx.FullyConnected(data=net2, name=:fc4, num_hidden=20) + + composed = net2(fc3_data=net1, name=:composed) + multi_out = mx.group(composed, net1) + @test mx.list_outputs(multi_out) == 
[:composed_output, :fc2_output] +end + ################################################################################ # Run tests ################################################################################ test_basic() test_internal() +test_compose() end From 631a965167982ecfd833834567c1b09340943080 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 9 Oct 2015 00:31:27 -0400 Subject: [PATCH 043/630] test infer shape --- src/symbol.jl | 64 +++++++++++++++++++++++++++++++++++++++++ test/unittest/symbol.jl | 13 +++++++++ 2 files changed, 77 insertions(+) diff --git a/src/symbol.jl b/src/symbol.jl index 878c472f7ff5..e11754e2eeaa 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -82,6 +82,70 @@ function group(symbols :: Symbol...) Symbol(MX_SymbolHandle(ref_hdr[])) end +macro _infer_shape(self, keys, indptr, sdata) + quote + ref_arg_shape_size = Ref{MX_uint}(0) + ref_arg_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_arg_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_out_shape_size = Ref{MX_uint}(0) + ref_out_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_out_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_aux_shape_size = Ref{MX_uint}(0) + ref_aux_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_aux_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_complete = Ref{Cint}(0) + @mxcall(:MXSymbolInferShape, + (MX_handle, MX_uint, char_pp, Ptr{MX_uint}, Ptr{MX_uint}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{Cint}), + self, length(indptr)-1, keys, indptr, sdata, + ref_arg_shape_size, ref_arg_shape_ndim, ref_arg_shape_data, + ref_out_shape_size, ref_out_shape_ndim, ref_out_shape_data, + ref_aux_shape_size, ref_aux_shape_ndim, ref_aux_shape_data, + ref_complete) + if ref_complete[] == 0 + return (nothing, nothing, nothing) + else + function build_shapes(shape_size::MX_uint, shape_ndim::Ptr{MX_uint}, shape_data::Ptr{Ptr{MX_uint}}) + shape_ndim = 
pointer_to_array(shape_ndim, shape_size) + shape_data = pointer_to_array(shape_data, shape_size) + map(1:shape_size) do i + my_shape = pointer_to_array(shape_data[i], shape_ndim[i]) + tuple(Int[my_shape...]...) + end + end + return ( + build_shapes(ref_arg_shape_size[], ref_arg_shape_ndim[], ref_arg_shape_data[]), + build_shapes(ref_out_shape_size[], ref_out_shape_ndim[], ref_out_shape_data[]), + build_shapes(ref_aux_shape_size[], ref_aux_shape_ndim[], ref_aux_shape_data[]) + ) + end + end +end +function infer_shape(self :: Symbol; kwargs...) + sdata = MX_uint[] + indptr = MX_uint[0] + for (k,v) in kwargs + append!(sdata, [v...]) + push!(indptr, length(sdata)) + end + keys = AbstractString[string(x[1]) for x in kwargs] + @_infer_shape(self, keys, indptr, sdata) +end +function infer_shape(self :: Symbol, args :: Union{Tuple, Void}...) + sdata = MX_uint[] + indptr = MX_uint[0] + for arg in args + if isa(arg, Void); continue; end + append!(sdata, [arg...]) + push!(indptr, length(sdata)) + end + keys = Ptr{char_p}(0) + @_infer_shape(self, keys, indptr, sdata) +end + function Base.getindex(self :: Symbol, idx :: Union{Base.Symbol, AbstractString}) idx = symbol(idx) i_idx = find(idx .== list_outputs(self)) diff --git a/test/unittest/symbol.jl b/test/unittest/symbol.jl index 9506b559a170..cd6cdad1b10a 100644 --- a/test/unittest/symbol.jl +++ b/test/unittest/symbol.jl @@ -46,11 +46,24 @@ function test_compose() @test mx.list_outputs(multi_out) == [:composed_output, :fc2_output] end +function test_infer_shape() + info("Symbol::infer_shape::mlp2") + + model = mlp2() + data_shape = (100, 100) + arg_shapes, out_shapes, aux_shapes = mx.infer_shape(model, data=data_shape) + arg_shape_dict = Dict{Symbol,Tuple}(zip(mx.list_arguments(model), arg_shapes)) + @test arg_shape_dict == Dict{Symbol,Tuple}(:fc2_bias => (10,),:fc2_weight => (10,1000), + :fc1_bias => (1000,), :fc1_weight => (1000,100), + :data => data_shape) +end + 
################################################################################ # Run tests ################################################################################ test_basic() test_internal() test_compose() +test_infer_shape() end From fca478978b59feba681c86c2d1e634b589b624d7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 9 Oct 2015 00:46:12 -0400 Subject: [PATCH 044/630] test infer_shape with error --- test/unittest/symbol.jl | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/test/unittest/symbol.jl b/test/unittest/symbol.jl index cd6cdad1b10a..aa5a18be099f 100644 --- a/test/unittest/symbol.jl +++ b/test/unittest/symbol.jl @@ -56,8 +56,20 @@ function test_infer_shape() @test arg_shape_dict == Dict{Symbol,Tuple}(:fc2_bias => (10,),:fc2_weight => (10,1000), :fc1_bias => (1000,), :fc1_weight => (1000,100), :data => data_shape) + @test length(out_shapes) == 1 + @test out_shapes[1] == (100, 10) end +function test_infer_shape_error() + info("Symbol::infer_shape::error") + + model = mlp2() + weight_shape = (1, 100) + data_shape = (100, 100) + @test_throws mx.MXError mx.infer_shape(model, data=data_shape, fc1_weight=weight_shape) +end + + ################################################################################ # Run tests ################################################################################ @@ -65,5 +77,6 @@ test_basic() test_internal() test_compose() test_infer_shape() +test_infer_shape_error() end From c05e6a5d3c1b2782b2ee531260e184b2c1d36b01 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 9 Oct 2015 09:09:19 -0400 Subject: [PATCH 045/630] try out julia v0.4 precompilation --- src/MXNet.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/MXNet.jl b/src/MXNet.jl index 02cda42fbd25..b516908b56f3 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -1,3 +1,5 @@ +__precompile__() + module MXNet # we put everything in the namespace mx, because there are a lot of From dd1922d2b8a20c0155d4d361725e8314c8e5649a Mon 
Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 9 Oct 2015 14:26:58 -0400 Subject: [PATCH 046/630] executor --- src/MXNet.jl | 1 + src/executor.jl | 86 +++++++++++++++++++++++++++++++++++++++++++++++++ src/init.jl | 1 + 3 files changed, 88 insertions(+) create mode 100644 src/executor.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index b516908b56f3..22c3623dd166 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -13,6 +13,7 @@ include("context.jl") include("ndarray.jl") include("name.jl") include("symbol.jl") +include("executor.jl") end # mx diff --git a/src/executor.jl b/src/executor.jl new file mode 100644 index 000000000000..aa515c53e694 --- /dev/null +++ b/src/executor.jl @@ -0,0 +1,86 @@ +type Executor + handle :: MX_ExecutorHandle + symbol :: Symbol + arg_arrays :: Vector{NDArray} + grad_arrays :: Vector{Union{Void,NDArray}} + aux_arrays :: Vector{NDArray} + out_arrays :: Vector{NDArray} +end +function Executor(hdr :: MX_ExecutorHandle, symbol :: Symbol, + arg_arrays :: Vector{NDArray}, grad_arrays :: Vector{Union{Void,NDArray}}, + aux_arrays :: Vector{NDArray}) + # get output arrays + ref_size = Ref{MX_uint} + ref_hdrs = Ref{Ptr{MX_handle}} + @mxcall(:MXExecutorOutputs, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_handle}}), + hdr, ref_size, ref_hdrs) + out_hdrs = pointer_to_array(ref_hdrs[], ref_size[]) + out_arrays = [NDArray(MX_NDArrayHandle(x)) for x in out_hdrs] + + Executor(hdr, symbol, arg_arrays, grad_arrays, aux_arrays, out_arrays) +end + +function Base.unsafe_convert(::Type{MX_handle}, obj::Executor) + Base.unsafe_convert(MX_handle, obj.handle) +end +Base.convert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) + +@enum GRAD_REQ GRAD_NULL=0 GRAD_WRITE=1 GRAD_ADD=3 +function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; + args_grad :: Union{Void,Vector{NDArray},Dict{Base.Symbol,NDArray}} = nothing, + aux_states :: 
Union{Void,Vector{NDArray},Dict{Base.Symbol,NDArray}} = nothing, + grad_req :: Union{GRAD_REQ,Vector{GRAD_REQ},Dict{Base.Symbol,GRAD_REQ}} = GRAD_WRITE) + + function get_ndarray_inputs(arg_key::String, args::Vector{NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) + @assert(length(args) == length(arg_names), "Length of $arg_key does not match number of arguments") + return (MX_handle[args...], args) + end + function get_ndarray_inputs(arg_key::String, args::Dict{Base.Symbol,NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) + args_vec = map(arg_names) do name + arr = get(args, name, nothing) + if !allow_missing + @assert(!isa(arr, Void), "Must specify all arguments in $arg_key ($name is missing)") + end + arr + end + args_hdr = MX_handle[(isa(x,Void) ? MX_handle(0) : x) for x in args_vec] + return (args_hdr, args_vec) + end + + arg_names = list_arguments(self) + + args_hdr, args = get_ndarray_inputs("args", args, arg_names, false) + if isa(args_grad, Void) + args_grad_hdr = MX_handle[Ptr{Void}(0) for i=1:length(args)] + else + args_grad_hdr, args_grad = get_ndarray_inputs("args_grad", args_grad, arg_names, true) + end + + if isa(aux_states, Void); aux_states = NDArray[]; end + aux_args_hdr, aux_states = get_ndarray_inputs("aux_states", aux_states, list_auxiliary_states(self), false) + + if isa(grad_req, GRAD_REQ) + reqs = MX_uint[grad_req for i=1:length(args)] + elseif isa(grad_req, Vector{GRAD_REQ}) + @assert(length(grad_req) == length(args)) + reqs = MX_uint[grad_req...] 
+ elseif isa(grad_req, Dict{Base.Symbol, GRAD_REQ}) + reqs = MX_uint[get(grad_req, name, GRAD_NULL) for name in arg_names] + end + + ref_hdr = Ref{MX_handle} + @mxcall(:MXExecutorBind, + (MX_handle, Cint, Cint, MX_uint, Ptr{MX_handle}, Ptr{MX_handle}, Ptr{MX_uint}, + MX_uint, Ptr{MX_handle}, Ref{MX_handle}), + self, ctx.device_type, ctx.device_id, length(args), args_hdr, + args_grad_hdr, reqs, length(aux_states), uax_args_hdr, ref_hdr) + executor = Executor(MX_ExecutorHandle(ref_hdr[]), self, + args, args_grad, aux_states) +end + + +function forward(self :: Executor; is_train::Bool=false, kwargs...) + # TODO: kwargs + @mxcall(:MXExecutorForward, (MX_handle, Cint), self, is_train) +end diff --git a/src/init.jl b/src/init.jl index 31ab5436c6cc..3385e8dc403f 100644 --- a/src/init.jl +++ b/src/init.jl @@ -96,4 +96,5 @@ end @mx_define_handle_t(MX_NDArrayHandle, MXNDArrayFree) @mx_define_handle_t(MX_FunctionHandle, nop) @mx_define_handle_t(MX_SymbolHandle, MXSymbolFree) +@mx_define_handle_t(MX_ExecutorHandle, MXExecutorFree) From 0a04f25bdb8a7446c51368ddf96de9c5f905b6a3 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 9 Oct 2015 23:11:46 -0400 Subject: [PATCH 047/630] executor bind forward --- src/executor.jl | 69 +++++++++++++++++++++++++--------------- src/ndarray.jl | 4 +++ src/symbol.jl | 53 ++++++++++++++++++++++++++++-- test/runtests.jl | 1 + test/unittest/common.jl | 11 +++++++ test/unittest/ndarray.jl | 11 ++----- 6 files changed, 111 insertions(+), 38 deletions(-) diff --git a/src/executor.jl b/src/executor.jl index aa515c53e694..8a42309e74a5 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -4,20 +4,30 @@ type Executor arg_arrays :: Vector{NDArray} grad_arrays :: Vector{Union{Void,NDArray}} aux_arrays :: Vector{NDArray} - out_arrays :: Vector{NDArray} + outputs :: Vector{NDArray} + arg_dict :: Dict{Base.Symbol, NDArray} + aux_dict :: Dict{Base.Symbol, NDArray} end function Executor(hdr :: MX_ExecutorHandle, symbol :: Symbol, arg_arrays :: 
Vector{NDArray}, grad_arrays :: Vector{Union{Void,NDArray}}, aux_arrays :: Vector{NDArray}) # get output arrays - ref_size = Ref{MX_uint} - ref_hdrs = Ref{Ptr{MX_handle}} + ref_size = Ref{MX_uint}(0) + ref_hdrs = Ref{Ptr{MX_handle}}(0) @mxcall(:MXExecutorOutputs, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_handle}}), hdr, ref_size, ref_hdrs) out_hdrs = pointer_to_array(ref_hdrs[], ref_size[]) out_arrays = [NDArray(MX_NDArrayHandle(x)) for x in out_hdrs] - Executor(hdr, symbol, arg_arrays, grad_arrays, aux_arrays, out_arrays) + arg_names = list_arguments(symbol) + @assert(length(arg_names) == length(unique(arg_names)), "Duplicated names in arguments: $arg_names") + arg_dict = Dict{Base.Symbol,NDArray}(zip(arg_names, arg_arrays)) + + aux_names = list_auxiliary_states(symbol) + @assert(length(aux_names) == length(unique(aux_names)), "Duplicated names in auxiliary states: $aux_names") + aux_dict = Dict{Base.Symbol,NDArray}(zip(aux_names, aux_arrays)) + + Executor(hdr, symbol, arg_arrays, grad_arrays, aux_arrays, out_arrays, arg_dict, aux_dict) end function Base.unsafe_convert(::Type{MX_handle}, obj::Executor) @@ -26,39 +36,40 @@ end Base.convert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) Base.cconvert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) +function _get_ndarray_inputs(arg_key::AbstractString, args::Vector{NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) + @assert(length(args) == length(arg_names), "Length of $arg_key does not match number of arguments") + return (MX_handle[args...], args) +end +function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) + args_vec = map(arg_names) do name + arr = get(args, name, nothing) + if !allow_missing + @assert(!isa(arr, Void), "Must specify all arguments in $arg_key ($name is missing)") + end + arr + end + args_hdr = MX_handle[(isa(x,Void) ? 
MX_handle(0) : x) for x in args_vec] + return (args_hdr, args_vec) +end + @enum GRAD_REQ GRAD_NULL=0 GRAD_WRITE=1 GRAD_ADD=3 function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; args_grad :: Union{Void,Vector{NDArray},Dict{Base.Symbol,NDArray}} = nothing, aux_states :: Union{Void,Vector{NDArray},Dict{Base.Symbol,NDArray}} = nothing, grad_req :: Union{GRAD_REQ,Vector{GRAD_REQ},Dict{Base.Symbol,GRAD_REQ}} = GRAD_WRITE) - function get_ndarray_inputs(arg_key::String, args::Vector{NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) - @assert(length(args) == length(arg_names), "Length of $arg_key does not match number of arguments") - return (MX_handle[args...], args) - end - function get_ndarray_inputs(arg_key::String, args::Dict{Base.Symbol,NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) - args_vec = map(arg_names) do name - arr = get(args, name, nothing) - if !allow_missing - @assert(!isa(arr, Void), "Must specify all arguments in $arg_key ($name is missing)") - end - arr - end - args_hdr = MX_handle[(isa(x,Void) ? 
MX_handle(0) : x) for x in args_vec] - return (args_hdr, args_vec) - end - arg_names = list_arguments(self) - args_hdr, args = get_ndarray_inputs("args", args, arg_names, false) + args_hdr, args = _get_ndarray_inputs("args", args, arg_names, false) if isa(args_grad, Void) + args_grad = [nothing for i=1:length(args)] args_grad_hdr = MX_handle[Ptr{Void}(0) for i=1:length(args)] else - args_grad_hdr, args_grad = get_ndarray_inputs("args_grad", args_grad, arg_names, true) + args_grad_hdr, args_grad = _get_ndarray_inputs("args_grad", args_grad, arg_names, true) end if isa(aux_states, Void); aux_states = NDArray[]; end - aux_args_hdr, aux_states = get_ndarray_inputs("aux_states", aux_states, list_auxiliary_states(self), false) + aux_args_hdr, aux_states = _get_ndarray_inputs("aux_states", aux_states, list_auxiliary_states(self), false) if isa(grad_req, GRAD_REQ) reqs = MX_uint[grad_req for i=1:length(args)] @@ -69,18 +80,24 @@ function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict reqs = MX_uint[get(grad_req, name, GRAD_NULL) for name in arg_names] end - ref_hdr = Ref{MX_handle} + ref_hdr = Ref{MX_handle}(0) @mxcall(:MXExecutorBind, (MX_handle, Cint, Cint, MX_uint, Ptr{MX_handle}, Ptr{MX_handle}, Ptr{MX_uint}, MX_uint, Ptr{MX_handle}, Ref{MX_handle}), self, ctx.device_type, ctx.device_id, length(args), args_hdr, - args_grad_hdr, reqs, length(aux_states), uax_args_hdr, ref_hdr) + args_grad_hdr, reqs, length(aux_states), aux_args_hdr, ref_hdr) + args_grad = convert(Vector{Union{Void,NDArray}}, args_grad) executor = Executor(MX_ExecutorHandle(ref_hdr[]), self, args, args_grad, aux_states) end function forward(self :: Executor; is_train::Bool=false, kwargs...) 
- # TODO: kwargs + for (k,v) in kwargs + @assert(k ∈ self.arg_dict, "Unknown argument $k") + @assert(isa(v, NDArray), "Keyword argument $k must be an NDArray") + copy!(self.arg_dict[k], v) + end + @mxcall(:MXExecutorForward, (MX_handle, Cint), self, is_train) end diff --git a/src/ndarray.jl b/src/ndarray.jl index 4d6bd5407eaa..035f465fa230 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -30,6 +30,10 @@ type NDArray end end +function NDArray{T<:Real}(data :: Array{T}) + copy(data, mx.DEFAULT_CONTEXT) +end + function Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) Base.unsafe_convert(MX_handle, obj.handle) end diff --git a/src/symbol.jl b/src/symbol.jl index e11754e2eeaa..d8f45cb1f9eb 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -160,6 +160,40 @@ function Base.getindex(self :: Symbol, idx :: Int) return Symbol(MX_SymbolHandle(ref_hdr[])) end +import Base: +, .+ +function +(self :: Symbol, args :: Symbol...) + ret = self + for arg in args + ret = _Plus(ret, arg) + end + ret +end +function .+(self :: Symbol, args :: Symbol...) + +(self, args...) +end + +import Base: -, .- +function -(self :: Symbol, arg :: Symbol) + _Minus(self, arg) +end +function .-(self :: Symbol, arg :: Symbol) + -(self, arg) +end + +import Base: .* +function .*(self :: Symbol, args :: Symbol...) + ret = self + for arg in args + ret = _Mul(ret, arg) + end + ret +end + +import Base: ./ +function ./(self :: Symbol, arg :: Symbol) + _Div(self, arg) +end + "Compose symbol on inputs" function _compose!(sym :: Symbol; kwargs...) name = char_p(0) @@ -182,16 +216,25 @@ function _compose!(sym :: Symbol; kwargs...) return sym end function _compose!(sym :: Symbol, args::Symbol...) - name = char_p(0) + _compose!(sym, char_p(0), args...) +end +function _compose!(sym :: Symbol, name :: Union{Base.Symbol, char_p}, args::Symbol...) + if isa(name, Base.Symbol); name = string(name); end arg_keys = Ptr{char_p}(0) arg_vals = MX_handle[args...] 
@mxcall(:MXSymbolCompose, (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), - sym, name, length(arg_keys), arg_keys, arg_vals) + sym, name, length(arg_vals), arg_keys, arg_vals) return sym end +function to_json(self :: Symbol) + ref_json = Ref{char_p}(0) + @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) + return bytestring(ref_json[]) +end + ################################################################################ # Atomic Symbol functions dynamically imported from libmxnet ################################################################################ @@ -268,7 +311,11 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) hint = lowercase(string($func_name)) name = get!(DEFAULT_NAME_MANAGER, name, hint) - _compose!(sym; name=name, symbol_kws...) + if length(args) != 0 + _compose!(sym, name, args...) + else + _compose!(sym; name=name, symbol_kws...) + end return sym end diff --git a/test/runtests.jl b/test/runtests.jl index d3c8684cbee8..b328f2c8f565 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -6,4 +6,5 @@ include("unittest/common.jl") include("unittest/ndarray.jl") include("unittest/name.jl") include("unittest/symbol.jl") +include("unittest/bind.jl") diff --git a/test/unittest/common.jl b/test/unittest/common.jl index 6c6d8fdb1fd7..51dc1ed43ec4 100644 --- a/test/unittest/common.jl +++ b/test/unittest/common.jl @@ -1,6 +1,16 @@ ################################################################################ # Common models used in testing ################################################################################ +function reldiff(a, b) + diff = sum(abs(a - b)) + norm = sum(abs(a)) + return diff / (norm + 1e-10) +end + +function rand_dims(max_ndim=6) + tuple(rand(1:10, rand(1:max_ndim))...) 
+end + function mlp2() data = mx.variable(:data) out = mx.FullyConnected(data=data, name=:fc1, num_hidden=1000) @@ -8,3 +18,4 @@ function mlp2() out = mx.FullyConnected(data=out, name=:fc2, num_hidden=10) return out end + diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 15a3fea334ed..0d8b3aed0b3c 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -2,18 +2,11 @@ module TestNDArray using MXNet using Base.Test +using ..Main: rand_dims, reldiff + ################################################################################ # Test Implementations ################################################################################ -function reldiff(a, b) - diff = sum(abs(a - b)) - norm = sum(abs(a)) - return diff / (norm + 1e-10) -end - -function rand_dims() - tuple(rand(1:10, rand(1:6))...) -end function rand_tensors{N}(dims::NTuple{N, Int}) tensor = rand(mx.MX_float, dims) array = copy(tensor, mx.DEFAULT_CONTEXT) From 2074d12c776ad2d8277225b175c49c2892cb4db7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 9 Oct 2015 23:20:05 -0400 Subject: [PATCH 048/630] executor backward --- src/executor.jl | 11 +++++++ test/unittest/bind.jl | 70 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 81 insertions(+) create mode 100644 test/unittest/bind.jl diff --git a/src/executor.jl b/src/executor.jl index 8a42309e74a5..249a5684d16e 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -101,3 +101,14 @@ function forward(self :: Executor; is_train::Bool=false, kwargs...) @mxcall(:MXExecutorForward, (MX_handle, Cint), self, is_train) end + +function backward(self :: Executor) + backward(self, NDArray[]) +end +function backward(self :: Executor, out_grad :: NDArray) + backward(self, [out_grad]) +end +function backward(self :: Executor, out_grads :: Vector{NDArray}) + out_grads = MX_handle[out_grads...] 
+ @mxcall(:MXExecutorBackward, (MX_handle, MX_uint, Ptr{MX_handle}), self, length(out_grads), out_grads) +end diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl new file mode 100644 index 000000000000..9f480f3f20f3 --- /dev/null +++ b/test/unittest/bind.jl @@ -0,0 +1,70 @@ +module TestBind +using MXNet +using Base.Test + +using ..Main: rand_dims, reldiff + +################################################################################ +# Test Implementations +################################################################################ +function test_arithmetic(uf, gf) + shape = rand_dims() + info("Bind::arithmetic::$uf::dims = $shape") + + lhs = mx.variable(:lhs) + rhs = mx.variable(:rhs) + ret = uf(lhs, rhs) + @test mx.list_arguments(ret) == [:lhs, :rhs] + + lhs_arr = mx.NDArray(rand(shape)) + rhs_arr = mx.NDArray(rand(shape)) + lhs_grad = mx.empty(shape) + rhs_grad = mx.empty(shape) + + exec2 = mx.bind(ret, mx.Context(mx.CPU), [lhs_arr, rhs_arr], args_grad=[lhs_grad, rhs_grad]) + exec3 = mx.bind(ret, mx.Context(mx.CPU), [lhs_arr, rhs_arr]) + exec4 = mx.bind(ret, mx.Context(mx.CPU), Dict(:lhs=>lhs_arr, :rhs=>rhs_arr), + args_grad=Dict(:rhs=>rhs_grad, :lhs=>lhs_grad)) + + mx.forward(exec2) + mx.forward(exec3) + mx.forward(exec4) + + out1 = uf(copy(lhs_arr), copy(rhs_arr)) + out2 = copy(exec2.outputs[1]) + out3 = copy(exec3.outputs[1]) + out4 = copy(exec4.outputs[1]) + @test reldiff(out1, out2) < 1e-6 + @test reldiff(out1, out3) < 1e-6 + @test reldiff(out1, out4) < 1e-6 + + # test gradients + out_grad = mx.NDArray(ones(shape)) + lhs_grad2, rhs_grad2 = gf(copy(out_grad), copy(lhs_arr), copy(rhs_arr)) + mx.backward(exec2, out_grad) + @test reldiff(copy(lhs_grad), lhs_grad2) < 1e-6 + @test reldiff(copy(rhs_grad), rhs_grad2) < 1e-6 + + # reset grads + lhs_grad[:] = 0 + rhs_grad[:] = 0 + # compute using another binding + mx.backward(exec4, out_grad) + @test reldiff(copy(lhs_grad), lhs_grad2) < 1e-6 + @test reldiff(copy(rhs_grad), rhs_grad2) < 1e-6 +end + 
+function test_arithmetic() + test_arithmetic(.+, (g,x,y) -> (g,g)) + test_arithmetic(.-, (g,x,y) -> (g,-g)) + test_arithmetic(.*, (g,x,y) -> (y.*g, x.*g)) + test_arithmetic(./, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) +end + +################################################################################ +# Run tests +################################################################################ +test_arithmetic() + +end + From 6dc48bbf13f97c71d157f44534ea30626cfb2338 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 11 Oct 2015 19:49:17 -0400 Subject: [PATCH 049/630] simple_bind --- src/executor.jl | 25 +++++++++++++++++++++++-- test/unittest/symbol.jl | 2 +- 2 files changed, 24 insertions(+), 3 deletions(-) diff --git a/src/executor.jl b/src/executor.jl index 249a5684d16e..96c379a1bd25 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -52,7 +52,7 @@ function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDA return (args_hdr, args_vec) end -@enum GRAD_REQ GRAD_NULL=0 GRAD_WRITE=1 GRAD_ADD=3 +@enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; args_grad :: Union{Void,Vector{NDArray},Dict{Base.Symbol,NDArray}} = nothing, aux_states :: Union{Void,Vector{NDArray},Dict{Base.Symbol,NDArray}} = nothing, @@ -77,7 +77,7 @@ function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict @assert(length(grad_req) == length(args)) reqs = MX_uint[grad_req...] elseif isa(grad_req, Dict{Base.Symbol, GRAD_REQ}) - reqs = MX_uint[get(grad_req, name, GRAD_NULL) for name in arg_names] + reqs = MX_uint[get(grad_req, name, GRAD_NOP) for name in arg_names] end ref_hdr = Ref{MX_handle}(0) @@ -91,6 +91,27 @@ function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict args, args_grad, aux_states) end +function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) 
+ arg_shapes, grad_shapes, aux_shapes = infer_shape(self; kwargs...) + @assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") + + arg_arrays = NDArray[zeros(shape, ctx) for shape in arg_shapes] + if grad_req == GRAD_NOP + grad_arrays = nothing + else + grad_arrays = Dict{Base.Symbol, NDArray} + for (name, shape) in zip(list_arguments(self), grad_shapes) + # TODO: use a better way to identify data + if !(endswith(string(name), "data") || endswith(string(name), "label")) + grad_arrays[name] = zeros(shape, ctx) + end + end + end + + aux_arrays = [zeros(shape, ctx) for shape in aux_shapes] + return bind(self, ctx, arg_ndarrays, grad_arrays, grad_req, aux_arrays) +end + function forward(self :: Executor; is_train::Bool=false, kwargs...) for (k,v) in kwargs diff --git a/test/unittest/symbol.jl b/test/unittest/symbol.jl index aa5a18be099f..9fa11789701f 100644 --- a/test/unittest/symbol.jl +++ b/test/unittest/symbol.jl @@ -61,7 +61,7 @@ function test_infer_shape() end function test_infer_shape_error() - info("Symbol::infer_shape::error") + info("Symbol::infer_shape::throws") model = mlp2() weight_shape = (1, 100) From 9dd26e5ef5d10933bd4a7b440db740fc0dba6cb7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 11 Oct 2015 21:07:40 -0400 Subject: [PATCH 050/630] random module --- src/MXNet.jl | 2 ++ src/random.jl | 19 ++++++++++++++++++ test/runtests.jl | 2 ++ test/unittest/random.jl | 43 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 66 insertions(+) create mode 100644 src/random.jl create mode 100644 test/unittest/random.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index 22c3623dd166..195ebfd52cb9 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -11,6 +11,8 @@ include("init.jl") include("context.jl") include("ndarray.jl") +include("random.jl") + include("name.jl") include("symbol.jl") include("executor.jl") diff --git a/src/random.jl b/src/random.jl new file mode 100644 index 000000000000..82b0aea97582 --- /dev/null +++ 
b/src/random.jl @@ -0,0 +1,19 @@ +function rand!(low::Real, high::Real, out::NDArray) + _random_uniform(low, high, out) +end +function rand(low::Real, high::Real, shape::Tuple, ctx::Context=DEFAULT_CONTEXT) + out = empty(shape, ctx) + rand!(low, high, out) +end + +function randn!(mean::Real, stdvar::Real, out::NDArray) + _random_gaussian(mean, stdvar, out) +end +function randn(mean::Real, stdvar::Real, shape::Tuple, ctx::Context=DEFAULT_CONTEXT) + out = empty(shape, ctx) + randn!(mean, stdvar, out) +end + +function srand!(seed_state::Int) + @mxcall(:MXRandomSeed, (Cint,), seed_state) +end diff --git a/test/runtests.jl b/test/runtests.jl index b328f2c8f565..ebed98227084 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -4,6 +4,8 @@ using Base.Test include("unittest/common.jl") include("unittest/ndarray.jl") +include("unittest/random.jl") + include("unittest/name.jl") include("unittest/symbol.jl") include("unittest/bind.jl") diff --git a/test/unittest/random.jl b/test/unittest/random.jl new file mode 100644 index 000000000000..5328aff26906 --- /dev/null +++ b/test/unittest/random.jl @@ -0,0 +1,43 @@ +module TestRandom +using MXNet +using Base.Test + +function test_uniform() + dims = (100, 100, 2) + info("random::uniform::dims = $dims") + + low = -10; high = 10 + seed = 123 + mx.srand!(seed) + ret1 = mx.rand(low, high, dims) + + mx.srand!(seed) + ret2 = mx.empty(dims) + mx.rand!(low, high, ret2) + + @test copy(ret1) == copy(ret2) + @test abs(mean(copy(ret1)) - (high+low)/2) < 0.1 +end + +function test_gaussian() + dims = (80, 80, 4) + info("random::gaussian::dims = $dims") + + μ = 10; σ = 2 + seed = 456 + mx.srand!(seed) + ret1 = mx.randn(μ, σ, dims) + + mx.srand!(seed) + ret2 = mx.empty(dims) + mx.randn!(μ, σ, ret2) + + @test copy(ret1) == copy(ret2) + @test abs(mean(copy(ret1)) - μ) < 0.1 + @test abs(std(copy(ret1)) - σ) < 0.1 +end + +test_uniform() +test_gaussian() + +end From 357a105cdff45c9b58bcbd9b8fdb146bfe67198e Mon Sep 17 00:00:00 2001 From: Chiyuan 
Zhang Date: Sun, 11 Oct 2015 21:14:14 -0400 Subject: [PATCH 051/630] default initializers --- src/initializer.jl | 62 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 src/initializer.jl diff --git a/src/initializer.jl b/src/initializer.jl new file mode 100644 index 000000000000..fcc353502b1b --- /dev/null +++ b/src/initializer.jl @@ -0,0 +1,62 @@ +abstract AbstractInitializer + +function call(self :: AbstractInitializer, name :: Symbol, array :: NDArray) + name = string(name) + if endswith(name, "bias") + _init_bias(self, name, array) + elseif endswith(name, "gamma") + _init_gamma(self, name, array) + elseif endswith(name, "beta") + _init_beta(self, name, array) + elseif endswith(name, "weight") + _init_weight(self, name, array) + elseif endswith(name, "moving_mean") + _init_zero(self, name, array) + elseif endswith(name, "moving_var") + _init_zero(self, name, array) + else + _init_default(self, name, array) + end +end + +function _init_bias(self :: AbstractInitializer, name :: Symbol, array :: NDArray) + array[:] = 0 +end +function _init_gamma(self :: AbstractInitializer, name :: Symbol, array :: NDArray) + array[:] = 1 +end +function _init_beta(self :: AbstractInitializer, name :: Symbol, array :: NDArray) + array[:] = 0 +end +function _init_zero(self :: AbstractInitializer, name :: Symbol, array :: NDArray) + array[:] = 0 +end + +immutable UniformInitializer <: AbstractInitializer + scale :: AbstractFloat +end +UniformInitializer() = UniformInitializer(0.07) + +function _init_weight(self :: UniformInitializer, name :: Symbol, array :: NDArray) + rand!(-self.scale, self.scale, array) +end + +immutable NormalInitializer <: AbstractInitializer + μ :: AbstractFloat + σ :: AbstractFloat +end +NormalInitializer(; μ=0, σ=0.01) = NormalInitializer(μ, σ) + +function _init_weight(self :: NormalInitializer, name :: Symbol, array :: NDArray) + randn!(self.μ, self.σ, array) +end + +immutable XaiverInitializer <: 
AbstractInitializer +end +function _init_weight(self :: NormalInitializer, name :: Symbol, array :: NDArray) + dims = size(array) + fan_in = prod(dims[2:end]) + fan_out = dims[1] + scale = sqrt(3 / (fan_in + fan_out)) + rand!(-scale, scale, array) +end From f3e4e95321cebdcfff4d12ce39ea066723953a89 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 12 Oct 2015 22:19:18 -0400 Subject: [PATCH 052/630] non-unicode name for initializer parameters --- src/initializer.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/initializer.jl b/src/initializer.jl index fcc353502b1b..5a3dc5ef005c 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -45,7 +45,7 @@ immutable NormalInitializer <: AbstractInitializer μ :: AbstractFloat σ :: AbstractFloat end -NormalInitializer(; μ=0, σ=0.01) = NormalInitializer(μ, σ) +NormalInitializer(; mu=0, sigma=0.01) = NormalInitializer(mu, sigma) function _init_weight(self :: NormalInitializer, name :: Symbol, array :: NDArray) randn!(self.μ, self.σ, array) From 7f35875377033bdc21a7f84f87fa118002e36401 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 13 Oct 2015 21:16:24 -0400 Subject: [PATCH 053/630] make clear the meaning of row-major vs. 
column-major in array conversion --- src/MXNet.jl | 2 ++ src/init.jl | 3 ++- src/ndarray.jl | 38 ++++++++++++++++++++++++++++++++++++-- test/unittest/ndarray.jl | 8 ++++++++ 4 files changed, 48 insertions(+), 3 deletions(-) diff --git a/src/MXNet.jl b/src/MXNet.jl index 195ebfd52cb9..dee95f9f6115 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -17,6 +17,8 @@ include("name.jl") include("symbol.jl") include("executor.jl") +include("io.jl") + end # mx end # module MXNet diff --git a/src/init.jl b/src/init.jl index 3385e8dc403f..894edbf83bd8 100644 --- a/src/init.jl +++ b/src/init.jl @@ -23,6 +23,7 @@ const MXNET_LIB = Libdl.find_library(["libmxnet.so"], ["$(get(ENV,"MXNET_HOME"," function __init__() _import_ndarray_functions() _import_atomic_symbol_creators() + _import_io_iterators() atexit() do # notify libmxnet we are shutting down @@ -97,4 +98,4 @@ end @mx_define_handle_t(MX_FunctionHandle, nop) @mx_define_handle_t(MX_SymbolHandle, MXSymbolFree) @mx_define_handle_t(MX_ExecutorHandle, MXExecutorFree) - +@mx_define_handle_t(MX_DataIterHandle, MXDataIterFree) diff --git a/src/ndarray.jl b/src/ndarray.jl index 035f465fa230..527d53f21ac3 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -3,7 +3,7 @@ export NDArray # create a NDArray handle of specific shape function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) h_ref = Ref{MX_handle}(0) - shape = MX_uint[shape...] + shape = flipdim(MX_uint[shape...],1) @mxcall(:MXNDArrayCreate, (Ptr{MX_uint}, MX_uint, Cint, Cint, Cint, Ref{MX_handle}), shape, length(shape), ctx.device_type, ctx.device_id, delay_alloc, h_ref) handle = MX_NDArrayHandle(h_ref[]) @@ -21,6 +21,16 @@ end ################################################################################ # NDArray Type ################################################################################ +"""Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block + of tensor-based computation. 
+ + **Note** since C/C++ use row-major ordering for arrays while Julia follows a + column-major ordering. To keep things consistent, we keep the underlying data + in their original layout, but use *language-native* convention when we talk + about shapes. For example, a mini-batch of 100 MNIST images is a tensor of + C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory + have shape (28,28,1,100). +""" type NDArray handle :: MX_NDArrayHandle writable :: Bool @@ -62,12 +72,16 @@ end # Interface functions similar to Julia Arrays #------------------------------------------------------------ import Base: size, length, ndims, eltype +"""Get the shape of an `NDArray`. Note the shape is converted to Julia convention. + So the same piece of memory, in Julia (column-major), with shape (K, M, N), will be of the + shape (N, M, K) in the Python (row-major) binding. +""" function size(arr :: NDArray) ref_ndim = Ref{MX_uint}(0) ref_shape = Ref{Ptr{MX_uint}}(0) @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), arr, ref_ndim, ref_shape) - tuple(map(Int, pointer_to_array(ref_shape[], ref_ndim[]))...) + tuple(map(Int, flipdim(pointer_to_array(ref_shape[], ref_ndim[]),1))...) end function size(arr :: NDArray, dim :: Int) size(arr)[dim] @@ -92,6 +106,23 @@ function zeros(shape :: Int...) zeros(shape) end +import Base: sub +function sub(arr :: NDArray, ::Colon) + arr +end +function sub(arr :: NDArray, slice::UnitRange{Int}) + dim1 = size(arr)[end] + @assert(1 <= slice.start <= slice.stop <= dim1) + + hdr_ref = Ref{MX_handle}(0) + # note Julia is 1-based, inclusive-inclusive indexing, while C++ is + # 0-based, inclusive-exclusive indexing. So 1:3 in Julia should + # translates into 0:3 in C++. + @mxcall(:MXNDArraySlice, (MX_handle, MX_uint, MX_uint, Ref{MX_handle}), + arr, slice.start-1, slice.stop, hdr_ref) + return NDArray(MX_NDArrayHandle(hdr_ref[]), arr.writable) +end + import Base: setindex! 
"Assign all elements of an NDArray to a scalar" function setindex!(arr :: NDArray, val :: Real, ::Colon) @@ -104,6 +135,9 @@ end function setindex!(arr :: NDArray, val :: NDArray, ::Colon) copy!(arr, val) end +function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, slice::UnitRange{Int}) + copy!(sub(arr, slice), val) +end #------------------------------------------------------------ # Copying functions diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 0d8b3aed0b3c..b070f6edfa93 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -54,6 +54,13 @@ function test_assign() @test reldiff(zeros(size(tensor))+scalar, copy(array)) < 1e-6 end +function test_slice() + array = mx.zeros((2,4)) + array[2:3] = ones(2,2) + @test copy(array) == [0 1 1 0; 0 1 1 0] + @test copy(sub(array, 2:3)) == [1 1; 1 1] +end + function test_plus() dims = rand_dims() t1, a1 = rand_tensors(dims) @@ -159,6 +166,7 @@ end ################################################################################ test_copy() test_assign() +test_slice() test_plus() test_minus() test_mul() From 23c14fcd13f3ee45863bf7ff077077fac8dbb65c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 13 Oct 2015 21:35:50 -0400 Subject: [PATCH 054/630] check writable bit in NDArrays --- src/ndarray.jl | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/ndarray.jl b/src/ndarray.jl index 527d53f21ac3..f94b64d1c7d7 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -126,6 +126,7 @@ end import Base: setindex! 
"Assign all elements of an NDArray to a scalar" function setindex!(arr :: NDArray, val :: Real, ::Colon) + @assert(arr.writable) _set_value(val, arr) return arr end @@ -145,6 +146,7 @@ end import Base: copy!, copy "Copy data between NDArrays" function copy!(dst :: NDArray, src :: NDArray) + @assert(dst.writable) if dst.handle == src.handle warn("Copying an NDArray to itself") return @@ -164,6 +166,7 @@ end "Copy data from Julia Array to NDArray" function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) + @assert dst.writable @assert size(dst) == size(src) src = convert(Array{MX_float}, src) # this might involve copying @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), @@ -225,6 +228,7 @@ macro inplace(stmt) end function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) + @assert dst.writable for arg in args if isa(arg, Real) _plus_scalar(dst, arg, dst) @@ -246,6 +250,7 @@ function .+(arg0 :: NDArray, args :: Union{Real, NDArray}...) end function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + @assert dst.writable if isa(arg, Real) _minus_scalar(dst, arg, dst) else @@ -265,6 +270,7 @@ function -(arg0 :: NDArray) end function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) + @assert dst.writable if isa(arg, Real) _mul_scalar(dst, arg, dst) else @@ -279,6 +285,7 @@ function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) end function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + @assert dst.writable if isa(arg, Real) _div_scalar(dst, arg, dst) else From 7481dab25b1d2444a94ba4921f6b72fb53c2bc26 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 13 Oct 2015 21:56:02 -0400 Subject: [PATCH 055/630] untested io.jl --- src/io.jl | 203 +++++++++++++++++++++++++++++++++++++++ src/ndarray.jl | 12 ++- test/unittest/ndarray.jl | 2 +- 3 files changed, 213 insertions(+), 4 deletions(-) create mode 100644 src/io.jl diff --git a/src/io.jl b/src/io.jl new file mode 100644 index 000000000000..6ea020ec4163 --- /dev/null +++ 
b/src/io.jl @@ -0,0 +1,203 @@ +"""Root type for data provider + + A data provider provides interface to iterate over a dataset. It should implement the following functions: + + provides(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} + + Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, + `[(:data, (100,1,28,28)), (:softmax_label, (100,1))]`. + + A data provider should implement the standard Julia iteration interface, including `Base.start`, + `Base.next`, `Base.done` and `Base.eltype`. It could safely assume that the interface functions will + always be called like + + for batch in provider + # ... + load_data!(batch, targets) + end + + which translates into + + state = Base.start(provider) + while !done(provider, state) + (batch, state) = next(provider, state) + # ... + load_data!(batch, targets) + end + + In other words, it could safely assume that `Base.next` is always called after `Base.done`. And neither + of those function will be called twice consequtively. The detailed interfaces are list below: + + Base.start(provider :: AbstractDataProvider) => AbstractDataProviderState + + Initialize or reset the data iteration. + + Base.next(provider :: AbstractDataProvider, state :: AbstractDataProviderState) + => (AbstractDataBatch, AbstractDataProviderState) + + Return one batch of data. Actual data can be retrieved from the batch by interface functions described + in the document of type `AbstractDataBatch`. + + Base.done(provider :: AbstractDataProvider, state :: AbstractDataProviderState) => Bool + + Return `false` if there is more batch to get. + + Base.eltype(::Type{MyDataProvider}) => MyDataProviderState + + Return the type of the data provider state. +""" +abstract AbstractDataProvider + +"""Root type for states of data provider""" +abstract AbstractDataProviderState + +"""A list of (slice, NDArray) pairs. 
Usually each NDArray resides on a different device, and each + slice describe which part of a larger piece of data should goto that device. +""" +typealias SlicedNDArray Vector{Tuple{UnitRange{Int},NDArray}} + +"""Root type for data batch + + A data batch must implement the following interface function to actually provide the data. The interface + is designed to make it easy to generate data on the fly. + + load_data!(batch :: AbstractDataBatch, targets :: Dict{Base.Symbol, SlicedNDArray}) + + Load data into targets. The target is a dictionary mapping name to actual `SlicedNDArray` the data should be + copied into. Note `targets` might not contain names of all the data we could *provide*, simply because + some the data we provie is not needed. + + The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. +""" +abstract AbstractDataBatch + + +################################################################################ +# MXDataProvider +################################################################################ + +"""Wrapper of built-in `libmxnet` data iterators. 
+""" +type MXDataProvider <: AbstractDataProvider + handle :: MX_DataIterHandle + provides :: Vector{Tuple{Base.Symbol, Tuple}} +end + +function _reset_data_iter(handle :: MX_DataIterHandle) + @mxcall(:MXDataIterBeforeFirst, (MX_handle,), handle) +end +function _iter_next(handle :: MX_DataIterHandle) + ref_ret = Ref{Cint}(0) + @mxcall(:MXDataIterNext, (MX_handle, Ref{Cint}), handle, ref_ret) + return Bool(ref_ret[]) +end +function _get_data(handle :: MX_DataIterHandle) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXDataIterGetData, (MX_handle, Ref{MX_handle}), handle, ref_hdr) + return NDArray(MX_NDArrayHandle(ref_hdr[]), false) +end +function _get_label(handle :: MX_DataIterHandle) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXDataIterGetLabel, (MX_handle, Ref{MX_handle}), handle, ref_hdr) + return NDArray(MX_NDArrayHandle(ref_hdr[]), false) +end + +function MXDataProvider(handle :: MX_DataIterHandle; + data_name :: Union{Base.Symbol,Void}=:data, + label_name :: Union{Base.Symbol,Void}=:softmax_label, + kwargs...) 
# for convenience, we ignore the rest keyword arguments + # init iterator, load the first batch and get shapes + _reset_data_iter(handle) + @assert(_iter_next(handle), "Failed to load the first batch in MXDataProvider") + provides = [(data_name, size(_get_data(handle)))] + if !isa(label_name, Void) + push!(provides, (label_name::Base.Symbol, size(_get_label(handle)))) + end + + MXDataProvider(handle, provides) +end + +type MXDataProviderState <: AbstractDataProviderState + has_next :: Bool +end +type MXDataBatch <: AbstractDataBatch + provider :: MXDataProvider +end + +function Base.eltype(provider :: MXDataProvider) + MXDataBatch +end +function Base.start(provider :: MXDataProvider) + _reset_data_iter(provider.handle) + return MXDataProviderState(true) +end +function Base.done(provider :: MXDataProvider, state :: MXDataProviderState) + state.has_next = _iter_next(provider.handle) + return !state.has_next +end +function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) + return (MXDataBatch(provider.handle), state) +end + +function load_data!(batch :: MXDataBatch, targets :: Dict{Base.Symbol, SlicedNDArray}) + for (k,v) in targets + if k == batch.provider.provides[1][1] + # data + src = _get_data(batch.provider.handle) + elseif k == batch.provider.provides[2][1] + # label + src = _get_label(batch.provider.handle) + else + @assert(false, "Unknown data $k, we only provide $(batch.provider.provides)") + end + + for (idx, target) in v + copy!(target, slice(src, idx)) + end + end +end + + +function _define_data_iter_creator(hdr :: MX_handle) + ref_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_narg = Ref{MX_uint}(0) + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) + + @mxcall(:MXDataIterGetIterInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), + hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) + + iter_name = 
symbol(bytestring(ref_name[])) + println("defining iterator $iter_name") + defun = quote + function $iter_name(; kwargs...) + arg_keys = AbstractString[string(k) for (k,v) in kwargs] + arg_vals = AbstractString[string(v) for (k,v) in kwargs] + ref_hdr = Ref{MX_handle} + + @mxcall(:MXDataIterCreateIter, (MX_handle, MX_uint, char_pp, char_pp, Ref{MX_handle}), + $hdr, length(arg_keys), arg_keys, arg_vals, ref_hdr) + + return MXDataProvider(MX_DataIterHandle(ref_hdr[]); kwargs...) + end + end + eval(defun) + # TODO: add docstring +end + +function _import_io_iterators() + n_ref = Ref{MX_uint}(0) + h_ref = Ref{Ptr{MX_handle}}(0) + @mxcall(:MXListDataIters, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) + + n_creators = n_ref[] + h_creators = pointer_to_array(h_ref[], n_creators) + + for i = 1:n_creators + creator_hdr = h_creators[i] + _define_data_iter_creator(creator_hdr) + end +end diff --git a/src/ndarray.jl b/src/ndarray.jl index f94b64d1c7d7..bc2438de266f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -106,11 +106,17 @@ function zeros(shape :: Int...) zeros(shape) end -import Base: sub -function sub(arr :: NDArray, ::Colon) +import Base: slice +"""`slice` create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest + changing dimension is supported. In Julia's column-major perspective, this is the last + dimension. For example, given an `NDArray` of shape (2,3,4), `sub(array, 2:3)` will create + a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is + used in data parallelization to split mini-batch into sub-batches for different devices. 
+""" +function slice(arr :: NDArray, ::Colon) arr end -function sub(arr :: NDArray, slice::UnitRange{Int}) +function slice(arr :: NDArray, slice::UnitRange{Int}) dim1 = size(arr)[end] @assert(1 <= slice.start <= slice.stop <= dim1) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index b070f6edfa93..04fc3a632204 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -58,7 +58,7 @@ function test_slice() array = mx.zeros((2,4)) array[2:3] = ones(2,2) @test copy(array) == [0 1 1 0; 0 1 1 0] - @test copy(sub(array, 2:3)) == [1 1; 1 1] + @test copy(slice(array, 2:3)) == [1 1; 1 1] end function test_plus() From 956a2209a8af700821bb18c96838a6a5620d0b90 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 13 Oct 2015 23:25:05 -0400 Subject: [PATCH 056/630] unittest MNIST data provider --- .gitignore | 1 + src/MXNet.jl | 2 ++ src/io.jl | 33 +++++++++++++++++++++++---- src/ndarray.jl | 4 ++-- src/util.jl | 23 +++++++++++++++++++ test/{unittest => }/common.jl | 0 test/runtests.jl | 18 +++++++++------ test/unittest/io.jl | 43 +++++++++++++++++++++++++++++++++++ 8 files changed, 111 insertions(+), 13 deletions(-) create mode 100644 src/util.jl rename test/{unittest => }/common.jl (100%) create mode 100644 test/unittest/io.jl diff --git a/.gitignore b/.gitignore index 8c960ec808d9..2ff5c4a3d770 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ *.jl.cov *.jl.*.cov *.jl.mem +data diff --git a/src/MXNet.jl b/src/MXNet.jl index dee95f9f6115..a5ee9bbae104 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -19,6 +19,8 @@ include("executor.jl") include("io.jl") +include("util.jl") + end # mx end # module MXNet diff --git a/src/io.jl b/src/io.jl index 6ea020ec4163..df7db7375d50 100644 --- a/src/io.jl +++ b/src/io.jl @@ -68,6 +68,17 @@ typealias SlicedNDArray Vector{Tuple{UnitRange{Int},NDArray}} some the data we provie is not needed. The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. 
+ + The following function should also be implemented to handle the case when the mini-batch size does not + divide the size of the whole dataset. So in the last mini-batch, the actual data copied might be fewer + than the mini-batch size. This is usually not an issue during the training as the remaining space may + contain the data and label copied during the previous mini-batch are still valid data. However, during + testing, especially when doing feature extraction, we need to be precise about the number of samples + processed. + + get_pad(batch :: AbstractDataBatch) + + Return the number of *dummy samples* in this mini-batch. """ abstract AbstractDataBatch @@ -109,14 +120,17 @@ function MXDataProvider(handle :: MX_DataIterHandle; # init iterator, load the first batch and get shapes _reset_data_iter(handle) @assert(_iter_next(handle), "Failed to load the first batch in MXDataProvider") - provides = [(data_name, size(_get_data(handle)))] + provides = Tuple{Base.Symbol, Tuple}[(data_name, size(_get_data(handle)))] if !isa(label_name, Void) push!(provides, (label_name::Base.Symbol, size(_get_label(handle)))) end + _reset_data_iter(handle) MXDataProvider(handle, provides) end +provides(provider::MXDataProvider) = provider.provides + type MXDataProviderState <: AbstractDataProviderState has_next :: Bool end @@ -136,7 +150,7 @@ function Base.done(provider :: MXDataProvider, state :: MXDataProviderState) return !state.has_next end function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) - return (MXDataBatch(provider.handle), state) + return (MXDataBatch(provider), state) end function load_data!(batch :: MXDataBatch, targets :: Dict{Base.Symbol, SlicedNDArray}) @@ -157,6 +171,12 @@ function load_data!(batch :: MXDataBatch, targets :: Dict{Base.Symbol, SlicedNDA end end +function get_pad(batch :: MXDataBatch) + ref_pad = Ref{Cint}(0) + @mxcall(:MXDataIterGetPadNum, (MX_handle, Ref{Cint}), batch.provider.handle, ref_pad) + return Int(ref_pad[]) +end + 
function _define_data_iter_creator(hdr :: MX_handle) ref_name = Ref{char_p}(0) @@ -171,12 +191,11 @@ function _define_data_iter_creator(hdr :: MX_handle) hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) iter_name = symbol(bytestring(ref_name[])) - println("defining iterator $iter_name") defun = quote function $iter_name(; kwargs...) arg_keys = AbstractString[string(k) for (k,v) in kwargs] arg_vals = AbstractString[string(v) for (k,v) in kwargs] - ref_hdr = Ref{MX_handle} + ref_hdr = Ref{MX_handle}(0) @mxcall(:MXDataIterCreateIter, (MX_handle, MX_uint, char_pp, char_pp, Ref{MX_handle}), $hdr, length(arg_keys), arg_keys, arg_vals, ref_hdr) @@ -186,6 +205,12 @@ function _define_data_iter_creator(hdr :: MX_handle) end eval(defun) # TODO: add docstring + + # add an alias XXXProvider => XXXIter + if endswith(string(iter_name), "Iter") + alias_name = symbol(string(iter_name)[1:end-4] * "Provider") + eval(:($alias_name = $iter_name)) + end end function _import_io_iterators() diff --git a/src/ndarray.jl b/src/ndarray.jl index bc2438de266f..5e1e29fa7eb6 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -142,8 +142,8 @@ end function setindex!(arr :: NDArray, val :: NDArray, ::Colon) copy!(arr, val) end -function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, slice::UnitRange{Int}) - copy!(sub(arr, slice), val) +function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, idx::UnitRange{Int}) + copy!(slice(arr, idx), val) end #------------------------------------------------------------ diff --git a/src/util.jl b/src/util.jl new file mode 100644 index 000000000000..48a55348049e --- /dev/null +++ b/src/util.jl @@ -0,0 +1,23 @@ +function get_data_dir() + data_dir = joinpath(Pkg.dir("MXNet"), "data") + mkpath(data_dir) + data_dir +end + +function get_mnist_ubyte() + data_dir = get_data_dir() + mnist_dir = joinpath(data_dir, "mnist") + mkpath(mnist_dir) + filenames = Dict(:train_data => 
"train-images-idx3-ubyte", + :train_label => "train-labels-idx1-ubyte", + :test_data => "t10k-images-idx3-ubyte", + :test_label => "t10k-labels-idx1-ubyte") + filenames = [k => joinpath(mnist_dir, v) for (k,v) in filenames] + if !all(isfile, values(filenames)) + cd(mnist_dir) do + run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip`) + run(`unzip -u mnist.zip`) + end + end + return filenames +end diff --git a/test/unittest/common.jl b/test/common.jl similarity index 100% rename from test/unittest/common.jl rename to test/common.jl diff --git a/test/runtests.jl b/test/runtests.jl index ebed98227084..1d5f49b4d1f1 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,12 +1,16 @@ using MXNet using Base.Test -include("unittest/common.jl") +# run test in the whole directory, latest modified files +# are run first, this makes waiting time shorter when writing +# or modifying unit-tests +function test_dir(dir) + jl_files = sort(filter(x -> ismatch(r".*\.jl$", x), readdir(dir)), by = fn -> stat(joinpath(dir,fn)).mtime) + map(reverse(jl_files)) do file + include("$dir/$file") + end +end -include("unittest/ndarray.jl") -include("unittest/random.jl") - -include("unittest/name.jl") -include("unittest/symbol.jl") -include("unittest/bind.jl") +include("common.jl") +test_dir("unittest") diff --git a/test/unittest/io.jl b/test/unittest/io.jl new file mode 100644 index 000000000000..1ac65ae78464 --- /dev/null +++ b/test/unittest/io.jl @@ -0,0 +1,43 @@ +module TestIO +using MXNet +using Base.Test + +function test_mnist() + info("IO::MNIST") + filenames = mx.get_mnist_ubyte() + + batch_size = 10 + mnist_provider = mx.MNISTProvider(image=filenames[:train_data], + label=filenames[:train_label], + batch_size=batch_size, silent=true, shuffle=false) + spec = mx.provides(mnist_provider) + spec = Dict(spec) + @test haskey(spec, :data) + @test haskey(spec, :softmax_label) + @test spec[:data] == (28,28,1,batch_size) + @test spec[:softmax_label] == (batch_size,) + + n_batch = 0 + 
for batch in mnist_provider + if n_batch == 0 + data_array = mx.empty(28,28,1,batch_size) + label_array = mx.empty(batch_size) + targets = Dict(:data => [(1:batch_size, data_array)], + :softmax_label => [(1:batch_size, label_array)]) + + mx.load_data!(batch, targets) + + true_labels = [5,0,4,1,9,2,1,3,1,4] # the first 10 labels in MNIST train + got_labels = Int[copy(label_array)...] + @test true_labels == got_labels + end + + n_batch += 1 + end + + @test n_batch == 60000 / batch_size +end + +test_mnist() + +end From 31132563f77e359d5ce7acadb47626414bc0cd65 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 14 Oct 2015 18:07:15 -0400 Subject: [PATCH 057/630] kvstore init / push / pull --- src/MXNet.jl | 1 + src/init.jl | 1 + src/kvstore.jl | 80 ++++++++++++++++++++++++++++++++++++++++ src/ndarray.jl | 10 +++++ test/unittest/kvstore.jl | 31 ++++++++++++++++ 5 files changed, 123 insertions(+) create mode 100644 src/kvstore.jl create mode 100644 test/unittest/kvstore.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index a5ee9bbae104..964b467dcb37 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -18,6 +18,7 @@ include("symbol.jl") include("executor.jl") include("io.jl") +include("kvstore.jl") include("util.jl") diff --git a/src/init.jl b/src/init.jl index 894edbf83bd8..f23d9f3b2207 100644 --- a/src/init.jl +++ b/src/init.jl @@ -99,3 +99,4 @@ end @mx_define_handle_t(MX_SymbolHandle, MXSymbolFree) @mx_define_handle_t(MX_ExecutorHandle, MXExecutorFree) @mx_define_handle_t(MX_DataIterHandle, MXDataIterFree) +@mx_define_handle_t(MX_KVStoreHandle, MXKVStoreFree) diff --git a/src/kvstore.jl b/src/kvstore.jl new file mode 100644 index 000000000000..6c5f58ff20a7 --- /dev/null +++ b/src/kvstore.jl @@ -0,0 +1,80 @@ +type KVStore + handle :: MX_KVStoreHandle +end + +function KVStore(kv_type::Base.Symbol = :local) + @assert(kv_type ∈ [:local]) # TODO: update with allowed types + + ref_hdr = Ref{MX_handle}(0) + kv_type = string(kv_type) + @mxcall(:MXKVStoreCreate, (char_p, 
Ref{MX_handle}), kv_type, ref_hdr) + return KVStore(MX_KVStoreHandle(ref_hdr[])) +end +function Base.unsafe_convert(::Type{MX_handle}, obj::KVStore) + Base.unsafe_convert(MX_handle, obj.handle) +end +Base.convert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) + +function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) + @assert length(keys) == length(vals) + Keys_flt = Int[] + vals_flt = NDArray[] + for (k,v) in zip(keys, vals) + append!(keys_flt, ones(Int, length(v))*k) + append!(vals_flt, v) + end + return (keys_flt, vals_flt) +end + +function init!(self :: KVStore, key :: Int, val :: NDArray) + init!(self, [key], [val]) +end +function init!(self :: KVStore, key :: Int, vals :: Vector{NDArray}) + init!(self, ones(Int, length(vals))*key, vals) +end +function init!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) + init!(self, _flatten_kvlist(keys, vals)...) +end +function init!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{NDArray}) + @assert length(keys) == length(vals) + keys = Cint[keys...] + vals = MX_handle[vals...] + @mxcall(:MXKVStoreInit, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}), + self, length(keys), keys, vals) +end + +import Base.push! +function push!(self :: KVStore, key :: Int, val :: NDArray; priority :: Int = 0) + push!(self, [key], [val]; priority = priority) +end +function push!(self :: KVStore, key :: Int, vals :: Vector{NDArray}; priority :: Int = 0) + push!(self, ones(Int, length(vals))*key, vals; priority = priority) +end +function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}; priority::Int=0) + push!(self, _flatten_kvlist(keys, vals)...; priority = priority) +end +function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{NDArray}; priority::Int=0) + @assert length(keys) == length(vals) + keys = Cint[keys...] + vals = MX_handle[vals...] 
+ @mxcall(:MXKVStorePush, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), + self, length(keys), keys, vals, priority) +end + +function pull!(self :: KVStore, key :: Int, out :: NDArray; priority :: Int = 0) + pull!(self, [key], [out]) +end +function pull!(self :: KVStore, key :: Int, outs :: Vector{NDArray}; priority :: Int = 0) + pull!(self, ones(Int, length(outs))*key, outs; priority = priority) +end +function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{Vector{NDArray}}; priority::Int=0) + pull!(self, _flatten_kvlist(keys, outs)...; priority = priority) +end +function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{NDArray}; priority::Int=0) + @assert length(keys) == length(outs) + keys = Cint[keys...] + outs = MX_handle[outs...] + @mxcall(:MXKVStorePull, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), + self, length(keys), keys, outs, priority) +end diff --git a/src/ndarray.jl b/src/ndarray.jl index 5e1e29fa7eb6..3aaf9946814b 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -106,6 +106,16 @@ function zeros(shape :: Int...) zeros(shape) end +"Create NDArray and initialize with 1" +function ones{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) + arr = empty(shape, ctx) + arr[:] = 1 + return arr +end +function ones(shape :: Int...) + ones(shape) +end + import Base: slice """`slice` create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest changing dimension is supported. 
In Julia's column-major perspective, this is the last diff --git a/test/unittest/kvstore.jl b/test/unittest/kvstore.jl new file mode 100644 index 000000000000..1e3060ff42e7 --- /dev/null +++ b/test/unittest/kvstore.jl @@ -0,0 +1,31 @@ +module TestKVStore +using MXNet +using Base.Test + +using ..Main: rand_dims + +SHAPE = rand_dims() +KEYS = [5,7,11] + +function init_kv() + kv = mx.KVStore() + mx.init!(kv, 3, mx.zeros(SHAPE)) + + vals = [mx.zeros(SHAPE) for k in KEYS] + mx.init!(kv, KEYS, vals) + return kv +end + +function test_single_kv_pair() + info("KVStore::single") + + kv = init_kv() + mx.push!(kv, 3, mx.ones(SHAPE)) + val = mx.empty(SHAPE) + mx.pull!(kv, 3, val) + @test maximum(abs(copy(val) - 1)) == 0 +end + +test_single_kv_pair() + +end From 8e4102f0bf687d2e941f7904e763f1b1d4c56dea Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 14 Oct 2015 18:24:49 -0400 Subject: [PATCH 058/630] kvstore test aggregration --- src/kvstore.jl | 10 +++++----- src/ndarray.jl | 6 ++++++ test/unittest/kvstore.jl | 28 ++++++++++++++++++++++++++++ 3 files changed, 39 insertions(+), 5 deletions(-) diff --git a/src/kvstore.jl b/src/kvstore.jl index 6c5f58ff20a7..d553a65dbd42 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -18,10 +18,10 @@ Base.cconvert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) @assert length(keys) == length(vals) - Keys_flt = Int[] + keys_flt = Int[] vals_flt = NDArray[] for (k,v) in zip(keys, vals) - append!(keys_flt, ones(Int, length(v))*k) + append!(keys_flt, Base.ones(Int, length(v))*k) append!(vals_flt, v) end return (keys_flt, vals_flt) @@ -31,7 +31,7 @@ function init!(self :: KVStore, key :: Int, val :: NDArray) init!(self, [key], [val]) end function init!(self :: KVStore, key :: Int, vals :: Vector{NDArray}) - init!(self, ones(Int, length(vals))*key, vals) + init!(self, Base.ones(Int, length(vals))*key, vals) end function init!(self :: KVStore, keys 
:: Vector{Int}, vals :: Vector{Vector{NDArray}}) init!(self, _flatten_kvlist(keys, vals)...) @@ -49,7 +49,7 @@ function push!(self :: KVStore, key :: Int, val :: NDArray; priority :: Int = 0) push!(self, [key], [val]; priority = priority) end function push!(self :: KVStore, key :: Int, vals :: Vector{NDArray}; priority :: Int = 0) - push!(self, ones(Int, length(vals))*key, vals; priority = priority) + push!(self, Base.ones(Int, length(vals))*key, vals; priority = priority) end function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}; priority::Int=0) push!(self, _flatten_kvlist(keys, vals)...; priority = priority) @@ -66,7 +66,7 @@ function pull!(self :: KVStore, key :: Int, out :: NDArray; priority :: Int = 0) pull!(self, [key], [out]) end function pull!(self :: KVStore, key :: Int, outs :: Vector{NDArray}; priority :: Int = 0) - pull!(self, ones(Int, length(outs))*key, outs; priority = priority) + pull!(self, Base.ones(Int, length(outs))*key, outs; priority = priority) end function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{Vector{NDArray}}; priority::Int=0) pull!(self, _flatten_kvlist(keys, outs)...; priority = priority) diff --git a/src/ndarray.jl b/src/ndarray.jl index 3aaf9946814b..95757917bd29 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -299,6 +299,12 @@ function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) mul_to!(ret, arg) end +# unlike *, we only allow type Real in arguments, because array-array * operator +# means matrix multiplication in Julia +function *(arg0 :: NDArray, arg :: Real) + ret = copy(arg0, context(arg0)) + mul_to!(ret, arg) +end function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable diff --git a/test/unittest/kvstore.jl b/test/unittest/kvstore.jl index 1e3060ff42e7..bca77a556922 100644 --- a/test/unittest/kvstore.jl +++ b/test/unittest/kvstore.jl @@ -26,6 +26,34 @@ function test_single_kv_pair() @test maximum(abs(copy(val) 
- 1)) == 0 end +function test_aggregator() + info("KVStore::aggregator") + + kv = init_kv() + + num_devs = 4 + devs = [mx.Context(mx.CPU, i) for i=0:num_devs-1] + vals = [mx.ones(SHAPE, dev) for dev in devs] + + mx.push!(kv, 3, vals) + mx.pull!(kv, 3, vals) + for v in vals + @test maximum(abs(copy(v)) - num_devs) == 0 + end + + # list + vals = [mx.NDArray[mx.ones(SHAPE, dev)*2 for dev in devs] for k in KEYS] + mx.push!(kv, KEYS, vals) + mx.pull!(kv, KEYS, vals) + + for vv in vals + for v in vv + @test maximum(abs(copy(v)) - 2num_devs) == 0 + end + end +end + test_single_kv_pair() +test_aggregator() end From 48a2b208ee8369e8d845b2b3613dfc8515f9a606 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 16 Oct 2015 00:02:02 -0400 Subject: [PATCH 059/630] eval metric --- src/MXNet.jl | 2 ++ src/metric.jl | 30 ++++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 src/metric.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index 964b467dcb37..4f703d5e6d18 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -20,6 +20,8 @@ include("executor.jl") include("io.jl") include("kvstore.jl") +include("metric.jl") + include("util.jl") end # mx diff --git a/src/metric.jl b/src/metric.jl new file mode 100644 index 000000000000..94dd97a9b911 --- /dev/null +++ b/src/metric.jl @@ -0,0 +1,30 @@ +abstract AbstractEvalMetric + +type Accuracy <: AbstractEvalMetric + acc_sum :: Float64 + n_sample :: Int + + Accuracy() = new(0.0, 0) +end + +function update!(metric :: Accuracy, label :: NDArray, pred :: NDArray) + label = copy(label) + pred = copy(pred) + + n_sample = size(pred)[end] + metric.n_sample += n_sample + for i = 1:n_sample + klass = indmax(sub(pred,:,i)) + metric.acc_sum += (klass-1) == label[i] + end +end + +import Base: get +function get(metric :: Accuracy) + metric.acc_sum / metric.n_sample +end + +function reset!(metric :: Accuracy) + metric.acc_sum = 0.0 + metric.n_sample = 0 +end From dd02638fb9e3250829094eaa1cdbaee45d18b8d0 Mon Sep 17 00:00:00 2001 
From: Chiyuan Zhang Date: Fri, 16 Oct 2015 00:14:40 -0400 Subject: [PATCH 060/630] shape should be translated between Julia<->C in infer_shape --- src/symbol.jl | 6 +++--- test/unittest/symbol.jl | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/symbol.jl b/src/symbol.jl index d8f45cb1f9eb..a352173b7a43 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -113,7 +113,7 @@ macro _infer_shape(self, keys, indptr, sdata) shape_data = pointer_to_array(shape_data, shape_size) map(1:shape_size) do i my_shape = pointer_to_array(shape_data[i], shape_ndim[i]) - tuple(Int[my_shape...]...) + tuple(flipdim(Int[my_shape...],1)...) end end return ( @@ -128,7 +128,7 @@ function infer_shape(self :: Symbol; kwargs...) sdata = MX_uint[] indptr = MX_uint[0] for (k,v) in kwargs - append!(sdata, [v...]) + append!(sdata, flipdim([v...],1)) push!(indptr, length(sdata)) end keys = AbstractString[string(x[1]) for x in kwargs] @@ -139,7 +139,7 @@ function infer_shape(self :: Symbol, args :: Union{Tuple, Void}...) 
indptr = MX_uint[0] for arg in args if isa(arg, Void); continue; end - append!(sdata, [arg...]) + append!(sdata, flipdim([arg...],1)) push!(indptr, length(sdata)) end keys = Ptr{char_p}(0) diff --git a/test/unittest/symbol.jl b/test/unittest/symbol.jl index 9fa11789701f..b62496d20fbc 100644 --- a/test/unittest/symbol.jl +++ b/test/unittest/symbol.jl @@ -53,18 +53,18 @@ function test_infer_shape() data_shape = (100, 100) arg_shapes, out_shapes, aux_shapes = mx.infer_shape(model, data=data_shape) arg_shape_dict = Dict{Symbol,Tuple}(zip(mx.list_arguments(model), arg_shapes)) - @test arg_shape_dict == Dict{Symbol,Tuple}(:fc2_bias => (10,),:fc2_weight => (10,1000), - :fc1_bias => (1000,), :fc1_weight => (1000,100), + @test arg_shape_dict == Dict{Symbol,Tuple}(:fc2_bias => (10,),:fc2_weight => (1000,10), + :fc1_bias => (1000,), :fc1_weight => (100, 1000), :data => data_shape) @test length(out_shapes) == 1 - @test out_shapes[1] == (100, 10) + @test out_shapes[1] == (10, 100) end function test_infer_shape_error() info("Symbol::infer_shape::throws") model = mlp2() - weight_shape = (1, 100) + weight_shape = (100, 1) data_shape = (100, 100) @test_throws mx.MXError mx.infer_shape(model, data=data_shape, fc1_weight=weight_shape) end From d061a180579281f213f3d4cd09f8838aec4471dc Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 17 Oct 2015 13:43:38 -0400 Subject: [PATCH 061/630] optimizer and estimator --- REQUIRE | 1 + src/MXNet.jl | 6 +- src/estimator.jl | 264 ++++++++++++++++++++++++++++++++++++++++++++ src/executor.jl | 27 ++++- src/io.jl | 67 ++++++----- src/kvstore.jl | 47 +++++++- src/metric.jl | 11 +- src/ndarray.jl | 3 + src/optimizer.jl | 76 +++++++++++++ src/symbol.jl | 3 +- test/unittest/io.jl | 19 ++-- 11 files changed, 479 insertions(+), 45 deletions(-) create mode 100644 src/estimator.jl create mode 100644 src/optimizer.jl diff --git a/REQUIRE b/REQUIRE index d5d646713dcf..76fa30ce5f54 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1 +1,2 @@ julia 0.4 +Formatting 
diff --git a/src/MXNet.jl b/src/MXNet.jl index 4f703d5e6d18..cd96b3573af5 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -6,6 +6,7 @@ module MXNet # functions with the same names as built-in utilities like "zeros", etc. export mx module mx +using Formatting include("init.jl") include("context.jl") @@ -17,10 +18,13 @@ include("name.jl") include("symbol.jl") include("executor.jl") +include("metric.jl") +include("optimizer.jl") + include("io.jl") include("kvstore.jl") -include("metric.jl") +include("estimator.jl") include("util.jl") diff --git a/src/estimator.jl b/src/estimator.jl new file mode 100644 index 000000000000..b3bd67e19bf4 --- /dev/null +++ b/src/estimator.jl @@ -0,0 +1,264 @@ +abstract AbstractEstimator + +type FeedForward <: AbstractEstimator + arch :: Symbol + ctx :: Vector{Context} + + arg_params :: Dict{Base.Symbol, NDArray} + aux_params :: Dict{Base.Symbol, NDArray} + + # leave the rest fields undefined + FeedForward(arch :: Symbol, ctx :: Vector{Context}) = new(arch, ctx) +end + +function _check_arguments(symbol :: Symbol) + arg_names = list_arguments(symbol) + @assert(length(unique(arg_names)) == length(arg_names), "Duplicated names in arguments $arg_names") +end + +"""Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector + of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that + piece. +""" +function _split_inputs(batch_size :: Int, n_split :: Int) + @assert(batch_size >= n_split) + per_split = floor(Int, batch_size / n_split) + counts = zeros(Int, n_split)+per_split + extra = batch_size - sum(counts) + counts[1:extra] += 1 + + cum = [0, cumsum(counts)...] 
+ idx = [cum[i-1]+1:cum[i] for i = 2:length(cum)] + return idx +end + +function FeedForward(arch :: Symbol; context :: Union{Context, Vector{Context}, Void} = nothing) + if isa(context, Void) + context = [Context(CPU)] + elseif isa(context, Context) + context = [context] + end + FeedForward(arch, context) +end + +function _init_params(self :: FeedForward, data :: AbstractDataProvider) + # all arg names, including data, label, and parameters + arg_names = list_arguments(self.arch) + + data_shapes = provide_data(data) + label_shapes = provide_label(data) + data_names = [x[1] for x in data_shapes] + label_names = [x[1] for x in label_shapes] + + param_names = setdiff(arg_names, data_names ∪ label_names) + aux_names = list_auxiliary_states(self.arch) + + arg_shapes, grad_shapes, aux_shapes = infer_shape(self.arch; data_shapes...) + if !isdefined(self, :arg_params) + param_name_shapes = filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) + self.arg_params = Dict([name => empty(shape) for (name,shape) in param_name_shapes]) + end + if !isdefined(self, :aux_params) + self.aux_params = Dict([name => empty(shape) for (name,shape) in zip(aux_names,aux_shapes)]) + end + + # initialize the contents of the parameters + for (k,v) in self.arg_params + self.initializer(k, v) + end + for (k,v) in self.aux_params + self.initializer(k, v) + end + + return (param_names, aux_names) +end + +function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) + if num_device == 1 && !ismatch(r"dist", string(kv_type)) + kv = nothing + else + if kv_type == :local + max_size = maximum([prod(size(param)) for (k,param) in arg_params]) + if max_size < 1024 * 1024 * 16 + kv_type = :loca_update_cpu + else + kv_type = :local_allreduce_cpu + end + info("Auto-select kvstore type = $kv_type") + end + kv = KVStore(kv_type) + end + + update_on_kvstore = true + if isa(kv, Void) || ismatch(r"local_allreduce", string(get_type(kv))) + update_on_kvstore = 
false + end + + return (kv, update_on_kvstore) +end + +function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; + epoch_stop :: Int = 10, epoch_start :: Int = 1, + eval_data :: Union{Void, AbstractDataProvider} = nothing, + eval_metric :: AbstractEvalMetric = Accuracy(), + kvstore :: Union{Base.Symbol, KVStore} = :local) + + info("Start training on $(self.ctx)") + + batch_size = get_batch_size(data) + num_dev = length(self.ctx) + slices = _split_inputs(batch_size, num_dev) + + # initialize parameters + info("Initializing parameters...") + param_names, aux_names = _init_params(self, param_names, aux_names) + + # setup kvstore + if isa(kvstore, Base.Symbol) + info("Creating KVStore...") + kvstore, update_on_kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params) + end + + train_execs = Array(Executor, num_dev) + for i = 1:num_dev + data_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)] + train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=GRAD_WRITE, data_shapes...) 
+ end + + # set up input data structures + data_names = [x[1] for x in provide_data(data)] + label_names = [x[1] for x in provide_label(data)] + + data_arrays = Vector{NDArray}[[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] + for name in data_names] + label_arrays = Vector{NDArray}[[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] + for name in label_names] + + param_arrays = Vector{NDArray}[[exec.arg_arrays[i] for exec in train_execs] for i = 1:length(param_names)] + grad_arrays = Vector{NDArray}[[exec.grad_arrays[i] for exec in train_execs] for i = 1:length(param_names)] + + optimizer.inv_batch_size = 1.0/batch_size + + if !update_on_kvstore + updater = get_updater(self.optimizer) + end + + if !isa(kvstore, Void) + if update_on_kvstore + set_optimizer(kvstore, optimizer) + end + + # init kv with gradients + for idx = 1:length(param_arrays) + param_on_devs = param_arrays[idx] + grad_on_devs = grad_arrays[idx] + + init!(kvstore, idx, self.arg_params[param_names[idx]]) + + # pull weights back + pull!(kvstore, idx, param_on_devs, priority=-idx) + end + end + + # set up output and labels in CPU for evaluation metric + output_shapes = [tuple(size(x)[1:end-1]...,batch_size) for x in train_execs[1].outputs] + cpu_dev = Context(CPU) + cpu_output_arrays = [empty(shape, cpu_dev) for shape in output_shapes] + cpu_label_arrays = [empty(shape, cpu_dev) for (name,shape) in provide_label(data)] + cpu_label_arrays_full_slice = [(1:batch_size, x) for x in label_arrays] + + # now start training... 
+ for i_epoch = epoch_start:epoch_stop + time_start = time() + reset!(eval_metric) + n_batch = 0 + + for batch in data + load_data!(batch, data_arrays) + load_label!(batch, label_arrays) + + # forward and backward + for (texec, islice) in zip(train_execs, slices) + forward(texec, is_train=true) + + # copy outputs into cpu ndarray, for evaluation metric + for (cpu_out, dev_out) in zip(cpu_output_arrays, texec.outputs) + copy!(slice(cpu_out, islice), dev_out) + end + + backward(texec) + end + + # update parameters + for idx = 1:length(param_names) + # gradient synchronization + if !isa(kvstore, Void) + # push gradient, priority is negative index + push!(kvstore, idx, grad_arrays[idx], priority=-idx) + if update_on_kvstore + # pull back the weights + pull!(kvstore, idx, param_arrays[idx], priority=-idx) + else + # pull back the sum-ed gradients, to the same locations + pull!(kvstore, idx, grad_arrays[idx], priority=-idx) + end + end + + if !update_on_kvstore + # manual updating + for i_dev = 1:num_dev + # create a fake index, so that the updater create states + # for different param AND different devices, TODO(mli) + # use a better solution later + fake_idx = idx * num_dev + i_dev + updater(fake_idx, grad_arrays[idx][i_dev], param_arrays[idx][i_dev]) + end + end + end + + n_batch += 1 + + # update evaluation metric on training set + load_label!(batch, cpu_label_arrays_full_slice) + update!(eval_metric, cpu_label_arrays, cpu_output_arrays) + end # end of one epoch + + time_stop = time() + info("== Epoch {1:0>3d} ==========", i_epoch) + info("## Training summary") + for (name, value) in get(eval_metric) + info("{1>15s} = {2:.4f}", name, value) + end + info("{1>15s} = {2:.2f} seconds", "time", (time_stop-time_start)/1e9) + + # evaluation on validation set + if !isa(eval_data, Void) + # because we are re-using the memory allocated for the training network, + # the batch_size of the validation dataset must be the same as the training + # batch_size + 
@assert(get_batch_size(eval_data) == batch_size) + + reset!(eval_metric) + for batch in eval_data + load_data!(batch, data_arrays) + + # forward and backward + for (texec, islice) in zip(train_execs, slices) + forward(texec, is_train=true) + + # copy outputs into cpu ndarray, for evaluation metric + for (cpu_out, dev_out) in zip(cpu_output_arrays, texec.outputs) + copy!(slice(cpu_out, islice), dev_out) + end + end + load_label!(batch, cpu_label_arrays_full_slice) + update!(eval_metric, cpu_label_arrays, cpu_output_arrays) + end + + info("## Validation summary") + for (name, value) in get(eval_metric) + info("{1>15s} = {2:.4f}", name, value) + end + end + end # end of all epochs +end diff --git a/src/executor.jl b/src/executor.jl index 96c379a1bd25..6ddcff91efea 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -100,9 +100,11 @@ function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_W grad_arrays = nothing else grad_arrays = Dict{Base.Symbol, NDArray} + provided_data_names = keys(kwargs) for (name, shape) in zip(list_arguments(self), grad_shapes) # TODO: use a better way to identify data - if !(endswith(string(name), "data") || endswith(string(name), "label")) + #if !(endswith(string(name), "data") || endswith(string(name), "label")) + if !in(name, provided_data_names) grad_arrays[name] = zeros(shape, ctx) end end @@ -133,3 +135,26 @@ function backward(self :: Executor, out_grads :: Vector{NDArray}) out_grads = MX_handle[out_grads...] 
@mxcall(:MXExecutorBackward, (MX_handle, MX_uint, Ptr{MX_handle}), self, length(out_grads), out_grads) end + + +function copy_params_from(self::Executor, arg_params::Dict{Base.Symbol,NDArray}, + aux_params::Union{Void,Dict{Base.Symbol,NDArray}}=nothing; + allow_extra_params::Bool=false) + for (name, array) in arg_params + if haskey(self.arg_dict, name) + copy!(self.arg_dict[name], array) + else + @assert(allow_extra_params, "Extra params $name not in the arguments") + end + end + + if !isa(aux_params, Void) + for (name, array) in aux_params + if haskey(self.aux_dict, name) + copy!(self.aux_dict[name], array) + else + @assert(allow_extra_params, "Extra auxiliary state $name not recognized") + end + end + end +end diff --git a/src/io.jl b/src/io.jl index df7db7375d50..834e7ad755a4 100644 --- a/src/io.jl +++ b/src/io.jl @@ -2,10 +2,16 @@ A data provider provides interface to iterate over a dataset. It should implement the following functions: - provides(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} + provide_data(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} + provide_label(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, - `[(:data, (100,1,28,28)), (:softmax_label, (100,1))]`. + `[(:data, (100,1,28,28))]` or `[(:softmax_label, (100,1))]`. It should also implement the following convenient + function + + get_batch_size(provider :: AbstractDataProvider) => Int + + which returns the batch size used in this data provider. A data provider should implement the standard Julia iteration interface, including `Base.start`, `Base.next`, `Base.done` and `Base.eltype`. 
It could safely assume that the interface functions will @@ -58,14 +64,14 @@ typealias SlicedNDArray Vector{Tuple{UnitRange{Int},NDArray}} """Root type for data batch - A data batch must implement the following interface function to actually provide the data. The interface - is designed to make it easy to generate data on the fly. + A data batch must implement the following interface function to actually provide the data and label. - load_data!(batch :: AbstractDataBatch, targets :: Dict{Base.Symbol, SlicedNDArray}) + load_data!(batch :: AbstractDataBatch, targets :: Vector{SlicedNDArray}) + load_label!(batch :: AbstractDataBatch, targets :: Vector{SlicedNDArray}) - Load data into targets. The target is a dictionary mapping name to actual `SlicedNDArray` the data should be - copied into. Note `targets` might not contain names of all the data we could *provide*, simply because - some the data we provie is not needed. + Load data and label into targets. The target is a list of `SlicedNDArray` the data/label should be + copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and + `provide_label`. The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. @@ -90,8 +96,10 @@ abstract AbstractDataBatch """Wrapper of built-in `libmxnet` data iterators. 
""" type MXDataProvider <: AbstractDataProvider - handle :: MX_DataIterHandle - provides :: Vector{Tuple{Base.Symbol, Tuple}} + handle :: MX_DataIterHandle + data_shape :: Vector{Tuple{Base.Symbol, Tuple}} + label_shape:: Vector{Tuple{Base.Symbol, Tuple}} + batch_size :: Int end function _reset_data_iter(handle :: MX_DataIterHandle) @@ -120,16 +128,20 @@ function MXDataProvider(handle :: MX_DataIterHandle; # init iterator, load the first batch and get shapes _reset_data_iter(handle) @assert(_iter_next(handle), "Failed to load the first batch in MXDataProvider") - provides = Tuple{Base.Symbol, Tuple}[(data_name, size(_get_data(handle)))] + data_shape = Tuple{Base.Symbol, Tuple}[(data_name, size(_get_data(handle)))] if !isa(label_name, Void) - push!(provides, (label_name::Base.Symbol, size(_get_label(handle)))) + label_shape = Tuple{Base.Symbol, Tuple}[(label_name::Base.Symbol, size(_get_label(handle)))] + else + label_shape = Tuple{Base.Symbol, Tuple}[] end _reset_data_iter(handle) - MXDataProvider(handle, provides) + MXDataProvider(handle, data_shape, label_shape, data_shape[1][2][end]) end -provides(provider::MXDataProvider) = provider.provides +provide_data(provider::MXDataProvider) = provider.data_shape +provide_label(provider::MXDataProvider) = provider.label_shape +get_batch_size(provider::MXDataProvider) = provider.batch_size type MXDataProviderState <: AbstractDataProviderState has_next :: Bool @@ -153,24 +165,21 @@ function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) return (MXDataBatch(provider), state) end -function load_data!(batch :: MXDataBatch, targets :: Dict{Base.Symbol, SlicedNDArray}) - for (k,v) in targets - if k == batch.provider.provides[1][1] - # data - src = _get_data(batch.provider.handle) - elseif k == batch.provider.provides[2][1] - # label - src = _get_label(batch.provider.handle) - else - @assert(false, "Unknown data $k, we only provide $(batch.provider.provides)") - end - - for (idx, target) in v - copy!(target, 
slice(src, idx)) - end +function _load_general!(batch :: MXDataBatch, loader :: Function, targets :: Vector{SlicedNDArray}) + @assert length(targets) == 1 + src = loader(batch.provider.handle) + for (idx, target) in targets[1] + copy!(target, slice(src, idx)) end end +function load_data!(batch :: MXDataBatch, targets :: Vector{SlicedNDArray}) + _load_general!(batch, _get_data, targets) +end +function load_label!(batch :: MXDataBatch, targets :: Vector{SlicedNDArray}) + _load_general!(batch, _get_label, targets) +end + function get_pad(batch :: MXDataBatch) ref_pad = Ref{Cint}(0) @mxcall(:MXDataIterGetPadNum, (MX_handle, Ref{Cint}), batch.provider.handle, ref_pad) diff --git a/src/kvstore.jl b/src/kvstore.jl index d553a65dbd42..6d0642a6d2a7 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -1,5 +1,6 @@ type KVStore - handle :: MX_KVStoreHandle + handle :: MX_KVStoreHandle + updater_c :: Ptr{Void} end function KVStore(kv_type::Base.Symbol = :local) @@ -8,7 +9,7 @@ function KVStore(kv_type::Base.Symbol = :local) ref_hdr = Ref{MX_handle}(0) kv_type = string(kv_type) @mxcall(:MXKVStoreCreate, (char_p, Ref{MX_handle}), kv_type, ref_hdr) - return KVStore(MX_KVStoreHandle(ref_hdr[])) + return KVStore(MX_KVStoreHandle(ref_hdr[]), Ptr{Void}(0)) end function Base.unsafe_convert(::Type{MX_handle}, obj::KVStore) Base.unsafe_convert(MX_handle, obj.handle) @@ -78,3 +79,45 @@ function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{NDArray}; pr @mxcall(:MXKVStorePull, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), self, length(keys), keys, outs, priority) end + + +function get_type(self :: KVStore) + type_ref = Ref{char_p}(0) + @mxcall(:MXKVStoreGetType, (MX_handle, Ref{char_p}), self, type_ref) + return symbol(bytestring(type_ref[])) +end + +function get_num_workers(self :: KVStore) + ref_size = Ref{Cint}(0) + @mxcall(:MXKVStoreGetGroupSize, (MX_handle, Ref{Cint}), self, ref_size) + return Int(ref_size[]) +end + +function get_rank(self :: KVStore) + ref_rank 
= Ref{Cint}(0) + @mxcall(:MXKVStoreGetRank, (MX_handle, Ref{Cint}), self, ref_rank) + return Int(ref_rank[]) +end + + +function set_updater(self :: KVStore, updater :: Function) + function updater_wrapper(index :: Cint, nd_recv :: MX_handle, nd_local :: MX_handle, ::Ptr{Void}) + updater(index, NDArray(MX_NDArrayHandle(nd_recv)), NDArray(MX_NDArrayHandle(nd_local))) + end + self.wrapper_c = cfunction(updater_wrapper, Void, (Cint, MX_handle, MX_handle, Ptr{Void})) + + @mxcall(:MXKVStoreSetUpdater, (MX_handle, Ptr{Void}, Ptr{Void}), + self, self.wrapper_c, Ptr{Void}(0)) +end + +function set_optimizer(self :: KVStore, optimizer :: AbstractOptimizer) + ref_is_worker = Ref{Cint}(0) + @mxcall(:MXKVStoreIsWorkerNode, (Ref{Cint},), ref_is_worker) + is_worker = ref_is_worker[] + + if ismatch(r"dist", string(get_type(self))) && is_worker + # TODO + else + set_updater(self, get_updater(optimizer)) + end +end diff --git a/src/metric.jl b/src/metric.jl index 94dd97a9b911..d4f38e4545f5 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -7,7 +7,7 @@ type Accuracy <: AbstractEvalMetric Accuracy() = new(0.0, 0) end -function update!(metric :: Accuracy, label :: NDArray, pred :: NDArray) +function _update_single_output(metric :: Accuracy, labels :: NDArray, pred :: NDArray) label = copy(label) pred = copy(pred) @@ -19,9 +19,16 @@ function update!(metric :: Accuracy, label :: NDArray, pred :: NDArray) end end +function update!(metric :: Accuracy, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + @assert length(labels) == length(preds) + for i = 1:length(labels) + _update_single_output(labels[i], preds[i]) + end +end + import Base: get function get(metric :: Accuracy) - metric.acc_sum / metric.n_sample + return [(:accuracy, metric.acc_sum / metric.n_sample)] end function reset!(metric :: Accuracy) diff --git a/src/ndarray.jl b/src/ndarray.jl index 95757917bd29..0b8b4c155cc1 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -305,6 +305,9 @@ function *(arg0 :: NDArray, arg :: Real) 
ret = copy(arg0, context(arg0)) mul_to!(ret, arg) end +function *(arg0 :: Real, arg :: NDArray) + *(arg, arg0) +end function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable diff --git a/src/optimizer.jl b/src/optimizer.jl new file mode 100644 index 000000000000..eb4ca4db3b5e --- /dev/null +++ b/src/optimizer.jl @@ -0,0 +1,76 @@ +abstract AbstractOptimizer + +abstract AbstractLearningRateScheduler +abstract AbstractMomentumScheduler + +type FixedLearningRateScheduler <: AbstractLearningRateScheduler + learning_rate :: Float64 +end +get_learning_rate(self :: FixedLearningRateScheduler, iter :: Int) = self.learning_rate + +type NullMomentumScheduler <: AbstractMomentumScheduler +end +get_momentum(self :: NullMomentumScheduler, iter :: Int) = 0.0 + +type FixedMomentumScheduler <: AbstractMomentumScheduler + momentum :: Float64 +end +get_momentum(self :: FixedMomentumScheduler, iter :: Int) = self.momentum + +type SGD <: AbstractOptimizer + iter :: Int + + lr_scheduler :: AbstractLearningRateScheduler + mom_scheduler :: AbstractMomentumScheduler + weight_decay :: Float64 + grad_scale :: Float64 + grad_clip :: Float64 + inv_batch_size:: Float64 + + function SGD(;lr_scheduler::AbstractLearningRateScheduler=FixedLearningRateScheduler(0.01), + mom_scheduler::AbstractMomentumScheduler=NullMomentumScheduler(), + weight_decay::Float64=0.0001, + grad_scale::Float64=1.0, + clip_grad::Float64=0.0) + new(0, lr_scheduler, mom_scheduler, weight_decay, grad_scale, grad_clip, 1.0) + end +end + +function create_state(self :: SGD, index :: Int, weight :: NDArray) + if isa(self.mom_scheduler, NullMomentumScheduler) + return nothing + else + return zeros(size(weight), context(weight)) + end +end + +function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Union{Void, NDArray}) + lr = get_learning_rate(self.lr_scheduler, self.iter) + grad_scale = self.grad_scale * self.inv_batch_size + + if isa(state, Void) + @inplace weight += -lr 
* (grad_scale * grad + self.weight_decay * weight) + else + mom = state :: NDArray + coef = get_momentum(self.mom_scheduler, self.iter) + @inplace mom .*= coef + if self.clip_gradient > 0 + # TODO: + else + @inplace mom += -lr * (grad_scale * grad + self.weight_decay * weight) + end + @inplace weight += mom + end +end + + +function get_updater(optimizer :: AbstractOptimizer) + states = Dict{Int,Any}() + function updater(index :: Int, grad :: NDArray, weight :: NDArray) + if !haskey(states, index) + states[index] = create_state(optimizer, index, weight) + end + update(optimizer, index, weight, grad, states[index]) + end + return updater +end diff --git a/src/symbol.jl b/src/symbol.jl index a352173b7a43..b97e7a7594c8 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -111,10 +111,11 @@ macro _infer_shape(self, keys, indptr, sdata) function build_shapes(shape_size::MX_uint, shape_ndim::Ptr{MX_uint}, shape_data::Ptr{Ptr{MX_uint}}) shape_ndim = pointer_to_array(shape_ndim, shape_size) shape_data = pointer_to_array(shape_data, shape_size) - map(1:shape_size) do i + shapes = map(1:shape_size) do i my_shape = pointer_to_array(shape_data[i], shape_ndim[i]) tuple(flipdim(Int[my_shape...],1)...) 
end + convert(Vector{Tuple}, shapes) end return ( build_shapes(ref_arg_shape_size[], ref_arg_shape_ndim[], ref_arg_shape_data[]), diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 1ac65ae78464..0e9e33f12244 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -10,22 +10,23 @@ function test_mnist() mnist_provider = mx.MNISTProvider(image=filenames[:train_data], label=filenames[:train_label], batch_size=batch_size, silent=true, shuffle=false) - spec = mx.provides(mnist_provider) - spec = Dict(spec) - @test haskey(spec, :data) - @test haskey(spec, :softmax_label) - @test spec[:data] == (28,28,1,batch_size) - @test spec[:softmax_label] == (batch_size,) + data_spec = mx.provide_data(mnist_provider) + label_spec = mx.provide_label(mnist_provider) + @test data_spec == [(:data, (28,28,1,batch_size))] + @test label_spec == [(:softmax_label, (batch_size,))] n_batch = 0 for batch in mnist_provider if n_batch == 0 data_array = mx.empty(28,28,1,batch_size) label_array = mx.empty(batch_size) - targets = Dict(:data => [(1:batch_size, data_array)], - :softmax_label => [(1:batch_size, label_array)]) + # have to use "for i=1:1" to get over the legacy "feature" of using + # [ ] to do concatenation in Julia + data_targets = [[(1:batch_size, data_array)] for i = 1:1] + label_targets = [[(1:batch_size, label_array)] for i = 1:1] - mx.load_data!(batch, targets) + mx.load_data!(batch, data_targets) + mx.load_label!(batch, label_targets) true_labels = [5,0,4,1,9,2,1,3,1,4] # the first 10 labels in MNIST train got_labels = Int[copy(label_array)...] 
From 8a0a43bcfd5afcd7b5972fb23718cde9019c2777 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 17 Oct 2015 14:07:27 -0400 Subject: [PATCH 062/630] fix initializers --- src/MXNet.jl | 1 + src/estimator.jl | 11 ++++++----- src/initializer.jl | 31 ++++++++++++++++--------------- src/optimizer.jl | 2 +- 4 files changed, 24 insertions(+), 21 deletions(-) diff --git a/src/MXNet.jl b/src/MXNet.jl index cd96b3573af5..cfa56b21d292 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -20,6 +20,7 @@ include("executor.jl") include("metric.jl") include("optimizer.jl") +include("initializer.jl") include("io.jl") include("kvstore.jl") diff --git a/src/estimator.jl b/src/estimator.jl index b3bd67e19bf4..5aa0446cbff1 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -23,7 +23,7 @@ end function _split_inputs(batch_size :: Int, n_split :: Int) @assert(batch_size >= n_split) per_split = floor(Int, batch_size / n_split) - counts = zeros(Int, n_split)+per_split + counts = Base.zeros(Int, n_split)+per_split extra = batch_size - sum(counts) counts[1:extra] += 1 @@ -41,7 +41,7 @@ function FeedForward(arch :: Symbol; context :: Union{Context, Vector{Context}, FeedForward(arch, context) end -function _init_params(self :: FeedForward, data :: AbstractDataProvider) +function _init_params(self :: FeedForward, data :: AbstractDataProvider, initializer) # all arg names, including data, label, and parameters arg_names = list_arguments(self.arch) @@ -64,10 +64,10 @@ function _init_params(self :: FeedForward, data :: AbstractDataProvider) # initialize the contents of the parameters for (k,v) in self.arg_params - self.initializer(k, v) + initializer(k, v) end for (k,v) in self.aux_params - self.initializer(k, v) + initializer(k, v) end return (param_names, aux_names) @@ -98,6 +98,7 @@ function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params : end function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; + initializer :: 
AbstractInitializer = UniformInitializer(0.01), epoch_stop :: Int = 10, epoch_start :: Int = 1, eval_data :: Union{Void, AbstractDataProvider} = nothing, eval_metric :: AbstractEvalMetric = Accuracy(), @@ -111,7 +112,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # initialize parameters info("Initializing parameters...") - param_names, aux_names = _init_params(self, param_names, aux_names) + param_names, aux_names = _init_params(self, data, initializer) # setup kvstore if isa(kvstore, Base.Symbol) diff --git a/src/initializer.jl b/src/initializer.jl index 5a3dc5ef005c..502baf2189e1 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -1,34 +1,34 @@ abstract AbstractInitializer -function call(self :: AbstractInitializer, name :: Symbol, array :: NDArray) - name = string(name) - if endswith(name, "bias") +function call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + strname = string(name) + if endswith(strname, "bias") _init_bias(self, name, array) - elseif endswith(name, "gamma") + elseif endswith(strname, "gamma") _init_gamma(self, name, array) - elseif endswith(name, "beta") + elseif endswith(strname, "beta") _init_beta(self, name, array) - elseif endswith(name, "weight") + elseif endswith(strname, "weight") _init_weight(self, name, array) - elseif endswith(name, "moving_mean") + elseif endswith(strname, "moving_mean") _init_zero(self, name, array) - elseif endswith(name, "moving_var") + elseif endswith(strname, "moving_var") _init_zero(self, name, array) else _init_default(self, name, array) end end -function _init_bias(self :: AbstractInitializer, name :: Symbol, array :: NDArray) +function _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) array[:] = 0 end -function _init_gamma(self :: AbstractInitializer, name :: Symbol, array :: NDArray) +function _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) array[:] = 1 end -function 
_init_beta(self :: AbstractInitializer, name :: Symbol, array :: NDArray) +function _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) array[:] = 0 end -function _init_zero(self :: AbstractInitializer, name :: Symbol, array :: NDArray) +function _init_zero(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) array[:] = 0 end @@ -37,7 +37,7 @@ immutable UniformInitializer <: AbstractInitializer end UniformInitializer() = UniformInitializer(0.07) -function _init_weight(self :: UniformInitializer, name :: Symbol, array :: NDArray) +function _init_weight(self :: UniformInitializer, name :: Base.Symbol, array :: NDArray) rand!(-self.scale, self.scale, array) end @@ -47,13 +47,14 @@ immutable NormalInitializer <: AbstractInitializer end NormalInitializer(; mu=0, sigma=0.01) = NormalInitializer(mu, sigma) -function _init_weight(self :: NormalInitializer, name :: Symbol, array :: NDArray) +function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) randn!(self.μ, self.σ, array) end immutable XaiverInitializer <: AbstractInitializer end -function _init_weight(self :: NormalInitializer, name :: Symbol, array :: NDArray) + +function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) dims = size(array) fan_in = prod(dims[2:end]) fan_out = dims[1] diff --git a/src/optimizer.jl b/src/optimizer.jl index eb4ca4db3b5e..050a752f0ad7 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -31,7 +31,7 @@ type SGD <: AbstractOptimizer mom_scheduler::AbstractMomentumScheduler=NullMomentumScheduler(), weight_decay::Float64=0.0001, grad_scale::Float64=1.0, - clip_grad::Float64=0.0) + grad_clip::Float64=0.0) new(0, lr_scheduler, mom_scheduler, weight_decay, grad_scale, grad_clip, 1.0) end end From 92ec4997b6f166555f28190883a466a9f89538e4 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 17 Oct 2015 14:22:21 -0400 Subject: [PATCH 063/630] fix executor --- src/estimator.jl | 3 ++- 
src/executor.jl | 12 ++++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/estimator.jl b/src/estimator.jl index 5aa0446cbff1..794db739ff9c 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -123,7 +123,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra train_execs = Array(Executor, num_dev) for i = 1:num_dev data_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)] - train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=GRAD_WRITE, data_shapes...) + label_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)] + train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=GRAD_WRITE, data_shapes..., label_shapes...) end # set up input data structures diff --git a/src/executor.jl b/src/executor.jl index 6ddcff91efea..82b10f8982bc 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -96,14 +96,14 @@ function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_W @assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") arg_arrays = NDArray[zeros(shape, ctx) for shape in arg_shapes] + arg_names = list_arguments(self) if grad_req == GRAD_NOP grad_arrays = nothing else - grad_arrays = Dict{Base.Symbol, NDArray} - provided_data_names = keys(kwargs) - for (name, shape) in zip(list_arguments(self), grad_shapes) - # TODO: use a better way to identify data - #if !(endswith(string(name), "data") || endswith(string(name), "label")) + provided_data_names = [x[1] for x in kwargs] + grad_arrays = Dict{Base.Symbol,NDArray}() + for (name, shape) in zip(arg_names, grad_shapes) + # if not in provided data, should be parameters if !in(name, provided_data_names) grad_arrays[name] = zeros(shape, ctx) end @@ -111,7 +111,7 @@ function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_W end aux_arrays = [zeros(shape, ctx) for shape in aux_shapes] - return bind(self, ctx, 
arg_ndarrays, grad_arrays, grad_req, aux_arrays) + return bind(self, ctx, arg_arrays, args_grad=grad_arrays, grad_req=grad_req, aux_states=aux_arrays) end From 8c0e465e7c621dc029273da6073f32871b7c52dd Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 17 Oct 2015 15:44:11 -0400 Subject: [PATCH 064/630] bug fix --- src/context.jl | 4 ++++ src/estimator.jl | 42 ++++++++++++++++++++---------------------- src/executor.jl | 4 ++-- src/io.jl | 19 ++++++++++--------- src/metric.jl | 4 ++-- src/ndarray.jl | 4 ++++ src/optimizer.jl | 2 +- 7 files changed, 43 insertions(+), 36 deletions(-) diff --git a/src/context.jl b/src/context.jl index 3dbf7e6e482a..4a9f4667d4cf 100644 --- a/src/context.jl +++ b/src/context.jl @@ -9,6 +9,10 @@ end Context(dev_type :: Union{CONTEXT_TYPE, Integer}, dev_id :: Integer = 0) = Context(convert(CONTEXT_TYPE, dev_type), convert(Cint, dev_id), Nullable{Context}()) +function Base.show(io :: IO, ctx :: Context) + print(io, "$(ctx.device_type)$(ctx.device_id))") +end + # global default context DEFAULT_CONTEXT = Context(CPU) diff --git a/src/estimator.jl b/src/estimator.jl index 794db739ff9c..e0f2fbc7a6ac 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -11,11 +11,6 @@ type FeedForward <: AbstractEstimator FeedForward(arch :: Symbol, ctx :: Vector{Context}) = new(arch, ctx) end -function _check_arguments(symbol :: Symbol) - arg_names = list_arguments(symbol) - @assert(length(unique(arg_names)) == length(arg_names), "Duplicated names in arguments $arg_names") -end - """Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that piece. @@ -53,7 +48,7 @@ function _init_params(self :: FeedForward, data :: AbstractDataProvider, initial param_names = setdiff(arg_names, data_names ∪ label_names) aux_names = list_auxiliary_states(self.arch) - arg_shapes, grad_shapes, aux_shapes = infer_shape(self.arch; data_shapes...) 
+ arg_shapes, out_shapes, aux_shapes = infer_shape(self.arch; data_shapes...) if !isdefined(self, :arg_params) param_name_shapes = filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) self.arg_params = Dict([name => empty(shape) for (name,shape) in param_name_shapes]) @@ -70,7 +65,7 @@ function _init_params(self :: FeedForward, data :: AbstractDataProvider, initial initializer(k, v) end - return (param_names, aux_names) + return (arg_names, param_names, aux_names) end function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) @@ -112,7 +107,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # initialize parameters info("Initializing parameters...") - param_names, aux_names = _init_params(self, data, initializer) + arg_names, param_names, aux_names = _init_params(self, data, initializer) # setup kvstore if isa(kvstore, Base.Symbol) @@ -128,21 +123,23 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end # set up input data structures - data_names = [x[1] for x in provide_data(data)] - label_names = [x[1] for x in provide_label(data)] + data_names = [x[1] for x in provide_data(data)] + label_names = [x[1] for x in provide_label(data)] + + data_arrays = [SlicedNDArray[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] + for name in data_names] + label_arrays = [SlicedNDArray[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] + for name in label_names] - data_arrays = Vector{NDArray}[[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] - for name in data_names] - label_arrays = Vector{NDArray}[[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] - for name in label_names] + param_idx = filter(i -> in(arg_names[i], param_names), 1:length(arg_names)) - param_arrays = Vector{NDArray}[[exec.arg_arrays[i] for exec in train_execs] for i = 1:length(param_names)] - 
grad_arrays = Vector{NDArray}[[exec.grad_arrays[i] for exec in train_execs] for i = 1:length(param_names)] + param_arrays = [NDArray[exec.arg_arrays[i] for exec in train_execs] for i in param_idx] + grad_arrays = [NDArray[exec.grad_arrays[i] for exec in train_execs] for i in param_idx] optimizer.inv_batch_size = 1.0/batch_size if !update_on_kvstore - updater = get_updater(self.optimizer) + updater = get_updater(optimizer) end if !isa(kvstore, Void) @@ -150,6 +147,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra set_optimizer(kvstore, optimizer) end + info("Initializing KVStore...") # init kv with gradients for idx = 1:length(param_arrays) param_on_devs = param_arrays[idx] @@ -167,7 +165,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra cpu_dev = Context(CPU) cpu_output_arrays = [empty(shape, cpu_dev) for shape in output_shapes] cpu_label_arrays = [empty(shape, cpu_dev) for (name,shape) in provide_label(data)] - cpu_label_arrays_full_slice = [(1:batch_size, x) for x in label_arrays] + cpu_label_arrays_full_slice = [SlicedNDArray[(1:batch_size, x)] for x in cpu_label_arrays] # now start training... 
for i_epoch = epoch_start:epoch_stop @@ -226,12 +224,12 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end # end of one epoch time_stop = time() - info("== Epoch {1:0>3d} ==========", i_epoch) + info(format("== Epoch {1:0>3d} ==========", i_epoch)) info("## Training summary") for (name, value) in get(eval_metric) - info("{1>15s} = {2:.4f}", name, value) + info(format("{1:>15s} = {2:.4f}", name, value)) end - info("{1>15s} = {2:.2f} seconds", "time", (time_stop-time_start)/1e9) + info(format("{1:>15s} = {2:.2f} seconds", "time", (time_stop-time_start)/1e9)) # evaluation on validation set if !isa(eval_data, Void) @@ -259,7 +257,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra info("## Validation summary") for (name, value) in get(eval_metric) - info("{1>15s} = {2:.4f}", name, value) + info(format("{1:>15s} = {2:.4f}", name, value)) end end end # end of all epochs diff --git a/src/executor.jl b/src/executor.jl index 82b10f8982bc..94aabba5ddd5 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -92,7 +92,7 @@ function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict end function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) - arg_shapes, grad_shapes, aux_shapes = infer_shape(self; kwargs...) + arg_shapes, out_shapes, aux_shapes = infer_shape(self; kwargs...) 
@assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") arg_arrays = NDArray[zeros(shape, ctx) for shape in arg_shapes] @@ -102,7 +102,7 @@ function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_W else provided_data_names = [x[1] for x in kwargs] grad_arrays = Dict{Base.Symbol,NDArray}() - for (name, shape) in zip(arg_names, grad_shapes) + for (name, shape) in zip(arg_names, arg_shapes) # if not in provided data, should be parameters if !in(name, provided_data_names) grad_arrays[name] = zeros(shape, ctx) diff --git a/src/io.jl b/src/io.jl index 834e7ad755a4..5587fbc97575 100644 --- a/src/io.jl +++ b/src/io.jl @@ -57,21 +57,22 @@ abstract AbstractDataProvider """Root type for states of data provider""" abstract AbstractDataProviderState -"""A list of (slice, NDArray) pairs. Usually each NDArray resides on a different device, and each +"""A tuple of (slice, NDArray). Usually each NDArray resides on a different device, and each slice describe which part of a larger piece of data should goto that device. """ -typealias SlicedNDArray Vector{Tuple{UnitRange{Int},NDArray}} +typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} """Root type for data batch A data batch must implement the following interface function to actually provide the data and label. - load_data!(batch :: AbstractDataBatch, targets :: Vector{SlicedNDArray}) - load_label!(batch :: AbstractDataBatch, targets :: Vector{SlicedNDArray}) + load_data!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) + load_label!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - Load data and label into targets. The target is a list of `SlicedNDArray` the data/label should be + Load data and label into targets. The targets is a list of target that the data/label should be copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and - `provide_label`. + `provide_label`. 
Each entry in the list is again a list of `SlicedNDArray`, corresponding the + memory buffer for each device. The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. @@ -165,7 +166,7 @@ function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) return (MXDataBatch(provider), state) end -function _load_general!(batch :: MXDataBatch, loader :: Function, targets :: Vector{SlicedNDArray}) +function _load_general!(batch :: MXDataBatch, loader :: Function, targets :: Vector{Vector{SlicedNDArray}}) @assert length(targets) == 1 src = loader(batch.provider.handle) for (idx, target) in targets[1] @@ -173,10 +174,10 @@ function _load_general!(batch :: MXDataBatch, loader :: Function, targets :: Vec end end -function load_data!(batch :: MXDataBatch, targets :: Vector{SlicedNDArray}) +function load_data!(batch :: MXDataBatch, targets :: Vector{Vector{SlicedNDArray}}) _load_general!(batch, _get_data, targets) end -function load_label!(batch :: MXDataBatch, targets :: Vector{SlicedNDArray}) +function load_label!(batch :: MXDataBatch, targets :: Vector{Vector{SlicedNDArray}}) _load_general!(batch, _get_label, targets) end diff --git a/src/metric.jl b/src/metric.jl index d4f38e4545f5..99cdc96a94db 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -7,7 +7,7 @@ type Accuracy <: AbstractEvalMetric Accuracy() = new(0.0, 0) end -function _update_single_output(metric :: Accuracy, labels :: NDArray, pred :: NDArray) +function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) label = copy(label) pred = copy(pred) @@ -22,7 +22,7 @@ end function update!(metric :: Accuracy, labels :: Vector{NDArray}, preds :: Vector{NDArray}) @assert length(labels) == length(preds) for i = 1:length(labels) - _update_single_output(labels[i], preds[i]) + _update_single_output(metric, labels[i], preds[i]) end end diff --git a/src/ndarray.jl b/src/ndarray.jl index 0b8b4c155cc1..cd23eb59cc98 100644 --- a/src/ndarray.jl +++ 
b/src/ndarray.jl @@ -40,6 +40,10 @@ type NDArray end end +function Base.show(io :: IO, arr :: NDArray) + print(io, "mx.NDArray$(size(arr))") +end + function NDArray{T<:Real}(data :: Array{T}) copy(data, mx.DEFAULT_CONTEXT) end diff --git a/src/optimizer.jl b/src/optimizer.jl index 050a752f0ad7..191fc6a855c2 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -54,7 +54,7 @@ function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, s mom = state :: NDArray coef = get_momentum(self.mom_scheduler, self.iter) @inplace mom .*= coef - if self.clip_gradient > 0 + if self.grad_clip > 0 # TODO: else @inplace mom += -lr * (grad_scale * grad + self.weight_decay * weight) From 614de3a40ea0b9687a6d47a763d05668c0856e11 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 17 Oct 2015 16:27:13 -0400 Subject: [PATCH 065/630] mnist mlp example --- examples/mnist/mlp.jl | 33 +++++++++++++++++++++++++++++++++ src/estimator.jl | 4 +++- src/metric.jl | 1 + test/unittest/ndarray.jl | 16 ++++++++++++++++ 4 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 examples/mnist/mlp.jl diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl new file mode 100644 index 000000000000..88e4dade744a --- /dev/null +++ b/examples/mnist/mlp.jl @@ -0,0 +1,33 @@ +using MXNet + +# define MLP +data = mx.variable(:data) +fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) +act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) +fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) +act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) +fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) +mlp = mx.Softmax(data = fc3, name=:softmax) + +# download MNIST into Pkg.dir("MXNet")/data/mnist if not exist +filenames = mx.get_mnist_ubyte() + +# data provider +batch_size = 100 +train_provider = mx.MNISTProvider(image=filenames[:train_data], + label=filenames[:train_label], + batch_size=batch_size, shuffle=true, flat=true) 
+eval_provider = mx.MNISTProvider(image=filenames[:test_data], + label=filenames[:test_label], + batch_size=batch_size, shuffle=false, flat=true) + +# setup estimator +estimator = mx.FeedForward(mlp, context=mx.Context(mx.CPU)) + +# optimizer +optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.1), + mom_scheduler=mx.FixedMomentumScheduler(0.9), + weight_decay=0.00001) + +# fit parameters +mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) diff --git a/src/estimator.jl b/src/estimator.jl index e0f2fbc7a6ac..c4e4859bcfc4 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -120,6 +120,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra data_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)] label_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)] train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=GRAD_WRITE, data_shapes..., label_shapes...) 
+ + copy_params_from(train_execs[i], self.arg_params, self.aux_params) end # set up input data structures @@ -229,7 +231,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra for (name, value) in get(eval_metric) info(format("{1:>15s} = {2:.4f}", name, value)) end - info(format("{1:>15s} = {2:.2f} seconds", "time", (time_stop-time_start)/1e9)) + info(format("{1:>15s} = {2:.4f} seconds", "time", time_stop-time_start)) # evaluation on validation set if !isa(eval_data, Void) diff --git a/src/metric.jl b/src/metric.jl index 99cdc96a94db..59acb529a9a7 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -16,6 +16,7 @@ function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDA for i = 1:n_sample klass = indmax(sub(pred,:,i)) metric.acc_sum += (klass-1) == label[i] + #println("$(sub(pred,:,i)) $(klass-1) <=> $(label[i])") end end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 04fc3a632204..d547159af1e2 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -160,6 +160,21 @@ function test_div() @test reldiff(t2./scalar, copy(a2./scalar)) < 1e-6 end +function test_gd() + dims = rand_dims() + tw, aw = rand_tensors(dims) + tg, ag = rand_tensors(dims) + + info("NDArray::gd::dims = $dims") + + lr = rand() + wd = rand() + + @mx.inplace aw += -lr * (ag + wd * aw) + tw += -lr * (tg + wd * tw) + @test reldiff(copy(aw), tw) < 1e-6 +end + ################################################################################ # Run tests @@ -171,5 +186,6 @@ test_plus() test_minus() test_mul() test_div() +test_gd() end From 61fa00c8ad58d368c59a4bef5d69dbbead985ef9 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 17 Oct 2015 16:51:02 -0400 Subject: [PATCH 066/630] lenet example (not tested) --- examples/mnist/lenet.jl | 48 ++++++++++++++++++++++++++++++++++++ examples/mnist/mlp.jl | 8 ++---- examples/mnist/mnist-data.jl | 16 ++++++++++++ 3 files changed, 66 insertions(+), 6 deletions(-) create 
mode 100644 examples/mnist/lenet.jl create mode 100644 examples/mnist/mnist-data.jl diff --git a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl new file mode 100644 index 000000000000..7efe4c42eedc --- /dev/null +++ b/examples/mnist/lenet.jl @@ -0,0 +1,48 @@ +using MXNet + +#-------------------------------------------------------------------------------- +# define lenet + +# input +data = mx.variable(:data) + +# first conv +conv1 = mx.Convolution(data=data, kernel=(5,5), num_filter=20) +tanh1 = mx.Activation(data=conv1, act_type=:tanh) +pool1 = mx.Pooling(data=tanh1, pool_type=:max, kernel=(2,2), stride=(2,2)) + +# second conv +conv2 = mx.Convolution(data=pool1, kernel=(5,5), num_filter=50) +tanh2 = mx.Activation(data=conv2, act_type=:tanh) +pool2 = mx.Pooling(data=tanh2, pool_type=:max, kernel=(2,2), stride=(2,2)) + +# first fully-connected +flat = mx.Flatten(data=pool2) +fc1 = mx.FullyConnected(data=flat, num_hidden=500) +tanh3 = mx.Activation(data=fc1, act_type=:tanh) + +# second fully-connected +fc2 = mx.FullyConnected(data=tanh3, num_hidden=10) + +# softmax loss +lenet = mx.Softmax(data=fc2, name=:softmax) + + +#-------------------------------------------------------------------------------- +# load data +batch_size = 100 +include("mnist-data.jl") +train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) + +#-------------------------------------------------------------------------------- +# fit model +dev = mx.Context(mx.GPU) +estimator = mx.FeedForward(lenet, context=dev) + +# optimizer +optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), + mom_scheduler=mx.FixedMomentumScheduler(0.9), + weight_decay=0.00001) + +# fit parameters +mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 88e4dade744a..446869b46d31 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -14,12 +14,8 @@ filenames = mx.get_mnist_ubyte() # 
data provider batch_size = 100 -train_provider = mx.MNISTProvider(image=filenames[:train_data], - label=filenames[:train_label], - batch_size=batch_size, shuffle=true, flat=true) -eval_provider = mx.MNISTProvider(image=filenames[:test_data], - label=filenames[:test_label], - batch_size=batch_size, shuffle=false, flat=true) +include("mnist-data.jl") +train_provider, eval_provider = get_mnist_providers(batch_size) # setup estimator estimator = mx.FeedForward(mlp, context=mx.Context(mx.CPU)) diff --git a/examples/mnist/mnist-data.jl b/examples/mnist/mnist-data.jl new file mode 100644 index 000000000000..7349152617f9 --- /dev/null +++ b/examples/mnist/mnist-data.jl @@ -0,0 +1,16 @@ +function get_mnist_providers(batch_size::Int; data_name=:data, label_name=:softmax_label, flat=true) + # download MNIST into Pkg.dir("MXNet")/data/mnist if not exist + filenames = mx.get_mnist_ubyte() + + # data provider + train_provider = mx.MNISTProvider(image=filenames[:train_data], + label=filenames[:train_label], + data_name=data_name, label_name=label_name, + batch_size=batch_size, shuffle=true, flat=flat, silent=true) + eval_provider = mx.MNISTProvider(image=filenames[:test_data], + label=filenames[:test_label], + data_name=data_name, label_name=label_name, + batch_size=batch_size, shuffle=false, flat=flat, silent=true) + + return (train_provider, eval_provider) +end From 383fd98996b49fed4311cfe3b138946fcc3c4467 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 17 Oct 2015 19:07:41 -0400 Subject: [PATCH 067/630] fix kvstore closure --- src/context.jl | 2 +- src/estimator.jl | 2 +- src/init.jl | 1 + src/kvstore.jl | 26 ++++++++++++++++++-------- 4 files changed, 21 insertions(+), 10 deletions(-) diff --git a/src/context.jl b/src/context.jl index 4a9f4667d4cf..20b0c7b14ee6 100644 --- a/src/context.jl +++ b/src/context.jl @@ -1,4 +1,4 @@ -@enum CONTEXT_TYPE CPU=1 GPU=2 +@enum CONTEXT_TYPE CPU=1 GPU=2 CPU_PINNED=3 type Context device_type :: CONTEXT_TYPE diff --git 
a/src/estimator.jl b/src/estimator.jl index c4e4859bcfc4..e349ec952115 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -75,7 +75,7 @@ function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params : if kv_type == :local max_size = maximum([prod(size(param)) for (k,param) in arg_params]) if max_size < 1024 * 1024 * 16 - kv_type = :loca_update_cpu + kv_type = :local_update_cpu else kv_type = :local_allreduce_cpu end diff --git a/src/init.jl b/src/init.jl index f23d9f3b2207..fd8f57dd32f9 100644 --- a/src/init.jl +++ b/src/init.jl @@ -100,3 +100,4 @@ end @mx_define_handle_t(MX_ExecutorHandle, MXExecutorFree) @mx_define_handle_t(MX_DataIterHandle, MXDataIterFree) @mx_define_handle_t(MX_KVStoreHandle, MXKVStoreFree) + diff --git a/src/kvstore.jl b/src/kvstore.jl index 6d0642a6d2a7..562edc3d4850 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -1,15 +1,18 @@ type KVStore handle :: MX_KVStoreHandle updater_c :: Ptr{Void} + updater :: Function + + KVStore(hdr :: MX_KVStoreHandle) = new(hdr, Ptr{Void}(0)) end function KVStore(kv_type::Base.Symbol = :local) - @assert(kv_type ∈ [:local]) # TODO: update with allowed types + #@assert(kv_type ∈ [:local]) # TODO: update with allowed types ref_hdr = Ref{MX_handle}(0) kv_type = string(kv_type) @mxcall(:MXKVStoreCreate, (char_p, Ref{MX_handle}), kv_type, ref_hdr) - return KVStore(MX_KVStoreHandle(ref_hdr[]), Ptr{Void}(0)) + return KVStore(MX_KVStoreHandle(ref_hdr[])) end function Base.unsafe_convert(::Type{MX_handle}, obj::KVStore) Base.unsafe_convert(MX_handle, obj.handle) @@ -100,14 +103,21 @@ function get_rank(self :: KVStore) end +# TODO: Currently Julia does not support closure in c-callbacks, so we are making use of the +# extra handle parameter of the API to pass the updater object around. Fix this when someday +# full closure cfunction is supported in Julia. 
+function _kvstore_update_wrapper(index::Cint, nd_recv::MX_handle, nd_local::MX_handle, updater::Ptr{Void}) + x = unsafe_pointer_to_objref(updater) + updater_func = unsafe_pointer_to_objref(updater) :: Function + updater_func(Int(index), NDArray(MX_NDArrayHandle(nd_recv)), NDArray(MX_NDArrayHandle(nd_local))) + return nothing +end function set_updater(self :: KVStore, updater :: Function) - function updater_wrapper(index :: Cint, nd_recv :: MX_handle, nd_local :: MX_handle, ::Ptr{Void}) - updater(index, NDArray(MX_NDArrayHandle(nd_recv)), NDArray(MX_NDArrayHandle(nd_local))) - end - self.wrapper_c = cfunction(updater_wrapper, Void, (Cint, MX_handle, MX_handle, Ptr{Void})) + self.updater = updater # keep a reference to the julia object so that updater_c is kept valid + self.updater_c = cfunction(_kvstore_update_wrapper, Void, (Cint, MX_handle, MX_handle, Ptr{Void})) - @mxcall(:MXKVStoreSetUpdater, (MX_handle, Ptr{Void}, Ptr{Void}), - self, self.wrapper_c, Ptr{Void}(0)) + @mxcall(:MXKVStoreSetUpdater, (MX_handle, Ptr{Void}, Any), + self, self.updater_c, updater) end function set_optimizer(self :: KVStore, optimizer :: AbstractOptimizer) From 7f8612ef209a1bcc3528ee326fe9988bbb23d6b9 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 17 Oct 2015 21:50:39 -0400 Subject: [PATCH 068/630] cifar10 example (pushing to GPU server for testing) --- examples/cifar10/cifar10.jl | 86 +++++++++++++++++++++++++++++++++++++ examples/mnist/mlp.jl | 3 -- src/init.jl | 30 +++++++++++++ src/symbol.jl | 2 +- src/util.jl | 17 ++++++++ 5 files changed, 134 insertions(+), 4 deletions(-) create mode 100644 examples/cifar10/cifar10.jl diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl new file mode 100644 index 000000000000..157bee6dd777 --- /dev/null +++ b/examples/cifar10/cifar10.jl @@ -0,0 +1,86 @@ +using MXNet + +#-------------------------------------------------------------------------------- +# Helper functions to construct larger networks + +# basic Conv + BN + 
ReLU factory +function conv_factory(data, num_filter, kernel; stride=(1,1), pad=(0,0), act_type=:relu) + conv = mx.Convolution(data=data, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad) + bn = mx.BatchNorm(data=conv) + act = mx.Activation(data=bn, act_type=act_type) + return act +end + +# simple downsampling factory +function downsample_factory(data, ch_3x3) + # conv 3x3 + conv = conv_factory(data, ch_3x3, (3,3), stride=(2,2), pad=(1,1)) + # pool + pool = mx.Pooling(data=data, kernel=(3,3), stride=(2,2), pool_type=:max) + # concat + concat = mx.Concat(conv, pool) + return concat +end + +# a simple module +function simple_factory(data, ch_1x1, ch_3x3) + # 1x1 + conv1x1 = conv_factory(data, ch_1x1, (1,1); pad=(0,0)) + # 3x3 + conv3x3 = conv_factory(data, ch_3x3, (3,3); pad=(1,1)) + # concat + concat = mx.Concat(conv1x1, conv3x3) + return concat +end + + +#-------------------------------------------------------------------------------- +# Actual architecture +data = mx.variable(:data) +conv1 = conv_factory(data, 96, (3,3); pad=(1,1), act_type=:relu) +in3a = simple_factory(conv1, 32, 32) +in3b = simple_factory(in3a, 32, 48) +in3c = downsample_factory(in3b, 80) +in4a = simple_factory(in3c, 112, 48) +in4b = simple_factory(in4a, 96, 64) +in4c = simple_factory(in4b, 80, 80) +in4d = simple_factory(in4b, 48, 96) +in4e = downsample_factory(in4d, 96) +in5a = simple_factory(in4e, 176, 160) +in5b = simple_factory(in5a, 176, 160) +pool = mx.Pooling(data=in5b, pool_type=:avg, kernel=(7,7), name=:global_pool) +flatten = mx.Flatten(data=pool, name=:flatten1) +fc = mx.FullyConnected(data=flatten, num_hidden=10, name=:fc1) +softmax = mx.Softmax(data=fc, name=:loss) + + +#-------------------------------------------------------------------------------- +# Prepare data +filenames = get_cifar10() +batch_size = 128 +num_epoch = 10 +num_gpus = 1 + +train_provider = mx.ImageRecordProvider(label_name=:loss_label, + path_imgrec=filenames[:train], mean_img=filenames[:mean], + 
rand_crop=true, rand_mirror=true, data_shape=(28,28,3), + batch_size=batch_size, preprocess_threads=1) +test_provider = mx.ImageRecordProvider(label_name=:loss_label, + path_imgrec=filenames[:test], mean_img=filenames[:mean], + rand_crop=false, rand_mirror=false, data_shape=(28,28,3), + batch_size=batch_size, preprocess_threads=1) + + +#-------------------------------------------------------------------------------- +# Training model +gpus = [mx.Context(GPU, i) for i = 0:num_gpus-1] +estimator = mx.FeedForward(softmax, context=gpus) + +# optimizer +optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), + mom_scheduler=mx.FixedMomentumScheduler(0.9), + weight_decay=0.0001) + +# fit parameters +mx.fit(estimator, optimizer, train_provider, epoch_stop=num_epoch, eval_data=eval_provider, + initializer=mx.UniformInitializer(0.07)) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 446869b46d31..c2d30235e0f6 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -9,9 +9,6 @@ act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) mlp = mx.Softmax(data = fc3, name=:softmax) -# download MNIST into Pkg.dir("MXNet")/data/mnist if not exist -filenames = mx.get_mnist_ubyte() - # data provider batch_size = 100 include("mnist-data.jl") diff --git a/src/init.jl b/src/init.jl index fd8f57dd32f9..ac858f926c62 100644 --- a/src/init.jl +++ b/src/init.jl @@ -101,3 +101,33 @@ end @mx_define_handle_t(MX_DataIterHandle, MXDataIterFree) @mx_define_handle_t(MX_KVStoreHandle, MXKVStoreFree) +################################################################################ +# MXNet Params +# +# MXNet API use string to pass some common parameters like the configurations +# when defining layers. Typically, it is enough to use string(obj) to get a +# recognizable representation for libmxnet. However, there is currently a +# caveat: +# +# Because Julia use column-major ordering for tensors. 
In order to properly +# interact with Julia Arrays, the shape will look "reversed" from the Julia +# side. For example, a typical MNIST mini-batch tensor is of shape (28,28,1,100) +# from Julia side, while the shape information for the same piece of memory +# should be interpreted as (100,1,28,28) from C/C++/Python side. +# +# Therefore, when passing parameters to libmxnet, we should reverse the shape +# parameter. For example, when the user specify a non-square kernel size for +# a convolution or pooling layer. Unfortunately, those operators are automatically +# imported, and information about the type of each parameter is somehow limited. +# One hacky way is to match the type description for the string "Shape(tuple)" +# when importing operators. But currently we simply decided to reverse **all** +# NTuple{N, Int} passed to libmxnet. +# +# TODO: find a better solution in case this cause issues in the future. +################################################################################ +function dump_mx_param(val :: Any) + string(val) +end +function dump_mx_param{N,T<:Integer}(shape :: NTuple{N, T}) + string(tuple(flipdim([shape...],1)...)) +end diff --git a/src/symbol.jl b/src/symbol.jl index b97e7a7594c8..0a6204d40789 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -285,7 +285,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) symbol_kws[k] = v else push!(param_keys, string(k)) - push!(param_vals, string(v)) + push!(param_vals, dump_mx_param(v)) end end diff --git a/src/util.jl b/src/util.jl index 48a55348049e..b85787649c37 100644 --- a/src/util.jl +++ b/src/util.jl @@ -21,3 +21,20 @@ function get_mnist_ubyte() end return filenames end + +function get_cifar10() + data_dir = get_data_dir() + cifar10_dir = joinpath(data_dir, "cifar10") + mkpath(cifar10_dir) + filenames = Dict(:train => "train.rec", :test => "test.rec") + filenames = [k => joinpath(cifar10_dir, v) for (k,v) in filenames] + if !all(isfile, values(filenames)) + cd(cifar10_dir) do + 
run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip`) + run(`unzip -u cifar10.zip`) + end + end + + filenames[:mean] = joinpath(cifar10_dir, "cifar_mean.bin") + return filenames +end From edf087194938ed87b5c47e1fa2ac4a10ebe2556d Mon Sep 17 00:00:00 2001 From: pluskid Date: Sat, 17 Oct 2015 22:21:30 -0400 Subject: [PATCH 069/630] working cifar10 example --- examples/cifar10/cifar10.jl | 8 ++++---- src/context.jl | 2 +- src/io.jl | 2 +- src/symbol.jl | 9 +++++---- src/util.jl | 4 ++-- 5 files changed, 13 insertions(+), 12 deletions(-) diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index 157bee6dd777..9d627b0471c2 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -56,10 +56,10 @@ softmax = mx.Softmax(data=fc, name=:loss) #-------------------------------------------------------------------------------- # Prepare data -filenames = get_cifar10() +filenames = mx.get_cifar10() batch_size = 128 num_epoch = 10 -num_gpus = 1 +num_gpus = 8 train_provider = mx.ImageRecordProvider(label_name=:loss_label, path_imgrec=filenames[:train], mean_img=filenames[:mean], @@ -73,7 +73,7 @@ test_provider = mx.ImageRecordProvider(label_name=:loss_label, #-------------------------------------------------------------------------------- # Training model -gpus = [mx.Context(GPU, i) for i = 0:num_gpus-1] +gpus = [mx.Context(mx.GPU, i) for i = 0:num_gpus-1] estimator = mx.FeedForward(softmax, context=gpus) # optimizer @@ -82,5 +82,5 @@ optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), weight_decay=0.0001) # fit parameters -mx.fit(estimator, optimizer, train_provider, epoch_stop=num_epoch, eval_data=eval_provider, +mx.fit(estimator, optimizer, train_provider, epoch_stop=num_epoch, eval_data=test_provider, initializer=mx.UniformInitializer(0.07)) diff --git a/src/context.jl b/src/context.jl index 20b0c7b14ee6..06062500be6c 100644 --- a/src/context.jl +++ b/src/context.jl @@ -10,7 +10,7 @@ Context(dev_type :: 
Union{CONTEXT_TYPE, Integer}, dev_id :: Integer = 0) = Context(convert(CONTEXT_TYPE, dev_type), convert(Cint, dev_id), Nullable{Context}()) function Base.show(io :: IO, ctx :: Context) - print(io, "$(ctx.device_type)$(ctx.device_id))") + print(io, "$(ctx.device_type)$(ctx.device_id)") end diff --git a/src/io.jl b/src/io.jl index 5587fbc97575..cf618be3e4a4 100644 --- a/src/io.jl +++ b/src/io.jl @@ -204,7 +204,7 @@ function _define_data_iter_creator(hdr :: MX_handle) defun = quote function $iter_name(; kwargs...) arg_keys = AbstractString[string(k) for (k,v) in kwargs] - arg_vals = AbstractString[string(v) for (k,v) in kwargs] + arg_vals = AbstractString[dump_mx_param(v) for (k,v) in kwargs] ref_hdr = Ref{MX_handle}(0) @mxcall(:MXDataIterCreateIter, (MX_handle, MX_uint, char_pp, char_pp, Ref{MX_handle}), diff --git a/src/symbol.jl b/src/symbol.jl index 0a6204d40789..85799d524c51 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -253,8 +253,9 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) Ref{char_pp}, Ref{char_p}), hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, ref_kv_nargs) - func_name = symbol(bytestring(ref_name[])) - kv_nargs = symbol(bytestring(ref_kv_nargs[])) + func_name = symbol(bytestring(ref_name[])) + kv_nargs_s = bytestring(ref_kv_nargs[]) + kv_nargs = symbol(kv_nargs_s) # function $func_name(args...; kwargs...) 
func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) @@ -272,8 +273,8 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) $(if kv_nargs != symbol("") quote - if !in("$kv_narg", param_keys) - push!(param_keys, string("$kv_nargs")) + if !in($kv_nargs_s, param_keys) + push!(param_keys, $kv_nargs_s) push!(param_vals, string(length(args))) end end diff --git a/src/util.jl b/src/util.jl index b85787649c37..4a331fcbfec1 100644 --- a/src/util.jl +++ b/src/util.jl @@ -26,7 +26,7 @@ function get_cifar10() data_dir = get_data_dir() cifar10_dir = joinpath(data_dir, "cifar10") mkpath(cifar10_dir) - filenames = Dict(:train => "train.rec", :test => "test.rec") + filenames = Dict(:train => "cifar/train.rec", :test => "cifar/test.rec") filenames = [k => joinpath(cifar10_dir, v) for (k,v) in filenames] if !all(isfile, values(filenames)) cd(cifar10_dir) do @@ -35,6 +35,6 @@ function get_cifar10() end end - filenames[:mean] = joinpath(cifar10_dir, "cifar_mean.bin") + filenames[:mean] = joinpath(cifar10_dir, "cifar/cifar_mean.bin") return filenames end From aa18e9880436a496c020e8600268bba4e60efdc7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 19 Oct 2015 15:30:54 -0400 Subject: [PATCH 070/630] basic doc building setup --- docs/build.jl | 8 ++++++++ src/init.jl | 2 -- src/ndarray.jl | 2 -- 3 files changed, 8 insertions(+), 4 deletions(-) create mode 100644 docs/build.jl diff --git a/docs/build.jl b/docs/build.jl new file mode 100644 index 000000000000..d6a4ec9ef5d5 --- /dev/null +++ b/docs/build.jl @@ -0,0 +1,8 @@ +using MXNet +using Lexicon + +config = Config(md_permalink = false, mathjax = true) + +index = save("api/MXNet.md", MXNet.mx, config) +save("api/index.md", Index([index]), config; md_subheader = :category) + diff --git a/src/init.jl b/src/init.jl index ac858f926c62..2c4239ce8d25 100644 --- a/src/init.jl +++ b/src/init.jl @@ -1,5 +1,3 @@ -export MXError - "Exception thrown when an error occurred calling MXNet API." 
immutable MXError <: Exception msg :: AbstractString diff --git a/src/ndarray.jl b/src/ndarray.jl index cd23eb59cc98..1d80dcdbdd51 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,5 +1,3 @@ -export NDArray - # create a NDArray handle of specific shape function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) h_ref = Ref{MX_handle}(0) From ff055b557b38d4fb2543d8cfa380dec11dd3c6be Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 Oct 2015 00:28:58 -0400 Subject: [PATCH 071/630] test rtd hook --- .gitignore | 1 + docs/Makefile | 192 +++++++++++++++++++++ docs/api/MXNet.md | 416 ++++++++++++++++++++++++++++++++++++++++++++++ docs/api/index.md | 81 +++++++++ docs/conf.py | 292 ++++++++++++++++++++++++++++++++ docs/index.rst | 23 +++ docs/make.bat | 263 +++++++++++++++++++++++++++++ 7 files changed, 1268 insertions(+) create mode 100644 docs/Makefile create mode 100644 docs/api/MXNet.md create mode 100644 docs/api/index.md create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/make.bat diff --git a/.gitignore b/.gitignore index 2ff5c4a3d770..1b5cdca45b40 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ *.jl.*.cov *.jl.mem data +docs/_build diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 000000000000..ce3d7abdbfaa --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,192 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. 
If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + +clean: + rm 
-rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MXNetjl.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MXNetjl.qhc" + +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." 
+ @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/MXNetjl" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MXNetjl" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. 
The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/api/MXNet.md b/docs/api/MXNet.md new file mode 100644 index 000000000000..fbaee9ae02fa --- /dev/null +++ b/docs/api/MXNet.md @@ -0,0 +1,416 @@ +# MXNet.mx + +## Internal + +--- + + +#### _compose!(sym::MXNet.mx.Symbol) +Compose symbol on inputs + +*source:* +[MXNet/src/symbol.jl:199](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L199) + +--- + + +#### _default_get_name!(counter::Dict{Symbol, Int64}, name::Union{AbstractString, Symbol}, hint::Union{AbstractString, Symbol}) +Default implementation for generating a name for a symbol. + +When a name is specified by the user, it will be used. Otherwise, a name +is automatically generated based on the hint string. 
+ + +*source:* +[MXNet/src/name.jl:12](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/name.jl#L12) + +--- + + +#### _import_ndarray_functions() +Import dynamic functions for NDArrays. The arguments to the functions are typically ordered +as + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) + +unless NDARRAY_ARG_BEFORE_SCALAR is not set. In this case, the scalars are put before the input arguments: + + func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) + +If ACCEPT_EMPTY_MUTATE_TARGET is set. An overloaded function without the output arguments will also be defined: + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) + +Upon calling, the output arguments will be automatically initialized with empty NDArrays. + +Those functions always return the output arguments. If there is only one output (the typical situation), that +object (NDArray) is returned. Otherwise, a tuple containing all the outputs will be returned. + + +*source:* +[MXNet/src/ndarray.jl:361](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L361) + +--- + + +#### _split_inputs(batch_size::Int64, n_split::Int64) +Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector + of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that + piece. 
+ + +*source:* +[MXNet/src/estimator.jl:18](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/estimator.jl#L18) + +--- + + +#### copy!(dst::Array{Float32, N}, src::MXNet.mx.NDArray) +Copy data from NDArray to Julia Array + +*source:* +[MXNet/src/ndarray.jl:178](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L178) + +--- + + +#### copy!(dst::MXNet.mx.NDArray, src::MXNet.mx.NDArray) +Copy data between NDArrays + +*source:* +[MXNet/src/ndarray.jl:166](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L166) + +--- + + +#### copy!{T<:Real}(dst::MXNet.mx.NDArray, src::Array{T<:Real, N}) +Copy data from Julia Array to NDArray + +*source:* +[MXNet/src/ndarray.jl:186](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L186) + +--- + + +#### copy(arr::MXNet.mx.NDArray) +Create copy: NDArray -> Julia Array + +*source:* +[MXNet/src/ndarray.jl:196](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L196) + +--- + + +#### copy(arr::MXNet.mx.NDArray, ctx::MXNet.mx.Context) +Create copy: NDArray -> NDArray in a given context + +*source:* +[MXNet/src/ndarray.jl:202](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L202) + +--- + + +#### copy{T<:Real}(arr::Array{T<:Real, N}, ctx::MXNet.mx.Context) +Create copy: Julia Array -> NDArray in a given context + +*source:* +[MXNet/src/ndarray.jl:208](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L208) + +--- + + +#### get_internals(self::MXNet.mx.Symbol) +Get a new grouped symbol whose output contains all the internal outputs of this symbol. + +*source:* +[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L63) + +--- + + +#### group(symbols::MXNet.mx.Symbol...) 
+Create a symbol that groups symbols together + +*source:* +[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L77) + +--- + + +#### list_auxiliary_states(self::MXNet.mx.Symbol) +List all auxiliary states in the symbool. + +Auxiliary states are special states of symbols that do not corresponds to an argument, +and do not have gradient. But still be useful for the specific operations. +A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. +Most operators do not have Auxiliary states. + + +*source:* +[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L58) + +--- + + +#### ones{N}(shape::NTuple{N, Int64}) +Create NDArray and initialize with 1 + +*source:* +[MXNet/src/ndarray.jl:112](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L112) + +--- + + +#### ones{N}(shape::NTuple{N, Int64}, ctx::MXNet.mx.Context) +Create NDArray and initialize with 1 + +*source:* +[MXNet/src/ndarray.jl:112](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L112) + +--- + + +#### setindex!(arr::MXNet.mx.NDArray, val::Real, ::Colon) +Assign all elements of an NDArray to a scalar + +*source:* +[MXNet/src/ndarray.jl:146](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L146) + +--- + + +#### size(arr::MXNet.mx.NDArray) +Get the shape of an `NDArray`. Note the shape is converted to Julia convention. + So the same piece of memory, in Julia (column-major), with shape (K, M, N), will be of the + shape (N, M, K) in the Python (row-major) binding. + + +*source:* +[MXNet/src/ndarray.jl:81](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L81) + +--- + + +#### slice(arr::MXNet.mx.NDArray, ::Colon) +`slice` create a view into a sub-slice of an `NDArray`. 
Note only slicing at the slowest + changing dimension is supported. In Julia's column-major perspective, this is the last + dimension. For example, given an `NDArray` of shape (2,3,4), `sub(array, 2:3)` will create + a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is + used in data parallelization to split mini-batch into sub-batches for different devices. + + +*source:* +[MXNet/src/ndarray.jl:128](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L128) + +--- + + +#### variable(name::Union{AbstractString, Symbol}) +Create a symbolic variable with the given name + +*source:* +[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L70) + +--- + + +#### zeros{N}(shape::NTuple{N, Int64}) +Create zero-ed NDArray of specific shape + +*source:* +[MXNet/src/ndarray.jl:102](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L102) + +--- + + +#### zeros{N}(shape::NTuple{N, Int64}, ctx::MXNet.mx.Context) +Create zero-ed NDArray of specific shape + +*source:* +[MXNet/src/ndarray.jl:102](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L102) + +--- + + +#### MXNet.mx.AbstractDataBatch +Root type for data batch + + A data batch must implement the following interface function to actually provide the data and label. + + load_data!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) + load_label!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) + + Load data and label into targets. The targets is a list of target that the data/label should be + copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and + `provide_label`. Each entry in the list is again a list of `SlicedNDArray`, corresponding the + memory buffer for each device. 
+ + The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. + + The following function should also be implemented to handle the case when the mini-batch size does not + divide the size of the whole dataset. So in the last mini-batch, the actual data copied might be fewer + than the mini-batch size. This is usually not an issue during the training as the remaining space may + contain the data and label copied during the previous mini-batch are still valid data. However, during + testing, especially when doing feature extraction, we need to be precise about the number of samples + processed. + + get_pad(batch :: AbstractDataBatch) + + Return the number of *dummy samples* in this mini-batch. + + +*source:* +[MXNet/src/io.jl:90](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L90) + +--- + + +#### MXNet.mx.AbstractDataProvider +Root type for data provider + + A data provider provides interface to iterate over a dataset. It should implement the following functions: + + provide_data(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} + provide_label(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} + + Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, + `[(:data, (100,1,28,28))]` or `[(:softmax_label, (100,1))]`. It should also implement the following convenient + function + + get_batch_size(provider :: AbstractDataProvider) => Int + + which returns the batch size used in this data provider. + + A data provider should implement the standard Julia iteration interface, including `Base.start`, + `Base.next`, `Base.done` and `Base.eltype`. It could safely assume that the interface functions will + always be called like + + for batch in provider + # ... 
+ load_data!(batch, targets) + end + + which translates into + + state = Base.start(provider) + while !done(provider, state) + (batch, state) = next(provider, state) + # ... + load_data!(batch, targets) + end + + In other words, it could safely assume that `Base.next` is always called after `Base.done`. And neither + of those function will be called twice consequtively. The detailed interfaces are list below: + + Base.start(provider :: AbstractDataProvider) => AbstractDataProviderState + + Initialize or reset the data iteration. + + Base.next(provider :: AbstractDataProvider, state :: AbstractDataProviderState) + => (AbstractDataBatch, AbstractDataProviderState) + + Return one batch of data. Actual data can be retrieved from the batch by interface functions described + in the document of type `AbstractDataBatch`. + + Base.done(provider :: AbstractDataProvider, state :: AbstractDataProviderState) => Bool + + Return `false` if there is more batch to get. + + Base.eltype(::Type{MyDataProvider}) => MyDataProviderState + + Return the type of the data provider state. + + +*source:* +[MXNet/src/io.jl:55](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L55) + +--- + + +#### MXNet.mx.AbstractDataProviderState +Root type for states of data provider + +*source:* +[MXNet/src/io.jl:58](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L58) + +--- + + +#### MXNet.mx.MXDataProvider +Wrapper of built-in `libmxnet` data iterators. + + +*source:* +[MXNet/src/io.jl:99](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L99) + +--- + + +#### MXNet.mx.MXError +Exception thrown when an error occurred calling MXNet API. + +*source:* +[MXNet/src/init.jl:2](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/init.jl#L2) + +--- + + +#### MXNet.mx.NDArray +Wrapper of the `NDArray` type in `libmxnet`. 
This is the basic building block + of tensor-based computation. + + **Note** since C/C++ use row-major ordering for arrays while Julia follows a + column-major ordering. To keep things consistent, we keep the underlying data + in their original layout, but use *language-native* convention when we talk + about shapes. For example, a mini-batch of 100 MNIST images is a tensor of + C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory + have shape (28,28,1,100). + + +*source:* +[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L32) + +--- + + +#### SlicedNDArray +A tuple of (slice, NDArray). Usually each NDArray resides on a different device, and each + slice describe which part of a larger piece of data should goto that device. + + +*source:* +[MXNet/src/io.jl:63](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L63) + +--- + + +#### @inplace(stmt) +Julia does not support re-definiton of += operator (like __iadd__ in python), +When one write a += b, it gets translated to a = a+b. a+b will allocate new +memory for the results, and the newly allocated NDArray object is then assigned +back to a, while the original contents in a is discarded. This is very inefficient +when we want to do inplace update. + +This macro is a simple utility to implement this behavior. Write + + @mx.inplace a += b + +will translate into + + mx.add_to!(a, b) + +which will do inplace adding of the contents of b into a. + + +*source:* +[MXNet/src/ndarray.jl:234](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L234) + +--- + + +#### @mxcall(fv, argtypes, args...) 
+Utility macro to call MXNet API functions + +*source:* +[MXNet/src/init.jl:41](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/init.jl#L41) + diff --git a/docs/api/index.md b/docs/api/index.md new file mode 100644 index 000000000000..aa274bb16cd5 --- /dev/null +++ b/docs/api/index.md @@ -0,0 +1,81 @@ +# API-INDEX + + +## MODULE: MXNet.mx + +--- + +## Methods [Internal] + +[_compose!(sym::MXNet.mx.Symbol)](MXNet.md#method___compose.1) Compose symbol on inputs + +[_default_get_name!(counter::Dict{Symbol, Int64}, name::Union{AbstractString, Symbol}, hint::Union{AbstractString, Symbol})](MXNet.md#method___default_get_name.1) Default implementation for generating a name for a symbol. + +[_import_ndarray_functions()](MXNet.md#method___import_ndarray_functions.1) Import dynamic functions for NDArrays. The arguments to the functions are typically ordered + +[_split_inputs(batch_size::Int64, n_split::Int64)](MXNet.md#method___split_inputs.1) Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector + +[copy!(dst::Array{Float32, N}, src::MXNet.mx.NDArray)](MXNet.md#method__copy.1) Copy data from NDArray to Julia Array + +[copy!(dst::MXNet.mx.NDArray, src::MXNet.mx.NDArray)](MXNet.md#method__copy.2) Copy data between NDArrays + +[copy!{T<:Real}(dst::MXNet.mx.NDArray, src::Array{T<:Real, N})](MXNet.md#method__copy.3) Copy data from Julia Array to NDArray + +[copy(arr::MXNet.mx.NDArray)](MXNet.md#method__copy.4) Create copy: NDArray -> Julia Array + +[copy(arr::MXNet.mx.NDArray, ctx::MXNet.mx.Context)](MXNet.md#method__copy.5) Create copy: NDArray -> NDArray in a given context + +[copy{T<:Real}(arr::Array{T<:Real, N}, ctx::MXNet.mx.Context)](MXNet.md#method__copy.6) Create copy: Julia Array -> NDArray in a given context + +[get_internals(self::MXNet.mx.Symbol)](MXNet.md#method__get_internals.1) Get a new grouped symbol whose output contains all the internal outputs of this symbol. 
+ +[group(symbols::MXNet.mx.Symbol...)](MXNet.md#method__group.1) Create a symbol that groups symbols together + +[list_auxiliary_states(self::MXNet.mx.Symbol)](MXNet.md#method__list_auxiliary_states.1) List all auxiliary states in the symbool. + +[ones{N}(shape::NTuple{N, Int64})](MXNet.md#method__ones.1) Create NDArray and initialize with 1 + +[ones{N}(shape::NTuple{N, Int64}, ctx::MXNet.mx.Context)](MXNet.md#method__ones.2) Create NDArray and initialize with 1 + +[setindex!(arr::MXNet.mx.NDArray, val::Real, ::Colon)](MXNet.md#method__setindex.1) Assign all elements of an NDArray to a scalar + +[size(arr::MXNet.mx.NDArray)](MXNet.md#method__size.1) Get the shape of an `NDArray`. Note the shape is converted to Julia convention. + +[slice(arr::MXNet.mx.NDArray, ::Colon)](MXNet.md#method__slice.1) `slice` create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest + +[variable(name::Union{AbstractString, Symbol})](MXNet.md#method__variable.1) Create a symbolic variable with the given name + +[zeros{N}(shape::NTuple{N, Int64})](MXNet.md#method__zeros.1) Create zero-ed NDArray of specific shape + +[zeros{N}(shape::NTuple{N, Int64}, ctx::MXNet.mx.Context)](MXNet.md#method__zeros.2) Create zero-ed NDArray of specific shape + +--- + +## Types [Internal] + +[MXNet.mx.AbstractDataBatch](MXNet.md#type__abstractdatabatch.1) Root type for data batch + +[MXNet.mx.AbstractDataProvider](MXNet.md#type__abstractdataprovider.1) Root type for data provider + +[MXNet.mx.AbstractDataProviderState](MXNet.md#type__abstractdataproviderstate.1) Root type for states of data provider + +[MXNet.mx.MXDataProvider](MXNet.md#type__mxdataprovider.1) Wrapper of built-in `libmxnet` data iterators. + +[MXNet.mx.MXError](MXNet.md#type__mxerror.1) Exception thrown when an error occurred calling MXNet API. + +[MXNet.mx.NDArray](MXNet.md#type__ndarray.1) Wrapper of the `NDArray` type in `libmxnet`. 
This is the basic building block + +--- + +## Typealiass [Internal] + +[SlicedNDArray](MXNet.md#typealias__slicedndarray.1) A tuple of (slice, NDArray). Usually each NDArray resides on a different device, and each + +--- + +## Macros [Internal] + +[@inplace(stmt)](MXNet.md#macro___inplace.1) Julia does not support re-definiton of += operator (like __iadd__ in python), + +[@mxcall(fv, argtypes, args...)](MXNet.md#macro___mxcall.1) Utility macro to call MXNet API functions + diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 000000000000..3aab37726449 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,292 @@ +# -*- coding: utf-8 -*- +# +# MXNet.jl documentation build configuration file, created by +# sphinx-quickstart on Tue Oct 20 00:09:24 2015. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.mathjax', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. 
+# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = ['.rst', '.md'] + +from recommonmark.parser import CommonMarkParser + +source_parsers = { + '.md': CommonMarkParser, +} + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'MXNet.jl' +copyright = u'2015, pluskid' +author = u'pluskid' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.0.1' +# The full version, including alpha/beta/rc tags. +release = '0.0.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. 
+pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +#html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +#html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +#html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. 
+htmlhelp_basename = 'MXNetjldoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', + +# Latex figure (float) alignment +#'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'MXNetjl.tex', u'MXNet.jl Documentation', + u'pluskid', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'mxnetjl', u'MXNet.jl Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'MXNetjl', u'MXNet.jl Documentation', + author, 'MXNetjl', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 000000000000..81b86df5034d --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,23 @@ +.. MXNet.jl documentation master file, created by + sphinx-quickstart on Tue Oct 20 00:09:24 2015. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to MXNet.jl's documentation! +==================================== + +Contents: + +.. toctree:: + :maxdepth: 2 + + api/MXNet + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 000000000000..e682697a9877 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,263 @@ +@ECHO OFF + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +set I18NSPHINXOPTS=%SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% + set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. 
singlehtml to make a single large HTML file + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. devhelp to make HTML files and a Devhelp project + echo. epub to make an epub + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. text to make text files + echo. man to make manual pages + echo. texinfo to make Texinfo files + echo. gettext to make PO message catalogs + echo. changes to make an overview over all changed/added/deprecated items + echo. xml to make Docutils-native XML files + echo. pseudoxml to make pseudoxml-XML files for display purposes + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + echo. coverage to run coverage check of the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + + +REM Check if sphinx-build is available and fallback to Python version if any +%SPHINXBUILD% 2> nul +if errorlevel 9009 goto sphinx_python +goto sphinx_ok + +:sphinx_python + +set SPHINXBUILD=python -m sphinx.__init__ +%SPHINXBUILD% 2> nul +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +:sphinx_ok + + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. 
+ goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "singlehtml" ( + %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can process the JSON files. + goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\MXNetjl.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\MXNetjl.ghc + goto end +) + +if "%1" == "devhelp" ( + %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. + goto end +) + +if "%1" == "epub" ( + %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The epub file is in %BUILDDIR%/epub. + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + if errorlevel 1 exit /b 1 + echo. 
+ echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdf" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "latexpdfja" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + cd %BUILDDIR%/latex + make all-pdf-ja + cd %~dp0 + echo. + echo.Build finished; the PDF files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "text" ( + %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The text files are in %BUILDDIR%/text. + goto end +) + +if "%1" == "man" ( + %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The manual pages are in %BUILDDIR%/man. + goto end +) + +if "%1" == "texinfo" ( + %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. + goto end +) + +if "%1" == "gettext" ( + %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The message catalogs are in %BUILDDIR%/locale. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + if errorlevel 1 exit /b 1 + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + if errorlevel 1 exit /b 1 + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + if errorlevel 1 exit /b 1 + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. 
+ goto end +) + +if "%1" == "coverage" ( + %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage + if errorlevel 1 exit /b 1 + echo. + echo.Testing of coverage in the sources finished, look at the ^ +results in %BUILDDIR%/coverage/python.txt. + goto end +) + +if "%1" == "xml" ( + %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The XML files are in %BUILDDIR%/xml. + goto end +) + +if "%1" == "pseudoxml" ( + %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml + if errorlevel 1 exit /b 1 + echo. + echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. + goto end +) + +:end From 4a25b00b85a1d0083c37ab9024f433f73d2f417c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 Oct 2015 00:31:14 -0400 Subject: [PATCH 072/630] add rtd badge --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 85cd8cc114e8..4852bcd52161 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ # MXNet [![Build Status](https://travis-ci.org/dmlc/MXNet.jl.svg?branch=master)](https://travis-ci.org/dmlc/MXNet.jl) -[![Coverage Status](https://img.shields.io/coveralls/dmlc/MXNet.jl.svg?style=flat)](https://coveralls.io/r/dmlc/MXNet.jl?branch=master) -[![License](https://img.shields.io/github/license/dmlc/MXNet.jl.svg?style=flat)](LICENSE.md) +[![Documentation Status](https://readthedocs.org/projects/mxnetjl/badge/?version=latest)](http://mxnetjl.readthedocs.org/en/latest/?badge=latest) +[![License](http://dmlc.github.io/img/apache2.svg)](LICENSE.md) Julia wrapper of [MXNet](https://github.com/dmlc/mxnet). 
From 34a1c89bf2b65351914e00ccd12a033df724a721 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 Oct 2015 00:34:47 -0400 Subject: [PATCH 073/630] use rtd theme --- docs/conf.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 3aab37726449..33f8b3ce6a42 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -116,7 +116,18 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'alabaster' +html_theme = 'default' + +import os +on_rtd = os.environ.get('READTHEDOCS', None) == 'True' + +if not on_rtd: # only import and set the theme if we're building docs locally + try: + import sphinx_rtd_theme + html_theme = "sphinx_rtd_theme" + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + except: + pass # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the From 4eff47d15f3f63fcfbe5fb31b6fa7a4455b5ed1f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 Oct 2015 01:07:57 -0400 Subject: [PATCH 074/630] fix some doc markdown formatting --- docs/api/MXNet.md | 214 +++++++++++++++++++++++----------------------- docs/api/index.md | 81 ------------------ docs/build.jl | 3 +- docs/index.rst | 6 ++ src/io.jl | 120 +++++++++++++++----------- src/name.jl | 8 +- src/ndarray.jl | 16 ++-- src/symbol.jl | 1 - 8 files changed, 200 insertions(+), 249 deletions(-) delete mode 100644 docs/api/index.md diff --git a/docs/api/MXNet.md b/docs/api/MXNet.md index fbaee9ae02fa..d4b273ba417d 100644 --- a/docs/api/MXNet.md +++ b/docs/api/MXNet.md @@ -4,51 +4,35 @@ --- - -#### _compose!(sym::MXNet.mx.Symbol) -Compose symbol on inputs - -*source:* -[MXNet/src/symbol.jl:199](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L199) - ---- - - -#### _default_get_name!(counter::Dict{Symbol, Int64}, name::Union{AbstractString, Symbol}, 
hint::Union{AbstractString, Symbol}) -Default implementation for generating a name for a symbol. - -When a name is specified by the user, it will be used. Otherwise, a name -is automatically generated based on the hint string. - - -*source:* -[MXNet/src/name.jl:12](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/name.jl#L12) - ---- - #### _import_ndarray_functions() Import dynamic functions for NDArrays. The arguments to the functions are typically ordered as - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) +```julia +func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) +``` unless NDARRAY_ARG_BEFORE_SCALAR is not set. In this case, the scalars are put before the input arguments: - func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) +```julia +func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) +``` -If ACCEPT_EMPTY_MUTATE_TARGET is set. An overloaded function without the output arguments will also be defined: +If `ACCEPT_EMPTY_MUTATE_TARGET` is set. An overloaded function without the output arguments will also be defined: - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) +```julia +func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) +``` Upon calling, the output arguments will be automatically initialized with empty NDArrays. Those functions always return the output arguments. If there is only one output (the typical situation), that -object (NDArray) is returned. Otherwise, a tuple containing all the outputs will be returned. +object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. 
*source:* -[MXNet/src/ndarray.jl:361](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L361) +[MXNet/src/ndarray.jl:367](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L367) --- @@ -60,7 +44,7 @@ Get a split of `batch_size` into `n_split` pieces for data parallelization. Retu *source:* -[MXNet/src/estimator.jl:18](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/estimator.jl#L18) +[MXNet/src/estimator.jl:18](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/estimator.jl#L18) --- @@ -69,7 +53,7 @@ Get a split of `batch_size` into `n_split` pieces for data parallelization. Retu Copy data from NDArray to Julia Array *source:* -[MXNet/src/ndarray.jl:178](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L178) +[MXNet/src/ndarray.jl:178](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L178) --- @@ -78,7 +62,7 @@ Copy data from NDArray to Julia Array Copy data between NDArrays *source:* -[MXNet/src/ndarray.jl:166](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L166) +[MXNet/src/ndarray.jl:166](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L166) --- @@ -87,7 +71,7 @@ Copy data between NDArrays Copy data from Julia Array to NDArray *source:* -[MXNet/src/ndarray.jl:186](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L186) +[MXNet/src/ndarray.jl:186](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L186) --- @@ -96,7 +80,7 @@ Copy data from Julia Array to NDArray Create copy: NDArray -> Julia Array *source:* -[MXNet/src/ndarray.jl:196](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L196) 
+[MXNet/src/ndarray.jl:196](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L196) --- @@ -105,7 +89,7 @@ Create copy: NDArray -> Julia Array Create copy: NDArray -> NDArray in a given context *source:* -[MXNet/src/ndarray.jl:202](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L202) +[MXNet/src/ndarray.jl:202](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L202) --- @@ -114,7 +98,7 @@ Create copy: NDArray -> NDArray in a given context Create copy: Julia Array -> NDArray in a given context *source:* -[MXNet/src/ndarray.jl:208](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L208) +[MXNet/src/ndarray.jl:208](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L208) --- @@ -123,7 +107,7 @@ Create copy: Julia Array -> NDArray in a given context Get a new grouped symbol whose output contains all the internal outputs of this symbol. *source:* -[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L63) +[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/symbol.jl#L63) --- @@ -132,7 +116,7 @@ Get a new grouped symbol whose output contains all the internal outputs of this Create a symbol that groups symbols together *source:* -[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L77) +[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/symbol.jl#L77) --- @@ -147,7 +131,7 @@ Most operators do not have Auxiliary states. 
*source:* -[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L58) +[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/symbol.jl#L58) --- @@ -156,7 +140,7 @@ Most operators do not have Auxiliary states. Create NDArray and initialize with 1 *source:* -[MXNet/src/ndarray.jl:112](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L112) +[MXNet/src/ndarray.jl:112](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L112) --- @@ -165,7 +149,7 @@ Create NDArray and initialize with 1 Create NDArray and initialize with 1 *source:* -[MXNet/src/ndarray.jl:112](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L112) +[MXNet/src/ndarray.jl:112](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L112) --- @@ -174,7 +158,7 @@ Create NDArray and initialize with 1 Assign all elements of an NDArray to a scalar *source:* -[MXNet/src/ndarray.jl:146](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L146) +[MXNet/src/ndarray.jl:146](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L146) --- @@ -186,7 +170,7 @@ Get the shape of an `NDArray`. Note the shape is converted to Julia convention. *source:* -[MXNet/src/ndarray.jl:81](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L81) +[MXNet/src/ndarray.jl:81](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L81) --- @@ -200,7 +184,7 @@ Get the shape of an `NDArray`. Note the shape is converted to Julia convention. 
*source:* -[MXNet/src/ndarray.jl:128](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L128) +[MXNet/src/ndarray.jl:128](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L128) --- @@ -209,7 +193,7 @@ Get the shape of an `NDArray`. Note the shape is converted to Julia convention. Create a symbolic variable with the given name *source:* -[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/symbol.jl#L70) +[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/symbol.jl#L70) --- @@ -218,7 +202,7 @@ Create a symbolic variable with the given name Create zero-ed NDArray of specific shape *source:* -[MXNet/src/ndarray.jl:102](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L102) +[MXNet/src/ndarray.jl:102](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L102) --- @@ -227,7 +211,7 @@ Create zero-ed NDArray of specific shape Create zero-ed NDArray of specific shape *source:* -[MXNet/src/ndarray.jl:102](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L102) +[MXNet/src/ndarray.jl:102](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L102) --- @@ -235,32 +219,36 @@ Create zero-ed NDArray of specific shape #### MXNet.mx.AbstractDataBatch Root type for data batch - A data batch must implement the following interface function to actually provide the data and label. +A data batch must implement the following interface function to actually provide the data and label. 
- load_data!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - load_label!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) +```julia +load_data!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) +load_label!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) +``` - Load data and label into targets. The targets is a list of target that the data/label should be - copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and - `provide_label`. Each entry in the list is again a list of `SlicedNDArray`, corresponding the - memory buffer for each device. +Load data and label into targets. The targets is a list of target that the data/label should be +copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and +`provide_label`. Each entry in the list is again a list of `SlicedNDArray`, corresponding the +memory buffer for each device. - The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. +The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. - The following function should also be implemented to handle the case when the mini-batch size does not - divide the size of the whole dataset. So in the last mini-batch, the actual data copied might be fewer - than the mini-batch size. This is usually not an issue during the training as the remaining space may - contain the data and label copied during the previous mini-batch are still valid data. However, during - testing, especially when doing feature extraction, we need to be precise about the number of samples - processed. +The following function should also be implemented to handle the case when the mini-batch size does not +divide the size of the whole dataset. So in the last mini-batch, the actual data copied might be fewer +than the mini-batch size. 
This is usually not an issue during the training as the remaining space may +contain the data and label copied during the previous mini-batch are still valid data. However, during +testing, especially when doing feature extraction, we need to be precise about the number of samples +processed. - get_pad(batch :: AbstractDataBatch) +```julia +get_pad(batch :: AbstractDataBatch) +``` - Return the number of *dummy samples* in this mini-batch. +Return the number of *dummy samples* in this mini-batch. *source:* -[MXNet/src/io.jl:90](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L90) +[MXNet/src/io.jl:110](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L110) --- @@ -268,61 +256,77 @@ Root type for data batch #### MXNet.mx.AbstractDataProvider Root type for data provider - A data provider provides interface to iterate over a dataset. It should implement the following functions: +A data provider provides interface to iterate over a dataset. It should implement the following functions: - provide_data(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} - provide_label(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} +```julia +provide_data(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} +provide_label(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} +``` - Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, - `[(:data, (100,1,28,28))]` or `[(:softmax_label, (100,1))]`. It should also implement the following convenient - function +Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, +`[(:data, (100,1,28,28))]` or `[(:softmax_label, (100,1))]`. 
It should also implement the following convenient +function - get_batch_size(provider :: AbstractDataProvider) => Int +```julia +get_batch_size(provider :: AbstractDataProvider) => Int +``` - which returns the batch size used in this data provider. +which returns the batch size used in this data provider. - A data provider should implement the standard Julia iteration interface, including `Base.start`, - `Base.next`, `Base.done` and `Base.eltype`. It could safely assume that the interface functions will - always be called like +A data provider should implement the standard Julia iteration interface, including `Base.start`, +`Base.next`, `Base.done` and `Base.eltype`. It could safely assume that the interface functions will +always be called like - for batch in provider - # ... - load_data!(batch, targets) - end +```julia +for batch in provider + # ... + load_data!(batch, targets) +end +``` - which translates into +which translates into - state = Base.start(provider) - while !done(provider, state) - (batch, state) = next(provider, state) - # ... - load_data!(batch, targets) - end +```julia +state = Base.start(provider) +while !done(provider, state) + (batch, state) = next(provider, state) + # ... + load_data!(batch, targets) +end +``` - In other words, it could safely assume that `Base.next` is always called after `Base.done`. And neither - of those function will be called twice consequtively. The detailed interfaces are list below: +In other words, it could safely assume that `Base.next` is always called after `Base.done`. And neither +of those function will be called twice consequtively. The detailed interfaces are list below: - Base.start(provider :: AbstractDataProvider) => AbstractDataProviderState +```julia +Base.start(provider :: AbstractDataProvider) => AbstractDataProviderState +``` - Initialize or reset the data iteration. +Initialize or reset the data iteration. 
- Base.next(provider :: AbstractDataProvider, state :: AbstractDataProviderState) - => (AbstractDataBatch, AbstractDataProviderState) +```julia +Base.next(provider :: AbstractDataProvider, state :: AbstractDataProviderState) + => (AbstractDataBatch, AbstractDataProviderState) +``` - Return one batch of data. Actual data can be retrieved from the batch by interface functions described - in the document of type `AbstractDataBatch`. +Return one batch of data. Actual data can be retrieved from the batch by interface functions described +in the document of type `AbstractDataBatch`. - Base.done(provider :: AbstractDataProvider, state :: AbstractDataProviderState) => Bool +```julia +Base.done(provider :: AbstractDataProvider, state :: AbstractDataProviderState) => Bool +``` - Return `false` if there is more batch to get. +Return `false` if there is more batch to get. - Base.eltype(::Type{MyDataProvider}) => MyDataProviderState +```julia +Base.eltype(::Type{MyDataProvider}) => MyDataProviderState +``` - Return the type of the data provider state. +Return the type of the data provider state. *source:* -[MXNet/src/io.jl:55](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L55) +[MXNet/src/io.jl:71](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L71) --- @@ -331,7 +335,7 @@ Root type for data provider Root type for states of data provider *source:* -[MXNet/src/io.jl:58](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L58) +[MXNet/src/io.jl:74](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L74) --- @@ -341,7 +345,7 @@ Wrapper of built-in `libmxnet` data iterators. 
*source:* -[MXNet/src/io.jl:99](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L99) +[MXNet/src/io.jl:119](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L119) --- @@ -350,7 +354,7 @@ Wrapper of built-in `libmxnet` data iterators. Exception thrown when an error occurred calling MXNet API. *source:* -[MXNet/src/init.jl:2](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/init.jl#L2) +[MXNet/src/init.jl:2](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/init.jl#L2) --- @@ -368,7 +372,7 @@ Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block *source:* -[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L32) +[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L32) --- @@ -379,7 +383,7 @@ A tuple of (slice, NDArray). Usually each NDArray resides on a different device, *source:* -[MXNet/src/io.jl:63](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/io.jl#L63) +[MXNet/src/io.jl:79](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L79) --- @@ -403,7 +407,7 @@ which will do inplace adding of the contents of b into a. *source:* -[MXNet/src/ndarray.jl:234](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/ndarray.jl#L234) +[MXNet/src/ndarray.jl:234](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L234) --- @@ -412,5 +416,5 @@ which will do inplace adding of the contents of b into a. 
Utility macro to call MXNet API functions *source:* -[MXNet/src/init.jl:41](https://github.com/dmlc/MXNet.jl/tree/aa18e9880436a496c020e8600268bba4e60efdc7/src/init.jl#L41) +[MXNet/src/init.jl:41](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/init.jl#L41) diff --git a/docs/api/index.md b/docs/api/index.md deleted file mode 100644 index aa274bb16cd5..000000000000 --- a/docs/api/index.md +++ /dev/null @@ -1,81 +0,0 @@ -# API-INDEX - - -## MODULE: MXNet.mx - ---- - -## Methods [Internal] - -[_compose!(sym::MXNet.mx.Symbol)](MXNet.md#method___compose.1) Compose symbol on inputs - -[_default_get_name!(counter::Dict{Symbol, Int64}, name::Union{AbstractString, Symbol}, hint::Union{AbstractString, Symbol})](MXNet.md#method___default_get_name.1) Default implementation for generating a name for a symbol. - -[_import_ndarray_functions()](MXNet.md#method___import_ndarray_functions.1) Import dynamic functions for NDArrays. The arguments to the functions are typically ordered - -[_split_inputs(batch_size::Int64, n_split::Int64)](MXNet.md#method___split_inputs.1) Get a split of `batch_size` into `n_split` pieces for data parallelization. 
Returns a vector - -[copy!(dst::Array{Float32, N}, src::MXNet.mx.NDArray)](MXNet.md#method__copy.1) Copy data from NDArray to Julia Array - -[copy!(dst::MXNet.mx.NDArray, src::MXNet.mx.NDArray)](MXNet.md#method__copy.2) Copy data between NDArrays - -[copy!{T<:Real}(dst::MXNet.mx.NDArray, src::Array{T<:Real, N})](MXNet.md#method__copy.3) Copy data from Julia Array to NDArray - -[copy(arr::MXNet.mx.NDArray)](MXNet.md#method__copy.4) Create copy: NDArray -> Julia Array - -[copy(arr::MXNet.mx.NDArray, ctx::MXNet.mx.Context)](MXNet.md#method__copy.5) Create copy: NDArray -> NDArray in a given context - -[copy{T<:Real}(arr::Array{T<:Real, N}, ctx::MXNet.mx.Context)](MXNet.md#method__copy.6) Create copy: Julia Array -> NDArray in a given context - -[get_internals(self::MXNet.mx.Symbol)](MXNet.md#method__get_internals.1) Get a new grouped symbol whose output contains all the internal outputs of this symbol. - -[group(symbols::MXNet.mx.Symbol...)](MXNet.md#method__group.1) Create a symbol that groups symbols together - -[list_auxiliary_states(self::MXNet.mx.Symbol)](MXNet.md#method__list_auxiliary_states.1) List all auxiliary states in the symbool. - -[ones{N}(shape::NTuple{N, Int64})](MXNet.md#method__ones.1) Create NDArray and initialize with 1 - -[ones{N}(shape::NTuple{N, Int64}, ctx::MXNet.mx.Context)](MXNet.md#method__ones.2) Create NDArray and initialize with 1 - -[setindex!(arr::MXNet.mx.NDArray, val::Real, ::Colon)](MXNet.md#method__setindex.1) Assign all elements of an NDArray to a scalar - -[size(arr::MXNet.mx.NDArray)](MXNet.md#method__size.1) Get the shape of an `NDArray`. Note the shape is converted to Julia convention. - -[slice(arr::MXNet.mx.NDArray, ::Colon)](MXNet.md#method__slice.1) `slice` create a view into a sub-slice of an `NDArray`. 
Note only slicing at the slowest - -[variable(name::Union{AbstractString, Symbol})](MXNet.md#method__variable.1) Create a symbolic variable with the given name - -[zeros{N}(shape::NTuple{N, Int64})](MXNet.md#method__zeros.1) Create zero-ed NDArray of specific shape - -[zeros{N}(shape::NTuple{N, Int64}, ctx::MXNet.mx.Context)](MXNet.md#method__zeros.2) Create zero-ed NDArray of specific shape - ---- - -## Types [Internal] - -[MXNet.mx.AbstractDataBatch](MXNet.md#type__abstractdatabatch.1) Root type for data batch - -[MXNet.mx.AbstractDataProvider](MXNet.md#type__abstractdataprovider.1) Root type for data provider - -[MXNet.mx.AbstractDataProviderState](MXNet.md#type__abstractdataproviderstate.1) Root type for states of data provider - -[MXNet.mx.MXDataProvider](MXNet.md#type__mxdataprovider.1) Wrapper of built-in `libmxnet` data iterators. - -[MXNet.mx.MXError](MXNet.md#type__mxerror.1) Exception thrown when an error occurred calling MXNet API. - -[MXNet.mx.NDArray](MXNet.md#type__ndarray.1) Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block - ---- - -## Typealiass [Internal] - -[SlicedNDArray](MXNet.md#typealias__slicedndarray.1) A tuple of (slice, NDArray). 
Usually each NDArray resides on a different device, and each - ---- - -## Macros [Internal] - -[@inplace(stmt)](MXNet.md#macro___inplace.1) Julia does not support re-definiton of += operator (like __iadd__ in python), - -[@mxcall(fv, argtypes, args...)](MXNet.md#macro___mxcall.1) Utility macro to call MXNet API functions - diff --git a/docs/build.jl b/docs/build.jl index d6a4ec9ef5d5..11a45ec75883 100644 --- a/docs/build.jl +++ b/docs/build.jl @@ -3,6 +3,5 @@ using Lexicon config = Config(md_permalink = false, mathjax = true) -index = save("api/MXNet.md", MXNet.mx, config) -save("api/index.md", Index([index]), config; md_subheader = :category) +index = save("api/MXNet.md", mx, config) diff --git a/docs/index.rst b/docs/index.rst index 81b86df5034d..b2598cac01bb 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -11,6 +11,12 @@ Contents: .. toctree:: :maxdepth: 2 +API Documentation +----------------- + +.. toctree:: + :maxdepth: 1 + api/MXNet diff --git a/src/io.jl b/src/io.jl index cf618be3e4a4..78fb7cefc22c 100644 --- a/src/io.jl +++ b/src/io.jl @@ -1,56 +1,72 @@ """Root type for data provider - A data provider provides interface to iterate over a dataset. It should implement the following functions: +A data provider provides interface to iterate over a dataset. It should implement the following functions: - provide_data(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} - provide_label(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} +```julia +provide_data(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} +provide_label(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} +``` - Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, - `[(:data, (100,1,28,28))]` or `[(:softmax_label, (100,1))]`. 
It should also implement the following convenient - function +Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, +`[(:data, (100,1,28,28))]` or `[(:softmax_label, (100,1))]`. It should also implement the following convenient +function - get_batch_size(provider :: AbstractDataProvider) => Int +```julia +get_batch_size(provider :: AbstractDataProvider) => Int +``` - which returns the batch size used in this data provider. +which returns the batch size used in this data provider. - A data provider should implement the standard Julia iteration interface, including `Base.start`, - `Base.next`, `Base.done` and `Base.eltype`. It could safely assume that the interface functions will - always be called like +A data provider should implement the standard Julia iteration interface, including `Base.start`, +`Base.next`, `Base.done` and `Base.eltype`. It could safely assume that the interface functions will +always be called like - for batch in provider - # ... - load_data!(batch, targets) - end +```julia +for batch in provider + # ... + load_data!(batch, targets) +end +``` - which translates into +which translates into - state = Base.start(provider) - while !done(provider, state) - (batch, state) = next(provider, state) - # ... - load_data!(batch, targets) - end +```julia +state = Base.start(provider) +while !done(provider, state) + (batch, state) = next(provider, state) + # ... + load_data!(batch, targets) +end +``` - In other words, it could safely assume that `Base.next` is always called after `Base.done`. And neither - of those function will be called twice consequtively. The detailed interfaces are list below: +In other words, it could safely assume that `Base.next` is always called after `Base.done`. And neither +of those function will be called twice consequtively. 
The detailed interfaces are list below: - Base.start(provider :: AbstractDataProvider) => AbstractDataProviderState +```julia +Base.start(provider :: AbstractDataProvider) => AbstractDataProviderState +``` - Initialize or reset the data iteration. +Initialize or reset the data iteration. - Base.next(provider :: AbstractDataProvider, state :: AbstractDataProviderState) - => (AbstractDataBatch, AbstractDataProviderState) +```julia +Base.next(provider :: AbstractDataProvider, state :: AbstractDataProviderState) + => (AbstractDataBatch, AbstractDataProviderState) +``` - Return one batch of data. Actual data can be retrieved from the batch by interface functions described - in the document of type `AbstractDataBatch`. +Return one batch of data. Actual data can be retrieved from the batch by interface functions described +in the document of type `AbstractDataBatch`. - Base.done(provider :: AbstractDataProvider, state :: AbstractDataProviderState) => Bool +```julia +Base.done(provider :: AbstractDataProvider, state :: AbstractDataProviderState) => Bool +``` - Return `false` if there is more batch to get. +Return `false` if there is more batch to get. - Base.eltype(::Type{MyDataProvider}) => MyDataProviderState +```julia +Base.eltype(::Type{MyDataProvider}) => MyDataProviderState +``` - Return the type of the data provider state. +Return the type of the data provider state. """ abstract AbstractDataProvider @@ -64,28 +80,32 @@ typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} """Root type for data batch - A data batch must implement the following interface function to actually provide the data and label. +A data batch must implement the following interface function to actually provide the data and label. 
- load_data!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - load_label!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) +```julia +load_data!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) +load_label!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) +``` - Load data and label into targets. The targets is a list of target that the data/label should be - copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and - `provide_label`. Each entry in the list is again a list of `SlicedNDArray`, corresponding the - memory buffer for each device. +Load data and label into targets. The targets is a list of target that the data/label should be +copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and +`provide_label`. Each entry in the list is again a list of `SlicedNDArray`, corresponding the +memory buffer for each device. - The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. +The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. - The following function should also be implemented to handle the case when the mini-batch size does not - divide the size of the whole dataset. So in the last mini-batch, the actual data copied might be fewer - than the mini-batch size. This is usually not an issue during the training as the remaining space may - contain the data and label copied during the previous mini-batch are still valid data. However, during - testing, especially when doing feature extraction, we need to be precise about the number of samples - processed. +The following function should also be implemented to handle the case when the mini-batch size does not +divide the size of the whole dataset. So in the last mini-batch, the actual data copied might be fewer +than the mini-batch size. 
This is usually not an issue during the training as the remaining space may +contain the data and label copied during the previous mini-batch are still valid data. However, during +testing, especially when doing feature extraction, we need to be precise about the number of samples +processed. - get_pad(batch :: AbstractDataBatch) +```julia +get_pad(batch :: AbstractDataBatch) +``` - Return the number of *dummy samples* in this mini-batch. +Return the number of *dummy samples* in this mini-batch. """ abstract AbstractDataBatch diff --git a/src/name.jl b/src/name.jl index 5644809c79f9..5ebf10917ce6 100644 --- a/src/name.jl +++ b/src/name.jl @@ -4,11 +4,9 @@ typealias NameCounter Dict{Base.Symbol, Int} import Base: get! -"""Default implementation for generating a name for a symbol. - -When a name is specified by the user, it will be used. Otherwise, a name -is automatically generated based on the hint string. -""" +# Default implementation for generating a name for a symbol. +# When a name is specified by the user, it will be used. Otherwise, a name +# is automatically generated based on the hint string. function _default_get_name!(counter :: NameCounter, name :: NameType, hint :: NameType) if isa(name, Base.Symbol) || !isempty(name) return symbol(name) diff --git a/src/ndarray.jl b/src/ndarray.jl index 1d80dcdbdd51..bdc9775d5e87 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -343,20 +343,26 @@ end Import dynamic functions for NDArrays. The arguments to the functions are typically ordered as - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) +```julia +func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) +``` unless NDARRAY_ARG_BEFORE_SCALAR is not set. In this case, the scalars are put before the input arguments: - func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) +```julia +func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) 
+``` -If ACCEPT_EMPTY_MUTATE_TARGET is set. An overloaded function without the output arguments will also be defined: +If `ACCEPT_EMPTY_MUTATE_TARGET` is set. An overloaded function without the output arguments will also be defined: - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) +```julia +func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) +``` Upon calling, the output arguments will be automatically initialized with empty NDArrays. Those functions always return the output arguments. If there is only one output (the typical situation), that -object (NDArray) is returned. Otherwise, a tuple containing all the outputs will be returned. +object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. """ function _import_ndarray_functions() n_ref = Ref{MX_uint}(0) diff --git a/src/symbol.jl b/src/symbol.jl index 85799d524c51..921671e394e9 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -195,7 +195,6 @@ function ./(self :: Symbol, arg :: Symbol) _Div(self, arg) end -"Compose symbol on inputs" function _compose!(sym :: Symbol; kwargs...) 
name = char_p(0) arg_keys = AbstractString[] From d13ddc6542bdb00e26b87e721a9b0e79a22bbd66 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 Oct 2015 01:16:10 -0400 Subject: [PATCH 075/630] clean up Context --- src/context.jl | 19 ++++++++++--------- src/ndarray.jl | 17 +++++++++++++---- src/random.jl | 10 ++++++++-- test/unittest/ndarray.jl | 6 +++--- 4 files changed, 34 insertions(+), 18 deletions(-) diff --git a/src/context.jl b/src/context.jl index 06062500be6c..5146d4afcb64 100644 --- a/src/context.jl +++ b/src/context.jl @@ -1,18 +1,19 @@ @enum CONTEXT_TYPE CPU=1 GPU=2 CPU_PINNED=3 -type Context +immutable Context device_type :: CONTEXT_TYPE - device_id :: Cint - - old_ctx :: Nullable{Context} + device_id :: Int end -Context(dev_type :: Union{CONTEXT_TYPE, Integer}, dev_id :: Integer = 0) = - Context(convert(CONTEXT_TYPE, dev_type), convert(Cint, dev_id), Nullable{Context}()) +Context(dev_type :: Union{CONTEXT_TYPE, Int}, dev_id :: Int = 0) = + Context(convert(CONTEXT_TYPE, dev_type), dev_id) function Base.show(io :: IO, ctx :: Context) print(io, "$(ctx.device_type)$(ctx.device_id)") end - -# global default context -DEFAULT_CONTEXT = Context(CPU) +function cpu(dev_id::Int=0) + return Context(CPU, dev_id) +end +function gpu(dev_id::Int=0) + return Context(GPU, dev_id) +end diff --git a/src/ndarray.jl b/src/ndarray.jl index bdc9775d5e87..d5a61a173a34 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -43,7 +43,7 @@ function Base.show(io :: IO, arr :: NDArray) end function NDArray{T<:Real}(data :: Array{T}) - copy(data, mx.DEFAULT_CONTEXT) + copy(data, cpu()) end function Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) @@ -63,7 +63,10 @@ function context(arr :: NDArray) return Context(ref_typeid[], ref_devid[]) end -function empty{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) +function empty{N}(shape :: NTuple{N, Int}) + empty(shape, cpu()) +end +function empty{N}(shape :: NTuple{N, Int}, ctx :: Context) 
NDArray(_ndarray_alloc(shape, ctx, false)) end function empty(shape :: Int...) @@ -99,7 +102,10 @@ function eltype(arr :: NDArray) end "Create zero-ed NDArray of specific shape" -function zeros{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) +function zeros{N}(shape :: NTuple{N, Int}) + zeros(shape, cpu()) +end +function zeros{N}(shape :: NTuple{N, Int}, ctx :: Context) arr = empty(shape, ctx) arr[:] = 0 return arr @@ -109,7 +115,10 @@ function zeros(shape :: Int...) end "Create NDArray and initialize with 1" -function ones{N}(shape :: NTuple{N, Int}, ctx :: Context = DEFAULT_CONTEXT) +function ones{N}(shape :: NTuple{N, Int}) + ones(shape, cpu()) +end +function ones{N}(shape :: NTuple{N, Int}, ctx :: Context) arr = empty(shape, ctx) arr[:] = 1 return arr diff --git a/src/random.jl b/src/random.jl index 82b0aea97582..79a8b6e9e20b 100644 --- a/src/random.jl +++ b/src/random.jl @@ -1,7 +1,10 @@ function rand!(low::Real, high::Real, out::NDArray) _random_uniform(low, high, out) end -function rand(low::Real, high::Real, shape::Tuple, ctx::Context=DEFAULT_CONTEXT) +function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}) + rand(low, high, shape, cpu()) +end +function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context) out = empty(shape, ctx) rand!(low, high, out) end @@ -9,7 +12,10 @@ end function randn!(mean::Real, stdvar::Real, out::NDArray) _random_gaussian(mean, stdvar, out) end -function randn(mean::Real, stdvar::Real, shape::Tuple, ctx::Context=DEFAULT_CONTEXT) +function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}) + randn(mean, stdvar, shape, cpu()) +end +function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context) out = empty(shape, ctx) randn!(mean, stdvar, out) end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index d547159af1e2..7d5df826f32b 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -9,7 +9,7 @@ using ..Main: rand_dims, reldiff 
################################################################################ function rand_tensors{N}(dims::NTuple{N, Int}) tensor = rand(mx.MX_float, dims) - array = copy(tensor, mx.DEFAULT_CONTEXT) + array = copy(tensor, mx.cpu()) return (tensor, array) end @@ -20,12 +20,12 @@ function test_copy() info("NDArray::copy::dims = $dims") # copy to NDArray and back - array = copy(tensor, mx.DEFAULT_CONTEXT) + array = copy(tensor, mx.cpu()) tensor2 = copy(array) @test reldiff(tensor, tensor2) < 1e-6 # copy between NDArray - array2 = copy(array, mx.DEFAULT_CONTEXT) + array2 = copy(array, mx.cpu()) tensor2 = copy(array2) @test reldiff(tensor, tensor2) < 1e-6 end From 70579c4cace6dbdafd34a4fced7fc7d5e83bb842 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 Oct 2015 01:59:47 -0400 Subject: [PATCH 076/630] use light-weighted mkdocs for documentation instead --- docs/Makefile | 192 -------------------- docs/api/MXNet.md | 420 -------------------------------------------- docs/api/ndarray.md | 189 ++++++++++++++++++++ docs/api/symbol.md | 46 +++++ docs/build.jl | 16 +- docs/conf.py | 303 -------------------------------- docs/index.md | 0 docs/index.rst | 29 --- docs/make.bat | 263 --------------------------- mkdocs.yml | 9 + src/ndarray.jl | 38 ++-- 11 files changed, 280 insertions(+), 1225 deletions(-) delete mode 100644 docs/Makefile delete mode 100644 docs/api/MXNet.md create mode 100644 docs/api/ndarray.md create mode 100644 docs/api/symbol.md delete mode 100644 docs/conf.py create mode 100644 docs/index.md delete mode 100644 docs/index.rst delete mode 100644 docs/make.bat create mode 100644 mkdocs.yml diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index ce3d7abdbfaa..000000000000 --- a/docs/Makefile +++ /dev/null @@ -1,192 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. 
-SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " applehelp to make an Apple Help Book" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO 
message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - @echo " coverage to run coverage check of the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/MXNetjl.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/MXNetjl.qhc" - -applehelp: - $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp - @echo - @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." - @echo "N.B. 
You won't be able to view it unless you put it in" \ - "~/Library/Documentation/Help or install it in your application" \ - "bundle." - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/MXNetjl" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/MXNetjl" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." 
- -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -coverage: - $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage - @echo "Testing of coverage in the sources finished, look at the " \ - "results in $(BUILDDIR)/coverage/python.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/api/MXNet.md b/docs/api/MXNet.md deleted file mode 100644 index d4b273ba417d..000000000000 --- a/docs/api/MXNet.md +++ /dev/null @@ -1,420 +0,0 @@ -# MXNet.mx - -## Internal - ---- - - -#### _import_ndarray_functions() -Import dynamic functions for NDArrays. The arguments to the functions are typically ordered -as - -```julia -func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) -``` - -unless NDARRAY_ARG_BEFORE_SCALAR is not set. 
In this case, the scalars are put before the input arguments: - -```julia -func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) -``` - -If `ACCEPT_EMPTY_MUTATE_TARGET` is set. An overloaded function without the output arguments will also be defined: - -```julia -func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) -``` - -Upon calling, the output arguments will be automatically initialized with empty NDArrays. - -Those functions always return the output arguments. If there is only one output (the typical situation), that -object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. - - -*source:* -[MXNet/src/ndarray.jl:367](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L367) - ---- - - -#### _split_inputs(batch_size::Int64, n_split::Int64) -Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector - of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that - piece. 
- - -*source:* -[MXNet/src/estimator.jl:18](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/estimator.jl#L18) - ---- - - -#### copy!(dst::Array{Float32, N}, src::MXNet.mx.NDArray) -Copy data from NDArray to Julia Array - -*source:* -[MXNet/src/ndarray.jl:178](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L178) - ---- - - -#### copy!(dst::MXNet.mx.NDArray, src::MXNet.mx.NDArray) -Copy data between NDArrays - -*source:* -[MXNet/src/ndarray.jl:166](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L166) - ---- - - -#### copy!{T<:Real}(dst::MXNet.mx.NDArray, src::Array{T<:Real, N}) -Copy data from Julia Array to NDArray - -*source:* -[MXNet/src/ndarray.jl:186](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L186) - ---- - - -#### copy(arr::MXNet.mx.NDArray) -Create copy: NDArray -> Julia Array - -*source:* -[MXNet/src/ndarray.jl:196](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L196) - ---- - - -#### copy(arr::MXNet.mx.NDArray, ctx::MXNet.mx.Context) -Create copy: NDArray -> NDArray in a given context - -*source:* -[MXNet/src/ndarray.jl:202](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L202) - ---- - - -#### copy{T<:Real}(arr::Array{T<:Real, N}, ctx::MXNet.mx.Context) -Create copy: Julia Array -> NDArray in a given context - -*source:* -[MXNet/src/ndarray.jl:208](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L208) - ---- - - -#### get_internals(self::MXNet.mx.Symbol) -Get a new grouped symbol whose output contains all the internal outputs of this symbol. - -*source:* -[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/symbol.jl#L63) - ---- - - -#### group(symbols::MXNet.mx.Symbol...) 
-Create a symbol that groups symbols together - -*source:* -[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/symbol.jl#L77) - ---- - - -#### list_auxiliary_states(self::MXNet.mx.Symbol) -List all auxiliary states in the symbool. - -Auxiliary states are special states of symbols that do not corresponds to an argument, -and do not have gradient. But still be useful for the specific operations. -A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. -Most operators do not have Auxiliary states. - - -*source:* -[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/symbol.jl#L58) - ---- - - -#### ones{N}(shape::NTuple{N, Int64}) -Create NDArray and initialize with 1 - -*source:* -[MXNet/src/ndarray.jl:112](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L112) - ---- - - -#### ones{N}(shape::NTuple{N, Int64}, ctx::MXNet.mx.Context) -Create NDArray and initialize with 1 - -*source:* -[MXNet/src/ndarray.jl:112](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L112) - ---- - - -#### setindex!(arr::MXNet.mx.NDArray, val::Real, ::Colon) -Assign all elements of an NDArray to a scalar - -*source:* -[MXNet/src/ndarray.jl:146](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L146) - ---- - - -#### size(arr::MXNet.mx.NDArray) -Get the shape of an `NDArray`. Note the shape is converted to Julia convention. - So the same piece of memory, in Julia (column-major), with shape (K, M, N), will be of the - shape (N, M, K) in the Python (row-major) binding. - - -*source:* -[MXNet/src/ndarray.jl:81](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L81) - ---- - - -#### slice(arr::MXNet.mx.NDArray, ::Colon) -`slice` create a view into a sub-slice of an `NDArray`. 
Note only slicing at the slowest - changing dimension is supported. In Julia's column-major perspective, this is the last - dimension. For example, given an `NDArray` of shape (2,3,4), `sub(array, 2:3)` will create - a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is - used in data parallelization to split mini-batch into sub-batches for different devices. - - -*source:* -[MXNet/src/ndarray.jl:128](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L128) - ---- - - -#### variable(name::Union{AbstractString, Symbol}) -Create a symbolic variable with the given name - -*source:* -[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/symbol.jl#L70) - ---- - - -#### zeros{N}(shape::NTuple{N, Int64}) -Create zero-ed NDArray of specific shape - -*source:* -[MXNet/src/ndarray.jl:102](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L102) - ---- - - -#### zeros{N}(shape::NTuple{N, Int64}, ctx::MXNet.mx.Context) -Create zero-ed NDArray of specific shape - -*source:* -[MXNet/src/ndarray.jl:102](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L102) - ---- - - -#### MXNet.mx.AbstractDataBatch -Root type for data batch - -A data batch must implement the following interface function to actually provide the data and label. - -```julia -load_data!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) -load_label!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) -``` - -Load data and label into targets. The targets is a list of target that the data/label should be -copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and -`provide_label`. Each entry in the list is again a list of `SlicedNDArray`, corresponding the -memory buffer for each device. 
- -The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. - -The following function should also be implemented to handle the case when the mini-batch size does not -divide the size of the whole dataset. So in the last mini-batch, the actual data copied might be fewer -than the mini-batch size. This is usually not an issue during the training as the remaining space may -contain the data and label copied during the previous mini-batch are still valid data. However, during -testing, especially when doing feature extraction, we need to be precise about the number of samples -processed. - -```julia -get_pad(batch :: AbstractDataBatch) -``` - -Return the number of *dummy samples* in this mini-batch. - - -*source:* -[MXNet/src/io.jl:110](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L110) - ---- - - -#### MXNet.mx.AbstractDataProvider -Root type for data provider - -A data provider provides interface to iterate over a dataset. It should implement the following functions: - -```julia -provide_data(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} -provide_label(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} -``` - -Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, -`[(:data, (100,1,28,28))]` or `[(:softmax_label, (100,1))]`. It should also implement the following convenient -function - -```julia -get_batch_size(provider :: AbstractDataProvider) => Int -``` - -which returns the batch size used in this data provider. - -A data provider should implement the standard Julia iteration interface, including `Base.start`, -`Base.next`, `Base.done` and `Base.eltype`. It could safely assume that the interface functions will -always be called like - -```julia -for batch in provider - # ... 
- load_data!(batch, targets) -end -``` - -which translates into - -```julia -state = Base.start(provider) -while !done(provider, state) - (batch, state) = next(provider, state) - # ... - load_data!(batch, targets) -end -``` - -In other words, it could safely assume that `Base.next` is always called after `Base.done`. And neither -of those function will be called twice consequtively. The detailed interfaces are list below: - -```julia -Base.start(provider :: AbstractDataProvider) => AbstractDataProviderState -``` - -Initialize or reset the data iteration. - -```julia -Base.next(provider :: AbstractDataProvider, state :: AbstractDataProviderState) - => (AbstractDataBatch, AbstractDataProviderState) -``` - -Return one batch of data. Actual data can be retrieved from the batch by interface functions described -in the document of type `AbstractDataBatch`. - -```julia -Base.done(provider :: AbstractDataProvider, state :: AbstractDataProviderState) => Bool -``` - -Return `false` if there is more batch to get. - -```julia -Base.eltype(::Type{MyDataProvider}) => MyDataProviderState -``` - -Return the type of the data provider state. - - -*source:* -[MXNet/src/io.jl:71](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L71) - ---- - - -#### MXNet.mx.AbstractDataProviderState -Root type for states of data provider - -*source:* -[MXNet/src/io.jl:74](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L74) - ---- - - -#### MXNet.mx.MXDataProvider -Wrapper of built-in `libmxnet` data iterators. - - -*source:* -[MXNet/src/io.jl:119](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L119) - ---- - - -#### MXNet.mx.MXError -Exception thrown when an error occurred calling MXNet API. 
- -*source:* -[MXNet/src/init.jl:2](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/init.jl#L2) - ---- - - -#### MXNet.mx.NDArray -Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block - of tensor-based computation. - - **Note** since C/C++ use row-major ordering for arrays while Julia follows a - column-major ordering. To keep things consistent, we keep the underlying data - in their original layout, but use *language-native* convention when we talk - about shapes. For example, a mini-batch of 100 MNIST images is a tensor of - C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory - have shape (28,28,1,100). - - -*source:* -[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L32) - ---- - - -#### SlicedNDArray -A tuple of (slice, NDArray). Usually each NDArray resides on a different device, and each - slice describe which part of a larger piece of data should goto that device. - - -*source:* -[MXNet/src/io.jl:79](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/io.jl#L79) - ---- - - -#### @inplace(stmt) -Julia does not support re-definiton of += operator (like __iadd__ in python), -When one write a += b, it gets translated to a = a+b. a+b will allocate new -memory for the results, and the newly allocated NDArray object is then assigned -back to a, while the original contents in a is discarded. This is very inefficient -when we want to do inplace update. - -This macro is a simple utility to implement this behavior. Write - - @mx.inplace a += b - -will translate into - - mx.add_to!(a, b) - -which will do inplace adding of the contents of b into a. - - -*source:* -[MXNet/src/ndarray.jl:234](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/ndarray.jl#L234) - ---- - - -#### @mxcall(fv, argtypes, args...) 
-Utility macro to call MXNet API functions - -*source:* -[MXNet/src/init.jl:41](https://github.com/dmlc/MXNet.jl/tree/34a1c89bf2b65351914e00ccd12a033df724a721/src/init.jl#L41) - diff --git a/docs/api/ndarray.md b/docs/api/ndarray.md new file mode 100644 index 000000000000..2ede7ba99e78 --- /dev/null +++ b/docs/api/ndarray.md @@ -0,0 +1,189 @@ +# MXNet.mx + +## Internal + +--- + + +#### _import_ndarray_functions() +Import dynamic functions for NDArrays. The arguments to the functions are typically ordered +as + +```julia +func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) +``` + +unless NDARRAY_ARG_BEFORE_SCALAR is not set. In this case, the scalars are put before the input arguments: + +```julia +func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) +``` + +If `ACCEPT_EMPTY_MUTATE_TARGET` is set. An overloaded function without the output arguments will also be defined: + +```julia +func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) +``` + +Upon calling, the output arguments will be automatically initialized with empty NDArrays. + +Those functions always return the output arguments. If there is only one output (the typical situation), that +object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. 
+ + +*source:* +[MXNet/src/ndarray.jl:380](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L380) + +--- + + +#### copy!(dst::Array{Float32, N}, src::MXNet.mx.NDArray) +Copy data from NDArray to Julia Array + +*source:* +[MXNet/src/ndarray.jl:187](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L187) + +--- + + +#### copy!(dst::MXNet.mx.NDArray, src::MXNet.mx.NDArray) +Copy data between NDArrays + +*source:* +[MXNet/src/ndarray.jl:175](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L175) + +--- + + +#### copy!{T<:Real}(dst::MXNet.mx.NDArray, src::Array{T<:Real, N}) +Copy data from Julia Array to NDArray + +*source:* +[MXNet/src/ndarray.jl:195](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L195) + +--- + + +#### copy(arr::MXNet.mx.NDArray) +Create copy: NDArray -> Julia Array + +*source:* +[MXNet/src/ndarray.jl:205](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L205) + +--- + + +#### copy(arr::MXNet.mx.NDArray, ctx::MXNet.mx.Context) +Create copy: NDArray -> NDArray in a given context + +*source:* +[MXNet/src/ndarray.jl:211](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L211) + +--- + + +#### copy{T<:Real}(arr::Array{T<:Real, N}, ctx::MXNet.mx.Context) +Create copy: Julia Array -> NDArray in a given context + +*source:* +[MXNet/src/ndarray.jl:217](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L217) + +--- + + +#### ones{N}(shape::NTuple{N, Int64}) +Create NDArray and initialize with 1 + +*source:* +[MXNet/src/ndarray.jl:118](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L118) + +--- + + +#### setindex!(arr::MXNet.mx.NDArray, val::Real, ::Colon) +Assign all elements of an NDArray to a scalar + 
+*source:* +[MXNet/src/ndarray.jl:155](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L155) + +--- + + +#### size(arr::MXNet.mx.NDArray) +Get the shape of an `NDArray`. Note the shape is converted to Julia convention. + So the same piece of memory, in Julia (column-major), with shape (K, M, N), will be of the + shape (N, M, K) in the Python (row-major) binding. + + +*source:* +[MXNet/src/ndarray.jl:84](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L84) + +--- + + +#### slice(arr::MXNet.mx.NDArray, ::Colon) +`slice` create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest +changing dimension is supported. In Julia's column-major perspective, this is the last +dimension. For example, given an `NDArray` of shape (2,3,4), `sub(array, 2:3)` will create +a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is +used in data parallelization to split mini-batch into sub-batches for different devices. + + +*source:* +[MXNet/src/ndarray.jl:137](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L137) + +--- + + +#### zeros{N}(shape::NTuple{N, Int64}) +Create zero-ed NDArray of specific shape + +*source:* +[MXNet/src/ndarray.jl:105](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L105) + +--- + + +#### MXNet.mx.NDArray +Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block +of tensor-based computation. + +**Note** since C/C++ use row-major ordering for arrays while Julia follows a +column-major ordering. To keep things consistent, we keep the underlying data +in their original layout, but use *language-native* convention when we talk +about shapes. For example, a mini-batch of 100 MNIST images is a tensor of +C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory +have shape (28,28,1,100). 
+ + +*source:* +[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L32) + +--- + + +#### @inplace(stmt) +Julia does not support re-definiton of `+=` operator (like `__iadd__` in python), +When one write `a += b`, it gets translated to `a = a+b`. `a+b` will allocate new +memory for the results, and the newly allocated `NDArray` object is then assigned +back to a, while the original contents in a is discarded. This is very inefficient +when we want to do inplace update. + +This macro is a simple utility to implement this behavior. Write + +```julia +@mx.inplace a += b +``` + +will translate into + +```julia +mx.add_to!(a, b) +``` + +which will do inplace adding of the contents of b into a. + + +*source:* +[MXNet/src/ndarray.jl:247](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L247) + diff --git a/docs/api/symbol.md b/docs/api/symbol.md new file mode 100644 index 000000000000..1172f54214df --- /dev/null +++ b/docs/api/symbol.md @@ -0,0 +1,46 @@ +# MXNet.mx + +## Internal + +--- + + +#### get_internals(self::MXNet.mx.Symbol) +Get a new grouped symbol whose output contains all the internal outputs of this symbol. + +*source:* +[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/symbol.jl#L63) + +--- + + +#### group(symbols::MXNet.mx.Symbol...) +Create a symbol that groups symbols together + +*source:* +[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/symbol.jl#L77) + +--- + + +#### list_auxiliary_states(self::MXNet.mx.Symbol) +List all auxiliary states in the symbool. + +Auxiliary states are special states of symbols that do not corresponds to an argument, +and do not have gradient. But still be useful for the specific operations. +A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. 
+Most operators do not have Auxiliary states. + + +*source:* +[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/symbol.jl#L58) + +--- + + +#### variable(name::Union{AbstractString, Symbol}) +Create a symbolic variable with the given name + +*source:* +[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/symbol.jl#L70) + diff --git a/docs/build.jl b/docs/build.jl index 11a45ec75883..24c380f0f40a 100644 --- a/docs/build.jl +++ b/docs/build.jl @@ -3,5 +3,19 @@ using Lexicon config = Config(md_permalink = false, mathjax = true) -index = save("api/MXNet.md", mx, config) +function save_meta(file :: AbstractString, docs :: Lexicon.Metadata, order = [:source]) + isfile(file) || mkpath(dirname(file)) + open(file, "w") do io + for (k,v) in Lexicon.EachEntry(docs, order = order) + name = Lexicon.writeobj(k) + println(io, "#### $name") + println(io, v.docs.data) + end + end +end + +doc = Lexicon.metadata(MXNet.mx) +for mod in [:ndarray, :symbol] + save("api/$mod.md", MIME("text/md"), filter(doc, files=["$mod.jl"]), config) +end diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 33f8b3ce6a42..000000000000 --- a/docs/conf.py +++ /dev/null @@ -1,303 +0,0 @@ -# -*- coding: utf-8 -*- -# -# MXNet.jl documentation build configuration file, created by -# sphinx-quickstart on Tue Oct 20 00:09:24 2015. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. 
If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.mathjax', -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# source_suffix = ['.rst', '.md'] -source_suffix = ['.rst', '.md'] - -from recommonmark.parser import CommonMarkParser - -source_parsers = { - '.md': CommonMarkParser, -} - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'MXNet.jl' -copyright = u'2015, pluskid' -author = u'pluskid' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '0.0.1' -# The full version, including alpha/beta/rc tags. -release = '0.0.1' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. 
-#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'default' - -import os -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -if not on_rtd: # only import and set the theme if we're building docs locally - try: - import sphinx_rtd_theme - html_theme = "sphinx_rtd_theme" - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - except: - pass - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". 
-#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'MXNetjldoc' - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'MXNetjl.tex', u'MXNet.jl Documentation', - u'pluskid', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. 
-#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'mxnetjl', u'MXNet.jl Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'MXNetjl', u'MXNet.jl Documentation', - author, 'MXNetjl', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index b2598cac01bb..000000000000 --- a/docs/index.rst +++ /dev/null @@ -1,29 +0,0 @@ -.. MXNet.jl documentation master file, created by - sphinx-quickstart on Tue Oct 20 00:09:24 2015. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to MXNet.jl's documentation! -==================================== - -Contents: - -.. 
toctree:: - :maxdepth: 2 - -API Documentation ------------------ - -.. toctree:: - :maxdepth: 1 - - api/MXNet - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index e682697a9877..000000000000 --- a/docs/make.bat +++ /dev/null @@ -1,263 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - echo. 
coverage to run coverage check of the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -REM Check if sphinx-build is available and fallback to Python version if any -%SPHINXBUILD% 2> nul -if errorlevel 9009 goto sphinx_python -goto sphinx_ok - -:sphinx_python - -set SPHINXBUILD=python -m sphinx.__init__ -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -:sphinx_ok - - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. 
- echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\MXNetjl.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\MXNetjl.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %~dp0 - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. 
- goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "coverage" ( - %SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage - if errorlevel 1 exit /b 1 - echo. - echo.Testing of coverage in the sources finished, look at the ^ -results in %BUILDDIR%/coverage/python.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
- goto end -) - -:end diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 000000000000..4b8b0cbcf6ca --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,9 @@ +site_name: MXNet.jl +site_author: pluskid +repo_url: https://github.com/dmlc/MXNet.jl +theme: readthedocs +pages: + - Home: index.md + - API Documentation: + - 'ndarray': 'api/ndarray.md' + - 'symbol': 'api/symbol.md' diff --git a/src/ndarray.jl b/src/ndarray.jl index d5a61a173a34..a868495a846b 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -20,14 +20,14 @@ end # NDArray Type ################################################################################ """Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block - of tensor-based computation. - - **Note** since C/C++ use row-major ordering for arrays while Julia follows a - column-major ordering. To keep things consistent, we keep the underlying data - in their original layout, but use *language-native* convention when we talk - about shapes. For example, a mini-batch of 100 MNIST images is a tensor of - C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory - have shape (28,28,1,100). +of tensor-based computation. + +**Note** since C/C++ use row-major ordering for arrays while Julia follows a +column-major ordering. To keep things consistent, we keep the underlying data +in their original layout, but use *language-native* convention when we talk +about shapes. For example, a mini-batch of 100 MNIST images is a tensor of +C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory +have shape (28,28,1,100). """ type NDArray handle :: MX_NDArrayHandle @@ -129,10 +129,10 @@ end import Base: slice """`slice` create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest - changing dimension is supported. In Julia's column-major perspective, this is the last - dimension. 
For example, given an `NDArray` of shape (2,3,4), `sub(array, 2:3)` will create - a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is - used in data parallelization to split mini-batch into sub-batches for different devices. +changing dimension is supported. In Julia's column-major perspective, this is the last +dimension. For example, given an `NDArray` of shape (2,3,4), `sub(array, 2:3)` will create +a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is +used in data parallelization to split mini-batch into sub-batches for different devices. """ function slice(arr :: NDArray, ::Colon) arr @@ -224,19 +224,23 @@ end # Basic arithmetics #------------------------------------------------------------ """ -Julia does not support re-definiton of += operator (like __iadd__ in python), -When one write a += b, it gets translated to a = a+b. a+b will allocate new -memory for the results, and the newly allocated NDArray object is then assigned +Julia does not support re-definiton of `+=` operator (like `__iadd__` in python), +When one write `a += b`, it gets translated to `a = a+b`. `a+b` will allocate new +memory for the results, and the newly allocated `NDArray` object is then assigned back to a, while the original contents in a is discarded. This is very inefficient when we want to do inplace update. This macro is a simple utility to implement this behavior. Write - @mx.inplace a += b +```julia +@mx.inplace a += b +``` will translate into - mx.add_to!(a, b) +```julia +mx.add_to!(a, b) +``` which will do inplace adding of the contents of b into a. """ From 0257055072b7f350963a9b3a19806268d41e7d01 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 Oct 2015 02:03:32 -0400 Subject: [PATCH 077/630] one-sentence readme. 
--- docs/index.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/index.md b/docs/index.md index e69de29bb2d1..f5d55b86aec0 100644 --- a/docs/index.md +++ b/docs/index.md @@ -0,0 +1,3 @@ +# MXNet.jl + +MXNet.jl is a [Julia](http://julialang.org/) package for *efficient* and *flexible* deep learning, built on top of [dmlc/mxnet](https://github.com/dmlc/mxnet). From 1eb7ebcda9aec2b97ec88d208704411890d018c7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 Oct 2015 14:17:01 -0400 Subject: [PATCH 078/630] use osx.mk to simplify travis OSX build --- test/travis/build_mxnet.sh | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index 7b7f421d3cdb..5eaa31c03c7b 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -4,19 +4,18 @@ git clone --recursive https://github.com/dmlc/mxnet __mxnet_build cd __mxnet_build if [ ! -f config.mk ]; then - echo "Use the default config.m" - cp make/config.mk config.mk - if [ ${TRAVIS_OS_NAME} == "linux" ]; then + cp make/config.mk config.mk sed -i 's/export CC = gcc/export CC = gcc-4.8/g' config.mk sed -i 's/export CXX = g++/export CXX = g++-4.8/g' config.mk fi if [ ${TRAVIS_OS_NAME} == "osx" ]; then - # add built-in blas header file to path - sed -i -s 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk - # disable openmp - sed -i -s 's%USE_OPENMP = 1%USE_OPENMP = 0%g' config.mk + cp make/osx.mk config.mk + ## add built-in blas header file to path + #sed -i -s 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk + ## disable openmp + #sed -i -s 's%USE_OPENMP = 1%USE_OPENMP = 0%g' config.mk fi fi From 92237212c594440455567ec4d39caec909ddd11e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 20 
Oct 2015 14:50:31 -0400 Subject: [PATCH 079/630] clean up CI build script --- test/travis/build_mxnet.sh | 4 ---- 1 file changed, 4 deletions(-) diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh index 5eaa31c03c7b..6774011f6355 100755 --- a/test/travis/build_mxnet.sh +++ b/test/travis/build_mxnet.sh @@ -12,10 +12,6 @@ if [ ! -f config.mk ]; then if [ ${TRAVIS_OS_NAME} == "osx" ]; then cp make/osx.mk config.mk - ## add built-in blas header file to path - #sed -i -s 's%ADD_CFLAGS =%ADD_CFLAGS = -I/System/Library/Frameworks/Accelerate.framework/Versions/Current/Frameworks/vecLib.framework/Versions/Current/Headers/%' config.mk - ## disable openmp - #sed -i -s 's%USE_OPENMP = 1%USE_OPENMP = 0%g' config.mk fi fi From 2c945ea216f3c9431074f7f9466f77f2d22bee20 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Oct 2015 01:03:37 -0400 Subject: [PATCH 080/630] docstring for imported symbols --- docs/api/ndarray.md | 28 ++++++++++++++-------------- docs/api/symbol.md | 8 ++++---- docs/build.jl | 11 ----------- src/symbol.jl | 12 ++++++++++-- src/util.jl | 29 +++++++++++++++++++++++++++++ 5 files changed, 57 insertions(+), 31 deletions(-) diff --git a/docs/api/ndarray.md b/docs/api/ndarray.md index 2ede7ba99e78..c09b80ea0ab2 100644 --- a/docs/api/ndarray.md +++ b/docs/api/ndarray.md @@ -32,7 +32,7 @@ object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs wi *source:* -[MXNet/src/ndarray.jl:380](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L380) +[MXNet/src/ndarray.jl:380](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L380) --- @@ -41,7 +41,7 @@ object (`NDArray`) is returned. 
Otherwise, a tuple containing all the outputs wi Copy data from NDArray to Julia Array *source:* -[MXNet/src/ndarray.jl:187](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L187) +[MXNet/src/ndarray.jl:187](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L187) --- @@ -50,7 +50,7 @@ Copy data from NDArray to Julia Array Copy data between NDArrays *source:* -[MXNet/src/ndarray.jl:175](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L175) +[MXNet/src/ndarray.jl:175](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L175) --- @@ -59,7 +59,7 @@ Copy data between NDArrays Copy data from Julia Array to NDArray *source:* -[MXNet/src/ndarray.jl:195](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L195) +[MXNet/src/ndarray.jl:195](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L195) --- @@ -68,7 +68,7 @@ Copy data from Julia Array to NDArray Create copy: NDArray -> Julia Array *source:* -[MXNet/src/ndarray.jl:205](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L205) +[MXNet/src/ndarray.jl:205](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L205) --- @@ -77,7 +77,7 @@ Create copy: NDArray -> Julia Array Create copy: NDArray -> NDArray in a given context *source:* -[MXNet/src/ndarray.jl:211](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L211) +[MXNet/src/ndarray.jl:211](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L211) --- @@ -86,7 +86,7 @@ Create copy: NDArray -> NDArray in a given context Create copy: Julia Array -> NDArray in a given context *source:* 
-[MXNet/src/ndarray.jl:217](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L217) +[MXNet/src/ndarray.jl:217](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L217) --- @@ -95,7 +95,7 @@ Create copy: Julia Array -> NDArray in a given context Create NDArray and initialize with 1 *source:* -[MXNet/src/ndarray.jl:118](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L118) +[MXNet/src/ndarray.jl:118](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L118) --- @@ -104,7 +104,7 @@ Create NDArray and initialize with 1 Assign all elements of an NDArray to a scalar *source:* -[MXNet/src/ndarray.jl:155](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L155) +[MXNet/src/ndarray.jl:155](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L155) --- @@ -116,7 +116,7 @@ Get the shape of an `NDArray`. Note the shape is converted to Julia convention. 
*source:* -[MXNet/src/ndarray.jl:84](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L84) +[MXNet/src/ndarray.jl:84](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L84) --- @@ -130,7 +130,7 @@ used in data parallelization to split mini-batch into sub-batches for different *source:* -[MXNet/src/ndarray.jl:137](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L137) +[MXNet/src/ndarray.jl:137](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L137) --- @@ -139,7 +139,7 @@ used in data parallelization to split mini-batch into sub-batches for different Create zero-ed NDArray of specific shape *source:* -[MXNet/src/ndarray.jl:105](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L105) +[MXNet/src/ndarray.jl:105](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L105) --- @@ -157,7 +157,7 @@ have shape (28,28,1,100). *source:* -[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L32) +[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L32) --- @@ -185,5 +185,5 @@ which will do inplace adding of the contents of b into a. *source:* -[MXNet/src/ndarray.jl:247](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/ndarray.jl#L247) +[MXNet/src/ndarray.jl:247](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L247) diff --git a/docs/api/symbol.md b/docs/api/symbol.md index 1172f54214df..1ccd875896b9 100644 --- a/docs/api/symbol.md +++ b/docs/api/symbol.md @@ -9,7 +9,7 @@ Get a new grouped symbol whose output contains all the internal outputs of this symbol. 
*source:* -[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/symbol.jl#L63) +[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/symbol.jl#L63) --- @@ -18,7 +18,7 @@ Get a new grouped symbol whose output contains all the internal outputs of this Create a symbol that groups symbols together *source:* -[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/symbol.jl#L77) +[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/symbol.jl#L77) --- @@ -33,7 +33,7 @@ Most operators do not have Auxiliary states. *source:* -[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/symbol.jl#L58) +[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/symbol.jl#L58) --- @@ -42,5 +42,5 @@ Most operators do not have Auxiliary states. 
Create a symbolic variable with the given name *source:* -[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/d13ddc6542bdb00e26b87e721a9b0e79a22bbd66/src/symbol.jl#L70) +[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/symbol.jl#L70) diff --git a/docs/build.jl b/docs/build.jl index 24c380f0f40a..9576e4112c38 100644 --- a/docs/build.jl +++ b/docs/build.jl @@ -3,17 +3,6 @@ using Lexicon config = Config(md_permalink = false, mathjax = true) -function save_meta(file :: AbstractString, docs :: Lexicon.Metadata, order = [:source]) - isfile(file) || mkpath(dirname(file)) - open(file, "w") do io - for (k,v) in Lexicon.EachEntry(docs, order = order) - name = Lexicon.writeobj(k) - println(io, "#### $name") - println(io, v.docs.data) - end - end -end - doc = Lexicon.metadata(MXNet.mx) for mod in [:ndarray, :symbol] save("api/$mod.md", MIME("text/md"), filter(doc, files=["$mod.jl"]), config) diff --git a/src/symbol.jl b/src/symbol.jl index 921671e394e9..013c24d865c5 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -256,6 +256,14 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) kv_nargs_s = bytestring(ref_kv_nargs[]) kv_nargs = symbol(kv_nargs_s) + f_desc = bytestring(ref_desc[]) * "\n\n" + if !isempty(kv_nargs_s) + f_desc *= "This function support variable length positional `Symbol` inputs.\n\n" + end + f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) + f_desc *= "* `name`: Julia Symbol (e.g. `:my_symbol`), optional.\n\n The name of the symbol.\n\n" + f_desc *= "**Returns**\n\n`symbol`: `mx.Symbol`\n\n The constructed symbol." + # function $func_name(args...; kwargs...) 
func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) func_body = quote @@ -324,8 +332,8 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) func_def = Expr(:function, func_head, Expr(:block, func_body)) eval(func_def) - # TODO: add doc string - # eval(:(@doc($doc_str, $func_name))) + # add doc string + eval(:(@doc($f_desc, $func_name))) end function _import_atomic_symbol_creators() diff --git a/src/util.jl b/src/util.jl index 4a331fcbfec1..1c52fdf3f2c3 100644 --- a/src/util.jl +++ b/src/util.jl @@ -1,3 +1,6 @@ +################################################################################ +# Dataset related utilities +################################################################################ function get_data_dir() data_dir = joinpath(Pkg.dir("MXNet"), "data") mkpath(data_dir) @@ -38,3 +41,29 @@ function get_cifar10() filenames[:mean] = joinpath(cifar10_dir, "cifar/cifar_mean.bin") return filenames end + + +################################################################################ +# Internal Utilities +################################################################################ +function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{char_pp}, arg_descs::Ref{char_pp}, remove_dup::Bool=true) + param_keys = Set{AbstractString}() + + arg_names = pointer_to_array(arg_names[], narg) + arg_types = pointer_to_array(arg_types[], narg) + arg_descs = pointer_to_array(arg_descs[], narg) + docstrings = AbstractString[] + + for i = 1:narg + arg_name = bytestring(arg_names[i]) + if arg_name ∈ param_keys && remove_dup + continue + end + push!(param_keys, arg_name) + + arg_type = bytestring(arg_types[i]) + arg_desc = bytestring(arg_descs[i]) + push!(docstrings, "* `$arg_name`: $arg_type\n\n $arg_desc\n\n") + end + return "**Parameters**\n\n$(join(docstrings, "\n"))" +end From 1da331cc08aa7b90b858cdb16c9bd636d978e444 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Oct 2015 09:57:45 
-0400 Subject: [PATCH 081/630] installation guide (or what we wanted to be...) --- docs/index.md | 5 ++++- docs/user-guide/install.md | 20 ++++++++++++++++++++ mkdocs.yml | 2 ++ 3 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 docs/user-guide/install.md diff --git a/docs/index.md b/docs/index.md index f5d55b86aec0..350c3ab52fc9 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,3 +1,6 @@ # MXNet.jl -MXNet.jl is a [Julia](http://julialang.org/) package for *efficient* and *flexible* deep learning, built on top of [dmlc/mxnet](https://github.com/dmlc/mxnet). +MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of features include: + +* Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. +* Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. diff --git a/docs/user-guide/install.md b/docs/user-guide/install.md new file mode 100644 index 000000000000..ab62c116ed03 --- /dev/null +++ b/docs/user-guide/install.md @@ -0,0 +1,20 @@ +# Automatic Installation + +To install MXNet.jl, simply type +```jl +Pkg.add("MXNet") +``` +in the Julia REPL. MXNet.jl is built on top of [libmxnet](https://github.com/dmlc/mxnet). Upon installation, Julia will try to automatically download and build libmxnet. If the compilation fails due to unresolved dependency, or if you prefer to work with a customized installation of libmxnet, please see [below](#manual-compilation). 
+ +To use the latest git version of MXNet.jl, use the following command instead +```jl +Pkg.checkout("MXNet") +``` + +# Manual Compilation + +It is possible to compile libmxnet separately and point MXNet.jl to a the existing library in case automatic compilation fails due to unresolved dependencies in an un-standard environment; Or when one want to work with a seperate, maybe customized libmxnet. + +To build libmxnet, please refer to [the installation guide of libmxnet](http://mxnet.readthedocs.org/en/latest/build.html). After successfully installing libmxnet, set the `MXNET_HOME` environment variable to the location of libmxnet. In other words, the compiled `libmxnet.so` should be found in `$MXNET_HOME/lib`. + +When the `MXNET_HOME` environment variable is detected and the corresponding `libmxnet.so` could be loaded successfully, MXNet.jl will skip automatic building during installation and use the specified libmxnet instead. diff --git a/mkdocs.yml b/mkdocs.yml index 4b8b0cbcf6ca..1dec035a93bd 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -4,6 +4,8 @@ repo_url: https://github.com/dmlc/MXNet.jl theme: readthedocs pages: - Home: index.md + - User Guide: + - 'Installation Guide' : 'user-guide/install.md' - API Documentation: - 'ndarray': 'api/ndarray.md' - 'symbol': 'api/symbol.md' From 684bad20fe7f200556b850b2e2d0405604b02c98 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Oct 2015 10:51:49 -0400 Subject: [PATCH 082/630] part of overview --- docs/user-guide/overview.md | 39 +++++++++++++++++++++++++++++++++++++ mkdocs.yml | 1 + 2 files changed, 40 insertions(+) create mode 100644 docs/user-guide/overview.md diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md new file mode 100644 index 000000000000..7271b9263a32 --- /dev/null +++ b/docs/user-guide/overview.md @@ -0,0 +1,39 @@ +# MXNet.jl Namespace + +Most the functions and types in MXNet.jl are organized in a flat namespace. 
Because many some functions are conflicting with existing names in the Julia Base module, we wrap them all in a `mx` module. The convention of accessing the MXNet.jl interface is the to use the `mx.` prefix explicitly: +```jl +using MXNet + +x = mx.zeros(2,3) # MXNet NDArray +y = zeros(eltype(x), size(x)) # Julia Array +copy!(y, x) # Overloaded function in Julia Base +z = mx.ones(size(x), mx.gpu()) # MXNet NDArray on GPU +mx.copy!(z, y) # Same as copy!(z, y) +``` +Note functions like `size`, `copy!` that is extensively overloaded for various types works out of the box. But functions like `zeros` and `ones` will be ambiguous, so we always use the `mx.` prefix. If you prefer, the `mx.` prefix can be used explicitly for all MXNet.jl functions, including `size` and `copy!` as shown in the last line. + +# High Level Interface + +The way we build deep learning models in MXNet.jl is to use the powerful symbolic composition system. It is like [Theano](http://deeplearning.net/software/theano/), except that we avoided long expression compiliation time by providing *larger* neural network related building blocks to guarantee computation performance. See also [this note](http://mxnet.readthedocs.org/en/latest/program_model.html) for the design and trade-off of the MXNet symbolic composition system. + +The basic type is `mx.Symbol`. The following is a trivial example of composing two symbols with the `+` operation. +```jl +A = mx.variable(:A) +B = mx.variable(:B) +C = A + B +``` +We get a new *symbol* by composing existing *symbols* by some *operations*. A hierarchical architecture of a deep neural network could be realized by recursive composition. For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of 128 units and a ReLU activation function. 
+```jl +net = mx.variable(:data) +net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) +net = mx.Activation(data=net, name=:relu1, act_type=:relu) +net = mx.FullyConnected(data=net, name=:fc2, num_hidden=64) +net = mx.Softmax(data=net, name=:out) +``` +Each time we take the previous symbol, and compose with an operation. Unlike the simple `+` example above, the *operations* here are "bigger" ones, that correspond to common computation layers in deep neural networks. + +Each of those operation takes one or more input symbols for composition, with optional hyper-parameters (e.g. `num_hidden`, `act_type`) to further customize the composition results. + +When applying those operations, we can also specify a `name` for the result symbol. This is convenient if we want to refer to this symbol later on. If not supplied, a name will be automatically generated. + +# Low Level Interface diff --git a/mkdocs.yml b/mkdocs.yml index 1dec035a93bd..c364b2d4f3a6 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -6,6 +6,7 @@ pages: - Home: index.md - User Guide: - 'Installation Guide' : 'user-guide/install.md' + - 'Overview' : 'user-guide/overview.md' - API Documentation: - 'ndarray': 'api/ndarray.md' - 'symbol': 'api/symbol.md' From df6613dfdfd65a33446adae57d7980e5d331408d Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Oct 2015 17:01:57 -0400 Subject: [PATCH 083/630] shape inference doc --- docs/user-guide/overview.md | 81 +++++++++++++++++++++++++++++++++++-- 1 file changed, 78 insertions(+), 3 deletions(-) diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md index 7271b9263a32..86a81a4edf96 100644 --- a/docs/user-guide/overview.md +++ b/docs/user-guide/overview.md @@ -1,7 +1,7 @@ # MXNet.jl Namespace Most the functions and types in MXNet.jl are organized in a flat namespace. Because many some functions are conflicting with existing names in the Julia Base module, we wrap them all in a `mx` module. 
The convention of accessing the MXNet.jl interface is the to use the `mx.` prefix explicitly: -```jl +```julia using MXNet x = mx.zeros(2,3) # MXNet NDArray @@ -14,16 +14,18 @@ Note functions like `size`, `copy!` that is extensively overloaded for various t # High Level Interface +## Symbols and Composition + The way we build deep learning models in MXNet.jl is to use the powerful symbolic composition system. It is like [Theano](http://deeplearning.net/software/theano/), except that we avoided long expression compiliation time by providing *larger* neural network related building blocks to guarantee computation performance. See also [this note](http://mxnet.readthedocs.org/en/latest/program_model.html) for the design and trade-off of the MXNet symbolic composition system. The basic type is `mx.Symbol`. The following is a trivial example of composing two symbols with the `+` operation. -```jl +```julia A = mx.variable(:A) B = mx.variable(:B) C = A + B ``` We get a new *symbol* by composing existing *symbols* by some *operations*. A hierarchical architecture of a deep neural network could be realized by recursive composition. For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of 128 units and a ReLU activation function. -```jl +```julia net = mx.variable(:data) net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) net = mx.Activation(data=net, name=:relu1, act_type=:relu) @@ -36,4 +38,77 @@ Each of those operation takes one or more input symbols for composition, with op When applying those operations, we can also specify a `name` for the result symbol. This is convenient if we want to refer to this symbol later on. If not supplied, a name will be automatically generated. +Each symbol takes some arguments. For example, in the `+` case above, to compute the value of `C`, we will need to know the values of the two inputs `A` and `B`. 
For neural networks, the arguments are primarily two categories: *inputs* and *parameters*. *inputs* are data and labels for the networks, while *parameters* are typically trainable *weights*, *bias*, *filters*. + +When composing symbols, their arguments accumulates. We can list all the arguments by +```julia +julia> mx.list_arguments(net) +6-element Array{Symbol,1}: + :data # Input data, name from the first data variable + :fc1_weight # Weights of the fully connected layer named :fc1 + :fc1_bias # Bias of the layer :fc1 + :fc2_weight # Weights of the layer :fc2 + :fc2_bias # Bias of the layer :fc2 + :out_label # Input label, required by the softmax layer named :out +``` +Note the names of the arguments are generated according to the provided name for each layer. We can also specify those names explicitly: +```julia +net = mx.variable(:data) +w = mx.variable(:myweight) +net = mx.FullyConnected(data=data, weight=w, name=:fc1, num_hidden=128) +mx.list_arguments(net) +# => +# 3-element Array{Symbol,1}: +# :data +# :myweight +# :fc1_bias +``` +The simple fact is that a `variable` is just a placeholder `mx.Symbol`. In composition, we can use arbitrary symbols for arguments. For example: +```julia +net = mx.variable(:data) +net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) +net2 = mx.variable(:data2) +net2 = mx.FullyConnected(data=net2, name=:net2, num_hidden=128) +mx.list_arguments(net2) +# => +# 3-element Array{Symbol,1}: +# :data2 +# :net2_weight +# :net2_bias +composed_net = net2(data2=net, name=:composed) +mx.list_arguments(composed_net) +# => +# 5-element Array{Symbol,1}: +# :data +# :fc1_weight +# :fc1_bias +# :net2_weight +# :net2_bias +``` +Note we use a composed symbol, `net` as the argument `data2` for `net2` to get a new symbol, which we named `:composed`. It also shows that a symbol itself is a call-able object, which can be invoked to fill in missing arguments and get more complicated symbol compositions. 
+ +## Shape Inference + +Given enough information, the shapes of all arguments in a composed symbol could be inferred automatically. For example, given the input shape, and some hyper-parameters like `num_hidden`, the shapes for the weights and bias in a neural network could be inferred. +```julia +net = mx.variable(:data) +net = mx.FullyConnected(data=net, name=:fc1, num_hidden=10) +arg_shapes, out_shapes, aux_shapes = mx.infer_shape(net, data=(10, 64)) +``` +The returned shapes corresponds to arguments with the same order as returned by `mx.list_arguments`. The `out_shapes` are shapes for outputs, and `aux_shapes` can be safely ignored for now. +```julia +for (n,s) in zip(mx.list_arguments(net), arg_shapes) + println("$n => $s") +end +# => +# data => (10,64) +# fc1_weight => (10,10) +# fc1_bias => (10,) +for (n,s) in zip(mx.list_outputs(net), out_shapes) + println("$n => $s") +end +# => +# fc1_output => (10,64) +``` + # Low Level Interface From 4ac5e7bcc5e64e40a8f4453b1689dde901853e77 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Oct 2015 17:37:00 -0400 Subject: [PATCH 084/630] allow keyword arguments in bind. --- docs/user-guide/overview.md | 21 +++++++++++++++++++++ src/executor.jl | 32 +++++++++++++++++++++----------- 2 files changed, 42 insertions(+), 11 deletions(-) diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md index 86a81a4edf96..79c8ff6b5d6b 100644 --- a/docs/user-guide/overview.md +++ b/docs/user-guide/overview.md @@ -111,4 +111,25 @@ end # fc1_output => (10,64) ``` +## Binding and Executing + +In order to execute the computation graph specified a composed symbol, we will *bind* the free variables to concrete values, specified as `mx.NDArray`s. This will create an `mx.Executor` on a given `mx.Context`. A context describes the computation devices (CPUs, GPUs, etc.) and an executor will carry out the computation (forward/backward) specified in the corresponding symbolic composition. 
+```julia +A = mx.variable(:A) +B = mx.variable(:B) +C = A .* B +a = mx.ones(3) * 4 +b = mx.ones(3) * 2 +c_exec = mx.bind(C, context=mx.cpu(), args=Dict(:A => a, :B => b)) + +mx.forward(c_exec) +copy(c_exec.outputs[1]) # copy turns NDArray into Julia Array +# => +# 3-element Array{Float32,1}: +# 8.0 +# 8.0 +# 8.0 +``` +**TODO** Provide pointers to further details. + # Low Level Interface diff --git a/src/executor.jl b/src/executor.jl index 94aabba5ddd5..fe83db86a831 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -48,27 +48,26 @@ function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDA end arr end + # help the type inference + if allow_missing + args_vec = Union{NDArray,Void}[args_vec...] + else + args_vec = NDArray[args_vec...] + end args_hdr = MX_handle[(isa(x,Void) ? MX_handle(0) : x) for x in args_vec] return (args_hdr, args_vec) end @enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; - args_grad :: Union{Void,Vector{NDArray},Dict{Base.Symbol,NDArray}} = nothing, - aux_states :: Union{Void,Vector{NDArray},Dict{Base.Symbol,NDArray}} = nothing, + args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), + aux_states :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), grad_req :: Union{GRAD_REQ,Vector{GRAD_REQ},Dict{Base.Symbol,GRAD_REQ}} = GRAD_WRITE) arg_names = list_arguments(self) - args_hdr, args = _get_ndarray_inputs("args", args, arg_names, false) - if isa(args_grad, Void) - args_grad = [nothing for i=1:length(args)] - args_grad_hdr = MX_handle[Ptr{Void}(0) for i=1:length(args)] - else - args_grad_hdr, args_grad = _get_ndarray_inputs("args_grad", args_grad, arg_names, true) - end - - if isa(aux_states, Void); aux_states = NDArray[]; end + args_hdr, args = _get_ndarray_inputs("args", args, arg_names, false) + args_grad_hdr, args_grad = 
_get_ndarray_inputs("args_grad", args_grad, arg_names, true) aux_args_hdr, aux_states = _get_ndarray_inputs("aux_states", aux_states, list_auxiliary_states(self), false) if isa(grad_req, GRAD_REQ) @@ -90,6 +89,17 @@ function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict executor = Executor(MX_ExecutorHandle(ref_hdr[]), self, args, args_grad, aux_states) end +function bind(self :: Symbol; kwargs...) + kwargs = Dict(kwargs) + @assert(haskey(kwargs, :args), "Must specify args") + args = pop!(kwargs, :args) + if haskey(kwargs, :context) + context = pop!(kwargs, :context) + else + context = cpu() + end + bind(self, context, args; kwargs...) +end function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) arg_shapes, out_shapes, aux_shapes = infer_shape(self; kwargs...) From af5645615b8e19265de1088fbf49a7b95f7b030c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Oct 2015 17:56:43 -0400 Subject: [PATCH 085/630] part of the doc on NDArrays --- docs/user-guide/overview.md | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md index 79c8ff6b5d6b..b750877ed1bd 100644 --- a/docs/user-guide/overview.md +++ b/docs/user-guide/overview.md @@ -130,6 +130,19 @@ copy(c_exec.outputs[1]) # copy turns NDArray into Julia Array # 8.0 # 8.0 ``` -**TODO** Provide pointers to further details. +For neural networks, it is easier to use `simple_bind`. By providing the shape for input arguments, it will perform a shape inference for the rest of the arguments and create the `NDArray`s automatically. In practice, the binding and executing steps are hidden under the `Estimator` interface. + +**TODO** Provide pointers to estimator tutorial and further details about binding and symbolic API. # Low Level Interface + +## NDArrays + +`NDArray`s are basic building blocks of the actual computations in MXNet. 
It is like a Julia `Array` object, with some important differences listed here: + +* The actual data could live on different `Context` (e.g. GPUs). For some contexts, iterating into the elements one by one is very slow, thus indexing into `NDArray` is not supported in general. The easiest way to inspect the contents of an `NDArray` is to use the `copy` function to copy the contents as a Julia `Array`. +* Operations on `NDArray`s (including basic arithmetics and neural network related operators) are executed in parallel with automatic dependency tracking to ensure correctness. + +While most of the computation is hidden in libmxnet by operators corresponding to various neural network layers. Getting familiar with the `NDArray` API is useful for implementing `Optimizer`s or customized operators in Julia directly. + +## Distributed Key-value Store From cf9a06a0122f21279e844e86cef276611e254b0a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 01:22:54 -0400 Subject: [PATCH 086/630] getindex operator for NDArray --- docs/user-guide/overview.md | 100 ++++++++++++++++++++++++++++++++++++ src/ndarray.jl | 11 ++++ 2 files changed, 111 insertions(+) diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md index b750877ed1bd..40abe770363c 100644 --- a/docs/user-guide/overview.md +++ b/docs/user-guide/overview.md @@ -142,7 +142,107 @@ For neural networks, it is easier to use `simple_bind`. By providing the shape f * The actual data could live on different `Context` (e.g. GPUs). For some contexts, iterating into the elements one by one is very slow, thus indexing into `NDArray` is not supported in general. The easiest way to inspect the contents of an `NDArray` is to use the `copy` function to copy the contents as a Julia `Array`. * Operations on `NDArray`s (including basic arithmetics and neural network related operators) are executed in parallel with automatic dependency tracking to ensure correctness. 
+* There is no generics in `NDArray`, the `eltype` is always `mx.MX_float`. Because for applications in machine learning, single precision floating point numbers are typical a best choice balancing between precision, speed and portability. Also since libmxnet is designed to support multiple languages as front-ends, it is much simpler to implement with a fixed data type. While most of the computation is hidden in libmxnet by operators corresponding to various neural network layers. Getting familiar with the `NDArray` API is useful for implementing `Optimizer`s or customized operators in Julia directly. +The followings are common ways to create `NDArray` objects: + +* `mx.empty(shape[, context])`: create on uninitialized array of a given shape on a specific device. For example, `mx.empty(2,3)`, `mx.((2,3), mx.gpu(2))`. +* `mx.zeros(shape[, context])` and `mx.ones(shape[, context])`: similar to the Julia's built-in `zeros` and `ones`. +* `mx.copy(jl_arr, context)`: copy the contents of a Julia `Array` to a specific device. + +Most of the convenient functions like `size`, `length`, `ndims`, `eltype` on array objects should work out-of-the-box. Although indexing is not supported, it is possible to take *slices*: +```julia +a = mx.ones(2,3) +b = mx.slice(a, 1:2) +b[:] = 2 +println(copy(a)) +# => +# Float32[2.0 2.0 1.0 +# 2.0 2.0 1.0] +``` +A slice is a sub-region sharing the same memory with the original `NDArray` object. A slice is always a contiguous piece of memory, so only slicing on the *last* dimension is supported. The example above also shows a way to set the contents of an `NDArray`. 
+```julia +a = mx.empty(2,3) +a[:] = 0.5 # set all elements to a scalar +a[:] = rand(size(a)) # set contents with a Julia Array +copy!(a, rand(size(a))) # set value by copying a Julia Array +b = mx.empty(size(a)) +b[:] = a # copying and assignment between NDArrays +``` +Note due to the intrinsic limitation design of the Julia language, a normal assignment +```julia +a = b +``` +does **not** mean copying the contents of `b` to `a`. Instead, it just make the variable `a` pointing to a new object, which is `b`. Similarly, inplace arithmetics does not work as expected: +```julia +a = mx.ones(2) +r = a # keep a reference to a +b = mx.ones(2) +a += b # translates to a = a + b +println(copy(a)) +# => Float32[2.0f0,2.0f0] +println(copy(r)) +# => Float32[1.0f0,1.0f0] +``` +As we can see, `a` has expected value, but instead of inplace updating, a new `NDArray` is created and `a` is set to point to this new object. If we look at `r`, which still reference to the old `a`, its content has not changed. There is currently no way in Julia to overload the operators like `+=` to get customized behavior. + +Instead, you will need to write `a[:] = a+b`, or if you want *real* inplace `+=` operation, MXNet.jl provides a simple macro `@mx.inplace`: +```julia +@mx.inplace a += b +macroexpand(:(@mx.inplace a += b)) +# => :(MXNet.mx.add_to!(a,b)) +``` +As we can see, it translate the `+=` operator to an explicit `add_to!` function call, which invokes into libmxnet to add the contents of `b` into `a` directly. For example, the following is the update rule in the SGD `Optimizer` (both `grad` and `weight` are `NDArray` objects): +```julia +@inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) +``` +Note there is no much magic in `mx.inplace`: it only does a shallow translation. In the SGD update rule example above, the computation like scaling the gradient by `grad_scale` and adding the weight decay all create temporary `NDArray` objects. 
However, libmxnet has a customized memory allocator designed specifically to handle this kind of situations. So typically creating temp intermediate arrays is not a problem. The following snippet does a simple benchmark on allocating temp `NDArray`s vs. pre-allocating: +```julia +using Benchmark +using MXNet + +N_REP = 1000 +SHAPE = (128, 64) +CTX = mx.cpu() +LR = 0.1 + +function inplace_op() + weight = mx.zeros(SHAPE, CTX) + grad = mx.ones(SHAPE, CTX) + + # pre-allocate temp objects + grad_lr = mx.empty(SHAPE, CTX) + + for i = 1:N_REP + copy!(grad_lr, grad) + @mx.inplace grad_lr .*= LR + @mx.inplace weight -= grad_lr + end + return weight +end + +function normal_op() + weight = mx.zeros(SHAPE, CTX) + grad = mx.ones(SHAPE, CTX) + + for i = 1:N_REP + weight[:] -= LR * grad + end + return weight +end + +# make sure the results are the same +@assert(maximum(abs(copy(normal_op() - inplace_op()))) < 1e-6) + +println(compare([inplace_op, normal_op], 100)) +``` +The comparison on my laptop shows that + +| Row | Function | Average | Relative | Replications | +|-----|--------------|-----------|----------|--------------| +| 1 | "inplace_op" | 0.0074854 | 1.0 | 100 | +| 2 | "normal_op" | 0.0174202 | 2.32723 | 100 | + ## Distributed Key-value Store diff --git a/src/ndarray.jl b/src/ndarray.jl index a868495a846b..30b08b8ff1e9 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -167,6 +167,17 @@ function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, id copy!(slice(arr, idx), val) end +import Base: getindex +"""Shortcut for `slice`. **NOTE** the behavior for Julia's built-in index slicing is to create a +copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. 
+""" +function getindex(arr :: NDArray, ::Colon) + return arr +end +function getindex(arr :: NDArray, idx::UnitRange{Int}) + slice(arr, idx) +end + #------------------------------------------------------------ # Copying functions #------------------------------------------------------------ From 3a265442b4b2e612df576815b19c3506fff05c31 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 01:54:05 -0400 Subject: [PATCH 087/630] ndarray doc --- docs/user-guide/overview.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md index 40abe770363c..e09b3ec4d002 100644 --- a/docs/user-guide/overview.md +++ b/docs/user-guide/overview.md @@ -238,11 +238,13 @@ end println(compare([inplace_op, normal_op], 100)) ``` -The comparison on my laptop shows that +The comparison on my laptop shows that `normal_op` while allocating a lot of temp `NDArray`s in the loop (the performance gets worse when increasing `N_REP`), is only about twice slower than the pre-allocated one. | Row | Function | Average | Relative | Replications | |-----|--------------|-----------|----------|--------------| | 1 | "inplace_op" | 0.0074854 | 1.0 | 100 | | 2 | "normal_op" | 0.0174202 | 2.32723 | 100 | +So it will typically not be a problem unless you are at the bottleneck of the computation (e.g. implementing some neural network layers in Julia). 
+ ## Distributed Key-value Store From 0c3d0668df8c908c56da93b48597a6504b35ff75 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 12:22:40 -0400 Subject: [PATCH 088/630] API variable -> Variable; group -> Group --- examples/cifar10/cifar10.jl | 2 +- examples/mnist/lenet.jl | 2 +- examples/mnist/mlp.jl | 2 +- src/ndarray.jl | 3 +++ src/symbol.jl | 4 ++-- test/common.jl | 2 +- test/unittest/bind.jl | 4 ++-- test/unittest/symbol.jl | 6 +++--- 8 files changed, 14 insertions(+), 11 deletions(-) diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index 9d627b0471c2..50d399e32d5e 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -36,7 +36,7 @@ end #-------------------------------------------------------------------------------- # Actual architecture -data = mx.variable(:data) +data = mx.Variable(:data) conv1 = conv_factory(data, 96, (3,3); pad=(1,1), act_type=:relu) in3a = simple_factory(conv1, 32, 32) in3b = simple_factory(in3a, 32, 48) diff --git a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl index 7efe4c42eedc..d8e8c9fea0ee 100644 --- a/examples/mnist/lenet.jl +++ b/examples/mnist/lenet.jl @@ -4,7 +4,7 @@ using MXNet # define lenet # input -data = mx.variable(:data) +data = mx.Variable(:data) # first conv conv1 = mx.Convolution(data=data, kernel=(5,5), num_filter=20) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index c2d30235e0f6..e8c095a613fe 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -1,7 +1,7 @@ using MXNet # define MLP -data = mx.variable(:data) +data = mx.Variable(:data) fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) diff --git a/src/ndarray.jl b/src/ndarray.jl index 30b08b8ff1e9..c3207b8aa963 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -325,6 +325,9 @@ function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) 
ret = copy(arg0, context(arg0)) mul_to!(ret, arg) end +function .*(arg0 :: Real, arg :: NDArray) + .*(arg, arg0) +end # unlike *, we only allow type Real in arguments, because array-array * operator # means matrix multiplication in Julia function *(arg0 :: NDArray, arg :: Real) diff --git a/src/symbol.jl b/src/symbol.jl index 013c24d865c5..e0daf53e36c3 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -67,14 +67,14 @@ function get_internals(self :: Symbol) end "Create a symbolic variable with the given name" -function variable(name :: Union{Base.Symbol, AbstractString}) +function Variable(name :: Union{Base.Symbol, AbstractString}) hdr_ref = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) Symbol(MX_SymbolHandle(hdr_ref[])) end "Create a symbol that groups symbols together" -function group(symbols :: Symbol...) +function Group(symbols :: Symbol...) handles = MX_handle[symbols...] ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateGroup, (MX_uint, Ptr{MX_handle}, Ref{MX_handle}), diff --git a/test/common.jl b/test/common.jl index 51dc1ed43ec4..fc4c4f63649e 100644 --- a/test/common.jl +++ b/test/common.jl @@ -12,7 +12,7 @@ function rand_dims(max_ndim=6) end function mlp2() - data = mx.variable(:data) + data = mx.Variable(:data) out = mx.FullyConnected(data=data, name=:fc1, num_hidden=1000) out = mx.Activation(data=out, act_type=:relu) out = mx.FullyConnected(data=out, name=:fc2, num_hidden=10) diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index 9f480f3f20f3..760e261fe581 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -11,8 +11,8 @@ function test_arithmetic(uf, gf) shape = rand_dims() info("Bind::arithmetic::$uf::dims = $shape") - lhs = mx.variable(:lhs) - rhs = mx.variable(:rhs) + lhs = mx.Variable(:lhs) + rhs = mx.Variable(:rhs) ret = uf(lhs, rhs) @test mx.list_arguments(ret) == [:lhs, :rhs] diff --git a/test/unittest/symbol.jl b/test/unittest/symbol.jl index b62496d20fbc..fecb25c159a8 100644 
--- a/test/unittest/symbol.jl +++ b/test/unittest/symbol.jl @@ -19,7 +19,7 @@ end function test_internal() info("Symbol::internal") - data = mx.variable(:data) + data = mx.Variable(:data) oldfc = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) net1 = mx.FullyConnected(data=oldfc, name=:fc2, num_hidden=100) @@ -33,7 +33,7 @@ end function test_compose() info("Symbol::compose") - data = mx.variable(:data) + data = mx.Variable(:data) net1 = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) net1 = mx.FullyConnected(data=net1, name=:fc2, num_hidden=100) @@ -42,7 +42,7 @@ function test_compose() net2 = mx.FullyConnected(data=net2, name=:fc4, num_hidden=20) composed = net2(fc3_data=net1, name=:composed) - multi_out = mx.group(composed, net1) + multi_out = mx.Group(composed, net1) @test mx.list_outputs(multi_out) == [:composed_output, :fc2_output] end From 9665cb1cb7112683cd8728b228aa1be487101d5e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 12:25:03 -0400 Subject: [PATCH 089/630] update doc: variable -> Variable --- docs/user-guide/overview.md | 40 +++++++++++++++++++++++++------------ 1 file changed, 27 insertions(+), 13 deletions(-) diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md index e09b3ec4d002..33567bd7817d 100644 --- a/docs/user-guide/overview.md +++ b/docs/user-guide/overview.md @@ -20,13 +20,13 @@ The way we build deep learning models in MXNet.jl is to use the powerful symboli The basic type is `mx.Symbol`. The following is a trivial example of composing two symbols with the `+` operation. ```julia -A = mx.variable(:A) -B = mx.variable(:B) +A = mx.Variable(:A) +B = mx.Variable(:B) C = A + B ``` We get a new *symbol* by composing existing *symbols* by some *operations*. A hierarchical architecture of a deep neural network could be realized by recursive composition. 
For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of 128 units and a ReLU activation function. ```julia -net = mx.variable(:data) +net = mx.Variable(:data) net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) net = mx.Activation(data=net, name=:relu1, act_type=:relu) net = mx.FullyConnected(data=net, name=:fc2, num_hidden=64) @@ -53,8 +53,8 @@ julia> mx.list_arguments(net) ``` Note the names of the arguments are generated according to the provided name for each layer. We can also specify those names explicitly: ```julia -net = mx.variable(:data) -w = mx.variable(:myweight) +net = mx.Variable(:data) +w = mx.Variable(:myweight) net = mx.FullyConnected(data=data, weight=w, name=:fc1, num_hidden=128) mx.list_arguments(net) # => @@ -63,11 +63,11 @@ mx.list_arguments(net) # :myweight # :fc1_bias ``` -The simple fact is that a `variable` is just a placeholder `mx.Symbol`. In composition, we can use arbitrary symbols for arguments. For example: +The simple fact is that a `Variable` is just a placeholder `mx.Symbol`. In composition, we can use arbitrary symbols for arguments. For example: ```julia -net = mx.variable(:data) +net = mx.Variable(:data) net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) -net2 = mx.variable(:data2) +net2 = mx.Variable(:data2) net2 = mx.FullyConnected(data=net2, name=:net2, num_hidden=128) mx.list_arguments(net2) # => @@ -91,7 +91,7 @@ Note we use a composed symbol, `net` as the argument `data2` for `net2` to get a Given enough information, the shapes of all arguments in a composed symbol could be inferred automatically. For example, given the input shape, and some hyper-parameters like `num_hidden`, the shapes for the weights and bias in a neural network could be inferred. 
```julia -net = mx.variable(:data) +net = mx.Variable(:data) net = mx.FullyConnected(data=net, name=:fc1, num_hidden=10) arg_shapes, out_shapes, aux_shapes = mx.infer_shape(net, data=(10, 64)) ``` @@ -115,8 +115,8 @@ end In order to execute the computation graph specified a composed symbol, we will *bind* the free variables to concrete values, specified as `mx.NDArray`s. This will create an `mx.Executor` on a given `mx.Context`. A context describes the computation devices (CPUs, GPUs, etc.) and an executor will carry out the computation (forward/backward) specified in the corresponding symbolic composition. ```julia -A = mx.variable(:A) -B = mx.variable(:B) +A = mx.Variable(:A) +B = mx.Variable(:B) C = A .* B a = mx.ones(3) * 4 b = mx.ones(3) * 2 @@ -198,7 +198,7 @@ As we can see, it translate the `+=` operator to an explicit `add_to!` function ```julia @inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) ``` -Note there is no much magic in `mx.inplace`: it only does a shallow translation. In the SGD update rule example above, the computation like scaling the gradient by `grad_scale` and adding the weight decay all create temporary `NDArray` objects. However, libmxnet has a customized memory allocator designed specifically to handle this kind of situations. So typically creating temp intermediate arrays is not a problem. The following snippet does a simple benchmark on allocating temp `NDArray`s vs. pre-allocating: +Note there is no much magic in `mx.inplace`: it only does a shallow translation. In the SGD update rule example above, the computation like scaling the gradient by `grad_scale` and adding the weight decay all create temporary `NDArray` objects. To mitigate this issue, libmxnet has a customized memory allocator designed specifically to handle this kind of situations. The following snippet does a simple benchmark on allocating temp `NDArray`s vs. 
pre-allocating: ```julia using Benchmark using MXNet @@ -245,6 +245,20 @@ The comparison on my laptop shows that `normal_op` while allocating a lot of tem | 1 | "inplace_op" | 0.0074854 | 1.0 | 100 | | 2 | "normal_op" | 0.0174202 | 2.32723 | 100 | -So it will typically not be a problem unless you are at the bottleneck of the computation (e.g. implementing some neural network layers in Julia). +So it will usually not be a big problem unless you are at the bottleneck of the computation. ## Distributed Key-value Store + +The type `KVStore` and related methods are used for data sharing across different devices or machines. It provides a simple and efficient integer-`NDArray` key-value storage system that each device can pull or push. + +The following example shows how to create a `local` `KVStore`, initialize a value and then pull it back. +```julia +kv = mx.KVStore(:local) +shape = (2,3) +key = 3 + +mx.init!(kv, key, mx.ones(shape)*2) +a = mx.empty(shape) +mx.pull!(kv, key, a) # pull value into a +println(copy(a)) +``` From 210aba8530931aa97f2fc1d8c7985c1350dc6ce0 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 12:26:08 -0400 Subject: [PATCH 090/630] add libmxnet.dll into library search list --- src/init.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/init.jl b/src/init.jl index 2c4239ce8d25..eae4cd8995b9 100644 --- a/src/init.jl +++ b/src/init.jl @@ -16,7 +16,7 @@ typealias char_pp Ptr{char_p} ################################################################################ # Initialization and library API entrance ################################################################################ -const MXNET_LIB = Libdl.find_library(["libmxnet.so"], ["$(get(ENV,"MXNET_HOME",""))/lib"]) +const MXNET_LIB = Libdl.find_library(["libmxnet.so","libmxnet.dll"], ["$(get(ENV,"MXNET_HOME",""))/lib"]) function __init__() _import_ndarray_functions() From 478568c9cbabc24c062da9599334e0fd287210ab Mon Sep 17 00:00:00 2001 From: Chiyuan 
Zhang Date: Thu, 22 Oct 2015 13:03:52 -0400 Subject: [PATCH 091/630] doc for mnist --- docs/tutorials/mnist.md | 72 +++++++++++++++++++++++++++++++++++++++++ examples/mnist/mlp.jl | 2 +- mkdocs.yml | 2 ++ 3 files changed, 75 insertions(+), 1 deletion(-) create mode 100644 docs/tutorials/mnist.md diff --git a/docs/tutorials/mnist.md b/docs/tutorials/mnist.md new file mode 100644 index 000000000000..dfddf7408e6a --- /dev/null +++ b/docs/tutorials/mnist.md @@ -0,0 +1,72 @@ +In this tutorial, we will work through examples of training a simple multi-layer perceptron and then a convolutional neural network (the LeNet architecture) on the [MNIST handwritten digit dataset](http://yann.lecun.com/exdb/mnist/). The code for this tutorial could be found in [`Pkg.dir("MXNet")`/examples/mnist/](https://github.com/dmlc/MXNet.jl/tree/master/examples/mnist). + +# Simple 3-layer MLP + +This is a tiny 3-layer MLP that could be easily trained on CPU. The script starts with +```julia +using MXNet +``` +to load the `MXNet` module. Then we are ready to define the network architecture via the [symbolic API](../user-guide/overview.md#symbols-and-composition). We start with a placeholder `data` symbol, +```julia +data = mx.Variable(:data) +``` +and then cascading fully-connected layers and activation functions: +```julia +fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) +act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) +fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) +act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) +fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) +``` +Note each composition we take the previous symbol as the `data` argument, forming a feedforward chain. The architecture looks like +``` +Input --> 128 units (ReLU) --> 64 units (ReLU) --> 10 units +``` +where the last 10 units correspond to the 10 output classes (digits 0,...,9). 
We then add a final `Softmax` operation to turn the 10-dimensional prediction to proper probability values for the 10 classes: +```julia +mlp = mx.Softmax(data = fc3, name=:softmax) +``` + +After defining the architecture, we are ready to load the MNIST data. MXNet.jl provide built-in data providers for the MNIST dataset, which could automatically download the dataset into `Pkg.dir("MXNet")/data/mnist` if necessary. We wrap the code to construct the data provider into `mnist-data.jl` so that it could be shared by both the MLP example and the LeNet ConvNets example. +```julia +batch_size = 100 +include("mnist-data.jl") +train_provider, eval_provider = get_mnist_providers(batch_size) +``` +If you need to write your own data providers for customized data format, please refer to **TODO**: pointer to data provider API. + +Given the architecture and data, we can instantiate an *estimator* to do the actual training. `mx.FeedForward` is the built-in estimator that is suitable for most feed-forward architectures. When constructing the estimator, we also specify the *context* on which the computation should be carried out. Because this is a really tiny MLP, we will just run on a single CPU device. +```julia +estimator = mx.FeedForward(mlp, context=mx.cpu()) +``` +You can use a `mx.gpu()` or if a list of devices (e.g. `[mx.gpu(0), mx.gpu(1)]`) is provided, data-parallelization will be used automatically. But for this tiny example, using a GPU device might not help. + +The last thing we need to specify is the optimization algorithm (a.k.a. *optimizer*) to use. We use the basic SGD with a fixed learning rate 0.1 and momentum 0.9: +```julia +optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.1), + mom_scheduler=mx.FixedMomentumScheduler(0.9), + weight_decay=0.00001) +``` +Now we can do the training. Here the `epoch_stop` parameter specifies that we want to train for 20 epochs. We also supply a `eval_data` to monitor validation accuracy on the validation set. 
+```julia +mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) +``` +Here is a sample output +``` +INFO: Start training on [CPU0] +INFO: Initializing parameters... +INFO: Creating KVStore... +INFO: == Epoch 001 ========== +INFO: ## Training summary +INFO: :accuracy = 0.7554 +INFO: time = 1.3165 seconds +INFO: ## Validation summary +INFO: :accuracy = 0.9502 +... +INFO: == Epoch 020 ========== +INFO: ## Training summary +INFO: :accuracy = 0.9949 +INFO: time = 0.9287 seconds +INFO: ## Validation summary +INFO: :accuracy = 0.9775 +``` diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index e8c095a613fe..a30e876ae708 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -15,7 +15,7 @@ include("mnist-data.jl") train_provider, eval_provider = get_mnist_providers(batch_size) # setup estimator -estimator = mx.FeedForward(mlp, context=mx.Context(mx.CPU)) +estimator = mx.FeedForward(mlp, context=mx.cpu()) # optimizer optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.1), diff --git a/mkdocs.yml b/mkdocs.yml index c364b2d4f3a6..78822e3ada5a 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -7,6 +7,8 @@ pages: - User Guide: - 'Installation Guide' : 'user-guide/install.md' - 'Overview' : 'user-guide/overview.md' + - Tutorials: + - 'MNIST': 'tutorials/mnist.md' - API Documentation: - 'ndarray': 'api/ndarray.md' - 'symbol': 'api/symbol.md' From d15bc779ab829327f2162ea9525e2cf22e66f819 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 17:17:42 -0400 Subject: [PATCH 092/630] add mx.chain macro --- README.md | 10 ++++++++++ examples/mnist/mlp.jl | 27 ++++++++++++++++++++------- src/symbol.jl | 29 +++++++++++++++++++++++++++++ 3 files changed, 59 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 4852bcd52161..2df461d4993c 100644 --- a/README.md +++ b/README.md @@ -6,3 +6,13 @@ Julia wrapper of [MXNet](https://github.com/dmlc/mxnet). 
+ +```julia +mlp = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=128) => + mx.Activation(name=:relu1, act_type=:relu) => + mx.FullyConnected(name=:fc2, num_hidden=64) => + mx.Activation(name=:relu2, act_type=:relu) => + mx.FullyConnected(name=:fc3, num_hidden=10) => + mx.Softmax(name=:softmax) +``` diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index a30e876ae708..8fab150ad7a7 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -1,13 +1,26 @@ using MXNet +#-------------------------------------------------------------------------------- # define MLP -data = mx.Variable(:data) -fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) -act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) -fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) -act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) -fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) -mlp = mx.Softmax(data = fc3, name=:softmax) +# the following two ways are equivalent + +#-- Option 1: explicit composition +# data = mx.Variable(:data) +# fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) +# act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) +# fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) +# act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) +# fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) +# mlp = mx.Softmax(data = fc3, name=:softmax) + +#-- Option 2: using the mx.chain macro +mlp = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=128) => + mx.Activation(name=:relu1, act_type=:relu) => + mx.FullyConnected(name=:fc2, num_hidden=64) => + mx.Activation(name=:relu2, act_type=:relu) => + mx.FullyConnected(name=:fc3, num_hidden=10) => + mx.Softmax(name=:softmax) # data provider batch_size = 100 diff --git a/src/symbol.jl b/src/symbol.jl index e0daf53e36c3..b69dfd37f226 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ 
-349,3 +349,32 @@ function _import_atomic_symbol_creators() _define_atomic_symbol_creator(creator_hdr) end end + +################################################################################ +# Utility macros to chain up symbols +################################################################################ +macro chain(layers) + exprs = [] + last_layer = nothing + function _chain_layer(layer, last_layer) + if isa(last_layer, Void) + layer + else + @assert(isa(layer, Expr) && layer.head == :call, "Do not know how to chain up $layer") + return Expr(:call, layer.args[1], last_layer, layer.args[2:end]...) + end + end + while true + if layers.head == :(=>) + new_layer = gensym() + push!(exprs, :($new_layer = $(_chain_layer(layers.args[1], last_layer)))) + last_layer = new_layer + layers = layers.args[2] + else + push!(exprs, _chain_layer(layers, last_layer)) + break + end + end + return Expr(:block, exprs...) +end + From e9088f5d64ced63ac988bf7fa60eb74403f2168a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 18:22:38 -0400 Subject: [PATCH 093/630] rename init.jl -> base.jl --- src/{init.jl => base.jl} | 66 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) rename src/{init.jl => base.jl} (69%) diff --git a/src/init.jl b/src/base.jl similarity index 69% rename from src/init.jl rename to src/base.jl index eae4cd8995b9..b3c6cee026dc 100644 --- a/src/init.jl +++ b/src/base.jl @@ -129,3 +129,69 @@ end function dump_mx_param{N,T<:Integer}(shape :: NTuple{N, T}) string(tuple(flipdim([shape...],1)...)) end + +"""A convenient macro copied from Mocha.jl that could be used to define structs +with default values and type checks. 
For example +```julia +@defstruct MyStruct Any ( + field1 :: Int = 0, + (field2 :: AbstractString = "", !isempty(field2)) +) +``` +where each field could be either +```julia +field_name :: field_type = default_value +``` +or put within a tuple, with the second element +specifying a validation check on the field value. +In the example above, the default value for +field2 does not satisfy the assertion, this +could be used to force user to provide a +valid value when no meaningful default value +is available. + +The macro will define a constructor that could accept +the keyword arguments. +""" +macro defstruct(name, super_name, fields) + @assert fields.head == :tuple + fields = fields.args + @assert length(fields) > 0 + name = esc(name) + + field_defs = Array(Expr, length(fields)) # :(field2 :: Int) + field_names = Array(Base.Symbol, length(fields)) # :field2 + field_defaults = Array(Expr, length(fields)) # :(field2 :: Int = 0) + field_asserts = Array(Expr, length(fields)) # :(field2 >= 0) + + for i = 1:length(fields) + field = fields[i] + if field.head == :tuple + field_asserts[i] = field.args[2] + field = field.args[1] + end + field_defs[i] = esc(field.args[1]) + field_names[i] = field.args[1].args[1] + field_defaults[i] = Expr(:kw, field.args...) + end + + # body of layer type, defining fields + type_body = Expr(:block, field_defs...) + + # constructor + asserts = map(filter(i -> isdefined(field_asserts,i), 1:length(fields))) do i + :(@assert($(field_asserts[i]))) + end + construct = Expr(:call, name, field_names...) 
+ ctor_body = Expr(:block, asserts..., construct) + ctor_def = Expr(:call, name, Expr(:parameters, field_defaults...)) + ctor = Expr(:(=), ctor_def, ctor_body) + + quote + type $(name) <: $super_name + $type_body + end + + $ctor + end +end From bed3a02d7b1ef58aab2db80e620433965af5514c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 18:22:55 -0400 Subject: [PATCH 094/630] refactor optimizer options --- src/MXNet.jl | 2 +- src/estimator.jl | 2 +- src/optimizer.jl | 50 +++---------------------------------- src/optimizers/sgd.jl | 57 +++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 62 insertions(+), 49 deletions(-) create mode 100644 src/optimizers/sgd.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index cfa56b21d292..0e56b743d5d5 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -8,7 +8,7 @@ export mx module mx using Formatting -include("init.jl") +include("base.jl") include("context.jl") include("ndarray.jl") diff --git a/src/estimator.jl b/src/estimator.jl index e349ec952115..42402d0a2bdf 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -138,7 +138,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra param_arrays = [NDArray[exec.arg_arrays[i] for exec in train_execs] for i in param_idx] grad_arrays = [NDArray[exec.grad_arrays[i] for exec in train_execs] for i in param_idx] - optimizer.inv_batch_size = 1.0/batch_size + optimizer.batch_size = batch_size if !update_on_kvstore updater = get_updater(optimizer) diff --git a/src/optimizer.jl b/src/optimizer.jl index 191fc6a855c2..d01f92e7d735 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -17,53 +17,6 @@ type FixedMomentumScheduler <: AbstractMomentumScheduler end get_momentum(self :: FixedMomentumScheduler, iter :: Int) = self.momentum -type SGD <: AbstractOptimizer - iter :: Int - - lr_scheduler :: AbstractLearningRateScheduler - mom_scheduler :: AbstractMomentumScheduler - weight_decay :: Float64 - grad_scale :: Float64 - grad_clip :: Float64 - 
inv_batch_size:: Float64 - - function SGD(;lr_scheduler::AbstractLearningRateScheduler=FixedLearningRateScheduler(0.01), - mom_scheduler::AbstractMomentumScheduler=NullMomentumScheduler(), - weight_decay::Float64=0.0001, - grad_scale::Float64=1.0, - grad_clip::Float64=0.0) - new(0, lr_scheduler, mom_scheduler, weight_decay, grad_scale, grad_clip, 1.0) - end -end - -function create_state(self :: SGD, index :: Int, weight :: NDArray) - if isa(self.mom_scheduler, NullMomentumScheduler) - return nothing - else - return zeros(size(weight), context(weight)) - end -end - -function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Union{Void, NDArray}) - lr = get_learning_rate(self.lr_scheduler, self.iter) - grad_scale = self.grad_scale * self.inv_batch_size - - if isa(state, Void) - @inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) - else - mom = state :: NDArray - coef = get_momentum(self.mom_scheduler, self.iter) - @inplace mom .*= coef - if self.grad_clip > 0 - # TODO: - else - @inplace mom += -lr * (grad_scale * grad + self.weight_decay * weight) - end - @inplace weight += mom - end -end - - function get_updater(optimizer :: AbstractOptimizer) states = Dict{Int,Any}() function updater(index :: Int, grad :: NDArray, weight :: NDArray) @@ -74,3 +27,6 @@ function get_updater(optimizer :: AbstractOptimizer) end return updater end + + +include("optimizers/sgd.jl") diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl new file mode 100644 index 000000000000..4e6a6757fb67 --- /dev/null +++ b/src/optimizers/sgd.jl @@ -0,0 +1,57 @@ +@defstruct SGDOptions Any ( + (lr :: Real = 0.01, lr > 0), + (momentum :: Real = 0.0, momentum >= 0), + (weight_decay :: Real = 0.0001, weight_decay >= 0), + (grad_scale :: Real = 1.0, grad_scale >= 0), + (grad_clip :: Real = 0, grad_clip >= 0), + lr_scheduler :: Any = nothing, + mom_scheduler :: Any = nothing +) + + +type SGD <: AbstractOptimizer + iter :: Int + batch_size :: Int + opts :: 
SGDOptions + + function SGD(; kwargs...) + opts = SGDOptions(;kwargs...) + if !isa(opts.lr_scheduler, AbstractLearningRateScheduler) + opts.lr_scheduler = FixedLearningRateScheduler(opts.lr) + end + if !isa(opts.mom_scheduler, AbstractMomentumScheduler) + opts.mom_scheduler = opts.momentum > 0 ? + FixedMomentumScheduler(opts.momentum) : + NullMomentumScheduler() + end + + new(0, 0, opts) + end +end + +function create_state(self :: SGD, index :: Int, weight :: NDArray) + if isa(self.opts.mom_scheduler, NullMomentumScheduler) + return nothing + else + return zeros(size(weight), context(weight)) + end +end + +function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Union{Void, NDArray}) + lr = get_learning_rate(self.opts.lr_scheduler, self.iter) + grad_scale = self.opts.grad_scale / self.batch_size + + if isa(state, Void) + @inplace weight += -lr * (grad_scale * grad + self.opts.weight_decay * weight) + else + mom = state :: NDArray + coef = get_momentum(self.opts.mom_scheduler, self.iter) + @inplace mom .*= coef + if self.opts.grad_clip > 0 + # TODO: + else + @inplace mom += -lr * (grad_scale * grad + self.opts.weight_decay * weight) + end + @inplace weight += mom + end +end From a75a8efedc7f2944d4cad0515661a95643a2c5c5 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 18:24:06 -0400 Subject: [PATCH 095/630] simplify mnist mlp example --- examples/mnist/mlp.jl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 8fab150ad7a7..66e1bdac49a8 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -31,9 +31,7 @@ train_provider, eval_provider = get_mnist_providers(batch_size) estimator = mx.FeedForward(mlp, context=mx.cpu()) # optimizer -optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.1), - mom_scheduler=mx.FixedMomentumScheduler(0.9), - weight_decay=0.00001) +optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit 
parameters mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) From 43b2486578509996a73bab93609a4cdaa9d6bff6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 18:29:44 -0400 Subject: [PATCH 096/630] clean up lenet example --- docs/tutorials/mnist.md | 14 +++++++++++--- examples/mnist/lenet.jl | 7 ++----- 2 files changed, 13 insertions(+), 8 deletions(-) diff --git a/docs/tutorials/mnist.md b/docs/tutorials/mnist.md index dfddf7408e6a..15b651d4b69c 100644 --- a/docs/tutorials/mnist.md +++ b/docs/tutorials/mnist.md @@ -26,6 +26,16 @@ where the last 10 units correspond to the 10 output classes (digits 0,...,9). We ```julia mlp = mx.Softmax(data = fc3, name=:softmax) ``` +As we can see, the MLP is just a chain of layers. For this case, we can also use the `mx.chain` macro. The same architecture above can be defined as +```julia +mlp = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=128) => + mx.Activation(name=:relu1, act_type=:relu) => + mx.FullyConnected(name=:fc2, num_hidden=64) => + mx.Activation(name=:relu2, act_type=:relu) => + mx.FullyConnected(name=:fc3, num_hidden=10) => + mx.Softmax(name=:softmax) +``` After defining the architecture, we are ready to load the MNIST data. MXNet.jl provide built-in data providers for the MNIST dataset, which could automatically download the dataset into `Pkg.dir("MXNet")/data/mnist` if necessary. We wrap the code to construct the data provider into `mnist-data.jl` so that it could be shared by both the MLP example and the LeNet ConvNets example. ```julia @@ -43,9 +53,7 @@ You can use a `mx.gpu()` or if a list of devices (e.g. `[mx.gpu(0), mx.gpu(1)]`) The last thing we need to specify is the optimization algorithm (a.k.a. *optimizer*) to use. 
We use the basic SGD with a fixed learning rate 0.1 and momentum 0.9: ```julia -optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.1), - mom_scheduler=mx.FixedMomentumScheduler(0.9), - weight_decay=0.00001) +optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) ``` Now we can do the training. Here the `epoch_stop` parameter specifies that we want to train for 20 epochs. We also supply a `eval_data` to monitor validation accuracy on the validation set. ```julia diff --git a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl index d8e8c9fea0ee..bcf0b02893b5 100644 --- a/examples/mnist/lenet.jl +++ b/examples/mnist/lenet.jl @@ -36,13 +36,10 @@ train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) #-------------------------------------------------------------------------------- # fit model -dev = mx.Context(mx.GPU) -estimator = mx.FeedForward(lenet, context=dev) +estimator = mx.FeedForward(lenet, context=mx.gpu()) # optimizer -optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), - mom_scheduler=mx.FixedMomentumScheduler(0.9), - weight_decay=0.00001) +optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) # fit parameters mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) From 02e4af36114fd5a18c4b97fc2ccd5a4884b9ecb5 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 20:55:07 -0400 Subject: [PATCH 097/630] hygiene in chain macro --- src/symbol.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/symbol.jl b/src/symbol.jl index b69dfd37f226..097fe1d5b20a 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -358,10 +358,10 @@ macro chain(layers) last_layer = nothing function _chain_layer(layer, last_layer) if isa(last_layer, Void) - layer + esc(layer) else @assert(isa(layer, Expr) && layer.head == :call, "Do not know how to chain up $layer") - return Expr(:call, layer.args[1], last_layer, layer.args[2:end]...) 
+ return Expr(:call, esc(layer.args[1]), last_layer, map(esc, layer.args[2:end])...) end end while true From 1fb4946797084a63f15558225732115cc479bff3 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 21:02:03 -0400 Subject: [PATCH 098/630] use @chain in lenet example. --- examples/mnist/lenet.jl | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl index bcf0b02893b5..af631e44c3bc 100644 --- a/examples/mnist/lenet.jl +++ b/examples/mnist/lenet.jl @@ -7,22 +7,22 @@ using MXNet data = mx.Variable(:data) # first conv -conv1 = mx.Convolution(data=data, kernel=(5,5), num_filter=20) -tanh1 = mx.Activation(data=conv1, act_type=:tanh) -pool1 = mx.Pooling(data=tanh1, pool_type=:max, kernel=(2,2), stride=(2,2)) +conv1 = @mx.chain mx.Convolution(data=data, kernel=(5,5), num_filter=20) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) # second conv -conv2 = mx.Convolution(data=pool1, kernel=(5,5), num_filter=50) -tanh2 = mx.Activation(data=conv2, act_type=:tanh) -pool2 = mx.Pooling(data=tanh2, pool_type=:max, kernel=(2,2), stride=(2,2)) +conv2 = @mx.chain mx.Convolution(data=conv1, kernel=(5,5), num_filter=50) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) # first fully-connected -flat = mx.Flatten(data=pool2) -fc1 = mx.FullyConnected(data=flat, num_hidden=500) -tanh3 = mx.Activation(data=fc1, act_type=:tanh) +fc1 = @mx.chain mx.Flatten(data=conv2) => + mx.FullyConnected(num_hidden=500) => + mx.Activation(act_type=:tanh) # second fully-connected -fc2 = mx.FullyConnected(data=tanh3, num_hidden=10) +fc2 = mx.FullyConnected(data=fc1, num_hidden=10) # softmax loss lenet = mx.Softmax(data=fc2, name=:softmax) From 3548a5ffa15064dcf223022a10ddaf6aacdc558f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 21:29:17 -0400 Subject: [PATCH 099/630] lenet tutorial --- README.md | 
15 +++++++++ docs/tutorials/mnist.md | 70 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 85 insertions(+) diff --git a/README.md b/README.md index 2df461d4993c..559d630473a2 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,8 @@ Julia wrapper of [MXNet](https://github.com/dmlc/mxnet). ```julia +using MXNet + mlp = @mx.chain mx.Variable(:data) => mx.FullyConnected(name=:fc1, num_hidden=128) => mx.Activation(name=:relu1, act_type=:relu) => @@ -15,4 +17,17 @@ mlp = @mx.chain mx.Variable(:data) => mx.Activation(name=:relu2, act_type=:relu) => mx.FullyConnected(name=:fc3, num_hidden=10) => mx.Softmax(name=:softmax) + +# data provider +batch_size = 100 +train_provider, eval_provider = get_mnist_providers(batch_size) + +# setup estimator +estimator = mx.FeedForward(mlp, context=mx.cpu()) + +# optimizer +optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) + +# fit parameters +mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) ``` diff --git a/docs/tutorials/mnist.md b/docs/tutorials/mnist.md index 15b651d4b69c..219d8936f686 100644 --- a/docs/tutorials/mnist.md +++ b/docs/tutorials/mnist.md @@ -78,3 +78,73 @@ INFO: time = 0.9287 seconds INFO: ## Validation summary INFO: :accuracy = 0.9775 ``` + +# Convolutional Neural Networks + +In the second example, we show a slightly more complicated architecture that involves convolution and pooling. This architecture for the MNIST is usually called the *LeNet*. The first part of the architecture is listed below: +```julia +# input +data = mx.Variable(:data) + +# first conv +conv1 = @mx.chain mx.Convolution(data=data, kernel=(5,5), num_filter=20) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) + +# second conv +conv2 = @mx.chain mx.Convolution(data=conv1, kernel=(5,5), num_filter=50) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) +``` +We basically defined two convolution modules. 
Each convolution module is actually a chain of `Convolution`, `tanh` activation and then max `Pooling` operations. + +Each sample in the MNIST dataset is a 28x28 single-channel grayscale image. In the tensor format used by `NDArray`, a batch of 100 samples is a tensor of shape `(28,28,1,100)`. The convolution and pooling operates in the spatial axis, so `kernel=(5,5)` indicate a square region of 5-width and 5-height. +The rest of the architecture follows as: +```julia +# first fully-connected +fc1 = @mx.chain mx.Flatten(data=conv2) => + mx.FullyConnected(num_hidden=500) => + mx.Activation(act_type=:tanh) + +# second fully-connected +fc2 = mx.FullyConnected(data=fc1, num_hidden=10) + +# softmax loss +lenet = mx.Softmax(data=fc2, name=:softmax) +``` +Note a fully-connected operator expects the input to be a matrix. However, the results from spatial convolution and pooling are 4D tensors. So we explicitly used a `Flatten` operator to flat the tensor, before connecting it to the `FullyConnected` operator. + +The rest of the network is the same as the previous MLP example. As before, we can now load the MNIST dataset: +```julia +batch_size = 100 +include("mnist-data.jl") +train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) +``` +Note we specified `flat=false` to tell the data provider to provide 4D tensors instead of 2D matrices because the convolution operators needs correct spatial shape information. We then construct a feedforward model on GPU, and train it. 
+```julia +#-------------------------------------------------------------------------------- +# fit model +estimator = mx.FeedForward(lenet, context=mx.gpu()) + +# optimizer +optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) + +# fit parameters +mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) +``` +And here is a sample of running outputs: +``` +INFO: == Epoch 001 ========== +INFO: ## Training summary +INFO: :accuracy = 0.6750 +INFO: time = 4.9814 seconds +INFO: ## Validation summary +INFO: :accuracy = 0.9712 +... +INFO: == Epoch 020 ========== +INFO: ## Training summary +INFO: :accuracy = 1.0000 +INFO: time = 4.0086 seconds +INFO: ## Validation summary +INFO: :accuracy = 0.9915 +``` From 7c43f2cae15261d25d4b682dabcb730b44601fc5 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 21:37:33 -0400 Subject: [PATCH 100/630] update readme. --- README.md | 8 +++++++- docs/index.md | 2 ++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 559d630473a2..acf0fca121fb 100644 --- a/README.md +++ b/README.md @@ -4,8 +4,12 @@ [![Documentation Status](https://readthedocs.org/projects/mxnetjl/badge/?version=latest)](http://mxnetjl.readthedocs.org/en/latest/?badge=latest) [![License](http://dmlc.github.io/img/apache2.svg)](LICENSE.md) +MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of features include: -Julia wrapper of [MXNet](https://github.com/dmlc/mxnet). +* Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. +* Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. 
+ +Here is an exmple of how training a simple 3-layer MLP on MNIST looks like: ```julia using MXNet @@ -31,3 +35,5 @@ optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit parameters mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) ``` + +For more details, please refer to the [document](http://mxnetjl.readthedocs.org/) and [examples](examples). diff --git a/docs/index.md b/docs/index.md index 350c3ab52fc9..c411cc86fd95 100644 --- a/docs/index.md +++ b/docs/index.md @@ -4,3 +4,5 @@ MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julia * Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. * Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. + +To install MXNet.jl, please follow the [installaton guide](user-guide/install.md). See the navigation menu in the sidebar for an [overview](user-guide/overview.md) of MXNet.jl and tutorials on training neural networks in MXNet.jl. 
From 3b43bcbd23c7db40f18d92ab4b794ceb3002dfe4 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 23:30:09 -0400 Subject: [PATCH 101/630] first version of deps/build.jl that is not working b/c opencv --- REQUIRE | 1 + deps/build.jl | 82 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 83 insertions(+) create mode 100644 deps/build.jl diff --git a/REQUIRE b/REQUIRE index 76fa30ce5f54..a0f5c9865565 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,2 +1,3 @@ julia 0.4 Formatting +BinDeps diff --git a/deps/build.jl b/deps/build.jl new file mode 100644 index 000000000000..00d088d4bc17 --- /dev/null +++ b/deps/build.jl @@ -0,0 +1,82 @@ +################################################################################ +# First try to detect and load existing libmxnet +################################################################################ +# if haskey(ENV, "MXNET_HOME") +# info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") +# info("Trying to load existing libmxnet...") +# lib = Libdl.find_library(["libmxnet.so","libmxnet.dll"], ["$(ENV["MXNET_HOME"])/lib"]) +# if !isempty(lib) +# info("Existing libmxnet detected at $lib") +# exit(0) +# else +# info("Failed to load existing libmxnet, trying to build from source...") +# end +# end + + +################################################################################ +# If not found, try to build automatically using BinDeps +################################################################################ +@windows_only begin + info("Automatic building libxmnet on Windows is currently not supported.") + info("Please follow the libmxnet documentation on how to build manually") + info("or to install pre-build packages:") + info("http://mxnet.readthedocs.org/en/latest/build.html#building-on-windows") + exit(-1) +end + +using BinDeps +@BinDeps.setup + +#-------------------------------------------------------------------------------- +# Install dependencies, opencv and blas +opencv_core 
= library_dependency("opencv_core", aliases=["libopencv_core"]) + +@linux_only begin + provides(AptGet, "libopencv-dev", opencv_core) + provides(Pacman, "opencv", opencv_core) + provides(Yum, "opencv", opencv_core) + + blas = library_dependency("blas", aliases=["libblas","libblas.so.3"]) + provides(AptGet, "libblas-dev", blas) + provides(Pacman, "blas", blas) + provides(Yum, "blas-devel", blas) +end + +@osx_only begin + using Homebrew + provides(Homebrew.HB, "opencv", opencv_core, os = :Darwin) + + # OSX has built-in BLAS we could use +end + +@BinDeps.install Dict(:opencv_core => :opencv_core) +@linux_only begin + @BinDeps.install Dict(:blas => :blas) +end + +#-------------------------------------------------------------------------------- +# Build libmxnet +mxnet = library_dependency("mxnet", aliases=["libmxnet"]) + +prefix = joinpath(BinDeps.depsdir(mxnet), "usr") +srcdir = joinpath(BinDeps.depsdir(mxnet),"src", "libmxnet") +libdir = joinpath(prefix, "lib") +provides(BuildProcess, + (@build_steps begin + CreateDirectory(srcdir) + CreateDirectory(libdir) + @build_steps begin + ChangeDirectory(srcdir) + `git clone --recursive https://github.com/dmlc/mxnet` + FileRule(joinpath(libdir, "libmxnet.so"), @build_steps begin + ChangeDirectory("mxnet") + @osx_only `cp make/osx.mk config.mk` + @osx_only `echo hahahahahahahaha=================` + `make` + `cp lib/libmxnet.so $libdir` + end) + end + end), mxnet) + + @BinDeps.install Dict(:mxnet => :mxnet) From e77fdd0f234858943606a61c90a506f92e7a9cf1 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Oct 2015 23:56:31 -0400 Subject: [PATCH 102/630] default building without opencv. 
--- .gitignore | 4 +++- deps/build.jl | 29 ++++++++--------------------- src/base.jl | 4 +++- 3 files changed, 14 insertions(+), 23 deletions(-) diff --git a/.gitignore b/.gitignore index 1b5cdca45b40..5660a75bb89b 100644 --- a/.gitignore +++ b/.gitignore @@ -2,4 +2,6 @@ *.jl.*.cov *.jl.mem data -docs/_build +deps/src +deps/usr +deps/deps.jl diff --git a/deps/build.jl b/deps/build.jl index 00d088d4bc17..6959325c86ea 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -29,38 +29,23 @@ using BinDeps @BinDeps.setup #-------------------------------------------------------------------------------- -# Install dependencies, opencv and blas -opencv_core = library_dependency("opencv_core", aliases=["libopencv_core"]) - +# Install dependencies, blas @linux_only begin - provides(AptGet, "libopencv-dev", opencv_core) - provides(Pacman, "opencv", opencv_core) - provides(Yum, "opencv", opencv_core) - blas = library_dependency("blas", aliases=["libblas","libblas.so.3"]) provides(AptGet, "libblas-dev", blas) provides(Pacman, "blas", blas) provides(Yum, "blas-devel", blas) -end - -@osx_only begin - using Homebrew - provides(Homebrew.HB, "opencv", opencv_core, os = :Darwin) - # OSX has built-in BLAS we could use -end - -@BinDeps.install Dict(:opencv_core => :opencv_core) -@linux_only begin @BinDeps.install Dict(:blas => :blas) end #-------------------------------------------------------------------------------- # Build libmxnet -mxnet = library_dependency("mxnet", aliases=["libmxnet"]) +mxnet = library_dependency("mxnet", aliases=["libmxnet.so"]) prefix = joinpath(BinDeps.depsdir(mxnet), "usr") -srcdir = joinpath(BinDeps.depsdir(mxnet),"src", "libmxnet") +srcdir = joinpath(BinDeps.depsdir(mxnet),"src") +mxdir = joinpath(srcdir, "mxnet") libdir = joinpath(prefix, "lib") provides(BuildProcess, (@build_steps begin @@ -68,11 +53,13 @@ provides(BuildProcess, CreateDirectory(libdir) @build_steps begin ChangeDirectory(srcdir) + `rm -rf mxnet` `git clone --recursive 
https://github.com/dmlc/mxnet` FileRule(joinpath(libdir, "libmxnet.so"), @build_steps begin - ChangeDirectory("mxnet") + ChangeDirectory("$mxdir") + `cp make/config.mk config.mk` @osx_only `cp make/osx.mk config.mk` - @osx_only `echo hahahahahahahaha=================` + `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` `make` `cp lib/libmxnet.so $libdir` end) diff --git a/src/base.jl b/src/base.jl index b3c6cee026dc..cf940c9ef232 100644 --- a/src/base.jl +++ b/src/base.jl @@ -16,7 +16,9 @@ typealias char_pp Ptr{char_p} ################################################################################ # Initialization and library API entrance ################################################################################ -const MXNET_LIB = Libdl.find_library(["libmxnet.so","libmxnet.dll"], ["$(get(ENV,"MXNET_HOME",""))/lib"]) +const MXNET_LIB = Libdl.find_library(["libmxnet.so","libmxnet.dll"], + [joinpath("$(get(ENV,"MXNET_HOME",""))","lib"), + joinpath(Pkg.dir("MXNet"),"deps/usr/lib")]) function __init__() _import_ndarray_functions() From 263e60211d18fe85e4555facba1c798a07173e42 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 23 Oct 2015 00:07:59 -0400 Subject: [PATCH 103/630] update install guide --- docs/user-guide/install.md | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/user-guide/install.md b/docs/user-guide/install.md index ab62c116ed03..eb4e1ecec80d 100644 --- a/docs/user-guide/install.md +++ b/docs/user-guide/install.md @@ -4,7 +4,10 @@ To install MXNet.jl, simply type ```jl Pkg.add("MXNet") ``` -in the Julia REPL. MXNet.jl is built on top of [libmxnet](https://github.com/dmlc/mxnet). Upon installation, Julia will try to automatically download and build libmxnet. If the compilation fails due to unresolved dependency, or if you prefer to work with a customized installation of libmxnet, please see [below](#manual-compilation). +in the Julia REPL. 
MXNet.jl is built on top of [libmxnet](https://github.com/dmlc/mxnet). Upon installation, Julia will try to automatically download and build libmxnet. + +The libmxnet source is downloaded to `Pkg.dir("MXNet")/deps/src/mxnet`. The automatic build is using default configurations, with OpenCV, CUDA disabled. +If the compilation failed due to unresolved dependency, or if you want to customize the build, it is recommended to compile and install libmxnet manually. Please see [below](#manual-compilation) for more details. To use the latest git version of MXNet.jl, use the following command instead ```jl @@ -18,3 +21,9 @@ It is possible to compile libmxnet separately and point MXNet.jl to a the existi To build libmxnet, please refer to [the installation guide of libmxnet](http://mxnet.readthedocs.org/en/latest/build.html). After successfully installing libmxnet, set the `MXNET_HOME` environment variable to the location of libmxnet. In other words, the compiled `libmxnet.so` should be found in `$MXNET_HOME/lib`. When the `MXNET_HOME` environment variable is detected and the corresponding `libmxnet.so` could be loaded successfully, MXNet.jl will skip automatic building during installation and use the specified libmxnet instead. + +Basically, MXNet.jl will search `libmxnet.so` or `libmxnet.dll` in the following paths (and in that order): + +* `$MXNET_HOME/lib`: customized libmxnet builds +* `Pkg.dir("MXNet")/deps/usr/lib`: automatic builds +* Any system wide library search path From 7934f672e2449a6c221bb3088ed40c2c6256a134 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 23 Oct 2015 01:09:49 -0400 Subject: [PATCH 104/630] add release notes for v0.0.1 --- NEWS.md | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 NEWS.md diff --git a/NEWS.md b/NEWS.md new file mode 100644 index 000000000000..6a43caddce02 --- /dev/null +++ b/NEWS.md @@ -0,0 +1,8 @@ +# v0.0.1 (2015.10.23) + +Initial release. + +* Basic libmxnet API. 
+* Basic documentation, overview and MNIST tutorial. +* Working MNIST and cifar-10 examples, with multi-GPU training. +* Automatic building of libmxnet with BinDeps.jl. From 0a32996d656a755f416b3daf4321b5de6b9f7f0f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 23 Oct 2015 07:24:23 -0400 Subject: [PATCH 105/630] fix build script (don't exit Julia) --- deps/build.jl | 111 +++++++++++++++++++++++++------------------------- 1 file changed, 56 insertions(+), 55 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 6959325c86ea..5c0dd2bb749f 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -1,69 +1,70 @@ ################################################################################ # First try to detect and load existing libmxnet ################################################################################ -# if haskey(ENV, "MXNET_HOME") -# info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") -# info("Trying to load existing libmxnet...") -# lib = Libdl.find_library(["libmxnet.so","libmxnet.dll"], ["$(ENV["MXNET_HOME"])/lib"]) -# if !isempty(lib) -# info("Existing libmxnet detected at $lib") -# exit(0) -# else -# info("Failed to load existing libmxnet, trying to build from source...") -# end -# end +libmxnet_detected = false - -################################################################################ -# If not found, try to build automatically using BinDeps -################################################################################ -@windows_only begin - info("Automatic building libxmnet on Windows is currently not supported.") - info("Please follow the libmxnet documentation on how to build manually") - info("or to install pre-build packages:") - info("http://mxnet.readthedocs.org/en/latest/build.html#building-on-windows") - exit(-1) +if haskey(ENV, "MXNET_HOME") + info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") + info("Trying to load existing libmxnet...") + lib = 
Libdl.find_library(["libmxnet.so","libmxnet.dll"], ["$(ENV["MXNET_HOME"])/lib"]) + if !isempty(lib) + info("Existing libmxnet detected at $lib, skip building...") + libmxnet_detected = true + else + info("Failed to load existing libmxnet, trying to build from source...") + end end using BinDeps @BinDeps.setup +if !libmxnet_detected + ################################################################################ + # If not found, try to build automatically using BinDeps + ################################################################################ + @windows_only begin + info("Please follow the libmxnet documentation on how to build manually") + info("or to install pre-build packages:") + info("http://mxnet.readthedocs.org/en/latest/build.html#building-on-windows") + error("Automatic building libxmnet on Windows is currently not supported yet.") + end -#-------------------------------------------------------------------------------- -# Install dependencies, blas -@linux_only begin - blas = library_dependency("blas", aliases=["libblas","libblas.so.3"]) - provides(AptGet, "libblas-dev", blas) - provides(Pacman, "blas", blas) - provides(Yum, "blas-devel", blas) + #-------------------------------------------------------------------------------- + # Install dependencies, blas + @linux_only begin + blas = library_dependency("blas", aliases=["libblas","libblas.so.3"]) + provides(AptGet, "libblas-dev", blas) + provides(Pacman, "blas", blas) + provides(Yum, "blas-devel", blas) - @BinDeps.install Dict(:blas => :blas) -end + @BinDeps.install Dict(:blas => :blas) + end -#-------------------------------------------------------------------------------- -# Build libmxnet -mxnet = library_dependency("mxnet", aliases=["libmxnet.so"]) + #-------------------------------------------------------------------------------- + # Build libmxnet + mxnet = library_dependency("mxnet", aliases=["libmxnet.so"]) -prefix = joinpath(BinDeps.depsdir(mxnet), "usr") -srcdir = 
joinpath(BinDeps.depsdir(mxnet),"src") -mxdir = joinpath(srcdir, "mxnet") -libdir = joinpath(prefix, "lib") -provides(BuildProcess, - (@build_steps begin - CreateDirectory(srcdir) - CreateDirectory(libdir) - @build_steps begin - ChangeDirectory(srcdir) - `rm -rf mxnet` - `git clone --recursive https://github.com/dmlc/mxnet` - FileRule(joinpath(libdir, "libmxnet.so"), @build_steps begin - ChangeDirectory("$mxdir") - `cp make/config.mk config.mk` - @osx_only `cp make/osx.mk config.mk` - `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` - `make` - `cp lib/libmxnet.so $libdir` - end) - end - end), mxnet) + _prefix = joinpath(BinDeps.depsdir(mxnet), "usr") + _srcdir = joinpath(BinDeps.depsdir(mxnet),"src") + _mxdir = joinpath(_srcdir, "mxnet") + _libdir = joinpath(_prefix, "lib") + provides(BuildProcess, + (@build_steps begin + CreateDirectory(_srcdir) + CreateDirectory(_libdir) + @build_steps begin + ChangeDirectory(_srcdir) + `rm -rf mxnet` + `git clone --recursive https://github.com/dmlc/mxnet` + FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin + ChangeDirectory("$_mxdir") + `cp make/config.mk config.mk` + @osx_only `cp make/osx.mk config.mk` + `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` + `make` + `cp lib/libmxnet.so $_libdir` + end) + end + end), mxnet) @BinDeps.install Dict(:mxnet => :mxnet) +end From 48f423017989d720ed8af3d88b3eb08a9a3272a6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 23 Oct 2015 07:52:47 -0400 Subject: [PATCH 106/630] update news for bugfix v0.0.2 --- NEWS.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/NEWS.md b/NEWS.md index 6a43caddce02..53c3f2e5418d 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,7 @@ +# v0.0.2 (2015.10.23) + +* Fix a bug in build script that causes Julia REPL to exit. + # v0.0.1 (2015.10.23) Initial release. @@ -6,3 +10,4 @@ Initial release. * Basic documentation, overview and MNIST tutorial. * Working MNIST and cifar-10 examples, with multi-GPU training. 
* Automatic building of libmxnet with BinDeps.jl. + From 119fac1eddccb052e327b4ab88e2a6828ffa9d8a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 24 Oct 2015 20:39:15 -0400 Subject: [PATCH 107/630] update kvstore doc. --- docs/user-guide/overview.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md index 33567bd7817d..3155b281245d 100644 --- a/docs/user-guide/overview.md +++ b/docs/user-guide/overview.md @@ -261,4 +261,7 @@ mx.init!(kv, key, mx.ones(shape)*2) a = mx.empty(shape) mx.pull!(kv, key, a) # pull value into a println(copy(a)) +# => +# Float32[2.0 2.0 2.0 +# 2.0 2.0 2.0] ``` From d738fba18ebf731bf4f7306d81056fc63357810a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 24 Oct 2015 21:31:13 -0400 Subject: [PATCH 108/630] NDArray save and load API --- src/ndarray.jl | 59 ++++++++++++++++++++++++++++++++++++++++ test/unittest/ndarray.jl | 41 ++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+) diff --git a/src/ndarray.jl b/src/ndarray.jl index c3207b8aa963..2ae28b3a48a4 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -352,6 +352,65 @@ function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) div_from!(ret, arg) end +#------------------------------------------------------------ +# IO +#------------------------------------------------------------ +"""Load NDArrays from binary file. + +**Parameters**: + +* `filename`: the path of the file to load. It could be S3 or HDFS address + if the `libmxnet` is built with the corresponding component enabled. Examples + + * `s3://my-bucket/path/my-s3-ndarray` + * `hdfs://my-bucket/path/my-hdfs-ndarray` + * `/path-to/my-local-ndarray` + +**Returns**: + + Either `Dict{Base.Symbol, NDArray}` or `Vector{NDArray}`. 
+""" +function load_ndarrays(filename::AbstractString) + out_size = Ref{MX_uint}(0) + out_hdrs = Ref{Ptr{MX_handle}}(0) + out_name_size = Ref{MX_uint}(0) + out_names = Ref{char_pp}(0) + @mxcall(:MXNDArrayLoad, (char_p, Ref{MX_uint}, Ref{Ptr{MX_handle}}, Ref{MX_uint}, Ref{char_pp}), + filename, out_size, out_hdrs, out_name_size, out_names) + out_name_size = out_name_size[] + out_size = out_size[] + if out_name_size == 0 + return [NDArray(MX_NDArrayHandle(hdr)) for hdr in pointer_to_array(out_hdrs[], out_size)] + else + @assert out_size == out_name_size + return Dict([(symbol(bytestring(k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in + zip(pointer_to_array(out_names[], out_size), pointer_to_array(out_hdrs[], out_size))]) + end +end + +"""Save NDarrays to binary file. + +**Parameters**: + +* `filename`: path to the binary file to write to. +* `data`: an `NDArray`, or a `Vector{NDArray}` or a `Dict{Base.Symbol, NDArray}`. +""" +function save_ndarrays(filename::AbstractString, data::NDArray) + save_ndarrays(filename, [data]) +end +function save_ndarrays(filename::AbstractString, data::Vector{NDArray}) + @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), + filename, length(data), MX_handle[data...], char_pp(0)) +end +function save_ndarrays(filename::AbstractString, data::Dict{Base.Symbol,NDArray}) + names = [k for k in keys(data)] + arrays = MX_handle[data[k] for k in names] + names = AbstractString[string(k) for k in names] + + @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), + filename, length(names), arrays, names) +end + ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 7d5df826f32b..565846154b0b 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -176,6 +176,46 @@ function 
test_gd() end +function test_saveload() + n_arrays = 5 + info("NDArray::saveload::n_arrays = $n_arrays") + fname = tempname() + + # save and load a single array + dims = rand_dims() + j_array, nd_array = rand_tensors(dims) + mx.save_ndarrays(fname, nd_array) + data = mx.load_ndarrays(fname) + @test isa(data, Vector{mx.NDArray}) + @test length(data) == 1 + @test reldiff(copy(data[1]), j_array) < 1e-6 + + # save and load N arrays of different shape + arrays = [rand_tensors(rand_dims()) for i = 1:n_arrays] + nd_arrays = [x[2] for x in arrays] + mx.save_ndarrays(fname, nd_arrays) + data = mx.load_ndarrays(fname) + @test isa(data, Vector{mx.NDArray}) + @test length(data) == n_arrays + for i = 1:n_arrays + @test reldiff(copy(data[i]), arrays[i][1]) < 1e-6 + end + + # save and load dictionary of ndarrays + names = [symbol("array$i") for i = 1:n_arrays] + dict = Dict([n => v for (n,v) in zip(names, nd_arrays)]) + mx.save_ndarrays(fname, dict) + data = mx.load_ndarrays(fname) + @test isa(data, Dict{Symbol, mx.NDArray}) + @test length(data) == n_arrays + for i = 1:n_arrays + @test reldiff(copy(data[names[i]]), arrays[i][1]) < 1e-6 + end + + rm(fname) +end + + ################################################################################ # Run tests ################################################################################ @@ -187,5 +227,6 @@ test_minus() test_mul() test_div() test_gd() +test_saveload() end From 4e5e152cccf0176cf5e9be5674ddcc01ab4ef9cb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 24 Oct 2015 21:42:40 -0400 Subject: [PATCH 109/630] gradient clipping --- docs/api/ndarray.md | 77 ++++++++++++++++++++++++++++++++-------- docs/api/symbol.md | 36 +++++++++---------- src/optimizers/sgd.jl | 17 ++++----- test/unittest/ndarray.jl | 16 +++++++++ 4 files changed, 106 insertions(+), 40 deletions(-) diff --git a/docs/api/ndarray.md b/docs/api/ndarray.md index c09b80ea0ab2..87e664ffb0cc 100644 --- a/docs/api/ndarray.md +++ b/docs/api/ndarray.md @@ -32,7 
+32,7 @@ object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs wi *source:* -[MXNet/src/ndarray.jl:380](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L380) +[MXNet/src/ndarray.jl:453](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L453) --- @@ -41,7 +41,7 @@ object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs wi Copy data from NDArray to Julia Array *source:* -[MXNet/src/ndarray.jl:187](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L187) +[MXNet/src/ndarray.jl:198](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L198) --- @@ -50,7 +50,7 @@ Copy data from NDArray to Julia Array Copy data between NDArrays *source:* -[MXNet/src/ndarray.jl:175](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L175) +[MXNet/src/ndarray.jl:186](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L186) --- @@ -59,7 +59,7 @@ Copy data between NDArrays Copy data from Julia Array to NDArray *source:* -[MXNet/src/ndarray.jl:195](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L195) +[MXNet/src/ndarray.jl:206](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L206) --- @@ -68,7 +68,7 @@ Copy data from Julia Array to NDArray Create copy: NDArray -> Julia Array *source:* -[MXNet/src/ndarray.jl:205](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L205) +[MXNet/src/ndarray.jl:216](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L216) --- @@ -77,7 +77,7 @@ Create copy: NDArray -> Julia Array Create copy: NDArray -> NDArray in a given context *source:* 
-[MXNet/src/ndarray.jl:211](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L211) +[MXNet/src/ndarray.jl:222](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L222) --- @@ -86,7 +86,41 @@ Create copy: NDArray -> NDArray in a given context Create copy: Julia Array -> NDArray in a given context *source:* -[MXNet/src/ndarray.jl:217](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L217) +[MXNet/src/ndarray.jl:228](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L228) + +--- + + +#### getindex(arr::MXNet.mx.NDArray, ::Colon) +Shortcut for `slice`. **NOTE** the behavior for Julia's built-in index slicing is to create a +copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. + + +*source:* +[MXNet/src/ndarray.jl:174](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L174) + +--- + + +#### load_ndarrays(filename::AbstractString) +Load NDArrays from binary file. + +**Parameters**: + +* `filename`: the path of the file to load. It could be S3 or HDFS address + if the `libmxnet` is built with the corresponding component enabled. Examples + + * `s3://my-bucket/path/my-s3-ndarray` + * `hdfs://my-bucket/path/my-hdfs-ndarray` + * `/path-to/my-local-ndarray` + +**Returns**: + + Either `Dict{Base.Symbol, NDArray}` or `Vector{NDArray}`. 
+ + +*source:* +[MXNet/src/ndarray.jl:373](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L373) --- @@ -95,7 +129,22 @@ Create copy: Julia Array -> NDArray in a given context Create NDArray and initialize with 1 *source:* -[MXNet/src/ndarray.jl:118](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L118) +[MXNet/src/ndarray.jl:118](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L118) + +--- + + +#### save_ndarrays(filename::AbstractString, data::MXNet.mx.NDArray) +Save NDarrays to binary file. + +**Parameters**: + +* `filename`: path to the binary file to write to. +* `data`: an `NDArray`, or a `Vector{NDArray}` or a `Dict{Base.Symbol, NDArray}`. + + +*source:* +[MXNet/src/ndarray.jl:398](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L398) --- @@ -104,7 +153,7 @@ Create NDArray and initialize with 1 Assign all elements of an NDArray to a scalar *source:* -[MXNet/src/ndarray.jl:155](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L155) +[MXNet/src/ndarray.jl:155](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L155) --- @@ -116,7 +165,7 @@ Get the shape of an `NDArray`. Note the shape is converted to Julia convention. 
*source:* -[MXNet/src/ndarray.jl:84](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L84) +[MXNet/src/ndarray.jl:84](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L84) --- @@ -130,7 +179,7 @@ used in data parallelization to split mini-batch into sub-batches for different *source:* -[MXNet/src/ndarray.jl:137](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L137) +[MXNet/src/ndarray.jl:137](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L137) --- @@ -139,7 +188,7 @@ used in data parallelization to split mini-batch into sub-batches for different Create zero-ed NDArray of specific shape *source:* -[MXNet/src/ndarray.jl:105](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L105) +[MXNet/src/ndarray.jl:105](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L105) --- @@ -157,7 +206,7 @@ have shape (28,28,1,100). *source:* -[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L32) +[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L32) --- @@ -185,5 +234,5 @@ which will do inplace adding of the contents of b into a. 
*source:* -[MXNet/src/ndarray.jl:247](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/ndarray.jl#L247) +[MXNet/src/ndarray.jl:258](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L258) diff --git a/docs/api/symbol.md b/docs/api/symbol.md index 1ccd875896b9..d263bb4fcbc2 100644 --- a/docs/api/symbol.md +++ b/docs/api/symbol.md @@ -4,21 +4,30 @@ --- - -#### get_internals(self::MXNet.mx.Symbol) -Get a new grouped symbol whose output contains all the internal outputs of this symbol. + +#### Group(symbols::MXNet.mx.Symbol...) +Create a symbol that groups symbols together *source:* -[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/symbol.jl#L63) +[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/symbol.jl#L77) --- - -#### group(symbols::MXNet.mx.Symbol...) -Create a symbol that groups symbols together + +#### Variable(name::Union{AbstractString, Symbol}) +Create a symbolic variable with the given name + +*source:* +[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/symbol.jl#L70) + +--- + + +#### get_internals(self::MXNet.mx.Symbol) +Get a new grouped symbol whose output contains all the internal outputs of this symbol. *source:* -[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/symbol.jl#L77) +[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/symbol.jl#L63) --- @@ -33,14 +42,5 @@ Most operators do not have Auxiliary states. 
*source:* -[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/symbol.jl#L58) - ---- - - -#### variable(name::Union{AbstractString, Symbol}) -Create a symbolic variable with the given name - -*source:* -[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/92237212c594440455567ec4d39caec909ddd11e/src/symbol.jl#L70) +[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/symbol.jl#L58) diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index 4e6a6757fb67..b910e2c3d6b6 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -41,17 +41,18 @@ function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, s lr = get_learning_rate(self.opts.lr_scheduler, self.iter) grad_scale = self.opts.grad_scale / self.batch_size + grad = grad_scale * grad + if self.opts.grad_clip > 0 + grad = clip(grad, -self.opts.grad_clip, self.opts.grad_clip) + end + if isa(state, Void) - @inplace weight += -lr * (grad_scale * grad + self.opts.weight_decay * weight) + @inplace weight += -lr * (grad + self.opts.weight_decay * weight) else mom = state :: NDArray coef = get_momentum(self.opts.mom_scheduler, self.iter) - @inplace mom .*= coef - if self.opts.grad_clip > 0 - # TODO: - else - @inplace mom += -lr * (grad_scale * grad + self.opts.weight_decay * weight) - end - @inplace weight += mom + @inplace mom .*= coef + @inplace mom .+= -lr * (grad + self.opts.weight_decay * weight) + @inplace weight .+= mom end end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 565846154b0b..9a6f916ab556 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -215,6 +215,21 @@ function test_saveload() rm(fname) end +function test_clip() + dims = rand_dims() + info("NDArray::clip::dims = $dims") + + j_array, nd_array = rand_tensors(dims) + clip_up = maximum(abs(j_array)) / 2 + clip_down = 0 + clipped = mx.clip(nd_array, 
clip_down, clip_up) + + # make sure the original array is not modified + @test reldiff(copy(nd_array), j_array) < 1e-6 + + @test all(clip_down .<= copy(clipped) .<= clip_up) +end + ################################################################################ # Run tests @@ -228,5 +243,6 @@ test_mul() test_div() test_gd() test_saveload() +test_clip() end From 9d138da15de27ff86b72c3156770e9e0cba141fa Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 24 Oct 2015 22:10:19 -0400 Subject: [PATCH 110/630] save and load for symbol --- src/ndarray.jl | 14 +++++++++----- src/symbol.jl | 18 ++++++++++++++++++ test/unittest/ndarray.jl | 22 ++++++++++++++++------ 3 files changed, 43 insertions(+), 11 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 2ae28b3a48a4..00edc6c7c671 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -370,7 +370,7 @@ end Either `Dict{Base.Symbol, NDArray}` or `Vector{NDArray}`. """ -function load_ndarrays(filename::AbstractString) +function load(filename::AbstractString, ::Type{NDArray}) out_size = Ref{MX_uint}(0) out_hdrs = Ref{Ptr{MX_handle}}(0) out_name_size = Ref{MX_uint}(0) @@ -395,14 +395,14 @@ end * `filename`: path to the binary file to write to. * `data`: an `NDArray`, or a `Vector{NDArray}` or a `Dict{Base.Symbol, NDArray}`. 
""" -function save_ndarrays(filename::AbstractString, data::NDArray) - save_ndarrays(filename, [data]) +function save(filename::AbstractString, data::NDArray) + save(filename, [data]) end -function save_ndarrays(filename::AbstractString, data::Vector{NDArray}) +function save(filename::AbstractString, data::Vector{NDArray}) @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), filename, length(data), MX_handle[data...], char_pp(0)) end -function save_ndarrays(filename::AbstractString, data::Dict{Base.Symbol,NDArray}) +function save(filename::AbstractString, data::Dict{Base.Symbol,NDArray}) names = [k for k in keys(data)] arrays = MX_handle[data[k] for k in names] names = AbstractString[string(k) for k in names] @@ -425,6 +425,10 @@ end ACCEPT_EMPTY_MUTATE_TARGET = (1 << 2) ) +# Import corresponding math functions from base so the automatically defined libmxnet +# functions can overload them +import Base: sqrt + """ Import dynamic functions for NDArrays. The arguments to the functions are typically ordered as diff --git a/src/symbol.jl b/src/symbol.jl index 097fe1d5b20a..a1f008c29cd3 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -229,12 +229,30 @@ function _compose!(sym :: Symbol, name :: Union{Base.Symbol, char_p}, args::Symb return sym end +"""Save Symbol into a JSON string""" function to_json(self :: Symbol) ref_json = Ref{char_p}(0) @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) return bytestring(ref_json[]) end +"""Load Symbol from a JSON string representation.""" +function from_json(repr :: AbstractString, ::Type{Symbol}) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolCreateFromJSON, (char_p, Ref{MX_handle}), repr, ref_hdr) + return Symbol(MX_SymbolHandle(ref_hdr[])) +end + +"""Load Symbol from a JSON file.""" +function load(filename :: AbstractString, ::Type{Symbol}) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolCreateFromFile, (char_p, Ref{MX_handle}), filename, ref_hdr) + return 
Symbol(MX_SymbolHandle(ref_hdr[])) +end +function save(filename :: AbstractString, sym :: Symbol) + @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), sym, filename) +end + ################################################################################ # Atomic Symbol functions dynamically imported from libmxnet ################################################################################ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 9a6f916ab556..cfab1dea0d86 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -184,8 +184,8 @@ function test_saveload() # save and load a single array dims = rand_dims() j_array, nd_array = rand_tensors(dims) - mx.save_ndarrays(fname, nd_array) - data = mx.load_ndarrays(fname) + mx.save(fname, nd_array) + data = mx.load(fname, mx.NDArray) @test isa(data, Vector{mx.NDArray}) @test length(data) == 1 @test reldiff(copy(data[1]), j_array) < 1e-6 @@ -193,8 +193,8 @@ function test_saveload() # save and load N arrays of different shape arrays = [rand_tensors(rand_dims()) for i = 1:n_arrays] nd_arrays = [x[2] for x in arrays] - mx.save_ndarrays(fname, nd_arrays) - data = mx.load_ndarrays(fname) + mx.save(fname, nd_arrays) + data = mx.load(fname, mx.NDArray) @test isa(data, Vector{mx.NDArray}) @test length(data) == n_arrays for i = 1:n_arrays @@ -204,8 +204,8 @@ function test_saveload() # save and load dictionary of ndarrays names = [symbol("array$i") for i = 1:n_arrays] dict = Dict([n => v for (n,v) in zip(names, nd_arrays)]) - mx.save_ndarrays(fname, dict) - data = mx.load_ndarrays(fname) + mx.save(fname, dict) + data = mx.load(fname, mx.NDArray) @test isa(data, Dict{Symbol, mx.NDArray}) @test length(data) == n_arrays for i = 1:n_arrays @@ -230,6 +230,15 @@ function test_clip() @test all(clip_down .<= copy(clipped) .<= clip_up) end +function test_sqrt() + dims = rand_dims() + info("NDArray::sqrt::dims = $dims") + + j_array, nd_array = rand_tensors(dims) + sqrt_ed = sqrt(nd_array) + @test 
reldiff(copy(sqrt_ed), sqrt(j_array)) < 1e-6 +end + ################################################################################ # Run tests @@ -244,5 +253,6 @@ test_div() test_gd() test_saveload() test_clip() +test_sqrt() end From 30f89a8efa4b30d96a97245cbc51a66bff48707e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 24 Oct 2015 22:13:25 -0400 Subject: [PATCH 111/630] unit test for symbol save and load --- test/unittest/symbol.jl | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/test/unittest/symbol.jl b/test/unittest/symbol.jl index fecb25c159a8..3397b7eaf70d 100644 --- a/test/unittest/symbol.jl +++ b/test/unittest/symbol.jl @@ -69,6 +69,18 @@ function test_infer_shape_error() @test_throws mx.MXError mx.infer_shape(model, data=data_shape, fc1_weight=weight_shape) end +function test_saveload() + info("Symbol::saveload::mlp2") + + model = mlp2() + fname = tempname() + mx.save(fname, model) + model2 = mx.load(fname, mx.Symbol) + @test mx.to_json(model) == mx.to_json(model2) + + rm(fname) +end + ################################################################################ # Run tests @@ -78,5 +90,6 @@ test_internal() test_compose() test_infer_shape() test_infer_shape_error() +test_saveload() end From 7e86c8f7f001977f5429762112824b0a7d50e32f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 00:09:46 -0400 Subject: [PATCH 112/630] improve defstruct macro --- src/base.jl | 25 ++++++++++++++++--------- 1 file changed, 16 insertions(+), 9 deletions(-) diff --git a/src/base.jl b/src/base.jl index cf940c9ef232..5e547de727c1 100644 --- a/src/base.jl +++ b/src/base.jl @@ -157,35 +157,42 @@ the keyword arguments. 
""" macro defstruct(name, super_name, fields) @assert fields.head == :tuple - fields = fields.args + fields = fields.args @assert length(fields) > 0 - name = esc(name) + name = esc(name) + super_name = esc(super_name) field_defs = Array(Expr, length(fields)) # :(field2 :: Int) - field_names = Array(Base.Symbol, length(fields)) # :field2 - field_defaults = Array(Expr, length(fields)) # :(field2 :: Int = 0) + field_names = Array(Expr, length(fields)) # :field2 + field_defaults = Array(Expr, length(fields)) # :(field2 = 0) + field_types = Array(Expr, length(fields)) # Int field_asserts = Array(Expr, length(fields)) # :(field2 >= 0) for i = 1:length(fields) field = fields[i] if field.head == :tuple - field_asserts[i] = field.args[2] + field_asserts[i] = esc(field.args[2]) field = field.args[1] end - field_defs[i] = esc(field.args[1]) - field_names[i] = field.args[1].args[1] - field_defaults[i] = Expr(:kw, field.args...) + field_defs[i] = esc(field.args[1]) + field_names[i] = esc(field.args[1].args[1]) + field_types[i] = esc(field.args[1].args[2]) + field_defaults[i] = Expr(:kw, field.args[1].args[1], esc(field.args[2])) end # body of layer type, defining fields type_body = Expr(:block, field_defs...) # constructor + converts = map(zip(field_names, field_types)) do param + f_name, f_type = param + :($f_name = convert($f_type, $f_name)) + end asserts = map(filter(i -> isdefined(field_asserts,i), 1:length(fields))) do i :(@assert($(field_asserts[i]))) end construct = Expr(:call, name, field_names...) 
- ctor_body = Expr(:block, asserts..., construct) + ctor_body = Expr(:block, converts..., asserts..., construct) ctor_def = Expr(:call, name, Expr(:parameters, field_defaults...)) ctor = Expr(:(=), ctor_def, ctor_body) From 00bd457366d3e7bc12ca7507bbeb3eeba87df872 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 00:10:15 -0400 Subject: [PATCH 113/630] callback interface --- src/MXNet.jl | 1 + src/callback.jl | 48 ++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 49 insertions(+) create mode 100644 src/callback.jl diff --git a/src/MXNet.jl b/src/MXNet.jl index 0e56b743d5d5..9f404a077aed 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -25,6 +25,7 @@ include("initializer.jl") include("io.jl") include("kvstore.jl") +include("callback.jl") include("estimator.jl") include("util.jl") diff --git a/src/callback.jl b/src/callback.jl new file mode 100644 index 000000000000..c9e5a39c3693 --- /dev/null +++ b/src/callback.jl @@ -0,0 +1,48 @@ +"Abstract type of callback functions used in training" +abstract AbstractCallback + +"Abstract type of callbacks to be called every mini-batch" +abstract AbstractIterationCallback + +"Abstract type of callbacks to be called every epoch" +abstract AbstractEpochCallback + +type CallbackParams + batch_size :: Int + curr_epoch :: Int + curr_iter :: Int +end +CallbackParams(batch_size::Int) = CallbackParams(batch_size, 0, 0) + +type IterationCallback + frequency :: Int + call_on_0 :: Bool + callback :: Function +end + +function every_n_iter(callback :: Function, n :: Int, call_on_0 :: Bool = false) + IterationCallback(n, call_on_0, callback) +end +function Base.call(cb :: IterationCallback, param :: CallbackParams) + if param.curr_iter == 0 + if cb.call_on_0 + cb.callback(param) + end + elseif param.curr_iter % cb.frequency == 0 + cb.callback(param) + end +end + +function speedometer(frequency::Int=50) + cl_tic = 0 + every_n_iter(frequency, true) do params :: CallbackParams + if param.curr_iter == 0 + # reset 
counter + cl_tic = time() + else + speed = frequency * params.batch_size / (time() - cl_tic) + info("Speed: {1:>6.2} samples/sec", speed) + cl_tic = time() + end + end +end From 06879c6ab6233529e42fcc96a1c01cecbd006d40 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 00:28:06 -0400 Subject: [PATCH 114/630] fix estimator API --- README.md | 2 +- docs/tutorials/mnist.md | 6 ++-- examples/cifar10/cifar10.jl | 2 +- examples/mnist/lenet.jl | 2 +- examples/mnist/mlp.jl | 2 +- src/callback.jl | 22 ++++++++++++- src/estimator.jl | 63 ++++++++++++++++++++++++++----------- 7 files changed, 72 insertions(+), 27 deletions(-) diff --git a/README.md b/README.md index acf0fca121fb..c184eac57178 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ estimator = mx.FeedForward(mlp, context=mx.cpu()) optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) +mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) ``` For more details, please refer to the [document](http://mxnetjl.readthedocs.org/) and [examples](examples). diff --git a/docs/tutorials/mnist.md b/docs/tutorials/mnist.md index 219d8936f686..97a12e8aadeb 100644 --- a/docs/tutorials/mnist.md +++ b/docs/tutorials/mnist.md @@ -55,9 +55,9 @@ The last thing we need to specify is the optimization algorithm (a.k.a. *optimiz ```julia optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) ``` -Now we can do the training. Here the `epoch_stop` parameter specifies that we want to train for 20 epochs. We also supply a `eval_data` to monitor validation accuracy on the validation set. +Now we can do the training. Here the `n_epoch` parameter specifies that we want to train for 20 epochs. We also supply a `eval_data` to monitor validation accuracy on the validation set. 
```julia -mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) +mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) ``` Here is a sample output ``` @@ -130,7 +130,7 @@ estimator = mx.FeedForward(lenet, context=mx.gpu()) optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) +mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) ``` And here is a sample of running outputs: ``` diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index 50d399e32d5e..968781a5257c 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -82,5 +82,5 @@ optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), weight_decay=0.0001) # fit parameters -mx.fit(estimator, optimizer, train_provider, epoch_stop=num_epoch, eval_data=test_provider, +mx.fit(estimator, optimizer, train_provider, n_epoch=num_epoch, eval_data=test_provider, initializer=mx.UniformInitializer(0.07)) diff --git a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl index af631e44c3bc..92f41e88aeb6 100644 --- a/examples/mnist/lenet.jl +++ b/examples/mnist/lenet.jl @@ -42,4 +42,4 @@ estimator = mx.FeedForward(lenet, context=mx.gpu()) optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) +mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 66e1bdac49a8..26aa4ecfda49 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -34,4 +34,4 @@ estimator = mx.FeedForward(mlp, context=mx.cpu()) optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(estimator, optimizer, train_provider, epoch_stop=20, eval_data=eval_provider) +mx.fit(estimator, 
optimizer, train_provider, n_epoch=20, eval_data=eval_provider) diff --git a/src/callback.jl b/src/callback.jl index c9e5a39c3693..9a2af79ded42 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -14,7 +14,7 @@ type CallbackParams end CallbackParams(batch_size::Int) = CallbackParams(batch_size, 0, 0) -type IterationCallback +type IterationCallback <: AbstractIterationCallback frequency :: Int call_on_0 :: Bool callback :: Function @@ -46,3 +46,23 @@ function speedometer(frequency::Int=50) end end end + + +type EpochCallback <: AbstractEpochCallback + frequency :: Int + call_on_0 :: Bool + callback :: Function +end + +function every_n_epoch(callback :: Function, n :: Int, call_on_0 :: Bool = false) + EpochCallback(n, call_on_0, callback) +end +function Base.call(cb :: EpochCallback, param :: CallbackParams) + if param.curr_epoch == 0 + if cb.call_on_0 + cb.callback(param) + end + elseif param.curr_epoch % cb.frequency == 0 + cb.callback(param) + end +end diff --git a/src/estimator.jl b/src/estimator.jl index 42402d0a2bdf..fc0e55af052a 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -92,12 +92,25 @@ function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params : return (kv, update_on_kvstore) end -function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; - initializer :: AbstractInitializer = UniformInitializer(0.01), - epoch_stop :: Int = 10, epoch_start :: Int = 1, - eval_data :: Union{Void, AbstractDataProvider} = nothing, - eval_metric :: AbstractEvalMetric = Accuracy(), - kvstore :: Union{Base.Symbol, KVStore} = :local) +@defstruct TrainingOptions Any ( + initializer :: AbstractInitializer = UniformInitializer(0.01), + n_epoch :: Int = 10, + eval_data :: Union{Void, AbstractDataProvider} = nothing, + eval_metric :: AbstractEvalMetric = Accuracy(), + kvstore :: Union{Base.Symbol, KVStore} = :local, + callbacks :: Vector{AbstractCallback} = AbstractCallback[], +) + +function 
_invoke_callbacks(callbacks::Vector{AbstractCallback}, param::CallbackParams, type_filter::Type) + map(callbacks) do cb + if isa(cb, type_filter) + cb(param) + end + end +end + +function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) + opts = TrainingOptions(; kwargs...) info("Start training on $(self.ctx)") @@ -107,9 +120,10 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # initialize parameters info("Initializing parameters...") - arg_names, param_names, aux_names = _init_params(self, data, initializer) + arg_names, param_names, aux_names = _init_params(self, data, opts.initializer) # setup kvstore + kvstore = opts.kvstore if isa(kvstore, Base.Symbol) info("Creating KVStore...") kvstore, update_on_kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params) @@ -139,6 +153,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra grad_arrays = [NDArray[exec.grad_arrays[i] for exec in train_execs] for i in param_idx] optimizer.batch_size = batch_size + cb_param = CallbackParams(batch_size) if !update_on_kvstore updater = get_updater(optimizer) @@ -169,11 +184,19 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra cpu_label_arrays = [empty(shape, cpu_dev) for (name,shape) in provide_label(data)] cpu_label_arrays_full_slice = [SlicedNDArray[(1:batch_size, x)] for x in cpu_label_arrays] + # invoke callbacks on epoch 0 + _invoke_callbacks(opts.callbacks, cb_param, AbstractEpochCallback) + # now start training... 
- for i_epoch = epoch_start:epoch_stop + for i_epoch = 1:opts.n_epoch time_start = time() - reset!(eval_metric) - n_batch = 0 + reset!(opts.eval_metric) + + cb_param.curr_epoch = i_epoch + cb_param.curr_iter = 0 + + # invoke callbacks on iteration 0 + _invoke_callbacks(opts.callbacks, cb_param, AbstractIterationCallback) for batch in data load_data!(batch, data_arrays) @@ -218,30 +241,32 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end end - n_batch += 1 + # invoke callbacks after finishing each iteration + _invoke_callbacks(opts.callbacks, cb_param, AbstractIterationCallback) + cb_param.curr_iter += 1 # update evaluation metric on training set load_label!(batch, cpu_label_arrays_full_slice) - update!(eval_metric, cpu_label_arrays, cpu_output_arrays) + update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) end # end of one epoch time_stop = time() info(format("== Epoch {1:0>3d} ==========", i_epoch)) info("## Training summary") - for (name, value) in get(eval_metric) + for (name, value) in get(opts.eval_metric) info(format("{1:>15s} = {2:.4f}", name, value)) end info(format("{1:>15s} = {2:.4f} seconds", "time", time_stop-time_start)) # evaluation on validation set - if !isa(eval_data, Void) + if !isa(opts.eval_data, Void) # because we are re-using the memory allocated for the training network, # the batch_size of the validation dataset must be the same as the training # batch_size - @assert(get_batch_size(eval_data) == batch_size) + @assert(get_batch_size(opts.eval_data) == batch_size) - reset!(eval_metric) - for batch in eval_data + reset!(opts.eval_metric) + for batch in opts.eval_data load_data!(batch, data_arrays) # forward and backward @@ -254,11 +279,11 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end end load_label!(batch, cpu_label_arrays_full_slice) - update!(eval_metric, cpu_label_arrays, cpu_output_arrays) + update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) 
end info("## Validation summary") - for (name, value) in get(eval_metric) + for (name, value) in get(opts.eval_metric) info(format("{1:>15s} = {2:.4f}", name, value)) end end From f551c91cd5ea3eb59ea11630fbdb48ca485cbf20 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 00:29:00 -0400 Subject: [PATCH 115/630] add speedometer to cifar10 --- examples/cifar10/cifar10.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index 968781a5257c..7b335d3b1219 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -83,4 +83,4 @@ optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), # fit parameters mx.fit(estimator, optimizer, train_provider, n_epoch=num_epoch, eval_data=test_provider, - initializer=mx.UniformInitializer(0.07)) + initializer=mx.UniformInitializer(0.07), callbacks=[mx.speedometer()]) From cb067c34dd5ad73c9435cd30ece24416a2969f03 Mon Sep 17 00:00:00 2001 From: pluskid Date: Sun, 25 Oct 2015 00:36:32 -0400 Subject: [PATCH 116/630] fix speedometer --- src/callback.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/callback.jl b/src/callback.jl index 9a2af79ded42..f2a3ef93058f 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -2,10 +2,10 @@ abstract AbstractCallback "Abstract type of callbacks to be called every mini-batch" -abstract AbstractIterationCallback +abstract AbstractIterationCallback <: AbstractCallback "Abstract type of callbacks to be called every epoch" -abstract AbstractEpochCallback +abstract AbstractEpochCallback <: AbstractCallback type CallbackParams batch_size :: Int @@ -35,13 +35,13 @@ end function speedometer(frequency::Int=50) cl_tic = 0 - every_n_iter(frequency, true) do params :: CallbackParams + every_n_iter(frequency, true) do param :: CallbackParams if param.curr_iter == 0 # reset counter cl_tic = time() else - speed = frequency * params.batch_size / (time() - cl_tic) - 
info("Speed: {1:>6.2} samples/sec", speed) + speed = frequency * param.batch_size / (time() - cl_tic) + info(format("Speed: {1:>6.2f} samples/sec", speed)) cl_tic = time() end end From f7d7ce3a107d7884e03f77410e60a57ace8f6b52 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 01:16:47 -0400 Subject: [PATCH 117/630] save checkpoints in callbacks --- src/callback.jl | 21 ++++++++++++++------- src/estimator.jl | 44 +++++++++++++++++++++++++++++++++++++++----- src/ndarray.jl | 5 ++++- 3 files changed, 57 insertions(+), 13 deletions(-) diff --git a/src/callback.jl b/src/callback.jl index f2a3ef93058f..c124c9b9357c 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -20,7 +20,7 @@ type IterationCallback <: AbstractIterationCallback callback :: Function end -function every_n_iter(callback :: Function, n :: Int, call_on_0 :: Bool = false) +function every_n_iter(callback :: Function, n :: Int; call_on_0 :: Bool = false) IterationCallback(n, call_on_0, callback) end function Base.call(cb :: IterationCallback, param :: CallbackParams) @@ -33,9 +33,9 @@ function Base.call(cb :: IterationCallback, param :: CallbackParams) end end -function speedometer(frequency::Int=50) +function speedometer(;frequency::Int=50) cl_tic = 0 - every_n_iter(frequency, true) do param :: CallbackParams + every_n_iter(frequency, call_on_0=true) do param :: CallbackParams if param.curr_iter == 0 # reset counter cl_tic = time() @@ -54,15 +54,22 @@ type EpochCallback <: AbstractEpochCallback callback :: Function end -function every_n_epoch(callback :: Function, n :: Int, call_on_0 :: Bool = false) +function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end -function Base.call(cb :: EpochCallback, param :: CallbackParams) +function Base.call(cb :: EpochCallback, estimator :: Any, param :: CallbackParams) if param.curr_epoch == 0 if cb.call_on_0 - cb.callback(param) + cb.callback(estimator, param) end elseif 
param.curr_epoch % cb.frequency == 0 - cb.callback(param) + cb.callback(estimator, param) + end +end + +function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) + mkpath(dirname(prefix)) + every_n_epoch(frequency, call_on_0=save_epoch_0) do estimator, param + save_checkpoint(estimator, prefix, param) end end diff --git a/src/estimator.jl b/src/estimator.jl index fc0e55af052a..ad436ef359bf 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -101,10 +101,15 @@ end callbacks :: Vector{AbstractCallback} = AbstractCallback[], ) -function _invoke_callbacks(callbacks::Vector{AbstractCallback}, param::CallbackParams, type_filter::Type) +function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, param::CallbackParams, type_filter::Type) map(callbacks) do cb if isa(cb, type_filter) - cb(param) + if type_filter == AbstractEpochCallback + # epoch callback have extra access to the estimator object + cb(self, param) + else + cb(param) + end end end end @@ -151,6 +156,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra param_arrays = [NDArray[exec.arg_arrays[i] for exec in train_execs] for i in param_idx] grad_arrays = [NDArray[exec.grad_arrays[i] for exec in train_execs] for i in param_idx] + aux_arrays = [NDArray[exec.aux_arrays[i] for exec in train_execs] for i = 1:length(aux_names)] optimizer.batch_size = batch_size cb_param = CallbackParams(batch_size) @@ -185,7 +191,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra cpu_label_arrays_full_slice = [SlicedNDArray[(1:batch_size, x)] for x in cpu_label_arrays] # invoke callbacks on epoch 0 - _invoke_callbacks(opts.callbacks, cb_param, AbstractEpochCallback) + _invoke_callbacks(self, opts.callbacks, cb_param, AbstractEpochCallback) # now start training... 
for i_epoch = 1:opts.n_epoch @@ -196,7 +202,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra cb_param.curr_iter = 0 # invoke callbacks on iteration 0 - _invoke_callbacks(opts.callbacks, cb_param, AbstractIterationCallback) + _invoke_callbacks(self, opts.callbacks, cb_param, AbstractIterationCallback) for batch in data load_data!(batch, data_arrays) @@ -242,7 +248,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end # invoke callbacks after finishing each iteration - _invoke_callbacks(opts.callbacks, cb_param, AbstractIterationCallback) + _invoke_callbacks(self, opts.callbacks, cb_param, AbstractIterationCallback) cb_param.curr_iter += 1 # update evaluation metric on training set @@ -287,5 +293,33 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra info(format("{1:>15s} = {2:.4f}", name, value)) end end + + if i_epoch == opts.n_epoch || any(map(x->isa(x, AbstractEpochCallback), opts.callbacks)) + # copy data back to cpu + for (name, weights) in zip(param_names, param_arrays) + # average parameters across devices + weight = +([copy(w, cpu()) for w in weights]...) / length(weights) + copy!(self.arg_params[name], weight) + end + for (name, aux_devs) in zip(aux_names, aux_arrays) + aux_avg = +([copy(aux, cpu()) for aux in aux_devs]...) 
/ length(aux_devs) + copy!(self.aux_params[name], aux_avg) + end + end + _invoke_callbacks(self, opts.callbacks, cb_param, AbstractEpochCallback) end # end of all epochs end + +function save_checkpoint(self :: FeedForward, prefix :: AbstractString, param :: CallbackParams) + save_checkpoint(self.arch, self.arg_params, self.aux_params, prefix, param.curr_epoch) +end +function save_checkpoint(sym :: Symbol, arg_params :: Dict{Base.Symbol, NDArray}, + aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, epoch :: Int) + save("$prefix-symbol.json", sym) + save_dict = merge(Dict([symbol("arg:$k") => v for (k,v) in arg_params]), + Dict([symbol("aux:$k") => v for (k,v) in aux_params])) + save_filename = format("{1}-{2:04d}.params", prefix, epoch) + save(save_filename, save_dict) + info("Saved checkpoint to '$save_filename'") +end + diff --git a/src/ndarray.jl b/src/ndarray.jl index 00edc6c7c671..40d4c233e2e0 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -346,11 +346,14 @@ function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) _div(dst, arg, dst) end end -import Base: ./ +import Base: ./, / function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) div_from!(ret, arg) end +function /(arg0 :: NDArray, arg :: Real) + ./(arg0, arg) +end #------------------------------------------------------------ # IO From 5ea6b8cf46fb47e606e4c2409d7d974cd30cfaeb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 12:27:32 -0400 Subject: [PATCH 118/630] array iterator and (part of) the unittest --- src/estimator.jl | 5 ++ src/io.jl | 121 +++++++++++++++++++++++++++++++++++++++++++- src/ndarray.jl | 7 ++- test/unittest/io.jl | 33 ++++++++++++ 4 files changed, 163 insertions(+), 3 deletions(-) diff --git a/src/estimator.jl b/src/estimator.jl index ad436ef359bf..c18eb5f3a6d4 100644 --- a/src/estimator.jl +++ b/src/estimator.jl @@ -7,6 +7,8 @@ type FeedForward <: AbstractEstimator arg_params :: Dict{Base.Symbol, NDArray} 
aux_params :: Dict{Base.Symbol, NDArray} + pred_exec :: Executor + # leave the rest fields undefined FeedForward(arch :: Symbol, ctx :: Vector{Context}) = new(arch, ctx) end @@ -114,6 +116,9 @@ function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback end end +function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) + fit(self, optimizer, data; kwargs...) +end function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) opts = TrainingOptions(; kwargs...) diff --git a/src/io.jl b/src/io.jl index 78fb7cefc22c..389b5a954fe4 100644 --- a/src/io.jl +++ b/src/io.jl @@ -32,8 +32,8 @@ which translates into ```julia state = Base.start(provider) -while !done(provider, state) - (batch, state) = next(provider, state) +while !Base.done(provider, state) + (batch, state) = Base.next(provider, state) # ... load_data!(batch, targets) end @@ -110,6 +110,123 @@ Return the number of *dummy samples* in this mini-batch. 
abstract AbstractDataBatch +################################################################################ +# ArrayDataProvider +################################################################################ +"A convenient tool to iterate `NDArray` or Julia `Array`" +type ArrayDataProvider <: AbstractDataProvider + data_arrays :: Vector{Array{MX_float}} + data_names :: Vector{Base.Symbol} + label_arrays:: Vector{Array{MX_float}} + label_names :: Vector{Base.Symbol} + batch_size :: Int + sample_count:: Int + shuffle :: Bool +end + +function ArrayDataProvider{T<:Real}(data::Union{NDArray,Array{T}}; batch_size::Int=1, shuffle::Bool=false) + ArrayDataProvider(:data => data, batch_size=batch_size, shuffle=shuffle) +end +function ArrayDataProvider(data::Pair; batch_size=1, shuffle::Bool=false) + ArrayDataProvider(Pair[data], Pair[], batch_size=batch_size, shuffle=shuffle) +end +function ArrayDataProvider{T<:Real}(data::Union{NDArray,Array{T}}, label::Union{NDArray,Array{T}}; + batch_size::Int=1, shuffle::Bool=false) + ArrayDataProvider(:data => data, :softmax_label => label, batch_size=batch_size, shuffle=shuffle) +end +function ArrayDataProvider(data::Pair, label::Pair; batch_size=1, shuffle::Bool=false) + ArrayDataProvider([data], [label], batch_size=batch_size, shuffle=shuffle) +end +function ArrayDataProvider(data::Vector{Pair}, label::Vector{Pair}; batch_size::Int=1, shuffle::Bool=false) + + data_names = Base.Symbol[x[1] for x in data] + data_arrays = Array{MX_float}[x[2] for x in data] + + label_names = Base.Symbol[x[1] for x in label] + label_arrays= Array{MX_float}[x[2] for x in label] + + @assert length(data) > 0 + sample_count = size(data_arrays[1])[end] + for i = 1:length(data_names) + @assert(size(data_arrays[i])[end] == sample_count, + "Number of samples in $(data_names[i]) is mismatch with $(data_names[1])") + end + for i = 1:length(label_names) + @assert(size(label_arrays[i])[end] == sample_count, + "Number of samples in $(label_names[i]) is mismatch 
with $(label_names[1])") + end + + ArrayDataProvider(data_arrays, data_names, label_arrays, label_names, batch_size, sample_count, shuffle) +end + +function provide_data(provider::ArrayDataProvider) + return collect(zip(provider.data_names, map(size, provider.data_arrays))) +end +function provide_label(provider::ArrayDataProvider) + return collect(zip(provider.label_names, map(size, provider.label_arrays))) +end +get_batch_size(provider::ArrayDataProvider) = provider.batch_size + +immutable ArrayDataProviderState <: AbstractDataProviderState + curr_idx :: Int +end + +function Base.eltype(provider :: ArrayDataProvider) + ArrayDataProviderState +end + +function _shuffle_array(arr::Array, idx::Vector{Int}) + shape = size(arr) + colons = [Colon() for c = 1:length(shape)-1] + getindex(arr, colons..., idx) +end +function Base.start(provider :: ArrayDataProvider) + if provider.shuffle + # re-shuffle all data + idx_perm = randperm(provider.sample_count) + provider.data_arrays = map(x->_shuffle_array(x,idx_perm), provider.data_arrays) + provider.label_arrays = map(x->_shuffle_array(x,idx_perm), provider.label_arrays) + end + + return ArrayDataProviderState(1) +end + +function Base.done(provider::ArrayDataProvider, state :: ArrayDataProviderState) + return state.curr_idx > provider.sample_count +end + +immutable ArrayDataBatch <: AbstractDataBatch + provider :: ArrayDataProvider + idx :: UnitRange{Int} +end +function Base.next(provider :: ArrayDataProvider, state :: ArrayDataProviderState) + idx = state.curr_idx:min(state.curr_idx+provider.batch_size, provider.sample_count) + return (ArrayDataBatch(provider, idx), ArrayDataProviderState(idx.stop+1)) +end + +function get_pad(batch :: ArrayDataBatch) + return batch.provider.batch_size - length(batch.idx) +end + +function _load_general!(batch :: ArrayDataBatch, sources :: Vector{Array{MX_float}}, + targets :: Vector{Vector{SlicedNDArray}}) + @assert length(sources) == length(targets) + for (src, tgt) in zip(sources, targets) + 
src_colons = [Colon() for i = 1:ndims(src)-1] + for (slice_idx, dst) in tgt + copy!(dst, getindex(src, src_colons..., batch.idx[slice_idx])) + end + end +end +function load_data!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) + _load_general!(batch, batch.provider.data_arrays, targets) +end +function load_label!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) + _load_general!(batch, batch.provider.label_arrays, targets) +end + + + ################################################################################ # MXDataProvider ################################################################################ diff --git a/src/ndarray.jl b/src/ndarray.jl index 40d4c233e2e0..133ebb2a8991 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -181,7 +181,7 @@ end #------------------------------------------------------------ # Copying functions #------------------------------------------------------------ -import Base: copy!, copy +import Base: copy!, copy, convert "Copy data between NDArrays" function copy!(dst :: NDArray, src :: NDArray) @assert(dst.writable) @@ -230,6 +230,11 @@ function copy{T<:Real}(arr :: Array{T}, ctx :: Context) copy!(dst, arr) end +"Convert copy: NDArray -> Julia Array" +function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) + convert(t, copy(arr)) +end + #------------------------------------------------------------ # Basic arithmetics diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 0e9e33f12244..07e3b29a324a 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -2,6 +2,8 @@ module TestIO using MXNet using Base.Test +using ..Main: rand_dims, reldiff + function test_mnist() info("IO::MNIST") filenames = mx.get_mnist_ubyte() @@ -39,6 +41,37 @@ function test_mnist() @test n_batch == 60000 / batch_size end +function test_arrays_impl(data::Vector, label::Vector, provider::mx.ArrayDataProvider) + data = convert(Vector{Array{Float64}}, data) + label = convert(Vector{Array{Float64}}, label) + 
+ sample_count = size(data[1])[end] + batch_size = mx.get_batch_size(provider) + idx_all = 1:batch_size:sample_count + + info("IO::Array::#data=$(length(data)),#label=$(length(label)),batch_size=$batch_size") + for (idx, batch) in zip(idx_all, provider) + data_batch = [x[[Colon() for i=1:ndims(x)-1]..., idx:min(idx+batch_size,sample_count)] for x in data] + data_get = [mx.empty(size(x)[1:end-1]..., batch_size) for x in data] + mx.load_data!(batch, [[(1:batch_size, x)] for x in data_get]) + + for (d_real, d_get) in zip(data_batch, data_batch) + @test reldiff(d_real, copy(d_get)[[1:n for n in size(d_real)]...]) < 1e-6 + @test mx.get_pad(batch) == batch_size - size(d_get)[end] + end + end +end + +function test_arrays() + sample_count = 15 + batch_size = 4 + dims_data = [rand_dims()..., sample_count] + data = rand(dims_data...) + provider = mx.ArrayDataProvider(data, batch_size=batch_size) + test_arrays_impl(Array[data], [], provider) +end + +test_arrays() test_mnist() end From b4cc05cf829a22468caad45eafa65a32770ab4d9 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 20:49:45 -0400 Subject: [PATCH 119/630] more test on array data iterator --- src/io.jl | 60 +++++++++++++++++++++++++++++++-------------- test/unittest/io.jl | 5 ++++ 2 files changed, 47 insertions(+), 18 deletions(-) diff --git a/src/io.jl b/src/io.jl index 389b5a954fe4..5e6cc8b6a449 100644 --- a/src/io.jl +++ b/src/io.jl @@ -124,26 +124,50 @@ type ArrayDataProvider <: AbstractDataProvider shuffle :: Bool end -function ArrayDataProvider{T<:Real}(data::Union{NDArray,Array{T}}; batch_size::Int=1, shuffle::Bool=false) - ArrayDataProvider(:data => data, batch_size=batch_size, shuffle=shuffle) -end -function ArrayDataProvider(data::Pair; batch_size=1, shuffle::Bool=false) - ArrayDataProvider(Pair[data], Pair[], batch_size=batch_size, shuffle=shuffle) -end -function ArrayDataProvider{T<:Real}(data::Union{NDArray,Array{T}}, label::Union{NDArray,Array{T}}; - batch_size::Int=1, 
shuffle::Bool=false) - ArrayDataProvider(:data => data, :softmax_label => label, batch_size=batch_size, shuffle=shuffle) -end -function ArrayDataProvider(data::Pair, label::Pair; batch_size=1, shuffle::Bool=false) - ArrayDataProvider([data], [label], batch_size=batch_size, shuffle=shuffle) -end -function ArrayDataProvider(data::Vector{Pair}, label::Vector{Pair}; batch_size::Int=1, shuffle::Bool=false) - data_names = Base.Symbol[x[1] for x in data] - data_arrays = Array{MX_float}[x[2] for x in data] +# Julia's type system is sometimes very frustrating. You cannot specify a function +# with argument Vector{Pair} to expect to be matched when calling with the parameter +# [:foo => zeros(2,3), :bar => zeros(3)] because the type inference gives very specific +# results, about the parametric type in the Pair{T1,T2} type, thus does not match the +# generic Pair type. In general, Int <: Number but Vector{Int} <: Vector{Number} is not +# true. So let us just use Any here... +function ArrayDataProvider(data::Any; batch_size::Int=1, shuffle::Bool=false) + ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle) +end +function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bool=false) + if isa(data, Union{NDArray, Array}) + data_names = [:data] + data_arrays = Array{MX_float}[data] + elseif isa(data, Pair) + @assert isa(data.first, Base.Symbol) && isa(data.second, Union{NDArray, Array}) + data_names = [data.first] + data_arrays = Array{MX_float}[data.second] + elseif isa(data, Vector) + map(data) do d + @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) + end + data_names = Base.Symbol[d.first for d in data] + data_arrays = Array{MX_float}[d.second for d in data] + else + error("Invalid data argument type") + end - label_names = Base.Symbol[x[1] for x in label] - label_arrays= Array{MX_float}[x[2] for x in label] + if isa(label, Union{NDArray, Array}) + label_names = [:softmax_label] + label_arrays = 
Array{MX_float}[data] + elseif isa(label, Pair) + @assert isa(label.first, Base.Symbol) && isa(label.second, Union{NDArray, Array}) + label_names = [label.first] + label_arrays = Array{MX_float}[label.second] + elseif isa(label, Vector) + map(label) do d + @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) + end + label_names = Base.Symbol[d.first for d in label] + label_arrays = Array{MX_float}[d.second for d in label] + else + error("Invalid label argument type") + end @assert length(data) > 0 sample_count = size(data_arrays[1])[end] diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 07e3b29a324a..9eb0581b5eb3 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -69,6 +69,11 @@ function test_arrays() data = rand(dims_data...) provider = mx.ArrayDataProvider(data, batch_size=batch_size) test_arrays_impl(Array[data], [], provider) + + dims_label = [rand_dims()..., sample_count] + label = rand(dims_label...) + provider = mx.ArrayDataProvider(data, label, batch_size=batch_size) + test_arrays_impl(Array[data], Array[label], provider) end test_arrays() From 2c9d9aef77fbf6e8e2061066ba7723e8be0f48e6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 20:59:27 -0400 Subject: [PATCH 120/630] more tests on array data provider --- src/io.jl | 14 +++++++------- test/unittest/io.jl | 16 ++++++++++++---- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/src/io.jl b/src/io.jl index 5e6cc8b6a449..efdd4687bff1 100644 --- a/src/io.jl +++ b/src/io.jl @@ -135,14 +135,14 @@ function ArrayDataProvider(data::Any; batch_size::Int=1, shuffle::Bool=false) ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle) end function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bool=false) - if isa(data, Union{NDArray, Array}) + if isa(data, Union{NDArray, Array}) && eltype(data) <: Real data_names = [:data] data_arrays = Array{MX_float}[data] elseif isa(data, Pair) @assert 
isa(data.first, Base.Symbol) && isa(data.second, Union{NDArray, Array}) data_names = [data.first] data_arrays = Array{MX_float}[data.second] - elseif isa(data, Vector) + elseif isa(data, Vector) || isa(data, Tuple) map(data) do d @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) end @@ -152,14 +152,14 @@ function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bo error("Invalid data argument type") end - if isa(label, Union{NDArray, Array}) + if isa(label, Union{NDArray, Array}) && eltype(label) <: Real label_names = [:softmax_label] - label_arrays = Array{MX_float}[data] + label_arrays = Array{MX_float}[label] elseif isa(label, Pair) @assert isa(label.first, Base.Symbol) && isa(label.second, Union{NDArray, Array}) label_names = [label.first] label_arrays = Array{MX_float}[label.second] - elseif isa(label, Vector) + elseif isa(label, Vector) || isa(label, Tuple) map(label) do d @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) end @@ -169,7 +169,7 @@ function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bo error("Invalid label argument type") end - @assert length(data) > 0 + @assert length(data_arrays) > 0 sample_count = size(data_arrays[1])[end] for i = 1:length(data_names) @assert(size(data_arrays[i])[end] == sample_count, @@ -177,7 +177,7 @@ function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bo end for i = 1:length(label_names) @assert(size(label_arrays[i])[end] == sample_count, - "Number of samples in $(label_names[i]) is mismatch with $(label_names[1])") + "Number of samples in $(label_names[i]) is mismatch with $(data_names[1])") end ArrayDataProvider(data_arrays, data_names, label_arrays, label_names, batch_size, sample_count, shuffle) diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 9eb0581b5eb3..cb7f9edbf7fd 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -66,14 +66,22 @@ function 
test_arrays() sample_count = 15 batch_size = 4 dims_data = [rand_dims()..., sample_count] - data = rand(dims_data...) - provider = mx.ArrayDataProvider(data, batch_size=batch_size) + data = rand(dims_data...) + provider = mx.ArrayDataProvider(data, batch_size=batch_size) test_arrays_impl(Array[data], [], provider) dims_label = [rand_dims()..., sample_count] - label = rand(dims_label...) - provider = mx.ArrayDataProvider(data, label, batch_size=batch_size) + label = rand(dims_label...) + provider = mx.ArrayDataProvider(data, label, batch_size=batch_size) test_arrays_impl(Array[data], Array[label], provider) + + provider = mx.ArrayDataProvider(:data=>data, :my_label=>label, batch_size=batch_size) + test_arrays_impl(Array[data], Array[label], provider) + + dims_data2 = [rand_dims()..., sample_count] + data2 = rand(dims_data2...) + provider = mx.ArrayDataProvider((:data=>data, :data2=>data2), label, batch_size=batch_size) + test_arrays_impl(Array[data,data2], Array[label], provider) end test_arrays() From dc8ca183623ff2ff84ee8b441110e38bd731ea01 Mon Sep 17 00:00:00 2001 From: Yeesian Ng Date: Sun, 25 Oct 2015 21:31:28 -0400 Subject: [PATCH 121/630] fix README example --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index c184eac57178..d856165308eb 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,7 @@ mlp = @mx.chain mx.Variable(:data) => # data provider batch_size = 100 +include(Pkg.dir("MXNet") * "/examples/mnist/mnist-data.jl") train_provider, eval_provider = get_mnist_providers(batch_size) # setup estimator From eb8440215fa75470d06dda5ff9f9c69346a075fe Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 21:49:56 -0400 Subject: [PATCH 122/630] test shuffling of array data provider --- src/io.jl | 44 ++++++++++++++++++++++++++++---------------- src/ndarray.jl | 5 ++++- test/unittest/io.jl | 33 ++++++++++++++++++++++++++++++--- 3 files changed, 62 insertions(+), 20 deletions(-) diff --git a/src/io.jl b/src/io.jl 
index efdd4687bff1..5ffcf95f0bef 100644 --- a/src/io.jl +++ b/src/io.jl @@ -115,13 +115,15 @@ abstract AbstractDataBatch ################################################################################ "A convenient tool to iterate `NDArray` or Julia `Array`" type ArrayDataProvider <: AbstractDataProvider - data_arrays :: Vector{Array{MX_float}} - data_names :: Vector{Base.Symbol} - label_arrays:: Vector{Array{MX_float}} - label_names :: Vector{Base.Symbol} - batch_size :: Int - sample_count:: Int - shuffle :: Bool + data_arrays :: Vector{Array{MX_float}} + data_names :: Vector{Base.Symbol} + label_arrays :: Vector{Array{MX_float}} + label_names :: Vector{Base.Symbol} + batch_size :: Int + sample_count :: Int + shuffle :: Bool + data_padding :: MX_float + label_padding :: MX_float end @@ -131,10 +133,10 @@ end # results, about the parametric type in the Pair{T1,T2} type, thus does not match the # generic Pair type. In general, Int <: Number but Vector{Int} <: Vector{Number} is not # true. So let us just use Any here... 
-function ArrayDataProvider(data::Any; batch_size::Int=1, shuffle::Bool=false) - ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle) +function ArrayDataProvider(data::Any; batch_size::Int=1, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) + ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle, data_padding=data_padding, label_padding=label_padding) end -function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bool=false) +function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) if isa(data, Union{NDArray, Array}) && eltype(data) <: Real data_names = [:data] data_arrays = Array{MX_float}[data] @@ -180,7 +182,8 @@ function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bo "Number of samples in $(label_names[i]) is mismatch with $(data_names[1])") end - ArrayDataProvider(data_arrays, data_names, label_arrays, label_names, batch_size, sample_count, shuffle) + ArrayDataProvider(data_arrays, data_names, label_arrays, label_names, batch_size, + sample_count, shuffle, data_padding, label_padding) end function provide_data(provider::ArrayDataProvider) @@ -224,7 +227,7 @@ immutable ArrayDataBatch <: AbstractDataBatch idx :: UnitRange{Int} end function Base.next(provider :: ArrayDataProvider, state :: ArrayDataProviderState) - idx = state.curr_idx:min(state.curr_idx+provider.batch_size, provider.sample_count) + idx = state.curr_idx:min(state.curr_idx+provider.batch_size-1, provider.sample_count) return (ArrayDataBatch(provider, idx), ArrayDataProviderState(idx.stop+1)) end @@ -233,20 +236,29 @@ function get_pad(batch :: ArrayDataBatch) end function _load_general!(batch :: ArrayDataBatch, sources :: Vector{Array{MX_float}}, - targets :: Vector{Vector{SlicedNDArray}}) + targets :: Vector{Vector{SlicedNDArray}}, pad_val::Real) @assert length(sources) == length(targets) for (src, tgt) in zip(sources, targets) 
src_colons = [Colon() for i = 1:ndims(src)-1] for (slice_idx, dst) in tgt - copy!(dst, getindex(src, src_colons..., batch.idx[slice_idx])) + if slice_idx.start > length(batch.idx) + dst[:] = pad_val + else + slice_idx0 = slice_idx.start:min(slice_idx.stop, length(batch.idx)) + copy!(dst[1:length(slice_idx0)], getindex(src, src_colons..., batch.idx[slice_idx0])) + if length(slice_idx0) < length(slice_idx) + # need padding + dst[length(slice_idx0)+1:length(slice_idx)] = pad_val + end + end end end end function load_data!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - _load_general!(batch, batch.provider.data_arrays, targets) + _load_general!(batch, batch.provider.data_arrays, targets, batch.provider.data_padding) end function load_label!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - _load_general!(batch, batch.provider.label_arrays, targets) + _load_general!(batch, batch.provider.label_arrays, targets, batch.provider.label_padding) end diff --git a/src/ndarray.jl b/src/ndarray.jl index 133ebb2a8991..2156ce76be14 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -140,6 +140,9 @@ end function slice(arr :: NDArray, slice::UnitRange{Int}) dim1 = size(arr)[end] @assert(1 <= slice.start <= slice.stop <= dim1) + if slice.start == 1 && slice.stop == dim1 + return arr + end hdr_ref = Ref{MX_handle}(0) # note Julia is 1-based, inclusive-inclusive indexing, while C++ is @@ -164,7 +167,7 @@ function setindex!(arr :: NDArray, val :: NDArray, ::Colon) copy!(arr, val) end function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, idx::UnitRange{Int}) - copy!(slice(arr, idx), val) + setindex!(slice(arr, idx), val, Colon()) end import Base: getindex diff --git a/test/unittest/io.jl b/test/unittest/io.jl index cb7f9edbf7fd..cffb00417a57 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -51,13 +51,13 @@ function test_arrays_impl(data::Vector, label::Vector, provider::mx.ArrayDataPro 
info("IO::Array::#data=$(length(data)),#label=$(length(label)),batch_size=$batch_size") for (idx, batch) in zip(idx_all, provider) - data_batch = [x[[Colon() for i=1:ndims(x)-1]..., idx:min(idx+batch_size,sample_count)] for x in data] + data_batch = [x[[Colon() for i=1:ndims(x)-1]..., idx:min(idx+batch_size-1,sample_count)] for x in data] data_get = [mx.empty(size(x)[1:end-1]..., batch_size) for x in data] mx.load_data!(batch, [[(1:batch_size, x)] for x in data_get]) - for (d_real, d_get) in zip(data_batch, data_batch) + for (d_real, d_get) in zip(data_batch, data_get) @test reldiff(d_real, copy(d_get)[[1:n for n in size(d_real)]...]) < 1e-6 - @test mx.get_pad(batch) == batch_size - size(d_get)[end] + @test mx.get_pad(batch) == batch_size - size(d_real)[end] end end end @@ -84,6 +84,33 @@ function test_arrays() test_arrays_impl(Array[data,data2], Array[label], provider) end +function test_arrays_shuffle() + info("IO::Array::shuffle") + + sample_count = 15 + batch_size = 4 + data = rand(1, sample_count) + label = collect(1:sample_count) + provider = mx.ArrayDataProvider(data, :index => label, batch_size=batch_size, shuffle=true) + + idx_all = 1:batch_size:sample_count + data_got = similar(data) + label_got = similar(label) + for (idx, batch) in zip(idx_all, provider) + data_batch = [(1:batch_size, mx.empty(1,batch_size))] + label_batch = [(1:batch_size, mx.empty(batch_size))] + mx.load_data!(batch, typeof(data_batch)[data_batch]) + mx.load_label!(batch, typeof(label_batch)[label_batch]) + data_got[idx:min(idx+batch_size-1,sample_count)] = copy(data_batch[1][2])[1:batch_size-mx.get_pad(batch)] + label_got[idx:min(idx+batch_size-1,sample_count)] = copy(label_batch[1][2])[1:batch_size-mx.get_pad(batch)] + end + + @test label_got != label + @test sort(label_got) == label + @test reldiff(data_got, data[:,Int[label_got...]]) < 1e-6 +end + +test_arrays_shuffle() test_arrays() test_mnist() From 1538340e3a6874c4380e7b0b5d44aab3ee992f53 Mon Sep 17 00:00:00 2001 From: Chiyuan 
Zhang Date: Sun, 25 Oct 2015 22:03:56 -0400 Subject: [PATCH 123/630] rename estimator -> model --- README.md | 6 +++--- docs/tutorials/mnist.md | 10 +++++----- docs/user-guide/overview.md | 4 ++-- examples/cifar10/cifar10.jl | 4 ++-- examples/mnist/lenet.jl | 4 ++-- examples/mnist/mlp.jl | 6 +++--- src/MXNet.jl | 2 +- src/callback.jl | 10 +++++----- src/{estimator.jl => model.jl} | 6 +++--- 9 files changed, 26 insertions(+), 26 deletions(-) rename src/{estimator.jl => model.jl} (98%) diff --git a/README.md b/README.md index c184eac57178..33a347ceeb78 100644 --- a/README.md +++ b/README.md @@ -26,14 +26,14 @@ mlp = @mx.chain mx.Variable(:data) => batch_size = 100 train_provider, eval_provider = get_mnist_providers(batch_size) -# setup estimator -estimator = mx.FeedForward(mlp, context=mx.cpu()) +# setup model +model = mx.FeedForward(mlp, context=mx.cpu()) # optimizer optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) ``` For more details, please refer to the [document](http://mxnetjl.readthedocs.org/) and [examples](examples). diff --git a/docs/tutorials/mnist.md b/docs/tutorials/mnist.md index 97a12e8aadeb..ecf7bab8f631 100644 --- a/docs/tutorials/mnist.md +++ b/docs/tutorials/mnist.md @@ -45,9 +45,9 @@ train_provider, eval_provider = get_mnist_providers(batch_size) ``` If you need to write your own data providers for customized data format, please refer to **TODO**: pointer to data provider API. -Given the architecture and data, we can instantiate an *estimator* to do the actual training. `mx.FeedForward` is the built-in estimator that is suitable for most feed-forward architectures. When constructing the estimator, we also specify the *context* on which the computation should be carried out. Because this is a really tiny MLP, we will just run on a single CPU device. 
+Given the architecture and data, we can instantiate an *model* to do the actual training. `mx.FeedForward` is the built-in model that is suitable for most feed-forward architectures. When constructing the model, we also specify the *context* on which the computation should be carried out. Because this is a really tiny MLP, we will just run on a single CPU device. ```julia -estimator = mx.FeedForward(mlp, context=mx.cpu()) +model = mx.FeedForward(mlp, context=mx.cpu()) ``` You can use a `mx.gpu()` or if a list of devices (e.g. `[mx.gpu(0), mx.gpu(1)]`) is provided, data-parallelization will be used automatically. But for this tiny example, using a GPU device might not help. @@ -57,7 +57,7 @@ optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) ``` Now we can do the training. Here the `n_epoch` parameter specifies that we want to train for 20 epochs. We also supply a `eval_data` to monitor validation accuracy on the validation set. ```julia -mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) ``` Here is a sample output ``` @@ -124,13 +124,13 @@ Note we specified `flat=false` to tell the data provider to provide 4D tensors i ```julia #-------------------------------------------------------------------------------- # fit model -estimator = mx.FeedForward(lenet, context=mx.gpu()) +model = mx.FeedForward(lenet, context=mx.gpu()) # optimizer optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) ``` And here is a sample of running outputs: ``` diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md index 3155b281245d..7689b3fba921 100644 --- a/docs/user-guide/overview.md +++ b/docs/user-guide/overview.md @@ -130,9 +130,9 @@ copy(c_exec.outputs[1]) # copy 
turns NDArray into Julia Array # 8.0 # 8.0 ``` -For neural networks, it is easier to use `simple_bind`. By providing the shape for input arguments, it will perform a shape inference for the rest of the arguments and create the `NDArray`s automatically. In practice, the binding and executing steps are hidden under the `Estimator` interface. +For neural networks, it is easier to use `simple_bind`. By providing the shape for input arguments, it will perform a shape inference for the rest of the arguments and create the `NDArray`s automatically. In practice, the binding and executing steps are hidden under the `Model` interface. -**TODO** Provide pointers to estimator tutorial and further details about binding and symbolic API. +**TODO** Provide pointers to model tutorial and further details about binding and symbolic API. # Low Level Interface diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index 7b335d3b1219..c46e2b3bf15c 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -74,7 +74,7 @@ test_provider = mx.ImageRecordProvider(label_name=:loss_label, #-------------------------------------------------------------------------------- # Training model gpus = [mx.Context(mx.GPU, i) for i = 0:num_gpus-1] -estimator = mx.FeedForward(softmax, context=gpus) +model = mx.FeedForward(softmax, context=gpus) # optimizer optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), @@ -82,5 +82,5 @@ optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), weight_decay=0.0001) # fit parameters -mx.fit(estimator, optimizer, train_provider, n_epoch=num_epoch, eval_data=test_provider, +mx.fit(model, optimizer, train_provider, n_epoch=num_epoch, eval_data=test_provider, initializer=mx.UniformInitializer(0.07), callbacks=[mx.speedometer()]) diff --git a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl index 92f41e88aeb6..804fc1ea5312 100644 --- a/examples/mnist/lenet.jl +++ b/examples/mnist/lenet.jl @@ -36,10 +36,10 @@ 
train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) #-------------------------------------------------------------------------------- # fit model -estimator = mx.FeedForward(lenet, context=mx.gpu()) +model = mx.FeedForward(lenet, context=mx.gpu()) # optimizer optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 26aa4ecfda49..2dbae9790c3f 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -27,11 +27,11 @@ batch_size = 100 include("mnist-data.jl") train_provider, eval_provider = get_mnist_providers(batch_size) -# setup estimator -estimator = mx.FeedForward(mlp, context=mx.cpu()) +# setup model +model = mx.FeedForward(mlp, context=mx.cpu()) # optimizer optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(estimator, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) diff --git a/src/MXNet.jl b/src/MXNet.jl index 9f404a077aed..53553d417941 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -26,7 +26,7 @@ include("io.jl") include("kvstore.jl") include("callback.jl") -include("estimator.jl") +include("model.jl") include("util.jl") diff --git a/src/callback.jl b/src/callback.jl index c124c9b9357c..049f19432981 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -57,19 +57,19 @@ end function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end -function Base.call(cb :: EpochCallback, estimator :: Any, param :: CallbackParams) +function Base.call(cb :: EpochCallback, model :: Any, param :: CallbackParams) if param.curr_epoch == 0 if cb.call_on_0 - cb.callback(estimator, param) + cb.callback(model, 
param) end elseif param.curr_epoch % cb.frequency == 0 - cb.callback(estimator, param) + cb.callback(model, param) end end function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) mkpath(dirname(prefix)) - every_n_epoch(frequency, call_on_0=save_epoch_0) do estimator, param - save_checkpoint(estimator, prefix, param) + every_n_epoch(frequency, call_on_0=save_epoch_0) do model, param + save_checkpoint(model, prefix, param) end end diff --git a/src/estimator.jl b/src/model.jl similarity index 98% rename from src/estimator.jl rename to src/model.jl index c18eb5f3a6d4..77b435dd829c 100644 --- a/src/estimator.jl +++ b/src/model.jl @@ -1,6 +1,6 @@ -abstract AbstractEstimator +abstract AbstractModel -type FeedForward <: AbstractEstimator +type FeedForward <: AbstractModel arch :: Symbol ctx :: Vector{Context} @@ -107,7 +107,7 @@ function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback map(callbacks) do cb if isa(cb, type_filter) if type_filter == AbstractEpochCallback - # epoch callback have extra access to the estimator object + # epoch callback have extra access to the model object cb(self, param) else cb(param) From 28791db37922d16a466be6a2ed9ae4726623acf6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 22:05:59 -0400 Subject: [PATCH 124/630] use joinpath in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 4e2d849e035e..8e148c2d7529 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ mlp = @mx.chain mx.Variable(:data) => # data provider batch_size = 100 -include(Pkg.dir("MXNet") * "/examples/mnist/mnist-data.jl") +include(joinpath(Pkg.dir("MXNet"), "/examples/mnist/mnist-data.jl")) train_provider, eval_provider = get_mnist_providers(batch_size) # setup model From 1baeabc4eaae8f75da6884b1d9cd5ee638163fed Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 22:36:07 -0400 Subject: [PATCH 125/630] expose init_model 
API --- src/model.jl | 50 ++++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 40 insertions(+), 10 deletions(-) diff --git a/src/model.jl b/src/model.jl index 77b435dd829c..df364bef3410 100644 --- a/src/model.jl +++ b/src/model.jl @@ -7,7 +7,7 @@ type FeedForward <: AbstractModel arg_params :: Dict{Base.Symbol, NDArray} aux_params :: Dict{Base.Symbol, NDArray} - pred_exec :: Executor + pred_exec :: Union{Executor, Void} # leave the rest fields undefined FeedForward(arch :: Symbol, ctx :: Vector{Context}) = new(arch, ctx) @@ -38,19 +38,28 @@ function FeedForward(arch :: Symbol; context :: Union{Context, Vector{Context}, FeedForward(arch, context) end -function _init_params(self :: FeedForward, data :: AbstractDataProvider, initializer) +"""Initialize the weights in the model. + +This method will be called automatically when training a model. So there is usually no +need to call this method unless one needs to inspect a model with only randomly initialized +weights. + +**Parameters** + +* `self`: the model to be initialized +* `initializer`: an `AbstractInitializer` +* `input_shapes`: the shape of all data and label inputs to this model, given as keyword arguments. +""" +function init_model(self :: FeedForward, initializer :: AbstractInitializer; input_shapes...) # all arg names, including data, label, and parameters arg_names = list_arguments(self.arch) - data_shapes = provide_data(data) - label_shapes = provide_label(data) - data_names = [x[1] for x in data_shapes] - label_names = [x[1] for x in label_shapes] + input_names = [x[1] for x in input_shapes] - param_names = setdiff(arg_names, data_names ∪ label_names) + param_names = setdiff(arg_names, input_names) aux_names = list_auxiliary_states(self.arch) - arg_shapes, out_shapes, aux_shapes = infer_shape(self.arch; data_shapes...) + arg_shapes, out_shapes, aux_shapes = infer_shape(self.arch; input_shapes...) 
if !isdefined(self, :arg_params) param_name_shapes = filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) self.arg_params = Dict([name => empty(shape) for (name,shape) in param_name_shapes]) @@ -70,6 +79,10 @@ function _init_params(self :: FeedForward, data :: AbstractDataProvider, initial return (arg_names, param_names, aux_names) end +function _init_model(self :: FeedForward, data :: AbstractDataProvider, initializer :: AbstractInitializer) + init_model(self, initializer; [provide_data(data)..., provide_label(data)...]...) +end + function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) if num_device == 1 && !ismatch(r"dist", string(kv_type)) kv = nothing @@ -116,6 +129,23 @@ function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback end end +function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_shapes...) + if !isdefined(self, :pred_exec) || isa(self.pred_exec, Void) || overwrite + if !isdefined(self, :arg_params) || !isdefined(self, :aux_params) + @assert(false, "Model weights not defined, please init or train the model, or load from file") + end + else + # make sure the new setup is compatible with the existing one + for (d_name, d_shape) in data_shapes + @assert(d_shape == size(self.pred_exec.arg_dict[d_name]), + "Shape of $d_name mismatch with existing predictor, use overwrite=true overwrite existing predictor") + end + end +end + +function predict(self :: FeedForward, data :: AbstractDataProvider) +end + function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) fit(self, optimizer, data; kwargs...) 
end @@ -130,7 +160,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # initialize parameters info("Initializing parameters...") - arg_names, param_names, aux_names = _init_params(self, data, opts.initializer) + arg_names, param_names, aux_names = _init_model(self, data, opts.initializer) # setup kvstore kvstore = opts.kvstore @@ -299,7 +329,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end end - if i_epoch == opts.n_epoch || any(map(x->isa(x, AbstractEpochCallback), opts.callbacks)) + if i_epoch == opts.n_epoch || any(x->isa(x, AbstractEpochCallback), opts.callbacks) # copy data back to cpu for (name, weights) in zip(param_names, param_arrays) # average parameters across devices From 7fa151104fb51d7134da60a5084dfa0d240515f0 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 25 Oct 2015 23:22:05 -0400 Subject: [PATCH 126/630] model predict API and demo --- examples/mnist/mlp.jl | 22 ++++++++++++++++++++++ src/executor.jl | 2 +- src/io.jl | 17 +++++++++++++++++ src/model.jl | 32 +++++++++++++++++++++++++++++++- 4 files changed, 71 insertions(+), 2 deletions(-) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 2dbae9790c3f..0834739467e9 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -35,3 +35,25 @@ optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit parameters mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) + +#-------------------------------------------------------------------------------- +# Optional, demonstration of the predict API +outputs = mx.predict(model, eval_provider) +probs = outputs[1] + +# collect all labels from eval data +labels = Array[] +for batch in eval_provider + push!(labels, copy(mx.get_label(batch)[1])) +end +labels = cat(1, labels...) 
+ +# Now we use compute the accuracy +correct = 0 +for i = 1:length(labels) + # labels are 0...9 + if indmax(probs[:,i]) == labels[i]+1 + correct += 1 + end +end +println(mx.format("Accuracy on eval set: {1:.2f}%", 100correct/length(labels))) diff --git a/src/executor.jl b/src/executor.jl index fe83db86a831..4d57c1da7b7c 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -108,7 +108,7 @@ function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_W arg_arrays = NDArray[zeros(shape, ctx) for shape in arg_shapes] arg_names = list_arguments(self) if grad_req == GRAD_NOP - grad_arrays = nothing + grad_arrays = Dict{Base.Symbol,NDArray}() else provided_data_names = [x[1] for x in kwargs] grad_arrays = Dict{Base.Symbol,NDArray}() diff --git a/src/io.jl b/src/io.jl index 5ffcf95f0bef..dcbefb145859 100644 --- a/src/io.jl +++ b/src/io.jl @@ -106,9 +106,26 @@ get_pad(batch :: AbstractDataBatch) ``` Return the number of *dummy samples* in this mini-batch. + +The Batch type should have a field named `provider` pointing to the underlying provider. Helper functions +`get_data` and `get_label` (mainly for debug purpose) will be able to use this. """ abstract AbstractDataBatch +function _get_data_or_label(batch::AbstractDataBatch, provide_func::Function, loader::Function) + data_shapes = provide_func(batch.provider) + data_arrays = [mx.empty(x[2]) for x in data_shapes] + batch_size = get_batch_size(batch.provider) + data_arrays_fake_slice = [SlicedNDArray[(1:batch_size, x)] for x in data_arrays] + loader(batch, data_arrays_fake_slice) + return data_arrays +end +function get_data(batch :: AbstractDataBatch) + _get_data_or_label(batch, provide_data, load_data!) +end +function get_label(batch :: AbstractDataBatch) + _get_data_or_label(batch, provide_label, load_label!) 
+end ################################################################################ # ArrayDataProvider diff --git a/src/model.jl b/src/model.jl index df364bef3410..6cc7097b28ce 100644 --- a/src/model.jl +++ b/src/model.jl @@ -134,6 +134,10 @@ function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_sha if !isdefined(self, :arg_params) || !isdefined(self, :aux_params) @assert(false, "Model weights not defined, please init or train the model, or load from file") end + + # the predictor use only the first device + self.pred_exec = simple_bind(self.arch, self.ctx[1]; grad_req=GRAD_NOP, data_shapes...) + copy_params_from(self.pred_exec, self.arg_params, self.aux_params) else # make sure the new setup is compatible with the existing one for (d_name, d_shape) in data_shapes @@ -143,7 +147,33 @@ function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_sha end end -function predict(self :: FeedForward, data :: AbstractDataProvider) +function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=false) + data_shapes = provide_data(data) + data_names = [x[1] for x in data_shapes] + _setup_predictor(self, overwrite; data_shapes...) + + batch_size = get_batch_size(data) + data_arrays = [SlicedNDArray[(1:batch_size, self.pred_exec.arg_dict[name])] for name in data_names] + output_list = [Array{MX_float}[] for i=1:length(self.pred_exec.outputs)] + for batch in data + load_data!(batch, data_arrays) + forward(self.pred_exec, is_train=false) + for (o_list, o_nd) in zip(output_list, self.pred_exec.outputs) + push!(o_list, copy(slice(o_nd, 1:batch_size-get_pad(batch)))) + end + end + + if isempty(output_list) + # maybe model does not have outputs + return Array{MX_float}[] + end + if isempty(output_list[1]) + # model has outputs, but maybe data provider is empty + return output_list + end + # concatenate along mini-batches + output_arrays = [cat(ndims(x[1]), x...) 
for x in output_list] + return output_arrays end function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) From bb4620e6bc228a7b124f038ac7cbe5de939c5d0b Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Oct 2015 16:35:33 -0400 Subject: [PATCH 127/630] model checkpoint loading --- docs/api/ndarray.md | 51 ++++++++++++++++++++-------------- docs/api/symbol.md | 35 ++++++++++++++++++++--- src/model.jl | 67 +++++++++++++++++++++++++++++++++++++++------ 3 files changed, 120 insertions(+), 33 deletions(-) diff --git a/docs/api/ndarray.md b/docs/api/ndarray.md index 87e664ffb0cc..fae68df32c2b 100644 --- a/docs/api/ndarray.md +++ b/docs/api/ndarray.md @@ -32,7 +32,16 @@ object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs wi *source:* -[MXNet/src/ndarray.jl:453](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L453) +[MXNet/src/ndarray.jl:468](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L468) + +--- + + +#### convert{T<:Real}(t::Type{Array{T<:Real, N}}, arr::MXNet.mx.NDArray) +Convert copy: NDArray -> Julia Array + +*source:* +[MXNet/src/ndarray.jl:237](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L237) --- @@ -41,7 +50,7 @@ object (`NDArray`) is returned. 
Otherwise, a tuple containing all the outputs wi Copy data from NDArray to Julia Array *source:* -[MXNet/src/ndarray.jl:198](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L198) +[MXNet/src/ndarray.jl:201](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L201) --- @@ -50,7 +59,7 @@ Copy data from NDArray to Julia Array Copy data between NDArrays *source:* -[MXNet/src/ndarray.jl:186](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L186) +[MXNet/src/ndarray.jl:189](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L189) --- @@ -59,7 +68,7 @@ Copy data between NDArrays Copy data from Julia Array to NDArray *source:* -[MXNet/src/ndarray.jl:206](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L206) +[MXNet/src/ndarray.jl:209](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L209) --- @@ -68,7 +77,7 @@ Copy data from Julia Array to NDArray Create copy: NDArray -> Julia Array *source:* -[MXNet/src/ndarray.jl:216](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L216) +[MXNet/src/ndarray.jl:219](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L219) --- @@ -77,7 +86,7 @@ Create copy: NDArray -> Julia Array Create copy: NDArray -> NDArray in a given context *source:* -[MXNet/src/ndarray.jl:222](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L222) +[MXNet/src/ndarray.jl:225](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L225) --- @@ -86,7 +95,7 @@ Create copy: NDArray -> NDArray in a given context Create copy: Julia Array -> NDArray in a given context *source:* 
-[MXNet/src/ndarray.jl:228](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L228) +[MXNet/src/ndarray.jl:231](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L231) --- @@ -97,12 +106,12 @@ copy of the sub-array, while here we simply call `slice`, which shares the under *source:* -[MXNet/src/ndarray.jl:174](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L174) +[MXNet/src/ndarray.jl:177](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L177) --- - -#### load_ndarrays(filename::AbstractString) + +#### load(filename::AbstractString, ::Type{MXNet.mx.NDArray}) Load NDArrays from binary file. **Parameters**: @@ -120,7 +129,7 @@ Load NDArrays from binary file. *source:* -[MXNet/src/ndarray.jl:373](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L373) +[MXNet/src/ndarray.jl:384](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L384) --- @@ -129,12 +138,12 @@ Load NDArrays from binary file. Create NDArray and initialize with 1 *source:* -[MXNet/src/ndarray.jl:118](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L118) +[MXNet/src/ndarray.jl:118](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L118) --- - -#### save_ndarrays(filename::AbstractString, data::MXNet.mx.NDArray) + +#### save(filename::AbstractString, data::MXNet.mx.NDArray) Save NDarrays to binary file. **Parameters**: @@ -144,7 +153,7 @@ Save NDarrays to binary file. 
*source:* -[MXNet/src/ndarray.jl:398](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L398) +[MXNet/src/ndarray.jl:409](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L409) --- @@ -153,7 +162,7 @@ Save NDarrays to binary file. Assign all elements of an NDArray to a scalar *source:* -[MXNet/src/ndarray.jl:155](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L155) +[MXNet/src/ndarray.jl:158](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L158) --- @@ -165,7 +174,7 @@ Get the shape of an `NDArray`. Note the shape is converted to Julia convention. *source:* -[MXNet/src/ndarray.jl:84](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L84) +[MXNet/src/ndarray.jl:84](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L84) --- @@ -179,7 +188,7 @@ used in data parallelization to split mini-batch into sub-batches for different *source:* -[MXNet/src/ndarray.jl:137](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L137) +[MXNet/src/ndarray.jl:137](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L137) --- @@ -188,7 +197,7 @@ used in data parallelization to split mini-batch into sub-batches for different Create zero-ed NDArray of specific shape *source:* -[MXNet/src/ndarray.jl:105](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L105) +[MXNet/src/ndarray.jl:105](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L105) --- @@ -206,7 +215,7 @@ have shape (28,28,1,100). 
*source:* -[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L32) +[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L32) --- @@ -234,5 +243,5 @@ which will do inplace adding of the contents of b into a. *source:* -[MXNet/src/ndarray.jl:258](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/ndarray.jl#L258) +[MXNet/src/ndarray.jl:266](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L266) diff --git a/docs/api/symbol.md b/docs/api/symbol.md index d263bb4fcbc2..a17f0c261fa2 100644 --- a/docs/api/symbol.md +++ b/docs/api/symbol.md @@ -9,7 +9,7 @@ Create a symbol that groups symbols together *source:* -[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/symbol.jl#L77) +[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L77) --- @@ -18,7 +18,16 @@ Create a symbol that groups symbols together Create a symbolic variable with the given name *source:* -[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/symbol.jl#L70) +[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L70) + +--- + + +#### from_json(repr::AbstractString, ::Type{MXNet.mx.Symbol}) +Load Symbol from a JSON string representation. + +*source:* +[MXNet/src/symbol.jl:240](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L240) --- @@ -27,7 +36,7 @@ Create a symbolic variable with the given name Get a new grouped symbol whose output contains all the internal outputs of this symbol. 
*source:* -[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/symbol.jl#L63) +[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L63) --- @@ -42,5 +51,23 @@ Most operators do not have Auxiliary states. *source:* -[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/d738fba18ebf731bf4f7306d81056fc63357810a/src/symbol.jl#L58) +[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L58) + +--- + + +#### load(filename::AbstractString, ::Type{MXNet.mx.Symbol}) +Load Symbol from a JSON file. + +*source:* +[MXNet/src/symbol.jl:247](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L247) + +--- + + +#### to_json(self::MXNet.mx.Symbol) +Save Symbol into a JSON string + +*source:* +[MXNet/src/symbol.jl:233](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L233) diff --git a/src/model.jl b/src/model.jl index 6cc7097b28ce..da9f995f3707 100644 --- a/src/model.jl +++ b/src/model.jl @@ -48,9 +48,10 @@ weights. * `self`: the model to be initialized * `initializer`: an `AbstractInitializer` +* `overwrite`: keyword argument, force initialization even when weights already exists * `input_shapes`: the shape of all data and label inputs to this model, given as keyword arguments. """ -function init_model(self :: FeedForward, initializer :: AbstractInitializer; input_shapes...) +function init_model(self :: FeedForward, initializer :: AbstractInitializer; overwrite::Bool=false, input_shapes...) 
# all arg names, including data, label, and parameters arg_names = list_arguments(self.arch) @@ -59,28 +60,37 @@ function init_model(self :: FeedForward, initializer :: AbstractInitializer; inp param_names = setdiff(arg_names, input_names) aux_names = list_auxiliary_states(self.arch) + arg_defined = true + aux_defined = true + arg_shapes, out_shapes, aux_shapes = infer_shape(self.arch; input_shapes...) if !isdefined(self, :arg_params) param_name_shapes = filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) self.arg_params = Dict([name => empty(shape) for (name,shape) in param_name_shapes]) + arg_defined = false end if !isdefined(self, :aux_params) self.aux_params = Dict([name => empty(shape) for (name,shape) in zip(aux_names,aux_shapes)]) + aux_defined = false end # initialize the contents of the parameters - for (k,v) in self.arg_params - initializer(k, v) + if !arg_defined || overwrite + for (k,v) in self.arg_params + initializer(k, v) + end end - for (k,v) in self.aux_params - initializer(k, v) + if !aux_defined || overwrite + for (k,v) in self.aux_params + initializer(k, v) + end end return (arg_names, param_names, aux_names) end -function _init_model(self :: FeedForward, data :: AbstractDataProvider, initializer :: AbstractInitializer) - init_model(self, initializer; [provide_data(data)..., provide_label(data)...]...) +function _init_model(self :: FeedForward, data :: AbstractDataProvider, initializer :: AbstractInitializer, overwrite :: Bool) + init_model(self, initializer; overwrite=overwrite, [provide_data(data)..., provide_label(data)...]...) 
end function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) @@ -113,6 +123,7 @@ end eval_data :: Union{Void, AbstractDataProvider} = nothing, eval_metric :: AbstractEvalMetric = Accuracy(), kvstore :: Union{Base.Symbol, KVStore} = :local, + force_init :: Bool = false, callbacks :: Vector{AbstractCallback} = AbstractCallback[], ) @@ -190,7 +201,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # initialize parameters info("Initializing parameters...") - arg_names, param_names, aux_names = _init_model(self, data, opts.initializer) + arg_names, param_names, aux_names = _init_model(self, data, opts.initializer, opts.force_init) # setup kvstore kvstore = opts.kvstore @@ -388,3 +399,43 @@ function save_checkpoint(sym :: Symbol, arg_params :: Dict{Base.Symbol, NDArray} info("Saved checkpoint to '$save_filename'") end +function load_checkpoint(prefix :: AbstractString, epoch :: Int) + arch = load("$prefix-symbol.json", Symbol) + saved_dict = load(format("{1}-{2:04d}.params", prefix, epoch), NDArray) + arg_params = Dict{Base.Symbol, NDArray}() + aux_params = Dict{Base.Symbol, NDArray}() + for (k,v) in saved_dict + tp, name = split(string(k), ':') + name = symbol(name) + if tp == "arg" + arg_params[name] = v + else + aux_params[name] = v + end + end + + return (arch, arg_params, aux_params) +end + +function load_checkpoint(prefix :: AbstractString, epoch :: Int, ::Type{FeedForward}) + arch, arg_params, aux_params = load_checkpoint(prefix, epoch) + model = FeedForward(arch) + model.arg_params = arg_params + model.aux_params = aux_params + return model +end + +function load_checkpoint(self :: FeedForward, prefix :: AbstractString, epoch :: Int; overwrite :: Bool = true) + if isdefined(self, :arg_params) && isdefined(self, :aux_params) && !overwrite + info("model weights already exists, skip loading... 
(call with overwrite=true if needed)") + return self + end + + arch, arg_params, aux_params = load_checkpoint(prefix, epoch) + # TODO: is there better way to compare two symbols + @assert(to_json(self.arch) == to_json(arch), "Cannot load from a checkpoint with different network architecture") + self.arg_params = arg_params + self.aux_params = aux_params + return self +end + From f8a01bfaaef934be49eca2ed18f13c4bf1df7f2c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Oct 2015 16:44:40 -0400 Subject: [PATCH 128/630] allow callbacks in predict --- examples/mnist/mlp.jl | 3 +-- src/model.jl | 30 ++++++++++++++++++++++++------ 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 0834739467e9..6e40c00b6b15 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -38,8 +38,7 @@ mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) #-------------------------------------------------------------------------------- # Optional, demonstration of the predict API -outputs = mx.predict(model, eval_provider) -probs = outputs[1] +probs = mx.predict(model, eval_provider) # collect all labels from eval data labels = Array[] diff --git a/src/model.jl b/src/model.jl index da9f995f3707..80ee36d10674 100644 --- a/src/model.jl +++ b/src/model.jl @@ -158,7 +158,10 @@ function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_sha end end -function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=false) +function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = false) + predict(self, data; overwrite = overwrite, callback=callback) +end +function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=false, callback::Union{Function,Void}=nothing) data_shapes = provide_data(data) data_names = [x[1] for x in data_shapes] _setup_predictor(self, overwrite; data_shapes...) 
@@ -169,21 +172,36 @@ function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::B for batch in data load_data!(batch, data_arrays) forward(self.pred_exec, is_train=false) - for (o_list, o_nd) in zip(output_list, self.pred_exec.outputs) - push!(o_list, copy(slice(o_nd, 1:batch_size-get_pad(batch)))) + if isa(callback, Void) + # no callback, accumulate the data and return at the end + for (o_list, o_nd) in zip(output_list, self.pred_exec.outputs) + push!(o_list, copy(slice(o_nd, 1:batch_size-get_pad(batch)))) + end + else + callback(self.pred_exec.outputs) end end + if !isa(callback, Void) + # callback exists, do not accumulate data + return nothing + end + if isempty(output_list) # maybe model does not have outputs - return Array{MX_float}[] + return nothing end if isempty(output_list[1]) - # model has outputs, but maybe data provider is empty - return output_list + # maybe no output because data is empty + return length(output_list) == 1 ? output_list[1] : output_list end + # concatenate along mini-batches output_arrays = [cat(ndims(x[1]), x...) 
for x in output_list] + if length(output_arrays) == 1 + # only 1 output, return it directly, instead of a list + output_arrays = output_arrays[1] + end return output_arrays end From 0f632cc2e9aab4428635394832e422ecf5d43a57 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Oct 2015 16:45:54 -0400 Subject: [PATCH 129/630] add pretrained Inception model --- .gitignore | 1 + models/Inception/.gitignore | 2 ++ models/Inception/get.sh | 4 ++++ 3 files changed, 7 insertions(+) create mode 100644 models/Inception/.gitignore create mode 100755 models/Inception/get.sh diff --git a/.gitignore b/.gitignore index 5660a75bb89b..22b57c756f2f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.jl.cov *.jl.*.cov *.jl.mem +*.pyc data deps/src deps/usr diff --git a/models/Inception/.gitignore b/models/Inception/.gitignore new file mode 100644 index 000000000000..3eabb6e80247 --- /dev/null +++ b/models/Inception/.gitignore @@ -0,0 +1,2 @@ +Inception +Inception.zip diff --git a/models/Inception/get.sh b/models/Inception/get.sh new file mode 100755 index 000000000000..392403a82703 --- /dev/null +++ b/models/Inception/get.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +wget -c http://webdocs.cs.ualberta.ca/~bx3/data/Inception.zip +unzip Inception.zip From 5bcd859229fb67eb26a688d54d3742744d2e37c9 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Oct 2015 16:47:16 -0400 Subject: [PATCH 130/630] add example using pre-trained model to do prediction. 
--- .gitignore | 1 + .../Prediction with Pre-trained Model.ipynb | 233 ++++++++++++++++++ .../ijulia-pretrained-predict/cat.png | Bin 0 -> 123126 bytes .../ijulia-pretrained-predict/imagehelper.py | 29 +++ 4 files changed, 263 insertions(+) create mode 100644 examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb create mode 100644 examples/imagenet/ijulia-pretrained-predict/cat.png create mode 100644 examples/imagenet/ijulia-pretrained-predict/imagehelper.py diff --git a/.gitignore b/.gitignore index 22b57c756f2f..503353f37bd0 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ *.jl.*.cov *.jl.mem *.pyc +.ipynb_checkpoints data deps/src deps/usr diff --git a/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb b/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb new file mode 100644 index 000000000000..5359f41b299e --- /dev/null +++ b/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb @@ -0,0 +1,233 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Using Pretrained Inception-BatchNorm Network\n", + "\n", + "In this example we will show how to use a pretrained *Inception-BatchNorm* Network. This network is described in the paper\n", + "\n", + "> Ioffe, Sergey, and Christian Szegedy. \"Batch normalization: Accelerating deep network training by reducing internal covariate shift.\" arXiv preprint arXiv:1502.03167 (2015).\n", + "\n", + "The pre-trained Inception-BatchNorm network is able to be downloaded from [this link](http://webdocs.cs.ualberta.ca/~bx3/data/Inception.zip). Run the script `get.sh` in `models/Inception/` will download and unpack it automatically." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Let us first load and display the demo image (try to use other images you like). You will need to install `Images.jl` and `Colors.jl` to load the image." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAIAAADTED8xAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAACAAElEQVR42kz9WbMlW5Iehn2fu68VEXvvM+Rw761b062qnrvR6AEQ0E0QFKwhoQEZZaIJRsr0Jj3pp0lmepBkMhpkFCROMooQSKoJoNlo9Dygpjtk5hn23hHLBz3EqTblQ+bLSTvn7Fix3P3zb+DP/U9/qoCqqqokACooWUKvKgAAyBIRsCiCirElAEq6+/UyMiUyPSkcFS5Q640ssFTVtInSzJp1VbVGterWtFHEzEzVWmu9NxZEVESaqFes6+rbtl7Wh/PlPDxHXS7rdh1mUBMRoWYyqiqVkIJE+NbUjktvXU0BoKois0pGcBtjG+eQzCoiREwhDBYsM0UUZSwzQtRTQsSEzVAiFE6aEu4+MlE+mLGum29rVWl4RaUqp25tMumhjdKMEpTUkggSHSVjBJJSQKKqSI1yEds/56oiCSCGswBRjwIYESwgSZKNKMnMQkQEsjpbpQ5eqkA0k9Zm2Ak2yWGal/vj6aDWaK1VAUlElqa7RxYA602WzlYAVBWzMPH84cPT0/Pzh3j66vr41Xl93lpYh0DUEResAjW3QFW2TAeTYelRGd2gNMmV1CKSSTKraKomkFJJEgWSWpFVJaZZqhRtUgabOU2qk8mxlt570zZTuxYUWSJ0VG7p11zP5WtcnmI85vXpcn5cc5QEkUUVEckYIEOAEhdXVRYAj0pInW4WA1nlBBRAIVkKFuv/7/STRGaKElWAqO2PSsysNdk2z0ypQAGwIodfqwpFFXELUsnRe7Qu5qLKTVZrS+9wC7PIjHRvrYmUmSWILIWMRHjBBV6+jYrM9AiqvRwXkACYSBYCqo1KL5R7gEKrQlaG59jgGVVUqAmTopBKzUJVAXRPsFiRKCUr2JUiBZFwAshaK9KTYwt3j+DY6IOZWZWUMDURUBIoVQMplQJmIgMsJ01gxUIlBCgWIKUsJIqkCCK8igUBQQBAugupZhAACEkgCFRCTEQks3JdWzbARoZzVaiMRhXv8PUS00JVRlqjNQune7XWpmWSqQl4mOab+xvtssErY13Xeep9Xpo9qzJrOEdewpGwikqLjtLIzGCKE0RaFSlgwN2LyQQ0IGw2mRC10kKkxIRUFoogWRRPFFUoFAkUSBGBEAAiX+5fQMFKRCSSiKqI9GLKGJHuompmbapg1UhEJgFCzbwSqCREpJiQMhUmgNTeLJlEGcnKzNR6uYtIxU8qw/4vQAAiAiAiqgoQkTQzkh6XcCEE2IAkpQr7J0Qqihs8oqypmbWmqKiqXobKykhjlbQGCJCRsX/nikowI0ZERQRJsCICAPny46kZUFCYNey1CclilYCZhQRHRAYLBEWVRFWgEkWiKjNIhoeaRQ4CfTJVMc3wyCHliYoq+tAIjjVHSIxKR0FEoSyiRAvw/eQyq1gsIimwTMmqqkBp5MtXAKGieLlqsgokARawf+iiKYSqFVAGaSJSESEigGSxhLGtsAgXZjLLM1dHDcg57BznjvMlb+5O05TzzJzCrDdrfdJ+dzicjq2pmc3zPM0tWNfr+ny92LamkErpNbjGFNh0nMf1uqXT0EBWk6BnBKAKAYgsgISSUpZ746CSCohBJi0CGAJVahUJUASAA5UVFQAaDACE2pspW9c2qViSBYEkEembxxY5uF0zNmRWpReTpKq4l1eSEuUFkgQpQGaIkAKyuqn1SZuYwiksFLIg2F8Evpz73E+YiJD
c/87c+x9BsSrVZK8SHjNLwJFIpJBa5RTJFAIizABQkMwaiUBpMfHSEAgAJQnJKKiwGJUR9Nzf9hij1nWQFCGSJYVKKaEppbSZZ5ClqgCEBsmIZCJLMopQIFQNQEVFQSn58loXySqIYGwXVbbW21zNUgV0JqMqfdUx0mO459hyeCKRiWIqm5iI1X67qIpw/2OVUimRQtGMyCySBdsvNsCzUCgl9/uFVQQoBWtAZpKt1ARkaGhDlJimKgOxl2dopVcw4AEvcfhWvm5w2DPK8PTueX0zjreHw02b5jje1t3dab5ZptNhXpbj7VEmg1RvWklMkEUvF43a3Le+TYfTIjq5e4zqH7brh3VcAo6XI+GWlSRBQIUCJZRIJRPNxCSSKd0oSqaICclAlQikqhqqIsKLomo2qSUyCTGdu4hIIfeWT9jNLEhLlNPLx/AIVGpGIlnF8qrMzNT9LhFSiUqyur0UFhGA5NRuXt2aFIoAKlAle5kFQSKrivKT1pPcO1SyRAxZ6UFSlShUce7ToA93VAKNVNUCRKWBSWYlPQpSXpE05palgOE6WhNlQ1SWFwVKASNybJGgZ7nnGFtViCigJEypzcTILkCWlKhVlYkWiyQrhVWlGYwIAqIgFcjMYqqY1X4pAyxUzlFn0VwOh76gLdmNFQqiilrmgx61Dd+uW4XtzSsgun9GomoUKQEFRGSJQTS8sgSQqiBE1SpZSBEhMmGJQNV4eWBaSCBbbykFKEhVUWMxAZSUmDJSNKVJAeEukmXVFBWg00dpI1vLNcYYKOSG9z/+6nq9Pq+H+djYT6dX2Q79eHfq82SL6aFRoZSqstWqygfmSdelX7epr1PmWVQ29ZNOvdt6GZeH7fp+SJjEfn2OAtWUlcygSCNpQomQlCZqZKUWQRoJI6GZKC8hBFEAqZOKVGZkJPenqdIELiJAAkhUVonpdLCqovp+OUpZePEntbTJT9p4YaBUFeYpIMVAFrRrO8zt0A1ZQu41JzMDxZeysTcwBJiZJKsIe2k8WFDV2jsMpkgRkfAAWFMmUNm0V1UxySIhJqCJoGTvqDKcaRgjkJuVIaKZqLR0B5CZ7r73Wt26nOR8PqPQu82TWSvtEGMQJeIxRPeXF6IaCCUqMwEzrapAvNSMdMrekxQorEoUS7OGiByPergJO7hMjGBtpFCSvkWW+ojt6j60nBQpUETMhFollawmWqzMl5mWezcPUVX3yEyRBFRIFXF3AAQKUNUqeoQ26b2DfzWAiZimeFUAUJUSqqrDIUlj62jVt/CI0JRsqObcUnQKoXegmm6owMOH81Nsd3KzbP2WiK5hyFYwtKmXFCJV1KIGIjMTJa3sUOZlEnmlDofJbNZP2g9T6Xn7/JIBYYhE0UTSlCihoylLqqqEalDNFJZSKsF9UmKwBAgIzQQmIgqke6ZASmJ4JjUqARGKNFTWy1OVQtJqmtUH070iQVdIsAAECvmCKJAswgQwI9USTdSm1qd56cveEiQAIfbHtbei+5i7P6EMgCgBoyjg3rSyAFQKWa0TDC2RMUcgIpg0aYAnEtgbDKoqkHvDk1HUGptrL0TmFfO8ACaSIVXC/buw0Kz3U0/44TDv3X9rOs2iTZIwulfSKsr5UgNXqBSlPF+6SRCQTHpAoVlIAsXKiChqphTkMs3z8d6mm63aWtCpHaJiOJ08Dx+brmuMrYgGoFCqIgpIiUIFSgIoIlGAkKCQARRRJAyIvxoO8idvCblfelmVfW6ttZLKDCGLqMyqYgmKkGIWeM1GEtoEWlGeBK0MqFGIUMnIitxKxdIk1AFGWsEEvZU2QMrTE0EyIsYYbVoyfd18bO5+TURUrj5GPtOGzqKyhUO2Sgs6zfJG7Dl1/SoqUiiNjeKiIQQQaKpCLQooKFYSBJEZhKhI7mOqVEYVSoVgBVAFgTBr28b5kpOUAkVRKRKQBECkipWISPWmsuioDSEjWFuSEOE+UGXmjjWpGooiYl1Npfc+WWtqpqoREREQeQF
VCpm+X2/c625lFVgZSVGoyQ6uECZiSrUesqC1CWp+9XUdvuUYgyosyxBEVRVZKFRSlIBHoRLhUJZwYJV9aiwVUVDLTKT6fq2LiCr3ykCpqOGVhBQuWZVZqAggWWwoug+BkKqVXhpMZUpVVohKpkqmAxBjnyyIYZMeSm6cc8zzHFv1ZKq/T4WgVIYji2osdzFNAnAK1ERMBRDso1MklEVJJgIl2HtMQHWKIojAqEqKogRklItUM6OwKqSEZPDl164qBiuLxkQqnTqVkvRKd2yiKvvRkOQQwjAhJ0oSm8TVMUltPqmc7o+3t7fTYUlIjG1d16YdqZmX7ToycN02v1yv69O6Xq7ndVwu6WezqNDUDBVVYc9RFZereB7ue5O6PkauKQJRsqgAzRzR2Ix7d5Sg7qAAhAVUMUYos5FrFUoqvFStNzQx04IksKZP0rSrqEoxc6AUACI807dQdtLJ/VqJ/czuvboUCCZZEapSG4iiEV3YTCYrk41pYkpSlBFRlSQpEgOlWQFIZTnVyCpElGQgk9baPiiwsk2iXWXuy8H6rIRl9ss1zk8R7sNRXpnIAWzumWXaxaKECRYQiMqoQVICJmqAcNpPr3aS1DIxqjQxCkHyfD6f/XqJcyYFUVmshghlJiqIxRA73h5SESVV4qYVklQaZS1l+e001dw8x2dv3kh/9ywf2mwfvV7Wy/bjd1/d33/y9fXj7z99HzmAiXCwEhWZJsomaiKSRFCtDGBFUbIoyEwWSbAxan8RVEj3FYDKVAhUVhaR3VTMClCNqih2IVgVEYmiGoRV3kiVOdwRPsqlobFDGFNplqqOdC+nqaSVJ2VoiUCLoovgUHKSaloYHsft4mde5zRcZIuxuY7rdn58l3Fxv665eW1mWkSam7YJiMgRwm1jAQ7bQpCjnFoWohUpWdWAMBqJqsHK2qt5iWQ15RbuIkWpqCpDAdiCZNXm6HNvS0MrTDV1MbOpmVDTq2mG1+aMKqx1vcZ6uawbtuuW2/ryfilMELEfZcgOqUUNiVZa64AIjFkQwdRpmdGamM3uuV63MUZWqIjsu6WsYhaz9uNUMiqVYEIo0kobZMrpMNnSlpP2JtOs2mS91vMZ6/O2nmtbz170bbgkXCszUgAhM2vsM8IOgUhZFkAT0oTa2FszMwgFL7M4QGSaSSdTdYvyIIuJDO4IbAMkib0jqkhT3TGoMho1Spzo5DR3Oc1lnHK7e318Oy9XvGr3h+f1x6/v3jytsWJd5CI05IVxBSycBqZRpdSEmkDt+zsgM0NVqRqZIraDTpkECkKhpgelWBIIVr1AIUro3iXs+EdSsyiRVTvGwKqsGAFPV/W9sVBLZDGoBaBaCpru1ZWIzBiQrWSSLZMT5KT9VkMGiSyOsc6m56fL5SJ1jcv1+fky1qeLjwczg7nzSit3jxpoRWVUUZyoaVqKyNwEbWujq/n7zOdEEGKSVJYKCyPhQmFJJsBwoYhVum/BMkgJIqMchWCgZBGIsstyO+lsU1NrJczM9EwhUVQ1X71QHrVtEYMYJaUFlqdk0TR8kACQSArTQ4XpI5OBEmOfe29qFCuMTK3SaWp9mnyM8/m8rQN0SpFKyIjIrFRjCUhP94hJxSbT2aR7m/KwpDU9LG1aWuucpuxLbXM997w8T+vg0CZYsfnYGFkKahPj5JujKE0CGRFiUhmZSIWxAS9gsYhm7pdvRrkzXhqLGplRBc9wbF6DMUynIFBQCpWEYkf9k1QYKGCbzU7WhJNhkGs8nU5f/3e+9+++v/74P/+9Pz28/uh7X/vuH/7p75+vH1xMcBObm0vKgLBR1EAWhaoiYpWoQTYrorDDbJqQl82JCIDKpEKoGVFJIUHsVXefi2rvE6WJKQKidI/MEWuEu5R4VkqlhhilgV60ohVHVgFt6KLMlunMoSIpPTC6NGmtTapNBF4YEbKurAyVed18fVq3p6fz83N4lYS1i5ont1Kn5sZRXWwipMy6qCWvffZClpSTBmArXff
VtppWIau8MliKEkCrEpSCRoQSVRKejuoUJiJkZOgsfTIoRFtTnXubu5mp18ZWmwz3fZuSfZ58G3LeJENSIAgkRCg1xvB9TIgKVkFoYOeIRKSSRmwZFpGUFNVPfvpeRFVVBNZknm2aZlEdlV6RmawiKpFEI6hAwfdFTRLt1KZFpy7LwVrXaenLIststujcVRWQqiIoKM3MyH03kTbptMwwtdZUxZlJINPMBFX0UQHA1EQoUo5AFphUT8kgRo2oEbENHyPC4VtdxFK0QC+qqTSxqn3Kp8DAHacstZpvlpySEjdT//TNx29Oh+v43KbpF77xq995/a3t/VfixTDP+el9Pn+5oug1TFSUQFqbhCWEqApVCEpBFNQkKUbRBNRUVKiyw6ncUZ+fbBeFsu89SSGlXgBoLRJMHzlGjLXKS1KYLArjpbTXDj/L3vaqgsUoAUX0ZWRklRRH79Zn08msydQaqaKsqpHx+HR+fDg/f1ifn87b8/XyeF0337anwqiUjJFYSxwMSIau3aY2taHPxW2HOYXmGfTUeNnmKVpVZWaMQgmhO6q4nxgTKBAjt0hUr2xeESG6tPnQptlsnqmwxj6pCkW0qYqhTa2qSBQcTC0WWKkVzMhwz0BlRWaM3IGgApS2l899gyRqbMKmbWrTcZoW06/99BtRUoJCU2lTm+bWZ21mVA0gMhMgtYqVVbI/XEOIY1AxL733bksdlkObrc+tH+c29ePxZjocI2345uFRXok+zfOhtcX6ZNo7lTaJNJQAkEqwpBhVUVlJipRIhAQlqYAUFQlcx7au13V9Oo/rFr7GmjWSwzoFsCprs1IL+55OTBQUF1FFaNHEDjpbE2Vp/uK3Pvupm49ubj96/Orp177+G//wN/5XP/u1z/RJ3n/+8NUXX3799Te//LcPOxjDQpXb3EmqwlQBAUM15IV9pKJGiojY1Ptk3M/A/pPszXCAVQAIIRNIKVCouq8s6RjlGNuoUVraKIRgX2OWJHci0Y7gkmpUq4gkRUukJJNgkalu0vrU0FALrbE1y0JEDa/reX16OD98uLz/6vHp3dN29XTkdqGAqtpaaUKSksIhfVLth75MU9cJIkNFMxAoL5dIA5zuDg3FgHvVjn/vYyiFrNKsLIZFclQQLSpDRgD92I43S+9zaxM1rZFKIUdEInfwJyLBqsjwACcPJAylBkPBtxprlY/M8kigAMhehcNDoCbaRLpYF+2ija1Tv/Yzp4SrSrPWmomKiLRpmqfJmk29m5mIBpBFiDiRrMoiBRUjwlo7HHpb6nBYjgfTg+nUjnZYltNyuhVtLIwR27Y1ldvb4/G0LKe5d6VpmyxZAmpr6ZXhmVkoIl8IIQK2YlcBqZUYI3PEuKzn6/VyXcd1rCO3TIekyr5LVWPbe25Sda8gYEJUm0qqqQlE9ND6ocntcV6or+rm177zG3/nF3/rtR5uXn1889Ev/cLP/dbX7z79nX/2T3Bdg8u6YZIOIDJ0EiHMSlRJVqU2UpSYiqQKyWnq86GrESwKBUIClZXISO7YfxazdF/A/4QXAFTkXmWTqCY7KF0UiWJUwZGVTIUTkKAXSwVUiJUYqIQIBKKpqmbCpmJqaqwiLTy3NS/n7fp4ffzq8vjVkz9XRVFUskyERmslbV/isfdpmu7ubt/eHu7mabJuRGZlZl19VIV648Zy1CaSlZUFsJixM6Oo1AJFLDw8wLJwZGDfeHlqVLZ5nnoTkzb1rEikmoiJSDVT94iIzIoERUnLSEmBi1eMKvesgVw9ghlpkMlaiUApZuxqTfsySVdtqpO1bv3Q9JOfma213idVVVWKQFTEBGI29W5qYq3PU+8z2VAFYhAlTJLhqKo+83BaWpNlEZ05tfkwn9RmlTa3ZpyQUiWtt8PBlsPSp2Zdp6mpCYVIZFZGjHEN30QSQsjO+NoGPGJ1+Fbna11Xv25jva7n4dvmmeXbGCTNlEqqqU0UE1XdX2coRArKKisYtZSmRnDq0+nY527L4Xh
ox1/57Fd/7mu/sET//I//8PIXf8CxfuMX/tHf+63/TTxczt//N4wVdiNzt9aHX1sryn7LQATsVrAEqYShTzofJptV9sW1kkURyciIRAkqXwpCCgUqiEImMxOFgkRV7oTQQkBLzZFVWZkK4f7xp4AojWK2JrCSRqhQVIzGkiIIVqmayaSlW65ZzBFxqffn9fzhLI9Za25DCGksiqVszbI10IZriPbb0+uP3356e3Naej8cTjfLq9mOkcjKbQMG6G1cKy7ktpcFLyAdmqoGkX0pK9qaSHqGZ9AldzylOIYwxGNrJn1u7dBSUk2lo6tNsxLmkSVIVEKjKob7GJJVXiPcURmFKF93xNlURERKUE10mXqT1rR16820yzS3aZmtm7757L71Zi8rATNrapYFYRKVCCpMm5Im2UTmxt7AQgEqQGF4BKIZtYlMnKw3O7Xp2KyZqnDKgujUDkubmnVbltl6t9Z6pzZ41tgiPce2jm1grCMzMjO3a54v43xdHzZ/utbTsz9u2M7jfNku1/XZaxu1eaT11lqnYOoqpqoiQlOliKgIhWUVIKgsqinMfbATGZ/ev25JL/yNb//GT03f+eov//J3/8W/+PM//u//7H/4gz/7g9/5/l/+N9/57l/79d/8xz/zzW/94Z/+tz98fP/p20+m3j88fdFa3wl5qqCxRBwhmhC1yeZlmqZmvSm5L8hU2v6SR1QGQCECRUIKQVZUFTTTUelBeYG99lUyq3KvDdz3qYXITK/QYId1pVU1UkWYJtLEQCmUlKo2CkV6KQpVoTV43bb12eO81Vp0EfSIjRJijRpqUoKy1DbN0/H1zavXd/c3p9f3t2/nw83hcNt0AjUSsQZ8Y2KLsXllaG2oACAMMTNhAK7K6dR1CZ3mdqCa+tD0LGKNTFDNSgmiH0wnzJOZ0IxspiogMuGxFekjI0eOuoxt3VwCNQpZ6Ts3kdIUxmxkE7VsXczYDjbNRiuZqbPAaLP2Q9PbrzVTmomaalM1pagQKpVSkZVZWQ5m1l4PoUKKqDbwBeEdW2aKmYq1w3w69KO1Wa0rVLSbaVF6M+uAlJiKlXXIJEHZthGxpbtfY13X1YdfuW4edvXYPCN0czurbtJG5QgfHheKgEomTPYL3Vqpqs4KK0tQrUmyoNp3dlTtbWE1MFjq5ajxjdcff/2jj1+Pu9/81r/zoz/6w9/7N//dw/qejuVwfC57/v4Xv//P/uO7Cd/5W//LX/qV3/yd/+z/8OMvf3z38Scfnh9KCW3ampioSOlebITCqdvp5kanpkqRLKKKBc+gj+GelRQEUNiJfxk7w3CfgmonBr60Q9x7YVQRidipQ7KT1UOYAjahlbKhFVrthDQUiVBRbaJNhSSiCpFIZwXGOcfFt4vnOqpk5xgIILYkVhEWA4q+nF7fv31zejXdvDoebk/Hu9vjq6nNUWXaJWpLjNxip+lnRiQieirVVQiCLQGcjoflruuxHQ/9eHvgyfrNJJ1eEYQ1scVsatMJot5677OWBKFlzoK7J0tk3zxlJisKUVIWKVlSUYxkUVSgsI7eWjNr3djEZmuLahfpgqZUQmFdpnnS/pGWkFpiKiKonT8nUSAL5ZWeXhnIqMyI8AytF1o0hEJBFkduAjnO8zTPp5s71gtJgCJmXbUlQJVAUKkqfTr0PvuI9br6GGON63Ud2zo28TG0p3UCbN0OJ5xuTBeqlXZVM20iZqCJqomqgWK01nqqUmy2vl8bO6NVMnbWpzBFcowRsOkwTRlXq/Hzbz/7tW//bTxefvAnv18jTtNNpr66ub+ZGm2Ktf3gX//Xdze33/yFv/vzP/XX/tP/6v/yOCqaVJPWRbrYJGxVpJpRyxqWuS+LNSMkgJ2CK1FIxzZGOlRMwKwEUVUQRYGQRCWquK/kWdh/ft9v/wwKFeBOkIJYgckSlqmRXpqU/XoKSJGKgiiBqkQmPdI907ldwtfL9pR5VUahHFICiVEWZq2KLi3N6rAs96/f3n70tdd
vbl+/uZ+XuU+tJCGCApC+xnCItVltZktPXy+qEOmKrgghptPST9bmNi3z8dXCLm3ufZkOh2NbejtObZa+aOuTNUqHNTObqAiskcPDRwzPUSg1Q6qkVFR5sXZ9U0XUtjmoUjvcKtZVTMRUm0qT3vWv+oKCtN5aN1HR6UaQgqyqRAnBysr0BCoBB0t2UUlFREXFyLQdb/KxywxSrTISolNvvfVlXvq0IKGgiE66TDaJ7kKIzHRVsWUy1W29nM+P6xbrJS+Xy/V6HRsPi928apE+zXo89uWGy0G0SZtIlLLpzglFy9zValQznWjNWmvNmrZuKipCFYpWie6SqjSqChmsFvj47g3dfvb4jV/77i//8e/+988Pj6fjrZai2eWymWmbl8Px9n2s1z//V/fdvv43/tEvf+/nf/+f/59+bAjNpReasCElCIaQFdOsx+Pcu4mKqVQlIJAeEZer+xiVOzUILwPEjg/KvuOTAD1dwMJOJdv3GcliRILKyqogZf+CfGmKFFIhxWYkKM62LzuZOVTlJ1wuzagcFQO1jXFFbOBOLWOhFNCAW2NvLTJs0tvbu/tXr9+8/ejVq/uPPnp7Op1UhcqsAiojxrZtEVW1tHbTT01nkusY41rpkBo29cPtPJ3YDq0tvTT71I83J2uqDe3A5bZNBxNNbRSjdCsplaACEpt7VCTTY3h4VmWkghEVm2PEtnlElCIDFSVNQIiBjdDSLjaZTa2ZkKRKiYiamKlJ66KHux5bRuzzVUUiYgBZERVZyUqke4RXZlVmVkZF5nB/Wa8iSkNV19yaNRWByLIcunUmVSeTpgpTBgpC97HPKRG4XJ4+PLw7n9dt5LZu4es0692rRTRb5+EopzvtB1gTGlSIAsUICS+UVgpFKM1aNaXqYjZbExNtNvfWd1puEyFSgG5mJnPXtK2b19g+++Szv/3Z3zj/8PMP7398c3yFKpswH7pvo2xqmqp2Oh6fPqzx8Pmbt8dv/uLfP8j1//m7/6SaHeZZVaCVggKc0ZrOy9Sa9t72q4QgRbNkXdftPCKCxUpkReyvL7DjP1UV6aKiprHXLJQIdhkGwAJRYBUrWWQBoFBAsJigWC+mULSxZIhQAO5XVKIclYgRlYVkrDWq9uVDelbsSwy4ZG/CQiJ00pv7+/v716/vXr356KPTze18OBShyoKI2LatT+uzh7NoEGHvtvQ+S+q2pV+uCrSlT4c2H2U+zjK1ZjYtc5ubTW06NZuEUhRYkzZTm4q11qG9RAAmdYfRXmhRmZUeUvBIonx4RBYlc9fGclclShNrKko1tW7WdIfmEkHVHX5WI1l6840pqjKZkZmentsY4Q7ntm0ea6bvgFdEhkc6KmN/B7JGwaFCbfuwHxEBlHBp07EfkBrYaQIjJYss1hZjHRvK13U9Pz8+Pj9sq2/XQfdlstu7uTURwe19P5x0PkInoYTDKbBm+2/rxc1dyBLQVKR3aWaiJqLWtFlr3SbT1pp23eF1agPN+8SmaFbzXG/6/a9++9eeP/+wXc9N2rx068bMyYSgUddtPTSbb99Ua88/+BeHN7c/9bf+ox/83n/xJz/68/bmtTB1l6gyiGiTzlPrRlP5iSgGlRxrbKtvq1dUvUgGWLs4DXxRzxW02+3NzduP3izLvDMUwz0TpOzdEvZdDKSAQlF2sExUFE13asauhaWESok0gWZ6JQGtQBUqmV7D3auaqLVOokaOSoHQKyOpsMnu39wcTof7t28++vqnN3cHnSjK3ptnhucYvl6vl3X19bpD2qqWkeVpIruWyCBSyY5+WrSbmS7HuSaVSZbTbF21N4jSmnTTbtbZFVNvJRJA2dq6mlK43xE/2SzsAjIC2tq0TK3vC0U2FRVRocK69MlaMxGSLztqElSKSEVVBJH6tZ++EbHMcg8fSM9y+jWuV79exzZ8G+4R23AflSGVEuHukRGZQRFtbV/QCuE+2PQ6VkTcnk4JeqbHdR1nr6DI5sPHdlnPvp193S6X8+W
8hlesrqz7u5vDcVap080yHbks0idhQ+QVQlVT1czyRMaL3KeYqQIxU+t9r+ZCa9ZMRERFqU3LxFo3bdm6svF26lOzudtf+/rPnsZxOz8dZCqE9oNEVUZvTZAA29TG2DLW17dvS/CDP/5X3/7F3/zZX/qb/9l/8X9+aFs7tEowMOBmOpvNU2tmKFSBFCTdc9tivbhv8XIQE+B+DUntZ7nApje3N6/ub9+8fv36zf3t7W0zZda6bfkizAaq9rEYFN813FUsTtaiZZWrFrRKqAqgZGduF4XKkozKispyTwpPy3SaZjXT1syYmeVAsEjnWG7a/avTzf3N64/evnrzqk+m/QUm2bbr8+Xy9PhwvjzGtkqFaqcZhU2lq0QGtswUH464CgGzNvXDMsvc+qHPh2Wemgj7PLXeYPKT1kVEQGNSSkssiTRTch/TX4QSWbmzT3ufeltYbM0Ox2U5zLd3N8eb+XCYe29m2kx3agF20SbAKoFI7WLu0k+/d9eamJFgRoSXCEXkul4vV79e/fLs2+q+ZXi547qtPnzfxu0diAohWXCS1J3Uko+Xx+V0uLu5iXEZ4zp8eIQIfIx1u66Xp8t1jcz1EtfLlutG+GFph5s2tb4cpM/VZ/TG1ltgZK0glW2/8BKSIQrbr9MkIU6FaiOaKK2Z7tujKpEygZm0KVVi6jpP1lrNE24Ptz/95qdfr6fnz79soM1zmUqVj3WeF0dRtaT7iEoMXt8cPsnzdr385bd+7h+JXP/r//Y/wWES6Tk2UMz0ps9Te1naibRKZiFczpdxPY/wF7k1qvBCUqmdfqvNDofl9eu7m5tDbzZP0+1pvr05zIclkdfrNbN2AGQX6OU+Pe9qVbDAkgBZrBfaxa6ZTtlZAGBVxc7RQcHU2tTuTof7+5vbm9vD6TQvfWqWGU/bRq3jfbNj3bw+fPzJx29ff7QcDtZEm0A1w7dtPD89PT5+OF8e3C+QbNYhCE8pmfvcbWop28gYq7IkSyjTcemHZtO0HA/NtApipl2bSZvVTCBClTLdN3pqIoCYklKoXVWyE0DMzGxuMCmVLBFp3VqzZV5at9774XBYlvlwWI6n4zzNZkpJH7mv0irSrJnaSNdv/PwrsaJ6MLS1w818uJmmQ2uTZXpEbtdtW2NsGQ4fft3W2FiObVwQMJFCkiVKQFlkZbpfI2Se3ry+txojR72gloHy8/n56fxwHWPb3EfmqPChwvl0sK69SW/Z5rSuvZMiWQ54wgGByIixjULt4jSvrF1PXvtpEjWqUJA7zJhCmEhrNc3ZLKVBmfevT0vrb/s3fv7tr8UPr+a+rddpmaVhJ+pN3frp5vH5WXJAatb2fHl+ePb7jz85f/nHxz7/2r/z7//L3/m//fEPf6jz7RbPaq2pHDpNhSgVK7zsttY11mtUFrijPLsEb2cyVElJt+PN6dX97ek498mmrofeWnvBMYp1vlzGdXtxUsELdY4FvpjYSAoBL0rBKKCiCJHSnfikun8vERTKrE3dDofD65vTcry5ubl9/fr+7tXN7eGofaqe02SnV215fXz15vU3Pv309es3fenWSIGq+Iinh8f37z88PDw8Pbx/Hl+hRNiqskqQmsPhUJpy6qyxrRmRlQXhLH0yNopIFkM1idZMBOho1l609ajhnjHKsqJKS4QgVFVopn2xucvcRJXatE299WYqJqIk95ZHKK331ro1m6dpPlhrHbUjB4XdlkKp3/zrbxIxMppNd3enm5vDsszLcjje3t3dLofjrGqbj1gdG2obLzw/L8Su2d5pLxqVIklGZgqlzxM1b2+XeVlSCIEaKJEVz4/P56eHaziifA33KkltOM7LZKYTdKrWlOLSQWvXWD1DTEBW6rgWUhMIKVUVmHtpcO8HACq0oAI1ShOIuhlU2E21iWp2yZL89P7Nr73+u+1hGg+fm85mWlAG4E6T5+u6LLfjmtt2OUqy8nDzpp+mh4cfv3n9cX744enbvzgJ/+v/73/uhyUvQ0Q
PNs+dBEQ7iCobwAjfhoy4bhwmu/JWSgMIpFAmYC315djfvLq/nZaJ6L2rWWv9cFim3sYY18t62a7uCahJq8idJha7D4OwKcL2iRfWmhjYU3upQdv24megWcVWbdJpnm7e3r89vr69v3l1f7g/HOab4/E43x0P8+3dbFNC8s3b+zcfffzq7cfH2xl9mBhlQvl6Pj88Xj+8f3j37sdfPX5xva4o7rp9yWTJ8IqAX5Hb5p6Pl22M4FrhVdpL4NhUW5WkDm2AElaFylpRycK6XkDHfrZq32TvpEMqtIsKKoqpQk5N59a6mZl1qa6tKcW6WbOqSo9MT1SjtiatSeua4PAXKEc/++XXQE3TdLw5LIfDvMzzMh8O7XQ63dzen46nZZmsMTI2v3puCqsQVL3A1rLTYUJRgDgpJssyLcdZDW3G21dHNUCiNAJji3G+Xp/Ol9gGKit2BaSrZV+MAp1VBEyKkGZb+GW7jMoSRNIHq4zQKkiYVQeBJJIZuY0VCcJEd2nqLuFX1Uk5QRy8sPHmcKOoy+XyvU9/tn9JPD6INqpCikgV6G69sY7T6dW2ObRFstLvbw+Uev/+cZnm+9tX3/zFX/vTP/jd3//T37+fjstk0jBLiU6U9mLwQdkix3YFUkqKSBZUMguAmkKyQr1KlHc3N6fTjU6td+2Ttd5EOE2tTW3d1ut6Hdv64h4ie62T2g1YSgqNL3pvh6T2HSj2QihEbEKxChRtqkvr8+lw8+r17enV/f396e3t/f2rpU1ouajpQee5H47LfHN8++r+sCzsOingmhHX89Pjhw/P7x4+vPvyfHnYtufLeo7VCaoaCz62zPQYVeXn8XS+XC5XdWW284j0DFTvs6qQbNNkZqoUCqlII3Yp2ItcNF+cN5BFT0clBVUCNmVlQNmEikKp5E46UlOh2ovXmJpily/uMINpaw1SntvwqyP1Z379ralO83w4LctxmabpcFgOx8MyL/M8t957a603m/YFMXJkZkGlNaESQlGoiBBR5pXzbDe30/H2YItoG/OiOqm0grr78HB3v1xW9x3bBrIqHQIx2txEo2pX5mdWPl/P63ap0lHIkOEFWhVYomUZOYYTWgEIPDLcASGkdnsJqqqaKukFz8xRNeks5RPn++17848l6zwiCDYzs0ZTLxfSr353+2Y6HVqfzfrw6/n5wzQdIvLHn/+44+H19/72dz799D/+z/6P7Lg5vEamTgcKyZ3CjaT4GIiN2CUsufeygCb2Mwkt8VKPrRtu7++Px5ummBdtbWpNs3zzVZTWNHIA0WezDmmUBhWaEUhIUJTI3WMuUaUlVtxVOwQQIkpQgN77YTkux9Pb+7dv7u4Pr47TfLg5zKqkGU3Z23Q7f/TRq2U2MT1MvauOjDG2D199/vju4cPD48Pzh+en9+fL03V7Xq/rdb2OkQAyR2SFR3r4iu2S1/OzX0Ow+MZ1O+/I03JzEIOKqgIKUY0RmVGsQnrmtrqHR8a6XTKyihGxT8FCI00EL8BOJZU0K+yyu13Bmy963r3jyRebNxGKJhXWDKqJ0p/9H30iqq233qd5XuZ5OR4Oy7Is82GarfdF1URKzUynXY1f+6KxqfzErowgRCIIsM+8Oc7LzXK8XabFwLJmVAdjXdf1uoXH2Mb1WvtucoxttwnZ+THkTkPfvLYRcblexhYozRSkEhpZmVCYigHwUXTxQO+TiPjwLKkEIvfxkghwKw5kEbMzmGVX+ah//Zdv/2Y9fDjHemhdjGPLXd2FciYiiiIj4vHDVzenRU2vm/tWonI8HR4fPv/49u7tz/zmD//wX//RX/7rw+tXrU2UTWli1RopmcDqHrn5vteKZClLC0wioypMKSkZOcbww83pozf3SzNrrfcGQ5Rvvl3WazLVjCZs1iadFrNZtO19XkqHWnM4IFUsprRsk5i1nRJGyZ0bTIrZPPfjab55/cnb00e3N69OzUqboRlEVPyK0Q/Tq8M8ebzR6ZTydLnYFtvj+Xo9X87
X9PH09O7dw5cj13U9b9uIdK+CSBYyZFt9u/rYhm8R1xpXzy0yM0tX94jRD1OfJ9Yg4xpO3YWwYydkRNbm2xYjM1C1W++YiLAJlLtzw7542FERM1DDMz0SyErPoIC6m/u8vA+qUClRJFK0dZuqysQUuU+K2q0dj4fD4dhaE5qIZErvXTtVOv2pIkuGXsXdq7irXXajh0oUMrNiE5X5cDgebw/U1OaJ8Ijy2LZtXSs2KQ9hRWKMsW4XADO6avk2YqCwavOq9MDYMkvgzF3MApDarUkyR8ZIRvMRplrJSgo7ioBE1TqCXLOiRLoYSDIbX1+v+b23n3xt/mw8PAevZrZdvC82tz4i3QMFCm/uby7Xy+F0q6394Ec/7N3u7t6w5Hp+UObc7/7s//Nf/vTH3/kP//3/9e/8wf9dLWkL4SJMYbFQpDh1g4q2jFEtLIKBKBQjK1SojiSrwdZ1/Oirzz/+6P70+lVwXP0JAg+HpM3E5jLXQaYJyiwgo3zbrplIN5RFpG59bJWZtKoa7tBm4aDCoITuPiU+Bsg2ifbox7Y002Ybsjo91vO6vlmWG2+v/fC1ab6Pfj2v26y51uV6/ZHjDy/PD9fHL7768YoV7jEq4Cksf9ahpTcSa1a51C55HRJo6iO9gEGPuvD61Q/e397eDo7xnJgm1a1MRYssaBVDm/Tsa60NbSdBKRvRdi+wQkVGVSJ2f7ytqmUiYjg4qRBoU5vnWVjUbFooZkUBO8lWoM3qZpmsBLsIWK2sYZrazc2h9UVEBOruYswqX3M7rCOWLY/Ei6cNALx4hGTV8HH1AnhvNjf0ztaXJlq9FSXW7bz52Lb0VVaPGiODY91iJFWGr2JaYNaguOdWhTFkHZlCr6HVqwopKJaWJHNgbOruJlrFbd32Kx8imW42i2lRipaQyCatgTrLTT/qF+/O3/ruLa8tfWvVxYwZtJpmg3CMHNulqswO8zxPh9O2XWVc/XppNh0Oh8enr8a66aw//K/+r9/9h//47/yt3/p//+t/MX26AHNl7nAzioiLwPWFFAYa3GO3hJMSowSQFTtlCpJfPnzxx9+frXGeoioKkcyi9FkP6FyxjSHSTRQZXiNyBqQC2zrWdXjMvmVErON5kC/+CLpbldm+PvAYw70QTTsSObwtjYqoTdzvq1WcfgGvvqEnEemBoy05FYRrjYue7zY+5oc/ef9HQp6wfFifUIDIGGPEFVLoypGZGJFWbVQObGu6u0paVaZjexzv4qvT6XD36WlgmBcz+jKLABqAC8fUFRG+iSgydv+yoKhKAzQzlZJkeIytqjwqPSM9vPLZQ6HSbJrOh9maoZtGyRjr7kbT2qQmHpuoG/f5A4Bkm6Qv2uc2z936DKaPls9cR7Sp9blP2zTPs3uoKn5ioEuRQqxrdPbWrbUpXYmumLoetFNkZLpvWwUiar3GuIyxbT7ol9XDtWuojAjAsgLpVZFlmUVY5e6kIJmIUeXjmlvTXslxHbudtGdQCUh6SIc13UmwomI6dZtb782OrSZc5dXXXs2nKda5IlEyVp9n86rIrWezss4pxK+r13YOhx7vrs/Ps0X4dX18b8qPX70J1W199+7dD7724fm3fuN/8Tv/5r+xOVnqDpSjZMt1zS0otGRCiREjIaB6VCZIjeEisF5NOffD4Pjw/NUPP7SP7m5VNWvsWgbrdtSDmNgQlE7dVOeqUpl2n9YxxroOH7WuY4z1fK01W5lHXcjazXeBlEpRwHZiDZj9/HR5nJ8oSpePng+vPh+3l6k5Ro0Pj+8vz89cZhYk89Xd3duPP1mO84/j8XdTJ8zXzT9AMpG78STkvF5R7+HctgxHPsu4VgQ8eN1WHSXQgitsffIf/cWX/TTLzKC7SuCiuheBjIGInfIXuycod0btLprWUrHy0VuXfsCE3fobW5Y2um8eVXE+Xx+fx7zY6bjcHA40e5EkvVjXbECamf70r3+klACs2en2Zl6O1vvUWp9Vm9B6Ro2xDs91g/u
2bSMzzJQQUZ3miaYQS7Xe5uN8WKbjNB2X5STaRQyqplqhl/NlXZ98eFzL1+15rNdzhiN3xxZkVe4WCFERUWNoJVUMbkyJTZDMLfw8fEspI6oyhIaq/Yil5+7q27vObRZRUW3W5nmel6W3aeo2lW1pn96+fbPZPC7nS9U6dgwuSdsJWfkiCuzNhLoss5nSpB9n65K+Ssntq1chxGZ36V//7nf+5Me/9/n5Q5+P59qCW+XY4tmLHlo5alRsGKO2DdcNI8srKsMoU5fp0GWuZlzmhYrEamoQjUJmQWiqZkqRaZ447fQi09YPh6X3pk2Xm8P98Waej6qihj7bfDgUi4SZggTUKGQkC7Cm/e4wTTcHPcwQlQ/bJ9+vj//t4Pvr5d2HL5+ent69f356QrO6buP58nB5+vDuKxY/++wzbfL5ux89rI9PHA/xkBVQBYU0CEs8gteRdN82Oa/wrXAObppOVGpWgVGSTDvyeNNVubGSIyvSMTbdbcK2bYtwoShVQGEjVESsmZn03vvcp95b671rn1Rk75VKyNZEOrUXZada6XJcluVwmBYjwz29ICRFf+bXP9pda+Z5OR6Os7FJibU20cyYNbYxho8ttnG9btvYtoisgjXtTc20TY1ddm7Wcjgel2We5953GoaaECN8retlu1yetmvUZtvq63Vsl2vGgGgpdyMd7GyPEqKhGMF1q0zWoAfGNWJjOjMyPU1apoyxDY9KRURalNbUmmgTk9abcudCtWU6zXZsdtOmvvm2SPvW4d7er8USEsNH7DY9jowSLxmq2tuhJJd5Pp1OBSSaQJsoIsa22tQvl+fhT29/7hfeHN/+zr/451vPzX3lqC1Xr3S619iK14ix+cC4MDfIbnQlsC5tEjHR1tAJRrcWw8HIqoqsGoXISoq0qbW5mU3adqGJUqFN+zRbb9b7PE8UtnmSppPVpBpAwRdrQBUSyooi9GBtWe70/uZU/e49vva5fP0HUe8e3/kHBDQL1nWeu/SojKrZpvL44bsv4lrf/dovvI/HP3r/p1+Oy7kC6ASqPBBQ7Bp2MhIpQ5U9Pcbz8AsqtEYVLMpB2lztKPNhkh4piEpUZUX4KqIRta5rhJkuTCB37jelT33u8zIfl+lwPE3zZM2s67RM09yW49Q7wQgJtNRJWlfrDYouebpd+qEndhK9Ib2JWaSUwkx7M91HDEXKGtVqjEzJzEwvDMHublW9yeCujRRVlkjXiY6LX4haloOJwdU3TJPWagO1rpfLZbs+Y7uUb9f1um2r/2SAJksySMB3t8XdUCoZGzIKyAzsfPoazID77qrjGZGZO89jOenuNiwCAcIzNNGkChkC9KnfdJ29yXE+eeLp4bGezzm8qrQ4NpFwlS5oLHMfpaWMKF639XBzJ2vVOI9thXvF1ppw226Op+fzh6ff/aOf/pW/+7Pf/af/7A//+XR/g6t6RgrGyPXsNWKsOTZZrxHO8paJXZYpzUJ2znhSV6FkkpSHh+friKnrMps6hgnrrvdmNO0ibDnKUyKzKNpEKWPEiLSpT4f5iPl6fn4+Pw3BWJF1qQxP7wmrMGGzg5jx3Tr/6Pwdb5+09pjn1eqeN0PQD8fhyOvWhEXpE7lu6HVAfPnl5996eN58fPnhR96N1vyyiVaABCUqcfUW1cgobxU+0pydQamAoDJRAsmcelPGdr1qn8BKS2eZKAWZvvtkV8AzWMXcmaGqynme52kyXXq3TJeWu/fMSQ6ZOXy5Xk7vHz48Xy+bb6xcepumGVmbb0X0Q6upx2VNwRjDnGMnECgLFaCWWqav6yoSGVy36zYu7usW15Fr5RYRfDGD2vXmAKr3aQzftgFymmdVZcp2HQJS7Xz1x4fz5Rqxruu6Xq+embudfDpZTKYRpbXtcswMKS0yS30LCWQhRlXsPSWqPP7KbhHQriKiWSPHEFftTK7DFaQ5YrXtuU3HNs2zTtKsj4x3V+1t27b1Ovq8bPWU6zZ11T5BxdpMcmT6GB/ev/v
qi68Oh8Plcp6mab1cjocufVJp07LczK9+8Kd/9DPf/u5v/83f/qM//ldn9Ofi5sNHrteoDXHVbWQM8ZE+IkdBVK0X6ekqQLmMfVZgVolGIS7XM1Ib5wgBzLdzblNWp2RKFWy9DqBUEGa0EpGit65m4tlynsBqk52f5HwRH+uGUTDFvMy32jW38bUP/qvt+Dan82V78tVEqWbCy/WqOi2HQ0YsMEFu6ad+aBd73J7Pfp6l9d4m4uGyebqRJppehSyVbbhCUVHCSCKV2ltnbOCg5yBpXbRZkVWVnioB3T2ZTamArGMrAkgWACnJpma9995UlYrpQDIrSmAqEFMz80RDm07WbnCzHq7XLYZPvROa7lHJim4dkowKiqfo9/7GRzvLYmq2TL3PE00LvnPZfYzn89Pz5fl8PT89Pzw9P43LcC+AKoKftKM07uTcEau0dry7FbAIrw0lVXK5rg9PHy7ns4/h7h4Yq1eC+xQVQBCFGFERUkQgI4GWgdgSXrFVDI+xE5R2R0FqMctb47S0QmBkJbPCzChIEC9WI+XhQl36sYs2wRzy6kO0hPt4vmygtkXcfaxXoUMyqyIK1m5Pp+Px6OvapikitYmK+4jVx3pxj6oxTq8OPS+ffOe7lw+X3/3zf/O+1hF5fvb1GnHe3HP3dYvIXedFAoqSrMydqEkUaZSJFI+ozBwDDpPetffWBFAxRUNKelwv23YdgSShJUxCBBAhutoWHo7wYcC0TNIOyLisZy15Nb+N28Mc+Zv5+u+8/bm3vHnWfL8+WdZhPqLoLFXr0ltrxZQIZrTZBOGJ8/Z0/+btyPzdH/wP18rzCC8HKyhMk2QJUhUQZsRgrJZX1lq1BkZg7Efaj7ft+Gqxg7QZras1YVOzJrp7IWAbI4YbOwFVsybWupmpCqS1uauBWm2yeTI1qpJGUWlzp5LK1uywHA7HpXURLaO2ZiroqpN1UfWq6xhmJEQSHFGeY/U1t5pNygo5xubX9fm6ni/X83l9uozn7ZomKjZn7hqFrJLYPTwkrNV5fbxc7qY251jZK4b33guBKt82hO/mmlXcLSKLRdWsQkqSFhiZEE3fXd+QUbFrMr0yMzxRJTvDX9EgS2siO9N9D76p9XKFIMaQLdR7IyrtistTPfbb3TF9ntDSB1jB8fwYco23H71aun7xwx/per053bW2xLZe0O5ev755+/HY4jjG5fnd8c3rD+/eny8P2/X8+vbGx1Yh58vjcrn8w9/6j37n3/y3l8cPn4/YRvnQWadrnN03ZFBEpAEshIAVVDF6ZVK7jnVDWQb3HyrDr7nJ0Ll1CQU0N3hdIVMiLufr6ljH9XRziB7NtJ0mSosUYwmq0hEvROh76jjdLRg3yWlpy8a/PX3v3/v6L63FR+IaMfU+VVuzTGyxHrSuLT1MGmwXVtaIbJbT3GrDx3ef3s+3f7l94caezd0TJSa7kQvJ3TCrdjqK7UA0X1rpsp2p0TqWg2mvYjq004Da29+IyPSIYdLI/pKKArg7lTWGjzYsijJps247jJmoAkIHoTNvs21ZoxDuNZIYE3PkNgQ0a631RJ23s37v197ihTZfu/PULjSKcvdxvl6u1+u2bZfL5Xq5jHUd7jvuny/JMTtHrYAKjwp6ZDGtWdTIyM0vo7aRl/Pl6Xy+RGZERo7IjIjC7h/74huNQkDGyBgIRw7kYA2M1cNrtwzZadw7ERqJZqb7S7z7LEiq7UoB+jbSB1G7qXYiI/18vSimnzp9ejrHNi7n6ybCiM3X7fn5w/2ru88+++kvPv/88vS+CefllCWXy2Xz0N4//dY32nyCTL3PLG+my6zLzd3z2U8HG09Pp0+/e7T5X/7e7//5F19K4O50B/byIMT2IJrdcZ5lKkZTSnJXuhGpY9R6vvrlGmtsZ1yfnyO34+mwLAelya7eGrFdt8saj4/Xzz//4vJ88Rjrul6v5zEiM4RBxVjX2hvMrA2RETcyL/1okH/35jv/4Nu/6tneP39AuFz
dZCpaE+t9FuouQgMpQt11VSImlkFkqk2ffP2zf/nF7/3eu78ogSp/ouiskmJjIBGBlEyWW21ZVxln8XPIMCHZMN325Y3ZgWoqJm3exw3drfLc04cDxVJAd2PwXRgnRjB39dVOlntJqlERoyiygNotkjJliEC6qjYBxMT6LK0BCYaKZJR++1c+hrB1qrH2wCJG5HCPMWJdV/cxfGzb5sMjImKU76zsLAap/Ctha8I9UBmxQjwR21gvvm3jcl2fz+t127Z13bZtDc/MwT1VIrl7luyy1+tIH4i1asAdPhBRdGzuAKr2XBZUFRUUqvClA5YsiZ0CSRqF7u7uw3P1UYWxjXU9P6zr4tMvvvpsqRiXyzoqU7rAoyjyxY++WJblZ37upz48fPnlu6/W8IiRcTWz4/F+3Twzrk9PmaHC0HZ+fJCSnNp2ftC6HO3u07/2a//0v/wnf/LDP/2Zr30v3R/OH2bI1CbV/cnskTI7ZT/J3Tci13UNr+265rjG8PQUojgCW5+X29P9ZA3ch4HNx0Dq0+P1+z/40edffu7DY8Tz09P1cs64ekQFxhoRY01fwy2q02LqN1f924fPfuun/ka7+sPjB81CVFsOoIhJmw/KllJzm0REu2WuXWVqlukkALtenp+u548+/tp/98N/9Qdf/PnSZgLarJjgQAWhYuLY4wcRTjiwEZtyQIdFrnrC6aObfiu64Hg4TK0lsqC66+yF2xjh1bWTfY/2MYqw2tSsNdMCAzkJe+QAvU9mrTWbREB2M4U6LKUZxZAQMRVSG0srk8isER5MWqYrdU+Ky+zunumwNkvfi05GZaaZTYclCZRuHAikRwRVLgWTsqAURslKMqnP57M0YcVu4pg1Nl/HtqVvGVEBoe7WMXv4ko8kNaqhrpXwUS8FFYjdQ4q6p57uoiDuNqWsERtKUdWoaJoSSIfIiNi2EVFqynWtKyZz9qk0jwe+bXNwc9Ara4SwQ7aKujse/uJP/3Csz5999gsZf1adp9tb7T0Df/pnf7Be/WuffBzjGVmHPmXl4ebN49OHj28n97xcMR6/P+Mb//N/7x+9e3yg1+Xp3LQFRM3nmkNAz/BMYSQiK2vNsecc5jwZUVFok1FE5TrxsMa2ruu6XrrM4urckOkZKh1Abmtero/5rs6DxpIvp8N8/+r1/f39bDaQK8EcXRqUh6G/8eqz//FHP7++H++fLwADukwzVKUZklkQ42yLJqiSLLgqhVSBJsuwAYjVFTj2ubXlON9dYhu+TkKarOFBoqBWYsyV0sAlePHVtBr34ITldmkHFc2pWTMBEmx78KJYj6gMUWm7lxhQTUTVao/RKGtNMzOHr5FrPNuUJO/kqNYp2hSQCrFSTWSObWBNr4LFuDIZEbGNiowx1hzWF7HGRFLUa6inqiLqrKWVexwNibaL3dE0FUlfnY2eOUY25aZOajF3t3wpZIxMzxwuexgL/8oelCVUqQCqSb3A3SxBhKCssAUiZEQZkpF7eUkja6hqoviThDlBD3NksFpZK0mUU1UqRqzukq7lQ8F1DTbk0DL/9Pbt3Xz4y/MPM9jZV1w8nbGbafJ0c/PFVw/r+P7Xvv7Z51+9GyFYyS5vXr0G5HpZD9Ph6el9TBMDHy6PrPH+/fs3r14DuT48zZ9/+Q/+wX/w+Z/92f/+//WfLPfLEvZue5rUUthFa01QVVhrwDnGGKutY520z0u3VpVjOvH8/L5ZqfU+zOtp+DUihoyKRFZUDtmK6NpLrzm2D+/fG60sn85bbNmlnUWa6pWp4Wvyl772zV9+9cnPnD46+/O784Mnux6mLpBWVTG8t0XRFKSD0oDKHFQpwN1VGxjX7TrPLSk+6vZ4e3e6a9NUq4io45nwxkqVMgorfUiJEpsXZBLJwRj0+WTTqXEa1ffhs0pVuyYwUCoY5yEhqg1AjiFmVRARbdPUtDXzKiZ9PJ59Bafc+rNkl6tJa9PchSXoXajIiiHGZOQILW99XFcAqi1jLVRT2v2
rWxGMMTLTaJUyooAU2UoEgJmoakZVayJSuUbEqIx9m5cIRBQQ8ZOAVUBRVcnIjM0TKIFERZFsxsiEZiazarfOTyJLVEdWhQCSvmUiaZWoClVFBKClKS+RksUahYumRBU1UYNBCoKbi0qE6ECq+FQDA2DEtl7aLF1qrE+X54dMFKPgYiqV7l4lqtObNzeX8/rw9Hh7ezpvYzoeT4cjgMvl8vr1zc3drT3c/PgHf/rR6zcKXM/+8OF5sWm+u7tk8Ud/cvvtn/vN3/if/Kf/4v9hx9s/eTgvN8euMdKleDvPRflwXrM8ISyR9LfzAfNMrLc39zYF+EEXq2rWKM9RYUissfq6IVlpEa4NsZ4zr4FCkWUDWXtUxPrYcUjI/Uf37/zLefW/9a1f/3vf/JU71oenx6frtQdFyhpU2h7e1uYutP22UwUZiGjIopAc40rR1tty9wqP71tEbuv7y6VSFFyWggfScg/DxGCWlngaKjKZKTn2uERpB5tfWbtrs8lMBWzLkBaQMFi6+9gyKcUuarBoxkalmMjxuEjvI7wLPa6UG8ppdwAI5/PDtc+LzekwCE1U5AVelybuFKFRmD01cgcggcrUv/nb3zsc53ma90Cln0RBkihVba2ZmYiovrQkWSWiCt29/EQ1gdidUvfxNFF79m9tufOwXsK0SsA9o49VuhcLFHbzmxIKK2IX7lUWMiOKwB48KCBUxUqNQIo4SW2EaFjBSClrQdscgxIsUigUlVZhLCTyvK7bZf17P/Xr37l7/cMvf7ytgoiqQRqrosrEWAQ5LUeUfO3TT9+8eXtdN79u7z68i9zW7fLh+fHrn3z6/v0Xjw8Pb968bq1XxuV6PRxnsz7W58P93e3bjw9/8W8fzk9fWX00HbqZAnd9/vh0s7mf1zi2KcoJReTtcfn06x+vfLKjf/T2xtqYl364ma64lIzGFFqJuK/bBuxiQR/X6/Xp+cFjCKVErPeTHbaxxbZpVveS2N59+YO//51f+ce//Btvw56fzmsOoiTR20RQpSWEZtPhSGpktKZGbmOTyorBPS9R9hQoZfBpe9Su/f71P/3B73zZL8fpuLuGUyVKIgOMykAplOnuI8pbbahV0qM3uXt7Wu6tzdIm4aTs1EkEqk0i3CNUtVszaKPNfTod+rLc3My3piK7x3SEsmRnDmXueprdTI9MUaPuS7QUpRjJiswKiEik7z4dTLAYMfRX//53em/LMs/TDKAQFIClYi+nX/fE1Z8oLSorA1moyErfnQqqaico7CSm/ctYYO0yfwV3bTOrtFIpBmgJC3uy10v1qGJJRrBQnpWJSmRVhkymzShFSVG3LiKWlDTahNYbCWtXsyBMCTVBsaAjEZUGVNaomKX/xrd/6bM3n74/Xx/eP1dQBGNzVfMYrJynlpWFUrUvvvjSIzz8/eMjhV++++LDVz9+fP/V5eHhZ3/+F/74j/7g8enDPE+UOp8fI2KZD7fTaT0/Hr/3c/N6/W/+4F++uX3zVuxoy5vD628f7046Pa5bbPnZR5/EGB/efXj7+v7q5zd3x4/fHs/PP/zOpx+9evVqUmlTbPUEwZ5pT6WvNa61ByWQuI7xdL4wyyhVWdC3x7vz8zU93t7dzdvl6cc//u1f/rv/27/12zeaW67Xy5qbT9OOuquIOYSi1htKq8oUhErknmkKloiCrMiuqoKiPJ7fz/Ny+Pqnv/Pwh/1mmjAPKAUmgsptbJ6VqJCqciYqVUtlIDaycjnY6eZgx7IDZKFNbLtrlDaPTMDUjDq3PvV2XJa743GZp7mfzDpqt+UUZGUxd6Z/JHeP4mooM7HerXUrRMSg7DmeqqoKicRLdkcmArnlGKv+2t//KRU1UTMz1WYaUT6CL9kL2qyp6R6XDWFEIiQLXhm159NFRXhmZsUewL77VzBVbJd/7wED1D25MiEqBdllj7tyi5WeVVovHoysksoKD6Ban9gT6qTQpHeIajXCoi8UHaKiJuSGgpTuFkUsIOYMSmW
tmm4i8fVXH/2dv/Y3bzj76hRet2ts21/ZVBGcp+PNq9eHw02fljZPfZ7vXr+iye39q/tXr8Mr1vH+3YeB+ujNqz/8N7+X8MpQ4HhaQsRUZsTyrW/D/MNf/OFc8/0y3fXDT3/8ta9Px4eHc9n0ts8fn47ivD5dv/X1T6+xXnH59scf9V7a66Ob+9N9u8RDbdtuGOpp1w104cWfQGBjjW2M67ZZawTCq8KX+fZ8Xln16rjo5fLXvvO9/90/+A9Pjq8uD18+PabHPM3bFkSHCm1iazd3t0L45ktvSs2Ipo1NkkmUirBETaapi9rz9ujbdmjHulv+sj6Mgmoray/Z8CKZtUUM+ChvKUgLbzlQV4VLMz3MCxpkrn5oYlQRUaQhyO26aXHSNtMOvR/btKiqSNdpTyAsVmZt6S41aoQP960q1/VCBqUm03lS5aImYEbtFr80ZRPNRL6IED08cmRFDV/t5WS/pJLTrDeNMLBSoQqt2jNqAcCoXWbnNej14qVeEIZkxQAEyt3Scv9fUblzHKilBkFBBTSERCMcimqq8EgwHUitPdxgB6bCM7NNxknLoDrUjJpiLCVb7GHuAiOzpIKNkgQjogIqXYqWVS6eAlGTZia9HVtrlBSo0oIVoIK9L6fD8XQ6SZuTHMMPx5siYotpmh4/PNzf37/9+NPcBqS+/2d/8au/8ktf/8a3/uzP/uTV/WlqdjgcDkeuNZ6fzncfvn93+9FvfPrt3/vqq4TNN8dvL6dGRI7p4fHuk0+/+PDO3nx0b8ui7fbtcm2Ph4Hbt9/cND599c0P24/ubm98I+uxcSx62C6H94/venAwAb+Ma2SdJhFtj5crvcpxPV9m45brjeB/9u/99m/86l+f1usXDx8uY3UfYv0aQygAQgDTeZ63MVR4mHvbT1lWcrfnTRFhAshp6ptfK0q9eltsOfXeyya/uTl4+HPFaMAwzqZDdesVAqFnOmOEbywPEG1qaj0koXtMTA7ZGxFf60pRVhnQmyrYgcr0csJVSrrJrqiuct/Kr8PhnmRt2xYRIha5rdcuGGXoRxbKPZtpVZm102lSbdvzJSPCScuMQdL2Rcbe+qvqS8QkSq13bc2mrAgEiu7OLEbsXoCevhtTQjMi3Xfs2iAo391opZLJrH3RUyKSKJqRYi5SiMwR+RK5QVYhd+b67g5LSGtNGlJGM9Om2rw0XNlmSc0CSylawkwhTbJQ4Vr07GNVjDS2Lco55i7Hw6tPP/7MNh1tZIPpxHwQQQ70rtJMTG2e5tNh27bb29vwej4/Pj089G7v3n31xY9/+OknX/vkk4/+7ffPl6f1z/7iz7/7Mz/34fHheFimuW2Z16enm/mQotvzOn3yyUff+Obj+Tl0WW6XWy8x+6k3r2/mw7qO+ea1d70uR53mt+sIvnp7e/uhztX0m+3Te2/WkfO7V+3VV59/MbejtNs/G4Nz8uk8RrTWJzOTMp3rGk8tTI+3fZ4n+8bbr/3dn/3rv/3rf2e7Pj6vH9ZxrcLU+suMR5bV0k/ae2amh/ROQUSIVOsS20Dk1GSs2xZ1XA7rukYOZKmallfVdHNjT6eF1c9rX61rhyJqZT0BQO4eCXuitQK1C75pyk4zy+JwbxNKZctUQY4RuU0yU4pKVnkOgVKQtRKtIiClGqxNsUGgFttYGaGGdb1G+XVb5eHh9nQ5xLL4ga1aU2iBGrTjYVZtrs1kQj17rOvmvR2sqqpQCVBVLZC7FSNJ1fZCm1at8q7dywWQFM2dXY5wj0IBpTsTYVfiQhRVyMqEF7RS9rWvGcvAiFYFxUpElYhVZAiE4VUkMxIl5L6ETNGkDSqlhzbZVKoHE6CpRWm57iFQQWqVxRaTHBy5bePsWyZu7+bT4djl/tNXn/WmUUVp07KHIAhjD9uAr56JU8b9/X3EyKj7+5vV/bQsh+P0F3/+bx/evxezpnOb+5fv330jvv2tz757eX68vT2FX5/G5Xv3t11dCuRxPr46dcU0HW0a41w
je+DTu9vrJdnl7FeZDjd3tz9697h6fPub3/r+0188X9dv4TZO09MPfxzLN8X73enG0m/vPn18fLys6ze//tH3f/T+9e2rU7dYt5vT6xb4gw8/umvz129u7k3+g7/z9/7mN392uz6u53dXbNE6/39E/emvbt2al4fd3Rhjzvl0q9v925+eAqooqiibzkAAxzEGCaVBchwl8sf0iSP/HcmXKCJKIsVKHAgmIGOBCZ2hCqgOquqcU6dOf97+3Xvt1T3NnHOMu8mHuQ/5vqS1pDWf+Yxx37/fdUmx6cjuJEWkW212kIqEqVsqBSJMNaG46SKfgbDmYeEOcBpnBGWhYFSdwzxJV6OVvqxEMaBvq6mauXALRGZO6s29LQV8MDdzwmDuAgFAJbETNCA0xKaGQODWmluoSKMmgV1ZgUeEgYM2c0DhjOANmnqtWmdQ18mhmjYzm5s+jIAgJHR/vLmMJ1s8W3Vl6Tu5r6hPZlayQNWSupo0Fn24uUyjNsOu944A1AWg48yJMCigBbBVAAwEWY7x1c0RUKiYQDLTcKvh3ha3eRhRkpLBY0lsYpCaMytGWrqcyBQLsCyciCopELgwzkjqBBaBjGA0cw4jCIoQI5Igcw7O3iFjiFNVbpGYGeNNzKC51YCeqNdwksQi83Rcb8rbz5/uhp2eIuphkknpnGoKVnaojYwcpjlQjnV89emHBLHdbnePHm3PNlJ7rf7y5csPPnj/8Qv/+KNPQDW85pSmafrJx5/8gT/4c//yn/3j81331otnh4fT4bQ/3/Y47uH2+zO03fpsNmytMaVpmodu1a2GdEaqremqXA2qh20uRiWlfHX1VddaSrHo4PznPx/vPjt+/JWrF/uHca/xB65ewNxO1i6u8qPNdtWtjk1Tvz4dxtN0vDrv3x3kj371l96/eFbrwzjvpwZ1DCGweqJlDJJYun6aa/Ioq54qmFYRIVhSJACBzGQVWrMQJ4honooYGDsYykO9e8xJy4YItoRTn+RUclp7PZg0YkYVIknss9UwQg22AGBOIkYQRpQCGVwRKZwNNIysEQGOoeEVciGaRTRRZivIHH6a2iHC5lbnWo82tmjhMzh4DZ1tMWdPGgB5SnNQMEfCXYfZiSE5GExtjgjnoGSZ2aggTGYmt/uxlGaRI7tJEkIWQcRwNlsQKBBuRNKWLJqZ+8L2QARJiRTYY2IAMCPilNLiWoKIpgtT2gAXbC57YDQnAggGAk5QKDU1jEiJ69JYAtc33S5mCczGCVRcCuHCkGFYBFlESBxAysjohJFb03DMlLxinS3cH11dPHv86Ops26VBtsNlN/RMEjOhjq1hSTqNRdLxMNc4WXMCZsbj8TjNp+uXpVlsdtvVavX69etutWrNbJ7M2/XxZpVXt599evf8xR//E3/yB9/5zXVXLtfr7/3kBxmfln6N8Yqbg3Srdbm5uRnB53nadWW7GhAxcAVBGctpzpdDf3NzY3XablbI2cxzN2wuzy7u/KpdPn/05BXf/d7HHz6/uOiG/tvf/d47z9/yUc/PH83ot/vx/fPLZPX5s0dfffSVf+tLf+h4+nz/8IBQZhvDYJzHrusgQrqSV5t4o44GD6BUyJA9St+11kBnsKZoFuARTIwBi0FdXdvi4EMKd8lMq0w2QygIBmoko6CudbOr+aQRPcoEFgScJEsSI3c3FHNXgyCjcFBubuEK5izkwRDGeERImcHFECeYskK1aAa1eWutNWuxSDmrhgMimQY4smPVNjZAOiU5iHTMzGzRQseGpthFzpkyWU9ttpQZg/jya+eBCr6AeNQh3kBEA9zB3RdfERFHuJt7eGvqas3UzCPIws2NEJJIylmYSy591xVJb2zEjEiwqDNj6dFgRhRYAmHEyLi4Z3QZsjqqhaMtmjcUA0FMKMlJjJgdPdiAnAiRnNAZAUFc0YwIuwDWiefZhpxfPH787PLRZrVed8Ou7C4250/KWdKoZsdpGoaVZNY6GS2IyX7o1+uz8361Xu/
O12eP3KxONTw+/MlH5+fnb7314vNPPy2pUMCk87Mnlw8PNy+eP7tar159/vFQOiJx1cz88OozbDOmxCl3fX9zc3d3f9t33W63a61N8zz0q8NxbO73D3f399eMMXRlbjWXUnKCOiXCQvz8+Vurspbml5tVO5426+Ht7VmPtF1vV5JWq7UkOWd5a/3s/WdfSeUwz7fjqVJO6mzz3K/XbuYWqaym2lprhUVSYiQkZslLwVyIVNVtnq2Z6TIX+WmWqi3pQrBglO36vD5dfR9ehauD1cmqNwBzUwBjAmYQIW/VakRjcMTgePOsglE0cCRXb3MdqzVtS+MNI5asILg5A5vb3CaD1rTOXitYC9WwgCAia+HmbhC2RLbDHTAQQ8ygqaaUh9UgIgQ/9dgukp3m0ziaNq3aZuPteyuRYBQGMKsRQYEU6GFuXutsphACCBHWWlv+Pl943rE4HqJ5I8Yky8WZc84d58LSpYyZ3qQdwgGA2IkWv+iyXECLQCYmcDSQRe4tS+7N3Q2VMiIT5mBySYSCCwUfDJFiwf754iR0YO4gRCeFyTPBbr15dnV1eb4b1kPXDXkom7KCMaaHaViv15vNeDiUUhyW9DwRJwvQ1jhJWQ0o6fzi7PLq6vziEbp/55vf7Nbd2dVlPYwXjx5vdv1Z3++6/PH1Z7lkPO1fH/ab9RnApLU+3D3knIZhUPWSystXXxz2h812uz07n+b6+uUX4zTeHx6Oh7v94dZN3ZyY1W29WR3H48vrmxa6PtuV3c4tusR97tj5ydU5IgX4OufNet13Q0FJDRPnFy9eYAmtGI0VvY61ELbWwKPvh3Gqc9PSdbkrROhzizpLSrlfmVZXBVwuYQSBADFO0ziOqhWX2TNhAJU0PH721su1fnf/Uc7irTWdEEyEELC22bQRspAQlzAHVdAAo4iEjIbL9pSQQEHfLJWW6zUgEYjwEh9AD3VvEbNOLZrhIoc2BweHcDcPRAlHr2S6bIzCGxDy4jEC8Fy6nLtwXOD+rTVtOh4nndp0PE3jOM/Gq7d7phAQgGhePWCpzpupalOt7oZBbq7WWpsw6E0exdzN3iA+GJgwiRRJqUvLHjuJ5JQkCRCbofvCs8c32hbi8EXevswLHAgzJ1yWYhgIGAGORgLExAVZIIQIcwSQEzpEEBiipwAMVPMAZFzI+NYccbVeP3tyfn429F1XhjWkyD2CwenV/mx7cXZ+Fk33+5PkMj3s728OD3ev5+lwvH99uH01nk4Pd6/ub2769S73w9l2e7bd3N8/vPvB+0aw3aw2m441NuseEg190TZ6tdN4Avbx+DBVr00vz3bjWG/vbh4e9mW1ktJbYG0zUty8vp6ORwBLpRyPU8lDq1USB8Krly+Pp8Plo6vVqtNWH27vLs7POGFOJQsdxiN6nO92q76fJ3398nbaj8+/9P5uu57up3k6eSg0dNM6uWTklDQgSelKl/uCCOxBsVhlDD2SsGRp2mozJonwcRy1qbkyLfkAYABzTGV19c5bP8DrHx0/FqGms4IRLs3WQAb1qja7ejM1DQgUFgJkFAABWtxuFGHLvhacWISIBGzJYC/cH7No6oowoxlZsC8M5sQkjAxmwW5hCosvzMxcw41Cl/OdTdNcqwpl+Kl1sDWbx1rH6m461+k01qny8GxgYWECcAdzgDfdNvPW6ptrQKC5am2wnImWNPTCm0P0BXoPmCUJMzPllBPnnLqUJAyXCPByWofAJdztTurmAUiAEe4GQAxEBEgGYBGLFDpQkAVAINjdEDSzMaACkrtEQzD28ADDCFdk5uQcCE4oiZ9eXT66uBhWq1yyMAXPNMNQ+0fnj4Nhs+r7fsVE06woYjadDveEkZmmw1F1AvUFY3q7v099v05DyfLO73///uXLq2F1bIeRlA4Hr9PmydNoBiGTWZ1H0GamJdHpdPz008+s6fnVo3BaItggsF1vbNLgfH27N+L1sPLQhYL/cP/QDd1bz18cD8f9w/7u9o7CAaD
03TxPNy+/uDi7MIim8/F4evX6/tnzZ++9/z6oPuxvPRScx2kOB+47FJrMUhn6PCTghRFGzZGQZS0sHM3U2lwB3C3m+dTq1NoSpBUk0lYzp5TEVLlfbV48++bhJx/Nn2G4qs6zhqqqOZhGMxgJ3aLOba6TWYMwWvbDi0UGKWAx2nmE4pJ0JvDEgkDgIJQRMci5IDIEARIuzAU3xGWW5Bq+oOAiHGEpmRgACJFEgKpZtVYtAgSQwuaq2rTWVue5TtVVp3Ga55mHpz26pCRMjhBhbuaIvtTSalW1CLBwr61GLKxJ9ABdXDBAvuDuIxaBRRIppXSpS1mW+zAvPisE8wBgJkHkZi3cEJwQF5b4cq0OVCCLIA8MWJbeEajIhp7BxJuhYcTSt4jF/UgYiBQO7iFvtBNMzH1HF2fD+flFWXfUEbH4DH1dfeXiPRHqV90babVCpHQ87fcPt8ICKalC7oZUhpQzOIyng6xKbQ3m2ubjxZPzs8dXL7/1ve2T9dgOp+vbh+Phxdvv3e+PYvAwHsPa4/OzcRprnQDi5u7B3YGw1nZ2fmbgEZ4ln06TAc9qpcsCwSSTzo+urvZ397vdWcnD8XT66OMPIQCR1kPPzJ98/Al4lFwOx+OpzgnlyeOn7331AzLY399WNDDSGqc69UPHqXO1Lg/uHOrgCMxIKUsWzlRKLt0bWrjqXKu5Qtg0TfM8qmpOKcAhsCslmIn4bPdo/d47//LT3/7B7Y9QYxqn6XQ0natOVWdzd53CnJgC0Kq1qvPYdAZXAAvX6gAYpBCqjZALsbkJCqEQACGlyAEBpEtJigGEkTkCnSAiwDzMMXRRoxIahfHCpViMLYhIIIziHmYtwpkFI1zNVOs0W2t1rnOdT9OeV89WdbZElgjA1c1C3XRWsKY2z2C+4OTMLQCX7EOYtQWwHsv7H0FNwyOlVEru0iLmoAVyj7IICx0gwsLUXSFCTZ08CGJx8wFF8JKVEw9WV4vqIYgJWBkENQuKqpoCukTz0AWCqEvVlICF0GxOJJkLZoJ02qzy06unwyrNug8UOnJ3TB9cPnGsU53ncbw/HupJp6k+XH+xv7n18JxXKF2/Gdbrs0NrOaXLR5fnm/XTi7NgI/KbDz998ZW3yvnq+sOf9AGBDCJpKFeXj1+//HhuD13ucuoawPE4e7O5WepkfzogR78ehqG/GLbH+/3D8eTM/bpbd6WkfP36evfoqraZEQjTfn+c6vjDH3zv3Xffefzs8TRPr26uD4cDkHTDalJd+jQXVxcNY26tzlMCqbON89yXgUhO44mpMCVCIuSSulwGlgSOyNwlDmLuViywTDgYgrnUVk+nU9dlEZ7mKUkqqXNJjvBk95SePfrH3/pn33/5fQbRycy0tXGuc23VWniry6sYDBFSBM/TFLXGbGFokJEIQtyDlqABBjKxoxsuXXAKZwYidg8BBDQEUgh3IETwCDNXAzVrjibknSuqerizMKIREgYhMEDEm2o4MeE8z21u4NbmWVVrG9VnXj8dzJsFopMrtKamas28YSguSrb4NxcQx+ZBSGbuSxgIYgnomztEpJS7lBdHOy/CpsQiQkxAqOZmoUstUsEUm4ZahBsyICLEkhJij0CK+qb/brQk0z2shQCCkTV0awzohui4aNTeqMAEnC0xdmQdc0p52Gy260StnG6nK7h4b/O21uk43V9/+noe23g83N/tx/lU5+N0PI2nk3u11lqN1dnZl778rmJ8cf3Fbii79apkRrRWjy8//vSdn/3K/rObdn+IIQfQ7RcvH18+vjvi1dBRspcPD+e7iyxye3c7T6N7AOTd2eMlDnJ5dXV7f+NmrcWwXW36fjod7k/71dBrtYuzJ9e3Nyihcx1Ph3fffw9d2qx3d3f7/UMWIcachZG1VrPJ6+zAYH7a76fakAmYZq3i1FASIEkGhEAzQgEWAARwB/QQBGJOXelKNouqNUJFCIjrXAVh1RUiMTImefb0vbbe/LVf/Ruf375c4cq
iQqU6m6mqVg8N9AB3h6azWYAzSwKLmMkNjWfBYujuc7/4zPtOgIhIECMMGRbvkKMGOgUgZURzNABABUByjHAl6wv2TOyOaIqYgoCAAAtiWjDrKJRTlySDWasa0eaparUAN1X3MBt5+3xFSNpaaLiHNm/NwsgdTMHdECUiTG2pI4YuyGloqgHhvmSagRwglrUx5ZIXlJmISJLFH9PUF5KJq2lzV9W5aXMPDFx0igsaaVFVIRi15tFC4s0j7hGhgY7hmYKtqdYAF4CGCAKZlk4wCKAwRielH0A6Hta7nJOpyUhP8pP3r95DjZe31w/7/fHhOI5Ha7W22sZpOh1bM20GHsPQOTWw+b33P3j+/NnnX3y4P1wvgzJGfH33eXt1942f/bnf/fEPURuB337xUi0uHj8fD6+7JHPz0nVqfndze3vzKufEmLdnW6KUU6YU97cPzFkkW+i6L4f7QxmGUnLfr0/j/Or27uLi7NPPPn3y9Mnl46txPB4OD6fTOI6TgSNhM5Ocd9vtNJ4YKUPZ7/f7w55FRHIzZRFwWHKOiOABpmCGKecIj+bIISzqbG5CVHIBiqajVsiCVauHlS4RiXAB5H7VX7z9zvXh9q/+1//Z3bynCnWeW53m1g7TwawBmLam1qZa1UwrWENcOL1Opt60BjC6siCRZ5EskoVlObIQYgAgquviiAREoAZhizecGQGwNUtAhbehZBYLA6C5IyESAsgb5zimvuu6oeeFkB8B7mFe52nJ2c91Vqs8PC3gYLV5VTIwtyXPsEwhEWihT8ZPZQy+jH/cm9vSy7KF2LgEms0hIom8OfwTMS5hO1yWaBERjmGual7dZw8Nd3MwRIbAf3MRbOrhDE7oCIFuBO4YAC5hblbdmusCjkRZEEWIQsCSGDMiSgZOuCppu1pxyVXH4XhxIeeXu11muX99Z+oIdLo/PLx+XavqVLXNqi0szBoLvnjriQh+9MlPrh5fvvP+u599+tHdzct5HFPqEvhnH334cLp/74N3Pvnoo47F23yc91ePtvOkOa8gzE0P+/v96eawv+/yjjtmofOzc49otbV5Pp0OZ7tdncculcPxYbvbch5OUzVtEF5Kev367urxE49Ipbu9u71++UXXDXOdkpTEaXGSW0RgHO5ur+9uzs/PAek0nQIhKLF0gSkc5jqaVYAoXde8NWs0JMzJKLggeJz2x+NxT4Q5szCD4zSdkE2kL2VVvSYqVxcX3Vtv/c4Pfu+v/8P/8niadLTpOB9Ox6nVh9PDaTwsyJup1VnnVltTs8qhPp+m6aBmDYjA0bURESTKCQpCKalwEHlCcY8Ac2yLP8G9BSghuy15TXMDwcxRfOk+m6tBBKEhUBIoy7kcgIRTyiWnlJjJCeBNTmfJfUKAVaut8erpKmLpRXmYI4C9iXN6BCKRWbjF4po1WyLPoI4QvEDZFv2Ug/sCfY1gokVYnUSInJHDQc0QwZq7RmtqLbxFm1SrmmlYENKbYJ2DGyyHrlj2ZxYUiB7gYIoeFL5cPgqQpmXbxpTSG3IMEjMxZgLGVe6HQrmAa3m3/5nLvKmnh/OzTavuZoBx9/r1/e2dV0Bg9WmapwhAdAAoXTo/v+w6/vCjH7nBk8fP6nSo8zTOIxi0aJ9+/OOLy4v1sH59fb17dLE/3iXudhePtE3aqlCu437S4zzWWXm9W7HkzXp9Op1MPbyN+0PuS05pOo3VWtd14XCa5r4UaxUNh9VmfbFRU0n93f1pmufEeBoPfbfCpbboYO5zq3d3r/vtOsz3xwMnLikzSdOYTlML9KbhQCCubdKTtpkZCTnA3BqS55Ka1qlWRg60w2mc57lI3m4vAmB/ut+dbzabVT5/9J0Pf/Sf//2/s9+f6mT3x4fjaZ7GqlaneZrmmYiAUV3B3S0gsgAnkPnQxuNsRsBk5iLYD5zYS8IksUjdChdw1JgM9E0kMypGmPtyf8QgJiHjCCBAi8UWSIJIPzVnBIGpMgsRI5EIY2CY24LM8cD
lg1XNzLUqr66GsGWKgwsNg5wW52AERqBZvFEBOEAwOIUtuQMCCyYiQASAFmaxXDdEJOX85nFkDGBA0kUSoNGqa9VoTdXGaZqrxfJ5X8oEBKoKgAC5zQ0W0pUGLHpnC/MgFqRgJkmceyy9i7BIkYQkyszIQYSEzJmplK5IlwvMw9fPvv6lx2/X64dpmrCj2pQd7+5v1Zs3qq151JTSPFVhSl3Xr9aYCT0y+mefflTneTucmdvt7UuhRASS5NMvXj55+gw8SteVYYWxSiXf31wjxGoYxnEaD2OiDplW67PVsIYQInLTOjetEwl3pbhZbTXnTtUO48Fq7buuqvabdcpJiE+n8dXLawwbx33uU1e6aZyYGVEgUKe5255z4OHmzt04pyJJmzZr682mDH1YhDkAjodja1pSEfN6PMz7g05K4VrnnJKQMdHpVF/dfFHyqu92SHY67RH53befM0l68uK/+rVf/i/+yf+XqUBFa3Mdl6NFi5gtFAlyt9TNkCllWmVIEizRE9A0zc3MhVms77HrUuEAaoiQMgsKQDhbs8okQMEUgkyJkRzdQAEVwH0hX7FiYBEgAHRyDjA0QCRkIpYkTPzmCrysd5dJv0I4tKZzNQ+QACMIX9icgK4RuFThZfm2kkQR6F6JISUhFgglAgRHRGsCiAu5P9w8KIzcUSNaLM4nB6gL2oUJhSPhcmx3Yl+gKuHsCorhaCQIiGYaPnsYAQIEEYFBOBMgSiAbi6ckktGZUxJmRoSIQEwighKAQaIRMrdp1u08xwpwEGKHYbs+He9rbVmSsne5a8NmgjY9VDfzqpvVGsg55wBobVbzTLgZ0t3NF17nt99+zmKvr19tt9v5cBLpPv7443fefvvlJ5+9/cF7bKVNbWwaAN40LFqzOs3d0GeCeTy1Nvf9KsKO4wGxdr0wpc9vXzrOK+uF16ZxsMOwWW/WAyDPk7Y6Tqe70NNpf0iC2/Vam+ahbC/PTfF4eNiuhob5NJ4IDVgg6HCacumvLs9JVqeTgiShnCiZs9da0pYTgk7Y6jzttU1mXpi3m/5h3t/ta5d3w2pYrYZpOu73+w/eeZclYwxY0ne+9x2bYlivHBp4nxKxo9UZRLnjqZ1o4lKGQBKiwoTBc3gpadWvam23p70zHE/KOYY1AioGUwQRIBgnI/dEbB6oAQkNfFHvIieBwHCRQpEgQFJCSwhu6IQCFG7N3SUVVUUMFtJmvNDhkTCQUMJbBHoLtEBEQUQIxTBEjIW2SeIQcHJITBwYjIiuTgzgAWzC9IY6HeQWyxweFxwRoDZvs4ZGqJsZIgHqUlnInBrq8qWRcxeBfedew5TAwNlDQUdAljD1mGlBexNSCoAIRSRoCMjRlVRKwkzEvowQkJyZkqSUiMWDDAozL683Ok0j2vT6+vqS1scYMcOg6WFqTWtKiaRbbbjZNO5xnKsk3mzW41TRqxm3cTSKgBCOOh+++PzTy0eXZnZ/d7sZuu36/O7u/mXJj5+/ePnJZ1/+8pfnCZyRcnHI01gp0XRsVNP+8FrDk5RS3p6rRljOeZrGLz67Do0QnVs1r2belQIoktPh8JCoI/ZpfBiPD4icc/FGTNKv18ES7tuzzc3167HpbjVsL85Gg6axXm+kX6vGdDzU5oSdpJxL36/PE8LyWxJvFPcahyR51125tanp/f3RHM83T1JyAJtnBZBh3c/Tcf3o2e3dy9/4rV/lDlOHaMA5g1jzxoAUxTTAYppP6sgCm7QCrKpMRMi4IOfJo83VDQ/iZ1sbMgMSIVprLEAJYQ4mADcQRiYgd7XFFkwUhMEIGVYo6iAUGEiCCKHGGIrNNMwI0MyYnYnMLCG1cNOAJX9h1lpDDEbk9eUKdDlPBoQjcEAsnjpCREQ3C38Drgogxrx4+QA5nCPYFokBwJKZQ0QRGUonjAUpfBmnEjiEoTXSiilJJqFAcK3N5moeGG7RWrSkraEDBRMCI4pQRAC5MAcHCkqm3AEll8JJhHjRdHvuqes
kF04ZuAALdrIKUUQpBHrirV4+v7joWI7H0TAE8TTOk2mtR21a6wyAm836NO9ZCMIyy2rbtdpqra1NhEGE43j67PPPt5urdbcyMEF66/lbx/FU51a6bn946ZVX26fuBn4yMA9KkllKgD3sbyyilJW7QSB6TON0ON6thiJpYC6Hw/1Yp/X6LCdq2sbjuFmtT8eHH//gu1q9Hzopq6FfoYgjqQUh3l6/aq3tNmsz6/r1VNt6NayGoWokLOvhbLvZ5DJkzlOt4zi3cR5Ph3F/mE+HaTrprKDqaqdxvrm5AYX17gzJofnxYX9/dyslXT2+UIv1ixd/75f/yf/p7/3N7dV6kESZSIzFSRCZkDMSMZuTq4bFRER96sEJjdnYnU5tOtW5TSHgQJZdVlIMLTBEEqEHGQjGAoSSCApOkgQRDRctIEVKuUAmBAhiEEJAISSHxQposWBEwmlBNcDSTas6naZpmuvcWtMIA0Bi4d2jTQR4NMRgWOSS9lMkkYUtsR8KBwISQkQkDMQMKD9F9FCARwQGMBGipJRKkS6J4+Jy+ik7zikMIxzcUhJGaurTPJmGVUZlVwMXACGWtPhCKByJIQECJ0aCtID/E3ICEYQUGETsuYPSldSl1EVOESkCIyVFTNGkk1RgeCRP15QBnMgP93ttRsJNdZqbh4KDqSXh8/PzXCgVqjZnQWEKb63NhJiQs3AhvL57terXV7vL+VQD4tnTZ7fXd+fnj4L0dKhXj97JQ7q9edmXToQlFVXf7s5vb27A0zTp5ePzruvqMQIaYDiqSLq/26cub9brVcnW7P5w64rrfvji849efvbJbrNZb7bIpZQ+d3m2hkg2T6HzejXUcSSksdZqrfQbNUwlMXdplQ3NXU/jMdDcbZwmbZYIicLfOIbgdJwOp72Fr/ozKaJVT8fDeDpAxPZsu9l0qRuGyyd/7e/9rV/56LfW6+3KhQo6mBAQL/2pQAZOYuggjTpw88K547UbszF6zCc/TdXqFMxgjKpSMGWiQMAqiQgJIILMMMwjQAlj2SsJQZYkIT2tEWAZFwUALFoVojDXCA9bPORCpLWFujcws7nZm4PJkrFkSkmYkfur5GHmhsGIGZECGCG5xZISAEQCRiIAoKXMzgAEiIyA/wblgAzkwIwkxIlkeSsQAXAEuQliAoDA6mHqLiTMubk1C21u1cOCGAk9clDBVFgSAQAEMGIQASESMSdOiEtERJAAICIlTplTV6RPlBAEmHJQVJyEJPk6z/lJfvpMHmcsHu5Wx8P+4XCIiDaruhNynU8AHt5KH/3AQ58RKwIg6ul0yyIIETCbW+l7YpjGE5I/e/HiVOt4PLz/3pccdD2cE7vqhMQJUzvNw2aQbnWa6/pyo9o6FEYo2/NIXe71tD+Zq0iZx9PhcNysd6XvgfLd64fjwzUXJE0PN1+MdX9x/oTL2m1erdfdcH6apq5Lp9NECLVVN5h1nqztzs7DYZ7GeR7V23E8WCWRTBJRW5aCjDpPbj7Z6GbWFAPH8TTPp+1qq5BYEDhOpwO497vN7uJsyGl19ThdPfrb/+gffOuz3xtWkMmDgBc7dBJzo4SUAkUxGWVwEnVHp4QpKkZVbdWa17GNqmiURUZ0B8iUgCTQIgiFnJZUNLpbTjkFESAGIiCBMAtA0gAMRyBYDiOAulhf3uQ6iIExyNvC9dQIqE1hqZlZQwpO9CanM1ysIt6keiIs3BAwYCH2IDiGIzMREhOFh9CiB4bFDoAAHosCDoRQmEkwZeHETCJSALDOGgEiCQDdFrotIjIye2CtzeamVWPJvXVAGbmQpJClmokIGBBAuEQmAImYg3jZPVA4IQIQYArJIMmRlJPklBg8dCl35C2tz61nYQfb395PpxGT1Glq86ym1uacQMi6TABN20LfR2aBwNNpX/KqdMUhcl4hCXLenW3DfH9fHz95NtXDeDpAyo92T8Y6mXnVVueZQp3AAHeXVyJpHsdSinolgOePnh4Otzd3dynlJNz
aNI51e/FIcrm5ufvJh99fiTl1q7Oz/d3NPM/dZpdltbs8W+8ubm9ORD7XUaRTnXSeqlV3Xm0uVWN/f68613YUpjJs+/68mbkqGt6/vj/e3Y/HY6uTNrPmoXWurdaYWwBhPwy73Vqt1rDS96WU7WqTS9p+5YPr24d//K9+9aW9ZmopCwBFOAtwYuQABhbAhMEBhIbgCGCGHuSsc1VX4giI6hoa4sREro0JKVviFA4OgUzECQk4EbHQ8j8GJkR3ROIIimBQC0cPCEMPaPbGK+wRYYRBYRHL4ugNnwFwEWaHL1+5JMzCAssAPhDCINrynoYwBEBgAEQAbW42BuScxQPDgSwSuHAYLQ8nhIMLARKhMAou4yCNpt6aikBKCYLdUnggNESEEIIqxClR12OzaOHIb9QDgogEHGTmtow/39xKEBCACTzeGKcdQwM5TBsG5pKRgMgIM9MGyE7z4X48fjLHOnd5ldqRvFnKA5JRpvBIDBmkK11t5KaAwzSfaq1EZN4Ipe9Xkmjo16rd0O9SSh7zMGw4/Ob1yx98/1vPn7/lga3ZXE/b7fnDwwOxPNxc77rkzVNHfc7j7JeP353uP3/56raUMo17n2O9Xs/jKXEAihF16xU6fvLj74ZPuftg2FysdltIHUnfr3brfpty/uTTV+FSUktMx/1DbYcsJVPnmKZpMp+YnKmszs4BRYLmaY/IOtfb6/3h4UGI3QLC1ZwDTFXRJPWZJfdDN/A4n47zaXOxW2237XjCMCk7SP0Xx7vuaniyP5/nwV3b/FDnQAbhCAsPfMO3dG7NCN0wKvjME3FCIWIUi5XTWOOkEeY53JFaQPUQtZyzNUuMmYUQkDEAmgZSICCRqLk1dSAKNyNGAEf3cAtHNPXZF2XoGxSnmmIEkRBgYCwKTSRbsnqLXog3jwZEClu2xwwhZhhgsbTbidwNIFgICSE4yH256UKEmQf+lNZPAcBJck4JBZGRYDEBm/niuUFcPoTmFghEga3WVmtEIKFjAC0ouAXOHZTIAcKc/f+vCnyToGYAYI/QJbDqgESYQgrlTliYMFiyAzVDDleNh2nusb8oF0POrY4elomZWHIuOW/Xa0DLWbq+Tyml0okkD2jVUpJuVRCg1pNkYHZAHfeH0nHpu0dXT5CYCS8uLkvaKkJJCKBmgQ77h3ti2G3OSuqCsC+Dt9Onn/zw4skzLuubmxvC0Ho8POx3u8eb7Vli/vTDD/c3Xzx/+90X73+tH1aDoM3T7nLDiVLqbm5vWTKzt/HgdartlHLpVtvpWFXHuR3dqjXry9Yj3T6cEKUvyU2Px0m4O7+4VHewCE6uDUybOqYEoMK4Wm9O88R9d355gQgm2A/p6ury/NnzH3/+8nc++93vfvaj69N9nzkJuDBlTlQiyBwMQjIBO6YIchJxMEB3h4x9JhQKAaIwiGYeFqliYJeRUsqJCyMHvnEFGzMiomsDZoiflmCamwUaRoA1t+bNMAwNfNK51eZvHhGP8DA3DW8eEYQYjhFY64Tkkog5ByKSyxuYYThy4JK/AIjghYqKuNDf3V0oKBZvSsUAaot+b7EEeLAEkjDz8mPuvgx2lgREM3NFL0H85gjkDm4a5ozMLJxD3D3AXCEANQLB0AEAjMEDwxHJAyPMEWwOJQNCd1JVFgwAymQ15tEBRFgg1B1VAzwpehNQr3Ozgx5WfTfP8zSPZ9udpAwRVSdBsRrMCbm2URkT+Nj1KcJLGjYXq5cvP6+jnm2G1WrVut7MoukX95/studDGp48fu7Exxo6XUcEmq/yMOHRImq0zarzadLWJHecUp+LO4gMzKf7w8PxUJ8+zY+vrn7y4Q9fvvp4vTu7uHzx+NHFaDrf71dl5SVU52k+bbdnEHV/+2ocZwAathsgvr+/BZubz25IOAz9RgPG+/vUD7mU2to01aHb7LaXL1/d3N0fXjx/Olp99ZN71Jb7Iffd6fgQzvu7oxTouCBCs7rBIQWUftiL/JMf/Ktvvfzedbu
GnnXWXkpIqtUNtc3zQq8DBBExUuQAx8wc0FqzSlPGJIICSdR1SM2iRaoNEQwB3cksJCEJL3dKCIJAQgDzJUxvvqR0IjAEoJlqdQ9llAibbAZzhuIU4UYA6LCUDhYaj/tP0WcLxxAAEQ1M3IIYGQPRkYAE0EMbhCIygUcENAVkl0SIaK6oVt3CCXnZfDvyYuRlQQpzwyACdSIKRDKrrVrDMPPF8QQAod5aVbdAWFj/uLQQkBZyKARAAwhy89Y8XJfXPhFEUGsaFICsgGqO2MKkdFRHYFZCpiQW5pC8mUARBp+NJRODq2vVi4tLDiR/U/Q3BZGU+26aTmqt1dENkKLknlmOx6NwfvvFB7c3d/PkKfl2+9i1blary8vd9fX19WEadpsvf+P33e3r3cs2ja9rm9jp6upqijmY1MfVsKktDg+f1dG0YS7w7OriBx9fT83OL6+aN2Ca2oRJnrz1/tnFpav3fRlvD/1qfapjneL88uL13e315z/Y9P3l5aOQ7uZ2b/XAUMf5GBFnm8cspZrOY12tt2dnFxEWmDarNQVf37z67NVn24v+ra+89eEPf4wBXDpKMo2neVJadTnJZpDjw81h8rOrR+cXT1lr3/ff29/+4P7zl/bgObbRj4ynNksQd9AUMQsaCCKiRyiiuLoIgQdjyiTRamSgnIQUXFLomvLczFyFVBJFWKvBTCJAwuHRzDInohTWINDNVR0ihcESl9RqVnVuEU4BHugE6D4vCbQIxGUGCmG2UGuxNWVmSUTMHuHg4FWEOUIDnEhIkHhRWQkw1lnjp6944UQQQMYsZB7NNRBTZrGAxdnMKSgilsWwhZMDI1kQYRcxtVYXYihzEBGo1VqnVtUUAJaNLyL8NAuHC0g9zLVaxAK+ckAOIvfm4EG0mJfAXcMroNY0jWYITqYFmSA8xLA1M4JtObvcPO27TY+GWk3nlAatLVQ1NBFoVUBD8DAnFE54dnbZ5sndu6uz4173D9PubJ2kO46nNh5XZ4O6P758EZA//uTHH3/+o8357q23vzwdjoe7a0A+zuOOUPLOw7p+kzC3dqy1fvm9rz168tbDfP9wukmprFeXfc+PHl8wJ4cY1punT19cXV1U8CI50ai1glMZ+tM83d5+kcrq6dtfrqoff/KqcOo6cRPp+kRpHtvhdAfsuazneX51/QnKar0627cZDOaTXV2cvf3B2/M817H2w6pqvb+/m/YPqUsFi0eM0zxr67ddGEDXb7cXgfN4uNvrfbBnlmqVSIQ8UFtTYGLgwGV00yAgzIUJiSQXbC0FFKIkzBiZVTmkoEQwGaSgXhw90Ah4bkrCEogYEuFYkZKQWNg8V18AaxGgNmuzFlatNkMnBDZwgkALLgmBa60AgILo4GoAi53EF1xVmAeEZJau4/4iEQqQcHJiZRZEZEZAanpyg4ycO8kdclJiJJyFiio0nQkgUV6EqZiR0+K4YeEMwACQkiQsauCmphrNogJA8tqOdTxNbZ5mqDPAGxZKM9CGERwYZhoNY0JvWJtnUHIGz76U+x1CGS3CFAzRAJQhUoAhIgJjBBq4NrDelaY4iXbvdM8f7Z70wypLQotaa5gJAmeGwAg3tbmeVBsh5lI4yWq1Es4R+fzRo64r8/G4GoZHjx5Lv97tztfbHZBfPH4seXV9fXP76vU41/Uqq+vd7V1oPbY5qMsl9R0SpYfb22Ykq9L3NJ/G03EsnAHg5vbV2fl2KcJeXT7ZXl6y8Gq9mY+n+8PnNzc3gqtguLu/Pbs4u3r0VI0+f/nZdp2JoAIkWWnANM/H8b5f567fltK7YimZhc0ijCAs9Hj55JKG4faL14fXD69f37g38FnIcjc4lWgTWnDJYI0M3v7q7xveewuq//qPf+2bd58KSKta3awaajQzs2UBasiOC08GnZCInEvOQfJGfohdHrpEnTfV0R1caT9O1QMSkrASuQRxAgxIRGELoc3DgzD+DZokEjppddPwuUEQLmFJDPcApJREWJ/
2jcMAAIAASURBVNwhkRABE4lIOOTcEyGqLZ2TCOeBuZNVPwiFc/FADHzDGCQkgxmFUurm2UEsZWLBnDkQwknYTsdqxljQrSIiQ5ihahRJboAdEdnC4kNyEYpcIsBqm+roVQUwrM7zrG7EkBJ7mFpFpWgKJoAJDdUswiKCgdQYyYEagnMJylybm6K2EmDMbObzNHKS2QlVQ9PMVUIgaVn1gz95zJu3Hz0/79dtPoHaNE2AsV53WWhqNQGBJ2PgJBHOBCklAra2GDHV2ry7vHr61ruvX10/zPHo7CKl3IIxmGD46pevzs+2H/7k029++3uU9Btf+kByvpn2WYY+pHSpKbH41KpI7rp1gM9z63J3c/OaWZ49fU9nRJyfPn1mioykrqpKROG5lF3qu2Mdd7vdlruXd/fYb54/e+vm+rP7h+MwrFvU6XTfWjvbXUmi4/GIMOVcxrERVxGzmHOXNhfbeTrxsWvz+OrVF83bfLI6PgxDl0sPEF6rh/S5G8dmduo2K0hPceekAgBTzK1pKDrj0m30aBAkQguWoQEgkmNtXnvtMAkGUDhHCCiGt5gBXSHe/FsDBQnAQ4CFAKt5gAUxKzkjegNTFpYEEcjaIhq4oxsGZPOGJAThqokppSScMJwCI5CZ3M3d+tIhpimC+9UyFcUU0smw6XJOIpmHYWDx1maIjOiUWSA3nXnKiKhROUnpIGVwCHA2bxbOWMKghaWUwMENTUO19QMTG7IAkFlQmKQkqc85W60Pdw8PD+PokNzNKkSYAAG6a3Vrjj4ahCxAaQ9abvyJGAO9NUrKCYcs0vNx1nnycNeKqkYMesIZ3KraFFaFhkjkrc4G8mTYvn/23uPhCq1N497bydWYubW225wJ5+nhgBhLYyGlVEppTYUzBh11zIxgen/YO+Wrp89zFvAmktt4fPXy0x999+HRbvXWu+984+t/sKwv/u4/+Fuff/bhz37jD5QyIBQu7JHn5lkcEDnharM+Hca3n7/9e9/5prb57OLifPu4ttPx8PD40bOH+5OZrfJQp1kgwPthvQ6rpXTrYTOe2ubiSbde333+yXSc+lLq6eF0OnGJbugDsWkM61WdZrVxnkxyx2Krza7vs7v1XT8djtP+mBAmtXracwDnzWyhh7uSUTmHc171z56+K9IHAuScsK/TeKiza63zOLqiO7TWqiNYFgAHZDQ3QLDw5tG1ikwcQJ5ESIgYzN0CCBmIASAMIAM6uSAxRIQZILovqmQPECNgCg9wIQ9XcI0wMHVGIhJXh8AspevycvSHJf8ZCB4AS25AwKzjCAoA4pxR0EhZQFil9DkLp+KlSGupRcVkbkDIUoAmjGBgzx0xozOAUpgAz17VLTORIzgguoIzYORcmJeCDC4woEE4C0dP1TvFmOv9cX86ztURJAEvn7Jm7ARq40wABqREGAhCFIFuQRERLoybde43BGwgvN508ylO99HmarOFSgNwra1R6DFOKJ1sV0NpleaHpvV0mhgV0FQniFjnM8F888XtelhJovu7o7u36n3fM+eUuqXn2ZXBVes8hR6KlMrQjnb7cH93/er+9au7/c1Umx2O69Xq/Ol7f+xP/zt/+S/+9//q/+3/XNtv/cLP/tKpSffo8ni8S5OtOUfEsCkBmohPh2NgnJ+fHw73wl0SWg+rMC8dE6d5spzBTJC7qR7CmqRhrrp9fLXaXpzuX+ciq8324eZ6qveUZbvadN1wPLRUOqsK4IyRCUWS9CtOyTXmOgmlOkedp+aWWCrN6/XVanv5+u41h20uLoCKElxeXDx+cnaaX/XwPAzv9ncPrx8eXKFWbbW25q1hGGggQ50cSYKawhQLIZNkMk3SennTFFwKvoGdgbJgMEQWd2/g7EGmgAQsyGhN1YFZABEDMUDNIUidKQwI7c14RhEJcTG1YzgTp4AAdHcngYgQplwSYyBBlwQ8jCkPnYNP5mbNkSUX5DLljjjJw2mOSRGDSAOIM7C4G0EgEbB4YFiqhCs
R1OZhEOzuoUwdOUYk4QgD1AAA9K7riagkWfUZRGoYsZOF2mgM0RwSS3EOtWaSCDUog1dbdNLhEGbMjCQBFRnLWlbnTKVFxOXZtmlAdV6Tlzwep7mqTnOWrpce0aD60K13Zf3e7uzx7qrLm5PWLqrPjVG6fqCUCamZvXz5eb9aI0Cd55wzc3Ijlpw4ap3MTShT4dbmNh5eT8dXr2/raTzsxx/+4NNPr1+3cKGyW9Hw8ndeXV//+T/37//lv/hX/trf/M83P/rixbtfuTuMHXGW3FpDpPE4XTy7OOr+7v6hX/f1MG93Z6uhS4m32y0RUWYS4Zwwxc1+bgFzm/b7/cVVvrh4vNvtxvmUxBDs7uZVL7EadtL1QIU5dUPKub+5edX1CR0xOQqENa3Zw8BhgtNhf7zb3589ubCppu7sfPf4eJxtPr54+0ULJYaLR4922/L6eN9/8cXwYsT+8jjh3c1rLVxHtUm1za01ImCBcCJMKBI0veGWBGLj6rUWdskA4GABQEQgQZoIghkpEwIKExEEvpGqZGBERgWPAIS2pNAAHNjM3asruBeEJIvcBw1goTEjEJq28JDF6IIBECKSJbXWaGGWFCHBeVYKUHcwln4jJDNlyIVzszoZAjAFsnsTSaEQAIKIEUrojgZcAxyhiwDwoABAJ8wsiGSAoRZAXsrQ91kCS8lp6CTnNeKu73rJxnW/f7h5vQeunBzRyIE0BafeWtTl5bIsIDTIFucTZNo8KWXbKEOfh0Rl/zBLCkZoHOLkEAK0Kt269IbiqIyRUKKl6VgTHI7JEHpQFU4AhIinw7FOo6ve3d0NuSOiUgosNU43Yu67DADaYJqhaTxcHygXyOcffvj627/7o4fR1Fa1Hcd20pvTxUB395/f3/3NP/fv/tk//kt/8re/8723v/S10/4IfX56vr25f1nn9uLRIzs2CMNM2eiorVTvzlM/ZIW4PL86Hvel61EwIvaH+9B2miqhdF0eNutq7XB/PR5ubz7//Gy3ksyJOyKaDKZqpe9as1RyKSlAZju2aSwcrkZECK4aieStt55O1Sc9Pn787n6v43TYna0piSo8urrIOe3ncZsud7IyvaGcuq4zszbadJqjuc82TxNmGUrmLoLAohEJBSvYMspGTtWjerDP5EAoJMkCESPQIdGS5CQBYTYGIARUd00gQKihECCQVJ0ZA4IIAWPJOgASQkJsy+S15MwC5hMSUhAAmjXiRTpkkvLyXhbBkgIxPEVzI4SgEBmIJSW23EOuBAeNYCdgMpbgBNaWeIUDB2BkThiJaFZoIoDI4BjqgYyZjEdKWd0QmDiInFNZDUPOWXJKKTFuSumd/YtXcKwPk8/eiYgImVTVit4rrQgAWgAgIxcDa2YEsTrv85kPO+q6Qg5sMSWWNBskwaCmCct6fd7lklIaKKmg4vHV9Y0ex0fbzcXpZMceL55upBcSDW/HfT2NdZrcVdsphs12vTuepoAGoUNeMngiIiyZMGFZn46n+7v6/Q+//5PPXr3/la89Xw2nh+mwv73evx5nPe2n3/zRFx/e3u3rf/2n/vQfe/Hes0nnZ0+e13a6q+1UbdcPZgZuFFCkONb1sEopqVvpVobgEdvdpYYG8HRqq+Hs5tVnD/vx8eOr3e5yrqZuOXcqw7NnXwqxKSqYoMca0R8e2lzbPK+7PjxYJDGrNq8n7KRZZEwpZ2QKMIr25OnjTz66PRxazrkbZIx2cXl2dX5uPKlKnyA65ry5//x7/+o7vzLV0AZ6MreGChhUhBnY5iZDODmDMCTFBoIBiGKK0CKFMZl5MDAVQBNd4H+JKTE6mqKyFCRjYfBAR3/D4kRYsGmGphMEmaE6EjmhI7lXRYYiaaH+uHtKmYAjAoBNDQBaa00hSyRmRLUGIpKIGWkZM0nKwcxA1Vmo58gQRhzq7lKcBYgTYlhYEvblu8dl2CAnh2ZhAZAIILiSFMkcFIt+rLW5SWbpUkqlF5FUUp9T1+duhrFZPdnd3bE
yIZCUjVC1w8FKIx4wrwmR2+hEDMBzM0+Quxg2uFozUZACgQmHFCFOoJ4ASWNzuepKjwQDClNuUEa+GVbd+frJo67PhFODDgO5AbRax3mcIyKiAfrpdOi6rvn4sL9m15kkUfZQZCDMm4tnz9760uXT9d/5u//wk89f/+E/8m+V1cCEf+Iv/amyvfx///W//eu/8s/KZd1uH3/7W/96//Ctt99958k7zx4O+69+ddvR5UcvP2RC9JmoN4hoMwbc3x3WXXd+sctdZ74wCeY0rAFJrZeutYeb24fDo2dPn794gdzfP4zry13fb4G73A+n/QPVY+p6aHZ4uCMCrT50GRFrw1XH/VBQnK1FRoQkkSiX0qVVP5Dwq9fjrMd+GAjYrD46O39yuVYfKWKVEkhEIoD+o08+fXVzOB7382nEyq6QUlqvNv1qve1LUz3CQ0DjkE541NkjIxb345v6ObEpKRAAFKGanJVNTQAJRZMxGYKnn7a+AiEmwyBADg9EDA1oZh7NEyATOaIuAf5FM+puP7XFE0RzCwCMQHebJhNRIQlRETEPZTYLRHY3ceAX31hLMhYTRkZqszMxOhGSB7uDNy0FVucpD5QIEIiIiD314mEcOXDKneQhdQP3PQASWlQ91HkOyB33XZ+HYVNKVzruenKEAEHiU9sjz0whCMxNugUjRJsd785tvc5UiFJgZ8N5161LXyD3VnrGiOoNoYDkcHJsjhMw5q7r+7xer/u+6zerPDCkmPSEYJthWIv0sircJYRwdQ/VYACoOnoj6ihIa1Wbx3HvHtNxOp1Ojhpgm7Ord7/2i+fPvvLZ7enXf/d3L148PTs7+7Vf++2PP7/7T/7T/93jxxd/9f/6n/3uj370+ublk0fnX/rKB7/73e9zxJfeev/86slmuxnWHYTa8RB1XO96c5qPx+PhtutXfeklp4vdpRpzwvV2havd5H3u+3FsR5ve/dLbzx8/a6p3d/eqcbq/2+9vMKfjaVznnkKQCqeOCIauL1lASnXNvahqAEpyt4BmXemkrIf1MGy3JW9yXq82w/pip6ba5rPz7ZOnj1u06g0jUubd+VvD+x+gH//h3/+7//DX/sVeT0PqqCEEdauyLv2qrNerbjNc9mk7xRzUaujJWyAKSABlkIQ4iCRo6wSrLgmlCLQgMx5rnsIkYcrCREGBRGAqJgICYQARSOpg4RCEQUzIQkAYAOhMJEgmQhggLCUlouoRhEqh5CkCLKoI9p1IXkDMTmgRoOamYcFC3EQKCzMhZhyGoY6mAQFL1ACkQy4A4Ik6FJBFrQwWzfuBJgdSpmwpkxBLhEILFDc+HbVNp17GeZxg13LuKYkhcOa+H9I4DsO6RdfkABZhKEJpl2CjOUseOkrOtatTqjrnHsCHAE3dkcUcAJq7VHexYnMdVWrqC7NB0jLwqvQMaIzVpzX3hcGpGSW3INIMKcLbNNc6g3liGrCYYmC7vb9jZqIhE1ea1A5G66uLR8/f+32rxx8cVL797W+71idPn7bTw/Xd59/61g//wl/472r4t37v25n4s0+++PTjj/9H/8P/8Gs/80c+/PSj29uH7eUFJxaUvuR7tzaeCHAoopS227P1emD1tF7PbSylpFXPwznIrrAweqX917705en29ru/+3tffPbhw+GLNtf5YSbibt332+6DL33j6bN3jZwzswpmysOK9vceLQKaW2KmtLbTLQGHOQ3OOWXqQ5tGONBuWHdvp/3ZPmWe6wER+7w2r+6eh8Ilf/G973/3u998erH+yuWXPrnd//D2e0li062TkLBzcObgnHtfjebCyibq5EyZAABaM2ftc0ZK5uGkgO4QQJgydW82uMiSISzcF9Il4ptYP2AEKCASSSyldQZwQAdOCBSIyZfoGzROCEhCrr5YcsGnpq1No+uGJSoFI6CHYgB4qlOb55Mwp5SZyJeZY7dKrS6yVGYIyeYBVIwQMSBR13XNAs2FhCtPBuaWOHeClIVEBBCNBBDq6XC
cbtf9er3ajOO+36YwCRCIRgApQ9+vTjVbQM4MligEkSHRaigeVIau6/KRTh2sWfrwpuZJOMnCYOynqgETZ6YETEyFc8mZJbEkJI8ghCzIltCNJQKahpuHIWvVphNaTHNTgs2wmvxU34DzsKTu1GK9Xpc8PH/23qO3PujWjwNgf3vz0SefX148effZs3/+T3/54bBPQ/dP/+k/NaTdbrOf7v/Mn/3z/+P/yX/4q//sX+y2xzre//Czj1+8eAYaKCGAJISp88qpb/2QwHk+nkrZUGROnjqPLpX1Y8au+Xw81reev3N//ek/+gf/1fVnR2vlVHkeeWpGjtu7k8jdpx+9fPvLb/2+b3xte/XWvB0EoR6mnLAxnY6TawvmHF3aXZ7miXNKyB0mV6ttBAug1HXdsOZ+uFD1aZpK7ptGVcurlNYl9vcxPXzta19ZX15+74uXx+MxF4mqhYckRAApc8oYiFmoQRaB2XSCEEJEI1cIsCgWC8AvA1bwpVGl5gpvzLmL5oHdEQkJCHyJkDkAECVekLdgQMpiqABKiwAYgFogUEhCycYIXmPBN89tVG3uboFzqyVhECAGU5jPBOjNrLoQkQghhQcSQc6csrXGjBDhRCHCKVFJQFCJexRAB2AohVmEOZhKphW4IYIRpMiub3Tyx7vT7c2h6/KwxrTqV0OnrTFjrUpsORNLztFnIsSEzoxCKfqUKWPpejcCr+aClDwaumVhITY2MHF2ZBWjlCMhY3ZJlgiQGvOAHpDMWAh5gLyVoda9UuYGYeR1qvOxYFqv18Bw2I8aTWsDgKYzsAzDmnN6/Ohi6C7XF5eb8/PTQdvpYZ7Hx289v351e3d7NHfEcvFoHRGhdnu8+9N//t/7j/7KX3l+9fj/8L//PxrK/WnsV0OHzAHeXPU4pBTQlUGPxz0aIbAn2pxforXT6frsaserjTazSqnvbNLf+OVfvbluH73U0WMCEj5Lq+1pf3j5cNrI5iLi09/7sd492M/Ho3fePt6Pp4e7gOZcUvGUsmo7qfV9GRKrTkBxmg8I3Pc9MnFOscQrASJitRlqrRAwrPrHz9+S3RnUdhhv7vcPv/693/3s+jZttk8v3rq/eV2nfT+cp5Q4UekYAHKl2SEWAgq0wgkJXDXcXZVEhFFQBDGS8+Sg4E2NCAkJPEUgs0YQpohYyljuhhji2R08nAUlIbEjG2ZxNUAkSgs0SNg4jBmBHZuC0zw3NUBOEOgarTGTs+BPtQQjU1AkATDmjJwFMUDZQjLjPAEgGoFTztJ1uRRTmKsCOiF1qQTxLCmI0pA7RjCXlAnAAHCZFaMlqLZ/PW63h/t9KasppQnczMIh1BTJk5BAl0AwMnIGoCScMG0Gwky1zZ3lqgRA8wRETuBmwJAUXDKbUoQxA/cByQiJIyCBC4Qb0CwJgKlDgXBJZIbu3rwxgIh48zrNWCAXoUbBjuE4JAQZhoHAEbHbDdPoz1+s6nTKWTjpfjrsZFO1jcdKMSBQ4CgiEPL/+Zt/4z/5X/zHv/DzP/fo8dPf/u1v/oEvvVtWgxCTYc4duj1+dMEid/cPfbedp5OkfrPbOWEikUjinVFuudnptC79N7/9zdfXpx/8ePz+3YOcd48uH+8uLnR+2Dw/h5Db6/HbP/rx164udvPmd7778VcN18NKcjIgNm+mREQuhiAiXZZx1KZR9bgeVpk4sET4IiZpOrvbdPLA4KHf7Xb9+jI4/eQH3/onv/kvvv3pj8b97aasuv6MwMHn+/kBMZiR0ATRGfrSmfvslTEQ2CoJQ6ItYgVTgoSISFZSLxQOVNDmOrVa1RsgRHWDhsJvdL2OpugGiAQpWBDDS8KcEQkIyV0bmPtiH4UkLGTChN48nDhQEYDcgDHcwYM8FNHB0B0RCSlYghgkyUACxA7BZghvlCsWAB4ZCPsVdQNzhvCsjuhIEcKYBwTAlBm9BYRgYWEOnGutWrGFNqRE4jHu2/39cb09gEffr5pNVScFcPfEIrhCCxQ
KEMQeAQidiIgZdJFlzOrhgQiCGKgKwYIwYHiAs9GQlutytCKYKYXGTOQUwICApBiRYAXbYjlrSDCAOfFCQPXmCz0+5czdDj1EpI73jx5dcaZC9vTpU6BUGC52q8uLJ7/7/Y/+zB/7E++8+/6PPvr44f6W+FxSHus4DKuPf/id/+n//H/9pUfP9w83t7ev5und3e6cF/QZ17Pd4wgc2yHlDB7H/f7q8VPBzgmFyBBovUFKYtGv+3rdPvnw81/5rd/7/OS/8Of+6Fe+9tXnu6/+tb/5t/+bf/73dDz9O3/0T37j9399/9aT7//Wt/pKP7d5cjpY1xkR2FxtnnJKgZ5SX8DcdZotycq1CRX0PNUmGToqEGhuWTBy8hbCmTLmoZf1xfT69ld+7b/5V9//nbzpX1w+PjWeIJDTdnNp2A7H1yjrNg2nlFBaYu9Sp1NlzAMxB5hXYCeSAiVDIiIiwrBhGKRPrSLl882k++k0zsdpquYqocBOAeFASJ0kU/BIpTCjZonEkFiYourMkOuErQZ5EkGEeeFUMUoRIkpnkU7HmThAWoAiObGaKYKEpYhCzCmLDKttLmNECyf35dgjKfVmcyBSIilUekAKceEgoQirENylFQp4hlbJLRgdSVF91mbq0WhqFROmHr2maR/393sMiggkG0+jgzccI1A4iaBVVYOUilmLKKE9JRMyxNHdwxxSJO4lEQrY5JRSGDLMKJaZcyokbCDQKEyDKhNITi7mFmABjjq3VWICZgkObpqCNSJ0rjZPAHG6r6XHrs/Mvt70HhpAn378WT7bIb2VpHt0dvELP/MH//k//60vPvv8/S+//+lnH55O33l9+9pKMtPLs+0f+P3vlZh/8OGPv/eDH4LqrtuNp9YuYNP7eJiZ8tR0s05CIT6t+r7O2l/ltNno3f328TNYd2bVZ1fnjz7++Nvf/+yHn++/+od/Vuf27/7p/+Dmlf7eDz6/fj1Op8Nf/xt/89+7/2M//0d+8eu/9As//q1vfmXevP/lLw+DHQ5fGIwAgCQEKkDTNAXGaj2YGaCXMhAlCxd3V8u5uJqHI1DOXFIqfZ+HC9isX/74u+O0f36+PYbL+bbdzovnwdLgsNnr3OJQQ1OIYENsWXKWJC1TRJe8BlfdZxx63vYiffKUEgoSwabr5hKY+o3JbPowne7u9uN4mOZDm1WIE4twzpLQJLjkgsKek0JMDDNTCKUZCBKHQiAjKAOHV8qQpGsK4CxdSkAeaggCkxAmwZRwrjMAIpaUEsHAP/PHH4sAcWHKANaq1hqtqptFMHFaDf2wEV7OzhjcIyEhrlNfmAE8EuSFRkdspnUcqzZtzsc9uNF2u0uFgaN0fZLkWtGjjqO2yWM2VJacJQOQKQinrtslIaRgEghq7cFMl2R/Ji+ZMualNemRxjZV9K6kPq9S6ogEI4OBMJVBkCvIFDELpZVsV9Sxh86u4eBxmmszc/V5ns21X2/WqzN3BKSu64mlzZWIGrQPf/Q9bY79xVyn9x9fNo2//y9/+emTi0ePL1f9ENFyF++98+IXf/5n//if+GPC6V//9ne++Tu/++LZkz/7J/7I491mt94kYp9P4zx2JQ9dSeDH+1sWOb86x9U6DatxfyjrHeQNBpZudXx1951vffO/+Ee//qWv/uzji/L3/sE//r/83//WX/9bf/373/8tUxkPh/v761bbarfaXJzn8mSq9etf/7qftIPJ2r26grbQOp+OjNCv+tJ3p9OJBUVkaqPDBOGBDAgsRCgeABDIPjx5Kz3/wEtq93fXh9cPt188POxXq83Qd12ggEuPkV1xBom+y6UEoxF7otTcJ5gZvc8CZB36qgxM1HW07cuqT/1KUsopJymZ85ClpMS5z9thV/JKkoSjayTMXS5Dzqths+6H9VDW6261KixBCZIQAQLM4W4utc6JISUJbIuCS1AEBZ1K6ggTEUPU1FXJRhQLpKd0HZEgJCkdMRm6c8pE/XgkhEbomHmeFECYObF
DkKkvhicWypnIQDJjSXUGQQ0P8zr5UZVMOXQRvPg47Vfbp9v1usfMFQx9cm+Is2mHsk1FzZxQUCY7qHcrzkjM1Ajbgv3NqUGAOxExO4IbcdFaEYOwE80pupxWJJQoAEtUT0IMFkJCjNJCp6pzjdSZA2NomqYx5qlWF8acGGkTFA18vV0TRCZh9BC+f319ebV9cn754Y8+jLRaDSUNm//eX/yTiu3Xf/03P/iZL7/1pffXl+dD5rOzs5SH7/3o+jd+7Vu/869+5cXz53/5P/hvX56txtNDqxevr291OpZMQ8fqTSMo5xTYpRVzH+ackkLqV1ubHMZ6eHX3L37zWz/+7OYbv7ht7WY+3f/mN3+jdKuz7W7/+tWf//f/0n/6v/2f/W/+V//L737r++fr59uri4eb16/uH56thttPvjjef9iOY/UxEheW0g1qp9bOLndX0zQdjreATiFAwEmIiBeOd9Nm86orabOGsiHwYRi6Bptu3XVddVfJ945r7maPhnFqanjvXgl6oHuKLOiM7i36Doece0DsMwa4B4uTzEJY+pJFmFJIz569SWuYvG8dStenvl+ttnU/RauALeeUBXOSrmdOE3AwnhEzo47H6l4rGoai8+nYIDR3WOtpWHOfh7A8Q4TDsDrLOTfbtfgi46RxEqlTgAP23XayEBER8UQsJNVyW3XjcQ/E7CpUmDkVZ3F3kQV7whoOBtEJM4BQZzyrhkVVr4ANCQD7VpGQSNo0Hi62m0ePLlWVSUgiGGbXMGBPDOJ4UqsRDEwes8OJIoMXxACeJDFGJgJDAvcIcguLIEpmEQ6ClFLpcy/i3pyoUwRiT50gBkY1AkMNnVqjBwNokTyLzW0+UTALh3lrc8bCaTYPlqQ2a/VQD7fTzcP2rfXTq/6T7/7ryWl19ej3/8wf+o/+0p9963L7G9/65t5cuk4VP/zeh9M0ff75XVL8j/8Hf+EXf/EX+rwZb1+/vH613pSr8y2Bi5CZJc6tnkrJQ9cpONgch7lfbWDoyZUI7z99+cNPX/6z3/idy8vLvksvP39ILo8uzlUKSJrnw9e//sG//W//4p/6M/+t/9f/8//xS3/E3Gyufrs//L63nj5cw9Sm8f56aqe8Xo8OLIdhfXmBZZSHeW7T6WDWdtvLROV0vCOMzfo8563kJI0w9xFuoKh6d3+TxvlqWF3buL95tepwtbs42nQYK7P0Xa4qhOpYE60X8HIS77Mn5JwLUhKcBDszI57cAVIYQVckpQFk06G4Q6u5ap7NEWcAKClrPqK1BdlAgLlLnD1lAWyEgZzRWyq1tmxuWqNVrzUinFKQELiVwhQpTAlWl+dPhGie140KpZP67Wm+A9fWqhTcbLdCBF3qSuqYGWdd9V3botlxnBpCYkYiYxImCq9EQCJN4c2RBwWRchZVrXpEIhJOHVozQOPEoXpxcbY726xWXWvm6jlJhQlBhZCREnJIttBwECGHaqZApKYSlQSSdK4tYE6Srbq7IHL40sQHRGbwzJKldGzLBFM4E1HpyGOGpbvJJMjjNB81BsidSNFAmBrA8TBqm9frLSsTB6ipKwuq+fFwEsbjWG9+cPz6V76y6bNotPn+137zlx9fvfsHv/Hog3f/+PX1w83d/n5/4ovtaiibVX+2G853qy+u9/vDuNusyNNcT3NFBjztD5uhV62qmjkCEUqZpmnoOgOkecbxMJ785ub21c3xZIUI1Pzs/Enq18CvOka2lmT3z3/1tz789AtxeP7isYpNp1ksCQXm03vvfTDf39VjW+H67n7PBNt+fTydjtNn2/v95eW5cIeQ2qzuhwqnruspSN1SKUNaSwDUEK02PdTbL1YX3eEByt4uu3KEada72zodvVHuV8xpylZAYyY9y72n1Oox1tYxUUkdyeRKmQUzeFASaDrR3M2rUlYlSY5ITpxMZA46zWHAkuosmgDNI9B0JAzmYEEWMZ+BlRGbOQMyVkL3AHdyg9ZaQHSFABugdXnd5w3Bpkg
PWK1Vp57ZNttH6Vhw/xpsnOpnZ9uVABqnIikLJXfFruA2E8zXd7RvjSRSZmJEhEJi6oiaZEXYZUk5ZzVE9NKhIZlra5UTS5dyzpVstVu9897b/TCkIgBgxCyeHDsujM6CyMgLRQIQgABMtVJm89qsJUZc6DBAjBYMYUaQCIFQuiJTI2+VBTAMgJEzODIVpqHL4B7hE0p3+3Cc6mmV1itOyUAE0SkPHWlExGq1kpI92CMBSmBrVUW4dGmapkxop+P3v/v9r3/jZ590fUn8k89ef/LD755uL59cXD3a5HcePRrWq3nGaZqQ/HS8f/X5nqDbFTkerkPH8dBgl6tCmysPa3e/u7ndPX+03p5NwKrzbCpV11nq/c39nR3n2qXy7PLJd774yf3D9VdePH/3vWff+/DH9aB9H4+fPvrtf/3rv/DzP/feW+/87B/8es6AEcfDfLla9086fH3x9lsf5KyH2/u5unBMMyCHYBBBhBGJKcx6IqTcFeKi7uJuzUrmwIiqYMGtXnTlOpehu5znGOP0cHiYYo/pvAMO0o5ljG4fTUpKNPdp2w0XQQenCaOloigOqWujMUZXCrO6TWQcOGDHWViwgzSMrQmcEiByoXHGmBHcGxIosYJRzpkFiBsDxCIEZorGBJJYE8uxeVgguKsDYspifqLkq3IZmtRORQSx1zolWSFMRCCE69VqHPF4fOA/9GeuSteV0iNJRM68JRB/I4jVnHG9zd3AyBaoQSq8hkiJu6GsA2DW2QIEI6Xi4a0FMRKBOULQ87euHj960nVdSh0CizALaAsIyoU4MTBpzOGKgGZeW4tQphLhJMpJAav5ZF4JBYmW1SHiYtZhD4vw1dB1uQRGYDAV4V5o1efImRTV1GmOHF3S1FVqVb0aB5r5rG3WlvuuW23yUKT0QiKckkjTJimzZKtaurxe94fjw3oznMZ56LrHF1tyRSRtOE9VEm4uzp++8+Vutbs/nObjaVVkmg+vXn5+PN09e/Z4Oo03r292211mWQ9J5ykcN9vz3PXj/c00TZv1tmzL/nD46AefBZfd7lyj/pN//C+H9eWXvnrZZzo9+GfXPxmn1mYEP15e5C9//WuPHr24uHz6+vXNz1xe/uGf+/27ty9immAaOQeXohpQx6HfpMIQARDa5jrVVCRllJxK2jBlTstiE0vp6HzD51dtmuonP5yODwYy+dxscggiFuHt5qIUZgoWrKyO1q140/dZzrpy3q97zgThKEokRBIOdW5M6jEmkdx13WqdVlvKQzd0XdeJYGA1aCktcE4gRAxnnhFaRDCuAA3lJMnCW3gQsKtYDVMIlfFkENQPqXQhuZXSS1JOKsIISaS4IwmmRMRMYhb3QEbEGFLrJIvAHZAQc0rMLIyJ4rIftqthnOa7xGOXABCnSTMn5i1oSpyZk7oHAkZCiG7QEGhNzAxKW211t7l4dPGkT4W5IWgpvcdoBuqNEubcKToyoc0RIwN61LneYWwtn8zCjobcOFWPGUAjxJURGIGBDCFaQ22OCAwoTOqNyDMLAxEEUE8RENxay5wzbwi6nvP5buiCpFrL02y1d7em7lTyAIA6qaoBzgt3z8LKegCjafSut/vDw/bskRsU5ot1Tuv07K13ADtOuV+fl25z98PvTYc7gJNZPT5cH/dffO3rX9+U9OMPPz3brnORgDbup2hVU5lrw3qbCKfj4eHuVeqC1KOCQwyP1/+dP/9n7h+u/87f/5Vf3+x+7pe+8Sf/XMI0/uSjTy3sS+//3Fe/9sHlo6vt2fPXL++2E/2hP/Sl9bMBujPfvp752Ev2kjkl2Q0WPh6rSD6NszmcXW6H1RYAAUJSVzJTpgqwLplIAgp4bdc37XAQ7nfrjarSdNwmmfrd0dpYTxPUU8KZvKbIqy4P3SCPSLCUlPNqvd7eldub/bXHjMF9x1B1GmsujRMndcLsmjTxZAZslFOh3tDn46kvwkglcc3QpjSHIjrGrYODH9iByQBHCyJmlJYLT+K
5IGHHjMiV5f9H05/1WtNk+X3YmiIih733mZ7pnWquYnWzmzO7mxQpURzUlCEBvrEF2xIMG4Ig+8rfwveCLwwbsD+CYcCAIEG2IYuQLMqSmiJ7rO6uqnd+xjPsvTMzItbgi108n+Cck5kxrPVfvx8QNoLs1lt/PSQmeolERKyWglwSs0v2AomyjENxMe9m4U4pc8rFGxAk2t/mFmPSpnPwe5IViRISBBML5RvmpH4cigCPtdagrrax+DhRa+AeJdF+zDNTyoHUER3ZhOj8eGx+TsEAE5OwZNVN9dSsOrTAtrU1D2dErtVo7cnVzMwU3VRRBJKwhdMFUYeRiBDUrAIYBIE4IQpxZhKZClloHgAPfJfzzlfNyTCgtQWEGHjIg2dXdKAkQkmKqdx/OFmtGHZ9tZtGeXqswMO4O7x+c7/0+OzjT7ZzTUK0tHeff3777OX903Jc/sU8jL13jk2m4fx0Oq/Lzc2Vbuu7dRHsxKbbORD6uopI0346H/t2zjkDuLf13S+/enh/6gvu8tSW96TL/+bf/Td//MnNf/Zf/skf/HP+7Pt3/6N/+9/66vMvQPrt4eXdi89Ssj/6w5/Rh/W3f/KDmx+W3fe+4wD9fB441rotT49DpjTdPa1nbBjEverp/kmGcd5dMY2mS7fOHZNwSQSMnoeUD7GuaX0w1GgIfRm1suQK5ZHPvcVSTYQkibpN2K1gKfupTJI540SUxgRTPgDl9+8+BwCkXkrWbdtaB+J5l9QEjAkEgGtXCYegQZKJoHSAKDmXhGdVbSfADYEY0RUdILEBqqMRk2Rz9ZRpGNIGHNEvSB5kstAI7bqBHxOJZO61A3awttkjcs1DuArgWIT4r/zrLwFwyAWBKDilWXAMENSeedhN41QyQCA2STqO05T3g+w4ZRIUYWZr9qjR3AwxE+eIDZF3824slCXJSJS4dQ6MZtvD0zu1bT8/Ixo4cU7Jvao/WTQA783cI+cxJQL0gEokFhcdeQZAgAu1AZlYRBwhEafhVxk3AR7KVIYroqEIJeEe9by8W9v7qqfWV4vN174uZ4Jo64boOeWcSx4GckssHvrw+GgG0zAnwnVZHh9O5+VeUhCyaSzL1qsPw/T2zbfv3rz98P6b9+/evv72q/PDN8vpA0JoM2t9WVcFXL1/8/b14/E47qacWCLAjHJmyUMetu1MFEDQe2NksvSnP/tZ3R6f3e1yWns9Dow//M5H3301pK5v35zvTw/7/UGQlnNrT/H1V1/OZr/9w89+87d+8oPf+nXsev/mz/2bz5f7dw+nh3o6QUKRISKbewQKyzgOZq1bU62SgUEcwANK+JSn/PKjOBzs3bd2eupNH9ejrU8EUDgFwarYVLtwK1zZlCNlR6JOtJv3Q7qa8iyJCWUer6c8Lkt9enoH4YQc0B1WcBzSCOO+jBNPBRNBNIhwA7fN4mz2BO7hqF3dkIBUwV0DmmoPgIjNLzb3cIyLDTITZo9GqXLaSJSFkALJ3EK1d+vHp+PWHljUfFE7Cg25ZLrEJ1H4L/+rLxF9GGZm2dpS0m4cb3p/AsZduc6lDNPMnHv0SD7nfcofEZechTlHYLcHi+UiekR0ZiaGiC5USi6ptMziNrW+mp9Pp/OyHklwmg6SMgGzUFDVvrgpkJpSeBKRYcyOFQAAulm7uCsBiQiYOKdEyO4O0Iix0MDCnECEmTNBCQcnA1b182l7/9jebnZsG5omRCySEyOA1tXO69K1erdee9XWthoKJOKoqis4dusX91suOZc8DeP5tAzT+OKjT57O27DfYSrD7gAiDtwNlnP16qe6dO/eZOth1qY0XM030zjv9nsFaY58mTgOiLb0cz09HllS91rXh7mwJNKUbvbXu7s7s/XlIf/0Oy/HdZGtQYWbJDPZR/Pw1376g9/4m3/pO7/2PWimH748ff2nx29/8eH169O6eGKAYTfuKM2SACAICgut25mQ9ruZMVNO+2mmIjxO8+0LvNpXdd6eCDqYRIDaIhF
AWAGVzACcU2RGdEGkNDkl7SdAfn77/ZxLkAmXeRxL3o95cm21Lu5G1DHUXEvBYd5T2achAziBI6oBgNVYzfqxra0rqbbokWjIPAiTo9a2uB2HQZl7RKDPiCAyMhYkcGgRgQweKtIRkyvZxXIabAGInGRgQQQgKjlzEkJUABRkCITu50TZvD2dv2QYhnyrViNwGgdKlPI1pn5cgWjHRAEJwHNOgLasyhiZseHizhBGrCyKVIFH4mJgtX/YWmXGquvW1pSviM855drcXRIfUrrvfu+GuTBS59TMgYIvbuy1q6qzB1OAsTDknAmlteYuIjKM7FG3tjoGkiEJ+YGhWHSPToRCxJBLnnLkWhuwL82SRdcFHJMMtvXWq3oQY+1L71ZNhfxqvrq+efkcnx0fjvcPp5RW6+urZx9/8cUXp9Ppo48++frrr8c5ffzJK9P2cP+u1/N+Hk6Pp/BtW9fweT/tMu9LnrPsZdx3IO8bY5gFMKPa24djXc5hwAOnotM8t7bdUuyvxvV8f/Xxx9//rb/1xb/479vrb/76X/1EW+K8dwrK6eZ2f3197ULH95+/+/yXT6+/iPOC5kE+5ETlkDh1U/UGADlnGdKHD28tNNBTHnZz6aZb1NtyMx2u8eWnuDvwm2+juYBARrVagSKCGDPSLiWmCttZiAvjqZ+c8YbnJ5S35/a0Pr64/hiVJTkXZh9v5pfz9/cfnt598fXvnc6PiXgoXe2+tw/mz+s6ug1QsnonCMDsAraNXc9JUHhs1N0QIhW+TpxTzKfltfYnlkBKxAxBDp6HMIOdDWVIarxVreeaDqn1SkSE3aAiIUaoasm5Q1OrphlFCIGE+G/8w08DFakzMgBrP4fHbnpmGpJzKUysSAbkEA5wsUQzEQR24tbs0WwDCtNqcVEV/apsWXIWSuGOhK1tW/uA1CGCmeZ5n/OAxA4qnJBa93uHSmRAyJyFMgaZuzGYe9ss1BGJORNxSiKcUkoEgkDCDakG9sDK4swlghyUSAHPtR2tKXuRXkpMU55JPSubhWSG1nXb1NXdwPzx8X453bsqBo1lit6HkknILMKQKYhx62pNv/72Cxb74Q9/sJzO63IsgvMg8zAgAOXMlLvBaV1Titu76+evPrm5+0hZugYjZ5KUZsnlfH7o6xKhTmEEhKXwkAjN9O7lS8oC2vKL5zff/RGYOegG1tvbu9vD9fVsvmzL/Zd//ifvPv+zxzdf6oe3EF1HqRdvltrpdH/ShQnLwIJJtQN6GQphFoiUC4gOeTjs7/KLj9rdC0dOT0eqR6+tbjW0YgQERViADykLAAQ6IkCwQ+KMMmDIuR1Xf9xfXSWZLxrwnImiueYx7+bpFkKRn5iRpYMAiuScOGdHY+oE0NVCrW0BhtE00wiatKPZBQuCwiJUMLJbNF0ByI0CutqGKOE0lIEJhZhwuDB5zD2CESUcIiBcESTCzDfmlNPIlJiZf+fvf4IAjBHechLm1PoGGEO6JmJzD1QiN61NN4Om6oAIKbqdFBaL8wWsTpDCIsAAxQMRW07ZLrNq+VpYAJUpBURAzWkYyh4jIwQyEAdgmJ8CAjEJT0xFrQW5B/au2npKxMI5ZWZCREIWEjcxD6KVUrBkAAM0ohIu4QjQVWt0B0u7fH1TXk3plsKEU6LMgNZ7183aGt6Wuh6fllbrBeVbZOxd12VdHt/X09K2bV3Pj08Pp6fz+3f3VTdiHobx+9/70Yvb/fH4er+fX756DuHvjyfO6Xhq5+rTbpfSsL96fn33aab5cH3IJQNmSmMei7N5Eo6o2xNCSsMdAWyndyUDsu6GabjeP71/z8vy+OaXE/WrNGdC9HpaHj68+7Y+fvj6Fz/fzk9MsNsd8v62ajvdv9mOT8S0KxNCCZKUSkrD0/Hpw+NDV23dMIyt17VRwrv9i/H2JdxeYRapWzy8i3qioN57XY91XbRXAMu5dNettiBA5oiIgO6xMW0SgNKhEZxJAmk80GFkljylNJpp107MZSi
AgEnBgj0NeYc5IQIHqyXX8BYUBUzbeqqnBT2D79yJuBHZpbfDgO4twLsqXtDPnpgmZgpQDCNIYQWR1Lw3J8pJBqZMQMzJzIki6HQRjSGM4Yn/xt9/gYBEYXDyqIgU4dtWJRVmsOgskDlaX9e+hPWuF0r75tHUFgPLaXJDQBMuCEyQmQnQAEl4Kkmy7ETGYZhTGtWbxpHES5qYExEjOxMBRLeViAICnYnIyA2CEOTiqifAC2OehCBBhBu7kSQhuTAwECECHELCS3hCtNC2rrW3mhDJsJ43Ao3wAHA3bWvdzsu2tNbWZdnW1lo7HVcPbmbH8wlLzsJrbU+nczDncVyrrkvL43C4fnVz+1wbjLtp3E0a0oxk3Odxv1TT8A/Hs2k6zPPbt1+//urz+9e/QGvjmHMZpAwkDJww7zbzrTbBPmUJsHV9AID94SZxmvbXfHdLeYr7x9d/9scfPny7H6Wv29Pjh0zU60ZIdzd3hHE6Prx++/X94wcmmuYr4eTuqkaoaZhbxZx5dxgCJYnkBAlTGmk37K5ffJKfPwcIWJ/g/Ih9QzPrTohuW90qYFDExdw8YgJCd+PAAgnMK3ZKpXBCYFvPW/0AyaYyj3zgPLgJBNR+6npiwTKVHoHREUI4B2cItubhGKbRzc0Aums9nU5djYgBAdQll4sKt61HdyVkdQf0CMFIiCSJszCRaI2uzcL+JQz90tbJTEKYEZEYkJp5NTeCIQj5t3/3FUmn1ADcIwAt3MO49T4kQXGgDta6rc2qqfVem1YkRXKzHhZMCYMCTGgsMiQsCQtDQZqmcS+YIhyiiBTOEGi1b246lCkPCdE9Asm7V9UzUvNwIgwIdXRDhnB1dEYphFQSljwiJL+QV6gEAFFH1rgQdkMjOGy0ixwcvWu35q6YYDeXGyzZW7PeKULXpS0bGHjr29oCAYFKmVKZgfFw2M/Xh6WrCef5wGUEZM4Fc8oyHG7vPvr04x/+5CeGjGkkmX/++de9w/Xdi+vbjxzjw9Ppi1+8/eKLPwd9f0gW/e27b79+99XXx3dfPb3/8uHta+sdTLtlwFwSjQmHgajINN7kQsSblKtyuHGyQf3h3fuz99b1uKy7+ZBkVIdAOh2ffvlnf7Rtx6vd9d319TjfCedxHIIuplCD8CIJo3VdGbCUJMKMhRMdDtfjzS1e3Zl6Wo7ezuBMAdu2uVcENwtzC9NcpKAQwOodwUtgIAKhAAzN2DgMG56a36s1KTPKCO5mUdvZLDwq8vliL/fwFpeVboxI2BSbhnqY18sksy/a160uTVdmxAggYFnDj9p720AhnLZQJEyAcQkYjGUOJ1NVg9ZVZGAamdM4zEyFIBMJoF/iLh7d3QGB0Pl3/vFLkibJIghjANKuFTx1RQ8thZHATavVrjXCuvVuG3DzaOEaGOAsyMyZQYrMwskMEMckU5biBqq1pCmlpL7Vfg5qxMw4QUCwawc1DWgePbwSJiIhEQwBJ74IkSAhk2AIRUqZqXTrCACREAKjE5qBhYeHAiTwITy5t96amqpuFGVML5lkiGHMEzVHdeEUCOdlpYD9za2kknLK0wgQ0ziot9PxFERZOMDX1k7bZs1ePPvo5Scfvfj4o08//v60m3b73dXN7e3dy+9//4coMB5uFGhtrchwWvub1+9O70+Pr5d5mqYxH+/fPb193c7vc3YEOz18/e7DNzwcDoeDhALC4Xr37O6WhzSl5GTDfJUOd4tDRuGAZV2n3d2rT18hBLg/PH6IaPM87cY74UtOPjVda12QM6eMgK21arWHIaRhGBHImhHC7dX17nAYd3u+vo40sBlsmy2raSfTtp7XbRGmJIyhROhq5F6ICKH1FcATyQjDnHaI4gJPvoQsUZ+6kvXStlOtdWs1oHXd3NeIaq5o/qv914m5IETT1cLDGxCeT0/aFvCmtjV92vQRsQMtCCugujez0K7mJwaKYCZm5lJG4nRBSWMgCSMREw6lTPNMxOEBgAAkicxbwBbh4c6
c+e/+41eAxgScCELcKLDH5cXCSClSEnNYtyNAA7TeoHs32OjCNwlMWBInRmESwEzEAa7q6CZEdJkGYkiZPHBrR8lngN47cCoAEKDqm2oldkJB4EASSeDAgAHg6AQoBEIm5ECIlHpvph0tuStGcDIPc/NfmWlsYGSzbt7UzmEqMGfcJ0k5ZTBAN2ZGYDQfE2qCp2WtW3N0xw4ArXVkHOZ5LPtAWratdZ33N/PVx5znT7736a//9Nd249W6rvePj93gtGzN/O7Vx9N8vb+5SWm4nq7TMH7+5TdPb8+/93t/sD0dX7w6mC15GvO8P2347ddPtS6nitP1bV9Oz/bz4bAzqxxGBIwy757T7oZePKMXr4bnn2YRW5at33/55S8fPrxJ0m+udnpS6NG8Gu9cQgCG6Sbvb+arq/nw/HD7Ypxv0jjP13fXNy8DUmvNrKmeOdHNdDPmwa2DCPCA22qn16316BXDh1wi3HVLyA4klGgo+zKo9arnAcM1EMQzBcGKa8fwUKfH1pqbb1tvXVvTbVtrVe09omtvGMRB2swxM47hXXELdVfQqL1aPS3WVoAmKcKt+SlAk4QwIaKjMQL0FmEA4AFEOSKllAMtwJhHgAuSl1PKQxmI0V0DVGSIAPdGrB4GEEyToASTICsCE5tqQBCAImaSpTYkEggB7EQBQIYerq49JLvncMHEUqj2hpCzHLquhFVYVHXZzvO4B3SLWpsF8TiOJLdrnFZ9WDuJ7ZgZjN2NsDogIbNEhIuk5s2thzkzMwv/SgfTzZ66mikpPxBlFwAFQGytA5pIiiCNCOeI0A6Oq8iHpQvDrfdtxikn1taXukRvKedEfJWvxqEIjwiyvx6n3fVcrnuvmyEK7w8HABkPU57G88NpzAFYOsF4c6uPp69f30/T7t3PvpBScs4/+tFP97e30xV/xvkv/7W/+vl0Z325HuHd43k3wLOrK5Kb/+o//73Iw1/4td/44Y9/M+3pruhHn73II58edqftJI7D7av06V9EHrVmV8yYx+c/LF++rcejNtbA9+/7vA+7ugv3rGs9b4/3xyzpxcuDjDsDhHBEKfsD66SqLSjtbmZCqAXhFWXaikw3e0wD9g30bNtaO2y1JrAs7IAeJCIIho3QA9wWcHQ88NgimGyzh165M1I0VsuSTfKC9yeXHPVhe02R2QftweLTNObky/rYnIJFe+3ba6ALaErQ0UJCm8YKURNnDCmjRqwYzaJ2W5kzS4XI4pPVioDmFgBgW86AZGArkJeBseWIztmcuzUwM2QMaBDiEImh5Nwrbe3Ef/N3b4hdmBEpgsxAtbsjkTAzov1q/EohoLtBtyByDxPOANIaYBAxmoIbI0HOORw9OmCvm6r27rXb6rgAakrDfvx0zLfmfVsfzXtc5K4BZo3ZEQGDL6AvCAIAJCeCxMBMiAbYI0DVPaK1djHcBziE974GUHhBGMLIbAmP5keHDaGBMToTEJMQDvvrm93V/urm43F+Pu93u3mc8762bfXz49Pp+PDheHp/2tbeWvemoKfl+M03X7358ltQu9pPuaRxmJKUrgHIz5+9lJS/84Pv3756+fS4fliXtVph/LW/+NPa+5/+wb+4HWger+ra1WN/9+Inf/nv/vbf+bdvnr8cZhn28NOffjZdX+N8fVo7ycHoCspOrl7E9BzlgIBuK+aQnOpywsBWq+R0Pj4e3351fP82DKI/9m3RcPXqrZ4ePjw9Pbb15Fqtt7o8RTu5LghxXk5KbXcYd8OdOWIhdY0eyUkQQGjMiQDUDNzAm5kacIRBmLo7mGrDCDA3V1TN3gydAZlayBIIahxRu3ltZ60NAJGMQJgHD6tb7RaE3b25aqhGN1c323o7WmscQGHhxFxEsgggbkDdvImEqoZTIG3diDMiIrGaRqzmzUIhSJgup2dB8ubWagCadSTnFIA95axmtT4KhJgqgjAzBDIrc1I3wN5auBPiqZCoqoe6g0e/IMdqreEXBXHDmoVmAFrW99m
Gy44BAMJD9zV0gW7EY8lDyTvBw5Chj1hXX+sWHCIumBBHiMYJwj0CARUJPLpZzSVdNN1ABEbWUa212k2lYwNKnEi9Qwh5sghOatEcKwtlhNN5Q5wOu0KQym7/6bMfFp3O52NE6Hk7Pnz7+P716fT+dGxlGG4/fn5ze+edHh8f/Xh8Oj4s520+XAHLIPPNzVXA06Kn73/8ats2ijTNh8++930Kurp7UXbD9fUL+BH/6S9+Vtfzbn/Vev8Lv/4X3/xrf//3/8n/Iy25n5bMw/nUvvfZ87IbH89/zoV+8NGnGYina7y9vdrfFIXltCENCBBhXAYAWe+f6sOb5eHtn379FfQj1vv3n7/JnKfDtZQhYZSrZ1e3ubmqNkLfT8NpOUePeg6lOEwzKGzrmtNw2O05yX7a764mxcKRrW29nsAszEcZ23Jat9W9QxgHMWWLBoEBoL1dWD3gARDknIUi8ZUEoKtRb4gl7QlO2iIYjBgZ8YJfMW2AkNW9aUtFwSHMzQxBiIv7ol7DEyKHeYBG4LQbmLtjp6LoSDSWSAnByLFTKLPIMO21L70/WWj46OFCyGxmPYwAovceYSTsGOgKkVTZUVlMupIwBw6AIpQCKyGKuGuHCEaptfb24MZA1cJ6uEeYATj0vjEgp8HAPJ0AyKN77SKC6B5+ERZFJEDCGKb8CQaZn5jHnGbmK7WOpAIpECRJgJutSI5I4Qk8MBQ8rCtnIEZiCcBAN3d1RZJLmLV3QzKiMGMhUFsMqrsjcUS8evbZYfzufrzNctXP6e0XD+LvT/UhPCWDhFGmm0bluy/k+e3dw3H7+hdv1uXhm2+/Tnn86W/85l/49e/vr1+WgRnw7bvXMuCPf/zruyH//h/886e3T88/+mT4apzSDJJU2q4c9lc3h+sD5vHdh5O1fn24/d3/yb9DGf/k//tPvNOOrnl4oYA9Hm+flWfPXoRIa/z0xVt4fy7jeDTN034/32EacL6KIATMaTo9rf3+A67Lu/fvrq7m+cWnx/t3uj6FQynz4/Jk3pkxp2nBhBSJRdK42x02V3efpskWfHh8O5Xhez/4i9cvPrOZY0xRaYgSLG1do8yn01OYa9+0nURyyVeIQbYFUjNFiiJJW1iClHMxqM1WtAYdQZOnwW82b5q6oPf+RDgiZmIx8w6W02RhJMXaB6sbM4ODdndfAx5AyYEDL+I6UG3gjXPkIohT+JmTISycc6LAMuQ0bcdoG/kG87Q7bab9aLYyiUVYXy54PFNs0bwHxwwUiZypAGhKGDHzX/67d4jENIoMgiMAh2MEmTojqru6LdtJwbsv1Rd3gMv3HOiGrmQGHtBiUTMMNKuALcLNe4S3rqZKGO42DKWUnXtHbA64rUvXSiiEmYgQMQLUnyK6u6mG28UFDwEhyYio5IGQVdUtulrXTShLskvcBdAIi9BU27b2N/M4vHjx2ctnn+3GGwi/f3dejmqrZ5N92edhTCJZhlYD8vjq4++V3fAnf/onP/vjP3x4/9U0yr/69/6Nv/Lb/9rV7avWjn/4L/7b3//n/93jun36w79wfXvXq37+5VdLXX7tp79OXJpZq7b1Fmhd9enx2Gp/fHza1vX49FSrffv2/uruxdPj47evP7z40Q9/8Jd+4+7jZ4frstsVbJrHbN2++urb1+/evX3zzfnpbCetFbwjJwYG4iBJCVnWZb4ba22tWt5dyXQdMiNnZFYki+CQ6AyY1OJ4PLp5HvI0HUJ7zokEXT0P4/721XC4CUgQyk/v7Xg8nc+hDaxZNEqUy+SewAysNt245CJCSDmlxEIkgWTmrbfACOsIRgAOqVIGJHCv3mvdwAgiEWVgEkRCDoRetfezaQMwIlRf1Z/M1DsFJI+LVhoiolsHB2YSYSDP2VOycJfYcRoL7TEyU7ZuARwk3Ra1k6oChEV1hwDstjXfupMZM6FIMEVgY0bwgf/S33mFUBiLUEJkQkB00w4AW+2m3HytvVY9dm1+0QCGQqi6moU5Vq3
qtXcFgG5q0YDNwdRMLVTRlRDdzdS3eb4Bl96bW9feEaTwjoIxzB3MIeBkdlEOoimYIYAjQk7AhImLO/Wm7nAhvAediYzFiC9NMz+e3uci3/3or3zy8ffC28Pju9fvvm7tacxjonHeHcpYuioEhvett2l3c/v8+f37t3/6sz/Lkj/79JPf+Kt/9ePPfno8t5/94e/9f/6T/9vP/vQPvvuj7/723/7buZT7h69/9rPf/+//6R/M+8Pf+Jt/nTmP81yGaZx2eRg++vhjkRIBb9+++fDhAxIvT6eHNx/evnmXUqpray0++t5n560JD0AulEeBp/MSrmXi3aFczTvKQ5qmy58zjyWset0iA4/TKCVI8+46zdM4DuM4XF0/nw+3424/39yVsusa3WpdTolpHAaG4IQIQChAmJjneff8+avd9RVQMAHU1Y5Pdl5tWawu2s5b3ay2koZxvoph7EwEKCAo7OZhFghIDKamGyEUgsTi6ApuQE7chFrT1jYHMq11a0S7LBMAIlAYoZMQWDMCl0uNMoIwmXmYa7RamzuycFe1jm7EQqlwTrnkjJC8s1rGyAgDEopI+AQktZ6bLrU1QGBGM2IqRNQ7qv4qxpOSEF38uMbM/Jt/+7tJJsYEECwM4A6rea/NtTnhEOREol7dFSEiCJmsq5rWrq1uFqpWL34o5AD0rrWrmtvFZox4gXFlj2rRmbm3pfcNwhnSkK4I2b2qh1mobu4OqAHmHhYG6ICepRCSX8zg7q5gHZjS9c11ySOLEjUzbX37+OOPXr78zBq/ef3F4/EL03Z9/XzMzy4LQK2ttXNEgCOhzPublPLnn//Cev3pT37zuz/8sSX8sz/92Tef/+z3//l/czyf/9bf/Xv/2t//t0oZ/8k/+c+/+vyrd+/v3797+Nt/5+/98Aff++WXX7Lww8OjcHo8HcdhXJft6fGx1m05na8O1/f3xzB4/vzatFdt33z55TyOZb9/OLfe7Onxvml/PD6pwXnZ1nUtwzzvn427w3zY3bz4NOfROSCLULa19naMZA14nPe78XBBsmrv63IUSaGGgcLk0bVtHlbyMM27cF/Ws0MAQKjN+3G+PlStsGwYCzWNNONQJHEqmRFdN1Dr27nVI4TN02F/9SwPV2aEVJBEDWozCBBhYFT3Y11PrV1GyCNoQ+rgXrm1trYPVU+9gdABgcM0LIQk5UlkTDJDsBsQXbqlsrXe2qa9uRshEYzWCWEIUAgU3pWcmcO8qrFpCM1uRDhSzBgpydhaILC7uTdGyjkDuTbAEAYeypBKQvQAw7AkyH/xtz8FF8RgJklERIDV3U0RfMAL6Fc40d5jQeyc5VdWU/cIAw8zxbi8+gHYzRe1LQC21WurFs0M3QHRAajpav4gTBFGmBGIQgiyB29Nu2q4BawBG5IjB4BFeIBjKFMGQEd3t8sNJedUhmEcx5LJY2MaP3v1V8N2X3z15x+e/rhkmFLJXLIcgDnCgpxTHuUw0K1QGcacaXj7+v3HH7189el3Wt9++cufvX3zDaCty/qDH/3k7/3r/zCo/Df/9X/1//tv/ot5zEMZj0/tf/6/+Pe/+6NP/p//yX/81/7aX33z7v08lvN67q7LaXl6PFetfavI5OHeNM3D/elxW9rp/uGf/df/tJ1Pzz7+7u75c8SutTKntZpkPp4eT8fltOqx+vFhHdK+pNw1elumDMGA5uKKuSAzBsgwKicNqMvZ+7asjxYA5LvdNJRRHVLOTIEIWrWudatPErAb9/PhbvfyVZqe+TDZplByKiMPgwy5nZ4+fPjQzqfWKlAPb9gU3Hpft75SygZACKWMJU0gbMweUqNphDEjgprXLmSDWTOjrfbT0rd67K2P+brI5I6URFIKz+adqYhMAbhpRQhHuAw/RWDApVqXEUZTAKyqFk4izgShohZmGWxIuFfN7sYhEcy0QyCEIDZEyakEAMKEnjMlSoQphiEBNABnFv61v/USLDML86Xw0pG8tUaYDZI6uy+
JxsIj4lmhCQ0p4WXG1DTMwMyQUCQcDNADAJDX3k29bbqt3bxBEDgIFCaPaEBJ8Fp4KHnUQHfW7hC8LEvtJ8CNxQKBiCToV+JuV1cCSuESwY7EUkZhoqVIJ+RnNz9+fv39r17/4qs3f0ggiWb0IScHN0d3FOKx8CjM1hvGtrUNQwjo40/uzOznf/7n7799OByuhjwQjj/46W9+9J3v/P7v/Ys//Gf/NIN++uknzeLc+T/4D/93kvn//H/6P/5P/53/2R//0Z+7Yu99W5fz8emwv1LtLELCvbV337xe6vr6m2+++uXnT+/fvf72XUIkkd5drd/d3pacAWgcBxSap/H67plTmcZ95rF3b72uy0IZSx59aw5KJTOodxcIyQUScsrT7iDgvZ4OeQbTQN4fDlMqGNSsaTfDdSpjTlKmfJiv99M15QnmKe0nyxPwQGj9fKpP5/V4Xk8PBJaE2dC6GgZ6t7ZoPUN4WAVw9dVAgYmTKDR1SzQwpe6dUBKOKKVVPOqHZnrarG/BkBAySw6QkkfhjGS9b6odMcLN1SMaIpgugeHhiJhoAG9qbhYGOg7F1d0aEXRXVW0LmObehTlTcEpjBEXo5T5CDCCLeSAUvnSXgDEkJZTkgOFuAMF/6e986gBJikV1vCdyADbzAAMcTZEuXyNbQHMARGHO4eTu2rtqIIJIkoSEgoERoN3dQFXXpbVqYYxIzBhIhIEiAMA4SgATCu/MIcARvVntdrIwZCMOBEISIA7HgOyOSEAMWnsAD3kWcskGYM9uPy55+PKbPzot70u+CcsBCLAi10AJCIOGthvSDsMZU6vdFD/9+EdjGd69+erbL792TB89f9VqH/b7V59+dHw6/tnPfnZelrvb2/3d4dzIY/h3/71//3xc/6P/w//+P/jf/od/+Ps/W07Lbp4eHz8cj4/X11fzPCzLKXH66suv/tl//3tbbXXTra59q9rq4TA/++jjMh+O51Ndz9ZrTmk/j2br4+MjJy5lGubd/up6dziUaRymqUyDe2cmTgjWxem81uhba74tj7YcW20qfHjx6Xz9nfv3X9fH9x/uX58e7nVZgCPct20jb5nwcHf73e/95u1nP4qbaxCJh3fL/c/LuvpyOp4ezw/HdjxqPwfYthy1NaJILITILEOZh2HPwsKcEiMABlrvdVusbRzAgO4dKcDBICr0Vbe1H5f2dF5PCkQ0sucAbFYROKUSzu54qaWsa22tBVmAdt/gUmGNIKK4nISthA9judpNV+CpdY/w3tQ0tgUiEtOQ0x4wAYBrRJiDdasWG6KJJMLEXNwRGVMGYuu6AToh8l//+z9wR4bU7YzpQSQQGbx5BNJMOKSEEYpI5hUCiNnNPfyidXWDgB7YEDMipVQSiZpHmKr21rUzhBCTMF56FkRsoOZbTkk4hRESRARgCJG7mjdHTZexB0AEAnQA9Eulx8PDqmp4jCVHyKtXnxD1Nx/+mfOS4Ht1jW4bsmdid1dszSuCiw9oqeRkmxW8+8kPf305n37xiz9C8v3+2c3t84e3j9dXd1d3t1998/XpeJqGcZqnPMyPp226evb3/vV/+8vPv/q//l/+o//Vv/+//PKr13/8B3/04x//cFlO7r4s57u7uy+++KLW9vDw4Y//8A9N7e765vmzu/28S5LmcSx5SOP+6u6ZpMwE5+Px8fHBPZACkRWI0tiaqnYZMoBLYhQch9x6G4aZQIjZQB/WLfry4f4dRpC2h/u3X3zxRR7GH//G3+RyhajL04enpyfrJ0aXPCELRJpTZia53o83L4QPGJ3WM9SWw/p5zZzA9fj4YX168tCcUiJyd0lClx4TEiJFgF8kvAGqXXsDDw+tfTmuT9U2Q93Cjl6f2qZ66qDnuoZF4qJGZpCY3TsCmMblbTCLbavrVg2bgxl0pCAUpAhojAUxCc9ZClOax0Muk/BAmLXbtp29W0DyYEC6fBWq5t4QtcfZooowUyl5FhYiMddcOKAiBSETMv/OP/gBgQC
6+SLZWIBICU3dMYYiE0RtvXmQRQ1yBEIi91CtANy7mVWIgEgRwEzCbNrMzUIRJQwBmIVyYmZuql2NGCw2i0AQEoYQASQgsEAOIHBvRJqEL7dqRnSDRICBiAAQVauausPHrz5xjPf3Px+H0W3XtVknD3BSpM3sbEAQ7E3Fpix+Oj0VHj/7+LP7d/dff/WL26vDfvosHL76+uu725ci8vnP/6yUoQzFwK5u7tZz3c2H3/iNv/L2/Zv/+D/9v/+b/9Y/VLXf++/++a//2q8/Pj4y49u37z755NMvv/xKNbatdqvzNLx69gzdReT25vbZ82eXrPaQ5epq3u1KyjKOo5q9u/9wPm23z17wMDnykDOAJxaIMGtzHlkSE67rtjs829RSlnmarR7XZXn75sun89Pdfj/p8c/+4L/96svX+7sXu/1QpjLsD9Zb73U3TLvbV7cvXx2ef7K7ep6JsFcL9M1FcpU4L+eZcwA0a2HKmQkcAUUYL1c9QDM1cBZmuTyAS+hKmXEYc0KGgDwUESk8UC7AQ9u02hLATQGDEEU9hSN6Z0lde2tba5uqr8u69eO6HTucUTZiADTAxhKIGmCX13eeZ0Q0tVwA0JiyRW3tCRkDSbu6N0R3g6ZHB+u9YTobLADANAonZAIUCGJhZiZCohJO/Nv/8BMi5ARBhmIkjpepe0hmyd1a62vryAbsDk6CRKzqCAl8qisiAIALZQRABPCLOi8AEJzcIIIIsORhHEcPU6seGoDmm4Uxc6bMiMTWbVW3ALeoEZoSiwBiMBCjuCtjYsoe0Fqcjk8vX90iLa8//HPizZTDBzU0jUBk7mpL1y0AiZLAzOTn0+NeDj/94W+9/ubbZVlvrp4xlG2t796/efX844j48z/7o5LFAVnk5uZ2XSpz+cmPfv30dPwv/+l/8dt/67devfrsP/9//1c//Qs/OZ+3/X7385//+Xe+893Hx6O7MdM8TykXAiKI3TzO+zlAEb3p9vzu+sWL27qekuB+f7i+vhmmUVUvedXpao8IQ87TWERoSGWahmnaOVAuxbSZ9/lwUKPzu68ZAgOsHtvp/ZuvvyXA/W63Pn7z5pufA3QZh2G/313deKRxLM9evry7fnF7+2J68Um6+rirAi02X2kZYn2q9+/v3795fHp3Ph/dsACHtd7qZavWcARMSVLJIokIiQiJJHMuCYlE2AGZk1xkss3UCDCZQyATswZ4UCAJZ+Hc7YxIEWjWa11No/dW+7HWs+GKvEgKZoGwX7WFcGMacs4AmHMK7EyA5ABntdrVERmRwwLMwTnAa5yarj2a4xNxIPIF+otIhIUoESEzAhAiBzr/7d/9fgA4avdqWJGMwJNkRjGn7dRqbwbKCZghIFggvFBMQplx5JBEucihpMyEGIAQzOQeXVut6iaImVByKuM4SObEgBitrRGOhGZRchlyBu7NNzOtfjLTABNB5iyQiZAp/yp+ghUCn+77Jy++l3n89u0v5pkDajgHCMHQtREj4KbePCKnnPgKUNf29VX5yQ+/+6988dX/4K3f7l+15Xh8fGzr8uL2Y639m2++fPHiuSEAwfX1DQaa20effqTW/+TPfvbi5Ytf//Xf+M/+0//XT37yo9779fXhl5///Nmzu3VdT6fTfj+nlAFoW8/b+TRP0zCN3ToTEvjVbuemZZrGaUJVDM9jub07DIUIA8gp4TiU68Nuv58lldu7W0Y8bmcIkDIMQ16Oj0lQhhJhD2++GnZFys1EeTeItp5Ynz1/Nk55e3qigDDdD7tPv/djuXo25XF/feDnL2wqIEnKtaSpguKYOAmeTc716f6N+UbaODSlAQkBADGQMbGMZUglu3lr1VQBCQJUVdXMAQFbq9u6ACiZYQASs1AYb6Bn6xrdobEgJVHdTGuSjEiqql2JAUiJyehIrES/antRAHMggakSUYAxh0iuVYdcAKy2c2uKIgxDkaS9tt4UevPVo1lsDiujmF0y08I4EuzdBUEQhHEgzIDG/8q
/+d0AtFC1DbEjOYEziTlBpLbBVjcLJUZmEE5MJLhHGLwjeWYcGG0sOQubmxsQATM3t9Zrb6BNKAbGQSRLQqaUkggXQOoNXRNCMrNcRsloCBZqvrXew5GFEybChAiIGIThFuFPj+tHtz+8mq7fvX19uBkzEYJFuMDOwdW6e/OoSB6gCCmnIdx35cWPvv8X/+SL/245P0y8Qw1dTrbqy5vvgsObt+8++fijnDKQSCnCUrf67PnL1tdt60+nh7/4a3/jX/wPf1gK7Pe3HriujwCGGI+PT/M0sZC7927LeqRwIjfvta1a28vnz0XScnycr+Z5Nxchs21/NQb0KadxHm/urqd52u/maZzHcdrtdimlXIowRQBGlDQlSa5LQst330XGxw8/vztcld0tsA8cEC0Q9oe7/bgHwJubu3DM43y4+6jcPKP9deCcJak3ZKzrVuaZpquMJXuAbg5mmw6JuBj4EOjMxEyAzEjE3Ky5O2IQ/ao3jwhEpBaIhB7uqh7gyJJdZG1+//h035cYM6WMocgISHU51daImClHRK1r1wXZWBTSxskuZq7EiuREDNQDVjMvZQAgJHZTBEuclu2x2VHYRZglzBfDBtRapaqPASe8yNtpQmrESFgYd0IZIhDIDAmTx8Z/6x9/jChNt6YLQECPhCEogNkUwmPdNABRNHESSVkO5JM1ImAwCo+ciMgtFhIgZkCwwAjyAIzilhmHnKZ5HMY5lzIjIhohT4SiHREFIhF5Hkg4HAIsWl89IuHEEAhKUQgpoJnZ02m5uf7senr5xec/m698nke0GcEjQAQtVN2YLtCUysCCo0eHTj/+/u98/fWfrE9fPZu+X/yAsFlP0/Bsf331+u23z+6emeL5tGrT/eFmWU7TNKhhEv766y9evfx4OS/39x8++853Hx/vd4fd+XwiotPpnHNBQvfGTACgauC2GzOCeuhuHHNJqh0sANS83dzc1N6JoOSyGw7z1dXVs9vb27vrw+10uDrsDyUnREzznMs4lHHalc22vDugIPaNest3zx3i6asvOPH4/KWHAqya5gQpX92WUvIwzy8+CUPtLY9X44tP3AnUINy0cxI8L/3+vdBEwGiVAFRbD+2dWl85cxaJQJGcpaBwmaZShpRzGcaUS85Z1ZpqTqOhm65Bdrn/ba0eq64OD72dXSGNgEXSkNPsBuZPta0AgZDDyWPbtgUpSLpFc++5OJERXjD+gWABTsSIHEBdl4i+nM/gm5OZniC6ZIZwJGnW1lZbde0VEHO+jA0i8gYQEAwxhDlxmLkpunnrR/6d3/1OhF1CAbX2S0eMkdXJVHu07hGISTyLCE7JiwMgi+BgtiGypMv8QB+GVPIEaD0aBCGgG7QKETKUcR7H/TBe7w9JcoAz8yAlAFrzEZlgkySAkCGEfbO6ViUcCAnZETJglsTrep7H/fXu2ZdffMGJy1CEx5wHQAzSwAhAMCcJChIcxvRsGK7r0j/75Cfe2sP918/2Lz3YYDPQteLN3ScP7++F87atj6fHWuvN7bX1BsgAqW56Pt/33r/73e98+dUvP/30k/v7D4fDrre6rsu6bkSX2hZGYIQTAWFJDGOWbd2K5K3VaRxFsqtSipRKGcZt3Vqrz57f5rJLpUzzVZ7n3dVhnEZJiCIy5GmcOI1BzCOVLBCYxsk0fH3E1qcX343E/f5b5OnwyQ+9OkIM482w2+2vX+bDQcYx0zCnibaGuvHhQOOVbrEdn/h8wu3Jv/ni/bc/R2J2WO6/9djO50foOQ/s5s26Y3BOJImQE/+qzHKh6XdTNQtEi+h1C+8hQCyhfKzrB12dBVAggEsBcowAHlzM+qraulK31i/FcmyqGyKEb4AOYCJJDT0gABwQYQRMampe+7q12rd6WuojEYoY0EZoKY8KDh7bspl2dwJkpDCvHhtzQnB1RWQMEmSMar333npb+W/+o+8G4NbWblvvzUOHPFxKre7QVd2BGAINach5T5AghiwDErmh0MDiIh4
YLIgM6tXBAdjdEZIqhWPJCTGYZZznaTgIFzWxpkQAUdxgyDnCJBWiDVEMUL27ucDEOJk7SEO3kuZ5d/ji9S+B+LB7ISjClEsiAkJqzQhKytMFvpJ4P+WXCa6f3373Zn/31c//bM6oGF0X26rA/PzZp7syH98/6La23pB5d31gSWvdbu7u3r//IJLevf/2xz/+8bfffptSGcex1rrb7d+9e2/eEImIcs6qegElEHFOOUm0ugxlrL3VWg/7Q5hv2zLv52EYVTuAm7V5nsZxRJJpng9XV3WrwpIk55QBIOUxJSE2sMycApXSnnmE4s0r6zBe7zJI0s0tdq9+GJIkglnGYS/THvMeHdLNjONYTwv4RtPA82HIWbeVXUqZ1vtffPPH/4wlV/SH+9dZLkV+6L0zcsoJkdwjAIIiSb4ogGqt27ohoVAiRETOqRDCtlUkSiVzGloQInhYB3O2ZluYETijI2XT3NpS29K8pRQIqTdDbuGAmMMREdwrEREMZtw7uIf10AZaL+HgFl6HYcjMDoDozG5u2mHbqroGBAIzpfBMREk4wNyAKZtW88W8dq0Owb/9D76jEFuvvfW6NVWVLEwIQeEQAF2bRQcAAM5lSFIYRowS7utaT+dzxMbsgebRHHqABZKaqXaIFP4rbMuQR8IRg8ZhLjypX9YREJJwNW855SyBABisXjVWiCxxQzghOTIg4tXV7v7hfe0t5zlRKTymlDgBUQrMtZ0Bfcw3CW+YZiIWzkKHFy8/ev/hW8JgnIrRjscDfDrxxy9uv/vtL7/t9YThAVByHsbpw/3Dq1evzsvSu14frolhnndfffX1d77znaenp3mej8ejuzMLXaZ2EMwMEUXEXYcykAATofCybleHA0SER+udU7q7u1uXc4RFRB7K/nBIw5xyIWYASCkxYCoZIpDlV+V4kEvFInCIEE8tsdhRSQSHgwD1vjmk/c0nQTkAmjYnT3mKklwJ5is+vMLTKZ6+jaTRKVSTLu38lOYpoZw/vNde3fogWQQtkBBzzlkyIDIJCwJGuPuvRk49pZQkJWEipAvkHmkcSkk5IYGjGEb0p7o+1WM3Q0oKttUTJsiyYxx6b23zZTUHA9faFo8Ix0CPMEQKMHcHyNrBDF2TKYaBh1/mgHtriEycicFju1z2XEs947qFXzR4NBEOl/AeUTIDN9PYIFTV3QMZ+Lf+4WcWbh6u0LZWW2WOaR6yJKTGHNpdmwJwGDFhykOigVwC2DzqdoLoRAbY1RqQEUMEtW5mHUEYEwQgyDTvE48SidlzTixea3UPDUPo2htioKzgnGVydPUONmQ+CBdCU++7/bTZg+maeRdKiJGHgsHBJkk80MMQKPEV+TzKPObJml3Pz7EPfTnOAyecB9qP6bZtueRkbXv/+ptMoSGENI7T8bSMw7Sfd1999eXt3W2r6zCWh4eHYRj2+/35fL4gWi7JbQBEhEuHKOUUEaVkJiEWFnYLYpIklzqGCElKRFxbZUkaMY5zSoVSGeZZRNRtKAMx11pTyaYhQh4BHOYunN06AEJTDKQioKsnNpgG5rrdQ4Dsb8lWZFxOJ44Ybl9EpNiOlEBuP4b71/r2TVCHtp7efbm8/UVr/fbVqwTt8cNrFiEEAAqCVLIkuQRmiVC7AkTORYT9X/4ggmo3M4r4FbcSgQ3V0SKSelfrII4QBGv0sz2JsNAotMucBIQhbbWdlpN7D7S1KpAICxG21lvrqu7G4bQtDjCAEyBwIotwZxEhSimRu7pbQISzdupb3tbonQEyogA4ElwGDM3C7eKiSESDuTET//Y/+sHF2gth7r1ry6WMQ8mJiBUpwsHU3YCRhRJiEZxKFklFJAE0tJBkw4BIFG4GvVtsW3UHdAlTc289IGI/7gk4vAZ44rJ533q99MnCcKurwZaEheac9iSFMYhIpDAFp2a4BnZm5LjQhzIxQXSklGQEJAIGLwIjhAxlf9i9ZMzovj0dRx4ERSh55G2JDvX5sxfQiD0VHkN
wGGYHBI/rm5s3b96o2f6w//qbr6ZpcvfdbnepfvTea61mhhgifHFslVIAkIhLHiwMiQkTpzyMORCQkCUNQyplsHBAyKUM076MO8mFUt62KjmVUrQrIvbePZyFwa33noekreu2EXbvSCmDNyAiQgrHNECvqcBmRwTEstenczwdt6fF6rncXQMjPL6L5Hb3Ez+/L++/9ixOQwNYv/mz+vBV3h+Wta3HJ2Y0YBYys63WICLhYZiGcUqp+MU3EtFVkQiJWERSiktpiED7VlvfuhkFE5ljJ6qwPG33j/Vk3vbDnvIuuoBpgpy4NO+nZenaHYkQ3ZEwh7N27I3cWNV7t3Bn8pQxMFQ1AACh8BQuzJlJ3ClMVAk8MYg5blWBMMLcnZAI2AzCHSKIEoREXK40mX/nH/3QwGpbPdTRkEiYcsYsxOyXloGZqYY7Co/MI8eUWKQQAjg0M2eOlAECWt88uoFvtZpBRNpq7d17h/PpNOScZXAHRFRTlBRoECGIgFKbq/dhnBPtJe/zkEsxMzdNqSAnW+qxFBEWt2CahIbMlBJ7SDhcVNVojFAy7zxiv79Jab8sZ7NzQJhht1M/v4UtbuePr9LL+/f3Dr2vm1OogjsAU87ldD7nUoAJ6eK/5JyLSEpJHh8fL0tgKQMiMctlKxDhYRhEBDlKGQOglMxMnHgYxqGMYxmkjCx5mveSp/lwLWUCRBGJgLrVknNcLl4AddtYgAi1GwExUj0fySI4eUYBhQpEaM05hznp+UwY9d3byNepJKFtOuyPH97I5mW+9ccv8f1D5CiHcfvwjW0nlwLz8+38+OaXf0xIn33n++fjk7oyYdduqoA4zXPJY0oJidbWa2sByJLKMLKkYZwkZSR2BEI0bapVLRjYzDtG7fbN+d1Xxy/vlwckJiAi5lzASFt3BVOqvW51UWN3FCaiBMAQYhrhbObmFsApE1IDMARwx5xmpkSWTLltkdOOI2lzj4AA62ER3Rwuaw+yO6gBuACgeyOGPAxmv6rk8u/87vc8LpuIeygjY/iYIJcQNoJAQDVRZe3gRomFhYmJGQEdcEMId0/ZIkLNAdFRa93CmaCYkyskEnAM8N1+RyCS2L1qWM4SvRPJPO3DyZwIr+Z5GOeSeJfz6HHuujGKee2xkLSSM/FgkZBERJh3bm6+sQThQDSWtE+yJ0reOaccsJk1COMwqCEIQmXCu1BUWyIgD5fp/h4QwzhdYFCUpDdDoOvra1UdxxEA7u8/+EU0TpTzoGq9t4hARGYaxtK1J0ngMA4jAgpnIkl5LOPIaeRU8jDmYQZMZSgA7BHgUEq5nKyGYfiVxRPZwMI859GdulYB1NohyVoDVEE30A2Y+7ISynpc/Wlzb+v6/vrVp7ZWPZ6vP/kuUqy6Yhm41dAjEAkOx8+/rsuHkQPy2KKtT2/LmGWaTuum2pIUFiZmU7tEfj1MKHHinLNIRqQhlZQHRGitUbhuzdSIWVIBREU4te39+nh2M+CU2QWPfW1VC48Yg3kN16Zt2U5Vt7DLqEokTu5w4eIENYsegSkHoDMjuiNEkSHhQFGsU3heFzUDRo4wjHC18KTu7mgO4EFMgAGggI5AzA7ozJxSBuiIC//tf/xDDEcIEujaKbAwzIWGTIjg7ojSN+pVAqK1ypzSBCklRo5wJHPwiAa4RUAEIrFC61oRMuMAIRQoAEXGrptkvDncUbRAVN8wdJBdREkpjePEUFjKVHb73ZDyIdG41futPqi22lcqR+RtkDHRGMgRjJiFCxIBmUXzAIJB8o54TDKFuelqvvZWQ7ekNiiHixnfzXemjYUZpEOEd+vh7k27RxiAG0DAfrdj4Tdv3rx48aLW+vBwPwyDuxMRgNW6MlNKOSJ2u/n+/gEAkyQz2+12iMychceLAA2ZOQknaVWXdZEshNTadsEYppQuy3/O2ezy5on3DSXlYdrqmSCW86lBL3nfzls/vfdwiXw+Pq3nUxpH1Q6xWa01dvPV1dvPf4kW4+FKMsR
+H7tDd/DPf5mR0m54+PrPCsP1Yd5MEeD04UFSBkQiBkYWDPDMOSdhiFIKESIgE4G7sIBHb6u2RgDEMAwTj6ML17WqmXFsW21cSpkR+djOj9tT7X05b4lGxNztXPvJvFVtEJ6yloxCA+Il6FWIimMl6cQUaMKEhokL4+WNymRiuuudI3LrGoGuFhHhKYIiSI0IBEkAPBdhgZQREYg6kqlXlobY3Rv/zj/4EZInEQjUejaPJCUlZCJ1NxB3UvNaPcBDOrBkygMTXfqUCGpbt5qYk4iZI0vigsYBIVkI0MOSDIUHdGf3w04C1aEPAmrGZZiGK4yym6/KsGeSVPI43EoiIN96X+r9cXlE8cxVQ51gGmfCHGDk5AYgCsjeSXsluLxOY+KBAnvrrmfEJWzrrVkwWAxUxjRua+tu1UybtfW8tQUAEQmIkNjQEWmc53fv3+eUxnH88OFDShIRy7LM81xKbk2naSainAUATqfl+fNXiMychmFAjJQYEVQVkYWZkCGw1uruvel+v+9dtXcS1t5TSrVWImqtCaI7n44fgmMYOAI3w9PDBzyfaRwYCUxbU5CShPu2hathn4adLP3161/w/sVut+v3v8iSeBxtKPh0HqzBLp9Oj0kwmb/7cE+w7lO2poFs3QQCgNxUWFLKampu9KuLb5CgQlcDQgEkYMqp5CQsREkCMDwIiVM6914dCiUQOtn2/nj/0I7mFh5P/YSurdq6ndWrRtv8kcVTlpSQizLzIKOIIxFxIWxCVggzFsSCVCKy9qEbmrJpIGBoa7p2JHVBQgVwV0aQNDsAseWcipQxR2Zz1e6r+gOSQVDtlf/mP/iRQhMhBN16d4oIZQ8PdzWwMLO19mYGyACM4CUnZAFuiIqI5q3pKjkN4wCQASOXnGWgIII08RxODJQTlyQlYWYTpgikJGadYtjtnu131/v5bhrnQWaEHAg5UUSufevxoHZeN3PcSmZ2IHBJhWKnG/TmddMwj94cAEncQ60L5ySjasVUUbTbEtDdacAdqIMTS2ERvAz26KJaXTWAUp5609rqkIf9/up4OkpK7t5ay7nUuhHRfr9HJBEhIjM7HPYfPnyYpvnq6hoASimIMAyFiC5WD3edpt3lsNRaG8cxIsxsv9+vy8rEl06Cd3UIZl5OZ2E6nY8s0tV103Eoy/Gp1QqCZRxZiAFVa+ZgZGTazJ/efH17OPjDl8f7D8Pzu+LNLDznHDlG7B8+ZE88j29/+efQz7shn46t5IRg6iuXACKP8EsdFFHkgjlnulRbUiKSIpkJkiTGIA5khAA3Iw9BRiaFcIQOttX6fn16WD6sdtKuusWq9rAct74p9q37povh4mEIncUNUNIwlpk5mIlYHVYmSJQIOfOesWBINLKWtELT7m7m2ltr3QOJ5fLvlkScOTNBzll4KGVADGIADo/VdPWoQbXpUtvKv/27P8oCAC0wulvrHRVAqYeCOwZpj7W1ap0olZwDGogTk4VHmDC7Qbgie0rZ3EhkLDkRWVfBkoUuAWaikJwHEaGeWZCpA3iAbrq/ut3tbwoNu3EqMhho7cdhGIR2VbfaHxPNblJdhaxA6g4YkWDoXeumrZr3retZQznNzKluhsZI7rg4tN6dwXaShthrjykN14cXW4ckLIim1qu2doLAcdpDkJmzlKvdrgz58elJRJKIqhIRsyDSfr8joog4Hs9XV4cIf/fu3SeffDqOk2pLSZh5GMqlO5ZzvpQRe+9mJsSSZLfbmZmIJEki4u7oceHq5ySmflmJc5lSHr22kmkYB6u9ZJGyezzeD0zWq9YTuW+95v01Ht99ePNt2k345ptl6LFlbtvIaA/vdC487tvPP883+1x2H95+ftjN3WNZjtc3NwF4eZRIKJyGofxLgTwjIjMjsuSBIRG7e7PeERAgHAhQiNPlGSMRIarqw+OHt08fvj5++14/bLqa0uasxuh5tbXqsesJXcG7UOBF6sY85KssO0kSEQ4a3glmpnALMEEvZBN
EAgPgZuEB5t7Dw4MgQCQYgzmNhRgICKVcZAWMiO5EAizozWqt3Wv3Tbvz7/zDHwomhB5oFmCthyo6CDEReXjvpuZh4U7EJIndgBgRs7VISIIcoECasjADURJKhNANHIzYNBQQSTgUzC1lzzkZhYYBodmZU3p2911GFlLmBMxrPSHpkEZwbluPLomzQpidL52yrhaW3ei89Nas96qxOgULM4t2XdfVvVqv6F4kZQ7T6j3v8t1cdixz75EIwKP1BtBYeOC9RQKAYR5yHnpXIF+3mlOapqn3flm/d7udCPduqpZS2u3mDx/uI+Dly5cASITTtCNCZgEIZimlpCQ5l2VZtmXd7XZJUi55nmdVLbkMwwAAjMSCl12lDDnlZB1KljxOicCtYy67obgjUAlb0BoRL6eHpw/v6uMHaHW6/SRsTR5NH+/fvc7jmERH8Loey+sHubleJI5ffXn13Vd63J4eHsapfHj/zfHxEQHq1olS72HWzP5lmh1Ata/rwpeKh3tvBkwimUmAspSJxhE5QS6Yc/TWW23akkiRGTidI87VHur2FCsQFRyZ2BTMGkYkolL4UlAdhyKIAoiILKn1jgLAwBdPkCJBUWUECSA1VauqrtrMlZCZA7ELQSLJiS4uk8CQBIxIQIgmpAC9d2/NmqmbIyT+V/7B95MUhW7RrUFXYKLAgMAIUutdO4SBsakQSUkjYwkXZMcAbxhUWywQDojECdwhhPCASN22qjXAARFJENCsB6uhKvRASlm4YN3as5tPyzC5BTIT8+l8Oi8PJbFAasumtlJgD1ULM0BAU1iru+JS+7JufnFziHMihCCM8HANCp6HkcApaOBxkBm7gVM4123VttXaFMJtcwuMjJLzkCTLpq03HccdC43jmBK3Vs0853xzc2OmZn45/avqtm3DMJRSUsrTNJYy9K4RTsS73e5ScxiG4Xw8EdE4jtNuHoYh53zZWzgJiwxZEkvKKaUEALv5ICzoiilR2FjkXDvUhom72cAgGF07Bq6PH/rTGzu+jd3LQ4aRqXKG+6d4fr3bX8XDIxSw9uhPj/l7L8+vv22vvxKOx/s3hfmjFx8/PjycjkdCIioi1HoFABFhFne/HNvcI8KAgtOMMhNLAORSUJKDmZp2beeln1fCQCYSHsqwm+a97AfaEyZbbauGEV0BILLMiRMYZM45E3IwIQUAhAcCQgCodYuVeROEUHC/bKjq0c1bV9UOgB5hlzkpoShFABAikMHAgBwpEg+JGQLAMRw6WNVaa4NAROS/9Y9+nAubm4c3dd3CQTSag2l473bpZNUOZjTkMVEpskuUELq7Ne0aW4OtawdElEAXwcmdAzElbq7dNnNFIhGMsO4QAZicnAKRU67rypwOh49aw+6Lu671eDo9ONgg8/l8qv0IYV1rdwdM7MVDLMIsmnptysyXVSkJZE4USYIQkKMkSmNJIxdR5kbW1TZKNDIEhUlOCIGuHoyElAhFgNmDpmGaD1fC0lpV7cMwlDLs9/vLDKS75ZyGYbhcakspwzAdDrvLuXnbNoBIKQ3DUMookoahHJ+ORHQ4HFLOpZTLNxMe5lZKudrvmYgQc8kpJWZhiKGkIHLt4D0AtvMTWQXGAGrLKYEyYHR7On/blnfnp7eA0zQU4sIUHGbTTV/bmFrVDksf2jbtdl6DqKVBHt68H+fh1ac/bBoMxpyATVhyLsxsZoickkQAkaQkKac0jJIGJAAwQFDv0Lr31tZFuwWTmiMhEJgiEoHj2rs5OOWl1WM9btaJ05B3QxmHRJIwgLr1ZhvCGI50GUkn3GpFNDdIiATg0QFR1SAIg+3iQEJzMiZixgAnhjyIR+26mAKgCiliiBAhEVymc8Ad0Ar41BryX/9HP0FAIq6q5obOtVrz6qGMGB5u3rp3pfAiNHAw4jAOQ8K8bbrZ2REutCz1xmyZZvLsph7QPQCgtzWoBbqrAiDSYKHMAkiIHdxVodaYhxui1HXdWm3tuCyvz8s7yUVdj8u
3hisJWHRmFJoJxwiESK5MlEsp865IRgC/+LETiwQPPA4pZ0HVc+srsKAhhOzGnRDkPIIjYVhv5kQMIJDSUJsjpaGUlFJvbd2Wq6sDIpo5AGzbduHS7/f73vvpdBqGaZ53t7e30zSWUtxDVUvJ0zSIpGEYc87upr2LyDBcOmPDUIacc5LkZh66n2cmJMKSyziMAFh7C/c0DOBmrQ7D4NZsW4PYu4tuy9Ob9elhvxt5TNZOgvkU49tv/vT27hpwtad7GdnaxsfFiPJUsNn9h8frVwdEgeApyePpzXh1s58O1hYAAqRhGB28toYoklIEAFISGccppWzee9tMGyK6O4aJcAByTsN+x2W4kJ6X9XzS5am2p7r1dq5eGztLskAPoMhSOGci4ZzmnGYh2nqs2xbhBGzOapgywwX/pysLAMHWcFvVLNQUCBDRw4Mh5UA0j96jpoE92rK1CEQyZu29mfWUMhI4WngwjAyTdeyN+C//vc8wiMUjFGEEyqqtLu6uKWF4mPauEZYJi/BENFIumQ6ZJiI496VbTUAXlB0SFRqEBg1em9bmrTW11uzsEZc5N3VijkyFEzNiYmeSeokSDkM4qS1rfbtsXy3t9dP61mnb6octFmIgFOEJQC6QCQYsaWSZCHEYZRhGQgfvRJwgCdLAKQOzIxglZHRzgFF2o5TWtoeHh/P5EaxLylIyoUzTlRoAYxlGbTrvDogUYUjQmhJRTsN+vzPTUtK2rcfj0zCM87zb7/fTNF3OQdu2EXEpebeb53mnavM89962dZvnOeeMhLv9TliGYRAQScJMEI7u67KM08ycUmIA7O6pDENO2hYRyUXujyfziGVd3n8rYu/ffft4/+bV7fem4aZ5uXr+6jDA1tab3f7+7bvFlut5evv1F0o85MFKNqtPb78eMB/ffygDItnT+w/iPO4m5CC41DSj9W4el0mqcLhEvtethXehECkBlJNkZr/8WmVCZHFIGNqaMJGwUUTUp3r6UOv7p6fH06OCEQliF7YswyDXTIVF8vB8Hp4D+nq206OaJUqZmRhTkFftGu7EYalr1KYGARoOUWsAQR7RY4MwZHK/8AgTSAD3i87RFIgAIBCCPEGdVFPrzTX4L/zWzNJKnjBKhJJkpNKb9nYOxAi15hiFhb13hGmcXzLtBDonwJy003JeAS8jyUhBtS/EjJj61qxHbdu2roiVpBFlAGFxJAFsGefEBdxzHrPsEo9TGV09wNft/v3jL1s8LfW12tlxNezMIjRkmVmc2RCTQ5XsKQsFJJFhSiKOCIguhoJEjEOapnxIktU00LFbrxrGy3lJA0+7mfM4lIKUmPm0nQEpp11KZZqGcbc/L+v5fAbwaZpvb2/3h2ldN7MWwWY2TfNud5inPRIg4jhOqn48nqZpOhyucr7EJXi/359O5659t98P43DpYIvIbrcLUCaZpx2S5JK3rTbtwzA+PNxzOJHrVplimMv5vIRaInAg257WD1+dTvdJRI+LLieLk5Kv7bwbJgjfuj2/vf328QOW3WHY1bpp7+F2uL0Rzh8+fIjQ++M9ggy5dOvNXCiNOefE3exSN0Ezd1dXYTJyBxvzMMxXwkOSBJwoJ6YMLKpqtTFYuHZVYZqmcZ8n83iq7dzW07Y9tO20vlv9LFkST0nK9eGm5DnnYTfeTOOrZ4dPrufPslxttbuvKcNcriMAwps264FcyHN4AkaA1Fa80BeGMXEOAwMwuygDMFpvSA3IgC66504AFhie3YsphuPWjf/R//hvrNsjEhAWjSd1QhgI8bSttQYgdGOAYWBKkfpSp+GqlMngTIhCYq0tW9OoSMjMgNyttthQRE2X87FrN1egyikAIqfEJKlIpgyBhAxAETTNhyIThgRVD+xen+ovt/aBmQM6ogP2JFlSYuLMBcFE3APMex6QJTA4cREyht6tkjFgJmYIdtC1PRKBWsfAkoYp71IeUx73N3eHq2eOhBHLsgDzYXc1lHGa5mkcP9zfn5d1GIZ
pmuf5sNvtlmU5nU7unnO5u7sdhmG/P5Q8HJ9OImma5giKiHEabm5uELHWmnMexxExVFVEcs4550uKTkTGYUJkkTSOQxIuQ2FKiHh8Wk7Hh/P5AUDPx4dWK7ltpw/Wtvnq8PjuK9RTVwv1/VzOy2Mpu3Ger1+8fP/mGwlwgKf7+xeHG9jNhDKTnNbT6eGRAOerQ183dA2wbdsQaBhnYoYIYprG4cITZ8KUxN2RUASFRVIiogDgVESSX7LQNGIghFFYoAZFyilTOS/LYh0I1GmtlyBmXdu5a2XMIgVQUpnH4Woqh914dXd4tp9eHua7Vy8/en73ijiFgVsgCwPWCy8RnVyYMKwhJgIJU0B3i5TpcqYmAuEiPElQdPNuSSS8rbY5kHruiuHglWxTEON/73/97wCmHgtGd7AL9CQa9h7mK4KE58RpSEOhtK1bbTVPsyMDWaARmQauG1hUEWHirqt6M7WutepJu0eEqgEIkucCIgNEHqd9SQXchf3Cqcs5S04UjBgkQ7d1bQ+AwCSAW1xi6BnNNvJEII4r5U1tQYGSBV0ZVchV116bBQGQBIM7sQiKNZ0lH3YfT2U/l4mBx3Eqw6hq6t5byyntrq4R0zCMkuV0Oq/rsr+6FpF5nsZx/v/T9GdNk21Jeh7m0xr2EBHfkJlnqKru6gYagEAMhJoADQNJWNMISqIZdYWfLZmJkCBA3eipTp06JzO/IYY9rMHddRGFv7Atduy13N/3efZ9LWXdtp2Zvv32u2kaRSSGYd+rSDgeT4jk7syUcsw5ulsIUUTcPcZwj1XfS1XMDAC930d4bGZEyIRENIw5hsTsRM7kIuJdy7pA6307f/npx9KtLS/nl98c5zkQASJFUZK6bTHFw+nw429/ezo9aG/t/Tp9+wmI4LbnOW/b/v7yCu7DEIcpsbCgnI6PzpRDZKTWaqnNXN0dARGAmWMIbqCmpsrIQASAzCwxIqLDbl7cOiMCumoHIHRet23TWnqtDbZi13q7tuvuTaGTZyIoe221Hw6neXh6OD0/nj48P/5qnmdVQ+AQgkGv/SIchJJDUHXwFhhIAAkQmAXdgVmI0FpH10ghpDilIVE+TQ+Rs5CQASqqYVd0jgzBGnhHIOAA/L//+z+TkK2DhHsYtYIXcN72brAFziMNgSiwBHSwuq5rsZbT2HVDRIlBKEY5QC9qHdCIm1nT3mqvQPBfCSmInjhAjB7lCJ4BKPFMBA4lRLFu5pjyKEwOFmQ+TB9a68vyZt6RCSG6EwCYWms9hhQitP5u2NxKIkBtap0YALS1VpXcjJEZQpQxSB4oJoj7an1X7wiuHLiUsu3bvYudUkAOiOIEtXXt+vTx0zBmRAghrOtK7Nt2a01//es/yjmr9pyzdmemjx8/qN5Hn0gMx+PxjtWJMfXeQwjMtG0bEd0/Aojo7uM4rstNtSMCmhOjdSVEkWC1lr04eG+l1xvovpy/Wr1aa7UD1EuwXvcWmY055GxkTU0bELqwbMv27Tefat+/vH55PD7pXta6nk6Pw5C77jHIly+fswgRO7irIlJM6f16bqWaOoDXUsyciO5TUXBAQJFIEuBekHHt2gA6gpu5dUVAUwXXrpUDVTNDC0Oubu+3y/tyvu3XvW9mLeXgxstaUsofHr89TqfD6fHh8fHD84eHw3Erl62e97p0uzbd3JRjFhlMDVwphiApYUAAoVBb4aAiId9slwAAgABJREFU6NBDkHGYgnGCIJxSGLIMoNDUDcSdwSn4gJYdPGeex4H/t3//Z3WvtW0d1NEZFaAYlFZu++5TimNOBJxFoKl3VW3X2wZkOQiQiIRDenoYD0JQajEyTm6oxE4A5DFIZoAYAiA5KhiijzEGQXRlQFZ0iSMBtboR0TQ/W8OUwnF8SOFxWc5L/1tkAku9mzsiBqYYYxinEQnMdu0VujKKGzkqMbTetk4OCAwkqfXe+g2d9rWrbjlNQgmIem/v75cgQqR130SYSAB43XcJ/Pj4nPJwvwD
GGM18L7fb7fqL7//wdHwqeyEiRIoxPT4+5pzVmrm2th8Ox8Ph0Kq5YWtdhEXI7Pep6TtXOEoo+x5DSCnd89Xgjo5m3rsSM3u9vH9dry9vn3+oyyvUvZe9rzcSnubDcvkSSFKcSy0VHAhGSdP8KDysy3lMfBiPe+vT6Wi9X94vrtVNS6nglgIiYM5pvd2G05GH4LXWvaR55hjbvph3bX1dl33fWmuqnYgkxiAxpimliUV6V3MLUVhGkYxOpVXtnYljzByEZGCPW+ubY3G5aXnfb9fr+rrttdwIacgzgO7lMqZxyk9jfjydxo9Pp+f5cUy5+H7d3i+Xt27VvXRrCIBE2tycIqDwnVxBQZwcJSBy66aDTBln70wKA+bk2bzfbGlQxIZgA2JAvD/4EKPzv/v3/9zcu16uy0V9RW8OW7UOagx5iFMIIgECduhbbXuzvpS+tjaP4zQdiGKkIUlOaUQgVTWjEEYHRPckg4RM9/AU5t7NzFQB/L6uiojkzGDEoGpLqZcwhnl4DhyHHAMnh7Du6215B3R3VO0hhDEfSIQJogRB6t3cOcqEzoCKBB0IQEqDtezmHb0hGDjJkA4h9opCyb1v+zWGTECff/7d8XhsrZTSatMhj8+fPmozJ3cnkZBSPJ8vpezDMDycPoQQAbDsJYQ4z0dmYSZTN3WzHmMa8qTqIqGUql3neW6t9d6naSbCVur9GrCuq7vHGO8dAGFW1W3bRKj3kpigl7effqbetBezuu1nIfr47XfWmhoM0yGP2cDBoe0rK44xA3RhEQi9b4D7h6dvnXG7nuuy7vs2D+FyPmsHYh7GcWv1NB+tNQcszRhD10buzKKqpRQRSSmbubkRSUgDS0QOEmJOWVLGIOik2s3VEB2DGtaqpXZwr+Rvpf10ff+yvb7dzufl/dJuqrs2SDwJUSlv6/6epzDGYwgyxDGFSUL0YMXq+XrbzhfD3nFT3ZmJgb1bQCIiRCCmKFE4BWb33bRCS2M6oTMaUhdwAgJwt0bWGTyypJxyShEIWDr/d//u+94rArn51r42fTfbTTuFPE9TkANjiFHY1fqm3dbSb62ZI1r98PwhpSetLcUxpti81LohkKkxIYpRIARC81ZVVbqi+33chxIyubgTuBMqQmt9WfsZID49fj/ECVEQrRUWeKh63vcLeFCzGCXFkYGZLFBEH/dWCIR9RIjIDbmTiOAwDYfIA5sFSJkOh3RgEF2McZjHUy1rDBzTeL2uf/THf4wS3l7PccgPD8+S8743ZlT31g3ALpeLqqnqw8PD8fgQY7her5fLeRxHAByGQfWOEbBxGkWk1hZjQsTWCqAdj0dVvd1up9MpxrBv+z0zl3O+XC6llHEchXjf93uGdFlu6/KuvaYkOY99L+6KiEKyti2GLJymwyMIc8zWe9tWpEi93a6vQMVBrXchf395u+3bPE1JJARa3l7AdZgP5mZmXe00HPdlp8AoASjspSD4EAdCBIRpmuKd0ZLi6XSMId0P4BwC0P2OQCQECIggEiXEIEJBILCISAwwpA7x1nRtbVtW091oI2xmDszAxoLrfltul5glpqH0HREdDJRa6bf9cr1+vWxvQDWAo8MQslBiFu5CLr17q87MiCZRgHpf0aylGFS1NwDCLGH02Wos4EZIFNlxGEJMjA78T/+nD70tvRfE5t5r20pZhnx8mD/k9CwUAwhjdPDrdt21GLKqcHRsHYEOx2dvkFLOYVDbq3aFTtiYiCi6sVlza4gMgObgxqaGQMQYOQEwoHJwclBrRd8QUpTTaf4oPCDavldtNqS5lrbsV/NOCFN8iJgQu3DsaMva0XKSkYUUNhFjAYnHIY6Z8il+GughuEx0TDocH4/TdDJDVR2nyYB+9etfk9Cf/8XfDMM4H45qaCZ5GM+X166ecl7W6/l8UfXT6fDhw3POo3n94YcfWFCEp3G6X3HuRZnj8QAAX1++HA5TrRXQ3N2sE8v1ckH0lNK6ba3W+yColIKISB6E3ay
1GiKHSHXf93V7O19YsNWr6g5OkdL0+GFbVzfLOQJzqe7dWtnQ+vH5cS2lLKu1qlZTCOPhuF7O7rDdbtM4HIa0tQ4A1/0WOByengLHulZ1ZwmRowFoL73U3uq6LXvdzZqDiQQCDByZ2RyIAAiZxQjJENwMgEMiEASgECilINDAi4JhoHFSycu6LOWt6Vr2pauFKDEkJGEO1+VtWa/j+JiGvO83gQYFtaXr8vnn60/n5TNBO4aZOQDCHT9oJqokRGDOzoxOiCzRUFuvte5EQiAJwyA5UEyc7jEk5EghhkBxCDHM/C/+7QO67vXWdHeHtvOYHz59/JOH+QOSMAm4uffd9mq9maq3NNA9uVT6mlMIw9QVjWvtCzMAVNMaMBKRm3lvzIJAxDsYWwcAQwBvHUDTEFgcvCEioG3tZgLEs1g4pBNHvJZl2Xd3yFPet9teXyl0wjjiya0bKJhpY4GILgiBY+YAjBbClCDnNKIF7Y4EA+bT+Nhqvdze67L1bmuvp9Op1/p//H//42Hgj59+OQ4TCwDRtuzIkMekpp8//9x7/fD84dtvfpXTZNbfz1/3vT4+PuU89N7v0MzfK8uJzHzdlhjF/PeRodfXlyEP7r6uSyu1t3b/IEzTpOrmjRzQybVVvS2XV2g2D2Nv6+12JicGL9u7EBLIcJqsW9mWvS+MJJAaELm3voPzMIwhiLauit1szoM5MFKvvdc6HaaUs6TY3ZZtPYikcRw/PNVq2ot5C0RqXlrR5tpt226ILUape1+3fa+3fpehEKQ0IBNhAxDnQMPkxE7ghK5WLpdSFjI3kI56vS1vSzm39/P2ee1vN9+hO7rcUxPiLMnPl/O6bM8ffjHmk9aOoCFCgnHr+8vttWyXcZwPh6eQxRHckXTf2qK/P5gJQFMzZIqRkOy6rg5yGg6RSd2ZQgox5SFwIIopHyNHIY6c+V/82fdE1LS32upev/3wq1//6h8c5g/z8BDjUGtb972Ucl2vwI3EqxdEjdHiMAAPFVqKubfS+tV6c1NANd8DgwC7VoOm955bc4WNuAkmJEM0EpfkAA0cGBwJVGnf/Xh6aqYkPI7PrfXl8uK+h0ApHvay7bUgAGhzs657LRujppjRYu8sHIOEEGIQxWhEvG19aztoe0rTuly/vL8zmSAxx2k+iMhvfvPXTDYfTqp2vrzebhfrlsYx55FCKqVeLudf/eoPv/vu+2EYhjy+vr1cb5echtPpJCLbtuWcEfF4fAghmHlr9Xq9PD09Et07Q1jKjsQp597q+XI21ZTSHYEfc6x7RcdIoZbdrNayvr+9ZLR1Obu3KJHAXXcCzykrkjYjkd7qerlEkTxmJy7abS9tW6Z5QBYHjDF2L4CEhL3X1gsT5pzVIechBIE79ElCDEFrdTAHFkEmWq9Xd88x3E1AvRsjEyOLUAgO3I3uUAIFBAIE876DKaGDdlDvddfenOJe6q3sq9X3/a1CNSitdq3YHIX87gULlETC+fa5m3/7/Ms5PqtayJzzccYHdVrrrdRynD8+nX7xMH1giM2bArj3ZkuHW8jEYuDgiB2qB1i2mmQc8xyYhV1EwpQDp0BDiGNOMTC3WvlP/6dfpTgihd41xekX3//y0/M3x8PjN88fH45Pt7V8eX+53i5lu3RtFgRZiJUEjYOkubbdVQFpaytoBW0GCq7iiKbdugNgTGNKCKywU6RACVFCwhAdoBOhqYEDIrKk1nYReXr8tncNIQZJ1+Va+iqIhOOQTqa83TazRaIiiurVVKE5SyAkIknpFOUpRjLSrWzM0hsc0kO0bB2Px2lI0zQ8pzwSwrKc9/VGRAiwLMvXrz8z8fPztzkNW92WZXl7f//22+8+ffx2GAZE6tre3t5CCNM03bNApZSc8zQd7jm5lNL7+/l3v/vdhw9P4zi6g7vX2og5pdR6bbUSMxOJyL7v45hrbYwYic3vXWplNCjV3az3GEJvFdR7tZyTAW7XGwaZUtZaL7d3sHZ8mEOe1+s5MCD
A4XASSV1biDzkIaW47VspxRGfnz611rd1AYeiRkRk0EohATPT1mvdt+Xc6s5IRBhiEAmIICGkNM7zY0gBSZGRRRDZrC/XS1tW2Hcsm9WC6JKCmK/Lcr5er7elaOtMHaw3QPFe277V2pUBHMjczAGR3cvL6+840tPjN2M6lbZSwpQGwhQ4akP0aZ6ejqePx9O3QziSCbi7r6WsXV1CyJmdkjoYgJrV3mNIQ0gJo3DGFByQAIYx5BhNO5rxf/tvf+nOMRzGOT89fDPmecjDPDw+PX1LELzjXvafXn4L0NDFG6mZmhOPjujegLqDE+WqveuGBrXsjM4EHQsw39kVh3kec0ZBswTgLCABhRHvbVkhwogoRN5pNaMPp2+DpF49Cnf16+1q5hwdIY7hWThtZe3Wc5QYhB1aOxu2GB+FnnI85GkOOat69bWWGi1FDtApUYqIMQ6AwQH25Xa7vDn0+XR4fvzW3GLkT58+xTj89NPPr6+vps4xPj1+yDkTkbuVsrtbSsPpdOy9L8siEmNM8zzfuXGqqqqXyzmlcDqdav1919Hdh5yZ8Hq9EmBK6R79t95M+74tIkbkdzaNMCECU3DVXtZWVkJijqU3xK51cWjklnJW93K7bteLpDCN47aXnAYCHIZ85zuQYJQYJOU89tJba9M8CEIej3kYt20H8A79Ll0jwrZvdd8BwFyXdeldgUCCBImECODWOxgysZp2M0aJLHQnH6WEIWKI4EBhDDFurZ5v58t6fd+WatrcOm7kfSt72SopUUiErG5uGimWtn99/Xkej8fjE1EstTJwDIKRhvEBIArLMEz5MGd8GFJqdlO1FKdutbp2UODIzqQoJN1q63WQYZABOhRoft+KgXpwdet153/zf/lHIgNTRu4hhDGcTvM3D6cnc399ebtcl252vX5l4kFyQCbKIpkAQxhjCA5eq7l762vpq4MTKklXbJuuEifhEbQ4Uog53mN7VAIDYVQzUwLnzHHOxxynlCLjYPWaMuf4TWvYekXU5fpStaWcmCn4kIcjS1rX6r7GQMIRiUt19OOQnplzylOICSiykzeFpq7NwQl6q/u+lm1rtWznty+t78Nhfv7w7XJd12WfpgMh//T5589fPo/TPEyD8PD49JxSBsCU4h0pllJura7rmtKAiPM831+AWquqqfZlucUkOWezOzOC/ytRC0IIX798CSEMw5BS0tqh91q3UhcE1961921bQDClWLel1mLa9n1JKYUk1gHQGKy1jv+1fMzo18vrPB85TkV1msfWyuFwbLW10sFRm+aUDoeDqu5lC4mFhjTmaZrdvXXdarFu2o0lMouIsFBMkUW0N3dPISOSAzAlQHLoTgqAIUQOgQJjCJACCDPLfcokwimnIUZAPm9lKU1JK7SuDV0DJnRG9ERM5OBKEvLA5fb+8+evHKdhGAJz7RoDAkDK03E+1VrRgIWGmM2rmtZWGXvIDoTdcGs7A6OiKSJT7armEkJ3Ne/OhIGbtr01Qyh15//5f//nh/HbIGOru1kPgd1xzMf3r2+v18tW/Ho777evOVIIkZhjIIkokYc0TjmkPCLYtl3I3YCa7SGCozbX7qbWCYVczAmQg0TEBqwpRCI2gG7OGBPkWXKUxJRjCK7m5vPwaLo4aWt7qV8JnGkgCOZsyjFFoqK9IVCKh5ifrY+lQB6mGCaRUTipO8FAQA5uJowUEIu6gx+GKTBFGT9++O50fHh/v1Urp8OjVliuy8vrlzTG08Mjc5im08Ppobae8+AO1+uFma/X27ou8zzf8aCn0+k+y9+2rbV2L1IN4+8rkffMDwAQkWqPMZrb5XwZxzHlYOr3zO2yXIYcrVUW6Fq2fQlEKUvZKrhq38w0hAjKLDHGSZ2WfXOrriUwRuFt7w/PH0lAXTEECpIkEEop1VRrLSFIzLFbr00HCZf1hgCn42mcRkNels3Vr+t6vV3N7c44IaAc0r0bGLKMh2MaD44MQIhJQiK5D7IR3a1UaB29qxa
rBg6ltWVZt65LL2tbO2wQkoGliDFEAhIKAyZCQCFiDpRj5Pfzl69vP0vEIT0KsxbLMTpBHNKYx/W6QtkNzYmL1tKvpb47eAqI1pzE3UHB3RScYyrFFu09EiE5uCIoem2tlaKu/K//1384xuM0jN1g3W7q+9vbT5frzTy9vV0IrdSX6/qZkhsoYUSshCB8Ly/fy5xOHAUzc1JTJDcHRSPW0m/qznAAYKAeJAI4OiJAswZACAFUBFgQcooOaIAOfW9nFhAPvWwOa12uXXsQjnJEk96aeSeuMU7CA9NxyB9jnNd1a17zMFkFJrhDjtU8xCCctTdomvIwDkfq4kqHh8dhmrblpk4fP368nM/vb197v1XbWrU8HJmjxICAxBxCeHt7CZFvy/Wnn36UwCHEEORwOA7DQEREdLlcRGSapvf3t2ka7pm5e3vYzFRbCKGVejeduRohUGRCtN5qK4geyBEd0bHbtty01RhiV0dAVyBkxKYOzDHE7IzaWttLqz2nQWJS05xjktDB3ZzV7+1eNVXTtaxIEDj22jHwmIbL+aKqIcYogk4dtJfSu27rptpDEEJMKY55TNMwHz+MD99KjE6dI+U8hzRgCNZar13NSAQcdK/3Qf+tbi/X959fXn/7+vXrcl1LWVstuIJ4YhDBGCKhuDJSVA+mFZ2Ec0x5vZ3fXl+c0hAfY5zUGzP0fU8SDf318vV92zEaRzOvVZujdS3NKrv03pHcsXdoIbCIlFIMCQCAEaD5vcGy171X/rP/679A9BgjMrzfvp6v75/f/ubLyw+1rZfL19fzb7pdqlZH4wCOYI6BiV2JDSl0a2Yagzize59SRgYi6r0ZFKag5oQODqpujoImKN1s74vVFnEGT2qVArCwCGJbq+2b1q4bge/l1mqNHkrdumoQZOTesPembjHNh8NTiE/EUUIE5Je3171c3FrRhVBUfd13RnKqTffYYZLxTtI7HE+U4vvb6+38Mh9PZd9//vEH082pd/WPH3855vn15adpnubj/Pj08PnzZ2Iw67/73Q/btjCHp+fHeT4cDsfee865tXa7XaZpBoD397cYBRHvZ4xt2wAMEcu2D8PAgOuygGurDRkDsQiVtr++fmZ0cqh7jUICuC8XRw1BwB2cABxFEciaqtYYJaU45wFJWrd7E1+Yg7AAgrp1J4GYYspZ0d8v7633eZxN/bKt4ziHEMuylHXbtg0RCEH3LcdwmOdtX9Z9kSD35loaZ8kjcwAC8GZGxAkZACJ0N21uiu6g2lrpqmpk4AbQOxTXrW/XZVuWssmVgyRMLMDixGTsW3etVdEIXTuBMVO63m4/nX+LDvP0hMi1d0Ta9gUiuOr78tb7mgNEjsN0ZKSyF7PG2glUSCmaU++9poBRDJu5EEdOARGpF8XaDYn/l//7PyVEYu9I1+X69va72ta1nd+vv7vW162/VKs557rZtl7dnYh73zuqAqGptuKAYCBeQyDmFigH4SEiaDWLQTDGLkLYxEHMUMgR3LtrB2tgYF219ptD7b4qdu9dqbR+035rrVvrFChEKXW7j357sd5VUgBHIhnGMXMix0wTOb9evuz1uq/rXhVAxEyrumCWNNpoFGMYUore7P3t/f36hdymYbq+X1sr8zy20o+HD998892yLfPD4zefvmtWl8v5+v7+8dM3v/3px9oqAR6Ox9PpIYR8p1m11s7nc0o5RjHT/yoSh/tB/3K5NG05DG9v7yQyzVMrtbVuDlXXOYe6b0kCdXh7e2H0ul62dYlswojuvW4izMJuXtueh0yECArQ0SCl8TAfXVLTIoTgXlujGJ3QXD0yB2EK4zgDeNlube/ffPMNgf785QuRnOZZRPbe97Jp11abOUzHYxiSILCTd6dAIY0BRXVxU8RkzRA6jgOmDCzh955pJBEAdMDebS+rMU55rqXf6t7qulvtWN0xxwi0FDIDBGtt37ujMWnvpni3jBLS7f39d28/A+PD/ETGdwLVXtZdW/BWt8Wqo3v
kIBKBOjESAzgwIZO7kzcBVAIMEilIjiFLQvW2b2AeEPhf/d/+BIHMwE1aXc+3L3u5llp6NcTOvOQgH46/+PbDn6yrf33/21t/681q9bWs67ZWrWrVoSDgPIwhC4CHEEOKIBgYY4AQNDEmyYiBgHvfHbv74Aa1+l6LAmzlupRLtdKhOSEgEJKZV6sGAI6M0dHW7bwt27puwJ0DGXirNTIKRVcH8BAzM19u59fttu8F3VBIHUzllE9jmomiqtay7OutbI1RDtPctN2ub6eHOQ8DUfzuF38IANu25ihvL2/X263s7ePHj137+/u7m6eQjqejdjDze8vx9fV139fHx4c73PN+iWTm//pxKMttE6YQZFlvKSZCXJarBHb1UrdeayAGNK271g7MWnvZbkSACNaVGO89AVe33gE6IQ95NHc1l5DjNBKCIwQeEFFNk4T7IqL3JhLieHj6xfdEvF2u3fo8zinl3uq2rRJDDJJZmE17B2t13yLh8TjP8zGGMUoackQmJ2II9zg0iGDIJBEYMCaI2ZAdPcYMzda+ddRlL7elLrW8X8+17xgJgZ3AxEBAW7fuRTc1rkW1KnTovZupNQGQana93S7XN+SeI63rtbayl91KIQJT751AIhJ1rd53Ao4uaKJGTdWVHLgrFCVAHoYxxSGIeK91rYDetfM/+tefANi0k3PVer6+b3VprVqnyGEapueHb56Pv/7m6e//wS//vsj4tz/+dlvVAdZlXday77X31axMw3Ec8jTNxEQEKc05MeAlZ8pCQpwju6tabWV1R4XQO3XV++xIode2GRiImJv1FmN0tGq3pqqV1F1ha7WW1bd2a31VYwO1jr00h2LWyZ2AMx8UwqVsbW9au5oNwzFBJgjT45BCWLZL2RdrLUpCDNo6hfDx44fj46dS2+HhUTj95of/crl86dUI0d2n+UhB3r6+CBIATIcjswDgPB/MrPcK4ETEcld5o1o3s3EcS9n2feu9A7ianh5O27btpYzThOiXyyXHqNb3dW1lzZmu54t1Y4HTMGzredsWZGbmcRz1vlNhUa1gigwIlPJMJLXVqi3FZN2b1pSzSFjWjQnRXVvXDta7apmGh8PhQ9W97C0Fmae5aFv3rSzrvq1p4CmPwFRa7a062OFwPDw+K4C25oQhT8hBzUkCs3gQdHVUFcHh4NPA4wgUQDhkMYd1Wb+eX7+u7+/rtfTeuxclJeu0mxijeSuAwB5KpX3feuuGUDvUUrV3RYwSSlk/v30uViTIUq4AOhAByzAdJWW1zd1due8GBlpqLe4au9LetKnvrTUl4cBR7q+4qoEzGDRQ/vWf0hBnBAAK7rzt+7betBuqhNC+/fiL7z7+n+fhOaUUZDgNv6jVrssbEYBL175uRc1OUwiDRJlyyuMYHaM2FypIK8uas4TgQi0EUqdW1cG6YTeqvZiBu7EUAPeGaswkKQiQAZkQuKkbF7feFR1rhW2p+1q8gyuo3lrfl/1LEJrjHIm9OxNAw/f3q6MSq1o9zo/CGWkcs3jX9bIRyLbut9t1GKd5OpHkz19+XrdFJLy+fH0/v03T4dM334P77Xo7HI/LvpStpBifHh/VVdXGcRrHqda99/7w8FBrbX0XCczs7veYZ+9tXdfW2t1/CIAhhF61lGpdwQ1MxyG03i5vb2OKh4fjy+uL7gu0LQm13hEDIA3DWGptVYdpkBgMTM2I4zjOIhERet17sxCk9c1dx/k05GlZlyHnGKetbkHU1tK7TqfTMB3NoZdVcjw8PkHX1tpWys8//bjt2zjN4zha17qXptXBHRqGKY0TRsHAEAK4IxGlZB3AgJFUO4cASL0XxCqIiBSJOtq5rud1PW/NfVBmEajNt34LmQNH8OBoiNDatm513dq2bdZabbU1MDNADEFK2XuvMTEH1K4dXCQe5gOg7evqquhe16ZudW+9Wzcs1Xv31sydooQQQwzeWqsKbKTNqjf+o3/eA2WmAZQRqZuu663W4mpqu7D/wTd
/+uHhV7313rzVXrflWq6t92mcAXDdW4rjlA9EIi5THCRmQGJU03dOilTVFiYmZAohp5PwtJVWStduQNR7b22V5Mi9Vo0sUUaBZAACjaGIBBVxj73W3tyctnXXBr1Bb9tW3679vWPzXhPEo8wctNeyrXuxpmgpCgWOY/j0/Inq5N1BWSi+X25d23fffneYHxDw7f3ldz//5TQl4cGBHh8/DcNJ1d8vbzkNdd/2bXf0HEckWMrycDox853s0LuJBNWeUrrrM+4IrbvgFhFKKcuyxpjuEFxCcLd9v7l3tBruUo0ol8sNwQk69Gratm0nCikOQx616zCk3nrvjYRCSIQxxdx7uwewc8r7vjv0+TBve12W2zCOKaZaIaQA1pbLNQ9DJHt/+ULEpVe3VkohksM0xTQQSy/1tm/LtnpX79paRQIEE6Dp+SPFaNCJgCgAIQVxEiSu627rldpGrUAtDEruvTfsJgY55xAHMtkVlSKnEMYjYbrtK7MchiMhO3UG7Qqtw+XyUvYdNJIHrb2VbRzGwzxI9qWtrbbH41Mafq9VZqQgEJnXZa2tIIFqVG1Fb62rAbsGcGTBmHhK6X7tdnPr/Q7s4T/5l6EUiyFbd/BurqU2haIGWrnua87y3adfmxKAKbT315/flp9QLBARmrMSMuOYOU4BwMBAwBWhOHrvlVCRqyq5RUTI+WHKn0Smy+1ctLXeiToLtGqIXRBBmSU4GFp3q2hGQTqDoGIPptLNvRqqhWCC0Mw2uxERIVDrYmjurWpR3LR2aHGQnGnfvgaSx/kX1jQPGdEV2nEYEw5drbT1888/ENLj48fn50+IsK63rs3Nh0N2s23dhjGv6y4cQ5RhGmJMAPj49LAum0iotR6PhxBkWa4hRGautd6Rb6odAEppiDCOo7u7ubadUBl1vd28ahYJkZzg9ecvCVCx5mlyQ+EAgAQowq3veYj0e99JAEQEIAbz36sriNkMJMR5PrjD++vLhw/PhvR2/t3heBSZl+2Wj4fhcLhdv/Zal2XrpS/nt679eHjqTbf9dno8AfK6rWoNAYeUCIkpcYgiotq9dQQEYSAGjoDACKAFW7WygzdHIJl4PgiGttzW9epqCXjMydCrgfIAAXov6DDkwzRPgZMDA/XubV1tWwAxMrv1Ns6n54dPQ4oiHhNrbQjy8OH7ORwRhAF6W5mRUbZ9rbq5WbW9WUcSxkCGQBwCDOPIAYTykMcUHIHMXdH5H/+bb2t3BCInJkOCqsW0Oba97L15LQuJMvPb5a3Ubd0vS/spBmRHxc2hkAEhJ84i5IhNm3ZruqsVQgWoCEYcjvNTSiewMcfjfHhgHr++f9nLdchZkGtTAmVkYDdrwkGQiJ0luhMJCHVwri1t247uQ5DTGIaBhzHHELU7Wxhj3rez1lb7vumytaVaG4YcEUj7XmtIKecEoO5gpsv7pVx2ILrdLkzhD3716198/3e+fHn52x/+c+/bYT4+PpyI6Yff/PDdd99fb9eX97df/+Gv3ZEEm+rjw0PvWsqeU+bfu31Kt55TBMDee4zRzEorEkTVtbd79jVIKLdr2a4piYCcz6+380sv+8NxLtva9r3UbduXw3gaxhyitNZ774AmIikl4uh2RxX2+5KhaUHAex4bwGPMUaJ530uNUVB1ud0Ox8PT4enr+bK3/SGP1rT3nkMIQQx9r9vzw2Ngvl6vzBEIUQIhuXcHkDimEChGksDI5g4EiAxhcARk4pARmYTMtLcOABYycQbzy37d9r1rkUAxJnUpW9v6tZNO6TDwIU/HYTgEiW7afKGg7uCdYjx8fH7+xfe/mqfHECLHOA5DTulyeSvbNuYJzFrRXnTbrs1uLHttu+reejcHwoB+r8kbUouBOPGYD4MIsoE7Q5CU+J/+D98BcYpjDDOzhoRuVtoGSKBTLbqX/nb5adlvL+8/vLz+5aZfwHugFsSZUEKX0NHZkXdrxKitLst+P76BrWBmOiJE4eE4/eEQPxCZBBrkgwN
t9WJK1hXIVF2C5wEoYpAh84CEmDgFEVQmagplt66NUQ4yTYI5hxCGIRxjmAljcAHv23Ir3XZfSm0sPATOAhLy1vrebnl67N0EhlKs7wVVS605D3/wB384jvMPv/mbv/mr/9/pMH3z8RNRCnH+L3/xn2IYH56e//Jv/urv/b0/QeCvL69DyoeH03yYf/rd747HE5PcSfGt7eYeQzBTdxjH0dxVe601xqC1uzsSDWGAbtu6btuWJRwO09vb15eff5gCzPOpmtfthr0ScoghjNGZDJyQAYA4IiIw3JNFrezMTsSgGoQcWq21V2XBlHLZTdVSFgdflm2YhsBYr4vt4ADaGiIic23NrO3bcpxP5rDf1hhSTgNxADBhCndgyjhCmpyIBAgQnFwiSjcHxwCSlCjkg0gCq32/kSSZj+r+fr2e1+vSq5IYBWttraVIPw6fnvK3jNN8nAK7ILe+o/BxGhjaGPP33//y9Pgw5jFPMUqOPKaIHNvl7efz5WsIXsve2q3r2ut+VzwAORMAmJupAyGaq4RK0lmYxJkpBUFEAEY1/mf/+o8SDZFhCDlyTBHvTxuMUhiwn+q+rWu9refb8mWr54575E4CzQpRmQaexznlANq3fSsdiAfv1HsD2l21OeV8BOO9bCzhOH9jls2s9x4k9MaX24UQBAI6SMA09nkcAFibAyd3YvEooXpTU+2m3QxkEhqhRGIJE2GMfHyYfxEpg1lpftsWA2CnIBhHEhFGZPbrctt1Oc4nbMMQSHtRbYcUjsfnvdXbZb+efx6Oh48fv/vpdz9spV0ut7Vcv/vu43Ldpjx9+vDNf/6Lvzg9zPN8Oj0+ff7pcwrhu2+/e3t7vyfkCPl6W+fDwTq01o+nQ9kqKBLBHZ8/xKHt+zwls91V9+s1IA45RJay3y7Xt0D4cDoZ9o48TKN5F2ACigKE97OOBg58b2QhAcI9jddbL6XmNLvjsp5DkKenZ+3aWnOAIQ+B8fb+RubjNKKAoqOE0ntOgxAul6XUvdUSxzQdDnZfJiCGHCWEQVg40DAgCpGokFojA5fsYbLegJ3jZAbYC2AlTwxgpRACQ9gNd8aX9fq+ree6fqlLB5x4jDGN80MMAwGxUBDpdTO75cnSMOfxFIaQh5xTZnGhDI7b/k5oLNBaKbV13S+3l1L37lbaSggOoOZqWNuuauauWBp1RwCsLBJjgN97nV3V+L/9l98/PXyXU1KrEpkZw8BBInQa8/BweHRtt21pzWtlQ2FAMyMGiTykGITMAQCjBKG4llq6InFX3fbSAMCMkXM61GJ72URClIdt1WZFu+5lVe+AHHk8jBOSjSkQIRNFieYEICwMQF2LQzcNa+l73QIxE1GkNJ6CnHI8jnkaUgoYaqtbWXv1KMLEMQcWUi8o2qGJ6pRPHGP3xkJBAqott01VDSoTjePxr/7mL17fXp6evs1xHHPSrrfr7fvvf/n5y5dhTPPhMEynUvd1Xf/JP/4ntfTL9XY8nlIKtVZTnca5tSYi43Tcty0EykO2rtZ6TNysIbqrai+kfVlfwNV7SYG9t95bSBJl4iDkEqMQ3OvgfifP1b2W2kIId9oKk7gjYUgplVaut/M8jyGEZVnQ8XQ6bevaWyt1jSG4w21dzREluAoRifC6bYiQUt62ZSvrbV176zEOrRXEPuRDHmZhdcrhcEBhJHJkdEZkYIDMIvleE0Oo3hT3zbbFTKG3fnkz7U06kRPm3Vop+Hq5IsGUx240juOUp2s9e3cWQvBtfTe8xgmBwNFSTIRRKxJi13WvL01LSmlMc21Q9lpbXdZraQUwAKhp36u2pq1U1YoEgOTdOlYKioTgbA26OqC0zvwP//vvnp+//+7T32Ecm14BIGAcc0qJU5pP0+NxOvZme2m9Y6mVICEIIuUhzVMSQVWo1RHZQUgA3XpTA29OTokRu3ZAYcl72dZ1Fw61luV2U3W3Rl5CYGLmSGngyEPOIswIEuJo0M2K9tbca1mYFaCVvSKgiKt
piqNQTjEHZuwWxNXrtlT1LkxBJESiyGYVEKbhgTo23VKaycfAgtZKLQRGALXvIcTPn39a1ssf/OpPvvn0i9vl68vXr8zh06dv1q0EkdPpuJV9PAza7O/88d8Bpx9/+p1I+PTp075vZV8R6XA4xZgAnFjADcCH4YBO2ru7phBa61HEVRH99fWn3sttvQzDEFIuvQGQGR7mIUjqvZhqCJGQmcM0zEyh1WZu94AdIpnZtq1Ny2Geay2t6eFwtG7n81sIHGNclgWJ1r0cTw8xpt6qtZKGbKbLtrZeLu/nWkoMSWvby62tW287MaSUnCzlkIYDxyRhhnE07bhXSBkPB/MOgJRnbH5fvxo5CiAjWF+Wde/lfb9ebq/b7QocYxqqw3LrxRViIiPmMAxj29Z934OICHXTps15c145KgD01oW1tovBBXkPkXIWd2UkR2Hmve57NeJQa2v3BmrXbd/dBCAQkAM5AiOjM/Pv/997VSDkf/5v//6cnp8fvxvTg4Pv+2K+Cevp9Bz5lOM0DhlwvK2L4bVDI4eUJAQhInQHYJHEnrypO3dsRCKMHIwkRg8oU++4t6Xaba/7dVlu29l8K+2ybmfTxurMEVkUNAlJYAN2S109SCIMbkZs1rt7BTQUUsPeEcCwt9pbCAkBhsTuvZat176um0LNOaU8kIhECSGZA8cZvKr1OKaY5oGnMSdE2ta6rmcE/Pr1xbT+4hd/+OnjL76+/vib3/7l6fHw8PhhWZfe/HA4AuAwHvI0PB6OxPTl5evlcv7lL39Zyr4sy5AGIpCQRAQAt30nhBjDneQE6K1UYc4puakQ7vt+u1xykt46Y5hPx23rSDEE79qmaSaEUndwiOH3shY3jDkiQu2tqxEiMyF566XsdZ5O7qjdUhqEw+VyuSNvm5qEhMA5JjMz7QomgWttrZQgsi7L7XoLMZt1LesdRt261roS0eHwHIYRRJCR1L1Xgo5Z4PGjtXAHN5kZRSR3VO8kkmYM46a+up7Xfeu0IijHW63v26pOQgLObpCyNK1b2bU7C8hATSt4Z7Km4K051G6r2d61AzozOythRfYYxiC5m7k7gDtQ0Q1V0bxV3HfrZiQIhEwI3khEQmaIfa+q3RT5f/x3fzLnj+hExL336/7F4KrexzgN44TKKU4hjute1vaGoaMZmcYg6NIL9YagHFFcBQ061Y4QKTIRAQkiGFsPvffWb6XXve9bv5a296Zlv2nfkQVAmCRxIAOUEVwAyaB222MM5g2AhymmMDKmJFkwLGXftQFJa4tZVbNmXrWo9a64lc1aHcdDSqecMhEadJOGQUWCG3dHThIo9NL6vgH01vv57S1y/Oab72IYz+f3l5cfP3z8dp4e397Pl9v1OD+cjk8GPabpcHj67W//Wl1fX78Ow3A6Hd7f30/HB0DqratXBnKwbV0ZKYXYeydxb92tpyCmpdWt7HuK8e3r15zoYX7Y1xYjg4GrT2NsTcHZUYXZze/CbQeVFFgYhM3d7iJR7yKUU+rN3UFE9n3btj3nIaWhNYsxWO/buiIAknQFRdlrV/PT8WBVa6kxZiTc2y4xCYuZmyET5DgmyQIUUobH5x6jgyqzoFgzm0+cJ/SOAdrbhdqGblqb3a61lPT0YTycRkUzK5F35ZROzsPaLk13gmjKtS9OhYOVsnYHI1C91fZmoNrYK/Ve3FvvaiaAsavV1gBsHFKMYwiht/sIuCEC3V3Oe4VqURk7ABAKEYJJkQghYIgRzFqtXdt1u/F/8y8Px/nZVYLkrZ1f3n8A2IccDXoek+mIkEkcHLsWAGASVwMjBHJHcyKIosKQgwQIqlZRzQGBoys4ILowSUzJAYgIzM3AuiJUBhVmBwA0CQFMWlcAdVMzV+tESqwGLhiCDCnmlNKYhiABgUBVva7boobd09771lZz32sx6k8fPohEcMsRiLz7RtxjDAh+u65gkHnAJpnjvm/b2jLzmAcwLLXvpUzjLHG4Xc/LuhLSN998L0HWbR2
GfD6/E+Pp8eHl5eXh4bQst2+++SaG/PXrSwyB5K64bgg+5XFIedmvDAZmoIbk2lurO5o9TMe3t6+1Ls9Pj6W2urdhnACs9eruDr7v650AVcoWUyZiCoJMKSVCBkRwW9frul2HYch5KqXe1w4Avm07AEiUFOPxeGitvb6+Xm/XPORtXeu6aO+APh0PXXXblpiiEJdamQIRAWMMIhJiDMwQY7Zh4ocHYvFqDTsFoS6/t3UCooG9vXnrHEGw7ev2/vK57WfQ5uSNmHFea40SxyHc1tfee7Nlh7cGi8ECvCpsDq21um+tN7cK662sa6lFe6XWqfVmZq2b0JDCyDIigHkh0tqLWQNDBkdA7GpamHDIiQIobRRsGFKOLIiotK71fF325vztP5Dj/MgcWq/d19fzZ8BLiqDutbFwvp4vtRWiFrgKUBAMLGZdrahWRJ/inChHBE6hWDPviK1qJRQoYdM9pzHnKcdjknGIM/W70J0JiEECMkA1qkjd2N2w1lb21nTvtXmnMR845N4bEYkEQYoMMcTEQQK4S+APKT2Yc9dQum5tVwALUVLMeWR3ogbUzDa3zcndQatqU6aUOYHWUppAADN3kBgdcZwO0zBdrq+Xy1cR+uNf/0mM6Xc/fc7jaRhHM/vjX//6/HZhljwM83z4+Pzh5y+fhSUPg7buvW/LMqQ0jxOg3g+s+7oQuhC69bsOSHttWpil9q613sF3IswxCBOzANjtuozD6GpddZ4Prtp6124pJQIA8CHHVmutNcXxHlMjohhTjKHU7Xo9l1K76jRN7v7+9lL3JQki2bYu19tSyjaOiQCW6xUMcs4IIFGIHTFyEkcDAw5IHAnI1xtrhXsHEioqdmY9LxwDJQR3K404h8dHInp7e3lfrte+N+ghHXTXaleVfbm+gC81vBhfEFvRc+lvAJu1tZTetlBvvK5627BsUDaoBddbqXu/3WqS4zg8WwdCQ1Lmilxbr6VuagCEYiCIAOrYIXQIFaQLGVOLRK6+1X7b1rXYPHzPv/qH2W2PnJf1fanvTfdWX4VTTuOyrdt63bey1ws6uLF7D2EgJiC/b30BMQUZAgPLrovZZqoOJAhaSzV3yDmNYx5D4BRzwJyHgcAQegByt04dmMjM1IkQWLR7a7psl77vgUOQMORhiHkrqtgpKQUwVQREiMfD8yl/CwbdXD0Urc1WkVygI7RBEiiYmXprvqurW1VoyLHZXrUEOblKZMGq4H46PUVJKad8SOtyfv38ZRiffvmLPzgen19e31Hk46dPaUp5mLdlqbUfT08p5I8fPvzudz+q6dPz0205awNh1t6GManWXss4HZftFpgIupZdiIi8tWVdLmYlxel2XtwrgWrbAT3FcDw8tdZDYHQEw3GMe9l/327p3bQb3BcOtq0lp0lirqW5G7GN09i7AWDOsbV2PZ9vt4WIjsfTOEzLuoDjOMxEpHW32pfzdbku67Yq9Noa3AscQCych4OEPMQU8kGiOAJ37V35eDQWW6/etxCOOAxwvXEzEPYhGmeSKR7mIaf3y+uyb7XV635tDEi92OvW3jd9W/yl+KJta+Vca9cOvWBbWyvtei2v7/u6t7JrV+3VrJo5l821Y4qJKQAW85tDq2Up/QJAhKwtkylZh8pAySOgdOgFDa1Lb1y7VrOOHOXh+0//gP/eP3u2vpPUYm2vizASBwWTmJj9dlmX29p0V23oPaYpygCMTuTIHEBSFBkEySV2atve72tjdOulmpJCTDHMQ84hMkYiijikMNa+VVtAqmkMcZAwmLlaQ+jAaF5bJYacZDSlO5UJWLTvrV+VOiMIEIVT5mN3dLhvKBeFKwVF1kgVqQGjw77Xt+q1mjqZ0252A9mL73UvI41RMphHl9M8T6cnRGKi3vjLzz9KkO++/T7E4XK5NW3PH5/GaQIIAFa2NY7TfHwY5/z25Yt1/e4X3++Xa9nqaZ5f317n+ZBTeD+/IPE8H1W3yIjm1/ev6C2GQZj2968GcJw
ftuWmvZBQszpNQytWyz5N47osDw8P3fpt3XLKhE5AXbuqau+MnFOuta63JUq82+TMVEjyMPTet213N0DU3rZtJYJhzDlkdHxbLm2vcxpqq4AeQ9jLzsTu2mtliQElpcxEIeec5xwDjTOKuDDyiDSRsEMkpq4dchDtsCyASIFZxExpLzKOx8OpNV2aOfcXfd/bbd9fipaX7fPtcvZWtlLeL8u2mLUDaNbuvWvr/bzv+2rgdMfoI0VmCpFNu7uhooMirF1r7Zvb1YmrUwKraqVJ54AhNrKlGvRWCUwGg+DKASamUXgcwyP/o3/2S1BF0jyF1joAjPnR3XZbQ3zozdfl0rqhEzGFcGQ+9g4kSATGRBLHNImFZiWkrJj2ukEAdDTFrtEpklCWiADE0QyYAvhoTqW/qoLIgBBTeEhpROQGbq6MrooOnOOYRKKm1rx39gbey152N4gclAJYZxJERsSOV6ebmgLuFB3dyOJSz7f91a113dR7CAy0OhTgTpa4jwIYIRzG45w/dUO32ns7XxYWzynelr13jTE8Pj4lmW636zxFwdBaH3JOY9jXFR0eP33Q1i/v7w/zuNd1XW5Pzw9IsO/b08PjNB+W9V1bBevL5SzsKWU0ur7/gJHTkLfblVRDGs287DtGKWUNgZmp1sLErVdwDxLNDIlSSuiwrkspZZrGYRy6OjOKBHBat33br8OQGMnUsiQm9N72/ebWp8MMTExyWdaltUjUe/PAmaMbdmvunmPOQ+Ygd4JLSiOHiDkjshlRDk6dJKII5InC5O83Lxe/o+LKBl2hN3Iry9LAG0BptbEv29vn9x8dKnNsFdet3q79cm2vr+deOcZHkeSq6KhQb/XCKAIsgvdpz12pACDm5ozdSrd9rbdWqzkWBzMD1w66gzdGpBagB+E4DnNOmVJiEUlugTEMkZmd/7t/9QcpsHAOSSTGIGMMLAHNmqsC+V57bUg0CiUEiVFEotVG1CUEcxBEQjSlbp5zcrR1PROouzok5AAu2q3U8506D+4GYLD2rog0jiPjg3ZiCkRJeGAiVe/WSn8H0ofpFGBC462Vva5dda+91R3Q3IEZXMNWXtby1g0Ox+nx8SQS9l7ExQ1L8+aO1AC7mhk4UycyIBBO3obMHx6nD1OetPfz+afr8rovN7cK0LdtT3l+OD0dT4co+Xo5jzk+Hj/u21tvdZxnA87MIlK1bpcruqaceitBJMbYe0FvHz98WJfry5efj4exlW1d3g6HKJK3tWzrl9u+Mcm2LndVLiC9n18P0+xutZRpmlSViYc8xJTMlJiPx4Pq3WYXr9fr9XqNMcYofv8FAIiIKexlzylLDHdJ831wva+7de1FyeHx4WBt124I4Xa+dW+UWDrGnB2USUKMd8FrGDLliYXNzFEgToys1EwYgTAFar3XG0RgCQaAyCjsqtf19ecvvzuv7xi4mP98ffv59W8YcEgH11g2//zj8vq2dQ05PQ/8kGIQwsip9bJuZ1UNAZGIBWJmEWGkwBBIAqNBbeWirZhj62KIjlBKJUTANoXwcc7fPz1+/3T67mF+nE5TjoKMxGrc0RS2Us78p//TL4fhUSg4IIkhakwSQyDvpd1UoTZuWoko8EAwppiQqPfVbRdhhlRb67gjRnQ2pZRGQa+lGGg1IwfhwVyX7e22LSwBlEq5qu4ORkTT8HgYPuidt23CADmNiGOtvteLt2bmDkyARFjB1ma91e5r1RVaQ4zbuuzbAgRjPk3D06en7x5PnzKe3LC2JUgmj9frrbYeOLh3AiAMgCZBMx8Tfcox1tLO71/3/cwcCLC2DZw+ffr+8fmDGrr55XI+noaPHz9+/fn98+ff5GGI82HMg+3b2/ur9pZDiJEAgADd++12US3H+XC5vC/LNYV4PKTPP/8EVgA8pbnUzawTMBqZK5CLCCIMQxJARFI1+L28kkiYmCTGfV1rrSGEbd8khOPpVGp9+fp1bYUBg2CrxRyGYRSJiCQcQhREUHdTHMa
57j2FgL1vt/fjYYrj3BW96bZdt+Ua3BGN3Ls2U43pXrKPPE4s7BAo3gGdAFPi8ZNxaOsbIYiM6IDCLpl5RNaqbbutL5e3L7evm5bOoTuetxu6xBAIad3aT68v13IpBofpwykdIzIJITE4tFqaqUsgTshCjEw4hDykaYiHO6SXQFvve28da3dV7dHTINMvnk//6Ntf/oOP3/zhN0/ff5yP0yASgFid99bX0q7r7XJ525ad/9m/+nUOcRgZCNUdMTBDiIzs+9721bQFV+u9pRyGdDQdCQjctSsRO4ZSaq2ViGMY3RiMcjowTbVp8xokOZM5tNrWpSEAUyi11taIPVASpjGNkU97W4yaQDYLzIfj/CFL7rXXfqXYHaF521rprboV7V2VGFLZilltzdxknh+n9Aw6aOspTqfphEraNvC2lq3WIk5qzhQYg/kegqV4UEUGrL20CsfjKcRxW/fA6cPHb2IaX1/fe28pj6fj8cPHD+/vL3/11/9Je/j2V3+c53x9/XJ5/ZqHeJynGNhUJVIQup7PrdXDfAgs18s1Rnl6fti329evX4JEV09xaLUGRlNjCSFKYCbB6/X8eHogROIghMys2vdtM7daNwBjhNvtaqaBAyDGPB5PD0B8ud5u13eEfphPUdK27+7uiExs6mYQ01BaX7bt+ePTMEYl37dy2RcCR1cOGJkRqLWtls21IiGjhJxTTAOlOAZDonjQmEDcGIwD0YgYuFZkUgciNEQKSdEMMUqMks+3y4+XL1fXDoKQx/TL4/AQjFCDg3x5e72Va1VHpG9PzwjQrKurUem9IqbIc+SQIkX2QCHRmDAPlEgoyRzDwQDW/ebehHn09BAffjE+/tHp+MvHh4eHw3QISdy7X+u2btuylrfl9na77Ldr39bSnP/7/+E7JiBKDaw7pxSQgFkcuDUvtZs5ALoCow05BQpjyJFj2ataATNyol57vUaRlAbiCKgxpphGVKxWJAAAaTdTQpd5PCny5fZlb7dpOsYQGTnKw9rflvqaQ4hyDJKYaBpP8/xk3BVa861502bbWm7rufWdMTpBK7s238t1vW1g6TA9gQsDpzCcxoenw1MMtK5ft/1MAOjQq+yLIpaUmGmIYQAiayh8mPMhYOjdjvP04fFD6/b15XOK8fH0+PD4cDg+qPoPP/zlXtY/+ZN/FObxtz/+ELQfxowBh5x7rdYqEZa9rNtyt2Yw8Xa9TkOYhvibv/7L7bY+PnwwMwcve2l9WcoSQ5imad82Znp7f+vWRcK2rUG4tcKEiLCvV0bw1nIIzMHccx4ohLVVdZumaRzmVvfr9eKAKecUUtd+3ZbuykympmrDMH79+vV2vRALx5jG47bst5cvpVzdIQ/DMI4h5ZCjs1CMUx6DRAnCQjGzoSF0SCPPHyHPXr2vV/IGIk4s7N6679X3K0fEOJt2IXo+HNMwdyA3vu1UK378MM95aJsW7V/f367bTpK0tUABQZdyM7BitWgVDkNAIQtSY9RAjK7ujZljHEKYI08xBWGWhgeWE8ePFB9zGLMQE1O3fb+8rS+X6+u1/PT6+npdbktZ161vJSLMY+I//TefiCOAGmJt4NCFA6ALC7mXUtZ9CeIpZXDLIYQYokmIVvray4bYEYl1CdS6vXMKIkMKkRNNeR5Ydl3VvWt3MMZEPqR0EMnrfn6//MQSDuM3BCFGZ8FSbm59yg9RsjsBcBrmcRrVWutn1JaBvcCyXl19ig8SUozkTqq91r6sm5kJp3mcT8NzrRvQFgR7uem27qtfa0ULrbnEPg3DEOc05CAHwhT9lOh++ovotrzfLpfbw8PDhw8fmIQZUh7/5m/+i1n7g1/9XWH+6etPD6eHbz588+Xrz4BAjgRQtW3bdjufp2F0MHez3pbL++PDVMt2fj0z8zDk3krMUnv78vKzg8/j3A3cvZey7ZshWq3q1sp9Xa/DOA7DqGoI1NViTikOyBxidKJa9/V2S4KfPnzK41S1uXmOGQkVrPd6T/C
32pblNk9TL21d1nZbiCCgr/vSzUHd3ADVMAzDOD88xukgMQ05pyGnw0TxxJysLiSDD0dglCAAytet9xWtowTPAxGAVlgMvZnV25evDeDDd7+Sji/vZ+BxrxvYLUkA9b3vr+eX99uCxO593zezvuzvVVvrXlthVpZdqRmQIxtvHWtzNuJ5OpEkr0joYxoPIU9QHxJGh+7QGmz7dlvO19v6u9fyV1/Ov/n68rvl/bwt+7qw+8MQn2Z6PgT+J//jJwdClK6goNbdYZUooJWMBSeySMDCwcmISThrgaVeFd16V9uMLAgFEqXSnSXkEJhjIBw/HB6m0+m22m29qnXhw5QPMQ5I2Frdii3rMk+TQGLyIBQT1r7XfjO3yA8s3H0PRGOeUFO/XEO9Jtgj0hieh/RxGKaUc+u91BUAW8HLednrxbQAgekSya2X8/nrttu2Y9nNPQCASApBUsZxPHA8MQZUhh60921f12Uh89PTcRgm7a6qh8N8vV7B++Fw3Ld+uVxyzH/yd//4z//yPyHR88NT2/fb5brUcr68JAnTmK7n92FI1/N1nkYRvG2rdX94eFRYupaUhpTp/L4ETixB1efDQa3X1pLEthdmRsQQooRoiCEOKY85DxBE3U3VEQGAmbRVUrte37Z9m44P43QspfRazL1U9WL7urVWgQCJVDUwai+9ln29FC9I0vcKZCEKUuCcBZGR5vlRjqccx2E8pvmIRGbG89Hi7CmZdacsMrgMTgZ1pbpRmPvxieYjNW9vXy/vb+fLzz+cX3+6XN5vb6+X7X3Hbd1TDDlMt9v72tdLebuuF0MFMMLe2n7bt3W9oGnX5l6dSu/dzen3mR5256baYZ+HmTg4igCK7UwlEwLO696v1/Xrevntefndef3hdX9Z9lttG7SOHbwPSY6Z52TTgfmf/MtPtdXmoL17Y6POdBGyphEZj+lw4Ec23Pve4WJQGFIDqR1dVSL3uAO2CQnRAb17RVJwDzCN6XmYnj89fBckfz1/WcuWcBYaOVII0ZRrvS37167hOE91WxiRCQDOdX8rZdEOxE6o2inw/Dh9nyLfzj+XyxJgmA5P4/x8mB7TkJf+ftk3LZ2qtKplf7vcLqMEDqnp2nQv/bq1i7aoxVx3EgpIg5ziOKC0QCzUte1dQ9MGrhmG4+mIUfatuLcPH79Bx7fXV1N4ef/aepnG09/9+/+n3/zmN9u2/vrXf+TqX19+eru8EMjjw/M4ZLUaorhbDPzwcKx76a3GmHJO21YIhBDNTIKcTh+3bTEvzCBpWPcGyNNhIufulqaBhFNOpVZiliBxHIlnBUhZtGsvpd6rwyED07rfwH0axt765fze1jOpMiAilb0exymm2LyrOQfspr1pihGZmwLFFIIMEkOKTVutS2bO8zGMQ8wTMNZ9VxZ5+pYxkFfsHWjUFCUJhUmJ8PaOAJ4e4ZCY8/L1d//vz3/ZHP7ib/7qz3/716/L21/9+Nd/+/oXKBpBz7ev13q7XM7n7SsFZzG30ro5MBMBsbm0rm7sqODGEAyidhT2kPauG6cgMZSylnYTKOKbk+3At2LvdXtZy9vuG7KRABBFHwKTdnAg53nCeYQhMv83//xbU8VO4qh+T5VVC5VRxzQO6TjmmMf5srxv9Yyubd96NQPaew2RUuhgHchqp2JguBlQs1ChPZ2eRzliG0Kc1/6+rD+jUsBBREKUIAOAubVtvRCLQyzt5rSXfgYCbf769kNv13F6DDy5o3h4HJ9zPl5v+9uXzxLz/OHT4XAMKRbdlvXSqt4uTZSQgyORYZPrz9e/ei9/C7wBiZsBAAUA3gNhTk8cCHExqG5dMWN3dkuSx/xgqutyCxIAadn31/P7vm2tlzxO4zR9/PBcavvy5cuvfvWH6Nha+fL5s5l9+PBpmkcOzATuEEI4no5RQqu1lDqO47YviO6gx9OhVAtxSjnt+x5DMndDckcJnEJUI4qxW2fh1nt3W5faDaBDtyaB0KE37aW0XhARYzp
MxxwHNwXiFAe7m+7au2uFXgnU3CXGGAckZqAYsoQQQkg5xTh00xAI0FPOOQ+glVynYYhpRLJumKZjVyBkGh68me8XFAIJpAqBKH3bsMnrjwRNc+DhMH76fnl9+e3rb0Mefvrp5998+e1r/W3d32p7b/t2Wc5rub29f73c3kGcTFk6kg1hGNLASOQsgRMnwCgysiRVtI4SO+Y1hB2gmLdl20urGN3YKvFt86XjTfuuCiQxhZCkgxo0EXcopntKMk7iYh4D/+N/8YduLapnlpBBWTfdPTaC18D5MHxzPDyNw4OBXK8vte297rWqORoZoBLsUdyid2/doHl1ogZY2sLuh/y8lV513cq72i1KJ0IHtk7mIIRCZrCu+6KGe7uZNrW27XvrtdVyuX4ptqfxFPmBANFTnj/k6fl6296W6/j0MByGIGHZlsv5a9l9X0y7pjTEey9fnERab7V0EmExSZApm1fmFIeHICMjGtduzU3ERChFnrXq7fI+5gHA13VrvYWYkOneN398fF62xQxExMzmabhe3n766beHw+njxw8AkFLc9v308OiAEiIgvLy9DimYOQLV1pB8mo+qqN3NKhEJyzgdendViyG0XiUPMWWkYAbmmOKQ8mBG75dbt45ErcG2NjBnohDEQ9rXmiTkgbvVUpsgxxjM97JuwjyOsyMUbcgMDqrIRHdLOXOkwIjgShQCUhjzlPMhpIkopRCpd2tKw4mPs3mDOFJ6Aqb99iUQY5x63x1ziJ/awLSf+esZasEkz9NweV0/v5x/u/z44+vfWq/TIIrtvN7Ot+22l+uyNzAkNPQ4DnkcU47ESCDCFFIQdCYchjFLtAZW3LlBXKkbgbnXrqKGCtUZO4S941ZrV+u9a1NzMMDWratJQCYlghAFAyhhA+R/9Wd/ShAzg9sC4pJduRgoWjez58dvDsM3gfOQvlnWy/n8BTEChiBjYEEw1cVRMTCG2m1X6BZAQoCqddubwqVd9/altjfvhg7V1ByhgzsgV6DCqFX3ZSutNu3mTtqg9WaOzeH1+vNW9uPx05BH5KE3VROZnov15ls+TUnCvtzOb1+u5259aBWixxyHGMZEhzEdcpQGa/MVEDmwhd7J8vwhpw+P8u0kc6VVcQdXLwI4gXJdyjQMwrSui1l/eHqUkNyRkB8eTj/++OMvf/HrZb28vL18++03puU//+f/lzB/8+n7cZ5TSrdlzcOU0nC53ESkq7pDENr2nUVUjTmqqgjv+8KBwDHEGGMsZY8x1FIcIMfMHIRjkJTSIHlAiSjRTEhoXfcYBgNs6o6kbs7NzPZtN1dC1tprK+otxjANxxiGNJ5kGIjE1AJ609JbBfDr9WZdY0zo7AoKFoUIgTiGPKaUgohpR2SQSMQcR8uT0cwyRrF++9kghvTB/IpmXSaQYD9/1vef/fZWCRzSbVk/v/30+v7iyCLoDNV8r9CaEM/Hw8d5+HCcnw/D83F+fjh+OA0fxvBhGr57PH47TkdBygCBwLu1rntrtbv15tar7yA4pDGERDyaQ6t1WW51a702VayGACgcRJjIJXJKEdEBPOUA7vxn/+u/BpcxZiQ1aiiMiATaHQDKmI7Pxz9AEubBoJxvn1VVJJFHMC9l630lQgQ0JCBrvjJjDpM32kp9W5Z1vSy3rwhNOPXGW3GSNA4TsQPt3crp9OHh+KFs2qqiG7MQRG1UezcXM9n2t729UeQUDqyx7Nvelig9Mk2nRw6xb+16ud7ed2tkzcraneV0OOQwubUOm4bWSQEcoVa7AfJ4+MUUjyOJxNCtVH9nbGyzawyOSRJz2tfltlzm+SAxb9seghzm429++NvH54fT8eE//H/+H3/06z9097/567/odfvu2+8fTk9BpPW2rus4jr1rCOHe0wgxqakBkAiQxJi1N/Bquud0J/R3IEdwInZ3lsQckPnuj+AYQhqaQjdCkWmY91vpvc+ncdlWjpGIXZvW5mbMpK25KREoWuSU8swxdwdidgdC6GZ
m3u8fx17W5X25vqEX9B1doWtgAm9gNRCxREoZ5lmGERF7iMIjEwMycjbX/fYlRCbeHQsbUQgwpde3L9fX143hdbue9+u6rUutnAJjJgzIMcpxiM/T9GHKz6f0YYyn0/R0mk5zfs78IYXnYXg+jN8c5uPTeJiD9FpbK61ZbdaaBXK1FehGHHIYxziJZyt+u71ty3Zb6tasEdbeA8OYJWbi6DFyIARTJhBG08b/7n/7X2qtpTQnvHfHHIDdOnaEbs2n+SHG3PqquC7lrfRLDAEsOsSm1bomyQDRkAGwt5o4z/kJOXbH3g1rDSIx5t4bIBgwIE3jYFa6bQ8Pj9N4nIf5+fh8u12ulxUhCAZG3mtpamM+Bkmt19t20d9vZxjqDfp+fHj+7le/TjTtCp9//nJ5vUYUM1vavu015XCYM7F2K7VqL0qoDfba1LErxtPwgQmr72jv0G4iMdIj+uCmAw+t9rusSUI6n7cYJUZ6+fruSH/3T/74P/wf/89ffvcNOPzmb/82CI3DeBgPLBERz5fznYw7zwciRMR+J8BRRMJhnNxs2/dxHAE7mOZ46F1bb2YuEoSjcEQiY1SkPM4KwJSEY+/uZsM4aOu9tt/+8Jv5OAKTto7ovVHf9hgIAUWktaKq83RC5NJ7SMmg7/uaYxAJtfW611Jul8trWXZTc2/7dt3XHXtHU2u1tgaAzOTmLBLnkzNhGPowIrGX1fsNDTHPkgSXi2OAEBgNsIE8yTB8ffmq3JYK/+Ev/vw//vY/llKZghEG5ma9NxAaY44MoNpJGCgys6nV7iGmkGitq+01YbCua7nWWra9llaQOwfmcIVUKYxEo3qr1cruy/V827frdueSODowQxwoJk8DBSEhZAADM7faKv/P/+7fgNvb7XzdLmAOrECLNiPMCK6wAGgacu+11OW6vtS+BmSwXCu6iTVUo8Cju7eKASN7FpqIU2BmIFITlphj78W9IgMFT7kiNbV6Ok3CAR2mMTmW1hS8l9IMST1oxzyknKcpJfR+u7wsdXFA6rvr/ukXv/746VeoVNz+/G///Pz2HjmpWzcEAO1VuAEUkW7atmVXr+aB4JDioLoS7sBYvZZ+q17Qx3E4ARAiCzF06K0Ipe6OBimIaVvW7dtvv7+cz2o1x/DbH368S2uYxIEQgUVaa/X/z9N/NcmyLfmdmKslQmRmib2PuqovWgEgAcOMkWM0Gh/4dfkZSL6QD4MZ4wxtBmADaHSjrzxnq9pVKUIs4e58yIPJx6gKs6rIFS7//vNah2EMISBiirGrhhBVLeXBzZZ1a11Pjw/uRkwSsgPc1/HGmENMZmCuABjzEOLgjkCCLObOQqWU3vuyrR8+fko5PR6Prvb2dkaiGNjV8jCXbvM4mjkzA6AjgrsQufVSas4DBX55+Xo9nwmBELZ1DyGO42hu3hW9mSuQEIeUUkoBEAhA2E1FwhPEaH2x6wu0jZOhBEzB3cwadqXe0Ip0Lm3788ePX67Ll+XL6/VLX9tNb0CAoKWv2hEM1apa7163ui9LtW5mdds3U5O7qTZLgPt+vW0vZjs4mKvRHqPGoMzGgsRoFVrFrdRay76VVorV7l0JAZgo0pCRBIksEPdWt720Dq2p7O0LYGm2XJZXS8xONIi3CsUoBJ76ZTvT199lft62q0OlAAi76+ZgqMlNWte9VisF4K6U8ltfxvzo7gxgWRCRwMGwNW28hAgGs5MiwuX653fPf82aartOE/72L78D4x9//PK2XA22XWtq+TilyBxlvur5x0//+Bq/vE+nSDHHzE6IzN0UFnn01gqhj4oYOae6rF9ZjhakmTl5b5BocnLmYT7E3m9f93+sq0Pj4TAxx618CThyeOjaGFHB3WoHIceyeu/Gwuvttm3bcUqfP30NKYlQ0/10+tbMzH3ZVgR8fHza993df/3LX+37zsjjOJWyM1Hv7kAxDU27AhJHJ08pEGJXpxj3vRqoCCNEkaRmIQQiaVXByd161XXf19o8xG3
Xst/nhFKxPk9TXZcOziLb3oc8GDiaEYAw916RRUv5859+nB+Oh8Ph+vZxuV6fju+/eX+63M4O9Pj0jhyYmZmRhBEFwBGW1ritwWYI1L98xIdJjrG30Za3tpZw+MZowLBT2dUWMKBL+fTx4w31dPiLP37+d2+3Px/HyCbrbVtvW4Wi2NR6t50jIcS917013ywnyCNbI0DJOU1xysJNovaVfLvPD8bIZqi9m0bs0G0zfQueTMHQETlwJAdXN3YAB9VtW7aYlSQFdC291ta0NiUC/m/+L39V+/p6+XS5fnHdGR3doPVuCqT3CoF7b1ZKW1vba9vQQRC1YzclZiQwNgjqrZddA2ZzUlwJSc3ZmSw5cScofV/K6628OanCXsvtdquqalrcmgjMYX5/+GGYD01bM20G27aehnEaDpE5Dw/tVi4vH8DteHo/P30T0qjd/vz573/8+PdDxCEGGTAdKYgngOnwNBzfCUTt2lvH4o5J8sA0Hg6nnFLZLut2a020JUJVKgoVnVkzKEUkU9a+m6mpbfs6jad12cYxrdsFUY7HAxGDQ8qBKI7joezrOI6t6fV6eXx8PJyeXq8XIwsxGUBzdUJ1U7UUM7ijuzA1tdo1jSNz6L2auyMDCoXAIiEEpFCbOqCZXW9bLdvl7ZXcD/Pw9fUrkyADuqtpyEkBBbFrA8IUo5qBA9jdOylLePny8qff/ZfjFN+/+3UpVfV2mA+Pp8dpGFhSiMM4nPI4gTUh55SJAqeU0yNzUGiotb58Vtfw+GyOdtkwDCxHcCB28M7Ebdk+fvrxzy/nOurab3/48R+Wdr71Zd1u2q97QzPRWrsjS7JOWnfQprq1vtW6KxTHfdvflu0z1cVq6bCikDkYFBftgL3uqj1IBuhWjeHAksGFi3TtpRYGYxIjDMFJCFzvTIF9r2bcetm2XTXz//H/+ou9rKXrsq3sVRgQUDtW64DoAFGYkEyRmR0VTK11MGIeqkEIgwO7K7MxEyKYmbk6NAQgk2aKHoCgQUOmWvdedVu6VQ3h+brVHz/8eD2fmW2ehtPxcZqeTo/TmMfbckHiHCK6TvMh8kQojn1dLoCcp1NOxzSkrd/+9NN/vFz+OAwShhAnnw8QogcKD88/pHACQ0Gvt80N1XCeH5McUVPinMJslrZVS6kGzVAUHQXJA3vMFLG7Qnf3ZV2tQx6HYUyt7722aRpSTL21eTog8sPDw7IuxIwGl+uVWb797nsD3vYqzMzSmiESAJha03Zv9MY4kHBtxiHGNJoTsRAwOGMIIUTiwBxr0VZbCFG73a631tbr9XycZxb+/OWTSBynQy3bvt1ECBF7qUzuYEwUmAF823eRkFJ091bbx59+v12vT0+H+XTM+aSA97GE6XjENFIYKETOWeaZh0MaT+N0DPkIFGur3ZTBcN+xUxgnHtgZKEyAJ0PWbrDeJGZS/8cPf/inD3+3rl8NfIe+7C/eb0X3ujN6JokxzMLxPvXvQA7gPbROhJFIhDkPs2BGRIrshE27gzmxmZS6tlaYo1AinEKYiHMrWPfS2n5f6A3kOVNOMiQMjEMSVevN3F04lE1bBf5X/9370pZlWwAMwcHJiN0JkAFiVzfXnLJIdCNwHBLnFHuvwpJxQCVArFhBlYMIMwV/eDiMOfTWrO+lminFHJjBTQXhGJ+TPwT4PsdvUzgu1/3l7atjiSEdpl88vnuach7j2LXe9mXKp3kYc44hDtq99ELEHFByOh0em21b7XtttVTzPQ0cggvaMMbp+Hw6vs8hiYSmutUlxsgQmkKgjC2SC2pCe6zN962V7oDMbMEC9SRGbGLKW7nVWgBgng7jNCzrpfeWYh6GEZEBsLV+mI85xU+fP6gquIeQpnmWPIHwsm2EzBR6U1NwhxgjuKcYGDmE6MAikUgAkFh6N0cMIZKMBsySzLGrxyBEdFuuvev58uZuMabL7bys23x4lJCul+u2rYd5imHc1+KgZoYokXiYkqnXWg38elmYaBpD36u
DUghxOEqMgO7khIRxnKaJI8eQTg/v59NTylOe5lZ7J045CxO7Bqu9XVyET+86m99eSZRoQs7UFlwaY/yyv/z4+ufb7RKFSVOUFOiqDgjZ3SVADDmEgICEbIa9ce9qXogIAUk8yjjw1LopUSXroErQ3VtVbebN2Ikgs0xOESC2pn07m7o1tQ4ing4+3PWDkQABOZJk5sSUiARQ+Zd/c/zZ77gxsyHeZ70QIkZsjtdtK/2G96VraNBbzBwimyLDEPiUxnEaI3FXWw9Ten5+nqdTJtTeStnRopOwUBKJkVMIQdNJ3j/mH6yFIRwzTdvyirAfxveZv314OE1DRHI1XMvabT/MxyGOLNys17obdUo+jeN0fED3uhlTZErrsre25oAxUu815Hc5jswQmaC2QJ5CJMzrtte932FsZlwbEAlQdxdwDsAZ00DjIAeCVFsDa4iQcxLkr6+f9307nZ4kxBgPOY/bvseQHk7PX758LHUfhjHEmNIwHo5IYoa32zLkFCRs2waA9ySbGFMMIVDX5gqE1FsPMZqjWidmCQOHyCwxJzNwdxLatqWUbW99ud0QoGvftg2cjseH1ouVAqY5DUOIrezgigrCIcZMRN26ez+/vvbW19vtMM/TPKm6xJBSTPdNjyRMZLWD7TkSEzBLFHZvagVDNANteyD2OCA7mToSDiO40P6C66vvX2kcME+wvb28vpy3y8vlx8vyqspsU+YJxZU2B0QMMUZ3k+jEzMSM5IqGG1JnQQkQY8pxYpbqvncvfe1WCSITWWvQhRDdgWCI4eRO3dys9nLxamBmrYREcYYQHQCYOcQoMsY4xCDjNAZJCMCPv46qtSmCCxIaGCEHnIHJoBp4TKNrLFsXj0mGmI4h5CD53fHd99/95W/+6l/89re//fb5YZySegtMd2EwQe/a+l6GMDQFIOSAzh5iEI1imKdDCKdIYwysYbPQT9P7d48/HA7f5cCl7K2V5fZy25c4TXOanFh9RzAkI/YQYghhzCc32a43V9j3dbm9uYNgDoDdhQmLm2knWw7TMA/PINxBrtd2Ob+ZmzuZU1djDvOYhMVVgsQ5jwEnBEKvrqCmbS+363lbbymPeZwN5OHxm9tyu62377777vPnl/Plkod8fHja9n2c5mk8mqEwt7Yf54MIq2pKqbW27Qsi1NZEJOVhXTcHyMOATNqBY2AJZsSCEhiQ9rKZamvttiyAWGvdS933FdH3dQ9hSIEul5/YLIYYSKxXgs7o3q31SiJ5mK7nV+0bKvTSJFBrfZjGYRiFJQpFxhACM7t2dq3bWtYNkDhIN3O1xCGO74JM2+Wr1xKPz55HUrReEFeKo02/JCjt80+Xjx8YUR0/fnn7009/+unjf/56u1zbYJASh87keFa7hZiZMqBLcOHkQFEkJCA0gw5gIiGGQ5Sho6l7LVV7dTPmgIjmql0xMImQZ/Lobu5dba+1aa9CSliHSfIcmRsxk4CbpTAc5inHOMQcJLZWBJVr6QrK4m7J3buppRYDa7Xv3n/3L/7637x79z4EqWW73W44+mFMDKXW/f27X/zw/W+J5fVyTn+Ia9k//fST1zOhF/F93yNQtHaiuSqt6+oRj2nMIZnupb8O03Rbb80vEZHDMck0Ds+EiSBo36zdWJdedd2305AQIiFM03POx6YXIK/1soUZceQg3ro6Bhx4D3ULkkY+tM5Xpcg9RfQU45AGG/BmnnZu2r9etg4cY5EYCEJvIAE5ZATWJhgCoSEhiDhoqY0AUgpmVvZ2fDxcb18/f/n47ft3f/rpD7fbOk2HkKblVkhijLm1DkAgpK2rKiOEJABmXt31dtuZmSgisTIjirMgiXojl1o6gBNAGJOqWXdVba2llHq3vb6YIUJcbrdPnz799td/WbZ9vW3z4xhzaqa6b4g6hNhrK620to9DqmW7vn0+TQ/WaxAeD5MpSACRaIbdjRz0zngKkRVVATCoIZrxOEl+Z+qUwvj8i+v5T3I9h8ODzpl3s7J
TUopi0z9P3z++/v1//2//n//3Hg5A/vHt03W5tVZ6WC+35vnQWcEmDj+F0L033JlpRLTARm0mIh9cfa91ZYiMpNCss5mZNTVXYqyd0IkTR+RAzEw+3GeITa2bIUMaiB3HaaAUKAlxZElNvffOkUlkHh7m4WHI02F+keP0VO0CYGtZEYLQUHvrbRXiX3x/+Ntf/cu//u5f/vVf/ebp/enr2+2yfDlvX8qycLAdKgfBXoTYLbhJL33Zl13dHVvfcNOHdMiJa9NipfSNKBZooAKt93Y1/HGttwbr3i+Izgyq2ra1pkwojOk4PfHLn6+XVyadczqM7wI9u1q3eW0fHPatf2E+eALtjYMchocBxau33ZrXohuLi9PII1ZUWkOS+ciEEsSu50vdKmANcRJMBt4qxJECsXZ0NgY2TPfOTAihO9W2JQIEu769XrfL+3ffqPrnzy/DlMd5UINm7RCn3sy0pzxeble498LMJMRtW8zAzPa9nh6etPvb2xsJS2aW3Hs389Y6IjCwo6jCvtfWmllXVQBalqXsTbtvtb18fVUHJ78uNzdiFlU9v70chtjrXiSQ9WZNvH7+6Y8SqDc1MxGqtQ7TGIOoNkQlEiQmJAdFhGbKOQ4hIQgzzePUEL7czkF86Ft694tx+B5ezv52g+cA04H6qOXCaWxhpum7b377L//x91/+X//T/6f6h3Cwi112W/bbjj66r0BwWZdKPXOLCYLPYIpQhccg2I0Ng7Tcm4MHBzKzZqCtlb7XboxDQEE0JI55QFREZCJ33Vst2ok4RwyYyWGcBooZhAVJna0WgKbAEqbnd7/85vFXbjHLr/h/93/6rVAijAZa29oVxKN3KH2PzL/87tffPX///PAcUljKpftm2m/bdr6dL2+3shUFP1/Xz1/Ov//Dnz59+NO2r021FOwFk0sW9g6X5bLouoOSBEK2ihmz1Wut54a19BXVy20TjEN6QGtGO6EGGgFlWc7bdnVmRiQ+HoZvJA5EVtprw53EWrXeW6s3wS4sAhIkQCAgBCbEtKuvoRujew1sIWdiDyw5ivWGVJHWQMgiLAO4qvYUco6BKTAMrr3bZgBNCQFiDGp2vZxDCI+n5/N5W9dyOM4IzJRSHE07ALau13U1tDwOrmCmKY2uVmu73dZxmMdhPJ/PgAZOKaaccm/dDJgpxkgsiG7m+77v+84siNSqXi6LOS7L65eXn1rZH06nspdWd/ciStq29fxlCIIAt7evkbzutykPe63jNPVWEf3h9IjMe6kxkjpqaYjauwNgIGEUpChhYggcMaQcwoySwbs2hd48eIoTqyNURPYYKDAaY1BgAHTI4ymnH18+/Kcf/+5Wfty1XPbb29t5021p58vb2+evn/JwOhzfCWMQUlNTDvQtw4TUHVqv7IDIxiIG2Hvdlr3ue1XrKgjuAJtWQCUJIUQkKl23UreyASoxEEx5GlI+SXgSHgWDmWlfHXpKASU9HH6Z80NZt9Z2IU4cg+k1YGpQzBWJSaSbffxy+ft/+I9zmgrUp/NxL8vL+Vq0mrfrUj7+9J8a7qc/PpVGn18uL18/132dpuM0JXSA2gUVwGqxq/ZqRZFhJUqstQfsR/IuobiTSQcni+W6vX791Oq8w/jt89MxZJweTqeny7Z4qxvneSLJCVEMNgDobXMTMGi9dt2ARXutWIQSUiEmjtlZetOy99Wrj0Jg5CqRY8YdOHDG7ga+9xLYmcCsEgY1a9CFzHtXqK17b91bZ7RtW2NUdhhienu77Hudc9yvW8wHJ3bXum1A+Ho5D3n6xXff96q3cp3ykMeh976umzscpnHbNyFiFwDovbfWeu+IaGZEpKq1dhG5N9SYWVWX9YrU3OptXbuWmES73/bLwzxj1yZopaJZ1RpjNuuAgmC9dxbet348PZf1dd8Wotzb9XarTBnUwDtiBQvGgk4yxgGHwzhwpobWe5/inMdwWzdEg69vtioeEw6sW/NlxXnCw+zubisTQtfb56/ocDq
+++nlp2q+bLbXGlrYy3q+vD4dv3uX/2bEAOED8623M3FDe+3wjmBCck49wWDeDLU31L1XrZuqmWG7XgrGGImoNHxAwZaMVdvebsW6labYYziy09ScWy1mRthB16Y39Wtpodr6+58c8JJ42KxJq2vkSILaDXggD4FjFBwptN7/y58+fPjp/0GRZeJhTPP0MI0Pw0ja63lZ9vJ64XNf5fxyu8E6DAOym/dpyHGIvW4AptiDhtYhCTvVZltbAWAfn9LANnFcfLx5hylpbZevf+z9WwmhjyaPNMT0zcP3y+v5S/nJdtbWTYvD0vXWjbRLb0uAiCa12ra9MgeJ0ryrggBEASecKILkbl5qU28xojqadcWKyWIe1bqXpr11uLAQAKx2pabAOKAzAUMzUEJQdUVbl9s8jGiw7mcJaVmWmAciqm1db3WW4eXDl8u6zL+cVXW9rtfbWd6/3/e97PvtdpumqamGEFS1ahckkajqZqDaQwjueL0uAODu95V7ANBaQ8Te7e187r0jsaqt17dxSDlna3qa5uu+tq4AkFKoWWKMTLG1HnMCc+ueUzq/ve3FUhRgwaE26+bCSL32lFIIod+2xXoMLcoBOCCV2q7WMyMYBLcd6icrDxRmEdGMphUkG2ap1f3Wv5x//+MfPy9b2fq+9G3v2w7FYKN9L2uk+Z/94m9P+VtpN5qOu50prBSI+mvfCXAKIYQQGLoC3cVIVVpb1tYWNxQUc7zUQhJjlWTYArlab/u+l9rWzlpsDCkyqbckIiB2azeD5rAArtsGLNtWP7f109PxVyTIf/Gvx24NndBib8YMOY8xpSgp5QFBXl7f/vzpx58+fPjpx4/7vk9TCALs0UOs1i1wjNmgMzIjDIGTSDdDIslJxZ3ZjIlxSEk4iWQkxACSaIwxpcmInME6gJqCgRjnNMR0GMYcR4CwbOvr9TOgOYFDC9IR9+W26B5QgTxYVUYOHMfx8XD8Po+PRhjJp5QyDENIknJ30F73tS5laXst2o1ciBnQCRmwe0MCQFdTd0fiyGMUxtaxaW+FWjEtzQw7sNxXJUCphYVEwjBOW9lbqYb48vpVRIact20rpVxv1xijEJ/P51JKSomIWqtmpqop5Xmee++lFHdPKd1ut1LK/dyv66qq9xfAzC6Xy3K7OXjZt1b2Xus3794nEW3bYZq39cIMKcVxSAhdQgickTRFZiYws07MsZbL9e1T228OlQDBqJZ1vb4hqDBarwiKCAGFQUIe9q5r6YKAgRAK9CpOXs3QZIyeknUhGchBX77+l3/4L//h4+9frp/run19e7leqzUWjoGEKfzmF7/95vExBUkhNV2u+wtgu8MrwEnVAJBZEAMidu2m1mut267NTMG698Ztd92c3E3NVaE3773utTYl9hAJ0TgAcnXeSr9s/exYiAuQAUJre11uaJlgZHZp/VV8Aj9YFwYEa2qNaQAA7TpNU0ppvh2+fPpyeTu/fHz5i1+/j5ZSHErDtrtapRin4zS2m1cI0CPmGnCzXWpCHgkgjpzBAIxIFLyLkTURsDjvDMQtOmkGIAegSl7aeq7LsRzfUZopvz89ff46frq+gHDKPsR34B5FZQoRvu29m7QgOeZjHg+OcW83YbTF3CggkchSN+172ffr+a3WSkRhGA+neQ5kqkS0NaulS+yRYpI5cOjFNq0hcTQ1J1CutatXZGYetHnrZ+IY88zubn25ncven56ezuu56C6G5HZ9e+1uIYQ7mbmUcj/05+vlXnZUtxDCPc4xsxjjvu/X65XvxGyAZVnu2lJE3Pf9drsxM3fsvatqCJJSIjMWL9slZjbN1p0gMEVzBDLiuG5N+tcYY05HRIpp2MrXUs+wVGs9ShOhTnpb3kpdTsOU87Mb194kR+RRCLa2vJ3fhilPw0jxBGoghUh0VxoCy+xl8XVvt7p0fXvd/vxPf6x9zcORtxYBh5yQ+PHx4XA4Irrj/nndXi8/9ngbDjQyOK0cxvtmEOLpDhYkBtT
ie3dFRmGCqmrduSMrOGpxiwicU+/dDUNKMaFwwegqtruSVuPCyTLFxGNvbNiikPk0pCnnsZfIf/mvBnQWDEzRrdZatSMzBM78c/U6jsNwOJ5CjG4Fvc5DTimJkDYtS2PJMaHzlmJMIGWzYowgZBE7AQBQIg4MkFNmCtbB1dypgu+6NwcFqYaNnSKNeUIkZz+MYUDqve1929fXRc8Gt2kK8zBlOcX4IDLn9JAPeZpOMc6nx+PDw3PKI0c1retlJYBACtSLbuC27+v5cml7BTD3Fmh7HnhmNtJtK+t56WULOBzy42E8THEUj9hItQMYA4EHQDdswmLNu1pOkZB6t4fHx8v1bRyjq5XlWmt9mI/M8k9/+sM8H6dxctNlWXrvIrHWptZPp9PtdhvH0R2u12sI4Z4M1NZqa2ZWSqm1Xi4XZgYARFyW5Xw+p5wvt9dat259yPnh+Ljvt1auoO308ACILCGG2NSHcZqnATrcrlcRiUEAK7EQRkEjFABmQhYPIaY8ioQoghwxxDQO8/Exz+8wz3HKrtrKJhBCyHFISEhBaJwgTxAHpRMznX//n//zn37fIL18vf3hT//04e0PQJpiIFB3f/fu/bvnR0F0wmW7/enj717f3gyAOQbIBs1wY45mDI7u3ru22vrem3ZtDUsTQAosDsGQAqUQhyiSRFiQqIMiOUYEVgidWJmgWyeWwJRQhMKQxkAJXVxDiOEwH8fhIGChVVXcohAxEqC2vi17lKTNJAR3jxJSHOf5eLs+1uvn27XkbGMc359+Gfj6slxIte6xmB7CBJKFRBgSexyVGa26gSlWtYVIpsR7rGbayBlRUUQimllDJwXzTrYsy9vL13h0iHlnD6fj94dv1nJLaQAsFC4RD9TJfeUg6N0MKIKydOPr7fXl5XNrW3RtHJNQCtb6BrAeT0MpMAwcmHKwGJWhUtsDrmTaam6IMFnynPMskdrOvRNzRVQRWdYSNAQJ/WeAOJiX+fDQekWHGPLb21stxQCZ+cuXL8IxpeEeu3/68vnx9DAM07ZtT+8eiWgaD+Mwv7299d5zzvfl8qp6j39qrf/blWVZcs7btt1joVoKIjKziLSytbJo7xBDCMmNydRMY0zgLHEMIITeG4QwupdSWo4pnt6Ped+bK1QkM0DvDmgSUxyOMQ1pnJCFSGJKe9+Z+TCfWi+9d4boQREZiCEKYCBgwEjj/D//x//4hw9fK602LENhtaJue22Hw+Hx4SgiTfttXT99+LHdSpaD3qwC32YNg3FEZkcyt90hIN49a6GukdgluDsiKamLhxhiHmKkEEHB71tRHQiQIDA56x42VqIh+cDaOgFHoMFyHnM72txbt+v2IQ0qXnVfAUhKqN4dEYYcAwYAEOJ1WXqOmDAlJvfjfJT5aF61uRIIhYfpueywX7+uuzL5PJzycGj95n42WTBo5DGDd8o785flz9BLDocU3HEHJ0YROaIbDpRW2LZ6A+NOqvgTejONaehYwzSF8E3us0QFbnu/an8Bp5xOpQSSvdZ9+xophtr87evtdi2hsBJYAks1UXHuDvFSJExI1qKkFLERI3NCTNHzQOUCr9fbOIXH+TuiUYIDB66IHYkVWht8IDxYdwlqXnvteRwcatnOh+m9K/TWmsN4nGtp5PR4enp9ff3++2/33i6XyzRNAIboUZJwlDFer1cRuRd/SinDMLy9vRFRCGHbthgjIt5/h5m3bXOAfVsCcTeMLGB+PZ/HxIGmNBxUtZbNahnHYRjGbdtSD6otTkNEVrMUDmRVArJnppAPUk1dO+GdCY0o4fnx+eH5u3iYOMZhethK37eVu8kwJozUwZpSHjwwULjvYzH7s6+ax+cffv23/+9/93/7+PIf8sBzHkrT8+W8l/aLX3xPqIjcQb98+bRe1tFjCGnvrV6gaR9NQm84XlIChWiFVNGNiEi835GZ5s7uXdzQUbpTG/Ipk69t66ZQeyePmCMyK5ZCTjgPAmuj0CWn3ny9lR41x3H
M2UBUfd+/ipKXxq1uOWPk0QBVeh5TlDiOMwh++fJplevhdBzHPHBQDt6JiFRw3/ecc/J43ohhikEBoxM7Wmub0jmYduyWhHsXhDHGDV66vOaUQDACiAg5OjzV5lLWAGfD2hVWCqWe2+scwxSnYQgYKA5xwLBwJFD0vX99+ezX1yGLWFyW6163qr11IDsQ5gFOQmmIXKww2zxFigEvdV2jOvUulXEzQGakEGQeh9pZl03fyjqXK8TBWYRJRCJBKQXQp/HU3fbr0ryBtXCHKLinFKPgst5UNVFIILfz5f3T+5dlkRjMbN0XEem911r1XtED2EtxRzNtrc3zfK9+llJijMxsZszcar3dbvM8l1L2fV+2G96JhyQG3k0D8zgNZe+BwrbcyrYys8QgwoSwnL8C+G7w8PRAgGg+RgYgRKLhJMEyQC2diVLOACCM6XDI0zHPDxQHawTWRATcW6/TfOLozdRReBhhTA6ETanU1z99/Mcvrwb+m1/+5u32Yb2da1u99uulvP/uYTwmMyXAclu3ax/4MGdW1QOn12277u4exlmJV2YI9GgCvWxGa0jdVEi1qFVwcDfELuBkjwGp96YdmpICQx2Zg/Ts0IGDD9OQpLuvpcZSm6EoAOiglm/bqixRhFAS/+ZfpL05Anpr7IxMqjrPw5gyh3w4HvZ9vazXqktptQMnSSHAdb8WNe3bupZKe23Ltn4e5nw4vsvxVNv+dv3p7fK2FW3qqUeoobTirgCVhUVSVDAXaiSYG0jRmnsXahx7TWZerWylrkVbrYbmbsCMIgn7OMrD0/zPTse/Wc7X8/mLtqV3ut28rL1rMG0z+qOMhzx2bU7daAWuQUb20Tz11kgxcZQ0dEM1AGZni1OfDxwJtlqAo6AouiOCI/TOYMjUenFvSZIwA5B1zykPIe/rNbAHQEbYt5u6YoSt7t9++12rdXt7Q+jTdIgxv75+zWM+Hh/Kvocg9wmveZ5vt1vvVkqNEs10XRYiMoVtX4kIAF5eXnpr7t5bcXcEU22Px8ccE1JTr2XfokiOaZymUouD3i4XLTWxu26mpdbFFYUiR+YoFHIMMQzJmFhwPBym+TAcHymMBqyqX96+nq+vhIWFGZmQRBJLJBGbB+UIEN0CO5L2/+8//N3/+vd/dyuXyLz2cllvr+e3h/HxN7/8PkUbx3kt+6fX11ZpzIccpKtXtx3rVnYUTTEwIkmn7A5KgA669mqldbdGuDeoqxm7IiYMJ54CoO4NDBxUsKURQ6QRB5QxTVPwHDqx2r7DukJdzEpjjKrYCry+3s7nbb81/vW/nAwpZOJI6oag05TGFJ6fT4giPJ8eTqq3razbWq7Xs+kqQntp5+vr+fr1ulzn+fj09ODIh+n5N7/858f5edvq+frl8+cf3160nKWuu1XAgrXurrTa3KpoCWGfoJA2vNayrRuaEhNqBFTTpg5GaAy1ay+we61VE85DPjwcfzPz979895u/+c3/WeLpp09/PN8+ag3rdXBSImGaZuaBRoFktDauu4IgRg85ppBHBTcCcCxVeycDoOBjwiHGKQ1qrs6RM6FnZwFLFCRILbWVOqSRmdbb0vcaGQPhul5QG6hb63EQtTqMMUYZc3p+et6WZduvOeeH4zdq/nb++v79t+7Ye7uXfQhRe9+2DZFEpGu9H3Qi2rb17jR677XuvTd0VW1uFd3nKQsbARzm+Xp5O83HyCzChFxLJXAEZfTWtjFO7lD2EkJMw0ghAHMKEmKIwpJC7Q1qG2IeDqeUBzUwMFdbbq/bdstpOh2fkRADe4wuAZghJMaM6/ny8Y+fL+efXs7/7p/+w+cvPzboBrq3PkT5zS9/GAeOEaz6x59ebtddIHIglLj22qAgkymACwdndqQi4kJEQO6szWvbew9gDdpa94qFZ8pjiNlJgNi9W0dWCIiRMSVFoRCZkBWomhW11vbbm7GGYUKMvdt1K7dLvXzW5VL5b/6bB46CzDFGQHMo4yDoJWaaDw+9yzx
NMaRtrdq32i7nr0utrfX+tn66XF6HcRzTu0N+/NUv/uZp/uZhfhznqZT1erlt+9Us7a96W+t2a7pVJBKA5nG7wX6DbCNhhi5FpXavzYNDig9Cw3pr1o3QGI0x9qbEGEh8t23rKT384rt/9u7hu2+f/+rXv/pX33/3l+fz8vL1z83WVgmcXSGgJslMsVFbdd97r73HkBMPKU4kBIDadN9r3YE1ZM5zPAYIFAanESwSgHRiwhRTDqmXbq3N4wgA58vXwPL+/VOKYdsX8J4Dr/stRZnnA6EKyzROzw/PkXhZryw9ShjT9Pr6EgeZp/l8uUiWbl1rN7N1XYloyAnclmXpqkPORPTx40cAa6323hChlMJoqs1NpzwGZtV6mGZB1ro/HKavL1+CUK0NkY7HubcdrRJi4HDXV4sIilCIQQTRmYUQEJyRwExSng5Hd9trd8AYk1pbtxtamB8eghAE9uMJY/LakRhJQG//4e/+3f/4H/7X3//0D79/+d1lvSyt7htMQ/jF99M4YJ6Cu376/PrTT5/3wkBiYMakAMV2dUNiROxeACsxETFzA2imgE6tQa8U1cndPcTOI2DMxHQvL3kHpQgUGZg8CMDASAkg1I7dQFUSh5HyHCimvVettl19eXPdQ7fO/+y/mYaIOQszInXkHjMcH0Pprzk9BD61Wphx38uyvmnzbl62bV22favrtZPnb9//IoXpcfr21z/89uHwnbus2/Xzl991veY0nd/2L1/q7dJquY5pOIWjeOxKdV8UyPKA4ZHlhBD6ZhmzoQR5FJr31hxgkFGQwHczH8IxxXnf2svLpzSN73/4zfvHwzzwfHicx++bruflT3U371ZrEcgpUMxBwlyrbWXbizVjIWEeDdD6rr1pE+8hGEQeA43esVTTFgYeJIQkU8iDste9Yu2DyLLezue3eZ4eH08xBWbu1oXF0cdxODwcXQWJAofT4RDQe9vRivVbIkbv27aMYyZ07erurnZnB93NfM55WRY32+uGQLXWWvd1XXrvzAQAXSu6mzlLYEQ3n/KUA3rbrJXz25dal3EY13Wd58NhHrZ1gdanIQuHbsaBgsTeLQgPKd4XOgNgTsOUBgzBWJiEQyC6V3jd1GMQAYlBZIxGiTDhmDEObS2EhWj++unt3/2nf//3H/7LbVvrtmMrx8Pw3btpnjTnZAhfXt5++nR+eV3Khg7IdCdpmoIDGrMKazcFHQgzMwQmZlftraq1jrtp68Q05CkxCWGKFAG9dlBDNhZkRAoBA5GN4i692V4ZTEYOM8QxItPaam29LE3fWAtt2nff+Lf/6uDYHQ2wi0BKItE4V8S+7fsQnlvpZb+6oxuCoUHrtda1aZvA4vVyzVN8fv4m0/Du+YfD6fF6O396+fHL6z8OWQ9j7sDLddfdhkSH6IcxTDIWh7fm67Y5QMiPw/AsMup2rXpWTHdaAnLI+fQwv5/GUcgRRMHdIthwPr99Pv+5tnbID2k4nJfzui9qrrYVPdcduyFTmIcg5CKRYlr2basFlczdgZraVjatvVVEwBAiU1DtrXpvoLUzB44phFCs9d572am3upd93+Z5fnp4ioRkbmoI6OAi4TCfmEVY0D3FfJgm7bvbjr5TXwlo229ECEZdtZROIKVqraX1sqwLsxDitm29964NgVprtZa9rKqdGB2stdZbJ0IAQEYRHwOhbvt6aXXTvhEYIY3ThOQOmofBqgKZROlaVXtO2bq79SDBiViCSCIKgHg4PHAUBCSOHIe97OuymSGSD4linpkTxICM6ghpkCR+eVs+nl/229v5djmXdr7t69d5pPfPeUguEhzH27q8nc+v594q9U1DiDEkkdytAAlTRtpTlBAj+oAYlJuDuaO772VppXnFdd3UDMkQOYQYQ2DvaGbeHRtId9AOqA6gdO8LU+AwICcgNkLcdN1KvV379lb3t46sOK35QKKFnONmXSIl0hQ8RGjaAgTV68vbP4ofazXA9PD4zGLLy+pGtVvduwg
h4T/+4z/keBr+8nC+vRJRLev59rlBeRznhPjd+6C38gY1SmJHx33k+mD0xfC2d7by8IAhjhHI4u3z+VMKSqreVVUPcjiFBwoQQIhue4d938Ecgr+9nv+n//nffvrpw9/85d88nJ5LvXlv7x++F7bf1a/XdZWgte4u015rmE7fPf2VfvkHYp1iru3qHrQBURon7FXdoRHU3vtewA2JVBd3Q20EgToMhM4ecnoeRiIiTCn45fxKBJKkao0pD9MMTrVfsTcENF3cm4AJA3LaSxVWBTi/fZlODxzGVmoFu13PTds0TeM43m6X221NKfXWGOV2u5SyM7Nq6//1Q0wA2HVjiyiAsEYJmEZwvZyXVjYEPh6PpW5MMD9+Iw+8bueuysy91LItEsfe6/nyMuoDEQVJzdQQvWoQnh8eMA23W/1ZhxdlzANaIWCXkRwAK+dRW0XzVuE///i7//zH313OX4JvcYD30+M4BUJFK97C6/r6tr3tvXEWKs5oMVFIkQSxu9be3DEgSiBSTurMZQ+qdcyZcRQcR5EywBGRXFiU3PrWrJtaiikkxu7VSYkDSOzOkkYOIYQwjZiEhjgEyWZwqvVyW9dY7GjwqxAixEkxR5EWsTb0UHondnYnJndxcjS+3T5EvIX4DpBRrZlFFgvzjkW9ecPDMcVIv//j33PEOMj1ujTdr8ur2q6aXOaI9XGa8EGsKOLcal/wMiT5DvEfil8g/Da+e//0PQG+rMtyPZVlq9BrU0ocaNi5zsc58FHMToN3xLdbk/iznOZ/+Y//9t//03//7em7d+8P0zzEYOPh6fiNty+YbEHvADTEoamlcPru3V8v28cYXDgxhUjezAOGQvte1s16jEQOWJuzK7fmFaEHzRNHUAJmQvHmBIRuy7KrWwjxfHnjEE6n0zzPX7582Zfz8/Nzq5v16xAzUSSSFI9Dr8XK63Wp9cY7JfcCvTnst5t6F8Z9z8t16b0DHEopAHBvBpt3d7/LRVU1xaFqzTGeDsdMOiUnhMv5Ukphjs6uvV8vr2Y9IGmpFNNIB+01EhJRaU2iEZG2fru+AXhASsMQODt0wqSq7J7Gwd2vt8telhSGMQ6uigwmrChRTcpmRT/99PKPv//Dv/1f/ofP2z+lsOVjGokVi1k/v/p2ey2Ehd2UI5FmceHjw3ycD1FQm/TmEsN4muKQVFtkUfC9ljHy0+MxholgCj4hUgoZNfa2l/3r+fVr60WNDodpGIZe27pvZkYSRSLFMAzjcZpTZEYMaWSJbmxmZblps9qLEROiIFAGeUiZXHCtlwUrhbqVuFkAKKBJAqEt24soGYyIuO/A4DlGmwOHtRUlsGmYFervfvx7MD0dnoHref1z25eVVxXGygn5IQ89ejVfFlezCT1wT+I4zO+PP/zyu79FhH67Xb67BNGOAABgS0lEQVT+E5S3rrrfFt0ihmAqb5dlPE3T1MwuFDgmWrYtppBGlnD48PLh7z/9j19exl/86vt3D78iHh7mR1LfzgvIXvorQwoZ976H+G4OgPpVJLqVkGIv1ZuGzBYI+9KtUYgBsYt1hPVWtl6ZUk/TAQfBjOzIyMzEuC17EHn7+uoIv/n1Lxzs86efzudzoppj2Jer5HGIJyIKg6iTN9tLMYp7aWa6b0uxrTb1os2txHI+vy7XdRjG8/l8l76ZWe9drREBEbmhu3fdmfH58fn9w4FV0evXt4/Lvnzz7j3a075e2ragtxSE3fq+ME8cYwixt8rR1ay0mkLUqs127AZ1f//DD301fB6Oz0/rbb2+vSkkraX3KkFEMmEGVqhXPnyPcdCy4Nevnz98+fH169flRr1PdEjDQNRduzeoLaxr1RLikMbp4Na1sM9TCvx0fDodpiiBgYVzGsd5nnIeOiBoL3VHoCnlwzSnlERiCCHHlHIwZ93s9fJ6vr0ty5mFpnkOIUKTdV2v25UF53EaUg6Rh3wQlG4G7CyGQOjEHUopi66ttQhDkDlPJt9MgwCOXPpbX9T
cfOttJzXAQkuSrI3W8sUwtY6MlAMNh5lG5cV8qxyAsz2MJ07Hpa5ff3x1XBgqIBbdE788y+PeFTxkpN6vYGI03GqP0Z8OPL5/fvfdt+/mZ63bS5pQiUwD4PvDg/LgRjkcO9TWNA5DqRq1HEiLkXdTKTnQdw/HV6uvX88GfwSgIR+j8DE3WwV7NNzL+mdM33SUbnXOT3urHV7zwN6n1vZqb0FO38xDh1xu5fV6u6KySqTQwPetTzGo8HR8OnLSuo1pDCFeL69VOzFRgF/88BdJ5r//x/8foDPC8fQEhjGmlBIykcQYo7sbQRpiHHKE9PL69WVZ9+prre2mwCoD91styx4Q9roBA0ojYEBV7+jBWm+9IQNzejo9nA6nyJG4rLdL3X2eZ3eY56NDM28EnEnQDRHZLXkCBjN1RyJqffPSjsc5Vu/1usr19bM/Pv2q92pmKQ29mxjV+7pOMyAOORBnCGhxcHqmjA0//Pj69eN5fRjnf/m3//q6vqoWB+UwEIay6/68EZEwe6u9F3WQu6B+HlOIpVQJ6XA4HKZ5HsYghOZIgCDIIUS+d8cBLOccY8w5M3Mf+zD4YeRtG0XC8XgMIQDQXUvbVVNKQ45EVGvt3QAYAO6Tk4zi7rwL71y5EtEwUB4mYdRo29NsG4TrpwYUI4mrAql7uW6FenJQ82qKTqQwCvmQJFDkzEbogb/77i9++5t/4dB/96c//u4P/2ldtxhDNAaXLXYiGQ8HYdbFtO+3a8GIA1uY0zCBYNWit5u+fN3Pb1sm7AQt1dPTI0B0tMN4XPFq2sWjEx6OMsxBDW7tuvaLKU/TgQHbevn65eXdczTpgnEYpnLukQlDX8pHJEUPFh6m/Mttxb28DQMGTaDJveR8jHnWrOpc3m5OHDlGh5CBcMghO0JrLVJQ8Ov5lbodj0dv9eH9Y2D53e//YVuXb755yjHGJGaW05ziJEGQIefcWnfuTpKAjkdsap1Yz1vd+62sQF7WpqoEuO1LqTWNgyspOHESv58GJZQgPAxpnufW2qfzJ2EQkTiMgfB2/hJFhLPEptYVgRkUStRAwZFloAl52vRCiEDdwJ+evvny+imiR0Ltt4ynvl0gTGPApexEg5GA7m3bPA8+jBjFVBk2LM2UptPDQ6U15DFPQxrVyjRNz0/v5/lUS7+9nUutKNxaO1/fVDWIJBFg8q7koGDay15Q2y4k8zCmlJxcBImg96aqd73TOI6qervd7uKo4/E4TVNrnTnEmHvvRJRzFqG7oPDecTeDGGMIQTgQAziaWQhBRGJriJhzFhFZvaOo0BLjYAZ7aRpMQhpg5BBXrWsBM1Q3RATG4ma2JJQgQuwsJU/Hv/5n//u//ct/FbCf5ue3t7c/vt4cJEYBZ115eJzmMMaGOhy/fLXrstlowC7BHNt6e/3Mf/50Pf/uw3/+8PEP748Pcci1L7uc56e89gtVdOxuu1uVEJ6evpnyadvrT68/gbeIumxvOWQ1KPt5ucLxeDDGMSU+ZuwWmLReq73mPPV9Ps3fH8f55fqf9rKinrz7OLVxGMZpfis3chYKBhghHY9Ty9bWHjxoR0cjx2XdUhoPT0evKxK7448//f58/TLPT6fT8yGl18tHbTqNj4gcY0QmVYsxqoI6MUNKaZ7nDl671VpD5GVdL5dLCCml0Lq5IxqDUxoGQ5CUEBHU0IHAx3EMQnW5fn39mOJwOJyQYdmKo3R0IgQnpNAc2AG0dt23AllG4QAA0+FYezTr2K2a/vCL31yWC8RxOjzOp0cP47Z5NVi6RCGJOTEyuDFwTB4HqpvjDTt0nHKGp0O367IUzyFN09Pj4+Pz0/vj8aGU8iZJRCSGrdmyba4K3oBcVdfbsm1ba9VAl8t5W3Z0T8N4PB7HcRjGdG+BxxiPx+PxeMx53LblfvpTyncBee+uqvu+E1GMUuvP8xWltFK2WquIIHrOERG3dSe+k1GEiObDiMAAQETyejU7DdD34tc04vULeh8zKnAMPjy
mmcq21gKI7uimxWo3NCfLu0h1s8d5+v6HX75/+t7r7XFY3p2++RD+0NumOnYmBgxIQH7z2tp+W7dtb9Rh1TaRt3l5ffv4urRP1w9fyx9popuW3CTkcS8Ay2pha60hU9+uSPXx9BxtOMQppeHz9UsrVai9e39Sq73Svu2O19LYhBM/5DF4927eq75evmD48MN339ayhCSPj7/6+voTEaIeXRc3qRug876vt+XrMAyCMXKWKCOKhHGUOXZAwHenIQjclusYcm310+cPEuDd8w8ppWkarKs7xjwwM5OEEIEAwBGBQAAdAptZSimUPY8p7iWX1LRqr0QEnlVVFcBxGMZxOnAMAGCtIyIhklvKUVtzLYwEhtu25YSBcHz/Q2Qo61licGsssW51CuzQLucr0gOPMxgDYErZDafH0bxwmn7x/rvI8fj0DPOInUOwvSNA6/sS5mG6q9NhcENHIAHfVDWQjBhacUcrwpDilPOAiLflspe1lNKaniKnFMZDOulkqswYQmhNt21b13Xf91r3Wuu6ruu63geAkNy9qRIzs2CMMcbIjHdb3nsXkfu5N3NV7b0y876buztC711bR8SUooi4+77vjrspBOS7yjCEEFjcvaszs9wWrQBpyBVbOrSpxHJV0hASCyUhHgYtVgDIHcH/K4a+165LHt0tHudv37375jgdb9y+nD9HgtMhvl02V2zcQyQAi0OqGZa+rXItgZMPrfN29Tj11/GCUpfzn9r2JUxIpE3XEI4yTo5A1i7bTzmMu+v58qKdjuHJ2mrsDj2MIAYi5NBT0DQdum7qO9C06J7RDWEvbS9Yy3j9ch3C6+P0bLVDGKbh/Xa7kotrsjXdoPpOiZ8fx+fjcDwlGSQhJo8poGTgQCnlyfvt9etPU0611s9fX8djen76vt5qTE2tlKqARJiG8XBXVWntEmAcQu1uvSO5eUMEIspxGEc7v93cFZEQubbSWhMOIpKGfDgccs6muq4rEsUQ0FStq/rlckkhhzhNhwGhN9t779Bwyo9L+RwE2QkhOZEpmOr17TWyME8sgcgBvHkfYnQmkfj0/j2Ms4KyKbFybVya1rKca35+DodMZtCuRNUkosZyWy/ruW67t2rqMaZ5npnDtu8vLy+qSkQxxhAZC5M2dxeReTid5gMw7Pu+rnvv3V3dvZT29vZWyobIKQmS39P9UvaXl5f7VvC7LBzAW7tjMvT+YebW2p0ZY44ANI4zoiOiuwFg750phEgphRDCkDIiqmqt1R0xRln3dtvq8YHDNCeuT4/96q1ssK0hHrMHlvGIu6Ov4GpOKcRuaoplj13b4ci/+e4vvp1/IMC3df3zy+92+/L49PB6vuzrxnbAKQNGQp5Sqo9Do9MBMhTGtmMaFh3i3hHeynomVhBVZu+q1JEBkYMoWb3t2+60d/nzh5dtKU/HMTHVSGkYkwTCHjEYdCICMFUnOPZ2RA/S+cAyH4fHoW/jhUrWhSJN3EycYp8VwLlOMqkTpDq+V/OYOQkrMlrFpTVoGzHREPdW2u2KGLa1XW5fhykdT/Oy7gQY49wUz9cPrV5Px29yzq3t7r6XdQ6jK7l37R3uwQzYGNnMB4nTPF4ut3UrLFm11bpDGty91rqWHRGtNkHq7rU1b52lb/suMT49ProTOJStEMD29mU+jHl4RBvDwMioe40cJBBCqmXbtx5yS0EYwdFSICJ6eDrGcbqtZRwShaAVFNC9B5SUufRrr2vkI46hobGjrjVYULB1b3UzZg55CCGM4+zut9ttX4sIIVLvpWyblrJvpZky8+n02L75fpyiu/VeWi8p3aetnEXKvm5bMTNwJMLea9nb5fzp69eXcZzGccw59d6XZbmLw+8OAQBU7wBtAqB7iM/MITARmUHv/WfYNbqIIHKt+72+TCSIKF+/3oSCNXrWOCY2Rj7Ap7psm5+O0zAcRPKS6PVtA1AkRG8hCEqsTcr+9d379HB48N7Odfnppw8fv/6hlJ8CTyK4raubOMq
3D9+ElMOI3+UQJS2tRAi66w5COXatqIUjxzAjJet98/1r/wKND/iw1627Vey4DSd61NDdGfQphDDEFPMI4G47AHCQGIVIoCFUMg7mOEwTWyqtb/uZx6M10V1jGpMHAcfsi5emIWicYoz5cZiyiFjxqrVp3Wwt66LdMYy9eauVw7htpaw3hkIctAc3F2GgBNDPt9fHwywpGOg9c+29u4K7r+ta655SQgcCS0wFnall4efnR/vytiwLQicC09Z6bVVvt9tyuRLiHetXSkkscwxMNs7zbbnkmNzRwXLObTk77Ov+GiklnFwAIjCEGIJ7GIZRrbr3XrfpeBQeHLQq7Ht5/u5YGpBHcClWbudbMw/dgTRnQmvUAR4fMYiVgpev17eXt1b22rWDmiE6gLVWAMCsE/1MV1e3fdkcbN0LAKSUzueve13ncULErvV+kXABQOKwbdv5fN73GkMexgRgRHwP692993a70X00dBzHlFJK6R7kpJRSynfY8J2e9HPu+1/15/+VrIH3AeumvWz7PXUuZZNitSnYBvKCj3PqcdutHA68ewHCObxrBBd5W88b9BYTVg4se6LsCoDmhJ/PL398/bM4f/7zn14+fLR4zqGE1LddO657sY+XkMfv3g0PyLGM7e364bYv7EgMU0SOWrcaDNiFQtjbHhV2XdiQIQHLCZ9OCBJikDmO05yOGUYQcEJqTd0cn2JiJoFijJR5CgkULXFMcVIs27btAQAscm7WTEHYAyGiZHAsSuaRMOXAbtw78x2ZCGbGRBQYyAlCHvh2fdOqxMw+MIzo6NaYcxzkxw//BEDj+E47uHsIYd1uYF7rbp3dW+urBEBMRARMwCqCrbV1W1Lm2nbr5g73OTBGIRJwAzAR6W7zNEXCvrcQ6e31SyQ9HcdWVVtxQyC3xkvbeGZ1jS4pDa21pj1OSYTdpJtr11J7OEpOg1Iuzd4+/enwza+MB2pbasvNO7ZqvTrAEA8UUf0C9Ez4nmIvsP50+dOHjx9fthWMmCnGxMzbtgLAVnYgdIR1K04oAZ2AhpDTdEiDlbbdlvW2AICIDMNAKL0XVSMiMyNCRHdoSMwUUjrknFury7LcbjdVnedZREopd2BMjOneLXE35gCE3Xrdt1K2EALiz6Okvddta73fl5F77/1+VytVRMQJuxk2PveuCnFUHphStL65Ve27NUagteyo3Sx1LhLIpLHZu6fHh+F4uXz4w4e/Iw1/+vwPvfu69oW+hpBMem/FXT+9WMJEFnmA7ky7UPGYD2M6HNMhi/i8gZfMiTntqfsBAY0kCk/iEpHFAzpwkGGYpvFwHzNf9qW4JZN5nmOM1+t12+pSa0klpMiMNCLaGqPkHO/qYhahBvu+d0VAAatmvauLoCIty3JvwcYoImIGAPchhJGZCbn2HkKgw6ydJ+YU4r5up4fj48PjH/74T58/vx4OP/9t6ECIl8tlGjIjllKjUJMMLuYWQrhpAegiMkyRXrVthZGqdgEUYkJ3oceH5yhUywZgZjbEvK9ba6vberu9ffvu27Lr7fJlSnlZrsQRSYWE0Ep5pXhC4Bxjc2s/V/3moEaZeu/7esvj+4EBYk5xZhKI7P0AAz2IlkUBViCMh6MksYho4E5AE53ep+my+uvr5dN+23POz09PIUBrjZlFQkrZECR54GkOiQUL9DTkKEFjhUp3KME4jilERARAZr+jfg6HQ7c71Bp7bSmlaZpqrSnle7h/r4qqain1HiUCwJ2dEXMKIdRaS6m9NiLKMczzPM6Hw+GbdSvL7WJmzAiEZnovQy3bIqiCFPdWFN0WeCAZJ+/UgowIvGuJMv3w9P7wr/4PRBQpFEchUOgk/PR4eHyaD1PibXu77K7jafx1KyTc3h0ffzmnunuKMVBIQQaZchi7tcdv3iFADvk4n4Y8hhBcobUmSIgMdyEUtKq9FDCDLCwYuxZVRYfeCrirNtSeOYyHMedcay2t1t7U7e1yzjkPQ8op3QNBRBaRbduwNUJ
nvqPCupqVuoOjMGuvvffbenP31Ia7CalNa3O1moeo1s01plFZmkUnPS/L82nOafiHf/wP19vrPL4fhyhC4BEwnS+v23ZlalEoyKmZMgckQXPt7go/I/BjSimdL0vrQAwhxTTkcTiOKQ5xyINIgHVdg2QD6r1fb1/39TKmlPP49nY5jBOAu7aH00kClK3GNJay9VLTNKkbmbdW91qt1jjkYZy5sbr1fQshxjTEYYY42vGEzrTl1oUHF6kckMIMMUMgw0TIDhZippSPD08AcM1vEjmn8Y60IAzMEmMUEWBi8UFCongAMiETwsBD41RbjDHH4b+eYGIGEUkSOAYiaq25OwDcI/gQMIQUQsg5hxBKKdfrFZC113sefMe+030fGyIRioi3XgmN8HA6DMfjsXvbH9d1ZUF3u5u5Xpu2LthBe0tZGA3Jd9+t2Gl4GvLzw+H5af7+ND4+jPNdph0kCRGA7V3VIceQssjA3drTCN8//eLt+Yd1+6s4xEA8SFZA7hgCd9+GYTpMRwBwkMQkZswYkoSQwKW1VrWqNWu91o4oCrZib60JA3l3ImRw6LVrbatZJ6IhD5Jk39fr9brsi4FL4GOe71ie+5DhnS1lZtu2qSqLCLOqruu6lwJM8zC6u2BwRgoCiBikuXnvjo6C6t5MhWOIA5i3ps5h3RdC22v9+PFT12U+HrZrTZ3ccZ6GUspPP/0UEwNxM22mgBxiAAD17uYA4L2Bee8WYxymqV5vYOCueZgen57nw9S1Lsvdd0uMqdzWZb+2pozycDxZNxGmIFrL48NTCrKs5yhDzLOq1m6PMSCA1m6miKja1msFtXE+EJAieCbS4q3dSZKI2YZjDqNbdUdnAQ4GiBAQmnvHsq3ns2kZs6Tnd6dpvn9rrXUJnHJAYDC3rnXdqrdbpBxi37ojz09THoYYs1EgBySy3kspROTA61Y0erCfIQD3cdA7MM/cY4wp5+PpFEO4g5VUVb3fc+JlWa7X6+vrq5mJSM7pdDrFwzSOY8ihu/VtJ6IgcphHdSvlDqHpIclDeJQA1r2Cg0xBskOAx9P3vzj++vn03cPpaR4Pc5pQJISQUoo5pRA5iXdkRSIzMHcovfVeetifhNze033LBnmzXTzkHCVTpBg4Eom6oVvXqqqGwDGIiChll/u+SoXNHYRkgBBYVNW7c5REd1ha27ata53nWUKqtVprvTUzA8IQwmGcmPl++q/XK4uklO5tlNvttpRd9d6J9JBiDtkJjdBZ1BURkURCIhJwZca7LhcACAWAat1ZvLXarYfg1/Wm3nIet7U0KzEeA8fa9rLchiGP4xgiC4dtr+M45pz3fW+tEjGi36PSXmopW4hy/17TECXFZn69rUTdEUtpzGwK3ru7O8BhnpmCmQHA2/n8fDoG5tY6o+SYt3Wv3VIKpdQcE6cIvTNhCPFuI+8JIrKgDFmiJ4Ixa2ltPYd8cI4YBoBEAO5AaHD/7wHfPn/58NNPy7rWVureHEqpuzsw0ziO0zTdQ3kzU2tYHCFuxX789JPX/iv7dX6XICqadTN3b6YYBMxbrcyMRIC4bdvlclnX1czuBdDD08P8eBrm6R4mEVFkcWJKQ2ttGIZ5nu+H8x4O5ZyHYRjmnxPlZVlur5ckAYVFpFu/J+uSIgUREZmOKWNQ6pJxHPPzwzffH3/77eGHbx6exmkCBmA01OZuW1NvKTwBgHkDckysBVophB3dU8g5Juu9bLt7U7UkEdEl4pxHcjDrjJ6TlGZOZO6AqGYB75s9PaWBg4zT5Nb3fe8CNIKqtqp7q621Uja15mZMwRVulwXIg8g0Tc5UWh3HERHvdkJCMP/ZOZq7A/Ter9erqo7zdO+oC7G67bWodyLKw0AkQcYYE9HPG5/umdO9zQ4AMcZWOcbo1m/LBzQ3TWZ2PD5Mh2PvvSzLYRopZ1UFcyJCIO3VId59kaohBgrRtjWItNa2UoU4yDgfH2Mar7e1NJ/nGchfPn8BoKenhynlXoxFa+v
b5ofD8e3tNScZUj6/vrHE58dj2+tW95CAQ+putdZ5Pq6+qdr9yTDz3ZBpb0ce4mH2p3cQjpJG62C6kXfz7AgCho6A4OCICB5EwvXt/KfPH1NKgeTTx4/X88t4mE+nRzNY1/U+zp9SGlMmAgamEH713S9L03E8IgdVdfuZCRCIJEittbu7+zAM4zjGGO8v9rZt7t7dcs7H45GDvF7Oddu1d3fPOQeUu1Fj5ufn5+fn5/sbYgpqLZEAeFv3223Zeo0sd/Cjuw0pE+Kch/vt8m/+zX+r1Q3AuZjVh/z+2+dfzfNRhEGsE5h1uROh3K3obUG+WW0NkuQ2E4i5jSlljobm2upevBftlljUAAG09X2vDI4ELIRukQMRCBkAiBCimxmim2kKmZnd3QwILcYIALXtcIVtW15fX2svp9NpmqbWWt1biAwACt5aE0YH3Xp1VVYhZhEhortjJaJxmn49TYoaQkhxYOZSynK91d7yMB2Px+PhMOQJkRH/t7sQwBHpji8nYFNww969rtu2bTlEZBaRw/w05MnKyszDMLRSHbS1EqOkYXSFXhURQ4jbWhS7iDAFRooxXtaCAEMeYphYJgBwVEBZlsvL61dtZmb4+DAOoTZar+fjMC23cwj27uGBwHtvvXfBB0rptp3ruqXIyLFzaNqnabpc3mptIQgi1n0jgCApJMHHRxueencIzA/vvRbVS/ANBJQFCBkQAADQEYb56Ye/+Kvp6VHb/vblM6AZyOvr6735lVLKOQNASjHGSErDGPM4sIwpSKnn5fNLN5zykFLqqkjOyvu+q2rwVMumvZZSgsgP33/PzK33Uso4juiwLevXr1/X642ZY4x4nxl2v8vgAODeLAMAB7Xed29EUGu33sQBQdEdeq+lLOe3+2G4R1ny3/6L/447N7OPbx+uy9scxyEkJlPRogUNzawphhQBbd2X3is6uHu0IVjJA4ZJpukYYyxlaVWZEpDXxgQIbmDo7tfbjRBzjhhC3TVQAAZ3c/dScNvKfR0ii4BTV1XVvZR1Xd1dAsUY0jDOalupsG15msfDoe0lBQOwtZU77VOAtr2KCJLsrW61iEhKiZmBKY2DpEjws8wY0Hr32+12N11Dmsd8GIfT3VL2Wksp1vvdaqq2u4KFCFGb1t6qNtMhnxJLTtM4DsfjwbT1Zt379Xp9OJ62zRU6IhKRSCi13p/7MAx763vbkHycwrzN161qh8Ph4MRvb5emVUQGd5Ewj8fb7WbmvVeLUNYyxNHMtu0yDTEKg+vp9PDy8qVqJ2Qk76VYbZ6k6M47Pzw8mM2IyCz3KALACVr3HqcnGL4FxW6KvHBg6qRvZwpXPj05j3f8BDgDAk6nH/76+Av0fn75u3//b0/lgDO//NS/fPrUWnv37pkhbNtW3oq7PuTjkz9d1mXdF2E2s+v1HGP87a/+ZjxNt22t2kII5JgkTNE+fFjWdQOAw+EgQiGMhI5g67L01pCIAccp3wEZ276UHe8J3r0NDOZl2xFBYiSAvW69NQBAogiUQmIiALDeLm/ndV2BcLhLrb779ld979u+bLW06szQbLdi0pE5oAMiOmHblNDd3UvpgGY29ZoQLYsb1H0Db9AbIqaUYoyqQ+8dFDqqK8CyaHfhjIgkAQkcoRbV3pm51Oru83gIIRTd7xCEfa91b9u2EcPp8RiDDCke58M9XGTUNA3etda+9K1pFaIkwQmaVjC/t59qrXcByc8sTtV930opYM4h7LXvtdy9thCnkMZhzinXthu4mQPgvR2zLLfWKjOtt8u+XRmcg0RPSeR0mMHA3dZlQcSAdl9PrWq1tiFnAGkVMFopJQ1ZtasZgAOauzoUDjROMxhO07g2//Tl815u29pzzv/8b/7mm2++6c3WdRFqtfgcaZrGWqsp5JBr0ZzCOA61rNu6S0wp52kQ2xUDVO37tpQckcDU8jiSMAvHIaeQRszaKuAN+BhwgL7orV8/v9a2BPa0run5EYcHAEYABEMkQHUQOb3
/9d/+67fbdj7/0xiGX3zzAxENQ4ox9vFwu11LKRZpb8Waln3fEUnYHZdl+fz15T3DttzWdb2b851546Bgd4XPuq632+1eDOUgYC4id1UIupdtiyKltX1dt1IAYJ5n93BPbdUa7QKEZV/P1ze3nyWftMA4zCklBRum8S4vvdeOpNfFwAA0h/h8enTvCtW8t0altLtzERGzn8vJpFq1Q1cbhiBpiAmaNm7WooQUUrrbGLNYS1c3cf2ZeW+WUwIndzT1VjZXCyljFHJQVQjsCOu69t6JwKFxhGM6MLMIrcv1/iMWZ7e27h323vuupq0zOJIreCtdWxckF3I1EWmt3TWD9+b53lavHR37tjUHbSZCgOZoElIIgRiggTuo9t47EPTavKtIUFcQdiD3XmsjkGGcat17be5OIOOUxpxrXZv127qAYZIMjKXeBKNpabuKRFUls8zBUHdkBAXUYZolstdWSllK7cUur58fjsd3796XVvdtQa3hMfEUwKjXfu/5I2JX9K3mPNaiijsTjeORkrvDGGZHa92YGd1UPR9y7725f/P4vj8+0eGgDWz9kmICnlHb7376fevLLw9P0BwZhjgZBUQAJ8eOIAgGQKd3v/nV9x/Wl8vKhQgAIIowM4AdhlFVb3vZthszz8dHpiABWquX69tWVtWHh8OJgVqv5r3stUpPIYqEbdve3t4ul9sdFnY6HXrv1+sC9JOZ7fvmrofDQUR6tZTS4XDovX/9+nXbF3eVGIQCorempTYiarVuy66tbPM6TYck4W4KSbW11kxl33cAQIKHhwPzg0Lvvar6uq73ROSuGr2Tye6tBEbqvevtdpoPQjwMAwcJLCFFjgER75psBwVwQAREEQmIDHjdbuuy30ni8/Ewp9CWVWsbhiGEIEj35B0RYQUEzTkjOpK3qq1305+vrGUvpZRSkCmEkEOs5kAoziIYIhMgRkQS894cDKHWvpW1tyJIROjIQUjI3B2JDofD4TilHO5p7n9dWqEhRTPr7oTYuxLwmIey3DrYlGTbL4SIADmkw5DmKZ+vX5dl++bxW9Btq5cHOfbW2rZiTvM4bqUAYcq5lMqmElKyLlIINnc3RfCAIGwcx9HdP335omoiIY8DoFp3SUl7k0SIGNMhp3y5vqUAgN1Ybbc85KZ9HIdeO3FgIqLQexOhprVu9Xg8RuHqLR6/NfkmQGr9y/WnjzKtnYwwbKuepXhOoWhfdzlkd0REdHQEdAIEAHv67hfHP/w+W9v3vfUiSQLhVpsDDOOIMRFzTiHGaI7ujmVNZQjjUFrV+0JZpBjiNIYY872Mc7m8tab3+k8IIcYYY17X/evXr9freV3XEKRs9R5lHA5zzkmtff7y8cOHD7W14/H4q1/+8jiNSdJxmmOMwLSuq3tS1XW97SRDTPN8TGlIw2RmAoSMFKOkNIjI/W1uTS+3a2vtTvS+TxJs2wLgIeQc0050O18+f30Zx1FiCPdMSYM3A7x3p52ZBVHNwF0BWmvXfS+lqGmt+7qtHEhB18s1SQCwrW5jTDEld9y2bV33OyCWCLrWvfb7Q7u/Xa0VZMIooEoAMeeErKrMGAOHIOiOQFU7gKRg5kgk45jrtiNiHGOQVEq5XC6llBDSfBhzTogownciuZnfZ5FcLWe9j+T23sk9xijuWlZJwIBR0rdP76LQut6W29VNBGNt5+PpAITLcu5tSfTe3T3GZd+ZQmDpiijONSYJOcq27Yh13z3HFCWoA5AT/py+EyFAH/LEBq31eZ6IiFNetttWbnk49G1FxBxD2ZacH10dmfa+jGFkzgSo2ogQTLWXFDNxUKioBZDD83eKQ1le+rq9ff26llWnSZDSEJnxLihwAARGb45813fn6WmtbVtec857LV+Xi4gQ8TQdnt99gw6X61trRVVrb0wy5qmVXrZtcRjyRCQphofTUwwB4Wfx8/F4/BkdiQgA7ioip9Pp/fv3y7K0dhex0TzP83FGRED0jjHkPEzX2+evX98O8xx
jfH54PJ1OMcZSyuFwqHV/fX39+vWNiFpSdTydHqYp0/+/qPfosS3L0sO2d8deFz6ey8yq7OousruglghoJHDCgQBJ1IASQOj36B9IP0AjzTXhWKCaZFNsVBcrsyrtM/FeRFx3/Nl+a3Bele4oJhGBe+8+a629PocQkVwso9ifsDS0nOWiKDDGWuvz+bz8V8ZYCEFgKhn3MSSGD81ZG/Pw9FgWhRBiWWNBCFNKEH6OslkeBuf9OI4xxkwpIQSE0DnXtW1smuWeOg5DPwycsbKqQghN08zzjDG2VsfkjdFG+xQjBtAyCuHy3iFCKEWfIAAxLobCAENMMYCRMoExifMcgk8pCq5W5QrC5GIAAECYnI8dhDGCokLr9brIKwBACCHGoLW21lBKsyyjmEwpEYIjSIRgnZLWmiDgrAcQSkZRSHVdAhxn72KCIQTOVQAGYCBYNU/eOo0AcNYSSgEA1lqYfKbyBUANPhKES6WsGeZxmExMKMQUrTUQBO/jOA7e+xj9zS6nFHutpRAUU0Rw1zcpWoRQDHicjLPm/ua2t3YeR5SQBw4Br6MD0NX5BhDqjKvKkjA8+bhmBWA0JWemTy7SbH3LVgSeOgD++PaHP4oY6rIANEGOQUIAAggAgCAaHSnAMAcR8EzdvHn59psxJDhbs98/x+RX9UZlRUqJM17XZYxRa30+t9776Lw3emw7AuusYllZcC7/PGAvdS3LMiklQkhr/XlIiREhtFpV63Xtve/7LiVwdXW1Xm+dc8PQTdOEEMaUMsabpvn46Xl/OGy326urq6qqCCFCKC7yskyMKmPMcj07n8/jOFJK8b/+V//94kBvFx+m5TJlDEJoGUWW5BIA4pI2zCkDAPgYGOeMUARhjHGZKbz/rHVYLAy01sYYp00EKcWojXHOBe+Xi2mI0Wnjg0eMAgSddYs2YhxHa60PFiKAMPQueBcggowSzgXjXAixcLtxgpQQhLEQgmDCKKGMEkpiAnoy1vsYgfXWOw8AJBB775wzgolM8hSTNtpZFyCoytWq3mKMUgII4a7r+raBAFDKlJLWOmud90EbgxBOi4sbgJRyxomitFY5hXCc+hQdih4QWJUb7yZKUgLwsD8SmBhBEOCEIEQwBO+c41ImTCZtXLTWOO9cirAd527s+qlv+2GeBmuNMa7t2qHvnJk3q7ws5KpSKucgBTsMBIOpHwqZgZiGoacI5plCmIzzzCVnjIMACYDB+9navMyLLDM2VNW63t2n4gIqDiCnUEJrm6f3uj9zjBGCv/vH32o7397fCs4RYkioBCBIKYKUzBz0SIRMEAMIpKD68xFESkmQ4DTNep6MnvTcB+czmZd16b1rT80wdJggkMA4DIxSyYUSAqTkgvPRp+i1nkPwjNGU4pKgAyFeBAB5XmaZWti1hGApBeeSMUYIFUKqLM+yvK5X9XoDEZ7N3PbtOE0+OgAJCEgJtV5vVqs1QjSlSCldun0Iniz2G84ZY2eEMSHMubCIFRBIxpjo3cKlXhoFZSylxBEAMXGZYwAhhD5FCKFzRmuNEKnrEgAwDAMAgFJMvKGUOmv7vvfeE0qllIiSoiiMd3Nwbd9lTCxS16HvYwhS8sU3HEK8dENCPrO9Q4ohBAQAAgBj/HnkipFyRilNIRrjCKIARAghSMRFyBhDGHZD571fvi09zwBCyEiMEGMKInAuUIqMMfM4zXpkVCjFlncUQpjnuR96jLHg/OriEqYgc5kLti4EieDcdGK1A95BrzfC29lGhI3W1p4JcSkRRktI2Ww0YwxDGKwzxmAuGSPaBkJRmoCLwTnjnIsBhBBCdCBC5wIAERNYZCpTilKKuYwgTeOppHwaGuCjUmQYBgyByCShlGBgrbFuZoyxbEtRyAQ30bfDuaxfKlHNk6vqgKpNhAVIIDBF2VaM3x+eP3hp19vNF1/86qd3f+z7rl5tXDxVQiJWBRAwRIFnqO0Ca5DcAICKYgMIPTXnGKNgclVfxmStnZ3Xk/HjrGc
TykrN8wQJXK1WXLBPT8/N0COCGKPeuxjj7LV1DqS0gPfe+xgDpawsSymzheIWY5xno7VZkOxhGPb7w8KfK+uqKLPtdpsQtNa+fvHy6fh4PO5DCKt1tanWCBAAACFswYmNmRfU4vO5WrhHnwVpICGEMI4xguB87/ulKlPOlhGIMSaUJIRMw2jHGWOcrDfW6uAgTH9S7rhlXTOOI4SQc7lwxJc7K8Z02U5yggEAGECBKZWiyPLFPUEu9oAJAACkzMqitk637ZlQmWUZpTikCGKilCwxchhjPc7DrINPjEFMiPdRCJHnihCMEJ5GO00TxGAtuDW+77pp6ud5RAhRzjBXLvhJzwCkLMuMsc65vu+D7y4u0J96cWi6xlsnpSQIbDZbySmiKJdKCkoIu/oyz/McpPT0+HD68I9Qu2EyhKuyFGM/AIi5Wtk4W+ud8UqpRcfkAeKEZlnhwwCI9WFGGDDGMMVlXiTgjPMgLg6KblMVnFNBGUxgnmyp6jD3wdl6te66bhhHgpNNWAOKAeSyphilyIz1BiVIU6HKCFXbT7fFRVELHwwODpICAW/sCUOMUQSYdEMrlPz1P/2nNsxD11jvCMJuHjkrEEQAIISpA3D+8FjcSShzgJiQpQ0+ej32A8b05vaiqgoAUtM00zTs9/sYw2az4VyEGGKMSvDtdsOlJIwaa51zhJOMYms9xphgaowex3HZvixMh77vFyp/jB5jzBhfLLUXXnRRFF3TGrdfHhVvraTkxc2t1ppzzikLIabk53lwzo/jiBDCeGFfY845CclzIWHiCwS9gFwAROssBJ8JqM65xZ2CMSa5cMHP8zzPE8OEIYwpkZxCABYGzqJzwxhLKUKIKSUIyZL0lmXZAhZGkKLzISRCIMeEc55xASAM0ReFkpIjhCjleZ7nWbk/PC2DoBBCMg4JhBAstTykSBCgglWMTv3UnFrCsHPOGTvPI+cMY5Ii7ftRm6GuS4IFJgRSkvEyuiV9Ohozp5AwxZRSrWdrbdu2erZZVhKCtZ76qTN2EowLhhBIKbiyqCHA3qTOaZv6KngpRF1s1BdFWa3Gw6d6miGN54ePMEGm0qF5gpRiH0GCzoaUIMEYADgHDyFOCANCI0II48269AESnAghs7M+GK9nDHiRiyLLizz3fYcISgEAiPJqM8wBRuAjJQQyVAi+M9ErIQlmjCMXhtibfhggIOt1LZk0g1VlLqq1RxDAHgORUpjak8Rse3H1dHqe53nq+5SCC+n923eXt3crSgCIEJCYAALQcDB8f8zKHRISQCxFmWUZhpRTOY7zNE1VVUmVH/b9h4e3KTmCVV1vYozn81kJUciMYjSPfcMYQUhrjQPOCsU5R5Awxsuy2m63wzCE4Pu+59xaazCGUqpFHtD3g/c+y4qyrJVSzrn949Onx4+L3ohJ8frF/eXughDWdc25a/thoAwzwmOMIKE/cSKA9x4hQhhVkvOUgvWfn4Foo/cxhARhIIxmRb6E9kCMEgTzOHVD3zYnTigBmDOSZyXGeLAaI4AxijFRylGKzltrdQSIQgwSghgs9COECMTAuxi8TSlhABNMs5mjDzF6JqSgnHDGGAMgnJtna3We51IIjCHAn+Et5xylFMe4EMcjx72e+n2TUmKCW2vPbTOOo/GOcRlCmuex6stVWSUEF2abyFVKKYTkrIYgIrSUf2+9I5DgpOexsTHpoQ0hRZBoLhDjCCWfYtO2DOOIQF6uFM2jB31vm/5DVpBcZfz21bWARs+M58350B6fu7EjYEAY5Kqw3iEErDeIgeSB8wl4nHwKKSKSxYRDHCFRyHlvLMCYqZwCmGWScdzr1rgZTgHlWYSIICZLWZWbFFE7t0RmF3cvmFKYC0xIsAYO2heTKGtGeZg6KDFVNLgYCCYAhkgiBBxJkMXxcM6Lzf2ONe2h1Wejpx7Bh3cfs1wS9jUAJKWEIEzRM5eK7TrigCEGIMY4vbi8TCnEiKLbj6YbBpCp4s3L62CHh4d3CIZ
JD1V9t15jAEJiJKVAIArOYMaUUiEBijKU4mhn5xyndV5WWVZO0xSjX4ovpVQphRBqmvM4NcPQFfl2t9tBiIZ+djERxiKMhDPGyPPxQIV8/fq1KpR2tu/GeXATdARhznnf90opKRUhLKZEuKDL7JVl2RJFuHQWLpkzflmBSS5CCM77GEHwQU9znpVVXgTvg7MhecH5rtpxRhYQYJomq2eEkJSZFCqTOQCAS+GTX5g5PoYUgLPBB01QCiHp2RCGEOILuG2D7/seY7SQecqyRIiM43g6naSUIYRp+qwNNcbYo11o4mnJ33WYEEIICcGbacYJMykiZVrrnpA8zxeu0cJsSynFmKbJBp+WXJalLQxjk4Azzs7Or/OSIHQ87evV5mpdM0y0mZHMynKd5aWUUnLBMGv6pu/GwTdDd4zBrKriZntxud7OL984Hd+++yGMDQgaUaIEG/vGzTEADBFJMAYIMaLGaG2GepUrIYO3JRcRAs45CongaJ2PAKSEZJ5BKZOP9eXN5eU9hKQuNofzk1Tq+sV9lmWE0JjSPM8+11QqoThNEBvTjGcMgKQcJJEwQkjDlIdAXDNG67778Q9SleucJ+8Aoce+l5IqIaINkEQAEADg9O57f37cvbiFZQESiDAIBnfrDSFomCdAU2VLijGlgRD+9ddfF2X57R++Oe6PgmerumSEhwQiCZ9rrrWcQ208xnS7rjCj4ziOgw4JMUYgTBFEQUWe55QS59zzfn86nfp+SAkKoYqiWk4shGm9XnNBjTGn8/l5f7QurFarsswXWMm5EYRAM75QW2OMxhkELIieSCmtNiklyujC5rPWQggJw4xQ6x1G2Fs39UOMsdxuaYYzJRYe8mjtZLQNLqJwXUglhY8pJdi2rffuxd1dCKEu66pcjePIBMdsgVGT0ZMxRkoMYQZi8N4SglKCBFPrDGNMW3c+n7NMFUURY7TWLQ+G1to5xxhrmmbh8AxmHoYB+kgQopwb5+IwUM5D8CCmqiiyLIeIMMbGeWr7DmO8BM4tuqSFQ+ucC6HDBGLEnXP9OB/2zfHQSikjjQNAECaqKko5kyoTTKoqLzdFWVPKMcacEmt9Xa+bLrXtyWvTNof+ePQXw/b6sq533mFRqfPzwc4NJhADRvlm6JquPTkAPUwhImujtZYxjggzekIIWOcgCpxCxiWngFBKCMmqVbWqI4ibzebuxQtM+DiOCcLd9jrPFSY8RSRFZlOIEVAhFWcIQlyUmMv80BIYADIxGezmhDIAIURumob19W0P4tvffQN/8cJaP8+zHqfrV6+FUtaMQq0ARHbuTvsPcTJk0JuNSgkgQK8vrz45bXSPcZCcI1VknGlnnDHVqsiKryCkP7376fn5wzSpqtzGGDlnlFJGxGKIYq03xoDosqJgjJl5nsyUYAApWW8Ywev1dolI6/t+no01oapWQogYfQgBwqSUEpJyTodhmrWmlA/D8G/+zb+5urp68+YN5/xwOCIEsXUAAUqQNkPQkRDkrSOMEQCi1QtfGEEICSHaGmstI1RK6X3UZtJmyqQCwQcQMMYgBQhiWeZK8Wkemra180Qpp4JPo+667urquq5WwXshOJes7Zt235SrWuVZroqyKE7753HqIMQgYcJkSUiMIKXEOPUx4hA4Z8sqSWs9zzMhtK5rKWXbtm3bLpeKEIJPkTCqMo4hmo023iUfdPBOm2kYAABt2zIhLi8vMclOR6e1Xha1RVEsm/iUgDFG6ymExKjo+3EYBm1d33ZFUWGByIqXZV7k6vryoqzy5IKSlcoyxkhV1SklM09lmWvr8qoWJOiGEwTN2DndHh4aWbQQ5RZa5yLiO6FICEYgElGcw9g186RHY0zw0Vr9dDzkQ353dQ0A6vtGZcxbmyhDiMtMMiZysUKMKsWLqpYyN9oF79uhizGOs7hTL0peaK0hBiC4GANkggmRqIxIqIs8wWT1AScHmIRQQoB9ikxlU99hABOkv//D908PH+ahjy7G6G0MThu
hW4CJOZ9TQIkJbb01mjKZdI9BMtZ++PSMExRZhjPqYnDBBxCyskAJvf4SUkHfvv1x6CfBNQDQe5dCKLJlGRDGcTTOd117fXvz6tUrEFPTHpwLKYF5tu1op2lKCRpj2rY1xuVZWZbF8fj87t1PKYGqqsqyBCAdDoflYEgp5yl9+PDhdDoN42itnWdTlmUKIQWPGRBCUMIpFpBR/L/863+ppIQIhRBccHrW8zxbp1MAGGIIkTWaQEQpwZ99wXmKcRx6yuh2u6mqinPBKEsAzdYem1PXnRgT69Xau2CNxpSootjudkKKEMI4jSkFQYkx9nj6pM1IuRBCIIxDjLPWC1ywVHfO+TzPznnn/CIzt9Za+/+HhFJK86IoiyKTijOeKAYQKqHKqipUhhDS1kAEi6IoioIzJjiLCWitF2mItdYY0w/D+XwOLnnn+7E/ng6H4/5wPBzOx3GenYsRQCnV7e2L+/s7LgjChBAOUYAIIAgQRCoTRV1KKUOMnFIpKUiRoECAGZpzfzozwoObIAxC5C4CSDBm1KdovdeTG3Q/DLO3YRrHj09PWZZdXVxaF5wznDMEIYSIC1nXmwQRADHLFCVEShlCHLsphoAwVrmKKYUUs6KYjNbOcalUmVORQ8khoTCBhAgAjGAFSQlQAQGKMRKaM0zmw6dPD49YZc/Pp6eHjykmjKhUuVR5JgSK4bR/ev/u7aCnBJwe+v2nt8gZOx7a06lpe0xEkRceeAAjRfj5ee+dl1IlGPM8V0qmhLwPC9YrpciUUpmkhC07cSElItjFcHV5+eb1myyTMYZl+0mISCl1Xde27TzPUqrVqp7m/t27nx8+PhBM7u5ebjZbrfXxeDTGUooIoQjBPC9ijE9PT9OkCSEpWmuMcwYkEBwIHoKIhciIcw7E9GcobqEAAZAoISiBrmkhSquyAlF67+syz/PSWlsUxZJjLoSs69X1xU2WZe04/PT+p8PTE+fi3A+fDs+//OrV9uJi8Wa5ubn58PDu8enDYf9JMMKoiIFYq7mcrUNa6+DTNM1/NglbEDsA4LKyTSn2fbeMZ4tIYHlBHwkhIQbG2CaTWhpOKKNCaw0QhBgJIZTKjDHRByHEss5a1C0LSk0QVSoXjKeUjLNGT0137oZ+ms00u3meJ62VyhhXEOJp6FJKhGEwoXkc+7bbbi9UduGtbbuhbdvdqsScRR9wtM3z4cP7B1Vs797Us+0ZJVxZChTEufeeIM+xpXQAMYEUjNEAo+vra6XUfr/vxy7GKPguq9ZCiLKsAcQEI8Y55xIlwJjAhPlkvHeyVJLxxJKUKsToQ0gJ8iznRQYQTwAAyCGAACQIXAIeAgwiThAiBEACow8PHx9//vGHD49P524euzNnKC+K4/nkv/12GjpZ5NM8DF2XUrrY7DCX89R+evc9BPr9/pSx7P7+PsFweu5Lurm/e0UYP52Op/MBgMgYQ5C9eHHHGDmdTkIIITjFOMuyoesZ5S/uX5aran88/PY//+6Hn38WTGg9D4MGAYSQ5kkPYzcM3fKnQghd1y1XOCl5URSLCcp2u0MInc/nae6aplk2/atVVRSF4IpzEWNYOEiIMp+AEFLk2W63IxiieZycX8wPE0iBIYgxwxhFH6K30zTRAHYX2+riwlr7/PzMOb+6unLOfffDDyHE6+vrdVWDFFfbzXq9bs7HWduP+6fJDC4GSilFaR6Hfmg+fHjX971kPIQQoKeUTtP09u3bLMu22y1CRAjRNGcI4WazWQBjpTIAgLVmIVkopRbV3LJRnabJGTPHCCGMUiqa51J575+enqZp8sEyxrIsQwgv3NoEgTc2QvBnF9UYAaW0ltU4T4zQPM+llNvtxTjMbdsunzVjAsLYNvtpWlkzMyaChy4GYycAJiFUWZZLjHtKybkACaUyqzab/ePP3/7w7r/6r39J1dpnqqCZjTNhHFEVjI0uG8bZRoAQWYYxAFAmMgKR1noa9eJdSQnHiCPIMCQ31zdc0Oj8MEyH/Wm
z20aQICYMC4QRhJBgSCjaZpvkA0fETxoKAAjDgACAQEoJxJgSRHGJBgMxARS5Kh7a4f/5+79r2pZnJSHeTSBAP5lmGovn4ycs0c16t15v87Ku1yuCyHq9HvT822/+w/fff/83X//aeu29X5WbqlwLwW7urs7N4fnpOM0DhGG7vdhtbzabDUIQQtQ0TXAOAVgU5V/86ldVVXVjL7n48suvunPz7bffci6VEkIxRFmmEmMcIzqMXdd1WmulVJ4rjCklHEI0DIP3HsJkrZ1nbXQQPBdCABhTSlJkkgkps7yuACbTNMx6RAmUqxolgCEi0YdlFzlpHWNMPgTnQUzWWgRgJpXkgmMiuSSYdlPnvZdSLj4tUkqZScYoZaTpuqDnEALFAHJ0s92ussL18/7pYbPapgS7rplnI6jgXGRZBiIEThOCpMwgxNbElEyIRmViMVRbnDBCtEIIqbg1flGdhhCKoqzr2jm7pOp6a12IxjnftIhgbb33nnEiMSuyPMaorcWMYkTbvpu6cbGnhRCDEF0Mfd8DACAGCCQAWJlnWSZtaa6v1yklo522JqWgGHXGRgABpNCDiCAieJ5niNBC/aiLcpoGhBCmSHJpkQJQff1X/+z+q18BICSXNOMZu7DWhmAtBIEQyGmIyIeEEDROa22VzIO3WltKUJ6XhKBZd4uIpaqLlKDTjhKe4ry8ECU4ApqSc44Scj6cCeX8IoMhDl3jncmqGmVV4h4kDCGOACEoFkZnAAgDlADAhPzmn/03bdv942//Lsuyal0DF4ZhcH4WimsbKlJsdtdXV1cARuc8ZFBxsaLldnPLaH55fesxBhiXmRqn5pvvTpQJClmeF2VZ5Xnuvev7Tqns4uK6aU6CMV4UZtb39/e77fY///73H58eb6+uV2VJATTGYExijMMwYYzKKtvttl3Xf/z4mCIS3FJKjbZKZXd3d3VdL35YjBGlcoJFrli5qpepaRmbIYRCKEIpSCkSDCnP87wuau/jME1k7NuFvhadn61JPiAICUSUcs75xcV2tVotMYaHw8EEt95uOaXDMFBK7+7uLi4uy7Lsum7xXNDa7Pf7GGNd10WhJgzGXlv9adE4C0ypoNbaaRh9TN5bgFHOynPbaPMMAVi+aWvtMrcxtnhZu2nUznlCKBO8KEop/TzPhBDKOYaIUkacSwlqraMNpcrU7iKAlIIzRkefIMGLNxuI6eJyjTEGCUkmQVVYa5e6IqVcfi7L0ns/DIMQQggJEmKMIQyM/0z0CMhqBDml3gbBuFTch4ASADFRygmFEAFOM4fM+u6FWF0ASOUqTyilCDnLKJHjPPmhsdZDEFVWhqenczM23RBjGlNIPvgUEVoU4jjGkGVlnuecSQAixMQnD2C0szZmppALJYQkMUGACCowSClaF7T1zkgoQGIYwpRcAsmHESaEIAfQQYAwpAvBOQK4vtj9d//dv/ri7vYffvf3AKRqt95stiklQjDnYru9kJJzroyZj/vnLMtjHiklF1V9f3WjlOr7PqXkXdTatm07DF1d7f4szl6IN2VZSs45Tp9iOJyONzc3EeGHjx8JhZvNGpA0Dh0AcGnsXddhjDEWXTsac/IxyEzc37/UWhNCCEF5nl9eXmZZZoxBiCyeA4t7SgihbduFzbmstlNK09gv3zLjMkRgY6CMlDQnbpwJIYxzmuUQQp10URRZllHMpOQXFxd5WTw9PQ3TGBBQVSmE6NsuL8v7+3vnXNs2CMHDYX8+n8uyjDFcXOyUyvq+b5oWgDSamVJaFMWyWULWLFhBCAEhgDHt+iYGxzlHCDAmKKXjMENAl9h0re0CXwuZjePgrSGY2uCfD/tMKkEZYSQl0Pc9QkipZbM2EIb7cbZ64pwhhMau74Zps9msNzUntGkaCCEXFGAEES1AlhcKQdL3/eKx8edrRtd1C1l12SQYY2I0GGNKozEGgrRoOFb1pm2atm13uw0AwLuAIKYsv7x5/f79e8wFJDjhBCIctEEI+GR
jskulJ1j4APbHZpoMACC6ycWAMKYEQUistc/PU13thFB/5tgSQo7H4/F83lxeEIiCi9o4kQmM8cV6l3Ext/1kp+3mAjGZOIuMgkAQhAgg70IIPZIUIQEBWGjOCMB5nN/9+EfE4XZ3qc00aa1kfnV1JTgFABAsGMPJhyovIATWOhjT2PcAIZRA9A7EMI39OI4JQQgho58F+E6bZ/1U13WWZfM4mnmepllSThI6Pe+bppG5LJWEKFirhRAxgqWrr9frEMIy3xtjQghKZYSwGCNjTEq+HPQ/Cb7TYjW32EEvLotLrM7CiCaECCGWi+s0TVpPQ99+tpvGFMcQz+ezKkoAAMVECVnmxUKDOR6Px+PxcDhEkCjG29UagGStzbKMMTYMw7fffnt1cbHdbkdCjvu9c+bu5Yv1emWtmSacUkKURAhGPUcIRKa890tvwgDKPEMI6ZOllKYID6fjdnNxsbvibFr0KMbOAEQpF/W62+/3Dx+PlPCyLAXjbdv6IpcoaW36vu+6brvdMsaarvUxLJEK3kVCSNudtZ4JAjCuNQZL1MKpbSjheaGWDwVBsnziS41RSi1SoaUbhBDW63U/DnleLncJlGACQGsdfcAIEUI+Pj2mlC4vd8sH7RJgUnGVVav1oTnvT8c3L18RAubZxhBghNbacdLtNNjgrbUhuBDSZ/cyGJ0HCKHDYb/42TPGlFIppXGesiyz3jHBy7KECYx9n8u86wZrbV3k5OZKj/bt739fVznLKaQCAAExBcE7PVIKAZUBALCo3VNKIECInR4ePvzQj8eccs7KmcwIkU8fP1xdXb1+/ToluES4ntqOMQJAGPU8z3Py4XH6IKXM8xJCHEKYhznLMsmptTbLVFkV57btui7G2LatHue6rqUQf/O3/6Wex3cf3s1Ge21mrcd+8t6v12vOxSL1WsaYruuW944xNmamlPfD0A/DxeW2KkrnXFnWIYTFkM9ajRBBCF1eXsboj8fj6XRaBAZFVSKElntalmUuuH7srbWkWK3mcWrHAVsbosMQYQC9sZRSTiiBqB36eZ6VyqP3epqttd55a93hcJznWTB2Op2WmFtrbTt09scfT6fT0E/ehxACwmlxcuScLxTrBdOGiGRSpZR2O5RSenx8BAlTyoyx1jqAoPe+awdMYF2vhZSKO+9iAkgpVWQFRfjT81PTtX3fa62ttYvk13uPIRr7iQkqpQw0EUKyXC4T84eHd7Nxi1TCWi9VbqwfxxEAQHFaBNeU0hjjQmVdjMpCCFkuIYTWaee4MTNjBQawn+cY/fF4vr6+vb65OR6Pz8/PlxfbGHyCQCp2PreEkCzLQorfffPHY5ZfX19+Tv4KIcaofTiPbT8MDGGGSaAAALA4qXCWOWecN3/5l39RVYVzBgDVNM3Hx0+//OoXUsqiKKZp8tqsViuZFb/9d78Tkn399dcgoWBdwDDGCGOKkGAAPAAYxuTnRDkgGEWSQPq8a4PQOffbf/h3BPvt5kI3TXD+anfVT+PT06eiKBDBBDPnDESICWq1maapWpWU0/3THlFifXQhAAA3u8vFjBZhgDxYJIRXV1dKKWut1iaE6GGqNuu7F68+PTzM9jtjTCnL3famKuYPHz4cDserq6u6ruu6JoTkefb09BxCmGc9zxpjbF2ggmNKECSztvM0lWUJIey6bskfoBQv/E5r9QLsLsJGZ6wxJsXonJsBBADkQpYXl2S1WimRSZkNw6BtTCGeu0Y6yblACE3Hw+Xl5cXFxUI51lqH4I/Htu+7hfuZFcWfHiwghKgJhBAOw6C1ZYynlAAIf2YUhxAWkjNjzHn/8OFdnpVKZRDCu7u7xeFwmiZjDKXUWjcMY0ppnt6WZVnlWSYUWWFIsJRy6WspREzJbrcz2nHOg7dd1ymVY4zLMld5tjyZKaWljR4Oh1PTLVbaXKplEbTciZdiv0Dry2th4BGElVII4nEctbbTqBdWnwtOa+29r6owz3MV49XV1dT24zgAEDAlAPC
lBfdtU69Wr1++6trWrNfOeD311plJj1rrtmn2+z1CiDHhU0QEwoQowjLPtAbXNxdlWU+TjjGemuPT/pMQijFGMRFSppRkljHGxnF0Znzx+lZKCZx1uttdbWVZA5gDTGKaEOQAJuCSxwExiCBGAIKYAMIJAEqRkuK7n34fAUgBXF9fMykqSv7iV3/FGGvO3cJALvKcUjZHQxlDAEKEV+ttkechBOdM27bTNG42G+/DAuOEEGMMwzAopRDC1to8z0OKXIpvf/uPv/v2G54zKVVW1S9v71fb6p/8k79++/at1lqpvKpWizvi7Y0cx/HcHJcaKpUoiiIr8mjcNE2C82Uzbq1dCPMLURTCtAjBl7lxKY4LyDtN+uPHt0KIXKoff/iJ9N1IMFZchBCEYJTSEHyC4OLiwhgzz/Oihxzarutb7321rtab+ng4d12nlIoxJgi0NSFElatClTEE55ySudbae6eUVEotDJzz+dw0TZ7nWZadTodxSF3feGullBfbzdN+v1hmWz0HpyWXVVU65yHE7el8Pp+zLNPGjONY1zXjBFPy1S9/seiVp2laRGSccwAi52KzWS82G1mWcUqXmQohtFnv9vv94XDYXOx2my0hxNgZQjh10zzPSqlK5YVQy5xGCIkQLM2NS+Wcm/T8tH+GGEmI+rETkiGE5nl8evyIEKKcHE9nJnhI3vtIKWWUHw4HBPHtq2v9k/7x/U+KyvPhqJ3Vxr37+e2P3//Qtu3idCmE+BPoAa2dCSEgoa6dry5kgkDrkXP++tWbPxtCSSkhhBGAaeiKorjYXkYXASOsyuPJQkYiwSAaiAAABkIaGY8pKgAjDDBSgFJKCzwQfv31X/723//f7x8f/uqvfr2Y61OKCQRSCklZil4KwTmP0VtjCCEQAiHkapUpmQMAHj6+x5gqRRa3/rquzTR777fb7TRN799/IBD5FC8uru6ub56Ph/dv33315Zt6t5KEDdpop7//fo8Qub+/x4haa6fRAgBAIlJSrXUMYL3aXl1fSCnPTUMShJw7axcDdIxxlmXTNJ1Op4U8towbxhjn3DLoIYKtd58+PmGMVZZxzl2MEWHy/dsfLjdbmBDGWAhOCAkBj+PIOb+/v99sdj/++OPH9x+stULyoshOp9OsNUCJMLpMz1wKKniatfdecqHtDABggviAFBbr9XqhHy2VeAGuCSEx1kVRaG1BQimBh0+f3r59CwCs1zVBeFOVMQWIESGcAoTB4nkEmrF/enqa53m7Wa02a6Zkcm5xrFimAgwRAKAsy3nWTXO+fXFV5JUzC5YMqqJwzoO0loLNRkdvq1V1PtuUUpHnlJAsk8vuoiwyznmMsdcTjpAQAjESghln26YZho4i6L3fbFerzU4NXTf0kvN+GsZRq6IkBC0bW4RQAPD50GwQCBD8+MPPiko96kN7bprmj9/8/ng+QJRiipBhwgjGCCFkvfMuQBjmea7rDaXYmNk6m2elEOJx/zzOI8s4mhesDwAYPYBaa+MNd7Ys67ltp2mUNUHWA4wAFClFXubQ+jCdEM8AJn9y69A4OgTgzc3dvjnWZbnAroLRBajB6AwAQARhDFJKueJCqdkOzfNBkKxabyileVYyKkJ0C3lz8cl7UxRcCIxx2zSLGMU5t386fPPHP9zeXRdlOTaDqKvorfX+cDhored5XHLvlnuwMXaahyzLrq6unDeLxbfgXM/z4neyNOGlVC2/vuiJEULLKLFMLkVRLNj/zfU1hDDLsqqqFlUGWW3WH58eGcT39/d/3mddXl6GkN6/f3h6fu66jnBKCYQEOeCtXmhqheR0aXYxJsaog8ZbN6Th46cHKeWyPPXeNk2zaM8XGIsQsmwYl8slSMgY632ECO0urgEALMtISv04+hipkpiQ6BMGOKUACX51d7ur109PT8fjMaWkp0koSSnFGH12qJRqIXuaeX71+gWlWM8jpXxXrrthCCGPMd3c3MQIjqfTfr//+PHjer1mjLVNX1T5oko
TSgghYkxN00hPq6oCABpnF7M+htG7d+8O58Zb1/cX46Qfn6+qTF1st8659+9/xhjnWRlCQggAGEOMIYRxHAftnSGnpun79g8//vHp8BSMLooqRh+TR5QQgiFIMQVOmRCSMaYUhzg9Hz8ltEspESxOh6MLPiF4OB0lppeXO87pPI8xOO99fzyH5Kv1quDy/HzEDkICAaQIzkQob7A+nzMGvdZkLRFiMXoYRxBN2x1/8Ytf9G46nc5XVzeLroNzLmRmrYcE5kLGCKRUKsu6rrNzmEbbBX/uBynler0mFOnBSCkZE1prxnlZVUzJRep1Pp36vp+1di784utfrtf1MHaUcBsihigk9Nd//ZsY436/r+t6oTB4bxGOw9B1ff/y5cuiuDqdTu/e/axUnqkiBLcc5XmeT6fTPM9SyhjBMAzLjL0ojJfDsNAfCWHL3ZpzPk0TQCnPc/h//O//6zxNL27vKKUAAy4FxYRzfjyfvHVLgWdCNd0ZghTcHFys61pw1XWdc345fCE6jgVEcdTj4ia0uFwsexhjLGOMUqIy4V1cCo/VOkGkjdNa51WplKpWNWPs8Hw8HvcAgDdv3gghuq6zerLaweQppRATH0OK0Do9DL2UcnmWYowBpDzPJaOcMYYRwMjH0LVD17b3N/cvXrw4Na1xNoW4MD5SStaF9+/fx+RXqzqlBABcrWopOUJk0R+FEObZYggxhIRx51xMxnubAPm0Pz58eD9N0zhP9WpVl2uFhR51N/eXF9tNtUsJztZEECJOLthCFj6kj+/e2qkHGD4+P5vZUk4IZwJThFAggGcKxgQBIIQQKjBKMNgQAkacURkhyIS8urqDDJ3PR5jAbr25v7shhDx8+ng8Hr768uub62tKcV1mMYJ+1IwxygSVmEEMEQEA/fDd95MehWS3r766uP1Vgs52D9DZtu2AS4TR47lXSsTkrXPeOYJFAqhar7eXF8/vPyjOAMWHwzPHqCgKXuYgxNPpNE9TURQLu9Zpl+d5hP7Tp0+MMW/84XzyIXAlOedvvvwKU/r9t99QhIs8l1JKlVsfCaPRp+60pwwTzMZ56tojIaSq123beu/v724wxtZ6RsXh+LhI2hlj06TP5zMhZJ7HJfkLIZRlcpGwOxcuLi6urq7atu37NsaY56Vzbhz7cRwZVSTG+PLlS4ZJ13UAxIUpCSGQUjJCPgPDJITZeW8BSOPULzL+WY/W+N1ut1pXi5uXt55STij33gvOvfcQYGNm78MXX3wBQOq6Tkqx5J9BjKWQQqXnJ6uU2mw2IYTT4Xg4PCOEXr58KaV83j+G4ClljCNGJec8AuBsKOrKWsvO5/P5bEOnuMrzvKoqznkITnAeQtB68j4yKYidDu2+/c/9OI5ZLglm0zRlueRChjRf31w2zfn5+XG73XIunTOMo8VMTirOmXz69Pzp4eEXv/hS5rJpGkLyPFeEyqurq1999UUMoOnOj/vnaZr2zb5t++jTZKaPn56UyMaxV7lElDztH6EHu5ubpu8wSEKK1W7rjKeUGj+vqvrV/QtA8Tzp5Ly11iUAkvfeR4gTAs770/7Z6okCBCJEBD49fizKNcJ8tm/tOCefemvef/yklOKCeu+HYTqdDpvdBkTw2V/V2tOpsS51XXfujq8+ffzn/6IKID38/D7jbF0XJniG1ctcLTuDyWjnqJ6tECoFe3h6IgQwyTARRb5KfgIgHp/3nDFrTIx+Qa+qvEgh/vz0SAk/NAefYp6VZbHGGDJOhBBumnCWEYiOhwOIkFAJbRQqAykhONar3LnQNA1lDAA09JrhgWPy03ff/6f/9+9vbi/LohYiCzGeT4dFpL6M/hDCPM/ruq6qylrLmFBKee/fvXv3008//fDDd0VR3d7eCimbppnGkXNalqXzEf5f/+f/VubV0HZLroG1emFs/3lzYoyVMpv6IcIoBJ/1OI7jYkK9ENfKspRSLpZGMcHlGno+HaZJr4o1UxRCWNdlSjBFuAATn43vMG7
7vm3by8vLoiisj4sYlBCSEgQAFKXkksMEAIAUoXEcV6s1Ffzhw6e+7znnEKVx7oPzGMAU4+IhgwlxzmVKjZP2MC3OYafH0zybu9vr+/ub7W7NGIkJnk6npSE+Pj6+f/9+s9lBmDjndb0WgmEExnHeP+5/+P7HX/zyi9sX1zHGlCCltOu6GP3d9Z1zoesaY+eUkvUREC6E6Jvz8XhmhAnBjTGIkHme3717F0C6vb3Ns3KYeoAgTCBGoE3/9OlRyuyLL77AETptjA8WRD13zkVrIkhoMuM4tkbrqR/KYj3PY3M+AkghI9M0rIvVy5sXeVUbM+aFWK1WgucQpYeH91xyQmgmc5VLENPQayboX3z9iwQIDP7i9nJV1cZ6qx0jpHeDs0HiQDGTUo6T8SlKmWVZFoLzIXHGFn7KPM8hOADAMoKXZYkJ++abb9qm+eLV64vN9tOHB0ppwujQHqty8+LuDkLYjc27d++M9X/7t3+LMf7+u+8QAq9fv44xHs8nN2nC6Wq1zrLi229///33P3LOIUzrqtbaYoxVkT8/nbr+lGVivd1QzDlfAoP94XDIsuz29nZJ/qrrumkaABCjYtmOvH//Pldie3FVlmWCccGni6LQWhOC8OPjx7yoFnC4qMr903NwFhHadX1e5lmWDUMPMKirIsFIab0wxvq+F1KOw/Dw8HB1ecOYgNBhjPM8996mWD8+/rHI67vd1axH7yNn0kXfNO2frB45AMDHuKxpp2naH89FUSxglnPBOVdXZQSh7RpKKSdCKeWcdc5yQRivpeRKqcPp+Pz41DWtd+7u7o4LMU3TZx2Q9/PQE0Yvd1sQU7COcyqlLMryeDwuFhV5oQgh9/cvfbB69qvVijE2z/M0DQTDxc5S5lm93i62Akv3y7Lsw4d3x/3+xf0r5w2lhGKylQohggisJK6rrCgqzun53KYEY4JXV5dN01htSyU3qyqACBEOIYytiiY45+ysX7142bdtRJAoMevRzt7q4L2nDAIIrbXjOHdd1w+TqorNZuVjOOwbmNAQbBpPPlifDBGyGUYpJc0y66PkUsiMS1ll6s3renuxATBW5SZbF/3+5LTjghCQrI25yBFLMZl5nLpuWO12HNO6rgmGwZKEQN8Pfd8TsiiKbEhRcA4AmCYd4nSx20kh2radxwkjxBjaXl7sbq9/+P77v/u7f7verRFC1oWqzNvmVFWrlJL34Xw+zfM8dN2p6T9+/Hh9c/OXv/paKSWlZFQQhg0AJMsk45vNpizWjCNCUN+PCYDwmU+Abm5uECIfPnxcGKBt2z4+Pp7P59vb++3mIiXMeTZNQ0r4cDhRBtfr9RK+9PHhPVmCloRkx+ZsgplHeD6fOKVVVTGCk/NEkOZ0CCBxgdfrGiT2/v37cZh3FxsAIef86dOz1npbXhCiy0wlEAhk5dU1xlQbN00ToTjGiADghAIlrbVt37VtJISEEGECjDEp5aRtURSccx+sVFxBNk2TykRVVNM0WmullBjDvu8hTFVVEkJmPXLOf/nLX57P53mec6kW4wlOhbF+ITgZZ8qy3G0KrXX0vhtaemQhJIQQRCkEFyLhjF1d3vzww08QJgjT4fC8QOvH/WkYhvV2u6RHLitnjDFFuKpWzsyEomp73U+j9x4Q7GMYjx3EUDBmzNwPTQghz4rjufHOX+8u9s+H9x9+/vLLL8uiOh6PTXNys729vJJZlkBAGMhcDuOox2FVr8mKpJRc8J81ay6kCBknUuVSZhgCF7y3ASUwzINPtiiKGJIxzlnddR0EuK62eUaUzAXnUnKZSQSy3dXV4fT+4aendV5CgBAAsMhxgoohwrkZdC4tQsiBCBGy1psQEYgpRBhhmZXjOEYXyrJs2v756SQkgRC3TX9zc3N9cdk0DSHkeG7bab4W3BmbZdI4RRi9v3s9j3bs9z/88NOs//D69WsA4t//x3/48ccfv/76V7/5zX/xq1//jZ4GZ9OL+zdCKETgbMLbdx9fvXlxOh3
+7b/7t4KRqihvrl+8un9NJB6nfhrqpmk2m83vf//7//Sf/uNvfvMbrXnXdZTS1Wr19PTp7vbFxcXFzz+/c95jhKQs908PwDu43X769Ekogf+n/+FfQAjHYcQQ6XGyxux2W4SxsxpCsPD4irqqqooxCiHSWp/Ox3GYVpuNtQ4hKJWKKTEqEIRGTyF6a103ToRSQujxdAjeO23GcUgpGGeGaeBcCM4JIYyxBMES8pHlBabEhxiBxwh471KKi1MnADDBqI0OKQolMcYY02EY+27U8+ScWwqztmbSxltHCR2neRo0ggiEwAW3JsQAIkjeO85kUZScs6JQGGPOBUhgGObj8SilKop86UhSZqdTSymWRRaSq8uCc5Zg8sFTKsqyuL66JARP0wwwoZRp57x3MUHKGKH83LbBu7Is58l454TgYz8xKZgS09SXRTGOg5lGVeQxxa5rGWMAQkKpMUZxgUDq2maeRh88RmRo2hRjXRQv7u4EZxQiBCEi0OmBQF9klBKZQtTThGJSXFxud1+9enV1dbnb7m5ubwmAxhlKqJstwvjdz999/933QmWEc5Vv8vUN4xJqPespesg551Is1c17Z4wGEBIMVZY7F6w1znkXwnq9rqpSqkwptd3scqWM0fv9ngq+2e2c1+/ffmibgZKMimK7u3bWztNAs2x3cQMwffPlFwEkY83LV68ur65Fxsdx1kZ75wCEKhdt25yaI8bs6uo6k0JlYl3tplFPcxcTeP/4vm87JSVjrOu6sq4udhfffffd+/cPZVkJwa+vrzHG7z+8naa+KGS9qpw3xkyvX704N80333zDuaRc4P/5X/4LQsjz8z46LzjfrFf1amWtDQC4EPtxIozXdbVer2dtplkDEDNVZlnOOF+In8vKCRPMOAspGRcSJM64GKIU0oc4jxomgjDabjdCSUxYtV6rImOUZnk+z/NnMM95PetFtey9D+GzH7X3njGKKbbGzvPsvbc+xBSXhBvGeN938zwv9KSyrILzmGBKCQQgBY8xIowUea1kZrQGELx+9cVqtQIwMEaXBCRCYd9PznmEYAi+LGuEYAgRQrDe1FW9zvMcweisSwBCQgCGMMUUY9/34zRlMsMQUYQzlas8QxilhJTMBBcIQghJluUYI6uNdf7q9noe52UMhSlZ71NKS7wkJSQB4KzlnC/BBUKI7Xrz4u52s1lTgvNSLRQsPc+zHjvdO2+naZqGXtKCULbdbHa7XV3Xr169Wa03ZZkb67z329324uISA+TcPPTHeTQJorvLl9vrFzQvojbeGQ9QcH4YO0ww43QchxC8lMI5q3KFKNYmcJmtN2UEi8u8ZoxACLqub9tmmodvvvmdc6Yoy4eHTyn4q6uLFy9uIYofP77d7OrZ6m++/cP28oIJdnl1acw0jv3lxUWel9b4T48PQztO8zhOQz93b99/eNofvLN39y+uL1+em/N2u9qud+vNKiSfEtlsNymk/f5AKQshIoSlkATiV29eU0rbtoMQ3dxcCyEeHh6Op713PstUva7HSf/dv/8P/TDUq02R1/h//G//uTEmpKgyxaWIKVlrHx4evAdMSMIFgKA7t3aeEYYRxGmc1qstoTzGVFU1QjDGRAidph5gzLhMiAopGKVKZjDCrMxTQkqoVy9fSCUgQogxlatZaxs8odTM+nw+M8YSTARTjBElrG06a4IUEiAoM0kJjSFmWV4U+RKws92s67qSUhBGBOec87IsKaWE4PVqvTj0M0rKqsqKzDmfl2q9rut6hRDpuj6EYOxorQWJ+mAhCgn4YeiVUplSxujgbT+c61X1T3/9ay4zIRWBCULMVQYRDMkB65Y8MZVnUklGGaWUcZliooQjgBnlPljnLOc0xnhum9N+DzEq65U1ev/8nABo22Gchvv7+2X5HUIw1iKMtTFK5lfXV1VVzfPsnNluN5yzx6fH/f4RRDB2PYiWML6ur4ps++L+i1/9+i8vLq7KqqrXdVWuOJOciZRSij7jHBH
Sns6ntjHB7vcHmZe//OKrgACXNKagTZtxhDCnnAvJMcLOh5SAFBJCZJ2PGMm8QpjGFLph4IpTSjDCIYCmOZ/Pp9nMGEEp2Ha7QZjEGDGCGDNn435/fP/uHSK0XFWAsFzKjw8PlDDGaN+2IYQIwLt375XI/vpv/snN9Yuy2hAq6npVlavd5ur+xRuEMSEYE+KDk1zFEMtK1fUFwTQFYK1/8+bLerUpsvLu5h4itN1uXr9+rZQoiurm/kWe5d9//5MZ/dd/8avLm+vf/+GP//Ef/qEoCwQBF+r/AwHU3/3CQv3GAAAAAElFTkSuQmCC", + "text/plain": [ + "RGB4 Images.Image with:\n", + " data: 256x256 Array{ColorTypes.RGB4{FixedPointNumbers.UfixedBase{UInt8,8}},2}\n", + " properties:\n", + " imagedescription: \n", + " spatialorder: x y\n", + " pixelspacing: 1 1" + ] + }, + "execution_count": 1, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "using Images, Colors\n", + "img = imread(\"cat.png\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now let us do some preprocessing. The most important thing is to resize image to 224x224 that the pre-trained neural network model expect. However, since `Images.jl` does not have a `imresize` function yet, we will call Python to do the preprocessing. The helper function is defined in `imagehelper.py` under the same directory." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Image resized to (224,224,3)\n", + "('Original Image Shape: ', (256, 256, 3))\n" + ] + } + ], + "source": [ + "img = convert(Array, separate(convert(Image{RGB}, img)))\n", + "using PyCall\n", + "unshift!(PyVector(pyimport(\"sys\")[\"path\"]), \"\")\n", + "@pyimport imagehelper as helper\n", + "\n", + "img = helper.PreprocessImage(img)\n", + "# transform from Python row-major to Julia column-major\n", + "img = permutedims(img, [3,2,1])\n", + "println(\"Image resized to $(size(img))\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The last thing we need to do to prepare the image is to subtract it from the mean. 
The mean image is computed on the training set, and it comes with the pre-trained model archive." + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Image prepared!\n" + ] + } + ], + "source": [ + "using MXNet\n", + "\n", + "model_dir = joinpath(Pkg.dir(\"MXNet\"), \"models/Inception/Inception/\")\n", + "mean_file = joinpath(model_dir, \"mean_224.nd\")\n", + "mean_arr = mx.load(mean_file, mx.NDArray)[:mean_img]\n", + "\n", + "img = img - copy(mean_arr)\n", + "img = reshape(img, 224, 224, 3, 1) # add a mini-batch dim\n", + "println(\"Image prepared!\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now we can load the pre-trained model, via the `load_checkpoint` function." + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Model loaded\n" + ] + } + ], + "source": [ + "model_prefix = joinpath(model_dir, \"Inception_BN\")\n", + "model_epoch = 39\n", + "model = mx.load_checkpoint(model_prefix, model_epoch, mx.FeedForward)\n", + "println(\"Model loaded\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "With the loaded model, we can do prediction by wrapping the image with a `ArrayDataProvider`. The output is a 1000-way vector giving the predicted probability of each class. The class names are read from `synset.txt`, and we show the class name with the maximum probability." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Egyptian cat\n" + ] + } + ], + "source": [ + "pred = mx.predict(model, mx.ArrayDataProvider(img))\n", + "classes = open(joinpath(model_dir, \"synset.txt\")) do s \n", + " map(x -> replace(strip(x), r\"^n[0-9]+ \", \"\"), readlines(s))\n", + "end\n", + "println(classes[indmax(pred)])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can also show easily the top-5 classes and the associated probabilities." + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Egyptian cat w.p. 0.415571\n", + " tabby, tabby cat w.p. 0.230575\n", + " lynx, catamount w.p. 0.153771\n", + " tiger cat w.p. 0.129155\n", + " Persian cat w.p. 0.053005\n" + ] + } + ], + "source": [ + "K = 5\n", + "n_best = sortperm(vec(pred), rev=true)[1:K]\n", + "best_probs = pred[n_best]\n", + "best_labels = classes[n_best]\n", + "\n", + "for (l,p) in zip(best_labels, best_probs)\n", + " println(mx.format(\"{1:>18} w.p. 
{2:4f}\", l, p))\n", + "end" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Julia 0.4.0", + "language": "julia", + "name": "julia-0.4" + }, + "language_info": { + "file_extension": ".jl", + "mimetype": "application/julia", + "name": "julia", + "version": "0.4.0" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/examples/imagenet/ijulia-pretrained-predict/cat.png b/examples/imagenet/ijulia-pretrained-predict/cat.png new file mode 100644 index 0000000000000000000000000000000000000000..5f681ec7e99cfbca148cb89547f53f13e0eeffc8 GIT binary patch literal 123126 zcmWifcRX9)8^=*vv_))MMT|zORlD|z6_k+JqqSNisM@ zO??$b#il5Je&_yi?l^y(*FE?7oX_(<_r#ez*1gHd%Sc5AygOtKk_?D(=S_6=|eQ&LDPn3R`nt_X?Na+=e1RH07AE< zfB9??Y)AxrZrhLF)GXhPSlBBC3#fTq3K?_;_eE9KO7&esQk!uDkbZK^g0 zV1QddD~grN0>Md@4Apz@#d*bQnU-B$(29|B<6>(+9pZ{+MMRZtCPRlSWvApu49nH2 zYk;v16^z;(FRq&o{0MD$uu?c^WbR?G<#WBhKf%wN5SM`!L5asCK!QHg*>@(Qhxh66 z9k;nssIv7qY4C|qiHy)F%?eHPx2uysc@Xe2hV;S|C{YtZ?PpwLnF|&3BP|@oP(8DziA;R^(b~bXKbl$#1lqKkJY;7N~k~9%I)*ocAmxeZyOI*^* z8G^D`aHqF6>ptIevYQBA54X=zsgUw`q-*~rwFV81Pa5qXeu8F$SaXOy`7{?lKr18Q zDTifg9MvrP8V%<}3s|umvm-$44D5P9jjp zK7zE9QNqQHr*|P>^eyGW%5rRDNm&`*{_WtPo)f0wDt$_7ZR9E%`cOy*qD`w`1Eirk z-d!r^F)_)O{nN-W;E0LB#?b0Qu3o3r#y}AHtlEWOX}!0xIoeD#;Zg`m;_avU0b z@|`|OR#z_ps;dDh5HEyW-#+^#RbeR(6ss{Dz~j~3ehpbr6%+=wxYscXQ*+it{w@@Y z#&eilbpx6L1c6b`wb8YN8hS2QsUc|#-JAUie{QcP?TL6(9+NWmhTnaul=lc9jR<`7 zE{RZ%CGIH`?^{l6oP>2uHkG76*JMBTvQPTG>pN&ICVf{oPtSiwo6ks%5|4^bGtCTL z%1>iNX^7sZ;if^jb1<0DMqR;ha8U8ypo*TxiP=oZm0PCKy%Gkj;rcigKH5pkIhz-A zr5E8pW?7`MF4LvaimAJzQfTU0%MbK%*oz5C(qf%xszRjNUYt(|r zovT^zNE>`g)hkM|US=O)rvVOAnWPN!#;LnW?u@d~Q;9GPfXZsPF@XKqg+uNSWd9U) z!8DN=s91qmaRc^QN->@)`sx768E7DdHW7f|RM*zl5If$r<{AK_o_KO0n#eP@L{mCH z(GTG8ami7qkE@fuLBP~FM=&lMH6GDchW5E|!kCJsYW_&*7f+*`k&ydTZvqlII*mcl zg6KTdUDF6-w9e0;Yh`4YfUneMJ$qhk@$e_L#EZY?onAhZ(u3Rkl;Cjbq~_ReBizcP zdGg$xTc=eC>8X9@(k4vw4cyyW-!Mg6f-7-)wz<K{iLepB3a5$9fuJQQh5>a9eG#4)5Bw5e=q^_-BE=y2z73@(c$8^8Nfg&J 
z$RG{%ZPUZub%0hqyZGVZv9kvN(>Q2dYX>_gAnMncm;GE0JDLh?A`*(F(*RbWchkw~ zvr6zf$PKGC4O0vV{(?fvXAXqZB*MpOAjPlrT}4zFP3==fqG^}83Gf>883V<22sqDKu=RBz|lYKh|eC65W6A{ z{|0;Z!9oyMCBRW>o3#Crm*kvfI$>|)e(5a&1znf5x`6ubar z9Bicr5R`fvr#>DO{KQ&Mz{XpQ4y1>GIZ0AyWMoU3$`tEUFFr_f8E5CnC+WHg*$m0{ za)W-oJyJVg59v^?jy;~7G%mH6Vmb>7T;Fq!Un_tWWZ>(@@q^b^^e2iPn#-%rR;=K1 z2kDPHWK3geTFAXJCf&^@Q9s<5Dt2-JOd$HFz0>3a}svL0TA9c{&>I z>MVRzT0Tnq0H{u*2RDyf^Q*xZot%7#$HZ}|xPX|5ChjXlm3G}FQHh3rvZ+^dN zrAd*!D}ALWtFBi5A#FBRxPm^^P1(|9d|K&7i5?(zo66Em)MTo`0wFm76co*m%Xi)? zlX~aIEtG=82o_??8vL%MU@4tP)^n}zeub%i_})fLx4O5<;>05IpCsfci>r{AUIZ*n zElt3yw~rtgDRoy2K8@Ix06FYGAMmCOn8DUGD+*y&OAYFuB$e@V;3`l3`bjw zU@AqeD~GeNnW6H`FvL;6VJaqVBRz z-nxMYr-XK=!^U`V9uklhj=gp&(KJECm$SB-@0CYp#(x(dZp}q{C=v^e(jK;2jDiP` zO|Mv#u@@^lwCb~@7>1}9Sn1L945C2pCG5YXhGFyUT=# zVFZC!sp_bb-hL~ri_vO=URNaU>ikjgKLdkJL5Yd~6$;X_JjZL^X9R)FS!q*wqRaFs zPI$Nu2Q=mhoeABmhaNM%$7$&qG}Z*kflu;%c81@%Sw`L zIWHRMXlGP>q-y2iUV;Yz04%}O2u5tDgZJ!)mkWm_k3OtyfT@Z_)DhZvr!K#d!9C)@ zpRaS7`wduphy{Y+umZtNGoo}0SQkw)eV$;#-NiXzCPRirClQ2r6!>URy}WD@6V3j> z3S(79FJR>fYGrx)Tc5ix{%g6BOWgOp$1$O?YzA_ zQ3bwwFTNK7ey@m0+lK~v7J^TIr2Jy}@=)+Dloj`NTv8vYTKVWD~JvgA?b~r&^Mw0YexTeH-w4tI5D)};>oeHQ-YBmWN z9XPX&I~%5Sw6fESS^@B;w@`%uyGcXQd+CX8j!@GK01P86#(JgLidxS_VZ0v<$#51( zCr_(_YU+#%?OZ5BpHC{IT@Ht2zZnB4-7tG>KeZoVO_7|p=Rf*LH-Fr^W7kMfx| zt`0~Wlaj>CNHS{dhNJQC-h_T=xdCl4Ji1v<hnsVz&cTNnO%m~-06|L*KwKejG-6&4qc96l9V{-W z)#5gD%fu)7NtBisVeYy-k5i8?jQ?rko91rKeW!2pzi0NVGPcg~wJk{-0TE$YFJ5$2 zwFDzwtsyzMCI(bL;0w};nrTKe<-KoD`sTc^O!X zdh)gN_{#5>-UeFt3}yW#>&vkwqzbpU6DtYeO8<`Dke13@O&ggrC%X}1hAg%GF8xg9 z+Bf6Otv90R6RcQ1kGWxgSL=7}a~6ne{faP8sL zXlDAMD5-MVxr4mNv$5Bd>tQouzmNPVYn)%1?3dG0VpwN3>5svE+!t0sMNtQ|R%0l1${v{KrKy-QM+ZGxgrX*p} z?tT=zdeB$BHLpylb+r2hl=&6&f=>M1)e>0oSagp1=Bbi^v=*YA7Yl59^B7CZs98{Q zt=N*4-n>Iy5c+#w!nfJN23mo>Y%*`6z zKTC`B_)u|tI=l*DaZH0}GF}&2)O@Nh`pKsNg=qEuETzW){BS$wNxm}xRX60TwB~7M z<{>gXyl*H7gUdQ}nA~YZ0^V?o6w_svI{%OfOQA~9V$~!mtaU3 zb#o8T2}{^JZYNyvgQi0JQT>&0Ck8#^Bb0uRC~2xF6t`PbB&#Bhw6@-OSMtXI`BLSm zUDKmQ%- 
za7y7nU)##7=C?KUVZ-4J2BT_VW9pnSc%UPuOhfKoVNJJZPD&Orjy`IzNVALwrZ1@R z2N9ST@4m%W94AIo=$AYU@Q7~92r`EjzvV7|W#3T9p=(Z!{9QPlxi(w?6jOQFD&s~l z!W+@ra|7zCDsNzd=fc9Rn@dag_Fe{Iq8sCL?lzm2uJpp}Jb^{Stz{zB;fI=;8R=J_ z+_O6xZ^|(jtZ0|AM6;?_ux7m0iDpHx<|eJPivpdUe~X7pfgm9bKH%i za9mn=ZrCl~d0{P60eWEwNVa!7+z`cRuJQcIDhg7RrsK)OnVP{qZX9u94?!Sk$B)vK|rQS>YUkd;{gGk#4blEkVR?C;TrqW9D*A%O_EgXBvl{X% z=p-}_8klB_GcFZ3Ofabp%^%`l_kgIi_g$QwG3>fqds%kh_FX$ zk(POR-)8m>c7yEX7n(}Hlw~BOS{mHGq4&0Gu0*b}V5A2bs}Qrovp*IzwIXJXaB)r0 zWGGO=f6U`%1F;A->%pr%?E!+V4aeV0raW`YYkF178MPbA(FH~XydL{)!~2|;uVF8=eI7l<=DaCK!NP+R`i_i7dS< z?$k!CE?0XOl_t23s#Z*klQ*P(fV?FmU@W@H4)D zo9ZsJ@QvcDTA!PFJuu0`yL$Ol{q`~4TKS^WkXB2s&B8>2S=XU-X>Oh5t0~&3N2!&AKspYgFV;%8Lu}T4ER(L0-mHx!1z2X1d|Nq?;t7D6CRxnP z%+gMScxg*uu^Ud@ipJez4YP=~0s$T91!@+|)&4hZH`d14)(S+lm_;Low{`ZrDfz228W!38|6PvN0NMab@LCbw18d7J`D^;Z4E0A#CY#2bFq7IjA^2 zS+gS)Y8Xwctv{rv|0>=h0en>X%2wqyk6X%sb)34G>mPTcQz?B5BtE^->+Jh}dLEY0 zIDZ#`K%mv(UuhlX4!qnQ2_t4((o-gMjw~XEX^Ule)jc0;>vR{udxgg1hk0YJ7`bNG zcPXYKgy!zgncbtMu6Cv65cf&*AuwMI^ytp_k3%SO=SXgAqgOw13QNXv?Duj1~Ds zafrj7#x!lfKtBq?0Cr&1WTXS5G)1M^VqQZKl<4o8>ITGAgcX)?@qDK=3Y6&BBwCZE zQ)U8B=uHFCo0sPd6cyW4Sq%eTlpn1g{96=Hw({`OqIVNV7kd)xWWT@lKpoDIHj)=2 zLPB3!hp63tEI(8+m6X~)h`e(3H`4argI`TqbP@u5Iryuo;bQlubM!*qKKm{bs9gN$ z?ZdB59-`r1o8L`Mjfy{17}c|`q>AXlH9vsgo^q!l?1g4wJTV}Lz^_wq2n1?kbMhnO55*NFH7d>5n_teg}F8)OReWP~zX5skU;aq; zZI9@EwwQO`tQHn_u{L8|GOwl_{=<5KAsC-u3%Mibz^cw(#`>~Vj}QJzI5WK*?YB{xac$E zM1ehW9H0`8g-w_r)C(4NZtphC+VByTj?& z?+KnIU;(McT8=d#A>E|b1mv78jVzUw!Hq+^ZG+A z(oD6iSx#JzyP0fp84DO=nu@WqVsa(hGZ$#0%jCA{7Wr&k(O7h2cbMY-hej*HJ$M7o z4HhE35p^j`82^4@jj1L;ZE>5!CA%Jvb!hRr6L2u8Eadl6slsbiXPKV=4i`EKDr$Im zea^ZZ|BdmUfeAssE`K^O0KSQQD*MoKeYtEWae_bc>_f6ca@)c`a@XDmwUh3JQ~8Uz z?vvc4WuLGIq37pZT^A$gOKl7b$2J#34kws}fA25;Tx>5%M1~#h+~v>ZhcY0ibF$0y z*6cl_zm(6ct>~1NKvTsF(R3sE$&0xdc3P9kxJVwQZlAo@?suDmT4Aldw5}^9s82 zo&7>r_{$L4xpu`nUdDZGGfNfaM3~F8h`T|;tYhigGaX@I2zVSp|N1`lyQJ9Vs*PGc z=Ch#Cp93dE;rI4{XxZ8xPj?bS;e5zeeI{*z%eU8UcMY5ulwCycF~Uqa#jH$W3C=Kh 
zeofN6wY10F(_^{5X6Z{*_|`IRtO&+0^N9OX&kbDu7)2P}?^FP_y>X zh{vL0XV3f6aCl!JmF*+8?{+x|Jti5Ibhfs1VR@10#t1tNm~ zSpCatGBRSp`{D*8$Ir(puPt8N4MvciUa`*Rk#=ks3Qo^xJu}(H2N{^A5ozFMYWudXM(zr7E%GB;|-5Vn? zkgOKI2o4Xf4G1^W9&1u4GgX=elM#krFAYHY{6Y1}>(>qQ*7C3)YdR^fEAbG9&x3>e zv|#FM1_3aAZHRVHe*>TQZtc(wQ-Tz0p_zBbfs6e~gFrz(>T#=~TpUFcHSC9|b&(oJ zcDx(8^o1KD5?MnBA$+%(WNh>-ZG(HsdM$-l?x1^9`TZml;eLnA=~%M-T@U=t%}>is zM%UPkknA7)?ulXYX;^WgaAA^DP>{Jx(Q5z)&gpAYFeC$bg+s(h9rY^l`{)BXtPug5 zQUr}dL(i5 z6OMaU&eZuP>NVNZjQapY1r)7Byj z_`Dge_RddL^KL~+&9zXpklbM9F(*nQ=LF0x`nz8to_;NP zZoX&heR5VEaXOK!c2Q$DVl>_Ex6wO15S=ql#6$7P2qEZ`k_S1sEf-3)_`a5T(p5{Wa( z^WsD8((K(TY(;_&m^y;U*A(a<=VSFRx<)C6@7FxI|%t|F(b z@o}L}67<#gdqU+k1C)i?+mB;Uo4r*}PDy6IB3QvqdaOf$B*Pip5id`7GYJNnr_W8{kQ;7n-c#+ zBSkYk-C#)Ax~?t9!2v7y(z-ujEe%MMP;zI5qxG~Aw5cbJd7XgTVP9r`y%FCN*(!@J z`#!m_TNJ>I4?ji_!@5XG1z{VUyl0ahcO z@5ARFWbG}tzqP2WsyuDOb-yq#ClRIiTEs{1KGpigp@}dXAKr!Z#?O{!m(xg^ZG=~^ zXnK$hArS3yqoJ?X^2n*yx}AV^v-*}9htrdh+};;TYJAS%=)*SKHag`;JhqJNE>NR?+mR{3m6(r&raU*%YGh`2M zTyjPyI@PqXx4gUm*QZmtw+R~KUdj_=R9peM+BD40pY)NDy8conrG{ot3PxgGa&zcU zl^f@uTrP%cL)4v2p$6h{GG@N~B!_5LtCo7>(7X^EOHU$GpNqn9bD(@5?${?g)Z2C1 zFwmVk;D_iZ@GHGdN6M&iSKvfWcIp$I=q!5eU#<2MmxFk=`)6jo(%>szhR)xoZm~GI z#?ISCBjeBZd&xFWoCyLLWY;!pnjqs$HAz_Tag4iA4v2uI%zOZ+O1;81h#?%;6^+T( zL^dzxX0@}GQitl&)sZgGeBw(i6qD?q%cTozEILI&Z`d!~Rvp+2==itX96{lav%T23 zI8^)mlyyx(&SZGMWW+4gYC5g~>MCgaE$f9!@-|nCEPh+lwETTU1(7Ls`Cy|Bhtu+w zGbU}ib#^EW$BLv*Y{=FJP&sq`URG7va!1sBN~5x>SaQ93e;~5>oQh2@7T)S%6zkxn)hscK8Ok@a7UADU!sjQ}=q8jnY z;m)i?iYHl+-dGHKI*W7{o9XASs1r5-kzTCDx{?}hGZPya9ZT$R`nYjJ0Zok5oP3Bo zFe*FEw9F1s$EH3!?CJGT3?mM3o3pNWvA1rXKV9^(%sjqiAEismL|3Rje@pc!c(f8b ztvBKQ@87-J!jOjzjorkskJu6|k93z1h}Q(t#>tb1i9ceFJZYJPv}Jjis+lGNRkw0> zX>|~C&Yf@XY~~fx?a0r3wYztGy!OwB&uL~>ccpB*;LEXJO;v}!_tZ*5RYy9|iYe#| z^{akVZKCGpe=^mTcE9rS5p;N(tfmlo+}EdiG2OMdbuqMX+;z6Y99g!v z$vNUqD8H>}6a(V8*Z!x8EKBx)bAJ4w+(zsDObV{A-@HUz8$a7zdvr8VQWZOI9r{@k z?cZtJ>mT~U^z(K9B)ZP*Nj*kt3Fv1~ej$D8==;A#t&tJ{_Q6V{Ii}d|4IIh0y``lk 
z{OshMGEUrFEDOiylAKE|kip?$2P+5is%QHL8}i|6WW87Tzo^pKzN0NQ<;$Yd9(l*1 z05XZMgQnzkFPh}|u2#iFf@_jTvKR5RvcB#@Gmu$&*%l(#wvYN!#s;PN530@3PU&ocPc!W*X+dC`R z%9Q>UFDR8`Bza`DG}hx$Vc5+hus-t9w;|^eHa|Q$8l&9 z;>}ArGXV2pwtc+pV>qO^t& zI1g;2qa0FaR&tZGkO@Dn7ZKA2KPTZfm?9=*6svj6pfzmZORK$lUqqApg7a397Pujv z5sWU^d~CEzZpC8#%c}@tJ*`&tcxbYrG5kZ5C_#TC=jS#TNLTH2c|zTY7)Lfm>MN_F z>H2w%_#Z{FpE-buJW*Kh?E?#c zm!>_`{tft6JwC;gb?fQ2Phgj=`4QjD)jBXA zjpXz!*-!@3%SqSkm`QMrR!0P$x+?Nn=Mhh~H;#|HW%z)*eR&rjmlh9> z&JP1G6;17YBslE>E=2i|{4Ec(lhN}LoCZ#k-q5wUyk5 zE6L_UBHfMZ!}bsvWEo$oAJ6KU+%m3mQ_kUEl$VsXgc2p!KNH#G$grEPVEOvJE2u9}KpSm`?&p3&Y`HrOlE}T37Ey_W@fsY%_ zJL3-r;t9?3rNQ``CHbGrnmA-lO4 zUr`_$DD>Y>w}pJEwox_i)V$DFnePt2>c!tf%I3(w73Pa=hl?K$=NsJ{76hXVBR9u! zUNP*a^*b%cofBwhF@x)dzz<+UF)<#6-tBJU+6Va~18EP!(N(Ps?@l|ihtzn2s3%$U zJ!idD!)CZWG}ij%BLReXNrPv4cH?_ioq_ka4AE1{M@w#5LYd>@(KdKdX+TPBLHxYvgn>P64@Pl93Ta3}u1is|h?!`%Kwj7i*bjwZ& zp?FpfGBc5Sk2myl~#1&~i$KGtwI)T6{NJFP5@7emU%vve3*Cd4 zB?(u2Cj0;UT`{6|{`-9Y1mRzdsca!H zhn%G@F-wdvPxMK2p6sAGD3aixV&akHf?|1A5A&9>2f=)-&)0=IqBN3Qw@=2I=NIPZ znUZ$iWs8mz47KCMpL1y8_WyVgZX)k$h~4@TAW@^Kt%CptM79&~EpL9W`=N(1Kk#C2C}C2bqMC86?Z&NPu&aG(vLU8gt>gB@NfRSIR^PyQNTdXuU> zDB5{^`^5oGkhh%t{x@=#sC5U22zj&pc(Q39VV=w@^u}qt+lW=>c7(dLkr)(TcYO-^&JqvM@<70~LE3NKrU8_|4SAAZ+a29*< zck3Tv;Uw~6BT_QFmFlGn#YY;AivM&F!+w=_)~Pu%R)pn$OXi+mmcA=@Y4VD&DB;ePbMe&Nz@% zz(rkt8T5*bG$zeCD1m@i2CE3EwC_%76^wf3e2bmRDZuFraBom_NYh24cW`C5j*hQH zIpc~XrdQ=g=1y*uBxjD;>Nb=gv>4CGjMRsgfg>FA4N^5LJ^T*IghnyDxT@^dI}K*e z&aK`ybU^ox-j4~jNY&q2S)#wsV-P;%VkQD02T#XUdpi{*$=$nYkS}D z%!ENNL@!o$PQRwmQPCe~vqelIh}tfXPuLSn#mFr{n3Q?mvpsx2;aPp%H?gnP6%zqW zv3ItrMn>wj|N4iAc@HK#bTEt~!;dI)%$vo)3UIucefG~U_+1_dnw6Z(T&>FAoK>!% zlgOOBB{4E$>&hv-H$FEvldNWyw3ocJ6KHmKc!H>=E|uE0BmAq~9=5eO+jK)$b?Box zt#Z(uIiJY)*Y`ZbclpnHvd(0NhV$E?KMPyAUM0Ljz2D_(y9)}`*?f>gHm%+LVUrc{ zDSGbYmQAKytM?tm3sndHyuS9ywU4v6nPfs!g*z-KJk@is=lj)>MHy6;Q)`E=_DST^ zG3z`8?t6$%VlC5%|C`e$q>%J*1`F){N?ZwxLF4?&QkZ4Qk_=zgvhcL_4Sb1pLDrs& zU0zs_?a;(#aOK3yw?ct^oX!XeCp`=Rb9ydwfbKg8%(@p|lN 
z5&yG|v`*;2W`%{|SJ%KbhYL#J`LDjNqvK<(yyVsC*EY+O|4qf;u~u-0dvbqhx;b%E zXy_ie8)TNX_T8n*)pjKY{ng!_B~jB=<2l|Hn(CJOXmcM(A~VMpAC(wo;kwUuTyvB0 zg>O}7F41RvH1;*^7|2=pdQHh0<)M-MHI-o&4D-z0h3e#s&7Y?m%X6_3dAYgs9gz_s zn)R(-OUJuD!6)04dz@;gf25JS$8G#7mC44pnRik+Oz2`{=na%#Po(-S$SG8B6MUGw zavsCo&0oRN43&0+NQjo&G@*N=l5%s%FHOjJp8^m<{izTfQi;Xx&!5hg&f9Esc;vyI zw=mFtGDNYF%cazM&DS<_*(aM*s9U8&Kv+9+WkPM*&g1{aCaJzwKdw>k$8ff6ljPgv z`%N!8yk;hwP?fJ`H;sr5Mu~O-US!`#cd}e^SLXwH-{!Cgg%LJf+H7;1JjPsS);Ih; zvpjmEo{I$v@%`~kW-Tn>{_J02NY203E9>Z_C@rO8WL=U_dovlT4YAH3kL5gPODE%e zS2Y`56o~0YQNTrN%RD&_W0Ashw}Yiej+swkgSqF z$ln=snmM~a_v6u(yfOVybVGS#3)W~lpdik)!01%Q#+!VqY|P#ioK})PUc{^Znae|+ zgLM!M+<$_!qU@iqz|SZuorf)9XV+j`cNUJX(aKyW`{%apmnGn*N_PJ)wn_YFy>K=y z(VpC=7IIPQkTldn^P3j0%wqC~H$aM2Ober;(z~~P9o(S9AwH-vZMzBDdzyAf33%X}|u%}3d2&zV@c(O^iL&0HE&`b^KzFs5&9v2hdq zAh^};q~LAu3u3G`GLGy~RyrXgZZUr`O^#<9n!S4#rs|z7P^I>7fi@NiP=25gF z;1>s+pg9OgI(l!KF!efQ=*522uDDQJI`pn(^&gm`- z=sHJ!rgcg62AGw{ws}MD0cC@8BGqG=6A&i(X8K@-3V-BwW1)%KH|H6`WiWt`#&k36 zpzm@cB5!TgMyq{wF1GD`n4*zAPeyyE>>EgkWP8eVIq5zo0oXLCm)a*Ge=XZ z9HyeCqNc2(mN?`i5e0so*YH7x@@p1Q(Gbwy#!*9GeNzPC+fv1B%>Q7uK6#OHhr7b! 
z^(i}tI!g>U3}6Zo5PFsQYLeBy7I7oFmMtSdSHoR$$Y!abLGy{TNd1>#kRFSKxzO^W zrrk+xyxiy8yX#sF(WgP3p7~AMv*DE%1zJ)SKt{5cap}^|?~UmaF<@gH`V=+Z7xE$* zFZsBdNkt+rY_%OrPB?BaS_} zZ20ySru6y7BQmXtq_!3`#8zS^fbW%Xfrz0dEsJ35H0Pb9?Wdm_3`d#zKUO}wA1vFI zIMaRm7r%BMKUFRKEsoe2wejf5pm%MUqFs#JJM_C)+3f!}$N!Ic zVsMXM;@UlsVY)MeDF=(|*C&;k7G&$jt2$5q=uRg$Z~k?!ebc~0sE~bXRxjEbD){7< z(A!%|)QEb~ORQGTSk%~3@yZ%m7c3=$S@YEJh1<3n{Q#52+3*X=Rv* zQn$ko0Rs=95-FUyv;4p}MxZz4{#d9GcoRKjrgAtlCmEjmee{~;=beAtzKPBJ9oDT8 zJKg+iAsq^HjZI&h?7vg&yE=nUcP9UP)Vq8K}klE0SK)gyjzcXi9a z_ZEK!IvM#OJ3%wxgVl|7%f17@m#90$hWDb-cy=cS{3t*I@;h|>(cFD*`2jb#q24Z) z@U4Z@6A40(?ti?J2dW60dPC3JI}KI$RSbDOxy;EZ&rH&+dv{-8-uy^&WJ~yBZ!U3; zO+>UU_4>fv!-@b!^rpIugY{b7`|=-(nQz>x9Tgi$QPVREsQ!J_)6xF(G&IdeC5-Ib zWI1uPlv}-*QUbP&M)O*Mw0Awy>H`Z!HjSI@J5~3KB8AF^U307nM{X~fmO{tgbh-+b; z5Lbv!Qhx&K3x)smzgvl#>b?Ek^tw#Hp4?0r@AhcEG37R^1mKNxEjcx4KNZ)*Z$hb0 zkXBBNDs;6rbsu%xu_d6L@mor2z zXWYH-^J-^g`RTCNms%n2T{2WTM4a23GQM`p*tpcVE41O*;e3AU;+V?;`jg^!?QmD% z>!Co-+>2Znu#Oqj*P<0r?lczCd9uvw;rYxqFHfj>X{|>5TJb>DJa4UC^AK-x^2CVR z#echDJKX=D6;>sI($N|{mJ`#rhAVHT0_3G6&Yq`49<3-{{G91ppPE;TRR3r@=M1)H z+X&}ef1r6qOmog30Tj*DaK;NEaE~!!U03%p zd~Dn1!y_VMl<%zUEC#9kD`9KGJy033LOc=MSTDY9<8ob}&+B#DwsqaMZ4J-N!97(X zxqFuEve)ZKABeSTz#TpiQFLtX0IK#}7E~1#6sqv01C}g_odQW3h3@X|!*lgfg|Urw zJwL4L;?pm_`fRBfm&-aTQ@X&TDDuURzF3y!XMXOdK0JK+<-4!e@Q9IdHM8UKe%(AX zu1`;yp~BF1eteLUoZ5DN(7GJ$?$f{aYyb4W{3n0%!$19*uYE7wAI=|U$kZm){mJZ+ zGCO5I$=x?1*L4lA?Yic;#_;qMqBsIupO@Ry{bUW?hk?z7$!|E4tu#I83c2vY@)_?1kGH;nlU1eUJ7rim=+$tTodD133wMnN5c! 
z36yRe2&6{@RW1dSvY-@3$x1S*&*!r=Va0CZm^C(E$F`+!8BqbD?$$AT_>zw#HL9Vm zmGDt71r>{cE3m9OD^ThdyQH z;pWZb(}$05C-=NuF&@7^;~W3;fA1f?dwihR<>}nC1w_r~nnOU(@JwMOA}ednvF9A- zJQ-0@Xp>Py;_BRx=@}{=KyGVZu8;Yy&V$C1FQ>zN|LXPq{rzk< zkSoC)T^f{!p}+C!v;Xl&Kfk^o@1HLK1G-EzRHYe`SrFrFizcX&EVwF#RDmYTumOnG z6*%^}wdFY9U1A%)PC}Zj;X$IHR$|PR(!eEn@zt~%_uz!(c4-F{&8b;40TOI`@S<3c zs0>kQyO$VHO+?jdkI^V=O*1oxuaRp!+ms~lZEidwEGCYp80X_>+V=K@W3euv%Fw zNt9DnDn_jClCI@L!F3HbQ==S6n=uv72e~baV5?R}bs>{@1?# z+2>!q8zET9E^l7F2IP8K57Xi4>3vKS10ODrtnvQ+`!}y%1Gzpt&7Xhy5B|Y_@cEzp z+5i6k^`9*F%M$i(dz#cz*hOniP*qrKz&XYktF2?5w9iYoR+VEq@%;S0S*i6t%XoVCYx0-A_RU{@^~ukiK05pJ zyYqu5yB?-4kXBVOythTs6r_XFfXW#fiFipXoZPV1W zHS;PaeLS9h1*hElvt*a6No&*Gg(c`+sK|;T#$@COEpL5XR{*LlV=Yuu+Wo6r5sWc1 z(`c14qJR|HN(6wJsA@)4Ya&v)M#QL$eVdmEL=+J5lYmt~QB}#vjLj1$fkZ`-Y{tlh zW+t<0x6V%@OH@Zzg#ePiWg=;ns0vj@1fZoUFOibP8ik9dL=`nrgFy;{n~O?>=s>FY zR09KG#I|1AEM01>PnYM1OcbNwVk(vHUL~s2n>RO?^ZM}gczb&~Z(}+hPlqFb%jNR= z_07%Qtv%fQ_y>Rb(d#z~T+jaS;d!xXzPr0*@kf7h`|Y3m&ENQ+{HOoo|6#}5KY2JW zH^;6q(xrGi&qeugUb8$QERpoL^8xcrthYD!HU;=W7prR4D9=%0NUX8?x*=I<5v~of zo-?n3LXl+yS$kB(`q*(u$o)t6@BGVo^}qG4U;lgG{FOV_ho{G<>$L|(${JMK9^TgG zF0}o&Qw14_Xvm_F9OFvS*4uLT3XktwL~iT*%cW8jT$X|d(sB>O#9)?|cp54)nJGE; zxI}3On_pSGYqQBr1#HWZ7LYJly8GBNDa9^ouxQ3crI^U^xJd6KybC?9lpD%vcK`_I z^TV~zi_PgU2u)Pv1zwjX?i8U>m_0Re2OxpU)W7-L-#Z-Vd7Ae?rJqDWk|9y}-@RK-e-K#Dj?ZG>o8!AL1@YX+}7>#;ra3FpM3iM{TGi9 z4{zSQZmn0w<$66FXR|)m=fnNo?c0wJfAMc0&QD)ieKZ}qnF4ox{`hpWxu4fKwkgU9 zMda;q`Sj}_uY9tjfAsdo+L)JqxS6iFl#Ip{T8X%?+Xn1nK_A!a_38P+V+f^0toM^0 zwz0*wX;$}=*xr5lgTMcsU-`$s{2On>o}bQZ4x%K{+eA>MC4^?v;Yey^w5CRGW0T5? 
z!96cg_7o^(@M(M0uKoV?KmEb)eAzF@>A1UwQzc3w1B}W*DU|8%Bg$KCs8!OVH@SOz zTJEjQ(k9`g?9@pFs!bx?BUH6pGiamDeL5ZbvfSLvx27Vy$8?0UWM>X#?s}TtOpi=a zqwY(z6r~u+ip&a#NTlyR9ZawoG4xme{&#wB%QDZ?44@DcfTdzDr(gHP)t@LWDk}q7 zsK`p+oeHXICn)SBrv1xUVC^!01$S*TtF%%n251Qjd$QL{^{8S=MzBzxm5EAZRfbHa z%_v1lQMWE4mXtKwgw`7gYge-%ObInXv7jRnl?GJSsFBBrv zlscauBVw5kx6^4oKOVpLbMt!rZ~ow4zPWw#R_tiYo5SmGAMZ{&J+519`PoOG#26pm zeR%u&{_5v9cgK(49?#D|{LZHzy?XU(QfX6c^#sA5X-}oEHLkS>C6Ks|@pQg0%P3V@ z(6`6C^Yb-5-rn8K+vVxUKmP4s{=5I%zxCTUnp>{d%bJ@{(~i2Uh&hC!)*DKz$`pi3 zF|nWwp*%sOhfx^c1I=xx#bwE{@UOFDHSzQGc(m@8c{xiFWC#)tf}c9 zmknV=A|wl58MWurMMZdKX2l-BzWY}&Rcx1k?*RkUJ-IR=Dh1hPkGpbS@;vbm7IHKwx8yZH()-S!)EstHPK0 z_IzGh_3F6jjDl(_YcJXGBAK;C4EQK-DM$-%IZ#)HOEW#p z3mRmuerRG;5mBTe$nCmmand%^4!s}Sw5X~`*J)26ELLW1=`J;Dgb(+vD%bTw2Bjv` zc~a$~CSXSG7QZxQ6U{`vh!8zIa%4pfU+v{&zoe4WtT&A?uu`E4t=cnc0l=tTX6H5N z_2Q*o5Qb#M00he2(sKV_QMq#*sHFx$K?O8~L?axEYO3%^Fday(J=|airouC`O~y&f zpgPoynqn*^TkK)(0wc4ef{l^{BccK-U5b`fE}jBWlV&Ep9b0RZ6sXdI6`+E>FQ>!t zbUI0!lN`ej$D1Nzi)or3KRn#u-@pC%)7%E6zW--`@vFb`OJD!SxBl$U{^ZsDX=-*j z91cgWHJ+c&cOSlgcl*(AeERL*`|7KN%{RwyFQ-OCj_Kj??vqcy`0&o&d~|QiqV4wW za&3=?f!nv=+%!IY_0134kM3@7d?V|zaV&b+E{FGz?|N`B0l$oEWSu4vZI72Ly~=St zUuNT$>y6-l_V4}nZ~W?CpVsRa4<9aLbsu8=8bc@mLIBfwp0_bnc$jYHCHO1*kpY&rGb%O=0MIe7SU#c04V&Za|k>lFf_XF6kqZ!8ZGZE9*gB%ymu zOb*!_5nt;{QC1nyn=06W%Jg!=RZ6qj)TEa{rO|nqoTJ-E=Rg5&YEy_9OF2}>GPe1<5HXUtbgXzS45J3*~uF_0Q zZJy@SVcWLb+uQJZK0iJ^JoIkw-hK7OkAL{-Ctv&IlaK!G_s^H7_0Rs|&%gUKKlkDB z;dof4xow%(r>C3wkfPi3I(_oVN8kA7wjmvK0dD>)^V-E)cVVeN0!ynPuhECrib)SK}3jD>k`{Q zBG> zbUDPb_<^ljQ)q!2wBun`ZLZB^LOpw|>*l>3tg5O|2~brnkh>KlN!9j3n^vvzuHDN8 zkQB|GU5;>%LII^a6Jb~4LKIcV%7hoRQuej48Oy+kRt3dMM3snDWC(=eRU|VKCFH(@ zOwptbDy4@u*;J|r6QHR@X}RtpP(f;y#TJts9Jy`d>{}*o@8;9t*yaAySD*E!QAKIf zvWFlDV-MSaubzoh#{K<$#K@@o`m}Lc>{po%H+Q!`diS_` zeEXZ<`u@{jJYUyu;Wkdw(+__f%QwaQ-RZ{CZ|^>Re0;pD>o>pt(f5D!{`KvvQ@2>R zo73wS_=68Wx@+^-Z*J~Q{)4~s_x|QLe|Eb*uIG1GY|%RZe?LV}S{sp$e!D zs_L9mr%r#~{@r`+)jiFebBw#mhq({=zNEFJHJ|y6ao@k+bxE=5$nGRXq+4xvkcxz< 
zTPg)2Of{jn=^~$ju;yXm4saT01ZAOMRGq^-NmrwlzN|zspl_?Gnz?WbL}pIv&AlxJ zd@wh2!eH(K0YD|O6Eq++#HNNo;MSU3s7RwGfn8aFM}&ccHgiDXjNlELdaYV#P}j+! znl>|=n?8(yAS85uyt#}gTRdBJW$8P)uc%LERJ_u%%Hi|fS#z=aFWC# z^K@Kq%G=MM?uMP7BI;q1N$H$w3RZTgjf@XnZq==ZbVjPJ>8e}}s)ubZMkS?ub@f%(T1v6#;QZ%5AMZXpe*BfI z7yE~Cuki=2UclT=)-JB{qs?~Jt&UAozdjwt({#K#@82EdkAC%+-oJQO>%%zjrv`Nl zA+UO)Wek#7x_+8#@+hmGDwtLgAy`r*0D;V867|KJvd(278h6uKV zQ=jBia$0*mm^5NW6(ULOv(+#{4YmMaP9jDSMw9{>ToEap4K$3b5dn^&;T3Wrnng5G zW-G`Xewx}L!sb@zT8}oiXyG$BcnzIxF=tO20C`GtP%uZrU5D9;AvqXKZH@@%pk$fZ zs31&cCb#>slxC45yGas47D2>tHd_!V5N<@$mMof?`@;4>1WAb$*kfV0@pX)E@lbL#tBqh3gN<#VORuNL`WP! z%E;)1kz3Pn%#^%hoh{rSU0pnT{OIaz+oeIa!(})0OcFDpj;&!yI*mor@i5moS#ttr z#^dog&8O>^rCDrt+tq6I`t|E>wQ9|5p3K|z@n)4*x1W7__x!~l{}X@m2jBV4byqIe z>+k*K2N|E1evR>pn}c+#b-B5@8Nugy+I3~S8W1sX1wk3hY0j5Je{*wVb=q!L$l1(u zH*7A?F7EuQT|R&GOMmEi`h5TJfKX-|!(g1puInH|x{_C$c%zO651>@YkUdJ@wbnpqu%>7h;1NuW z0gFLhUEAW|Lkf4vOsh;bLnsIaY9yneu647k`J@p&$^;e!oFq#UBX>6xb60~Pqi|4e zZs7{%Eabru#&|?XnZd;nCz!HAwP*&ld26jTYq3Z&Y3bKel1Kh#g7he$(N4T4Ncx&ov&D6{z z$imHy5RypBS(r;Meb*I9Z*0VjNU*3gW9iXgMif{AVZ_L|xMy@3vO21Tqb-^@6azs_ zD9K5Tm>oUXfSd@CoX{BoL0r*{2)h8*iRamdoL@YC{Ot14X0={-!>;TuHs?>uS<=jU&|_2TnSKP{3}`Gi)=a#3^@ue{*wpcYoV=-EOyQtxfYh^sB>h zZ0`3DclY<#j4^a&ZZ(%ua)LS~!b1G~`CF^9mgbk6RXd(G>*4L~S&y_`Z_tFv)+^^Se<`$`}rrKI-g+)?gBqV0ZOTk=-RMl;st(oaE z7x`(Hsx9e4uwaz!`n*0}eIH(htAo|asy6_0QvhQEW)LB7)oMgzaKzwv^Q?kVG;kzW zZ*v{1HEm1d4I`GUAhzbgVwapUMpA&)v$mdB5UF1ZV5kY|z=wWVN>ujbF0tgY-T3h};UwGcJ zI3A*TBgQ1l#hG)?9p}Q*rHq;0q#eQmfY^x^zv!}DK#M>oCU7*6MeZ$2?#HOmC^6KjR;t{%k=bPW`*4^3FWj%}+j~;J!{aN4d zy8Q9=XSrlN?Oz}EoAnvMq_sZWJ>;B*VNmsw%F9<@9QKFJcDp@0pXU1dL-Oeq##C`7A`N`EeA_e$5=goTEcSAu*4HM@V zFJ52?tr(3Jy^ggIw${L-?}u@!bJNmwxhq8IbBj7lO1WQ|R#ydFE@1?L05ML}IF70n zm)SF60F^X6e)iVOwSIU1QO@G6HJuPlgb=W{*|ZU|kPw$J2UXAjqlkozhRgPpkU&A- zER~k)H6Rg)@Dd+D4|Q)5vra9V;Ub4N2M336HxEP!N3UeMY>dJi5c1+N0`OeAoV##b z;Qp;OZ8{z&o!WFfP2)Jv#;dn`rVtQ-XkI;Fsm`0JYE!N5hCyzw)>@k|=uIBQ+|Aw~ zJ{XuVi4;kxOUoWRiR6?d2@sp1`@*xwH`*5B@I~If%nKX{Gbdb@Vi*V*0ggrj#KZ+z 
zAcP2W1dtOtL=YIFrvMC8^8jRA$oSSAakXAQdvbn$HuPOrhBc(fJ%YjM)&6XIadv(d zP51kg-!j-Q3?ly!*lXMzGzi zHk*z$HtQY|4SmrzdbQJd0`~FvFjcQjFVA-4Sa0uc9v=2(wUW|XU~Mx}+`PV?jz<7X zr5uk_$wRAE5(mPomoJZ}!{wu^)vD80A8zk1&d)euP9+N+#v^*1?{=%cpC^6&@^;#f z&%gBkZo8iDr+GSgC^U4_I=N)zlr$ho=}MRTf(YzEu~=mcJ(93wY1(QtCW-KI9JQ&r zGA;Q4U;uMWWp(w=TR*_-A00myp{BEf5iZTvKnC@yHoJN=H4O+976N1x00COuZWLk5 zz80KVgop%e$zU(sAG2uciZG)YS+Fk*=ENk73}(S@VG4^c%*TWvM=`_}<^6+e6VPOJr2XlZo-8S>q>YHmopt)*ou$+mOH1xuoL-YlD z511thGc)Cs6X%?|lq3w72=H=RhrlJ=?M~|E0Z4!m?gl^%L0bprJTV|u&4Pkp~mL&@^a?|=XP<;$yKb!>-`lnw{4)8@(3 zXa*sZS)E7FW~L?4>2&<`)6cBw`PHLvS|ZpWuw8F#(%^i3yEo$Xu=3D_g8y*;&<)*N z&tDu5$Nj_p?)Hwt0ig8VJWn?-zqmNNFo>q}@pyXm`u^GTXYalLKBzwIZ#@*8$7v1+ z?mHBk%u*TpJS5)Y=lH6(rJDGpCfqRJmVmUOfBe z;U^!>pJBMF&a;N9syUcOGZ+coqRwrulUai>TK2<>MFuHgG1_^A;*xYG@Wg^Z;2@L{ zG03?fG6Vqh}aPSsVX zahm6uho?Qdq$Es^5MgfSh`!X)YE`ueuyC)fMl7sJ^Z;F4FhIWG?jsg~MG`5cbg4^O zmisU-J}U(C0AMCT!mfjJl$cCQ5#G9dE#L~Bj#r;Sa zh!hU)A#b!3Ow4^p7iYuy`9)cG)Df}x41HaF<>EaN>Q~(xX|+P6bUf|%54XYsDXBv4 zhE#@>GdPT=gRDwz4d&YB5?!`kEX zvvC@0odDwI?!n!Gu-0~VerDhio+Y17lL60arON`rB$LjMuCDfX_q*L$$;0t<`sm{i zApnuqL(j~geEbP`lw1zSkF5MeixawT1xkA`9P4hgeYECJ@A+ReYn2Gpo_h|jzJOA?2-~Pevk5FS8 zr|EPwovTh&XLEO*!km}_(5khn^VoC-R{#sw>Hrv|5UP!k3$eKg3ZW1HA*ILx0i;Vw ziUEX?IRc#^BEUS%F?3O*5*mf^!lhV@Er>`UjN%?oi+yb*^)rPt%@P z&sKG|M3xA^)y+cPbXKsYHqT(b;N;9L0GI4Tn6(A49?Sb5K~2LWrBq7p(jw-d1#*wF z2n!t`!ot*DAwo@^JP2ZO^8rQhl9dw=R#%JaA&3cB83Z*8imI9dxd5nG0rublR-))2pY$D$TkN6gqNuOdni6aja9F=W)D09_mS_sXknPvA?$5{^Ii<@-m*<=H(~GlUBl4RMKYj6)x61kI`oo{}0f7u6X((5hk6*w1cy13} z?sDoH!0|YHv$iEMSeDB?N-+BD(@ib4bZmti91CVsXYEoVLG#6JF4P$Pt&Qk)ZvyJoImw`x5Vxztuqr&HHu5uWBLNh+yxNsjPj_mrMJ`p4h; zXWxJMeFo9V%(S&}u5)eGT6n9=s~8LsQUuS_6t%`|VQ7$%2twS0mY@L!GO?o@E?sa6 zX9gm64*>);1X2ihAVly8GgkvOYmO%5ohzz(xU(>#5fTO=FhIncYB(|BJkM=z;MV3@ zRcjm7XI?)Y%$o(GlPk2QrfSW+VJI$2w)i&}5Xa_0NM`O!7zkns=$ku4q?Egq7e_lU z!luL`LQFuY=4PQwEmFZ0n}%A18!{jw#F7$Xk3fq0rvGlVB-1^tg#dz)HFRqX=!WRH zXw=@QzyzEvF?M7fb4q#GY_{t}t(rKihj-t94_(@+HZ_e%kM++K>sDX077X 
zP!b?X282K}LID;41aJrR5CmdQUFk|Ti>ft3MF@!NTm@m(6(TV*CVBSsy|&)`wO{=k zpWnV*;mV>yr8=9bs#XsT^Z<8g;h zkfR1V1t4Wab2I=z1we9)a3$*t_Dt-KXo{GEfPerCVp=fkD3nu|B!gM28qwydYPA42 zRrfF(dHZ}tNZMLxxQ8}x&BDkXOu^j{B_d31sUwCi$Z2S*0pY%6nt>c3mbEeEk{6Vj zh)9yfAJfzql8LKnb63^5X>$(@Ty%;H8fj5gAsDsh(L&?w%W#NIvrax zZ3Z60&^a}*`Mvi)c>es^=U;rdKU^<^GDduT_j!AD_l=+Zx!?Wq4^bO|-+u8@EiN8E z9}lm)By;o9b!vWhb9;O9D(A$Qw`W_TlvAb@_qPwsStMKNx-WRWkfg@ovlbm# za*l}L0j6MvDdz}}=Exx|>TGU8RB{1|rlVHKf>o=Bl`av9Z~^3g)nB~*&h_E;um0En z+1XMvW3{V1e(nuB8j{?{1Y)Q zVLM!wXb};PVXkUk-DeLCcXKdAhQNPw5p3$=3{L0>fo=ru7yvF|ZdToz>jI8MSrA2t zLco$3Ga^a`PPi&*yIOI{Rmb6YaUtiIyIp3ve|=}_2z0tXJiLBgtLjv>&8jUd%o+>6 zce)P#tt{z{$^ZvV^efrVe&C7WlQ|c1hTkgNTs{IyS;w-Mb`~;osyKqX>pQUL|`25Zm;id&d<*Q=`@`JAtPRFU$httDpJS@nYsnuEMR$EiE%Ar1=rs<$&4T!+ewGj~{$_WZW zDXDM}c|viI0CQ}MKf%2PC?UHoc};Tz!OT&7={}pQK?I>8P!cH1xg3CmQ_3Z$l$gm0 z;7v|v$$dhARjaM>>gg(C=|y>%BY?e$b9w|Kky*IK;+i#EBF~_X5WGOy7X6fi`vQH9 zV4_7>okWP7k;6gF5W$+eMr&#)MDheEl4Yk9Z-najMXr-$>iO{?x^fN}Tw_5JOgS3qF( zX_}`9Ky&~oT{=I%>^lDNqaV4!lgH2Mbg1(*&LbGKXrF%k*YX`JV|NZxKQ0$_i--<@wZn^iu4^qmiX^q>Fz zzq8+;>SXuFhvR9U#;R7Qd79>#h=5UD!QCw&vH&NL`OwDWY(~Ij<|5b+oY+gkB(X@{ z3KwwHm<;qyBr_^TnEP^n#2_Xi;|L%?i^Ud5f=E9#X9R(%G7AwSkqBd8nA?KP@xbMS zRn5$`DzC1Fuw{ya3k`#u7Uy6DMxa@|Q2{tWFgS%RxdQ?VCL%xtfEH%4TsK4{<^1Mx zWDH+2 zi?B$Mge1z$$U%gJ%$cc_u1J>)%){5uzW$}hZ`bRGX_~O3+LRsd?r+tbH>$09)RfXZ zRbfh9*RR%?2|Y6UXPH*1V=<`0*DK_KioVhjNDNpS}e{d2dE3$g$act z;c{$uVqc&t;p_^E;b)B7zct zL81{dw`tbxJjV)f5Mqa6yFMN7Yi(JQvUj;_!)AguJybXIs+-js$T{Z#Xl+*WOt=}+ z-OU$X=eOT}&SI~xUv0PB`EU~HKK}UQciw*cZhKBhxo(K(CAn0(Ka_O%w&q-FNlY%6noG6*I*ILq+6M|0I>OAeW zO=e!0x10W9-rwzOmv_TzSgnR>I_~%T58io~C1VU&(s#c19h+iUuN)e9pAxa7)fq&& zkl$kCNlO2SOUzKjr%oSBlOl!(QH z3<@(ZuQxhPBii)(`udXA#~3lgpy^?1O>0Vtq+4yc*);BPoU0_>Y}Vsc(W@D^aWsm6 z5RN(@zWDsp%SV@Hc7J!f8M^KI{O<1lWyz19zj*n{XJ7vEm*)wbk#cd!(^x6dbZk`9 z{_tAElax~4o~uS!&Uu=~FFv~tjS!t%HLqsgl>#V|)8X)ts3Z1id>D@ph@5lrMoiML z*TE9*+q2Eh_3N?eZZ&K#&PIc#v0iVaU#rG+Jk;idour&g->=UT!tz@ZZxr_$ha`E| 
zUAeWX>HYoD9M3mTQgQ&BX9bX9-Otl;{pi{4-OJzjtzYF1Q-^54IfGd9#()&XG{9n- zPwL15o3sL~s!T-4xFF}rqs`R~58`KA8!`hDAzEuJKnYdm_%m~oTn4O8tEFraU(|{#^Vb)qh3`XSje|a6l}c zIzmK5Hw%9Qt_mQ=ym`ZVuZ2*NVUYTLEiYOSqJgh7Pk zc>3(~&$pYab+B57bQhr|6?r}g?QCu-Wp5A>LlU zYOU=yYcuQm^;Fw>HLQlAY9uV{;cUCv8zmYYNv6& zxw!#__3jMGq51J)e}8u?C0#wfP>c187k}@U|H=R9@BPi~)q0RbnMjPqNf4QkGZHbI zldFcsL_~e+!3mAo*qu&oI@P&NA>crkT~<;IEign9p$8cV#sY+r2ntS;1Q87vEg(fo zxj-fa2x1UIKp`ZD#R_P#U_`=cK{deAO{+ScrfHh%+*;EJ13+To-J?wecr?UdSmcuS zQzVgvEwLX82Lcj8L?jHv3@ATE^iY6D1O_k>qNJ2ckuHhj;&S>aJI*N#BOKgP5=Lt3 z(>$4mRYql30~Y`xmbCanC>)6)0RksRg0z7D6LJC(gcKM=?74%|mDHuwMNSbZ0wX0w zOwpv1RA6`{PuG+IoX8LNdu>L-O?8@^hk}EtCE@e)^IRwusy1gqVG3`p&4iecvv482 zzI(Om*H^pC`BVeq+0$pY*LUaVkAn1cs8^5P$wRumdD)jvge4VK-|o(DZ*IWTH0kBz z^RDaaF#;N3II2jxzuTuSZ`SKo$=2-taO;Zee)!_^k6wQE$!@b*Z??##lstHJhRY{^?eG1qU-{^_*6Z!cg$g>D5D+i@sXzu!V(1`^I&kyC zx#P7PvoU(iCqGTq>J-dijH*&FWuid9Dv1aQAcD*=%p*VvW7#VSL6#&TJ*^R785acr zELs(!FxLnVTRh}Kq_u^*H!!og*_(sOlCfk;NtluQtK?zkhQWv#kpdVYxy6za07L>t zA^@PJ%+3G=h>U>$2IYVZQHdt#Wz6OuoB z@#0kLcs#uS-j^J7J)9G}>V(7zQ#;MWW{_@mn(O*Z>0f{Z#(Za&%QX4Vt~$MH1Jr|oL1kXR5nPNxI7_q)yR z;%vxib@e!1J^I`K=pTIRv)>z5kjaC97dvzkbEAZm0!1+yWpEO8N2i1-Dn?ap)tc9_ zjjCvrMMiW7$drI2&>{$hoR(BHx?4^;1rZThBQp^a6NQJ+q6db>4uAk2 zivkXWP?$+DW#Pn3B2oab&KdzC2>{&|x-B9yGJ9aH&E}?6W68aBKpDA^q>!bWi3k`F zK)6IE0Ew7`c-cKrI3WTMU<%0~$&r+kM`nS*(`jGws&vI6a$X?`a^9V7!+d{VA3u3E)x&r?K+2DHPsTc# zdDT|unL?ew0C$&HB55ASQi^&dfai~%9uBuRcXuh}B;49Gj&*zSNOHNkx%&ZHf3G^%b`+*EZ2SJa znYk(&-oL&--rbMKW2=)kv*w|7uC3OXTL57{47=T0t=6#8_obBWYLiRae&vJf+lPPr zuYUEVy=H1D3jjqp3vgoO00tJoguwxXhTsTbh92asb5&R32#D$$8sfx6u8j#PTjCTB z4uMR>2?;n6vC#5A1|~#zMB)gFU{DK34+ujKX!gb{>=9vvL17Wg;^+;Z0dNs!hc92b zB+SAB%K$zQ!XVo6o`x_4Kwtnw0BvqI2IQQ@1H*wV61Z>}Ee|Jzg9`x?p)d+A;~SI9AxIUj6dJpN)Ql$2|V5yryWBB zIZPV{=8}lHeF^5gG@Pr3VG*j%i3TSy&A z->-J-ew%Xsu&9^$@k3vo%YMnS~ILF#wtd zQXn9asJS(DAPP4lMgT|ma0^E?_1PC?=o=KLnqmZz5MW>|3QZ(ivU-bNh&PW0L<(EB z#|$B67+|rq+lkx_AXo?qAvnl=x#>cLnFs!q)=vSzNpemJ1(A_pF#((T;<-UwK7{5L z2tdmoAfaObEhxAJ&K;D<0m7X9|E~&slb~h*V2{uxuQSh52( 
zLk?GshiUGyzbF?&&aI9fmWi2IaxOV<*K0t_S-QSYDW%ehBoAoSQj%^}5TMqRWMFnc z9ruUQap}89S5J_b2`?@#OWGg2Up#;N-52kFtsho{pv}|na>I#I>TYgtSu(S?@!)l; z?UZuqR@>=VRmZs<-KsU~%hn^^KAaGxFUd`h$4R(5zqnL)un11B4NRLlrUanExLR*c z^Td63etCfiCfE<@>gw|R+4CQN{_=M|``zz;_M_|5eV;iAxG-f-R3gY63_>yiJEjP7 z5^*pnn*|z1fHk+~ z%`glNeA2nq=5HPmkMJ-z4R0P1gcyzySnGu3l88A6B9hA+m|-|_;F7-ph~R~ain!3< z-CZas7H>+$0BVbVZ?Ob;umqAY^YUZvz6^Gj1Vl4)MK3}8<_P2tZW`u{%q*Nk+&#ct zApkW3!O#H!jKKrL5y=B1%n@u3B?CftQdJfV0Lo-F=FtH}SRm9LRRcUJdJ|BubF+gU zR(&_m^E8iV+nuBg5mhHau^P+?xwcUm6ZqKmDZ)z{HW#axFF&uNo^6KJYSp^l%%bTR z_n+*x=R+EvJbv!P#~M0aM+BO$@-X4iB5JhFs`F{m)+BeFQlE=Ehz!i9`@_TWP@g`{ zkFTzN^5G9(zWR8*JG;1i@%YijXgc5TS7qfTROe|vZMSC-b$|P69A^L;wi_V6zrO{W zo6l}YI1K$p1IGLN)DO9o+S)YLq1)`XmoH!4+}$5uJbyY`ef5)j)Ry}p_uc9E5YGF% zqZH`4AVRac!65~de%OorlOO!6Z@v89b-RY1b=F==NUGHhwaxQFuLA@TF;e6J$IKki zTC+-1#w3CQNz`>n&7wuB4G|0h-N`H@Aqg*cN(h2LLICq6OmDuVbRsYWpqXlQ4`q}H zn`{PihT;_AMgZurlnxdekNZ#2XA8#=01*vL+=3Vx1JRLy1kghpgd#G5M+7tlWS2yU zXs)PM-NFfZ*?c?lV&r8(3Uk9C0(Bw^L^7+bTEj5&#qk6Hq25|mw?J4}|G2aekQQ<% z1V94_a`Vn^Le*47-w>Nk5FVlgiaD6n$JMZ@Ee;Q#)zP$JF7WK?;s+mJPtDFR zubMYtn%aa(p1k$`*~O)+_k9^}5B+L=s$-qv{PN=U-RjN0?&fxH^}tZa z`4|yro5w7rn$BZgt+!`q7ZDbiHmfxSU*Ej??A2%6i~g;zy!GJ^e~bZKI!ZE)r+HFZ zbvemqkmJM6Ve+%9N9Pw$S!>_--`n5*;QsTM_TV|T(T|L$HcJq9&=fHX8VDkyM<}AW zx(I;>W6C7aYGk0CgcMv-=$i7}%v^<35=B_6!Av4R!r{@(GAE)G)*#T$RUIOQ!4j9$ zYF2e_5l#Uf=mh9KFA1~=#AOBwVYX0fu{C98Nkq&Z;o#s^ggHV3oJdHJcu`piFxFY) zr>}V7L=Zs6EMy+dH4sx~v4F+d%qb`$mc_7zlLI4J5MdWFwN@8mD02`IsMT7hR@D>$ zmW?kW{4@)P3rIc8wOYgizH&egBKPLtLA02cz+J;Dpw={vW(ZEE1(_f`#IQ9pf^9jw z+&t~OO^=3JP3M#btu0hFG!bZ3Ktu2_ClpS2c6Qn3$=$oFv*TX(57X{!ouoUCr#2o} zXM?&wzI+joKmO#$AAkO%&H3)Fx4%3cj)%KzKt9gnj!^Q>!@7QxFt>KBwSM`%uRVJ5 z*28@NaJosUtXG$P&yOBoFeir?)|;nKUtC>XRPZ7>(Wte8QyKcxJl)=Yk;>}n+h4A# zAAj!bIw_O-O*KfBMf%AB{+v|O>XO?P}NQ694TT@_S;J{^c(;y(s zg$aoy7i=vB6rv=|7@1W$q@YAF8Kj_I96itocy3C<5{MWQ;aHK9kXYC}S6={Y5ui3} zvldpZHbW-_HgJMK3oZ$PYpno)g6QCC0E?Z(7hF}C2ZUsnuH*h95ekrSa$;o`L}W%l zYI<^mjGQ~k9dc4)B%FncDm0x5AW>d~shE+|;<#bq48Q=8k|Zo@;HIV;&7uN8^4gAd 
zJWXw$W~ej38x~44(1sBl7-kv`V?u9On`cmR2G72f`yE0VEy5HG(EyYoQh*^M0%HUk zxHhofm8Lj}cJFl0AMKv>!zxQci&|@#2?A$^zz8?Bxt>%TA?8vz$!fI{&Jp?i(W9;_ z^YOSEhDVPcOTXUj&ez)=kY7B0EO~W(eRKQj^*Gn{Dy!?=?OkX+wmGQFCG{N=rH9+w zW+bc9_cWiz({U`E0pRBL)!F%WDZ*Y|KG|MgvhZrXnUAOY!{;|QH-f8RxVyhSyEwag z^i+*LfBD&Vor$0VBw1;|G|$KJzF+6A- z-?%sMS;%rXIF)DhBxgdkd=n`j3C@n|GfINUK`itQu-GGqYs@YRHZY zPX?Wkh@4B#LGng^h+Q7KM4dO)(H&fAKTg#H5|XeIxdd?n3`j&_3!%-!gE0OLnj{iB zX?4@^xj_&vVMz@40Hl;eSfBQd!vFw)07*naRNNtv5CId3NI2x&6CtWCos$UXfw_}a zKL~|$@jwiG!x$tagBgWV>gL%&EM;QBoH-D}N!U)uYQ|X5ts){rn4xMENp6505!u4x zLvj&{Tq3|~o9gIQ1u(46^Q>;fkP>^CT1BO33MPOF4b?2dB7%X`7|DSef^!MVT$`Js zwh~~>Z07Sca+28zRYTLvRYD+XS@nZIdCZ%)A3Z>{Pyf-#t2`0AIh&d;_V{^Zkd ze*1fr^c!FLKytpB_F0BLq;#n%vG~-gNbb6IfX{R5a(8p{npvJbeNSgZoS!~@W{QlY zuBu8z;klGu>bqkdce~w|yO($O*l(Ucd+X-2%*(JKJ4JqT}(1Fy&mv zu@P4hvzT+2HW%A@I`Oe<^Z4r37tPyzvN}EtLzhbr5MHZ?^tl_yW_Ic}D@adq=cEDv zQ_QVtC_3ZPZS-gmBti?8p#dP0V{Kl0?Jx-jP&h{jD2BI~0bL>9{96MB3BW^ug#fF0 zfCVutIU{-lQx=v45y;vi8n~ky24wSwBn08Ai7BNdoG_dMBaoTg&E5LaA*LyU`_(cP z#T3%Xu<3G6BAJI_VDZ*wh>YlzSs*u^dCG{WQ8`KJAZ2g}a5iri&RD{rg{73h!3|AA zwegi5LUD zHA>0}hb+?ban^bAaMy~eMhtqw8QiMc6jp_XNaU@~2J&RszJK=K&- zwYJ-w3FqnIfEY_-A*EDGX-$PQ1|4tQpV+UDX(w_e|U{>gNl`cmrY z;dDB2iEiivv1ttJRh?$Fah^pISZ%arATY3Ma5)pK*&;nCX8TU&mA$NGk{W< z0MB04Ei;iLI3T)$GhpOgn3sYKE-;`$I15Kl)jP$Q>-}+mI?mM*BmpmmsT70^^Ch*;+{!F=<@?Rr>AqWim}ySM6n-!tcwx~1pYmBCf(JcS;Ll*-VKcKdLD zcm3-A^{c!6%~YEmj_cLx{ONmt>`(tYfB7%|_y6ku?f+ccZ~em0{?e&Q_vrF?yi1d< zIY-2>DFK>@Plsb*oS&Z`4)?-c5?T+dFfSbwr+KoR!Ay|5sUE_bq+zbQeSEdrUYzb< z=OU}k_TlDws`r#-yV`W!a6GhBI;#rcjFAaNlJqN1$u;IW38gwmou}rOSSroy_0?*7 z`TFiPhixt{fQk}aUR*qWa(0?u_3%6l@Ah{OuOA*P11UK(-d3^zJZ;AvS2Vo7I$yp>_Mi01T4sCP1S)}01*Q*K+PdYohb|m?ag>b z5eacIwPQFFr+@%-cPD{}5T;xbV<1771U!r>51oTg=4Q>2B^Tr}<&HSX!)cC55raqr zh%>Ob8$louuy|lts)Q>M)aGFj?CzmV9XUoM;RplN008u^>xrl&S@ju2Y8bJTh1zHg z(Hs&>aSpX+k_a^@gI8O0(%}$h%#0)f6&cG~R_9EmMa1RVwrN0hUXzzR9;dmgPIH#7 zmmble`p~U+W&Pgn?D6iZ%bSzdE~?LsQM!JhlnJSsy?pt3zgiShKom!cH8p$c;A2V#qeUp{_;5C 
z-OXz)Tt~G@wau+HB4B}VB1|X}G`-=L1Y+arwH9{C0N`K&Ktv!xnh1g!DH8$;6A=Wy zk*Z{1L`DUV0ED)r&;T6R%-yRixEF*Zi`)kdV4*Q15Sq5|kj3bN0T3dRCAHRQVU`pj z!Fg5+PE0D_1SO6ZOTTmu}TivaMT06-_qEQn-) zp%y@1%Fy>&0!zth=p^Sg;;!#{$;NfRf4F(Q2dd-Q5kjBcf6YEHzFyV(z;P0NP-hp=mr^A1D{d=O2Ff`rE(zqleMdR<#|+cG8>E z#XxuWpWom8%|G|={@FkNOMmjWzw^Dfzx1`!{s?`3dcM86e_7}D;?W~JHVDH^+32XP zRPWAHKV*ftJij^~_oeGmFe3Jc`_OipDxq{;9yaT0?QnlR9&T?w|NLyXlFW%aBAN`Q zYNhX5vs7{^DL`gBwrMOJu0})%t{REtt+$>|)h9hZe*Er!uj6#sovkT}Dm=Qn$T{uj zahoo8vNFA)oVuXrfo&AefaI6oL&)$iM2$@vXiry=& z2mo3DAR`w8Yi0zoI`8wD4?F6*LJn*(C6+WBB*CDj3CntY-sO@~8Yrot;_=n>jULBG z+s(!4WXB#ao^GTU5)E(1eLanJn%q^}DTd89D`O-oGAaJ?*F8{LvR5-rnu&fBmQa^dEci-m^*PXHTBj zcDk$WRBhJ}W(IB)P^384^{`4Qsaxq*hTsv~-KB;G@N{ac;q2zsXAk@F@#CxA?sBfC zZaH@N~$O)zAidGPi~b7*vEA*ybifI5~qb zB*g}T0f=IX&?1E~nRzfU6+jbR;D3oJF*=%}vJ@m^Vemjlf5YNLSg>h^rV|92p*j*d zp(D9hMTR12q5imgN|}Ho0)R~o09tEJAxk7?M70JYl86~;88b=B!OTF_S*Yj85Jnv8 z?oyluh!bL1SQ7T-yT@hNASZ`rVF8c`Ah?ng$s81meyp{*NoMI%N~IT(R8pT)*XM4A zu~|LY-QBS=A&``fP_@IsN+TXHuL+Uc=tK;_b2CqnyBIdOS~F3Q0)YvK6d_Kao&ZT> zbGhnDj|2k>Q*Ka3z8O~C`TEJDuFr&1gAN0^0km4jd7L96q5=Bpbm+RS`S`HEX4jb{ zQT9+|Ak3SKXV2dH(&f|j-}$@$@RQG9|KUIKZ%d=+tKAmFF#~L^qHtPh-fng+}|D6U3Y%5%Ux%_ zFbU=~tN}@_m#(#ko7;!Oqi0W_zj%S%-#?7&%U$1ZfxFxfr~L!fW9oWnc6hiW0pMtJmYK$9@k%uQvT^z3od=J;Fmk@7WdUAvSLE=-}BhO4B2+U2o1tUb8 z9q-O-)T3ulv61H>*4_q?q)C zV4GoBi}bQ8+$Dr?br02G0H`{8BkCwfYgL=aaqk|}aXOt&jv8Q_v#WQ%_KWAwzVg}a z>DPbvcQ0N%Jv%%5wO{+~k3YZrPyW;Y-Q&j>fA#7=ZO^)PI=C2w+GuQtQ_VyC9s=Ckwdlc$ecwCW8bQs&+HTf_SwpdbI?-~YS+@Ynvu zUQa{ns38P(tFkWrs;t+mZZll=X*jbMV38e_MATu6o zp28$y%EClJfka>`CISLs01-&)VBrviK}^U(Ko|h%Oe9DqLKq+{B_*Qj5rj&idyHt0vVsjl;YU&HKLDDkts`LzKjMb+#_l3c!86~$M z(hMoW+ceiYgLz^au$W?CyS=|-W}+bzSDMrgM(eAKN6+8;eXGYG98|vj?Qgr*Cr_WY z)5G=M=imC)5C6hn`0r}?w|?*2nfS9$zWDUxkN-#i!~gJWKl4XE{Pd&S+lTGt1t%7i zzT|tiHl7G@D58|M+q3n0VD+?K&uuKFq*c$uImoWCFnXo^*S_@S>Gt-!zx#V%eD>kP z;fprc`C%sFZr%6W?t?FVtT-KDpih@b-I2V$-dY zkg#)b=HQ%rUybI%$|maMOql=xS~F+ub4E($;iQ1>5I~qRb#FGf$dVx}+!xX%;+t4M 
zU<4>25=ktjbp~)i2TlkSh{>1e8#GP`NJ6f@SaQLqrman*>xLyN1tXw`VgPumQ=8|L zFsGE6$d{R8w@R%pvQ%aW$>sr6j1Vy(QP*|N0?mblD^CXImN<8Ul!ZhD5RC*7>gjNt zZrA;Kz1oc9zTc)W2@r@z04YP?uTC{vK+X)7CX%HXQl*9OoemFnI*9#B#$73EXbD7jU@t_hMR{WYt^O`T1<0Og3Yj+{8Zn7 z^RT4u)MC9}mpnXw_THnnKIqnu17Lr9^U)`tUtTrysq1`^#_r zzHfYOd-c}59R#5srkt=bosN@h3+5bM+FhRJiE~QG9VS<+Z3GLT)OFpk=6<+P^=X<) z?wdwc%d3>uCHD6*J$&tJUs+#Xe(#Hy$Kx>N%Bt!rybIKTqS*Sr4f!TC|5JE&F zR|v2u77M^H16J^*QKjYzZ_K%i=7HT22?+>%iI6z}5G7uaw_rfZVyFO0ivSpaS|CtJ zLdnJmUSq*G&uvs~?rwo|t0iF&AO=&I&jjerthx}92qA?B5<$+KvbDyH5aC22!YNTn zKv0Q$6hH@HD1}*sIhabO6}TaS5v0hjZ*=mq3iOn#7BOzmUNtOtZ8uNf2kjz*J zd4Rl?q1L@p<_2Npb*l5RU|O%&z;M`)8ck~hfTlCB48xjpdi>~O=q}e6mz(qR(?Q$m zVV=jwZ#}<$b$fSvGu@n{_pp>}(``_q5^-}{gL%3pbY_4xnrzyCiP%Kd5VS3`%) z0p48oIHVNNl|hfkJsKgfC)%7}qP5fM`s`}QtBp3ZN~!N`(%<;yU%k0*AHCGkV*(OB zORMc^e|Y(D+Q{Z2;?v*zvEDt{&&Q*8-ag)sr-!?M=3G+ol(K3&sr7vs601%?aheYZ z-1j{)aZa(sD*+;6wHa!y5HPIz$IsuA-5Jz&7;o~FW$Ppd0nS{ zKb)tOI8o^s0CJ_-Ar=A(w89AqTs7QXl|+P5Py{iO^W>o5O^pZ%2|Pt0vqli6h*-*4 z?uHoR*&RHb1tlS|BO7LMH9#av3p7aBIE)#A9h-vFJU2B!P7vrCtud3c;7_##BU4Z$ z0Wju_oCPsJ*+MeoJOLmYfm6=Xb-5H%n5zOhA$6$)@hmBb(vUkwFqf1GzyNTW6{97n zwtqctw}<U7FkIO%>m>||55jmJtcAW+~aA`tTb`I~>^Klxw$$G`CNKlkYI z)8GE>Z~lQVy<1j;5R+lf9lAY!bipEb_YZxyou`weezV&-VIq-8;+;`$x!LKoF8w#Z z{ms|cryu-y{Qlj8oOh2dAMY+Mbbi=8KZlUp>+#z^`tjGUE_Ulo%W(aSTD;5AA83_9obpvyT@T`vLND;!A2L!;B z8NCIBU}0enoNKLGv4u9IMB*{-+x`7;fYRVM{-_rxN&LJJfM5X2q<2%U(C z8FZ0l%3c-RfV0eJD(U6wwalHG9$j&!LiY6OQ=n8BE_SQSizmPP{g3|4ANv#Uy!-x-KKl6K z{+9S$QXcC#46BcS_?`dh|NMXbrAN>A4>z~Bujc99-R_(h^cmN)-Qx%x+acu)9>@Lu z>hV*-&IyIc5NN$YN@CHk`+D6z`S9~!`K{mk{N%sz=lEY2^I7_ z)g~D_DiCOHSuk?)7Q&e_=AoDM#q|32SAOlk`B&fnoxJH^Ts}TEo*oyIilu%k??^35zWy}LH2bfo2Cz1jsLn0y~!mwc&q))9P<@3#?`{^{!$7!nODxttm z0pUPMog`E6NTo2NW{Jd*SeT;f%#iDZtp<4#L4r4{<3fSb7QW!GKD3+OXDkN+m&?roUDicUfF6I_s0ggJiRbm2W2@YKRA-$EPdvtd33qSJ*|M@@vtuH?N{QWP#|LJERo=)F+ zef!!aN?3r`9fD)BAXsnexW9|3`lD>%a61KmM(6er4Xg|K%?agB=cE7>odbBm^oz(=-LfYCV`4pqDZb zB?}jKSIfD$nujA$&RkL|eV>O5*sNdv_`Bn@e}29_hRgZ3-Oh{W!P4Nc+3VcmP(xFQ 
zU_)$Ka+V}5Q$&}%23Or@9D3gNB7O3d5(*N*rt9VsnEH*$Z2B;r?(X*Ec$nrxn^lMt zi*U}8&?LCzg2JAYLQD)ZQwXIQ2oi@vAR{OPhgZswq-zS`EL}=K9%={zlMp7slyI&y z03w!@62ZVf^T!{H0C8c;0M@G38m+3EMKEz%4gIhYUe3-K+#?|&u2Wx_0X!1qg+@ng zrga>vHf_$wJ-E?!yGwVQloFxO0DF~;o+a)2SU=j#w?ZHf! zWPc_vnKI_3HR+MYHdXYlq&}}w>WBm*gMl!WL69rR6fp~>m26kkv4_sC;oxzaYcq58 zd7jO+UvJi{Gj{}}uIq`V&6S86v=4vuJ#CKt#ayTNA73_&zyB|P`RS93M~^R8{SeV| z7w^6E;unAZXaD%0_+!H9U;p-ZzWF=9`{LP?Kl>;C$m88+x85XTpHJgFb|nv8N#Mu( zTNXLLI>*&I4Xbf~DC-^M4F;BOb$oUAoo{{fzxYSL{-v+}?Bk2__x`~@{=fX+|JGms zTYvrg-~TOB84rj1`|DQQYPZ^4oagdns`cw%|GJ;FgUQ;xx`MXQI-ib=(69Q^cc;^d z1w~|Pqr(K@feZ*NL?|8r5Xe3}c`H5tz)NcPch`s454T@DJnUDi&CqvUpnyw{kzH{H ziN4FF1fvmqqSV|cm>Ip#0%RTfstiojb+qlvs!#n&QpzbODVg(-rKH5U&)Z>F@=B6~ zqiZD0T`oh;t6{V5hxJg_>u$9wEI|p95CHP;gk=M>nb6>}fvoRUK7S6iDL#(H>l_b-0)TR;Bn<{Q7bZS7`0-T&q{f1`A(v+ZvG z`qiKR_y5v=`oI03{crwP|I6=w>-*>H=i94`hnv^0?jN45hTBiRINpEQj$`#P5*L=z z4Qi*>&MtRX(=;89w}4I|K)^|eh!>7`(`NIkD`~TavLWzc7`g_VZrycVbyvyvj%)5_ zkA`t-YIyHn33{M=iY(}i?k)Nbhn#yr>@xuFeXpNFtKLxjVy zI?FjX+qL*2)FDNFo?!*DzpD4B~iFs}T%L9<#%V zB7+yi0G=cbf50<1hYxPhMx6{qT_O12PtNxSk{_DwUb@k}?|I8oykN(o1 zed}`j8{hio-dws)6+ismhto8D{`p-}{Ez|yneObl&kYCfs2rt zCC;s$O36dlDLB|1hq+&EVCcz}2=70A`NL0N{Sm|MUOxOMk@NY_5L0Km5M8o<3Z|)TZ(NdTOVDz&z3Zd#*NuQei^Qjie1mBJJ$ObJ$(=TC%a zp4W{ErIY&YX}$y3)~XcTY_~!LUFu5em>FkX^Rj2LSjZDV-h9`uS7s1P2tUv z2L+LG7FDf!L?RMS9a=L$ zrV^C|l60;uU`~>)y1O89_|n7&L}u{JlBINP%LG$66H)2NV}_->$t1|*G#@nv$el>h zK$sf9@i1ze*Xu1S69=fND+{Xm@o*Bx<5+KgaQymLzOw1FXteqM*M9StA78!u2fy*? zgLnV<_4UKe-Tr=m;*0HSD4SJ(wj0iOt1qti`@^`~tjN<`Pjf{EJnavgp;xV{nmGay zN|~l<=sE+WdB*WDp8V$K_SMbtWQ7RS{OtTm>es-pI!3mZWcSa1^|wC!^b2u#{^GHU zO{ZBb2?gfUd+&WP-`~|^TjBcde$NEketkThj?-t``~BtRxp0S&+N|z}3a75?DM;N? 
zDMMOI0MrnzHa*naFIE@b;Q`9tF1vDsY1X^jIvu@H?pMrd%2q&QCOyl&r_|~(*48mm zNgc`LN|^;Scu*2(I?>$Ct5Q}aNsdTPoJ?d6I?Z9glIkp)0L>%NbR-PsU_ll!pMjMT zn>hnARzmjxcXET)T0nG#02`ondEMs?!KO$LSglp3p4m3rN9kB_{~BzZ>St%$l>i#2 zA}I?ccg3#YHWSR(ceHPfBvNK#M1(?O>WG?TMc8FYIcGIQ^wOarnQLp66HAwJ&XsJn zTfOt%+x;-4A^>2-ES?-YE{x8C$jA#t0uX=zOtn&G^V+N>MqKDHK#Ud)VMdS$fl#_4 zQzLJc1uzVNkux|l6{exf;2z-yYl0DcX0ZL$N zRYkaTX_}@?Xs6@%zx(~Kf8%GLbbU$ulh0m%^26_*ZZDr)Ts_+4cOE?&)~k8OX_}Dy zbi9A{`JSNL6*(TRL&x#ZV3(^xn=21%cfY&2-93M_-kwdss`G3rRo7YS{muQI9p~Dm z^6c`-cfR<^{loQFUOa#Iy=UM1@W=I_eIJjXKKku{{p-K*^FR0ATkrhrAN+dGfDy;T ze7RcpPda>k@%CFE)mV8N|k zbmcmip}U;t7~}MCc$i|3>AV9D(vD?}ZXZobN;LOr+YJ{%2d4?6rJ^VfscWM#Mpp{6 zx=mypIAF(FSg?aMjI9Zr5+HJ+amJVt+#3=ov4T;~nFWX&1H>W(W{SoHl2{YVv3Xb^ zxT^zV%3^+^Sp65%AuNdc?*WL^mciI&wN;$qfe4~y{C&~>>4Mqn@% z;a+If=g659)zoQ@99`1XRccke8V8#BtZmko`>tDU@=&DoLL!`!NYf@Mvt+AUy1ZWZ zhvUO~Gn~eG=(@)jTXIDtZ8%R=5^gTepT6_uZne8VoaWwBJl#Ee_INst zH?MDY+g)Z^4@u`K95>tZ-1X!A%`{D$^|oxw{&4u{2cHF(-R@kY{*!0axP1Eh_07**UjE^q`~ADOFJhWt8aW4-q8d=g zq2Hv!5CIU{<~r3>BxRJGO4=}GPRn8wOX=u*!{=9Rn(8M%oQ@9$GWpzWatI<8;qB(4 z6lMw*tgMdKt$5S-S<}GMSoSBol z2LeVyh;GmjXidxvN?UDpx4C$9@uW|MxglsNy~k(<>WvcS+$lOTq0JE^1A5fC-eGJl zpUlFJhTJ$+j~PHC#FY^kfq@JmT0?D)2n;Ap>W#=V3j+}ZP)`vM+*-?=Gj9mGp7Z%` z=z^qL%hPPNcekeQRPq3TI;pAw&I@tElL^y3ddJ=?Bxk$^Vtr)X7EnQOy(n+T`l z;oVnjxf`U+e+*iN$ z!R4c??b-9!FYmS!{h>efrS-D`x-;Ltp842I_L4az*$oyx9;-+`jk7tNU2ccf761W4 zQdcrlrV6kwnMeW(cx~4=?Qjt8ck7L+(sbM=nfl$)+E`B$)KkKlyr#&jE)U%xk4UhT zl=Eu6-rkkloBg%NjKO`!P<0w>E{zi>b;9hLH1T8xY?M=>-q7dcsV~AvC2`HrrZk5l zI>xPogCE#|$pA+SB4(793s2&?AYsRh$d%>)N7JA6TGM3teb|~JB8R>AnV#vLYOZ2+ zvzs%!n-nQg9MqH{0lu*X+Y$^Lf*||K-@^BTFAN*7KZ0SvfFMD(A&ZbK%95HQd#vuR zs&{_IGwi)HGa}aDi(NdQ;GC10F|74pzaJ1bg6!5?e?257dWl3ttd@dY60=M80*SJW zLvbOu_0+67w7|BtH4kS*uGMJm$<@rv6x~!& z1d_0Y(*;q7k%$n~tO6JWfjA)wab^;k3R!3#01teZ^Uhbz$uOX`1%w!Q$b)BBbf6?c zNs6i5e>hpK7tFFF^Y3;pN?n&r-5K`oo{mhxNCA z@`IZnmfiQh;{g499yr#zEl-a_rZn9>t!Hc)#J%e2{z-;$-d!<`rc1ZZ9g}cL1dN`C zln=44X?-x%Xb627wX)<2<&n?5#Zpa#B#F9B*sWkG`>9NlIpai>#spJ70Oj}ZJ_i7# 
zUP{(&+gbn{Ot$OVBYIHg%| z3=q_wdDu^Nl-6k%p-_NdX^3^5buhz*f<^KLAZCCNyXxOEPU zX4}KlJGE^;KO-7AaZbs-sy|sY0PoG*Bj+(C0`!1NBo-GPc?Lj$3$+26SVSBl5vSo- z88-nQ%2b9cqM-;S;U4w8yj$D*s;Aahi_O=*E@y&h>xSq#<(x|%269U*9{TY3Xa>_T zC&G1E9v@EUK)~gT0^!ClS@7}!q@cx_k@7})t_N(*5H_Pb*LTEEV`Q@Qr$75y-5#Ia?Q&XsJr0aN{LV+; zeKn-6Z{M8nPY?V3L6DYnYftg@S6|L%zx~6{e)G3}@`K;{b?kco=Fk0`&zJjm52uHE zIzd8_AtIZ51PaSI4rZ-eQ>~JLK^(vXO8Atg{PLqX9PHhdEA9MTBmzKj!31rtFu zh;Re9&EjnK1n%LeUEPp?RKZ;XnraV#h|MoH_i&7h4KfpEk4C9-+DMdzkOIlvLbZ46 z$EV}g9$3_TQ9B9^xn#P;CsFt>|En(%eHdnD+1gUlZX9oVe*(r`xz42w)6ATE^hiGE zeaasZmz;$;M_{)Hi{&DEM092*hKMdyN-n9i%;Gk+zM7v;r}MHNNpPADk}?6aP*Byr zxG9jDQg{-PVa$#d006pKySOU>i3pfDr$or^4$z3P(7?i+K!lkyK?w&Z16K9(X?y?J z-fnhm8=59WERqu=fY#N{>(aG)w_bbI*0vf7B;u&5D>!!R7x!B(IhQ;Q5x`I+#R&+9DA^SOJyH0Gv}~Msz`>G&v>hCqRCF`~93tkO`N0982Dhd2(&FF78Iu zG1Od3!C~H0*{2}`IDsRo0f5z5&HAyett+Xv=#gm1JP4Dc*0#7Ehe4TpM4-=zATlx* zByeaEfeaoh(h-v@*~M}iYypUzWMDxAASyX?5<*k}6l4g5?&n0=ygi-2WEh5gt5zd$ z8V5?0@-?P@T!CoEL;(rFb54M^tjZXfvEky*5Js#lid2C|;$mR6f7n`O#;WVq)|9V^ z&pGAV++7kMQo8Sc?}oWBQs>mnfr2k??6s?^vjE+R6F>`}M2ItG#< z3HgDcSagq0ghIrSOP+G%S{tx0&5}je1sszMw)NJldJtpnOYc@qh5Ty1y1se&9G@K@ zj`t6zlveBOt7jj7@x>RX)9Is+KKc6Vug5uk{K+fR_Hh5M*YkcJPLHSXZClsyc|Ywk z?@OMu>}A|{AnzMvm<2GS+S`X?UCv>^1E-?XJh!&pQtYqB^?ZNz@yFl)t-tlfzxeOl z>u>)0U;RW|;=FeX%IW5Ie|>%JLdW-?zx(``Pp`j-^Md9h7&Fo?C8i47S%?ecsh^zj zhUX`gx99e>KEAmA7?mYC7iJ%C=9lyL_aC0#eew10oz9Pmcqm=%zPAtN&+9t64N$Ur zCkVjo0=A7^&q+{tY}DNlA{D+)oKwuF<+b)FmOx6x3_%W_hcJVk4P{7byj88|nhvS; zN<@U+pdukgsA|c%LsP4!8AS+zg!pg#!%qVhsWUPFYd?p_ZhB@Kl5;6ctVjmr5Dsv$ zL^BZtcH-9T(QE?>(`s>175%DgS#lOi;f{o@ZCm?*)FFbI=6Rpl9Xo2<$QkV>k^?(VFZMw z@Q?`2^zP+L$^zQ*i|04r{SlZs=myr?+q>Jt_4RDNY|H(ZfBq-$KL4}FH(%<9w-8-N zsTXgS+UazE(uE1jz=BD&19}+>VHSv#3y2PRIPCB8i;vpf3r9{*N2*7!t!_26Vgv+5 zSbz^H3jhQPt3xdKTR4}qzG+hD6jRLybhA81Qlz|C_m~gl-dCWb6 z5hWZVAY2h7!pt}3l+(zRn3;d)?|*^-j1k^)66VzU1{g!SCSr3BR3bO6O*ez4>VOC- zx}6jji}MAnPLRSPzyQSq5nE2#F0*R+T37&;DFsA8&1o3Q9wQ?JG7tw~n4bayF$qsh zMXd$72llSodP>B?Ib~)-L_!u4H!%;Ql~S-|00T6l5-#Bg&{b=62TQwreN$dd>6XHT 
zQ=-g()_d2@+8W-Lx*ngFR_g`jTV(8A*JWKE-`CU9wryR{k53P$r}O)F_f3C}A>eFtQ(y+gJetq|8rulGv$Rz`2N@E_2vw0GZ zsM`}lO2ZuiY^Qf61G4UhX}I1$y?@Wk^7!VL6ZNY!XF~LPS{^>U`?fxQ`1%(=-_B1= zu-hH-{w>IUxcEt0mX)78C_c+j^>}1wDiB+=;jm*`>F`fbK28yDDDL&z_hj zW(rUYBS>Kdk<`35C(20%rc6N0$;^o&=yVZ{A&lXP|Hj{cfq+C|aPp{#6ye)eB^73{ z=q!*qT3^?`g!Qf$O8e%~)D{EFX>vnA6D|l$keLb407-Z%NwVxj#$lLK8r7S|k;F@y zm=h5rI}-s!x0Q$j0-cG-$PI!JLYu(PvwzSsPW!cu&T00$A4{m11^9f7N!fS0$r*kmpaa^me zOUvVMxPEnW`&=80yJ_4_8q$np+~;w>RMYSe$FpfV%=6o~U%&q9^ZVDoKx^Zea~Y%z zBn+G|?>6(THo-n+39xm20D%4BCUKhf*L-(Nd5ZhT*MIiMA3l7$AEmF`)8mIs)ayn_ zcQ<#0aXLP{e*5+P;|C_1<}2ak?y5$pX_#BZoOyQ`0wsy$1PN0bX*cYyUwo3EKL-d{ zpWyt6eM2+rPKa(>-6~*E#Kqh^Vgf>U{EolaeihyelAC6|i|x2;EaC1iC6^a#O7 zh$M-#Fp+4DuFb;!B@F%2*2zplm=Tc}keYclH-aGu`L}=f6-lSm0Nf)0E!@Mn^*$s) zai9*?x~q34;PmXe=@$EC~xkL7JvDTGuKmOLUPzmQj`1OQP-*0>OMLnAnBw2K@Z zy=%ATu9#pqmm4hETeIFNLf54&8yI-&+u8yFfl8TDnSt?e*w5F8rK<_&`M_laWX=V1 z$}${o?>_qU;~)OyCx(n^B$+m9UM!_(=DU%dU|i$4c_e@GVn?YnQk ze)rYa$9G>nz5nvz?Q82DOuJ>kToR25hk>ZbG~VPw#G90>a=3A_LH*&~`?L4f^|-9N z`H*B(hd??VpMLpgf3$7KtKIe8_4PR43FmPff-a^|gYY5=qjg;wu#_aijD-^&4%g$& zEneN3dRmU&&frV|+qSt^1UCzF3)5Vrpb)_79l=XrAOfKP49(a$3>%)B->Wq#Gs+0= zW>MSOEIcam`C>2k@D`VqZj4CS6q|;6nAgiBvoG4$tu9ShX2b{}U|GYD+I!nTA)K~Q zBus#D2^frJ4kuGpgVt4KU>cdzILu6iAQK6oU*awv-Q59!(D`@%@k^2>$s!120Mf1% zJat76ErpT5O{>+eHNv{}ZX0m(XyF|79zDDgCL$pb3;`#;2$3KmlVAu%N|~v2>j(gm5CWqUnt_FfhXN$< zAw0Ra-kO@KZWLuqFPUVBVayXXM|I&82^&V(+CjLIH~TKk*V?@sSe zkFS?`nx-)y@83PWecjIQb9MwfJ$(J{!#6w}4u^w;0>a^X_w4SLhcu<+WW#)w4tI+~ zM&W>EU8mjMt507c1h_vueu!xEJnhCi;Y&_FHC=06kTa)@fUUJ^wTB_3VHgp}ni0b7 z)%EUh7}GtHK29f+}DU|u4w+N{nv@PAVkyE!owR7o=;EO7VxIFHGSIj5wQa; z2rHnvH?z>L<|hVPIdQ#}oyc+LZLI8&8Chu?NtE>6&{BZkX zoNv-N&)0X?H!lNHYt5{wo;lIYbTj6`t%C^gfZJ-P*RA#F6~l*IBHVRL$POfwXej$K zsP(Gn0mC&A#4`oKifhO4P^ZV%nwxK1Z(Uo{ZCh(q?LDGvZy3Zu+RQOCj^SN#3w2k} zsJ&V}E!I|mCMm+L`6)n?gfJnZqcH;X7GVJ30Yo_>rIZ6Lz?esX3zC;p%0n55(Om;n zWQYLwMywtN5KNPx1d|{u0u!5RKoEc;W|E~gBoYV#>_h=Th90i2JtVv5aKXeG=Sjj%c(A+tn;i=^hwt%o@? 
zGYkJp2h+`-x|%5|KM-XX00ls7tpl1!iXe>$=ZMe_j;qLrr3=HY_ie^0C)U=Nx|e%Pt)$* z&%TgSa?U^c(NFicx6>7W#`!P*wZHoLbZ33J9^u{Nc{jkbt1FV5|LQ;gy~x8~__g2o z;g5ea?dh)QN1wgOBcC32r|l$;!|ki|>0d-1wPsh$n8)WoEMLB@$D=mZ!1wR_ybrs& z3-_#RJ#UZq$1Lf^%Y%$N1B7ryDfX`q-R;MSJbrYoH5gO3=|lLYu}4Q4?njDWgWL7|o(~TbVOUg~;NQ)rx@T764#d z&VXcKE0C9gvE_))*_^vIFf(KfXb|EMiJ|1&D{bw>fAe>5h+UWwDT2&EwYwvcFf*e4 zYWzwepu0i0AaV~A&H&OHU=T5yf*T`2&N&=CI$*Dxs`l=++v>OgYD&X?{B+E>=6zcq z&3jzrt^sC^*%1MQFQ6|70stf@g2cc~hz8grK-E2>wRSPw2RMZHz6MZuK^VfwYzdF% zM~4lefiXH05R(uN`|F$C)&A<{<8gjI?{~X-ob$f6ZT0ix)9Lu|;dpv~+ScA$_okqy z<@n7vU%mPATTr{&Pr0OVn39yP0m#pvy_A%{^Mmi-zIyrexPDldwOV2PwZHh&+WVjW z!N0g2XdbWX+SOx!_u?mi`S1Sr-}$$1p1&Ms8TRngpMEx7U*mjzT06F-+py`|27PwKlW|WaP{h~o*q6tJw3Md1 z0hwCJ5(sJ>QM-9J>{%swLS`jm@0xW zGXyd#b5b|J-dea5;;E3Rs)xFRN53G)YOUcH1#!cgg=PR+*7wp6!bA5pbb)Tf zW649wJHf+{p^sQsU+WeeA}*K^M~KD=!aV{36$w1LSGb||Y-Jd3 zKYo65d*_`VA0PdAd^$cX=WRY*0m~rs&CON7)1s&EfBb6OHi|UOhtIzEeG0m|dr@}7 z_4Nz*5&!8gepc7>ZhzH!{|mqV7vKDAfB*OY$^RoQxu2HI!}-*{_u2EZ8z0Yq&V_#P z(We=Rr)#{uz1rUth2^v%4G0inTm}GH@1N?!o8{r{pML!n^dr{$_n*JXoTtOJlp$f1 zt7liYxw)z~Lf=orX*oxadJd&HOfx8K>zdPW*zGyVe%kM@CdI;$t!=%Yz%0y18QXGN zx7EEv7zZ(Biw1!KpuJsKJ>UTl%{!YOSEdC~8z2Be9Cwt5wv9T{}kAUT@x$ zKOpDi=G^-o&+~a~rM%FoqfKr)>fiYL2bfi`iKp*v+S%dpA!v=cuh+APjwSC`QL=AP zjNiIH8;gADmL%gPnv~6-9Iei)c6+)gB>~+91VejP%(B zSzS1nB%*pu|M;< zJ}r77XKOxZ%eF`R+uK#PZ=$(&$i#8!xs<504I=;L1>f0TNuBIoOIdKqQ-T(ba|@ZIFei$68sLW1obI+?K)(GL3y10`)(N= zCB7IeJ1`QdJZsPG+x!AX+wXQysp9;Crm~NCxLsMfeEhwn>okb)AXZIuZ}G3wOtw^s z+cykHg8I+;Dc-wgK?TJvsO1}Br^~oW;J-k55!(AMt_E_GGIN1w+ zpJXV%F7y0(V2~{h6wwn+Be8JLz-#WM{}LQ)e3bo)UkvB}zEyfj)~oBu^<(I2U!gOuErQ9$ zf~2T_6~GC^(zvPbbgWeTJ=l*+(W$1Rw+R;G4@Gopbf)Qsfr-N=Q?&>3Q*Y0yZvEE46DoSUzEcvV=;Sob{Rl@@SydL)=Qm%7TF^=lzm zkq`p?E6b}IHkd$bt;D)`aWLvz&xiu`>Ti>S74wGPUHxP(tDVuel;6ty=`N{$39gD# zDC$rZjCNYCsA~mo05t$CrAG9EzEv|Gq*fpt>}n4OE>}5zj_g0G8z9>F|oCM@oTSkv{-&svikCu?mkW*6~4f zXm9r)=z(v)bZ?vdcnu;A{HhCSrplbjwH%)T7w#m-NcHpf^Upa28nV0>&EiK3sOq(1 
z5)$w47jIFJunc(#N#C&&4`r0|XI9{|=c`rWe|HbtrrH;YfD0-PtT6ePwS#iz9{#lE1&**HG5PZwZAwN8LC)Ua6Z3%9(D2Ra%$+}A8YgJ zhyTCI7gG%xr}0%$2VZ6{w=dWD`cK>czi-G>NZGTBbHfFOKq$g_$22x7pV{W)op~BI z3H_3IIlh_ZkW8a850BdqPG($JDi~Q2;u)IJC3|d@&I_b>Rs;8Y)pCma3S?4*K82Ft zG$|t@La4p1djli#EQ|71*2)E}b&K8<*yHP(4Q`UVwO)c74}IJm28a+rWM!>{9G)Fh zdhq`YB&mLlChG^j<%6QZI5?~g1mzNR0! zN0AVI7CQT)d_EypaJhMSc^u`hSa3NudmeTs`AZnuZL*lNl zN{##W##0WhcX82eOF!^l&_{nI2Z+A5V_(z&h>ruOe$5X|DylspOm|+&E*c2>-OL7C z%NcA;HH->i1}IR9eKA%u7mP_N@~XQGrIxTWIdrtvudvqW7I>nzK4(LL=G3y9>NiCX zQ$@GSieUu*XgTRht5shISrdwC@H3OeO%2!i!AWC`_CNz5$INS7c6(#lXE)%0H&WiH zA0pcG+X7ZiY>?CFRiJ6O+X|wG_NHp9{sXjE3wB5C)+y6n>!2hiAd%=%(nD}~E7Pi( z{Erz&l}+3qvq#2cqJPpPrm|QEYh@+ls!E=X~c& z+vgjXk{{3HF83+!J=GhN5`rw9<5{Ste7k7GYF7u({VB}|A=w?OAF`;`YIRj~QWoS9C6st>B z@1_patY=nMw`(o3$OLN`13r~;>cIzT-@WH_;qpY)n+1OSZqY zKh;2xhk2L=kUBo|u*r*1L26V1$pblVq&z`r?N|hhg#1DTjctJ%#dOHRtuMb*M`Hz7 z%@(}P%&4{J*}f%r7-`tQFG2|~xyn(>^Yim6o6CiAxhNDY!Q~&nFoT?C5XZvj9g+UA z>z%dut7^d2Mg6V*&6TK&mF<=Yd7jERMNqQx(V9=>zr5|Ui|zA7t#^-F~#w8qI{u$nRhwCclo1$=&&7OnYp)1fm9fg#Z=px@IkYUHtQ!RxjxM& ze^&IjXPYh#tW^cmW~?h_Ni%P7R8M#qIC4sMPqwm+vpXY}KL`0VRraZJ=|CeqShR&? 
zJ_cO__Rz6W#(rC{xu$%U5c(0s3?GZ}O~|iLZgTAbHy!niio>(24oB|%VO$m^3-___ zAfB9um+7zU2ixeg(!Rk=3=_ENL+fgvD5(WMrt{q&MMNjO5|Itik(Dgk$}?a~7AZ1S zT#t7No&4nUEfplK!;+e=IiKXzD^O(C|D=6SozxMEC=c_JfpZ|0K~J7K2L1U}B=dZ%HJC>39*|-98D`4?$s~;eb|o zZ2h*c&L-~|>Qc}jp z&Gr<<`b7Os*88e1CZaCGD7w6nipq%c$@O_j<K6qk*~MFC-P zXYOUv#d>Ive1;oOajEX}FxMM;z_IUZjbp2p9?iE?YI!WZZiWZ%u7#iHogUhbcOCL= z*%m;z0?g3@({)|~KG{3IK@=Fm`7+s6`9-b=C?P(m?|i6_aPstCK6Hq+D^Ss0nc;UW z5B|=3lo+O4OpQWh8)|FAu?Ya^Ew%$JKY8)AqPKWOTff0>{xhw^^JN}9v(dkTl&x6- zcs>Hl7qJa)1(z0mq7;^N=IFrLIV1a*u=n*iOa+G3v0?I|*<5Kr%FfI$*;+a{+eqpN zRFVqqfy{ayP|kV%eS%=Y< z2K)lG{2w)TVnFHaeL%o-H1sz}23};smXM>FRdz6jm%)HuN!o;RB0b+OTk2`a$Aa0A zw06KWVvd~V!_3bN2wLiAfPXqgUF{TfWpI*^W_P(+0cc;2?;?9IeJ(8ErT6t3SBD@o z&w*PpiL6gbmz;9{Ydq~?jfzkxh}=z7R{eW^F)={ii?2%=Hw!Wgs-xW6`)k2Daeg@^ ziv1}iUWCwt!;Af>(>}`U-g04k@!@i`<+A1MZ~1uC8AB9hjdJO0uD8`Jwn8KlC7O@k zj~(n#tZ$b)tPjasC(FlQi06$u>C1j%A*e-4fuCL2PrI_XySw{MDU~|yKkU!nIc0%d z(r6tA4xaew(Ccy82mMmU3F+DA7y#C^fdNk-$;B(iSUN7 zMy4V^6EerxW|)4Bef;l6igO}uD@L&#Hla5*KkSABHgFX>ipv+ndnu%@#t#q zbFOEFFpfgtlO#GR#v$`7#yMST;;(iPS@@~iye3P;UpoXL{J*m084$i};{6LrW6=;! zPPo*#mHb;c9Y4GD<)HErRWyKb44-(6Mln7lNyWk0kzP$9sp?ms(yGD_4JrWB#B5V6 z8U=3EZS$P$A5aAgF;iv5K6P;MF0+0K5xbj+0I|h^s03lv2o{*H4>AWMQRlAl>^Tx0 zjRk;A%X0|rT~!bKk`51*`Qm?gjpNOQS$&NP2R1zspv{Nhi^U@EDg6gkK0W$E*;m^} z?I%u^w@03c@id>DY!Ih1;eU^ZEAf}PPa24yJ)-n<2j{t$N6P0dm${c? 
z$|m^P)9s7ns>{hL7pLmB9~@I6nOe@Y@6=WVmhZ_@CI`$U^`(R514f0FuiU7;uQ>I~=uJpY1$5jw z&2)?pr&9=T;zeT4l*0`GmC;avxSY-=YQmfJ3KJSg84DT}uP z3}@A-dyyHrXtam~Ib_iDVoipPCi{EW#Q?3cSA?CYIe-rKE;m%)v+12pUGA{MmuOYu z90ZdyOT$nuJtT__kBx5nO%}ZvI7t2%5=027AQW;(q+cPa8`)7;Tqdn!* z59!~j%$Jsz1FdiRANQn0p8tDxCULnIwL2BHQD@V1vZ;I-b!n!2&ZMkRPgxJHlp0ag7rjOe$C4BWPZbd0rnQ5!t&{JGQ-u8VxyY#c=4P&9LsN;6(KYuJ zpxWTbW8@Q*b55Mwq=JF?_pjN9$G!$1_J~zn)HI)BKMYa3uvBk z#oM>cT9bkc{ltcBnj49ceqpB$p}SgyllpS%+jniVl%1LE;B z>(1q4zO=epMm_KZAmtf zv&ilT`%8C{h{D8*fca&pL2!n9m*m;=NPxWiq5I>sq}EqX8d4?XU+zBiy^LA>u*a}Q z16|c@zok@UL-)JFehdT+>R*CdTUM;!z-Cj*)J1vJat9hxQ2|cIxEMa=dP=`FASY81 zDLn#US6LQIO-L2JycrquA9cajsjx(IX2{tASz2szu3nB~0 zE|-QP4;KC;+FtI@9t}|-MC1CAe?QEe?(ffM7F?c9d?5x{>z7c5a3vX3Pb`O~j%D>n zBwv#5v9leGdao2a=XAQMnlcYLHZ>5(>hwoMY93JBu|>qmE9_O=|8 z0ACI8S3e6VM9U;7s1(+IdnCssq~QY6(-o}Ogft3-xbQ%{#1xn)2?8K08M(SK51+RW zMd&+K1a(x4Gtz#SZVY;0dl%G<7e8_V%cxBVSrc&W}b6iKV?I+PPs9s za<(ZVqMl0z9HU;}I{0mr5VrRb1$_$?E#aRI`tv=d`SRN3QsSkzg0gO-+Er`4+V#HN z?U?r5|6kTeA8^vCDA6{fx|7Z0zOiwy4a(T1<;Wvy(e{ir>hYD$!#ZevWeOk;l9Hmo zC;K^-c<9|1TIdUx+s0Q|O!ch{ssDT$j;mS@k-NtK#VhI#{XfwXKd8@EWl6~7_OctU z%Wsk#C|YFoLQ)-|ks&LjRjH%e7c{Hh*~O~da^LDT>2@_M59l`NAf9+$&(NBiOZ4qu z46i@^IIZ~MJYWtdR4V86Y2(pyc5!)Mgk=6j^GHU*82}RuG^W+Ty&->|F#N5aCDebdo&J|aQCSL=aAWtBo2q*U>M|&$141cc944pyq(HwWZ5ASj-RRS9>y(*yPl`O1HdDi?d(ds`_M>gq~ z_3;&e=+yQe$EjL6Q4~Slf4f0G4QwNM5p74M*Ma3hSZ89JaYJ*tud?%D1gAJ(LASB7 zN0tHP|3LoDNmt>2=|Lc7DU5<&R)Z)ZWJB{JZ}wadW35VsKhV)kN*nDl3H!PQ#%d`h z0`+_{Y5Ct?|9S;1o#96zdq~=BvuZs>%k05~02X`T zY`a&2=v018#23ONAs!&(1X$YcO~*!Pe%JZ_)9?KCFn(cxB+Rd$JSw4l{_oZ4DQkRtWgVfikKm2WoaEH7!?06PnB1a%pX+hqdXRD)Zt%^%S|vcDY@pQ))!AB%wBruse@feC_?2x8Nwejoj06yl`Hp ztmzQ=c&laq_uz`=ssiig@&LinS5Gh@mHo1;|LME6Amcks$i&qErfq(N*t_sg8w}QY zbOtcA4%O2n0Mx4gq->I4R5vE^{i$c!bvVWc&0+`Fx~c>N@PO=Tb=}=h38SB`?D3#@m zTXB;IUc2+T7zr?(w(C}C6a4S}RYPWjCSpTthkW*Zlv-W-a!An8!oEcGQFR!bMPa?`r1@k>)F{v2~nDFR(Z9yb|y6> zD8f$5gXF!nHPdnlOP^`FJm^xs*rh~i<{EKzeI6rBk!PF2d}cri*r$VKp;SU3KRpGl 
zuLNf2Pfep1tfy*OK2y@$EA|l6`{-PJNPeduGTkko)yyoQye(aDC4&m~M@oqQZgoG7 z`t{HJ5X{2BtqsZ_SQ_^)h3yzy^LuTC14C(t62o?UNJ@Be`f$kA_q%BZMCL? z+Cu)^79e==V+Q|P7t1WL{09N&k?BdJsh^ES9KcdJHSc+JRy!CxeG3d`cA36kAHP3L zZy_;1$7vI9-`&rGUhVhxp0uATBhb@Q%M|VCBDLgOr_rLhkM_Pd23D|89Pq5oA3&B> zP5Xp9rmw|xfhzMLfOW1q0{}4Sj52~~B(ZI6Sm>|#cXVz>FaO{PMbo9{lm=osE^c}D zrE0c;pLg1eE0)@hfkElMcYGB7h(!Y+pKR_t!ixE+$@42n!xlzIw=U)*$hTJ%3`G@- zGvuiZuk_>ds3Q917=5x_;GBc1@z*4jLoeqo+w5>Jek6C%gx}lklJIUhB}=rB_c8NS z22Y`#YwhRls2IB@ia(`=0=_|fk?E>^v9?pp}iNrsCL{_eir~rRLbrq7d-Wk*`MH4o$qj8 zEJ*+M=ii1}wDHdY`^^f4=L5nj3V@uu!ulaE(ybVmZ#`&i8xhTKep&cK)Z~>o`7C2u z^c_yQaomP<0yC$*O|fGRnoGl^8JVax>QAdQ^puJ+1cmJ3g-lCOHy&**1^(y{5RHE& zga@OJa%$RFYDd{`bk{iYg!*{xIp1NR8K2}#0d^r$B#~DngBZFezcE|C5{-+Fw_Qeu zd0=&M3Ev!vRtHu(N{fV8TiIY)FiYkR)KHp6Zsa%2WRyh1byRmT@5<>7tsJg>Wa*sj z2CWe+?BtoW&ZYK+NZI$Qy^pq{{;jwk7_*9`|2@i`3Ra`Vd%N(eK-g(v-Ll*4DQpQ7 z{(53I9s#UZ)mA63JG}ighjV`VY@g+>(*xi&RpV{q`PYf(YdIm;w6SUNG2g!3&{7<3 z2Z`+dcdXc}xZIOtJv8*0H9}#KMi|F1nX*>v)v)B`6?S=&tNV^0Bbz+3`!%WkYr+3d z?+=+o85u~viI0!Bhfo07$KlN|3-KN=01~4UFY<>2T;z0vpOZ6?$st`Z^IjZKOB-or z>8!;qmF%w_Ys9r|j{M~%LL)V9Vcb*9uL`B&Sr*5U?g3PdnG4}>%Z2Y|(JvE7YUoX$ zo<~krdcw&NZ(C&H@8k@Gw{qO+_{qUTgZNsC@1mjcT;1dRV6>c;yxZ=<$xjQo5@S*$ z*g)%&U_his4yt3$Z7*Ok?s76Oj{d7-i@rMg1D+mgwgF29(R%#*qLffQEb_GvM~mV2 z!soqt6~_7%e=`-x1g;&k+$h`Xod0^8MH0;p^Mxn+x`O$&Mbn4`O9bXi;JW3^*lHe? 
z^m~9jn-x9tKpT1CJqr1}L5ltIK>6g`_em!EttOAYq9nDR1nhsJZQp*8&}p%34q%)% zrOyB zW!!;atP~biC`ni1QcoY~vQ$4DFD<3=tT4lgR*;%rf?5E>>>TyG$*F&}S&3H@N~PBP-4C0d}cmQTvSsIwW`?d=^@EGs5J85j7Nn_90VTj z=lR+Qkc!oTiZLfJvX>|(DS%Q_g|4dIbguSAbW_Y^%}MurVvS~MZz{Yt`g#j~Fr$Qv z{Z?_9A75CpQ40>-Mu|R`gTYm=bjc#BN?TntX*+7hOZ*ji>q@?_H%wd82q-ouE(IR4 zE4N5&w;YJ+snI-cIsS68V>-k424B%7>>_K&DgC8l3Seb}oYy9K6F{Ui$@njU zn=hvykA~Ixy8g|Ok~Fyk0KD7b5bw{{nikbHVN{gDDtMblOSVntr2>C5xADTUY#5|f zr7Xps!FN8~>Tap@>qjao>uIN0$7bf6=1-ddG7lNqeWo2BGGpML7W)%t1 zKKL9J96c)d?CcA`l_GLht7-w)i0r6weM!DsO_BeL3(Dq)cj+Bht!%~>A<@#q1 zFlU`z4koTYgDngtZ)Gz2B*{Q}ThXDVMa!*4%=L>uBFnzcSh*$kw)aX()z7}u!$X#) zs$AobG4}z+gMXeqSTRB~t}V1I;Sy2WS`;yogpQpk4P2_v`4(v29)hG*(B`6fo6)a} z3x;&QOLF8YOcD#pIZQ7(_Py;)jRYs438U%$R^m2*fT_o5ApSa)l#vo&4_rP(gDUn4 z^2vZgyu#YCE@YoH)jeJ+_|EIzW(wa-_p)>IewW1Vx!RIQyx-~0OE2L}>b*Vk8O6sD zrHd+vSUFu;d_|D9ANMXp%MbYXIBGK{jE#ACP<5yGO4bzG;{=@r18G%dy~<2)Lw@() z*B~DZ9FxqtB(^gd1aH9L%T}!@%OE=AvL^2UL&qSF8}DgbK@QyWp3BRet)lEkG?KKw z6va<5@{MkTSK3IjfSZ2`8n9sxCh>@Su-4rFNL3Gb8zvB^$n4Tmw)A)VZ0k@_JQo`# zBH(zG1uPVMc+hKDI{ z7Z|X$t$d^|tCz9&*1f3?i$Jfg=>X~Eq1#tnf#Spwx`#176EL6 z=M1zPaG&Drk6wRgpJ;7UT=|hlctrJUlE24-#zaMK_-fW^4O?%h)w!F$&&J8c$!JOK zBc=Q*YN)-|s8*efqMTbM!WfC(}7NZ@3r}k&#Ne zB7O_FFDQ*d{?{1{$R`zs_1*)&u;xZoNOgHU`TQg}P-ZS;f-WV(%akbv@;v0~c(K=n zo@rfm-$!jhy)EDQ1=>4_&}dPHw=x0*0c0BgJi_pkaj?y`J2V=}l1aY!ox-iHCoFR+ zx@fAm=ww7c%W6N)h<+8zbV-gnQ)AWyvaL>vR!jEB#_3|`wy;0t)=jD3<9l&GDS15_ z$-aXAJ6QU9bp$Q^v+9NgSb7n_&mjU(@2t#qYE+mu{E9G{m3)?Jjnoo-x!7UwxYvZD zmQVf5|D%5NwBo_X+DWsUnoC}koO?(&I4LWZiH#~-eGd?$@;5nHS30&~&As{Hky>5h zU6J@iuGxX0;+L^pg=(0fjrpyIL`?|#I-)hQ&T*b{tx^<4@5fGde3hA>6WPJ&QG&Fw zwgNnmczkg|1$ZSWGer)>_Tyr^5&>7U6&QuxOfv)0#4x`m1HVCvGsOF%1}8TEE+-sb z75Za8xtwU+Wavu!zWt0Yc<$f4iV-~$POrhqywk5BY>zd%0*1rbqlwjMO`q~w z4M2w^GkeMhrg3~(wx6e^mz8!l`1to=%9JrVLcu3Nqhp$Eb=uQt>IKTLd6T3{vmmr; zihbcIwAR7#qzx07z_j@1Wv`P1VIxoA=>~UnB`0`ZGzN}(38d8mC|&HKlckk z?)ICi<=Jt&w(;04Ez!fSbkAso3Dr01IW9y~hiz24b_jWHRG3RXW;W@R_4e5C9@Wjo 
z?>FM<0ANOJ>hEE&8*U|Ndv+zG)9-9&UQL?~+*i}TXQl1^u*B`gdmJ4=ujJ41o*Cs3 zsw80kuBTS_#n2i8DnkyBe;77;qN+?8on^`Uoh)*%4y` zuQ_;kJ*UPEX7|K{5GWX+qDtp$rjaUpAMp7P?nQ=_ri=gRR{04{x{3k$_2shiQJN|{ zKP@RV=mpT3l{M&2wd5E4feWQUPnr`FcB1mezQ8V5tld<&x(G<663E0kzlFuf?(*m{ zXRv2dYHAnuVql@5OP<4Tay~g|{PftHdqu?s!vj<>L{>&@?7j`Ok@KI{b_s;}YQ;K~ zT>rNmFd~^U!voNoR;yrI5&Yo(-Nf+g6x%S#t(EuVb9Nnja3k}<>U#_8J7}Z)(2>dY z)OZScH29vj_STF=U&Ai~_(x=mrXZLfE_0vffMQ9@Dn4Fc=LTqtVkMblvEq+dgYt?= zf2x03T(U9c0z;)DUn39zbY+=%tXu+)y8L`H-@e()*qXC>i# z*+Df=U9;#x#c6Tk{DM6&@B3AQpOSB~6|xc@^FF$P$^a_t<$tzA$=1C2syedey*z2@ zL83|c^LP6V5~m=eGH4zw;*4N+0I;NtKHLAOf?qS5>TNkED)&+YZc7p@H+cU;l*Y&0 zr*})ig+`&1kQ_+7qvYAH=H z+MCcqAd!8pTqXM5l!iPZciBTQfu?nneCz|>M%J;P5bG=y`0T3nMsY>XVa0~In!-ST zsdro_YO7xU>rCFa!1TD^9x0+%$K$VA#tVvJi^s}q17lkKCe>22zhsn?Dw&UW*R+u7 z*fKWo7ue6p7J>IzdFYUtSv)Z6aEl~d5PmQDmAj5sRtTiG=>8l3<_vJyw#$uzh z>};%{7JOlA)o*@%MX)&1Ji$NP2-;Me?gn8fZ*oej2~(V5)g3DzV{hfJ=PacJP7wFalr zGHt(y68~DNhOf6}ig>~o)vP>;yqZeRH>B0Ze`B8x%Rb+uW0FY>v>qRAvZz|M5p-r5 zSy$TrOR@?S^WRPbwtm>BAATG>*Q8F3wue52hMK`_>vD{9ic1e8&0B(qdB8*OvNuMP zfIJOO(Wyux_s}GCT<&oC|tBgg&IW-kJs%vyhrQ z1Zdv|Lqmg&V6tsQ$Uey|bUGgj?JT8edk1!R!Bp#G2}axYWBnJ-IlKsb)ka(9i})}f zdR=(77P=-Qp%bj2BA(^{Z_LtbG98u1C7&lSI0{`q;J5|)k0<*#M?bfQ;CrJZBLm)e zIF=fV6)W9OrfzM$rk@H|F?6P7gxxVMFRPsFf1R35^O;En-jg6M{;D;VHVK8m-Y5h# z1^Eo*l;#QuUYK2cJAB0v>;0ob#mw$~E4meEv|)W86aBdqq@^X`$au}E3*ZAWu@nQb zU^^J6`7t|a6sflCPAe5y3pD(Rq5R#=e+X^!dOOnOmt6fm$p$)U_Pw?6dlIZdpAqhT z#kEFutfovd2!!$Da^ix~X^nTJpFh?gj)5iZ_+Gt|AD`AL`ee>8m5Uf{EI;lwT01() z(dOmPl$4admnzs-Tp`EX#mn?^F^jG4(SYJgxWB(+u}h@FdHH&+SG#0}kx}o6>>KHS zm0-ONB5BLW9}+hot_c1ipFH9XjpJVlpRkl?Ok_4^)9Dp0q6H9-VPdN0uk;X4_*e!ux8sZT8~t@LT!X-RNo)0FM^b^(s+gH`>M<1mS#sur)R z7PjQBQ@PmV57+pVA1LCIHC5p^;lDyDkGef)Pz%r0v_Km=W+X|PT?epaWRU(mJ15X< zrKrHH4RM1F+FPBw+_cFs$$rDqt2_>DO$DpzPhf{Q4JxodmE@-bU>K-X0UrYrUx%<` z418NWKd%26urwQGK|}2`6-3{Or~^Ro)Nie59#J;d1TN}UDV}?eM=d;_Balli`zet) z$`&kME`ZdgBAfHvEqFU{&1sayMRrNbP_#xT14JO+A#+&c^$67sGj z-St<8gOjSaKvY>c9Cey;M~d9B6Ot5yYUo`CUz{ 
zO)MwWtG7SXRFu(4k3%a4l=?wX+fhs4n&Gm>*yvqtS!vo>Ej?g#B{T_>0aQm%JTJD- zVKr;}X< zljkuSEXwGUV}~w3(r%dCrU>u-dqjm??6L0(VNc0(N`Truc5}K;fP0^4w6xu`uzOKc zZ-Q1z>d+&y91Cq6JT4;-ZKtvF9?uC7;Fy{4#&pSiFB`W1o%N&aJJ~hNuq-MpQ2GmCGW9> zk<=hHu6Sg+B$}}Aa=i_b{ESOWc-mcERt|-wo%=y$I0^$kCodpW{iSa>bOMCq6tS9Q zLKgMgE20iq5I@+e*uRx`_{z~QZiS^cm32TEbp}GyRoeJ>+dP) z5*A+MdafT+90h>qRN(y2?ZeZV z)3s;Zac=oRbrct&>>HzLHB+6Q5m_w7r|UnsRos-=EopVnvE3jJ=bygh0wol<-Vkg{ z?~_jgJPuWZJw$g*FXotIamwf7-@z8QFTNy< zYhtpq>pS+ZN%h2eeAk8<12TF}v}bW`AUNPl0m~RCfSU4^yp6SSBI!o703$Z8F3}>- znXoC{>u|#TxCCFoQ@!@cF6a8x{LKCEpMOegO9=acl{VM!>(n4v-*y|xyTa#j%QzSQ z$~{eUd)nTz3h}K5?@tdV#&OZE-8 z{9%#hL&UWVUbg?ZG4xk$a>te{?%>BZH^G?-asdv>y0Pps4i2vAp$Tj(YVxJGJtByn zwztXqBPU~SQ|s}!IUQge^Na}bR7QF+H>Yf%AH*4=p~|q0#K9PUwu;hx5r7CeJ@ZE! z27bsGtECK2_m07zbb5`X-|3FL<_0)&X?We+X2EjUqlRqHN8nraF6;{itrxR*?K1Gmc#aJ#A)#5@+kSRc)mEKA@pKhvUO}MmvnJK zCVqdj5fXa3e@$C)>6`Z5a($l`s3hNPln4!cV&s%{L2>KG60H80?1z1CXr@+~Z#}Yk zydpN}9Y~ZaQP=Wq%d@kwqv}Fg#jshGe z4*uZ40$`vRMU^}gPK7^n4+2>_fcRxdY;02y#tmmHO{Tpk z+=}};^3;Y(LZAnwf|uw>$~qbQr0F$PTZw&L2zCb8`w3Eca;ob6G33 zj*~CW7bPqeZ|~zLC;aYg_QPeJvdwW5pjT?IH$|D~1k>!b;mJU(cw7uJiQ?7>N2QO- z4u7_+;_mDV(W22OX79;o#AIY0f6b#3EwR?7sDb?IlqEKf!|8Y2a(sMz4&&Xd<-Tx! 
zbGiF)&pRlqM2QWyAg@;zshD5vc8{m>X>*5AaLa6g-b!Ue-7M>(@0wj=w|5m0m?|ynX{X zOHgWQq7e9L-jM8nj}qLr=L74Tz=M=#2s?bbs1(E8@N8qiO5~-&N(X7vNKQp2$jr=M z`YFTpJ;{EvHnntGoQQBSvsjrsUg~jups78DYf&J)7hFc39vi7u`bz%VY0~)?i#OOxiVB^<%K6!iMCr+3IN)2?`}j}b^8ZXYoLEUY8<{6C*{XN zLnBEPp^>!kL0#=eixXD#0GyC~Qle_~9y`lhkNrzx5s+c(% z!;An^jG1uuI9gNIO`C#siMoU6Y?^!TX$}OAoSSY5K#UsX~(SeqiainTm8N z3S$5mIE+_fi?--$O^*}ZHi8;M59Na5%UzQ(lpz6N&pzl*hF{LI8;|nzY(ZuKB+e~o z<(RTCe&ei=r?-{w-#{et#@V+hXDIR%=)v7`mVqy@e%#*P-k{H8IsVWtu#|}@^68+$ zip~o?3jRgaDNQz6jZIw5UK<-6ZlTvjxhSX*LtXKYZZ&w7u#VLpukRdf-TGy0#e-~Z9WaKQm$qz_v~u5*A(ap#ML1V z=#bMn-^T9RmeD~Q9bg#4$8YJ?dqs^EB>Eh9fxp};GGQ*( zQw<)@{ct)1qwKDE_nYyg;DDgu+5#KsGSQ5(1dKdtOlNgW`Tc8oHCf9=2ST5UOHe!;-vzkvmJrnZ;Ok2yAibLelaQd`*v@e>3agy z1a}75Gn+cE5sT1@tF;c}p~Tvat%vVkxxz)lyzqQ1B_Y9~@=D724-}Oz+$p`A+8iiG zrRp5cgGpC22-x9-pa6`ZEILxOId<4Wa~#EAC||^H_~0RH+LzQ+s+(o@X3vtDzi|g^ z!sY;HEfhWZtJI3;m@o)F=9=%!fp<0Cpup8=k!sp!)r3lq&u>P42dE(vR75ls5EAaZ z!`NK&ks0?Y#EvbaC^lankp%q%%%>9B)um+-LVosM&iX5&%fjjOK$p*AqwlZFOd2CU zfgO-#;h_}A0sng8{hKmXM0mO9K>%Ozk6uqFrE#fdNTHx2H?XJf5oX)KN}l?R8uCWtIfC;izW*X zJk2kiYMVpO?3W5lo=y)RUI!7XJ^K#Gas}?Q6r6sD;)6AtF+#pxN_#4COR76lqTmY< ze80XNZlt?*9_n3`a}Pak)Q`h%85{Z{5NRm3Ih#~J@+9D1Om0JfI>MN4yz(`Tsf-$v zTBY}PUEk@`X7YHGkIU^pbDez_M!kH{mj5TRtnJS4?zZp6$G7|4%B;vgBs?SDB~52I za&qZjwGxekiP|Fo?+h&GDg3%w&rARS4bVcWn5+(~rTS&X_{G3*Mvxljx5K}#cI15% zJwm_XRdmhoioUFix^xYw!h7-l`oZg7mj+gIQ6%sL8e~gQ}j*>z*HDV=*m4YwO~Ya+#sq{YwS$C?U5mLXHkSwUVA)|nBr79M4JSjtFIs#vrrK|R#W zpULpCMp}|yktsPZ!LgqQ<7Ag&xsmJW*Wea1>odM_LLlR(fo|P0=&e7}#eTi|j**Ia zohkYyE60Ai4M&lr8Hvm~@6o1b9zZm5avBGJyOU?7qJrZQf<4_Rey+>qx~+SKW_f9% zan{q#pd-$X#sSf&d<6Qi*K&GzIXz9Zsq9maQ1@Z6#Iv|FhhU9?a4PD+RZFk1FU=Xv zwRck%WqGj{7p7$6I%u(oD&? 
zkYE;`cpe(z`0A|BDA05HX`5{dg`y0k`0bihw+*w6nmF2`P$Tly){?Vw{ z6!PO?de+;_x`N2!nXlKr5Xi3lCr9M}QFQL{O#OcxpZmyVF4fYoxs*k*DA!zEcf4jkNvg3&K~DH_Brp*=XH5T zQHrI7IhPvm_*lN8qLPK+iSQB!lF^B~=~d`wp)F<1`qkZwVl!a|K?2^g-JwC7nsWFH z0;&UGQ>xi&|I6wMDRB#UNmG-!g*nevq^N|qwX=OU^w;Z&fiaLwvNWZ-A~K|I4qdSy z5qDTmZlez_&hL)|SGQa=*B7*dqnQU~RqEY7!1G;*R11?0&(F)-F|@(sos3WidQnCa zC*dJ4kAzk#+iCOpvFQyaSABHLHKgs2hj4R;kjyA2^y^=jhFOXOZ|WXK9j@*&RJfbA zDQy?yv}o7#QuS>da(H}jc0a~;D(RFFG`>i|W&h6}tl~8O>}N#fkJj(au$;FD*aJH# zugSWTqhs^S-MGngiStd3l8usQOgQbnyCR)|3E&C?sWxy(U2C3Mv z$9zn>C_*n}vadLj>A_W_@b35M-Aft;;cd6hRpf<2QtwT%v1t*`!OXw4^Qa6 z2RM^@;@LFS(#lQ+V0b?TOLKZ{PNvt^8GSa!N5K*jqFYvI`qwNJd0iOp9hgF~1wTKx z6-@}+x2i*#OZj&N^dCsTgf}Ii_~*0X$3K@MGGTPFX$beX{VXFzULX!Cq0|L6F^0a! znG37G7G0x6pir((qjZ0TyMChm9o_et@vgxIg-Vs>8#3ZD!g(%Br2Q~*Jozr#=tAPF zPEftA_pw!zb7XAIU_b7ZU1E&K@Vg4|b%GY)ld9Loc%bUEnYq+>DA>d*Q*_Vfuu6=4 zmVCTxfe}QrbWV$nEi|0lP?h2V`b7L;M$!r3`s$E~yD~|lJ$KMK_Jdxww3^6N>mP+U z<>vpK!-H_bi3`)+m1#y@U>z>?7hxe`wn3MtK=yf~0SC(Z(22GisEedPA#Ehdcc3D&1xL+s@i{=-CR@8bY;`18{bx(S30BJw13-8x^d8NA%WkfL!24gf6r4EzF`6izgi z2l2iDDOW(hbv(T9^&-^%OJB3%Hk)DHtnbtVfVO!Ta!Ya}PD%c6tv9&5)MJUdh!_tgG zfr$P+wSyI&=G9F~L{vI)y->~|ZtqF@HqT>q!Nw*weJhY$R87+ScB$$5PO$^9^ZU_9 zK`Tcz0M|#Q)g^LEqS5{Ljk@%}t^xp409wXq6m%2P3X^}qAvJM#eYmOpTJlP3zVCQc zRIdYrM059mM|{%$`2~McxiG*d33m>d3t(caZM9cb2&BkEsMYA}<%LI(Z(j(GRN(kGh0=9SSkj9}Ap#7<~j8=PYbg<)GJ^P)RQy z(4tZ2T$lB9O0Cnm0{dg>t>_iB)>OG(^Q431Pt`9&1?$ashvecB z3r{FnFJLD3Lz%mMD}=iRp`~K!RXLx3p9m60jV#~Ptda@ppr(;gMt@~MmqpO=nL9sV6%*Xrq<)f$qM z%d#O6Vxtq980UOjB2dS8opDcjA0@wTBGMhX0)kZh`B3~qwLfu{^kyDUD<4bXyY~*S zjRfsXK7nLjNS41%;*qa^O^Fnbj@ctdiNN?RsPis6XK*PTvVCPM{o;qqFXbOBk6ka< zxe8U3w^)*zhnt-r!Qbs4ISCmtZMZ9w-4kFws;OV4FY&3ql)Y(&1in}8qt+Q}tgWw; zbVOX~`M%NEgu^Wwi?j4Zg87ll6AT8!1URZm|6o*+SGuk#v#QgSH?q6C9mR3_r;FLl zYBwjhP$zIY72jh}1PKkvjNUfZY+(>86Pz)?atrj;R34+hV}ni5N55kUrBAqmsVDi1 z>?tm4q^S3iv4y5(RHlwPv|}bj!otlJr#}a)Xk2lHe-?lK+<-tKVYt#?njvq0vxISJ zj8cN=r1{C^L31S78zhK>!d`0*uKSpCS>)zk293W{qh57&`VpmCGIsyhnDNpI1!$GYIe;xUGgxv9C}tB`IfNDd$kF$cb;SG<<6i%wV? 
zL~znIhL>VB7NUDQ_4N%d#a#up&bn_scaJ~v?c3**Z;Og0v$K9N5}T7H=c~!gG=9SG z%t2n6BkEJ*N7JKa!_AC8@J_2jxl+YhC}-FwoHCLTaN(6$6*~E~`z4{MP3B7A0z?Lu zHRG&}@?D{X=YdmXT=Y&7!E&i(Vfu3cqB=D(n}mGnvzJA`!R;R45Tc16xwhsbTUvEC zwj=Jv?@=bb=WL!EeAeFE7#*sstE(JDa0a?w9m(95r}ZtamYquxzsYKS@)AB)>-tko zq1DyZ1${adA-c~3_v5I;^z_oAKQUwpPs=6QeG5*k`}wDm*bfSR-D)08gUI8)d!l$W=Dfb zD1kqs`{&#gJ`vHFDXxxN1(+qbB%}Mh87aVNiXQmToN)K~c;fGOAqDC%C*gk=UT)g()v zvAA)!q*mhejCg~#Jx+(=gT2np<8@ubfqSt(bg&0fl_x*zxmkRE@!z$eA2FA|&+ph1 zDoP9C|8mk5uDVc{+q#WDiDz8JY|Q4A6@`CwzzHhXKha99tR46$7*n&`mX8V|++8H8 z;aQzt@dtla!Z>~4&ntO2fQ9YFArFJ>QGKiAKY#vhaC~|lc|T>~`S!v1T4wP`B98N1 zJrZ4Ns^4z;Hhosqx${9!@0eq^GY?Lx9Y&P!zw*eF*c^@^LM z3X1o{R-6SqRguE=4g8Lfbx4Tq|3&R%>dWa^U&a*oP`=KD+$9i3KH8Nmz1*M`Wdl=q%%iSk6!yznC=rGfP2%>*44%QdzAXQ#q?(h|99&V$r zqoD?4F2jkbX@vuvf+D9A*j~{TdPtBS&#RYkd54wi1qqX=3#`GEq!J?ef~hiQ@o+og za4~(22KA|FaTZIWhxYfd>(F)7KBv7HJr=1>#~zql=mX7A&P~^})_JWj|AXk(OHV9b zf!%*~k-Az*5lp?+FPM{z?Bah1N6KGJ3?2ka&N+wWb>;a0YZn&Q|M)j+2Lxf?dI**V zK!FQ)zalQbd`X7cK1eToj?bbJ1!}x!z z5wZ6Y+8tG@I$t^6tmUjy1)ANXoeJTFm7|?Y<;1e}X;CUOwWy9tYd*($o@*~S^ag{z zDI%T_sb()S%Tmc@_lp5Qxq;b;V%mJ{i5|qf=Q6v0rT$hL1n+q4mkTjY5DC9Jex@;S zs$Vk&_*E6rZSgEWF1)5zN!@G)#S$BmvoLZ)@CrRv1X;qzt_PiknfG&~lWPKk1)FJr zs@Z^2GgJ5cfw4Mf4B2g9Wl$UT0y08A39ipf_mLLM5;nqFn8-JY)r-^d*ZpP4HOFfB zMJout&ZOF_YC*1gcR`PnDsTuSw}cSSDH9`@w`iWv9DBW>4sCd~3wBB9!?8Yut}$e! 
zuxGQ`WL#t*;~{0FuA)uvo)C^w+C?}1NS7-0IrI70$|VVyPNTeX3=04BW$j+wAvHm} zdsZ`(?nw_`-lfrO3YY`>WU?C&|Gj{@Q}pnBi|Jba;tU2g0jMpPZ-6~ zr)v?`pLj+?d*yseshA-5b#-UHQ;nusWBOgtQi7I$@n3b(E6YDE)6;vriiz`;)^djN zt))XOv*0shZMc)N=)p6nB2o+1#{7Tn^n)_^EzN%pt%rj(M}^IGNS1g@^*10 zn@tLkUM*_EVt@2scLkO|jnR76Bn`8@ZPLI9RKdyiipvcDMwQ z+K=e^ojKt`yIOOp%Gld}wAAPEw~zGFG1iLv1;Aub-~z38c8xxeb`}LL%ze#)tS|H7fhL zhkL^A&L%U=?W(iu*~a_#yg?tq^5u=^?XQbf9(|S)ZWT>_Hs$V7Is)R{rtc>;HsWj~ zMD{oTZpP6N(pNO<97|qd&JLVu*O{ukvBdymVr7ZwC0ioe(RUL zUnZZBs(i7KA}U%PoOS<{GW{;=;n=ZIew)Cj-0$B5y4aykYZ6nj@wz)BU`i(D2iB&?)IVe)&x{btPRq(bkn&cjn`EId29 z^xA$sepWjj)K{N}p%F#IfyV5!Jt2jQC=W_pE|k6CB7NtJLO6i?kDpnW;0b_ygcd6J z!Rm_%m!;~!+td8t^rWbGG}YHNzy8b9WJN30j-Oq(f4fHE9fyH#dPxX}W=62#vxJAD z>l-P(gec_17N30M^6p>CZZQust25&E>-ys#x+es$o5PD8Smeq?JK>Th&=qSg@wdl6 zPl#k37H|XvC8RgYoI1a=wbe2~Av~L^Yj2@Y@$J`ZbQYV(iSG1gIP8>OHFx=2;?G1+ zsJkh2;h3_(3{2kt!b4;A?sFzTx{Uj6d&MVPf&_v*J>3XfC8jHFXChf{ZyVxA_ItCrj_H2XsLQIR*)v~r!e7bL7d*i_<7Uu_vBpvL{AIUA{M7OpW?D_zzQ;CjMj}Q1U^!<~lDkIn*(5wsb$U~i0jHtI-&>IAa$OHHwZP=@(K~as=PA@B#Jq2Up{NL?Ya_~^FgGFJ0> zHa^7}FRLl_znS>EkL*N8CmZOs=&E!bF&?|U)#C|30FAhyj8;QaY53nOvpv~vySuwB zB~>|DRj(q|*Rg$To12&^YEMtP-sF0G#_ff}9gg`JC0Pl&?Tm(>GP#vs!yCD2PjU}} z@@wK-D5Cu?*zsUwB9QVF%(}#NgT&x8F^NTMMqMb#nbnlIufQt{vb**qWmok;=DV*Nsp1@=)nkaj0t+TR>*z8vawV-^*1Ax8nQ$iJ7rEr^ z1+JXHNHYB#Rco$H0na25>kH_XoN=D@%t&4WgJ(o>XPxtZX?AVwDWp+FO36qz4e=R< zyH|(7Co%}@hC9^ffFBJ{!pCCT+R&H)yh~NDmvrS~I=U>o{a|5qe<(5!hqztU5y>e>DfXki75#9mKTKov$V)r3x)IE+}8jmDF!~n6cg8=H*o4 z2{5j(Wb>F$L_;8{vij+FH?JT<{un7TD9+J`hmb|qO!uaId6uXlC-te~E(-0^QY_Mm zFg^twoy{@1NxV9KJm{>YiSUGO>YugQ^x4@ed)my5o%a4hI_JL_yEFzIrdMs78#CJ4 zfD!P z@9S<Wp+R`?b-TQLU-u z-IW+UNOoUa9{x4>xvbon<5+j7bkAD~nSJo*k8Kbt?zmw8?^Sl;9Tv)sN|~T1025)H zGTppDw~r!LJ+KDGIW*emL3;XZAM@erk%qH^1co>dRHXA}Aj&a4AO|dLHGrGd9+Vds zkD+5ffL?D7xb!FwWyN=3K_<>yY7o;|fiC zU{>3O+FVdM_EAPU4gN^d>|+OryHSCY&Xws%%)PFc4lw5&^q;}uq%w3Tnras@m(T-o zwtnJo21xNoig>R4nfAwhn;-^b=o3T~#RWy+)bSe$8(WTJ>ktTm@3k_w6lE89UaT;e ztADvuD}ym;va6Qr7?w@xfA%s3WGc*XsS&#SdZ1eC;8g;H$G4IzPyB63EfY`q85r2| 
z{_vw#!p_Xh4e}AzN2+#aNw)#qkIHepM?4AXQ?>hK zpLKW1u2=n#25LPX`(LF$T!=5r=ikO&#_}B9*jc&FBiM}=H5DIKCkG}bMjrk(|_bb-AFQ1bgOh?y#46ShsdDsGUREMbp3H9u>DuBvff zdws;18>|VJ%iO$@FP63yKKp;mNK^*E_(CNVYnj2!rxRv)qqD-6t7NU;r}%XiK?3K; zk3N;J0(C2RHLs|AfTStF^f3G@Lio=4zxG=)nvVtW|tpWC$9Wo$Xm%w86cv zCDjfLq?<;5K+)3>byO}Z!5sRe;e=5m6*RF$Y!T=Ua++=q+cz8k;|#X1Xu>( z)U#f-v@{W)*`C0V>8*js1anKLHy70eZU0-~;Jao3iM-06@T_Ru26oB0(1^8<7X}nR`-21uOa4=`$gXZtYtx1-{G*wMA0x{=Iw4*GT)3 zG5TD8oAH4nT(sPKdcwoAxr%Z_xdIU9TQ)BaRxPI6m_Cq~l?*}w|NfcT)q#ko zaK*S5<=PSB@Xbxm1hVrLiEg7bBM7(fY%f>J-<%#PNK(Js@V4Ax2|6f>SN`-V)BDw* z8+3@|-{k1CM>5W@)8_HtQFOJdyKw&7S3VDv=WxMJ56aKsu9ZXvHP;+9;d=u#gJi8c zSe|~5=$VbZ=PBFXmN4Fj>2=Nta^Uv8?Wq;?$v6l1OsQE?5Qqt>J&U4ct(xYHf;oi= zh#n9@&lT*$Abp@X?Y1q|6DnLzra{Ebcin@`;fuU4Ry`~Gb}Md0LYwL5tY*aiFA2sT zMzzI8Z`0%s!dWygjw)1&TS}kW`$LbK@<3Ay*d&j`nL2tdZ^`m(rzy+jYnj8sR7;#C zsmy`5gMrN)?zdEtK#mbe*sTlZ%A}z2T6jic=};YM{i@n-XXrWF#1{EGidN8hPr4iO zvVnqSC7zOpEB*WxdFANV?&IyO+^ZS0CBgVw&h26UgYy5_7wrv=7Us zG_cD|fyTmdb+q+PA5YGialbgwjvLk#C6J?C2~swD+|j!r%8L{B3gNx|Ge^8ArN4nB z3OIH?lz+W1p1ws6$~8iq!oXc=JA^0JOX`1MHa%Y2ycxk0vE?sTr=NN2$HvAVd}g;7 zSM`A8d$dJOt(U^0RcIF%1p8fv-6LqV1Hr0TRLe3WMoZvRVtn0~llBjjwATv7l7aO} zdZsyf|DGR+O>u=j@DbMdkd_kcG{JBF!r^Rl!9nD9z&KisW!Tz!Fkk7>fqikTtH{=* z;M?$cZd6d$8~nMxc*C|Nyr-Y8JaT46dw&LG2rA;0o=fIIZF*Z$j`m)6yT8b#?41K! 
zPfRJ)<`chuDp1ImRLhx(&tJQ*lMYIQL3|1YeL}ccv@jKVq=zL+RiAf|TjDQehY=hd z^PA3_X8(@G=Xe#b!)eMFNYKt&7i-@xY2U6noLW+kTVIp36AC?0OIUmE4VEh_JN%12 zSZ3{$4UMPk)ap#Xrl*g_7AEY_Sy?@jPbMwa@GA?lrs@t+;GW@0M=btA_X|Rco`i^3 zVlZc>r{ya_L`P{fYQgh_LhkQ;o#2UPPT+dt`|@z%u1YPb28H&{QmiJ z8$O7PU!?u@ty#>%&RKI02-y|!m-Xn(113muDl#|$)^{w=gd0I;K9+Zud{=cwJHaP^ z;2m35aihC{mI~tK)-~_Y+=_+(ZkF(zj`ChpE{}0=SgE87-(zE5-cbB3J z?^d{-BN1+K^ugIMdZm8c&YyvVgNNndgRGqqQ1B$m6nrU>fjez zE}TU>u=ZE|<^m^R=xU?_d-kf+ge6e)#X{gsT5aG$?M-RS6~H@H37^C%>@&Bqf9?AP zQykr>z`?#oqUc0thp~Kw^!lHWdl4uSDqtYU*VM~MLxDd-|1~(Jp9}Dcx4bdmrCC1o z27PM;(2km!bWZnO|?XV zmaueggrwbT&9eKYVftCo()L2&z#3O)?56UkE`-1_do|^W@{Hv?8;B3P+r*f=`Yh?1 zqsSS_N3&~n2McQMQ|@JDZ3Cf@@R3>)b7hy6u)W<^;AOQwqi$7Y&&IfM%J&aW6E^xb zBlXMdg$Pa#&$^okr{rfNCP`;`znT(jDKS=)?_kX}foiYgoIN_muog*V62sM8!uKb1 zPU014k~tlJdT5NCKVbrlnh<)@sNiJ|f9h0ATG!8prT993TZ_;|F(+JAFppr3ohY{f zaivBf@r*ysC|nMw`NUU+iE_g-5u*5`T?TWevVL9|jxu57r5JWQ!tAzZV!!;hyQJwG zh7@~%_}QwQ6d$dza!of^a)Isw{}{Vrw(`%FjYgW#9tqKz?(Qn*L4zlkTQK&q*?bx z!{DCS;V!G}dHdn&=#&?v#GMQ32?@8%a45DJoekC9^1}v@ecnEW!hrJgumJ}NW5+jR z;^W5V0D6oA58B!~L%b!@$m7OZRNd4db2Pi}A7rfqxUA#I+?QH1+vMyt>iJa6vkQsdJ&ec_4Uti81< zt}3~NooD9%HkL-)(7qnxOCj2y3jw7!J48CtXA-R^T3RYWE=;>FNAdm5%_>G8iY(j- zvOh=W$~hss05e}E7X|`zbBa1O`>o2y{DRquGV(>^MEtcrud<-C_@$YUhTz3@S0Kda zRz6<_g$%I~LsT-V36In6R`wBz?&nyGE|$U;L9{+R-W9Efc%4_OI-irq)1&B>7OJ_@ zc*FdJn83PdV&0v*@gD8cIzmu|n;!WX)_h#MdvG0XMF`>}UELYR{Cw0~USUC9*oQ`m zn@Yw3F{70O(c$hoX8;?NR08qgN4{vW+^}O&zKU187RamE3qNgxg4q0lRMZrm7YfSZ z(cxBjrLoRBoWeRn=MoOkhXsKnM)gx?Yvn{(Gwx9_=htDQ-$Vxl$b}#pfJ4m{gcxqd1iOGOYg?S+%ydgMTGc2lGp79G%e1xqz`3 zqRl<6&cV~@%Vu?L%+E9D!w%-v5}d!bsrKdY%je`Ec>$!j(<$K2XLG#2@YlTt;Z1Kr ztGn?nx56GXbbShKdQePr1TSl%GT>B-eN+f;hFyP5D^o|_WCdR%F0TQLW>);hlv%I7 zq1#KRbk2u0DWFS=Q&Ku~PHAYY2%Speyh-z*_2>FlVvZn6FlU)EX7pxCv&8VsA4r-w zGAH*t`NsV$=jpXqgb)F(q7#=Dqpq_QANvWlQRA2m^2$A~cowbN+Jt*BIHz6{ z?>bJ+%-38`@OTq?f2?J1pWj-0CU1~dFFDl{uV)id&7n#jG_&i~8Ui=#FK9;OE7;1m(f5Vq`Xj;}a*dT-)=B>`LGEfT>(Qqe8_{C{LdF 
zEx?x-ku)FS1(C#bc%}OJN=0XNRo^KBmqpM+XU7kI?u0j+wT({)Tx5DoEvrh;i%Um5 zNi)8D|76q^YYF9zk)1ZK4a)BJ%Gt2Jg%XxGcxaJbP#|X*yBAp|JH;3(plxa!a%!3R zFI6su+}l|}AG)}Cc(jXlhUm&LrlxFq1dNvPH8=Rf-lN=3z*B!#2I#Bub}ackhvfmfF>))8 zM!d8Lt0=$l5OI@5YnBH*j^MjI9&}@Ubj{N-=qIIh zLCrcf`DuNmfPyV~Y|HP(#1EBC_eob);Hkv_YQ`vlyAbZz z=m9`N;`c3bfiO>;c)ttYjfQDiOITME>)9T6yg!G3=D|n+{78q0#K=Bb!sS>%AT1o) zp~MXy1>UeL;qthbpK_9Ufz?iS16)b3RJ5VZ>K0aCxNyM!fsSVR>-nH#zNOYPC%WTQ(QF%Hj}(c0q-`KS4G=c%x3_{ug4rtc?;dQf-9NuBST!N%w{)E!(sH93IF0+p`Ihsk_q=>!G;Czzp)-#u{}rR(fi8t1 zy&2rfD5<+(`z%%5n}*uWShuZ(y%ocp*}YzfOC6mf5#H&z`rySX;lGf!GBv`{m&Y1~ zY(c`Res1_TPRQp!OO>ziT9@C{VBzqAN>a*RqnD}>i|`(w$Dufl&6aP^1aZdP=Z!8@ zo6~>L-&35nC=o0|bQMBQdya8ZJcHNxfiR-0%b2_I>a-aAQeimNwmBGe18~I7zCZeQ z)Jy*F{TE3CU+Bg)few{v0OgU--Nx>!9!w?2uXh zeI@2nfql!e=C##>gDrculA!!J@f?+A|Mg;41|Xk#R5)x92(`n>Ai^ZB@bHM5Lj-%N zCxcS40upD=MEYG)gYYQvMyWi~PVc!QAA`k4d(=|fw0^;4BB+5kp2jt`C8}L$1$%&8 zhr^f9fD!^Bk3qCWx7Nnc-ru#o_=b<_`WD+x4}3hUt?w#eVrswe;FfD$MQ|V>#T$wP zEdPK-0FS;ta>C&uvX+Bzrcnq#^kX^bhxBn&c4@(8+*{T)wv$JygY|4Zo)$wV)l-k? zHvWed^Mam&wxi>?5PY0E_rBVP<9^kmia?jE7ovJ~9{I?cPD_P%l%^+jfMNh)Bx{+% z-u~|ypqmZYpys;;ZLB2I9Q8RbT(nB{4>+7p-H{iG>*-vFCmi9Y-?}7)A8^XO^+KV+ zSVFn5C?=?_rpDFLzxTg!cLJ|YljZG?)zcil!DS10RjfHPEPj8_7tDcfKQz>q<>=&7 zc5|~;HUXB=g(Tm%1UVZU_*}M}!QN_=VfE7J`Y1{#h{ub#ubg10j?6FZ(T6}&`mI5~M-l!#ZVvhc30 zXgc}dqtPENT%Lpf5h5==N0U-<992@=s;@b5R=Xeh8bg{WSjRM-V-nA7Oh`{2ff%Wl zazNAiT~dFIUuttlt+;L9(>&%{#~^{Sw1q>+p&FnqnDm{;aLx!<#qML)qX}coc$Ba? 
z`3IB2(28^2tN8a46*PY3PHE`vmTs{?+JV;a>rn)^oiOBYUY6ykC?e18EFakWU1&qWH0i6{6pC5O&^7 z{(<&ubxNC~t5J1;aA=NtVUvsLuMU5ASEaPl+nN=6PX%u_u<#yFbrpc1REGthsE9_i z^6^v^zRMKzqi?xhb+%DS%Yi=3#cKv=iLF^K!6kRLe^+(EO>EmYyKf-fA#njs~~86IiSpA%6)M=$Kjq~ zJ9JH7&XLrMTcEP*ew|G-F5F_*@rRVkaeN}=9NQ1N*GyHM`V6kkHmiz>iP3jZ^suif z)<$kE)q$+cQiobXElJ6;l{VLZ-Tbfm27`o((lYwaAflA+d-dzwj)-+VhpG#g4y;L( zHT}^y*tI$lE2p^atKIB?elR#+@1!YSY8v}%fI2_YTrP=74Y_TwC8}u7+l}Px)H723lwQ9p+-f!kL}-4l z9fm))&Bp<{hJ_2c|M^>2v{&EnO$0gIGY>${@dwvt);JTIRK%$P)4%-jn~PkSwa*XgQ5jjoxNj6S7E5fsk&1B(wiyyL zf6t!(@l~tHCrGjAo z9lPF9^y*|}&}x~_py>iM>i$(rg>;;bLj4IlQR`=lFM)FzpXOLvYRB7O{rt-BR<0GM z(l^p^(kOuNpBjy}$d;2i6Mpw770;;M4@1?}ExPM;U&)=T{`JNdz4=L*TlGFWV94z) z0J^Y{Nw*}$;Ym(4I>!Vwc&MVy$y#QwkG&9>(H0H2F0%I)w+K!`2CnWV#~ejv&hx;g{<#@mf{oI4yVsULDr14emU7H4$`}Uy}n(= z{M)R?87;n%P&U!=+LvT((v`Pkbh32}WDQqou4qi@9iG8{d3!=|fl6~_X^e`oV)qZM zF`6?~-z;GE6+F*Q|L-ZvjUk$@{Fp?z%PgR^A-)jLMLC{>lAn8sjs6ZMn$+IxYjQSV^K0lw4I?!(iwa$F^-3pwf&KF3goI@+bBx|E{KP za+HpCy*d`SvG6c9Zg+bV4Gu9Epx(TAxHtQEhlml;xg@ri;|==M`*xGFt#lOO%Ej?{ z`-ywK?f{u{zcNP6zr+{%FuieEww6lBj*F>(LI*jgQSpZnJF8ytrPdBv_cOZ!Cx@ko z_Jey>hofrgGj___&Bb*Pujrx$}w;*bj zh`jlc;zu4^4H`S@Vwy>7{!QcN^)a~CIa*c4lC^n$zHIV6QOW#NHkFAd%4#=8aYKa@ zPZ?{-NydFeP(}t>9_;;o2=F~oq|_j8=U{C+S-N?9KFpvjjT7^Y-{0O}U!3B+;n%(T zlUHc8-I}d@Q(P`53`^f#iL^W0vmMgZs@8WQ^`8FgA;UvnCxCuM)AxaQlIqe$Dmf2< z91e$s5z_v`KXU71^TI;K?l`v~JS+t@rI<3ibUiR`dk2F`B;0(c#gOdwsUPwd!w>GK1?Gz~SfG zlTd%=Nf<8kSK_C0MbSt~HL3;ANqMS~>Pqql%k|1gC!0CIh5-76O`RGw49=gv)CreS zI4=l8Xg9bNzr6~z?lsA@(oWHT9e_*MdtG4U%EF>$f}$K(hDV{JYGpQuzi1u@$=OqT zGg>xN38mSiP>IBw&)5{|NG)?EESi?jq3E^mU_?W~CxBxz2z66j=I>p`)>s4I4JNq) zk`|#UBM6)%sEF&O*$3jE;(Wk#aAugU`ZYqpy0oIs6P<<$N(BtypiH1paF=W)DiM!z zZLqGH#YPv8^rz46!LF_nsPPp)WXhvyE=&04qbqJt!S>@1oKXXlx69 zr4-AFi+_muc#6Dr;Gh&Zd0X~=#XZ~0{O`yOLOrwau25HY$5Lkx+FRPgV+xCgXIIUx zb@qs{SZWEI)I$sGzpWbyrHip~`(e@h#p(A-tH3|5E7^mZ%ZBh{ov}bwYTpI3=$QB) zBpS8O<393+O{hPC-^{^B5N7Fl>LYV=?_LZP`4gJb|89bf(jd8L0en(uD}j|sJkV}^ zSsO8<*d|6WH7X^((8g|r$XZ>+S>0Xs1C83cco0{%Io&X3N>ik;@#EZAZis@5#NQC` 
z`2z%b*sGLM2P>WmZGO|2_r4VxO@cFdhH>CVr`z@d@|s8?i(z4eW#;1{QeSp~+6}7c z{hXeSzS%#ZOxxt-*k~;IX^`WU*Jzs|;J0@*HFis6`O4n#xg7b+TSlB)9IwODCN>SuW{kh2z%*{C7b2u@&5$AK~xr+ zU4Q4K2ml0VIhYhg>%;%fZ~3B00lM9w6FOfzC#LduicN80!jB96rES*tZciR{S;t3@ z`q5m1nN6F|I9^|smWxw;7&6LpN&QmN5uPgb((d%CP6y$JS`X5`72wymn%@uQ^YaP= za9#lUAm!M?b(rTbde-{J%$g~4YjZyQ!P?r!zd!#%v<=?uh#Bt9yHgCepCm{vaHysS zpa!|_Os@1&ES_>kJfHlI7Dn<@UyCGLn--@1D``LITOk@4#Hwous9^)*oWDw#7FGsK zUG$5=CNL?NKN0c9jf77rxc7B8%>Gcqfc+miH8jd81V>CsWdLNsiOt;nnFwE$vZuz= z^H;xGRaORes>ko|Y(|nnM|ujoXNkW_1!@(y=rFNP^(@$Y&Z`f$nGUkaNWo7gh!5qk z2y=xYIoOapx$eeaetFIcz-VTTu~n{gX_rL8VhkXo>QPXwww3ED@q6k+(f%dJRy?Mr zYH8Mw8wf^R#sy&-@4X5BM@3DIU`OnIz8huMu0AUAftVC3*dX}4^3w$JC3~`^b+EojJ}c;ljvQ$!u&2Zc z{)GF6^ubcVFD-CRI}?3rKg2UIg5R6T;X?AJLb;OHqGfMJx{Z;8qSU*Q2CpMPUzqXn z?R#AkqO)4OFta-!?Sq5kzKYq^>v4rhFN8{uxiIg1&rc$E)VsUIJrI+y&z!TaCR5tp zl@tk9MVH!ihT~6uJI0G`S|YC2cm!QOf^X_F5a|RhYR*9gK1uE8{fggt_>0&&b*_NM zi3z8R$rYxYu$Mht;Lu;4cauH#-MkK#m^NZn>1c!4^?Bw{qEQyFEdfP~)O%j#6`>yW zfsx<0JKK4^FEaj;#=80(!?NEVyZiU=hvcEzNs&o%LH$Yz%mu$p`5Na|*U7PAzX=3> zAM>_`i}pgiBVM|&HzTj!e{K-_b@n=;DLU(f;L&&N7@I$&iTwGCHV$eiH%g={4*H^Q zcVxhrJ_#68=j31w^}Cm#2oaqsSXC!-t*i9iImKkLE1r2!z*j1seH8zScZf$k(dniz zxoC`5P$x3o8P)zcOO17BI<)TOkFyqlO){%)v4Bmx-?bVyrS+lhH_OqL>F(;~l<)TT zM2^Bq&axhU@h8UwrC4YFi(|lt+1ScSn5D0NlfJtf={iWI?X*z;-tZV~-`@G#GQZz? 
zcDMQcZYSuuUyK3o!Rf=<(*h=WwJ%;!=w45iUp&p^J6&(y@#*6ioP~B!Qic0bddv-4 zrYew6qGIHk(j17BjDQ$IWGhFPh!ZGsHD;zWPYfYy{=ORx+cP(Q<;Q|`%p~m85nT8BYJrbI4G0^C`f|a-) z6x~5s$BIPTouFQgZmB(P%BTK`F%eHAVaP86s0;4TELrq{C3cZ)IUwx93y*$~DSUXZaq zuLdK;>np`iWG{tGU%5EB%b4-D1>5tym2q>FXE*W?1qFw>$k@yFA!6!}mZA&GI9la# z#e_=<&VBhI$D}!7nc=)3^4VQ#?Toe!#uc0^2pGg9sQ3isuty9ovYf4VT`1MRFf)cvWMT zvE4a7hGYO#02 z*;G}B2=hqObZgk9?x}WZo`u``p?2hVV38E53=BCmMd3WM+L8P#_1$&?4>Xi3eBYDY;Wxbo~{o?{DGYO z1+o!C5r^ZoePp}i{c)gKaY5pe+7P}L0Qfu^TwJ}$`V5Z$)uaGB%UIX=rN(3B!++f% z?sox#due+$mn)#~jQXsAvl4&v->kOc9oPLaR(<5P%k}9Dpa;TCZvwoai^1)|u_AX) z5=7KMl9s?(Cfc>?k3lur$gkJConwerc3r$dfh7=?Y~|$ z{%l-w|83zz1;@+k1>+~p)hh2zTh)NIGn6^9u>nm4z0{CeUHFn6&c=j3IoQ+i9QO|4 zR%(176a*>qRkrm!Bxer^Y#+^x%y@B2o~LB({;Hh1ZIF=`BbM&)hB`Ff5+WUO<{M*D zJ|hDw!xXcFp%H|`8~(w=2JRFXp!d4x$i9;5b4Gx{?GJ5y`>+{}%~mp;l?^-Ec;bGz zHQ!eozMHW0+nT;b|C}4P4LrJnLUUc0xYj zMn=Wia*j@3?FO@pyA*@#V?pZL3S9`l5fI0)*`NYnapsc>heS8cdXJoPC!mRQN$6Yd z`UOEI&Qh)@i7@h~@6=Ql1KD&j9piZzU^R$p zBoZTl;=NjdzLnx@9tZHRR_!^dsI%8wCGOOVn}10WxVIn|d!-ndB?1*6Fy;4+P4srz ztifRompD8wtu*G6EPtRxIY#%6;a^7~=F{1T%><8ImCv%$)U4zw_wPoJx6)yuzDi23 zI%1-p)Jiw364x3k6)s&Y7FaQvd>80RDGv63$M$%leJ$bWTc)i)7(PF6`Mm zP^ZSkc%JxN6k*IXAKC-%r`3D3E2VrE6GZoN^H%n12I?vJmbNDTrz^;03f|7x7AZ}V zCAMCo=UrG5^ML70Xxl^^e21vwRjOkeR$EShNjQbea-bZ-+M8u1FYQE4fNARO(_ zQm3tJN1!iOzfBF|S8>_YdU=?*><)rA7-E*vsSRxxGzIxxgT%x2zOY$LHmW`OcsZ(Z zyyjgz*Vx5-vBX&m6ma-LnfU@RNNkmk>b0xUAXa7x2j;nm-DeTHAek64)8E%NjbI5N zy1v-s$=e|xwVl>%Me3s;vBH_+p-%Q{u(2)^I4phwZMk$_RXwuOpPb)bW<%R6!^yA5 z^Nx)|N@DIoEsNVfnA4nZ0u;jq!-?Wumz05--Ggn5z1hu?w1;yg^5$oA3t#RoAe9Yt z+M6c_J?&~1J;Geiz3YJ_fV(vl;8%Py*ihT&KnkDYroL|-iB#~VZmo*@mYI9ATS_w_ zqI_(A$a1%Tsd;G?5fqY615lKl(lUNQ zgH@9xe(Gzx+R(sk%+LR&lzI#f^zWhNE!9aAsGI+8b;Edi!11z$Pr-?uqf8+qzmTjO z)Lj=FC;g=rF0S)&98rjPK&};?==Shwnsz7TVc(md{dctL!VmvI<9FJQUbxs?OS4yr zAkm7B_bfOeztBL;{&?r$b=?YZKTF)dV^Hysx6X4lq;YC05cPE6$Hd!tKg1lYv4J-~o}|MF zDX}0hZUyak38soAbq-@6MLc`{{4MMuDQVtgL=WPuKady<8sAq%sf;rryC#e|W~ z8I_}m6XA#*s)`@2g-&f*@;_W7uL~O*sg>^HYN&`5B8p>XYJ>y+@grjzM_Ol2gZzej 
z)1u$?oj~YAFVTR&tWHQ8eJ%Aru==Gub$Ru4w;URBnK)G8{l_9DJ<$dL#k%yDVjCs- zC9LN%w))|L?2ZBK>Ps#zQ|rP#wxoGrSyggj=ZZ}U%AU#7UJg{JsS8-J;`#Y8iS1%% zneX`A9S81h&1+Lv&RjO(fR&BorN_s|U(NUv3Tf0PtRA+2$>|z*FZ_ph;?T&B!;hhg z9f#zKksZH0L*MYhBayg8halzTY2bez3CfAtu}0p+TA@jKtUe9rmb4%~;2uQMvzy@r zBDyaC`Hxi1Pnu%x$hqqBcM0wTWih)3(i-VLU5hxhbH|p+UjG)@1Bs+42E=*{RM17b zCl4yW-k!(q=y5YjDR!2RHC_A8@H36^6Fv_yMCVip=$Ya{@rc3~rY=LlFsQ+-y+sq6 zk1us3s33t(-w!3&)MTF%;b7C6Lb#pm=9-{DHVh8_~GxK5h z2-Q03ibZ(vRjm;E!J2ji&FExb(5|dqeD^ znw{a-TTL|E9SSC&lWE=%#(OynK}$ABPnX*QvG2V!MV%g%Dh+bAX2d$Jueo#}T&YCd z^vOm(9l`Vlm$F1Oncf3)Fg_@4cvrdYr#|-!qy4>j{V>l7#M!XhG;ZpV-G{OY0F+*b z0hIA8n1^Pl1{_;t|J;IkcS68^qM96CD1(s^L&a@`VI>XqgPI5 zah=XcpB$;2?5Sj#=0@|gEw~WnL6@9TxQi!rz!G;pe;ivADNl37R;|i8e5ye;lB{vW zEQ_>z>49qh4i5sUwr)$%Z zWb$e#<+eRs>@qW>%v{QMrZ?MT2(6E9G#7ERx{2>|fkE_g#CGN((9^ukM5szDfCe ztNtTUPnIvVoa`@Ket0x_a^!h>bhIa=@DVA`!mw)ol~FL=V*=-FS-TMRU)tZ|CV{oq z$Hc7gD%;^6Zv-Pq^bFi@3CmHomck~P=!PV%l092m3WnU}dN#Z;;F=C|hC2D1E7Hb$v@ zYg+mO^~{swo837Y`Y!f04B&0h-~Uonqoo!G)yp!b4(qYP%Hg^S_k~hx_Vxm_-hqFQ zZNaiVwh?&>GR(zd(F9QAuBrPVGi!J-J>-57XA}tM&nJ>H>*_IejtGo^K?xbAx{M$W z)!zmW+Rt{nhSgEee0lYzghjuN(tjzSY-;$0-);2ac2BBZO<(qWA9YvHj%?tm8d+Rc zJHgLAR%yL%i*J(F1LT1IWO=hUgpYUiA`iofXla*wh@7gQLhrka`@5q%UC&u}VMnd| znlmk4jIG&lGUiD9N=s)+k^<2z@Wd~?*pDl#XYAc@PqHla`VD0h@myUJEQJTo_eRF2 zox`VaRT^as90clu{fF-c*5gEro2>uUWNabrx{sFDxb3Y#>8)*_I<+!MN_q%Rm%C&T!moIG*=&=1qbEMDNj3EM0J(M>nPfb2>~}!w?+^Q9!4eu8IzgjQB98m)Ru2bz zXAzn8GA8S5s&F0y0+YbQ>qe3wN6QK@jdBU&nI%Oj>n{3`eQSwxR5*U%k9|@z`ra#c5Zx@9Z2&VuQyn{XOM%$v4^_Jj>rpZCx zo&B8@NKS5UdMrO{D}NU0E4IyrW3(l-GeIY%4*xR?kIVAf?{;ndWcFw5JgiuczOy;Y zv+KA2QYHK(&=apB+|zrFibi@O+W^=K+M-#8Ai*5ohj6B8|GK=5@)xR4`5&)LnCoHnc}Q|w}>Rtm%CS)PCS7W zz4_rm2;HwS+?#Uo3Z;10gR7Lr6@~G%&_yE2DDtoxsKH78s*5=6F2_G?whgn3ujvy` zoq<2OQVQPJCBp1}P_f?>KDOKTtp-$8x?4}?ar8TcN9;S~@EIUw&}?K}VZ1lMtWx0P zTeY|?E!?^q6r!RMcJ|i?%E=LZJ>qDerqLOIO}Qtj7ewFM zs+c~`o)w&KIrNt_-wnv&1H{E{;K!zk9f*Q0M3kJ!Vnz~EMaGDjud(ZghUQR<@s0iG zDFJ_A+)@v{paC0$o#;Qx;Rl;T(u<1!COjkdH-)nTns=Qk9uu_~4VjAw_0|d%}GFv 
zyG@7V$+~`2Sgp%s$RC`I-1h0_>A_24bs4oDyJ8Lo8Q-&@MlLa!d)B%+Q<0gjJ4$?ib~v3*$#N zz4HJD?MO}!*+=a!M6}6pTVzX9A|=e`c4R??nPHD?DR%l~XC&g=lM@8DKs@Mr5g+Is zI~U)ON~sr+ffLU$*?apMjY-*-e^YI#nd1~)QfB&yM5di?}I}+fz(~U^; zLU)UF%jTV}1HlkZ93q;3TOp{Q764nBK&io#NYvfXf@kM}mirrRNQK5Nax+DfgCFxz zf6ZQBFo!Xe90-s>{b&~Co}%y;n_45I1iD{8y)=?m7rPwPDQgOr&>XZ{$W73=WHjr? zP3I6qRZ%k9yr%2Sz}JO-UzE^~yskm=_%tQ%VtU%c7qMT#7lE&_;|Wb@UXh;id_ZfG z3AvU8?{D8!-d$Ja{qO!4!`O7~SOvb%_F9@RR$>hZF$#6L8BI8gocG3}1?oA8zBeD! zeochiE%D4Lm>J#?=T6oTFsBYo+8{CvkgO+`r*zL0lTR*G6i%W@Qo=TB$+)DWJNs?cRrr1Ig$*Mzk4;4G!6je2x!zaUU7@k#)T zeXRh46=IiKMbotz(jJG%Kjap9oy^w%y1%Zf2de3ZtRYhATMHg=pkMf{nj9LGkyevU z#bFbIUpP#T|1EZjmq-uFdHr$Rn&&9Q3Rod&OjpI2a(E+jlOxyWVq?7v%9F%;(hT|5 z7e~x~m=!KqV6Gb~oH-e!{GnV`SyrPA?%9Fb>=?shtub(b-W+bv0)3YOdkN&KGg?|>AdPW3^YCTUGN8db- zryCmjqiR3d22@fxmqDDyffu}ZTQ82N&O+IvA0e4h ziD|w9bG5Zaw6J?`3&lIZddFPKn@wqrb=d}mHj{5csw zmWbzr&A3I3#}~L_N0EH+y!Y1ET{xK&dZpmkWxN6fCU9jLXwR@l3aA)04J_kRdJ;%4 zGnvX2cr+5REC0RVY8UC&*Q7LIR9c$%NaN&H*wBd5$EMCSQ+=92s<{5shL}|afwM3@ z!2nNpRt;HZGzPkdfZoeh6yO#6I(!mMty;HpgTRbF5sBZL3-En?^F4iAl|P=FEpT>a z-euS1hHC}BS?cWXOrCoYf7$ez8Yjrl;W!W_%06*s&X6)$ zz2~#(ZtsGP)%1=BU17*k=#;qAjp&5V%ChNjw5GPCR5nOPpAonO7GW=l`d)k~NdG-b z3V9(_Cq5@dWDZKFY*J{`$Me7p*Y0Q!-*UW1cPMxraz;PU0j9P-abOz{lVyd@MGgu% zf?hf1RA_KiXghINJA58C8aWJ1Re|tM&$pJXU^D0Ue8|}d1K+%Y$6*g);$nLsM-GSn z>+*kUG{2PglG4{NXlsL|p`EJ*dK#Y`c2s)`&GajJOlAC`o*UZw1cRMGx%RIVM zA(inui+XKMaXDHVPYp%_yYb)xp1GOLe6kM#i(hX(T-#pE-kyJ*5E>c;0KN(q+?4W4 zV0fJHVhl%>yY5F|xOYZ$l%y#>?_eX(-rEtWKN00*&*{<`EdWm2tIu^ut>UC2AEPLG z6xz9;G=Dw^?Wo(?MZcxu!P?c9(Su7<*j-)GU|3_$-Pso=QT5@Y9cNUm5t%G7Zr)u$S8UL5p&BN z<1e!inYd7C1BWH0J=po7<*@EO=hQEo-FM~lABzq8@5OJIZOD1ZW z>&IV@?-atvvh&QShV`ikn)J!R^fwGmndPH1f#QKUaM0&2BOM>YBzLW>g?Gdc={X?HpWXrgF3UR8 zuBJTB1_rG7ku;E~i2jK0U{Co4Xzgx&+(qv>=v0#;8zhL&|IBMGMun#=q-$WN5)qL^gzLvw45nNz7JC7qN>4oPR3hm6 ztA~NW2gipeD;fZg>3Iun$7En?qY5p>R^1Sb6=!g$}7muU=UaOIdew9xSO|$&PH=?ljZ6{BgD=9A-yQ_s_)Q<@O3>dEHnh<{^)MaW}m`~rR0s_h!DO589} zelpih^GmB1`kJWlldxKU=fvdF;p)%1xpH!rj@566lg>VzQ|;GRo_v;QfXx@txRiLM 
z$E{dUg46q{jscN2n6C3wNN8ck0a*Tor_b&@T4l0FS@A~?8)K;GbARnZBldw!Okc~% zn%&8+-SNWm@iLXXS|6(5Y4@Yv>(iAR>}|HOzi)ljkq-NlZ9}<&oc0eB}GIZE!0;jS{V5Sod$fG8kv`oqEEkaR~zPq1-}%POd1s{c!clC6j3 zS6X|&cqba!-`c-NEEFpI0^rgvbLzQdO=9G?Ns6MdPwzjpH;ejEn*+KUNc?ZAt$5n5i@~?>ljT%_5At#{{FM#7BjA`1cQvmHh+h+D ze-P{PLxERBghOUvb`%6miq%4wkRh~}-uptfgC_woiwq-C2Os9!-JUy?FB3e&52a5x zX?7=erz58o2MJ!EUfKj8_sA*AO|`0CR@u7ueP41$581ykj_ZN`e#H~>Wimg8o&C;d zmtt&Y^SDQ{hN_yZcuKJ9Bh9CxffVGei6q2B?ws!q1#B-a#{}7#-hn=*5rwEo$muB!czNNj@HJf??;>-Z9q&P|0p|jneZeBG{2Sh z#Mhr=Rk0f%0+#8g9YxAv`y;qog9gDB);D_R_jt4C>zA5hUbCpAEt$QVGml&O`2FU^bE?VNsLh+vByqXphsE)Ut)(un0D*3kCQcMEks!%A) zwazb9wI!F|V|bp6JagoeQh2c&|0tcL+AAu>{IzmV&?hBI62@IaB)c#ea56e_0}!7A z4UW``#qD6^xDR5Gqd&tGVsMWES2OM}_eF`f#=czgRgDFTKp_Djb_b=Rp-47E$?^gp zPYQRAWVg0dqQ}Bv^ZMbSRN`o*?d_Kew*l;6c9HcTVZ)00Y>PmesO|)A_sE$engM8% zq<>q(OY&i`v$DGu{KqppiT#+5@Yjm&Ziy2FW_zfEV5x+IBL@Ut3TaP%-A1?@H>DKx z;%t@JBh5Xq1niQNu|0!d@2q5nu$}Tzh4AT`MfhaHlRYw-96W+{TkHb@sJ5%HhI|XvtDGX5;x}yYrucfs8*q?5 zP-<1y2@()ujmdShyAqIU|ehg-KxzP{06zy5N*i5Px7`tMuE{_6G!{Aq!ZVMRqBE~x7AVzB6UP806C z3`=O=voKd;044GP2KzWxz*OW~${(wck$T!#RjRuE+>Lan?kfnd?lrb%n!xYe)@lJZ zIp9Z1KgRL{7UM}lKKp`3Iz0*iJBDBgE10*wleNfx_Ui%TO=#8q5Cy*AgRa17S|#4+ zEr5wAp#iPHdysN(vb>1n1%@Reo)og4=<1?bTGFAV`@EdH-ygeRwK8ayMo(0R@OXjo>d)(AL&EJ%-v|zGNT{;PCvax{u-gHv1{=26>?Lht03dbs*Ss zb%T>W$cC!6WhI-Fr#!lvGRuEEA?im)3y`;5w0^6Nm&@;>+)5@64vQcguY-P9>r|oU zw4*0@f}ng+PJ$50FH@&|Kyqh)`BW|4`7h8}-hsX1>(HK{23zfIh0OTc%^&#?+7!8& z>+@-Dz21pL4~FvJG4Ui3&U<4nNF<}ZaM7>UyWUSizXc~qx}mdcBUX-)oF%E(Qe``L z20DOR1YM8*J%B_qc-ir5m-On3o~K@9;2l zcKJAVOad02r?bK;(#Sst%{%CIGH;jt`!H!#DZS|Uucr|m=_a3-LU=1#Etq(Kn&$N4 z5{U(Yo0uqF+UC|s66w6WprG%!QFQ^Q%%{JZ8m&DKo3kfKgVxWvCGUL1i;F@7Q1USg zpBQS$Y*J(k75whV$fl)uZ^UM8j|b1&VMotyZiQ5MnS|s#qnfSl^vTh3&;MWap47u; zBQp-Bm7f~bq+4Fee~M-9h;c`~(U#W`{jSsX_D^z>x3&%<>8W@p!)w=Mc-i;Ff;Pp- zyyz(KqHL!hxEt~?^3!uFbyucXAxh)SFMirQZb`^EO6jq=7B8e{bJOccIW<)a{JkRM z4!4H(YoEYjTUx_yXE)5PiC0e&FAj5F{Dje9QnfS-eE7H5NwAw#=jIt_4Zk(Oeectx zadDdMFhk@YOG!*t4a|uTNd)N0S}$@Vo8Yl3m`~95(74 
z1?jR6T&}@B8}`>__3f^jkv(}c8QMMo3&CcQ4>0Rmju#w^acb&9do`%#=*TD#*Uk+? z+kxSy$%U`|MK1QL*)^YJN9;?V&djdc)f7>i!oo0(wpDWHQ=(!ej}|Vp94~CbS#?C@ zY-$eA*b_Q#w(5*Aq*;NOMFUpLNhmA;I?-=yqIIJKDFu@T1Lh_2>pvxiK)TDjJySx;0;E4n6P* zzSj~ON`GnBlA2`-(e1zytjoX^v$D8+OO9c&g-c$aZQRv`IH4&8Z4AlUg5O@I%kh6g ztaEqXQgma3vKQ;xPK4C-)j_6B@+4bS?uVTyhBq}wdi_xD4E#^%fio9>^C~g_3shmj zNr*4S+f&}*OV^|XG`-hUeiieFMx|p(x|$APzKUSpr zb>24M5gycje-Qo|JlniJPR(5Jd>-j+wKid@IY~PXAi!k;SBO>a9Is79WzI{Jue#hF zAiagc9{94{`TNzw;9wl=WcX0?h1>5RE zgae2B_I5g6(x^jwlf@kxr|oBXiK%bJdDGt$-{cpD0p`6%5!GP>9qIVZ(hgRv+tcbb_C7pZM* z@?-RRlRP`?$M{!oMPCK0i$7CIc2NCCTn!_Sk3SeZ9lk%r9?D^<&Q~KeJxX#%xmWipJdI4DpwRp z9$~%onfO`lQow7Noou?>p)s+VuB1V>gV}H%9UQk zN<>58owB^!oA9A^*pT;+nu#CRKou=TPHcsZrGaHp(I5pWX+Q~3dC z9=)@wTvEJkX-s7xpbVq=IPF7>sDc)Y1;^XRxy8uqS*;nyvF&Nys+=}YG!kcE!aXxk z`x0~W$#)>QKrycQ7AB)~K%6BoRlpRoNDm??lc>T-nI zSzgC!f(0;;%!}BjC`pTkIPLOIOts5I??n?6P0>6njq9x2hB&D{a^HB6Ut;hxzKU5| zf+xvG&y7J!@HRi+==ZRB=n{onBD&shmb$t-Kkth|`k^6jjJZe$Y@dXTG=SgHkQ|>+ z;LH+JGIPz-33rb~nAKLnwp9AWAn~m4gGc;UBmr3#&f{m1h@5Qt#&F1k1n?G_jJ(yw zkPFaHq}$1^dQ zNXAi|O02m=?|e@DQr}STx(?Fd`2OnqsJ@f_e^TDu(AcTVv9P9WQ|lDT!l~5Y>HM3I z#AQ_1d0hp}ASrMjw}P}dZAV?qbS@oMWGzh$j9*gYznCb(We>G))t)_4Iqf;!PvEly z!aGtL4h`=-Flb=SwK*dpYH*0QQ3>0xqv&SEOjR>YyYR$#Yq~tVgQtiu(j3k^{SXjW!#aL3g!nXcXXxOIU7P`2>(E33up9fTzv% zcp?0*70h)G9uxTE-gfr<(6AnFg5ab#`*kgTg?E>EGc11NLYQX_EBDWa$h>k}dfXu! 
zw_&jMS(DWR)8Zb%SW#eD#`2m-3Ol?c89#Bf3J7Cvm3$I-%%mr0;=RVZX)3O-O?p=< zSjR8stFHfZTXmiZAf_YYdg%AwnM5c4cEM*^lzo@4(s$3TGs3B{>{bG*;xEoD2<*U49sG(``S&;Xs zIaB?!@-#kE1Dks@Z#iE{%+NRKGju{w4x-y3*-@=f7p|y|fE8`j9xO))GqpVoUnz#NoD%wJmlN zHcD|?%OtXPB=qmn zz^F+O*g>B+*5KplxJuYjk8~~DNxt35!s?y_@hu8T+@qF#J6&^7x+ZhA)dz`33Gc(6 zS&T7PjH3x_YeL;tNtP}gJFZN`=UOOj$|LQD59VH8@}0N;dz#Alkyd^s?=_RgmE4$} zmEouuzLB(2z-rQMnqMNpuvDDoNv@PKi8U5DU9c-9DlmnhR+9BA6^A-xI-`oF1Fa`Wi=0w*!TsH)?`u6L&T4ck)t8Qo))XY@ooA` zTv=cfwDx`~?UJ6vf!h+iTc zfT<5SDGLezXik9AIBSCoJ@LqO3*A=&1AaE=MeoF+?UFbcY;A2rP-|02??4tabZSoz zn1SzM(Ai|Q# zY}J)^QvkHSHa0HqVs4tIfSx#iKKnceh{AO5_1@{a$`R%CfEE#WGrRZOo{yDh?IJ#5 zb5p+MNcq9#p5R-D731R;Zv6?6(ke^*6{s#~G;3B$3?qs*6 zfLrqEU+sVK#UmNrf`vs!iK0qHLWY%iY&F@sX8iN=4Rws$B9%%%*ll{UNZ;St`L8!& znGXDIi#_yU@;~~){Pv>owC$IIvKoSUYjnd}&6kPEN>@&YqT;l>-$?b?%XtE4-B})u zl=Eu2ayOu)KVh)uX9qQul^_u^+q9NY8o&q~IJNq_TwxTUG_CB;L-qGE6?MpSS%?C7 zv0v`i5Tc=4d>r35sjqYRFV9P0uBf zf6*>oWS(iVHNN}-N%aObC?>WDqIcDc#d(K> zgBz|H?+K{ucW8?+_;Lw2Cbqv;U*&PX>YA!+)dMIphR>d7!kYFX?%5Bg#6LQbzlq3)795G`@T(6>K)Gq> zKIENiAnlR!>*5{MlZj;in|M89C~4l5?s<~Wo)X;;*HZPBn1wD~b;;x7Blam7z)R8C z3H-$E#acZqcJ~Aq-t@#a!5cC{&&TSHs{}@V)X*uy!aOO_jWV^p_B^w+5j9 zicbHHw;WhbN6@ktx3{)-hXzO%oCP)_#n-;;*O?&wr!lp}p0u-kl7`|ekrRH?C`y#v z(?MS)HV#3y3VtbpNI%DRK{MWu*&f(|Sq_H9zVX(rlVh_N4&)0Ph0L{ww;tp!GM8}m z$r=vv4W$QJ)k@vh1&;uAHv#tC3VLXxc9w??O8&FIO_FOb=VX@ia;3(gCXnHI$biD` zO@T@>+FdIL3@Vh^T*g9jT+~QSDv1K;uW`$l950L$)mVP! 
z#(_Wu>8<*vg}e^~$*90q@EdC+{<3wBuuwFJWp9?#X=@@KtfT6AI-92B$f@AQ+F4rVXo5WJY9#L?QS%?TY%p{SY*B&2UaB+7xw8O|mw z*q5S#A3R-Y$&F7`I3pwKKPMyu5!P6)Rk+Wh++dyKJ>6H|?6&P;i$mMs!m6kZZx%hL zPetRFy$|-rRgU&%Hr^7d$-CRx)BeN-*|*bs(sut=f4MiNxFO=+2d(J(+X?n=Mb4zp z$AS)AE`Bay2^P$wks$VICTXpKGQ#cfuHfm3zP<|CSL+e`1BX2wN*kd;1wHHeHH-5` z{_^VvYksT$ z)@lNtu~P;@2ZOE7%af_`stqJ-09P2Y-p|5dbF(R5LC;=OzGy3hIfPuDxlPWt&h`e6 z`@dc6a1g@zN0BN-&LntrM@4r|-QwVroVuXMY!95K*|^&t=frA2n=FGUqfQ{kB( zT1l{(95^rfR&1Q>$-(mK^l?SRDNXpK4~?YaoNo4XY)}Ypg|drJIqYnK8iQYWu+5Kn z-d?}!Zh6tnmp*f$WcnESavqA9)b#y z%g!xL?=MG}s8Jh84Q~0Ze|LQb%tFIYu8nN0(SbU}X2`vW5c*~xRfX4z&uOi-&wrc} zubHrv0N1~BJ^{Z##VM%vrgm&d-T4=Q4YAMjD`8C@pqQAYATf&r2 z8*u@>d~S#WK$LtN zT;g#U;^sfn%nufMH4;0=o0rg5$&{j zE7ba2k=J}lj)l2rJ0p|-ak!<4hX*5-3+;ft1wB9^JD?dLrUjSMhJv+k3qX3vP2b)Q z)h(IOElYSIkqRru+1r484jlj}X-^CToy4Z+b{UxZR={mv0 zGDhtqtGp0K$+hlm?DsqN_5IUoKvD=CA~Px{vnq2zmrl04nu!7?x2S;RP%Rj;7`sn7 zEXHrK$>};i>{rtvpGOE(B_zS)vrVV+VzNLWHzLMfD~3=Z|z*UYEMR zlH?oVHf*q~lV_2&naq#mWcHcRJ7>X)XbHgp2Wg^H{9;(N7$$ zza`K!1-zH<{A1>4)fpHC5#2$B!a^_}Jo4Bo;evBNv$cqQb~=RN=AsGm!M4SW%IQAg z^r&arX3#F$(>szjGK*kUe{gfEoqBM9Bd!v?&)*nyg_~%nv(ym))IA)k3pF+T*YC_% z>EEC0EeLs-ey5c`{mJ8u$pcb#zV!=9@$R)Tog`-MmBH6qydtr%S2?lq2$t_IUX{B( z$nnQ3t{YV#(M}!s!}RlYcXy0Kt_K8MzZU52Bil?-*8CQ?&zoj~&3$nG%y!BHVsJ+X zyi2>~bavXe6VbcdBulC7tD30Ed$N$Y>z%iuRxR;&(%o+3jyRonQ}~4)^yqFg=5*&i zV{>(%&;3m2s$rCwF2wbB<6w`%hDpC+OkgJ-`DXuf!9oN*^>lCgbR^<*b(s*j&Nrge zX+Jx&y#ER>o%396ok6y<1UM3u{x56vCwqH)+qWuzmkN?G5q~Y1s!`kZ zgX;frxBK2z8WT}r1)m>sp*w7-&wrW74AGTj@bYBK4-9PSdtfftRpLGXFXZjfSG;;X zwvTSA>yR>7L$s5wb-539`zSb~&AuupAYkHV@Qyzn7fxtq3{utY03q)AwU<_|6n3{! 
zYVm!xS%UF!-kKNO*QzB<41Tyg0MZ;=slckY<=E~dVTi0!_G#YfQqsAXK*#4G00TVa zDH&qvF5WY<9iat$CdTwgMb`OFCo=dONo%Q^RNJ^Tm*yMLuHiW+?E zOU}-;JlFk$lPS>tY%r)dGf=Z&qIb(P~(JKJu%qko5id@Z6t zeBz<$^x&CZ+k=}LTo-B03Hu(rkq74DO_^zq(qsAY{pUFje_>4$8S7s9$L?C-Y;#SG zMi|-Kl2>|r`#Znh!hnJg5B_nq(08RFE87)CfHdh{p40E_syXk(>!MsSAKu85_24fs zZZ1updUe>lyYB|Doj%Gfx5xd#-P`og?B-mnbsH={BY$HFA&`>03CvKAJEF{gQpfRt z`y+IF?f%3_v+V?~7%v!0IcSCfY-5f({Ewew)nD?|M=J*cR@nR_PCNL*iE1C(a2A`Lv_@KAphZ3HEK-Y zZ(8^Af24*_LuOn@exZPlSRyJa&i7JJaG{ToBGgsd|6#G@e1~jBhNdHjNKb|j>=|Wc zcPpfsc&jfrt~{7mME#AwO)WW-bQ|(#E?7fKat1R`Lax?)S7;R^^7H-V>xMmLHFgo9 z{!hR+pyw%PTa@}{r_L!oEuE^hca)uUPyJcMtJEA3PNU%07oK6sdU?i4`po|wcEoJ` zr=2VK|Vl$Fi!{Zip>{rC3Dx{RUhf$%>Jk>^pd-Kkb*{ z;Xi1Esk&mj-8xl4+H?0*Go0?%NOK?ZJE2T%lVtu#i$L{WD73Jefn!Af64>2D+cGzSn17OEN&p2GY zy+X>9c~pS7=)WPF7WVfzkq=*th-M zDU6GrCT={}ueB5f@Ybcn5A!2th9hVdDkp1vr*1ZQu!byK_N@R>fQals>iV*dN3|s~ zBXdSzQTAi+@cGT5lp#;#wH20IV)L+0!ZOrw+($&*tDC@NRLJtdWAh;%aFk zi<8!YYlg2QQyxBdfBSd{Njbz;tJ;#vD_Op->V1@Z6?0P(;VsZo#CA0Us>JP9xJa6) z*uL%nN=GC|Z3LVMJc#|=Sx!0^bx}}vRJASLxtKF}Vn;numC{;N*i7F#kbL)0eHEWv z(QE_NcLJi8nlb5-YiP0(pK>y|>(Rx#QUe))VpF3`ZVqmwacS&_tih1c`Xc?2UJLQ9 z9#&$~`kvkNBe_Ize*3MOxnlc?K3ik*ZSiebc}Bq#mZV}@d1WAR1d&o1LZGbu{V=CRp`vYtZQcYo-ZL3Z%ld1$AFzU)DR$MM4lCgnx-QvF~|2jzZLQQhhc$Pdp3;Q zC4Kcv_H!UhEQ(R)k-R2L_g@#Bcm`B`Me3?iPYYlLIBYrYi#YeGXkihL$E!3?LBzEf zpV%RTqp7mMZAa=%0_p{X!JlUzpbePqf)?$tt?ODy`mzrXzl z?{m)1<@I_#pAT7mAVs{O#kGO5s<#uTKRPctu)lIpzS~q;_J*ZJ4uMhq;?HS@p620# zA;hz6hiNnkz1x>BOAmMhInVsomtUL{>|Dyaba-^{P<0$=fH4u()5DhobRK5m#k`E= z>@raG$vP+_3t&3=Tsx*7BIk~j7#GGU;ZB-C| z%b6R?q>?^21vK)^CEm1~rZ)vRW2X%AhohY%?ppUo=}8O#-+wnKX#fVGhPR?dqQA7R z#2?rWMY2V2HrBJcPTCMKjB)0v>K06^Qv)6zGbSL=7Jjztkqr*N+KcZ785sYLxuAPu zVsCL;eR1b_jHH@EFt5H|P2o^0jUB)7t)|~!Q~m64r&)OOK=i>;np;=G!5m{f#9^+q zYae9HI;w z_V)nN%53Kw&33l7#R9bYkkt#!oh#?;o>5{GFb$Ody4EwN|NDeHd1)CHwHG+o(B<0Y z{^wV3)q(G&Fi?C+hnJ3Jy7i}vH72af2Xr8=uIxd8$9bwQNS&M*t z$z#Am19m3Y_yxbtGWn6!exlDCbg6nOYL9TxBv7|Nk^xAZz1YzLNxYba+>p)x*y8`f z7+hv&cd8p0(GZcAfxE1@Q8?q~Lr1jBd#@96f`vimw90d45hP 
zt)2UiBTEg&;coHyMD4(bH^D{mTSuLPTYt8BnfOwpmlSCO21{_&qoZNcBiQINoDKL3 zXA@vfA(zkfWG@n#iTJjarhGE-zoIg!S|O^D&i7mOBQW&st8D?!)8X@bPJ2#_jz&hu z+_A*$FO)h#wPtq$iaJP$be@Y9&SAA{hS{xCRGD4Hox12v7w5};#7g{YKuVX{i3Bfq z4yoXv4qqq@YRz9xlR$ohr<`?K>iR8=eI8CE{0wYq)4|%8-b;ULSUD5 z-_CwHl>=>W{mgOyNoM4HysG`0#&dhF&z@KDy3 zeKGK7ZiIFYvf#(g`h_MHYsI8Nwl~p|o_FPP{%jvzBs3>CAzy_C+$^Fsm0;uJ`O++X zCK`>WWGbnl&dQATz|J=RAE=t>vclfhO=V@}dJAI|kVba=kw&C&PPA6(Du>EXR##)} z=B&#sg}A%BYd@4ah~&;t26=L-4=ex9Nvsz{RSAZFCAdMtd;B%=$3{Qw`MkNfMH8tgvO`&Nv_rdL7`)$^^LClaK5MK+9TJ+ zH{S7nf_)x2_Fz)aRjDofoT4Lns>4Y13x(E^CoVwJF#NGWz7SMe9mG-sF!TByC$)$q zX!q`}w>}{{%J@ug_aK0?6_bIY>%Hl15}{WNVedX*;8HPBI@FL3lDC-P{U@h{CMd^< z#P)6M3+%FVW<|xTr;QgJuDuKo3Rm;ti-sTwOFsbuV1Qc&1cBMCzEhmn6jWWP%KX0l zscS#Jq_T6z)kzsSTT3c28R`mjMyUnM^vq#D(^vj}XH4gZlu$!Pw0Gw6z1uT-H~H8= z*Ec}{>`!9x<0A+afkMO0L*GvqI`HsN5{uGhDT9}Hdpn0qz7RvxlG8BN@2>5*Q(T@o zsUl|lJNMg80LL{M;~)gY;`!B~qn%mSc5u|Mxv}v5@3lZ!(NP7loZVXLHL0OkYp9KX z9!KoO&#ypb&y$QH9SlCSb_D6!edz%eX<0@b^AP)w{UQIKm(!$9yPvO5oQ?T7F)Ol- z#HsWNF(F?AbwR<|u={3D5Z6`tR<$foIxCQZScL;mFMM$E5`A=xZQy4{ehuPwi6>z{ zq@W4!uvJg?o4s?MQ8|kuP@)?i_r4COey*8{7Hc`CDeUnCJTcxjJ!T~{=K<9Q6mW|*xmRZKVz1%7t#44P911T16J+t z63bOY%uwW~U!`T+VH+&Bowa|DpC|3HF={AxSQo>UydQazk)ODJCufEI2%(e2_};{_ z^=QArf`y}<$Ion^K55UV7+d+1Cvg>5KCYZ_@32`b;fPu8s(-H~YODhHvCT!dG4@qf ze)hoT+zI}{SA_NTgN0V*pJu$a#+goSqP(B(qd(ClPDQwU%0{pabC+8|1U<90Gbc+j zMNFZ=X#cCTV&$Hma!H;9Vl)r8(D@L{5R4)~8f611h6d6s4L@-pDsKfVTCC=%taJx{ z(PfGkLVT#kLa;$@w8Wo?xXJpy5q%ZHw{Nt9FtOzU{(oSDr9W1>uVWz|vw`lejKI=a zXGR0qz|t6O*{zUFB8NL;R%ap@t9*OfDy9g*U=9_5?}1Eplb)_N==b%VD>4_w2kh;f zLyX{U)GPETpYVrD$P*-T%|$D7+-y}bb{Z3_Wnq@t?b?Q5%9aX@N-u3Y^8~**^_t8 zQXaPgc~GsOL_BY=fjmXnPFhM$8f_eT+iXM#qqAJ%+%VkqB~a6@bj{J&0vYLUN%%f@ zK16dBgqsC&-!ZLF_Qmn1Rg~!fEM-1FCwB8N{>C{w<38?A5e@ylMxK!C5(Su+u6Dci z&I_hb$UmRWDuycw4pzPT+*v!2wh+~R?@w>qTj>)C2Y>k~td8CLU8&iYF+u$Gs&pRa z>9t3^N~@>Qul?A*NvT@qxRL|z=hR2A33;L;D+bcmt$4czW2_xV1IH0Q<1e1a0vdDii(HkM9BJ z@on{Gc|))J7LoGrv@(&##$W@Vw@-m{+Ne$UqiAdZa3w7ACm~m2UnjgsulGmpNXW8x 
zmzD~I@gP1rU8$pJNDZMzn0jM?a;Z4{-0-@m&jqp&7O{ei=5s3 zm`s4Y!9Gwu{Iz}b>%|eju@F+|GUD1dnZMgnQTK}YZ5IOoseI#--{0_aGGFGz0XBPW zM{GLknwcS2VJNJVJP1?Mde|p-({zSjmt>ke`m2CSsWQ+u z_V%}tAFL}>Jtkuqw!Xo0N&1(U1+rB63d4XG(XYN70t_c#$Y6?DVz^7nEsh@)Hy~wU zN418?{IeUOO*om@*_t+K&Q2sz)$^KmFq7(aXA0Zw_nb&Ht!0mr_E#M{amN9pb)QG7 z*31@etYw9E|Cn%{SauID$_Ui>62fS z=x=84!oJ_pk-EM}y*&A%Y z*dymtJ#;KD17~EPjL!%x7nj{#O|~c3U%*mmnesfXo6J(VM5e#A@nXgWSJ1Mi)qwm#y`FBr6TiMqVM*eW7{3ZLJ~v4%Tzj6sUna z4KMZD@-#hPlQ$Y^uBQcNq2P(Mz#9Cs85)+89HI}+mWq=5Z;>=>o{RsS>c#ieIsCFO z8y^=Q3N4BqXdvoj+utjgj#%TtF(NOrn zZ0!48o&H|+Os~O;&Ygg>mGIDl%i-r%_$t05-Wx>`K{QHoilt$bQR2Yl-dnAsgZ=H< z-tT@A{9#4cWN_J-bnR-W_Jhb5E%`SS&VzRbXFfG9c z9Tm>g_4TRUC_G-?JD=5Maeq`CcPkOGYRO()5J?bKIsf!OSAV24 zKJ=gKBviKFC!Rtwo)x&_H*$w(uCr6O+oP;7xsa`!zW?{04%aG8=jn1mTlkAv3{Uk( zv6HnBikb&xD^PlqTdL^^uVdxeWE9pf;9}an;7k}-qA_U0;CAr8O}2Ucg#BNW*25h% z`yX>+7ImGvDnN~CQhnAaTFn;gAchF+kN*o;PLIy9!sGMd3Ps^+kwG;u@Q5>MgkYTb zI)J_s>Nq_$vcWmgJK6g;N_6$Rgq(oQtq@NOBWwUHHM(pwjVZymQj+EYTP;7i;_Vpt zB438>zDbqy+p~;ZDNX@_!c3&swI8`5VoYYQ=DcJAYkeUtNTLj2U2wUpe_Rc8whcn+ zu_Z=xgjzo5bVB09XKe?yKo zy6GpdJ*lltL$0_IOt1|4YFh8s`uc^lA{hO*U9bAq2E6DD`@QT$qaT4}TC{#+SKZ9v zdllwvwqMX?iA9gy+DK?-t|`O4BPWvi6T+UFnfUc99wK4fv$OuUc3jfdB58SYAZwaA z993Xlds)q4dPje(93yHg$Iev)jo$(Iy^|q&QpfDVS%utCyo?!%+}X(j8bw-E*rxJs zYrw|bZ0Oj)QclVn*RgOqIWlrw`xvp)#%p!$|9`AAo;?2Nb3jqfd-QVQpi$yAn{dfusXJ>gK=cy17 zc|G`kfU~Qc8&cPQRVYg>${eU=L{{Vo^NQ;wB zgd=6%4O~7`783~>;oqd3p1Shu1enMP5rKk069TFG-_zdyY54x#hJLiWqV_~&5ZEFq z`;Re0+8hQJD5{x7FN~@6l7KX;TU%jX(Jc9$jrmb;HU^-@8~lG=AFiR~VUh>==M%<9 zNPCzet;}yP&oh8L`e_Cb=S_$z7m0W~{1x0$#289Cbzm7FI=pCeO+*aBwr=g#T8j%`6Wyyr+Y zulfGJxI{m@Lc#25Di{*z{%vEL{tz0L2}93bxCY{i4BNv5(}&{3r7{w2Z`e&yh~X}? zF6{lV|5V2Oo$Fo(L>CS8R;>*{Oq;9Mm_(EdUa^|g;u~0c*Rta? 
z%i4*F(<*BnJ*6Whm}4eSo+}5|ek>!Fo_(*R=gsIe$sf2He(ehi%CE6A=8DWw$r_-q z`>axKWL%@E_I(&>T~|GU;zMg9eAaj;v}N**2jIldhqzu%egFUWc&QSI$t zAPwr^qf_kOMj_%3-F_~;Q-68s(Ipp}_pGpoMl)ka%W%IITCvp;AjK^m+8)E+NNdW*#?mFY$Eh7qWC8F9OVS>syAK0&{HOn5=glKGN^&n6;x_4BcgQV2MkemGdH$E9{e(kG$AeCsn4P`yJ<&&F zrwDpJ&vt#p$WJ!uF3pHkeTmgwX6J z6bE?cgXpaSfCvbd!i$tx>%DmM)<^Xdb7WfAHp%Z)rOZ3-B72p$ZRP%?NVfOmidIu` z^AuCix4c6SB`L<%=F4{r-gVE? zybb#jBiyV>{^VAKFUaqdiW5@ygd@2@B$1=2Q%8tT-AWB+HThcO-}VaAHIP0NHumpt z(9vpfP}lcSTDX8e`Hz@i5!I;yxtxvtP|~BtM}AK4xMGQ1^Mle)aa><#wGzd0xCt>8|VE6CG+LH$K}aq|M^pTzUM_y6v!T6}Y>SRHPlGBR?QRs+t@Jc#?s*9N)cQ>wqcIb#|P$+xWC*Te)m` zOSjetQkL^f&6bifWjeEJy1yHwp7!Tn%-v0%sW)w=)h(T20s_tD!|$t{$VNBLq{991 z+(jCJv`U;1rh^V^cDQmwGvu%^Iw;SU{cI#)!`8TQ9O)$#dd&{<6T;}&oqNz-5rXs5 zQE7&dhIfvnPy4pDZiw&Bu^MYgYFfGHqRzQE3(E+HUJ&pj&0a{^Tiu_te<3c;YQ1nQ zKn|U_dRq_!NrF8|(tWl&rwiSldDCEa?FKXgzUPXS&h{wrpN$RJu!1iUsU=vK;vITN zqDL+-p?VJd)X%Ro-PlgHdF0)-$Xj5}j~+#`P1E)kjuz|vN-xSKGY29!=ko3M0t`tD zs4b!HXVdYU`_i5_wn?jcw90f{05qJF+t~~Axs07H5ShJxQ8ILs(w@Nl({)Mz!@`pj zcMfws*~@I`oAC#6jA3Pd>j^QDR4oxsW6;@wOgMzgpf>XD{?wi%Eq{3EJ_H0$S5Zq8 zPqncNiv8SrMs0lHat)l#sUe?1x3`}HFc=S#oL;O_ev&P$5$U)9`rI5~^Pr2Lj?fX0 z0jziI+m8y@S*wj{y@tjyK!;!<_!}kR@F1Kn9QhcAW3Ozhsn(G4CkGBUlI}2pPz{-d zKpt~4nP%xd9|h|d&bL+qLfGPH%MU99~8i=QpAteVjPnhj($&6_VWw^V8;^6ao@QV}?~+1|3afCnddR9jjnx2>KO zIa&VVxwmpP5tEI?Wg9(THIN^Zf=Qi(n;Hu;=frp`XkCxVZBk~&-v$lM2ET|u-~+Zx zt!l9izl{VJrwW9L8e;?ZsZ;P*L0Z!nolnGDbY&bKq+QjUy^1V<;g;{sTHSJ<2x!PZ zroBs>Y4x;XY6Vt-DNF!htB>Lu`D)ebypX-GscMw*my$rEX`9i zIo?FpmKJi_jCQ-QIbo;cl&hclkysrhF*8E|I%5ucNec9`M9D^e0mQd^y6`+m2aU4J zGU|NmLOOM;NB)Tg2(^ZL4z!~<(%|1R4eWFitK3)3WR?mTtb8epZrDQGd&b_}#mEUl(H}6ig`LE4-vvQ(iE{{HS73>uNJm^5U z;n0l$^X<9Wi&XFVLbUt$bosFha@^WOt?T&k`HKTWvd=iOPac(YokSQ&{_f)$M!Yw8FU=Y|eDD&QQfmY;!#ZmElx&BvwiImkp29|NNoU)cGV2a$6?2CkbT!F;URq+uk=xvr7ijqZbw& zo)q1(y>ngoT(iQu+Lo*F&B(u>(c6 zRj5u3=Kh<%H>eH}Lw5y{J(Nr;|ES{`=C5ss)AymCj$VckTMKA>W|N}?*V>_0CW>l_ zM1XM<+SBBKJ4GvHI($Z#cuQV2)uSO-Cr6kBRIB~K^1~0NEFo#G300nj-*X7Gyh%A6 
zgi=cF0U24M5_43OP!E`5$<|1oM*9_>o@Jl{VNg(#C5}TSSE7N6$KW}T0r1~=LT;s9 zvuIX)T40G3NR~FkxtHeBir`lV|E}eceuj^>p7i=pkSJy z1sjOwRB*56H>be~eL>un>gOV>AqAF}@_j<23sCUpk`~__3wAiFR4Wuqs)h>L8V65n zBAsy8c%w=P&8tVNt!)t!mx_-zw-7wP-4FW)miEvSIWVgyOfnS!hZ=&~ii%~LK&du|e29|H^ zn0lL7JWz9;_9ob1=@zelueeBY;bru*)^y%tasw-Bo0a?s#1$<_Wyj3PSCp3MsH|n020T6(N_B1WJyr) z8VS9qTRy&|g|XOr3*`D8E*b<0OV+HYdh?=hiGFeQps|}JYRL~Qe+xa* zm~j+_KO35=pYtSO*dYszjDyxB(6u3AE=>npx@UMM1!|R%aVC)%N_|k-Vy}Haxa2@c zJY53n@c7K8oCSvc*UL+S1u#SCR~+0}1-x0Zgpsq93%J=r)7re5{!)A`@aI8Y5X+;C z3;Ph&xIR;{SyHIk@J4aD9;2`Z=hWo|r_@RE6ebmAqB*p_>T2? z=FLckqMF9_sgAkX+C_E+K6vdlG`6^Quj5h}K_K}pYhd1AcYbc3XH3A;>=_tthD;`o zh;5tUETJ5bswW3b0b>jE?62yhF=d3Z?JvjH)C%(fX(-luROM~~JiA+qmw;M;6ZwW}zn zw_Q$f_hvby%>PFhqG@wt`qgvhYq zfO&6AMa*=?rWXYByI)fb{pPL_TECfMl#T#{LF^!}ztPL-IiaS=s|e90NF~S^f-vhd z8H5?ZB8=83Ze8auj)TyHW(24gk5sVIwR8fCsWlpN>$-e@UZfDZV69y;hVbg^-ye&D zHSn;)TvrdP~9-%(MJtG66>$xIL&~-g3tDZdC9U%QGV0(QDHz$S{s@0 zlhx=Na8u6?EoILMRWVNTeYVby&He&PO!wq~T)eObd+Hu)=SICnDlMIUw6*ly_vDbp zC=<+mlVpGMrM30==!A4K?CD%5Z0RRCoUf`SaSzykII5p3UO_hBUp(zq;`@~BnElfO zw#(}6DL==kmv07AQ=l!huK2Cr-~UAEZ5Tls!pU_s2=Jyy+&xYw zesDmXory~RdT*0etzbKrCxA!E5oK_ey|)KMyh@&H(tdn}TP9#OUVFQ!WlH0+!`g%U z@^#Nq?|!xlO=bhZ43dzKD+hn~Uo=$OP2>iFsz!z?TdmsG3$GP=i&Se>K|Cvoil|VS z*Tu2k=PA|%LreUg4J2nR9ZGE`jjvPM@`$8KqkmbtVJLY>PM_$Aw-He#gTUrb1Qe_jZ-}N?^j3_|gMyPnFmj zidedr-KqPbviI(D40)p?5Oz8F(NzbIbhhQD8d&upVTViB%(h|(Vn?@b>o>7EF3HQs zm&?u;^po3&U;{3=YjlqIcjwY!t;UGaw8JK(Cs0EB9Ml*D$JKEJAN_LpVlE$+Sdd+P zLOE(djcb+@vf2Lv=RCe?rLv0%ME%?meiFi(QTul0!DY8iqkwloa)h~SC6NG)*p zCjfe^&s~a^X!xRj?zyTlzv2Uh zD5SB=osYsMjwEVtZA=m3;aXajieb%v717B=Pf5e%u~Z`b9So#(j2~eI;Xq5HAxonj z+iZd*5JYpcKJ0}rZ&Xf()NqF0UR)XkIOXxb{nbX;OutIYf>p0Wy3{_LLvZ^Lq=i#I z701Kia7Os~LiH3oZ>4(jWoWkqM-HZ7=273B*pQvOF=cDfKUl=%+vGMD=vjdCGJTe&@j7PCNX)Zygcdl z(>)Xp>;WhEm~`QG4G4l@rdGJoyvA9eUtOS<>nlr*dMPTMh)PG<4B4cQIb*}Ycr>qI zGE8w>8VBj??u5O$iu3@Tv}`VX%8!bXt9nurOoeg4ONxE5;bm(W2m#SuzHoj`tXgQ` zHFQ9FfKql4_Xy*e?2H``P|JL}l^G&}E?Rr_W7y777A)*jJZNWu#4SHhg-f*%6M`tx 
z*ZN(9Xhs#}Yd|WJKquXZeBOIw*fGId`Y4(zfZ$|i$Jk~V2Kt~>Br~VoSkprcg zNrA=8v-`J#$1+9_GcT-HO!+>om3WsHDcO8eaQ6a8rWyyg{GgVfzOJ*Fh~R=#wJ5LO zSR(uImGrEdGhEqqi=)E~cK}cs2}w2Qw~K3Y^#Y+iWvUI}JY()uA_l>hu1Z|I?Ykyc z8}4fFXm2$s^wcd~x%xVw#quv=YF+4^IJFJ-=sxM4mz@))>EjGW_$g_h&(q*LP1V6{^KMCYdXtA!96<;V(n$ zu(HQ?uHem-W<>P6{l=LP)6-D*BDn@%TsE;#Na2w-DDld|Bew!={ZygE=6A=gUgJT0 z{|rrqE*^g!kg~~=@M_799%?ei6PBWMN~?$noZrFo6BnqTp1Ry#^m{)aUI0ZyxZf4~ zQMuKSQr>{{CR=Y@xbZ{Joh!)xf|~bS<#{*rs?YO!JxWv?8ykDU%!S|8TW3tI`n#xE zwiM=!4271i09Am{cPiL}noa6EO`}M{yXWaA{yE?s9A+xx^?yk9xPWQRV59R%QwxH_8-lDPU}9Qnp4E zT>HX|PtYknh&SWrsYtJMvQotPQC!0eAEIiR`_Tz4O3&>Znnr{j-nR?j4Ai==IeQVP zB49;)nHiF#fs`Hk*0b77116!DaE&2u`pH>y$B6LiOQ2t>n(S$b-?!IqwVY6@40bV- zy`P1Y{nSd@+vuNlfmA3l>Tf<=8UzpANwRtL;StKzg4ct~^VghUGA0>p%i#60bdJ<7 zZ?-&TBH-0RJI|j=o6#PCVT*hge)0N9q_Q2DK`TqntSzcs8#DR^uc?=BQcT0EP^r=W zEIzL{;IZ=^Rrie;CC4jz!Jd7kVTW3hyQ5nZ7D(xb90pLidIbyZM+zq`>lKtWO&o0j z9es%)HqPZmu@ZUxJ$6zf?I>znxOexUsWEcS!={eOd^dJN4$5J=@AIvw@uD{9q9sBK z`;?rA%7m!Jpdk)xaGnx0Rkk+qng!&2y&bt;8ecrRy3v2!@{d*~chBsu&iE-gJrfxd zA6vY@HN+`j2>K?mVEFHZAPl6668X7`Qw1T61B+63b^`bp>GN?M30pl*m&ThVY8f|P z+{e%eXcEXcV#A8XAEJ6|qWGZ&H{9Tb@(X?KfV){C*(K6;w@UncOHfnwgXu5eRhKBN z@B90`>UH&Bs4#6`BF41O6YojVq$YlAs&pq5Mwg8c-9bplAQ%AXjd)!ve2eDaXYZy)BK$l6< z1OZUx)NwbMA%q(-BZ@sQecj@F+()1Pe1@vn-yMUhP~n^qPsde@EkB8(Rx;BVS(|MA zbfaVA$B&Li`**Pjt^DWf#S!zw2B#Ir#`Gq6E7n@b4|pHkqvw{n83A71r?rDxglhQ? 
zE0|+1j;kSB8nH-kppQjY%1?(TTj!|Mt7amK9%{_VO5XitRqO`?fdc5N2qk4@)O)MT zwPe@RWD^_3;y=Vhlqmy=>wET%6$0Z5aG8%VvGGV6OwadrUH)cJQL#k9W zyD6lsnzeT!mhrp(w9L2Eli<-$+OrZ8qK+-lmD5gZQj5A3W)M@jpiN2drwh!g%U+hh zJ~$oT)mt`-j|@Kn`oS{w9&s*~H^L8Fs>+4FLK77FGwSnHHD@?C=@l8M%;wj&$`-D- zNx&xl8pd%enLFnh!R%Rv-n&~nY-f)Z;I5$)JAiVbNVV{8mZ2&w{?9hyq>y8(+7HiQVAPwM47e#rf*8a3uEUUT zXWv}_O~mG-<(s@kpwtlcu}GKLxLD?osdG@D~^tN z{ab(jZp>OElj`e-n!s>zzO)s0Gvpc)-~%q))j6?rn^fv+o9E4g;LH+%qObKJPiNcw zefyV@P*;V6+=t!_j-fI2oNH}CFi%L%g(3rEhJXQbYJEL}#nM%R^x&X;Hi-~8 z1Zu2s(!|9AX+$W&AdzM{PYe=8ioz0f)Ou2i;k@W&C3Z6vpO3Y-DbtV6+`Yux+~y%V zkY#P{nCIeh(@D5Qtwarsz~PTOP8WlLm+i2SEI`LXi*+JX6IJZn(~xH&CM(+_7DoHp9h~Um?&V7xIEgdCTq-MF=TWigq<={O&4s_JBkaC1)*W0g0%YoAu zS`uRZZLB}31(!%19M-lCis(wrOxF3uS{mmFOGJUBP5$ghZR zhgxoIxCUxClXkYE)Tx+sNCo7bp0X#w+S+=uiW`A+!VXlP=$iP>ZTFRA~WoQ3*xy_9gz8B;s`SM$8rwf=4&)%H%FTwDCY zU(PrF_V16W-9^tWpZ50l{a?Q}w!Zgz{4-bSd4^ANXlO4X896b&PO6V4XmC42bwOX7 znyw;uyk5U5-jcwLpnZHD7t<^rijfw}%m;I*!Yedb}?A zn2t|BRq~l1<74(Nf)5Ulh_8Mg9qGRdj@h`<(f&5p1a_%3vzE!02oyfDDN!K?`yd1R5@T%pBP|Wh@z&2@EWpqi2 z+X(+jvm^u_)eAF`tZbW9{{V^*rvF&%abo@bJEHxzqa$=IZm*Xg(Fb(?!|hUpZqP?# zD1l=Gj`S>6?R#D&E(@fIpF~>xZT)*u(|eR#dN}BlTB6nD_U<}eak!O!9&HTypzaG< z@M(4gc~z@|63IRYNBQd0Q(u~FN^9PWs0{#$cZ@>O8YX(PysWIo@Ao@ZYLlW7MET1M zI7w3j{mWN)3dUC5k~gJgO_nunP zaosWR3L)0weLIM|TD3u=_;6_)0I6-fJ$R8|J~iW8|D^DtnunOpDZ)dgGt|$fiBQK> z6zH0qDtitsMs#P<#hiNpFFmLtqdBd4bK=`y70a|kzh=5=He8PPI8FeU?QSMN<5RrM zak}kb*C}p~i|cP5e;2sXI5xVl|6Oa8 z1Lx66`4kYOQG5aoMSoPk8qCM$AhZG$mu}eEg?lNt|M@-qq9csC-Zmeq^jHv$GIp#H z^b(WSA-GJ3J7KG@mz^UZWyR3yyeWGUJwTbsH{dQ*PZ$k?*;-(3<#HpN-!y{cCe=q` zjJb&oqOyqysSj!1!rFe~UY7dcHA7396WCqu@p7`4RJ%+;gzXw7AETsy9%E5xPL=nr z`rh$m4a(jR3ccGr+t}2z!#w)#(L7TF?rbddm45d1<}F7qDX`VaL+*_ z)4Pg0LSEJ0ho(SXc;n_dKyWUI49KC-*HJFOukpn+Qub+St6cRHSUA-b#8x$tf`>~* zmHFZ)(1*`5LW{U?&>FK)zZ~{!afdhl{XA-LV}r}&t1pRkzNDqPp9d#F2mSDaX*j65 z6*l1UNJ^WNv&FTteB2Yq$P9dSeFpK#WTs%*;+r>6SuOUW+^|f+Q2i3_mA_Ic2tJpk z()abE-gb);GQ5aP5JLX6sV@k^K?HT<{;rlEY6j2`mxBH&4LA9_a0sV*1jnCF$DUEZ 
z+=-aJapMNabWQKpwUAepD-mJCw*i1I1H}c)FBGj^E#9;ua`QNr341%PL~l?SEUUVo zi;Eq8R>>$=As^b_-d^WAuqPbtNtR0ly=c7QZO*1nb+_~+^s0Ceh7^Mm>4>?-`^iWb~=wrP3wIJZkr3#uI^R2X0f@kp`-+S+0Tx^t(y}w z|Ep(txlEf>$njELoe%v!XdSn#dqSJ>gT-0SIVakT4TaHfGkMt@y8N)~s%@y&D928} zk7Lnj`IB$tEs%$L<2dM$b}AlYT5^TkpY-%Z_0Y!+sYGJ3n(5_o_`3|vGx+a|p$}19 z+~#tIb`IG54yo#!@k+!0#a`k~`QM{c(361RCH%A8h79_HwX5g2-o3v5B`r}91XMP- z??c(F)Y3){HtsvPfXr4DtmUgA+QafWOM3eiuV_Y4JNwky+AuYZS*Vw&t(pEGAps|$ zMJ;N!w|CiEi8)BBukS3sdrPy1Pfhl5S)2^YFcG4lVuo}QZrK?#lQ-~$^$>5ezfTj8 z!@-YD7$&Mz9e=0I{<`baVdTHrw+D+GyOQyHd!Q=+wz)g4K#xXc&9jq|og3CjD9mE5 zDGqx{AZ3N^;u6B25`VDttCw9q&a?*T^18UJA1RKx;x)Y;EylF1%00D8GBq_d1r`7g z0{8vmQPFI5K5_$cZ`7dWm-)9Jolc@5hS6niuC6|gJ_JL&Vi`w9 ztOW!(G4GGIKX0|QnEa$Xuezja?C$l1KvX{)Si+4UgcYqo(WO9n(J{|vq-t4MSUB2I z@8ZQ1lK`{J4vG6lS5h8kv0~S5(lsFOdOZ^K%ZHl?sGB!`5{C}Df{IQjsuvYvoP_$D z9MLpI2SW%OA`!KIAjdVJeHo%6Cttm!Vq}p2=`LxV-R*ec!=$qzxb9p0*4Uu8!XYGn|T4WKv(d=m}1m@9-}V{^DY2I-D2 znLqqF;;>)7dMM*a@rLiBYs9$xmHkyxbEv5<$K*!gb z6)ZrYeq(OO)qZq}w#5Gozrk9@+45BSlxX5D(kL*o8oa%2^YO9aA+KfwFQYSAl(5x} zm5iMjZ&IOL2?k9Bf{HF)h8Wkp*}D?d0~*pZf_#5eO{{?cp|ufVA+I_Y?>KGCQ^!EY zNa39h`sN4cFw9&iyC)Oia9)UCTKHyDY=ZBU8V5? 
z*OiYwbpi4V(G_|8oQ6&J7Oy;_ozxNe95T3`QS z4N*xsa`+fv+QfuBwrkj_(B8Bj&{TF4U|;=Ag}UQtGa>)2tAF}RA|qZy|MT5bg_RzI zRP=hh{qtE3sK+6zL&};Io99sPhMREQgt+qIHw-RpfXnjoszuiB`euUL4eeoM6+4Ix z#raP6B8G?UF&pURvsqgUxBLx~EMWF*!)8mplR%(j2k zWdo6tbEBrQ0Y0RG_}_q77(3Q4wc|uBXk_H=Wukx$iMZE0!Qb1z(yCywcGHoD!1ZU3 zJ3WLNLR?&3so#Kk{eIU8%w=LHh0BX|%JWuw9siR=u5uu)hWHP#IiIQQg?fM}uZS2c z`J`v!2hnAZho0`m6p_et(b(CA;F2{*l?a{EyaH!zfbQMh0ZG8~K4CqV24(N@3kh8= zQ5IrwPNw2-VXBD{x9&jsE^QsG=n{FzdWCGRF=8%l*JxV~5a^{=U2vYZZ;56Cz&f+ByC22{ zno=Jf7_bJOJi?fvN_pU3I`iEAT;Zv=hmmhj=EfcFOv5YOUAk4M5U=!XcaR)+n-!mQ|I)U zIC(fJVdEKh7Xbokb`JLwPJ1NkImqCEyMYA~OQOl)zIoJZFgk@_=wH+u+12&3L=MmH z)+ns@^aLduV+uOim^1V1N>UGEENwSv(RHnI=ss7lT8v&fp?M`|wj*{#DR#MCc1}&rWg!6Baosc=^g(rHS1Xv=i=u#+T)q*|MzHW4%NgrL(zoC{KQs-(r6);Lrs!$$T>MR za+ol!Ma`l#r<|TVMmZKChY>Pm8MRR4xL6D;!yM;#?~nbr*S-7n`CRYoeO=d^{#b2a zvJ>anm0`CsKl!&mA7y#BS@2mwUK8@<3>;(%f2gjW^HS+bp{zsD0;9SN*%$FY?!dsn zw^gnQ2#VAK{h>>VLY%6vRC~704G&ffv187x4CdITUE|jWu5O;eJVZ@8S%N#rIvvOR z-tHbU29TEHvs6^Ub1p~x3`#*+qvUZVG~Y}VlZn6MANP0T3RXHVvT4LU#LsnC_AWc` z=g?mu?TPem3Dm3IU7ixp3~5W8SO?9mPj{iQtI z$@Axw&w(&x7k#0QhMrUvbV_o`P#?KYqz)b0F*t12caGvl2_tT%wQa7d z7MwshUl}k66S$1b6>dFiOW0fm(&b;ZlwC(2y_E8^Bt1$HtypwL3EnZNG~U$?W+IvGkZ$4vYeey@!%g8X5#?*SdYV`a<2{< zaluYY0|vx5zcWe$uUY!$MSwJT-nFg5|d!I4>#T@q{GQGgt*u(K*FDQ$T2`W z#%65O2H{6j-?s<+ccDCJ`91dyx`QCEDg2hG%$J5A zc~O;Lt?x^+;6IHJZw%y`t72>saI$=2;BQLNyYC*>N)ksadbpo3)*9_cR33sryD=)t zX$W1=Q8^?o5r>sEgC=aRc#465v2lUAx_GWSfsxI3qDX^rGPbT151Dq;v@h3znV?Wf z_^(?Th@4#@HFRrtwyxE|260nWui;$*Aip)V7mO2+K-U!w^s5%D!V1!oMPcLdE7Q5L z4e8{)!#7LYb`sK{jcsk+53dE4evt%OXy9;_k#aW}!yfLQHi$m^a|g(Ir-PJs-J#+$ z{WymC4y6yw>Zz(s81Lut-*8f8bRin;MRm@^v)!rkUQ62}>iWjlv!Q^Z%K;)Sk;eUK z+Sb-K9vo#ZUgLI))GO*ROGO`nXtN3O!rIBL-9(#?>w7{t9~$HKAJcv(=zkkQ!~rwo+TLwR_fB)NTx0Dk_47#sVX zM`(7fJQ-Bt8y@sMoN^EQZ;@v#)DnTdmZ+=8!x8|6I7U& z%8N9?$5gMAFM|H8_Xk5E(5~R=c5oLJq=>Nc znCag@=0p2H+Qe{vzsV`^L~rkCtLdO$c_rb;n^n@ z*TQt@ge%8&F?VDM5r043by9SLQ1K&%DX=cn2I8qWVYm6uF?S_D)06jKmj4E_ZEu_0 
ztmF~g!UdUkU!$U)C+o4Qs;csgQ@R=^-1>x5(KUvX^vlQET7N&1$FOwX*y#hb`}dvR z6(cf|RZ~`W(0R9O$RJQcDt`bfO5#`mfEbdVjI|MCViOjdQg36W$zaooN{^ej9{jo! z7=_YizP!)8Zd^RcE!~5dt}$B~J855gvrh?@0<8&#WwEquKEn9%WXOVk6=3#e*-HbusWAfgpSlD@}{m5bB-xSZ~ z&{O$r7Z>A{s{0Z8zW$ZT;28`PQhZsL)l^j*k!?L9MO*}~X)HrqakkwCp{#H%h)HmD zw)UtMhfgy0jN-^mzo@Ucmy< z7qLd%672BJ>60Lv9uPYqtj?^CJ>WRiqQxky_jl;$u zaNvYvYlho@A?Q#A^>dW|Q)lEDv~WVEH&vq8tWWE|JXDhW`;XoIb?1iaMaCinj}{Z7 zO8m(O?~rI55c?q#j01Ye@>6s$$gT1g!W85CMg0ME=7%=~1q{#Sx!E!A=7QGWnggt) zxjo0kapjhCR?ve7YLUhw02tcWU65uET0%7(^uJ>cqKnf6W5ciZz|d&(+#fgxNPwgh z`Qnkp02wA&;s_b4i^L+oLnW>AE-Ax5l3SXZ?#t!(ozH$B%4lqBhLg?WHh|2e){P~8 zEZP&vvTqhpzS}SqGxIkd>g7;o8E0Zlh{%F`+w9gBwiqY;WoNp zKm4kU7&9eVNxr;R-H@f!CotDxgSwJCPjtW3k@1*0@_{pJ=;(AwQMP$7FpEbZi#er^ zR8-j3WZV+`ww5U`taVGqX=A%#{31C8i1Ei}TvwO=?@Q_8+fq4&&rHJH@KJbi*-*$x ztUw?LMKT9tt-F8;EH=Z+KAhq{-U+yGLATbrES%;S{`B=}C4kET7LVcjh0T-9Y#u!# z^w1{$kg~HTb8Tt*LXz!JByj?NPEv zYaD_eH)ftRf$*aqwbtU9Nlpao%=KJjv3bJv;B%$#dW0=yx@-c(;kFv4KXwUy9Pu!J z-$LtlA5b#8>P!1`4E-8^VUi3z$K6i$uCK4-n@7X-^G{I!>#}puW#`0gEN{(DPAavn z;bv+ARWM6tF615nJA1~jSjqi6%LW*jz~`&Its$GgIld)ng$1;a%r`ELmNc^k^%r}Z zb8|}sNE-}gAe0ZFT@~}{vWf?&xmIKR-MNfVR*Z{Ydxd#nfta{1$Jg zNao0ytt~V+*j-6GCTW1>cI@>1Pd9ll3o4rwHNePENO`MPY=YzwwL(!ID#(tS zaJIUv#Re|kTDFlkH{4U`U6A4Th6y|FK!%1szYS>mQj>Q*d~#7bL{WDa0&X%`{) zaIc>n$Z+}jUIVobU4*6k`R_{Oxg^vZu(9PD6 z5f+P8q*IZW0;56G4}d|6^k3fn@9MYo4E_2y|72p1rDWjI2w&^CGI5D{y3X$!i!`aBzb|E z_%ZWE;+nYIb89JOL}>`cq#dEZK0e(vFDkpglHn8J&zgl^?aB6WM=KJGbV6UXE5H}l$DJZeHSpb!+ z_H0Wm`({)by|}oTDvPq`1RQ=M680-t8$@4+I=xPf(5373EXA1V zAN(KT1q3b<8N|PiT`eD=picG)4fldOyV$rHSH_rrz2pVJoiX2;9NoPw?%Mu8-d;M{ zyCy>hPJ&cuR+U%i=(1i}_MEFB7qpDabrT`-`{2|{ina-FzQU6M473XY zh&B}e3wvyCf>A0b38}2(0)^Jl{%2*BUjJzylX=zberT|YBl>EgoP3S4HA1WRIRC$2 zxR2Beu6}d9{Mfs1B#>98wXZ8}i+{c4lAup29yflJzktdiAUJPpmQy~{8~{j(lR=gS;JpfyH=b+!uNcFxWRF#c!4*~E30C-6TB1re_fa-|&}yh(tYC)4~Gw(QP01Ww)1e^(RU(&FxR2QILKyMEM`wG|nF;snpUSp@sZ> z-?3hY0!v4+T}LAigWpxv6Rc^UrFgjT99wYB=)l0_vUsu-5Efz*p`h0v@gxeoqfz;e 
z{xQtQdl31W!Ca{PLxd&FD<^|%Vy)Cu6Fe+lpVid72W_w?Mnpu!<9`_Lh5w2P9SLkb z_)|I8@2iOwjL~-YOj^{u$b9P$r();N#!3)qYJ8`gxc0Xjhy~kl0Y;z@|CW31JTBNB zz)9IO#^_j11b2)?t9vpC_)~|)3vHVXq^D&%VO5?jO+xxE77&gRZ? zp_m(%AXKd+3`Nqu4%}Ki+IYlVQ4B>PJ4*u8pml9MANf7dTsRr#2APt+b(`?^Mw7lA) zi!dLyl+6v;4EL`NU6&F!YlHJIb-IAav^tnx<x*-dJjP((S2v*V?2BcI3z*U(f=1 zEzzj6J+K#BKkM6@YxH=ZVtC{UA9EBU`vB)#mK7@XevY;CFH~1mkTM`C#k@jUKhJbh^gaxmXGWN)0CTWGW=BQ`QJvT)k*SP*nR~2bZM1JZerHsK@gqlMge|S?gVflTX$^MLLvH;fysD<%gQak%Dtd>&`rDsbqKISRR@HWxbA|UaQ1g)n|MAf zWc5fZd^V9qC@US%WWR$IDSk~hNmJ1#A%{baF}8MB>paRed}#gzbe}K)*+)3Tx2l_G z?4PVvuM-uZD2RF{TR6{XKz~roBC9fE#-oL12Ft}Y6(nKy4XiZyh*TO%gQ`fV^!0zc z8`_ev(sip&5{b0V0&8Nt#vR!N@(oC)mSy2pWFk4tH23+p+!?mQ(~fG zI?te83iEIw7TEctYF(X3LfkP$y)%H0@$~5Fu+qTiNgiKG7wboZqnh2_hpIgou9e1q zIjZ~?(a7lP)YO4pzkZr6cM}Yyi<((kBYY)8gQAmT%KIle(uLy zSnGD@>JmvhiVzw2*)Vl`y348{YjDjmZU4E`fJ4{eoOyKEI5B2ey`?rX)07Fz(;2Msd)33$pS%)<#gQhy&h`qNDouBN7zTFQt%2;Toi_rq;5@&Za{u@!!)8Q3diz@*e{8 z2Fs7VFk@bJHo_p8mXQ1swHu$`**%m&!01?iWJkqkCk*zbi-D(&))9bB&M0>cQZ;+= z?ej()`ULZU9oUH~uT>FpDu|?b;E2u%Tj|Gee^(F1^#iYKM}fyV9S5}rh*V*X9)eDu zSh>Y8Sw4a7DeIwpHZ9D$#i-)PK7h_Pp~*MqZ&pe@+PDFikzVuqoq@~2F@H~lnCJ1G zR~(7kLHZ})|GOuR`Ru$~Uhb0wKhq4A5rDI`R;JVNIo?I&qlwDX=&L+Eo*^?PG?$aG zAEnFBFpAyT$^+dT@ybiu+}!kAKa4>RYEbJ?*cq20Kv&W$oX`6aYMfWwT?$|vc}rvz zke7yRt*WL3Z%&C*JhwLt07kN>tZXRKxys~O-Xj{0g61BQ|60H_W*#+_$b~86MyPm zNvRoz>dwVFXOxf5%!#{H0T&xgVe0-fw=O6PJFp_vZYXu5k|>;E#nvHemKJB!G~SMe zdbA}}(sU?uGh(ZPX+VSXOnG|<*t%G`{PCU1w4J}|;ukuyU&# Date: Mon, 26 Oct 2015 19:27:53 -0400 Subject: [PATCH 131/630] fix #8, more informative error when libmxnet not found --- src/base.jl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/base.jl b/src/base.jl index 5e547de727c1..0988400b9d31 100644 --- a/src/base.jl +++ b/src/base.jl @@ -19,6 +19,12 @@ typealias char_pp Ptr{char_p} const MXNET_LIB = Libdl.find_library(["libmxnet.so","libmxnet.dll"], [joinpath("$(get(ENV,"MXNET_HOME",""))","lib"), joinpath(Pkg.dir("MXNet"),"deps/usr/lib")]) +if isempty(MXNET_LIB) + # touch this 
file, so that after the user properly build libmxnet, the precompiled + # MXNet.ji will be re-compiled to get MXNET_LIB properly. + touch(@__FILE__) + error("Cannot find or load libmxnet.so. Please see the document on how to build it.") +end function __init__() _import_ndarray_functions() From 8bb82e437b5ae1b947e2650139c2b69f13d37a6d Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Oct 2015 22:04:46 -0400 Subject: [PATCH 132/630] use sphinx as documentation system --- .gitignore | 1 + docs/Makefile | 180 +++++++++++++++++ docs/api/ndarray.md | 247 ----------------------- docs/api/symbol.md | 73 ------- docs/build.jl | 10 - docs/conf.py | 277 ++++++++++++++++++++++++++ docs/index.md | 8 - docs/index.rst | 34 ++++ docs/sphinx/julia.py | 23 +++ docs/tutorial/mnist.rst | 252 +++++++++++++++++++++++ docs/tutorials/mnist.md | 150 -------------- docs/user-guide/install.md | 29 --- docs/user-guide/install.rst | 54 +++++ docs/user-guide/overview.md | 267 ------------------------- docs/user-guide/overview.rst | 376 +++++++++++++++++++++++++++++++++++ examples/mnist/mlp.jl | 2 +- src/io.jl | 7 +- src/model.jl | 6 +- 18 files changed, 1209 insertions(+), 787 deletions(-) create mode 100644 docs/Makefile delete mode 100644 docs/api/ndarray.md delete mode 100644 docs/api/symbol.md delete mode 100644 docs/build.jl create mode 100644 docs/conf.py delete mode 100644 docs/index.md create mode 100644 docs/index.rst create mode 100644 docs/sphinx/julia.py create mode 100644 docs/tutorial/mnist.rst delete mode 100644 docs/tutorials/mnist.md delete mode 100644 docs/user-guide/install.md create mode 100644 docs/user-guide/install.rst delete mode 100644 docs/user-guide/overview.md create mode 100644 docs/user-guide/overview.rst diff --git a/.gitignore b/.gitignore index 503353f37bd0..e2b2ea70b3fc 100644 --- a/.gitignore +++ b/.gitignore @@ -7,3 +7,4 @@ data deps/src deps/usr deps/deps.jl +docs/_build diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 
000000000000..e1763e330fc7 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,180 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +livehtml: + sphinx-autobuild -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. 
The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Mocha.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Mocha.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/Mocha" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Mocha" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." 
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/docs/api/ndarray.md b/docs/api/ndarray.md deleted file mode 100644 index fae68df32c2b..000000000000 --- a/docs/api/ndarray.md +++ /dev/null @@ -1,247 +0,0 @@ -# MXNet.mx - -## Internal - ---- - - -#### _import_ndarray_functions() -Import dynamic functions for NDArrays. The arguments to the functions are typically ordered -as - -```julia -func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) -``` - -unless NDARRAY_ARG_BEFORE_SCALAR is not set. In this case, the scalars are put before the input arguments: - -```julia -func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) -``` - -If `ACCEPT_EMPTY_MUTATE_TARGET` is set. An overloaded function without the output arguments will also be defined: - -```julia -func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) -``` - -Upon calling, the output arguments will be automatically initialized with empty NDArrays. - -Those functions always return the output arguments. If there is only one output (the typical situation), that -object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. 
- - -*source:* -[MXNet/src/ndarray.jl:468](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L468) - ---- - - -#### convert{T<:Real}(t::Type{Array{T<:Real, N}}, arr::MXNet.mx.NDArray) -Convert copy: NDArray -> Julia Array - -*source:* -[MXNet/src/ndarray.jl:237](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L237) - ---- - - -#### copy!(dst::Array{Float32, N}, src::MXNet.mx.NDArray) -Copy data from NDArray to Julia Array - -*source:* -[MXNet/src/ndarray.jl:201](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L201) - ---- - - -#### copy!(dst::MXNet.mx.NDArray, src::MXNet.mx.NDArray) -Copy data between NDArrays - -*source:* -[MXNet/src/ndarray.jl:189](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L189) - ---- - - -#### copy!{T<:Real}(dst::MXNet.mx.NDArray, src::Array{T<:Real, N}) -Copy data from Julia Array to NDArray - -*source:* -[MXNet/src/ndarray.jl:209](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L209) - ---- - - -#### copy(arr::MXNet.mx.NDArray) -Create copy: NDArray -> Julia Array - -*source:* -[MXNet/src/ndarray.jl:219](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L219) - ---- - - -#### copy(arr::MXNet.mx.NDArray, ctx::MXNet.mx.Context) -Create copy: NDArray -> NDArray in a given context - -*source:* -[MXNet/src/ndarray.jl:225](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L225) - ---- - - -#### copy{T<:Real}(arr::Array{T<:Real, N}, ctx::MXNet.mx.Context) -Create copy: Julia Array -> NDArray in a given context - -*source:* -[MXNet/src/ndarray.jl:231](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L231) - ---- - - -#### getindex(arr::MXNet.mx.NDArray, ::Colon) -Shortcut for `slice`. 
**NOTE** the behavior for Julia's built-in index slicing is to create a -copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. - - -*source:* -[MXNet/src/ndarray.jl:177](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L177) - ---- - - -#### load(filename::AbstractString, ::Type{MXNet.mx.NDArray}) -Load NDArrays from binary file. - -**Parameters**: - -* `filename`: the path of the file to load. It could be S3 or HDFS address - if the `libmxnet` is built with the corresponding component enabled. Examples - - * `s3://my-bucket/path/my-s3-ndarray` - * `hdfs://my-bucket/path/my-hdfs-ndarray` - * `/path-to/my-local-ndarray` - -**Returns**: - - Either `Dict{Base.Symbol, NDArray}` or `Vector{NDArray}`. - - -*source:* -[MXNet/src/ndarray.jl:384](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L384) - ---- - - -#### ones{N}(shape::NTuple{N, Int64}) -Create NDArray and initialize with 1 - -*source:* -[MXNet/src/ndarray.jl:118](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L118) - ---- - - -#### save(filename::AbstractString, data::MXNet.mx.NDArray) -Save NDarrays to binary file. - -**Parameters**: - -* `filename`: path to the binary file to write to. -* `data`: an `NDArray`, or a `Vector{NDArray}` or a `Dict{Base.Symbol, NDArray}`. - - -*source:* -[MXNet/src/ndarray.jl:409](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L409) - ---- - - -#### setindex!(arr::MXNet.mx.NDArray, val::Real, ::Colon) -Assign all elements of an NDArray to a scalar - -*source:* -[MXNet/src/ndarray.jl:158](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L158) - ---- - - -#### size(arr::MXNet.mx.NDArray) -Get the shape of an `NDArray`. Note the shape is converted to Julia convention. 
- So the same piece of memory, in Julia (column-major), with shape (K, M, N), will be of the - shape (N, M, K) in the Python (row-major) binding. - - -*source:* -[MXNet/src/ndarray.jl:84](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L84) - ---- - - -#### slice(arr::MXNet.mx.NDArray, ::Colon) -`slice` create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest -changing dimension is supported. In Julia's column-major perspective, this is the last -dimension. For example, given an `NDArray` of shape (2,3,4), `sub(array, 2:3)` will create -a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is -used in data parallelization to split mini-batch into sub-batches for different devices. - - -*source:* -[MXNet/src/ndarray.jl:137](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L137) - ---- - - -#### zeros{N}(shape::NTuple{N, Int64}) -Create zero-ed NDArray of specific shape - -*source:* -[MXNet/src/ndarray.jl:105](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L105) - ---- - - -#### MXNet.mx.NDArray -Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block -of tensor-based computation. - -**Note** since C/C++ use row-major ordering for arrays while Julia follows a -column-major ordering. To keep things consistent, we keep the underlying data -in their original layout, but use *language-native* convention when we talk -about shapes. For example, a mini-batch of 100 MNIST images is a tensor of -C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory -have shape (28,28,1,100). 
- - -*source:* -[MXNet/src/ndarray.jl:32](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L32) - ---- - - -#### @inplace(stmt) -Julia does not support re-definiton of `+=` operator (like `__iadd__` in python), -When one write `a += b`, it gets translated to `a = a+b`. `a+b` will allocate new -memory for the results, and the newly allocated `NDArray` object is then assigned -back to a, while the original contents in a is discarded. This is very inefficient -when we want to do inplace update. - -This macro is a simple utility to implement this behavior. Write - -```julia -@mx.inplace a += b -``` - -will translate into - -```julia -mx.add_to!(a, b) -``` - -which will do inplace adding of the contents of b into a. - - -*source:* -[MXNet/src/ndarray.jl:266](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/ndarray.jl#L266) - diff --git a/docs/api/symbol.md b/docs/api/symbol.md deleted file mode 100644 index a17f0c261fa2..000000000000 --- a/docs/api/symbol.md +++ /dev/null @@ -1,73 +0,0 @@ -# MXNet.mx - -## Internal - ---- - - -#### Group(symbols::MXNet.mx.Symbol...) -Create a symbol that groups symbols together - -*source:* -[MXNet/src/symbol.jl:77](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L77) - ---- - - -#### Variable(name::Union{AbstractString, Symbol}) -Create a symbolic variable with the given name - -*source:* -[MXNet/src/symbol.jl:70](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L70) - ---- - - -#### from_json(repr::AbstractString, ::Type{MXNet.mx.Symbol}) -Load Symbol from a JSON string representation. - -*source:* -[MXNet/src/symbol.jl:240](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L240) - ---- - - -#### get_internals(self::MXNet.mx.Symbol) -Get a new grouped symbol whose output contains all the internal outputs of this symbol. 
- -*source:* -[MXNet/src/symbol.jl:63](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L63) - ---- - - -#### list_auxiliary_states(self::MXNet.mx.Symbol) -List all auxiliary states in the symbool. - -Auxiliary states are special states of symbols that do not corresponds to an argument, -and do not have gradient. But still be useful for the specific operations. -A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. -Most operators do not have Auxiliary states. - - -*source:* -[MXNet/src/symbol.jl:58](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L58) - ---- - - -#### load(filename::AbstractString, ::Type{MXNet.mx.Symbol}) -Load Symbol from a JSON file. - -*source:* -[MXNet/src/symbol.jl:247](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L247) - ---- - - -#### to_json(self::MXNet.mx.Symbol) -Save Symbol into a JSON string - -*source:* -[MXNet/src/symbol.jl:233](https://github.com/dmlc/MXNet.jl/tree/7fa151104fb51d7134da60a5084dfa0d240515f0/src/symbol.jl#L233) - diff --git a/docs/build.jl b/docs/build.jl deleted file mode 100644 index 9576e4112c38..000000000000 --- a/docs/build.jl +++ /dev/null @@ -1,10 +0,0 @@ -using MXNet -using Lexicon - -config = Config(md_permalink = false, mathjax = true) - -doc = Lexicon.metadata(MXNet.mx) -for mod in [:ndarray, :symbol] - save("api/$mod.md", MIME("text/md"), filter(doc, files=["$mod.jl"]), config) -end - diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 000000000000..ca57eed31146 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,277 @@ +# -*- coding: utf-8 -*- +# +# MXNet documentation build configuration file, created by +# sphinx-quickstart on Thu Nov 13 00:43:40 2014. +# +# This file is execfile()d with the current directory set to its +# containing dir. 
+# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.insert(0, os.path.abspath('.')) +sys.path.insert(0, os.path.abspath("sphinx")) +import julia + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.mathjax', + 'julia' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'MXNet' +copyright = u'2015, pluskid' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '0.0.3' +# The full version, including alpha/beta/rc tags. +release = '0.0.3' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. 
+#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + +primary_domain = 'jl' +highlight_language = 'julia' + +# If true, keep warnings as "system message" paragraphs in the built documents. +#keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin theme +html_theme = 'default' + +import os +on_rtd = os.environ.get('READTHEDOCS', None) == 'True' + +if not on_rtd: # only import and set the theme if we're building docs locally + try: + import sphinx_rtd_theme + html_theme = "sphinx_rtd_theme" + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + except: + pass + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. 
Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +#html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_domain_indices = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
+#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'MXNetdoc' + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ('index', 'MXNet.tex', u'MXNet Documentation', + u'pluskid', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'mxnet', u'MXNet Documentation', + [u'pluskid'], 1) +] + +# If true, show URL addresses after external links. 
+#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'MXNet', u'MXNet Documentation', + u'pluskid', 'MXNet', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +#texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +#texinfo_no_detailmenu = False diff --git a/docs/index.md b/docs/index.md deleted file mode 100644 index c411cc86fd95..000000000000 --- a/docs/index.md +++ /dev/null @@ -1,8 +0,0 @@ -# MXNet.jl - -MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of features include: - -* Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. -* Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. - -To install MXNet.jl, please follow the [installaton guide](user-guide/install.md). See the navigation menu in the sidebar for an [overview](user-guide/overview.md) of MXNet.jl and tutorials on training neural networks in MXNet.jl. diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 000000000000..8d0890e0c6a0 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,34 @@ +%%%%%%%%%%%%%%%%%%% +MXNet Documentation +%%%%%%%%%%%%%%%%%%% + +`MXNet.jl `_ is `Julia +`_ package of `dmlc/mxnet +`_. MXNet.jl brings flexible and efficient GPU +computing and state-of-art deep learning to Julia. 
Some highlight of features +include: + +- Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. +- Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. + + +.. toctree:: + :maxdepth: 2 + :caption: Tutorials + + tutorial/mnist + +.. toctree:: + :maxdepth: 2 + :caption: User's Guide + + user-guide/install + user-guide/overview + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/docs/sphinx/julia.py b/docs/sphinx/julia.py new file mode 100644 index 000000000000..49d6af95b545 --- /dev/null +++ b/docs/sphinx/julia.py @@ -0,0 +1,23 @@ +# Julia domain for Sphinx (stolen from StrPack.jl) +# http://sphinx.pocoo.org/domains.html + +import re +import sphinx.domains.python + +sphinx.domains.python.py_sig_re = re.compile( + r'''^ ([\w.]*\.)? # class name(s) + ([^\s(]+) \s* # thing name + (?: \((.*)\) # optional: arguments + (?:\s* -> \s* (.*))? # return annotation + )? $ # and nothing more + ''', re.VERBOSE | re.UNICODE) + +class JuliaDomain(sphinx.domains.python.PythonDomain): + """Julia language domain.""" + name = 'jl' + label = 'Julia' + +JuliaDomain.directives['type'] = JuliaDomain.directives['class'] + +def setup(app): + app.add_domain(JuliaDomain) diff --git a/docs/tutorial/mnist.rst b/docs/tutorial/mnist.rst new file mode 100644 index 000000000000..2e5d087a5242 --- /dev/null +++ b/docs/tutorial/mnist.rst @@ -0,0 +1,252 @@ +Digit Recognition on MNIST +========================== + +In this tutorial, we will work through examples of training a simple multi-layer +perceptron and then a convolutional neural network (the LeNet architecture) on +the `MNIST handwritten digit dataset `_. The +code for this tutorial could be found in `examples/mnist +`_. + +Simple 3-layer MLP +------------------ + +This is a tiny 3-layer MLP that could be easily trained on CPU. The script starts with + +.. 
code-block:: julia + + using MXNet + +to load the ``MXNet`` module. Then we are ready to define the network +architecture via the :doc:`symbolic API `. We start with +a placeholder ``data`` symbol, + +.. code-block:: julia + + data = mx.Variable(:data) + +and then cascading fully-connected layers and activation functions: + +.. code-block:: julia + + fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) + act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) + fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) + act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) + fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) + +Note each composition we take the previous symbol as the `data` argument, forming a feedforward chain. The architecture looks like + +.. code-block:: julia + + Input --> 128 units (ReLU) --> 64 units (ReLU) --> 10 units + +where the last 10 units correspond to the 10 output classes (digits 0,...,9). We +then add a final ``Softmax`` operation to turn the 10-dimensional prediction to proper probability values for the 10 classes: + +.. code-block:: julia + + mlp = mx.Softmax(data = fc3, name=:softmax) + +As we can see, the MLP is just a chain of layers. For this case, we can also use +the ``mx.chain`` macro. The same architecture above can be defined as + +.. code-block:: julia + + mlp = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=128) => + mx.Activation(name=:relu1, act_type=:relu) => + mx.FullyConnected(name=:fc2, num_hidden=64) => + mx.Activation(name=:relu2, act_type=:relu) => + mx.FullyConnected(name=:fc3, num_hidden=10) => + mx.Softmax(name=:softmax) + +After defining the architecture, we are ready to load the MNIST data. MXNet.jl +provide built-in data providers for the MNIST dataset, which could automatically +download the dataset into ``Pkg.dir("MXNet")/data/mnist`` if necessary. 
We wrap
+the code to construct the data provider into ``mnist-data.jl`` so that it could be shared by both the MLP example and the LeNet ConvNets example.
+
+.. code-block:: julia
+
+    batch_size = 100
+    include("mnist-data.jl")
+    train_provider, eval_provider = get_mnist_providers(batch_size)
+
+If you need to write your own data providers for customized data format, please refer to **TODO**: pointer to data provider API.
+
+Given the architecture and data, we can instantiate a *model* to do the actual
+training. ``mx.FeedForward`` is the built-in model that is suitable for most feed-forward architectures. When constructing the model, we also specify the *context* on which the computation should be carried out. Because this is a really tiny MLP, we will just run on a single CPU device.
+
+.. code-block:: julia
+
+    model = mx.FeedForward(mlp, context=mx.cpu())
+
+You can use a ``mx.gpu()`` or if a list of devices (e.g. ``[mx.gpu(0),
+mx.gpu(1)]``) is provided, data-parallelization will be used automatically. But for this tiny example, using a GPU device might not help.
+
+The last thing we need to specify is the optimization algorithm (a.k.a. *optimizer*) to use. We use the basic SGD with a fixed learning rate 0.1 and momentum 0.9:
+
+.. code-block:: julia
+
+    optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001)
+
+Now we can do the training. Here the ``n_epoch`` parameter specifies that we
+want to train for 20 epochs. We also supply an ``eval_data`` to monitor validation accuracy on the validation set.
+
+.. code-block:: julia
+
+    mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider)
+
+Here is a sample output
+
+.. code-block:: text
+
+    INFO: Start training on [CPU0]
+    INFO: Initializing parameters...
+    INFO: Creating KVStore...
+    INFO: == Epoch 001 ==========
+    INFO: ## Training summary
+    INFO: :accuracy = 0.7554
+    INFO: time = 1.3165 seconds
+    INFO: ## Validation summary
+    INFO: :accuracy = 0.9502
+    ... 
+ INFO: == Epoch 020 ========== + INFO: ## Training summary + INFO: :accuracy = 0.9949 + INFO: time = 0.9287 seconds + INFO: ## Validation summary + INFO: :accuracy = 0.9775 + + +Convolutional Neural Networks +----------------------------- + +In the second example, we show a slightly more complicated architecture that +involves convolution and pooling. This architecture for the MNIST is usually +called the [LeNet]_. The first part of the architecture is listed below: + +.. code-block:: julia + + # input + data = mx.Variable(:data) + + # first conv + conv1 = @mx.chain mx.Convolution(data=data, kernel=(5,5), num_filter=20) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) + + # second conv + conv2 = @mx.chain mx.Convolution(data=conv1, kernel=(5,5), num_filter=50) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) + +We basically defined two convolution modules. Each convolution module is +actually a chain of ``Convolution``, ``tanh`` activation and then max ``Pooling`` operations. + +Each sample in the MNIST dataset is a 28x28 single-channel grayscale image. In +the tensor format used by ``NDArray``, a batch of 100 samples is a tensor of +shape ``(28,28,1,100)``. The convolution and pooling operates in the spatial +axis, so ``kernel=(5,5)`` indicate a square region of 5-width and 5-height. +The rest of the architecture follows as: + +.. code-block:: julia + + # first fully-connected + fc1 = @mx.chain mx.Flatten(data=conv2) => + mx.FullyConnected(num_hidden=500) => + mx.Activation(act_type=:tanh) + + # second fully-connected + fc2 = mx.FullyConnected(data=fc1, num_hidden=10) + + # softmax loss + lenet = mx.Softmax(data=fc2, name=:softmax) + +Note a fully-connected operator expects the input to be a matrix. However, the +results from spatial convolution and pooling are 4D tensors. 
So we explicitly +used a ``Flatten`` operator to flat the tensor, before connecting it to the +``FullyConnected`` operator. + +The rest of the network is the same as the previous MLP example. As before, we can now load the MNIST dataset: + +.. code-block:: julia + + batch_size = 100 + include("mnist-data.jl") + train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) + +Note we specified ``flat=false`` to tell the data provider to provide 4D tensors instead of 2D matrices because the convolution operators needs correct spatial shape information. We then construct a feedforward model on GPU, and train it. + +.. code-block:: julia + + #-------------------------------------------------------------------------------- + # fit model + model = mx.FeedForward(lenet, context=mx.gpu()) + + # optimizer + optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) + + # fit parameters + mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) + +And here is a sample of running outputs: + +.. code-block:: text + + INFO: == Epoch 001 ========== + INFO: ## Training summary + INFO: :accuracy = 0.6750 + INFO: time = 4.9814 seconds + INFO: ## Validation summary + INFO: :accuracy = 0.9712 + ... + INFO: == Epoch 020 ========== + INFO: ## Training summary + INFO: :accuracy = 1.0000 + INFO: time = 4.0086 seconds + INFO: ## Validation summary + INFO: :accuracy = 0.9915 + + +.. [LeNet] Lecun, Y.; Bottou, L.; Bengio, Y.; Haffner, P., + *Gradient-based learning applied to document recognition*, + Proceedings of the IEEE, vol.86, no.11, pp.2278-2324, + Nov 1998. + +Predicting with a trained model +------------------------------- + +Predicting with a trained model is very simple. By calling ``mx.predict`` with the +model and a data provider, we get the model output as a Julia Array: + +.. 
code-block:: julia + + probs = mx.predict(model, eval_provider) + +The following code shows a stupid way of getting all the labels from the data +provider, and compute the prediction accuracy manually: + +.. code-block:: julia + + # collect all labels from eval data + labels = Array[] + for batch in eval_provider + push!(labels, copy(mx.get_label(batch))) + end + labels = cat(1, labels...) + + # Now we use compute the accuracy + correct = 0 + for i = 1:length(labels) + # labels are 0...9 + if indmax(probs[:,i]) == labels[i]+1 + correct += 1 + end + end + println(mx.format("Accuracy on eval set: {1:.2f}%", 100correct/length(labels))) + +Alternatively, when the dataset is huge, one can provide a callback to +``mx.predict``, then the callback function will be invoked with the outputs of +each mini-batch. The callback could, for example, write the data to disk for +future inspection. In this case, no value is returned from ``mx.predict``. See +also **TODO** provide link to prediction API. diff --git a/docs/tutorials/mnist.md b/docs/tutorials/mnist.md deleted file mode 100644 index ecf7bab8f631..000000000000 --- a/docs/tutorials/mnist.md +++ /dev/null @@ -1,150 +0,0 @@ -In this tutorial, we will work through examples of training a simple multi-layer perceptron and then a convolutional neural network (the LeNet architecture) on the [MNIST handwritten digit dataset](http://yann.lecun.com/exdb/mnist/). The code for this tutorial could be found in [`Pkg.dir("MXNet")`/examples/mnist/](https://github.com/dmlc/MXNet.jl/tree/master/examples/mnist). - -# Simple 3-layer MLP - -This is a tiny 3-layer MLP that could be easily trained on CPU. The script starts with -```julia -using MXNet -``` -to load the `MXNet` module. Then we are ready to define the network architecture via the [symbolic API](../user-guide/overview.md#symbols-and-composition). 
We start with a placeholder `data` symbol, -```julia -data = mx.Variable(:data) -``` -and then cascading fully-connected layers and activation functions: -```julia -fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) -act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) -fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) -act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) -fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) -``` -Note each composition we take the previous symbol as the `data` argument, forming a feedforward chain. The architecture looks like -``` -Input --> 128 units (ReLU) --> 64 units (ReLU) --> 10 units -``` -where the last 10 units correspond to the 10 output classes (digits 0,...,9). We then add a final `Softmax` operation to turn the 10-dimensional prediction to proper probability values for the 10 classes: -```julia -mlp = mx.Softmax(data = fc3, name=:softmax) -``` -As we can see, the MLP is just a chain of layers. For this case, we can also use the `mx.chain` macro. The same architecture above can be defined as -```julia -mlp = @mx.chain mx.Variable(:data) => - mx.FullyConnected(name=:fc1, num_hidden=128) => - mx.Activation(name=:relu1, act_type=:relu) => - mx.FullyConnected(name=:fc2, num_hidden=64) => - mx.Activation(name=:relu2, act_type=:relu) => - mx.FullyConnected(name=:fc3, num_hidden=10) => - mx.Softmax(name=:softmax) -``` - -After defining the architecture, we are ready to load the MNIST data. MXNet.jl provide built-in data providers for the MNIST dataset, which could automatically download the dataset into `Pkg.dir("MXNet")/data/mnist` if necessary. We wrap the code to construct the data provider into `mnist-data.jl` so that it could be shared by both the MLP example and the LeNet ConvNets example. 
-```julia -batch_size = 100 -include("mnist-data.jl") -train_provider, eval_provider = get_mnist_providers(batch_size) -``` -If you need to write your own data providers for customized data format, please refer to **TODO**: pointer to data provider API. - -Given the architecture and data, we can instantiate an *model* to do the actual training. `mx.FeedForward` is the built-in model that is suitable for most feed-forward architectures. When constructing the model, we also specify the *context* on which the computation should be carried out. Because this is a really tiny MLP, we will just run on a single CPU device. -```julia -model = mx.FeedForward(mlp, context=mx.cpu()) -``` -You can use a `mx.gpu()` or if a list of devices (e.g. `[mx.gpu(0), mx.gpu(1)]`) is provided, data-parallelization will be used automatically. But for this tiny example, using a GPU device might not help. - -The last thing we need to specify is the optimization algorithm (a.k.a. *optimizer*) to use. We use the basic SGD with a fixed learning rate 0.1 and momentum 0.9: -```julia -optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) -``` -Now we can do the training. Here the `n_epoch` parameter specifies that we want to train for 20 epochs. We also supply a `eval_data` to monitor validation accuracy on the validation set. -```julia -mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) -``` -Here is a sample output -``` -INFO: Start training on [CPU0] -INFO: Initializing parameters... -INFO: Creating KVStore... -INFO: == Epoch 001 ========== -INFO: ## Training summary -INFO: :accuracy = 0.7554 -INFO: time = 1.3165 seconds -INFO: ## Validation summary -INFO: :accuracy = 0.9502 -... 
-INFO: == Epoch 020 ========== -INFO: ## Training summary -INFO: :accuracy = 0.9949 -INFO: time = 0.9287 seconds -INFO: ## Validation summary -INFO: :accuracy = 0.9775 -``` - -# Convolutional Neural Networks - -In the second example, we show a slightly more complicated architecture that involves convolution and pooling. This architecture for the MNIST is usually called the *LeNet*. The first part of the architecture is listed below: -```julia -# input -data = mx.Variable(:data) - -# first conv -conv1 = @mx.chain mx.Convolution(data=data, kernel=(5,5), num_filter=20) => - mx.Activation(act_type=:tanh) => - mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) - -# second conv -conv2 = @mx.chain mx.Convolution(data=conv1, kernel=(5,5), num_filter=50) => - mx.Activation(act_type=:tanh) => - mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) -``` -We basically defined two convolution modules. Each convolution module is actually a chain of `Convolution`, `tanh` activation and then max `Pooling` operations. - -Each sample in the MNIST dataset is a 28x28 single-channel grayscale image. In the tensor format used by `NDArray`, a batch of 100 samples is a tensor of shape `(28,28,1,100)`. The convolution and pooling operates in the spatial axis, so `kernel=(5,5)` indicate a square region of 5-width and 5-height. -The rest of the architecture follows as: -```julia -# first fully-connected -fc1 = @mx.chain mx.Flatten(data=conv2) => - mx.FullyConnected(num_hidden=500) => - mx.Activation(act_type=:tanh) - -# second fully-connected -fc2 = mx.FullyConnected(data=fc1, num_hidden=10) - -# softmax loss -lenet = mx.Softmax(data=fc2, name=:softmax) -``` -Note a fully-connected operator expects the input to be a matrix. However, the results from spatial convolution and pooling are 4D tensors. So we explicitly used a `Flatten` operator to flat the tensor, before connecting it to the `FullyConnected` operator. - -The rest of the network is the same as the previous MLP example. 
As before, we can now load the MNIST dataset: -```julia -batch_size = 100 -include("mnist-data.jl") -train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) -``` -Note we specified `flat=false` to tell the data provider to provide 4D tensors instead of 2D matrices because the convolution operators needs correct spatial shape information. We then construct a feedforward model on GPU, and train it. -```julia -#-------------------------------------------------------------------------------- -# fit model -model = mx.FeedForward(lenet, context=mx.gpu()) - -# optimizer -optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) - -# fit parameters -mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) -``` -And here is a sample of running outputs: -``` -INFO: == Epoch 001 ========== -INFO: ## Training summary -INFO: :accuracy = 0.6750 -INFO: time = 4.9814 seconds -INFO: ## Validation summary -INFO: :accuracy = 0.9712 -... -INFO: == Epoch 020 ========== -INFO: ## Training summary -INFO: :accuracy = 1.0000 -INFO: time = 4.0086 seconds -INFO: ## Validation summary -INFO: :accuracy = 0.9915 -``` diff --git a/docs/user-guide/install.md b/docs/user-guide/install.md deleted file mode 100644 index eb4e1ecec80d..000000000000 --- a/docs/user-guide/install.md +++ /dev/null @@ -1,29 +0,0 @@ -# Automatic Installation - -To install MXNet.jl, simply type -```jl -Pkg.add("MXNet") -``` -in the Julia REPL. MXNet.jl is built on top of [libmxnet](https://github.com/dmlc/mxnet). Upon installation, Julia will try to automatically download and build libmxnet. - -The libmxnet source is downloaded to `Pkg.dir("MXNet")/deps/src/mxnet`. The automatic build is using default configurations, with OpenCV, CUDA disabled. -If the compilation failed due to unresolved dependency, or if you want to customize the build, it is recommended to compile and install libmxnet manually. Please see [below](#manual-compilation) for more details. 
- -To use the latest git version of MXNet.jl, use the following command instead -```jl -Pkg.checkout("MXNet") -``` - -# Manual Compilation - -It is possible to compile libmxnet separately and point MXNet.jl to a the existing library in case automatic compilation fails due to unresolved dependencies in an un-standard environment; Or when one want to work with a seperate, maybe customized libmxnet. - -To build libmxnet, please refer to [the installation guide of libmxnet](http://mxnet.readthedocs.org/en/latest/build.html). After successfully installing libmxnet, set the `MXNET_HOME` environment variable to the location of libmxnet. In other words, the compiled `libmxnet.so` should be found in `$MXNET_HOME/lib`. - -When the `MXNET_HOME` environment variable is detected and the corresponding `libmxnet.so` could be loaded successfully, MXNet.jl will skip automatic building during installation and use the specified libmxnet instead. - -Basically, MXNet.jl will search `libmxnet.so` or `libmxnet.dll` in the following paths (and in that order): - -* `$MXNET_HOME/lib`: customized libmxnet builds -* `Pkg.dir("MXNet")/deps/usr/lib`: automatic builds -* Any system wide library search path diff --git a/docs/user-guide/install.rst b/docs/user-guide/install.rst new file mode 100644 index 000000000000..43f882928d39 --- /dev/null +++ b/docs/user-guide/install.rst @@ -0,0 +1,54 @@ +Installation Guide +================== + +Automatic Installation +---------------------- + +To install MXNet.jl, simply type + +.. code-block:: julia + + Pkg.add("MXNet") + +in the Julia REPL. Or to use the latest git version of MXNet.jl, use the following command instead + +.. code-block:: julia + + Pkg.checkout("MXNet") + +MXNet.jl is built on top of `libmxnet +`_. Upon installation, Julia will try to +automatically download and build libmxnet. + +The libmxnet source is downloaded to ``Pkg.dir("MXNet")/deps/src/mxnet``. The +automatic build is using default configurations, with OpenCV, CUDA disabled. 
+If the compilation failed due to unresolved dependency, or if you want to +customize the build, it is recommended to compile and install libmxnet manually. +Please see :ref:`below ` for more details. + + +Manual Compilation +------------------ + +It is possible to compile libmxnet separately and point MXNet.jl to a the existing library in case automatic compilation fails due to unresolved dependencies in an un-standard environment; Or when one want to work with a seperate, maybe customized libmxnet. + +To build libmxnet, please refer to `the installation guide of libmxnet +`_. After successfully +installing libmxnet, set the ``MXNET_HOME`` environment variable to the location +of libmxnet. In other words, the compiled ``libmxnet.so`` should be found in +``$MXNET_HOME/lib``. + +.. note:: + + The constant ``MXNET_HOME`` is pre-compiled in MXNet.jl package cache. If you + updated the environment variable after installing MXNet.jl, make sure to + update the pre-compilation cache by ``Base.compilecache("MXNet")``. + +When the ``MXNET_HOME`` environment variable is detected and the corresponding +``libmxnet.so`` could be loaded successfully, MXNet.jl will skip automatic building during installation and use the specified libmxnet instead. + +Basically, MXNet.jl will search ``libmxnet.so`` or ``libmxnet.dll`` in the following paths (and in that order): + +* ``$MXNET_HOME/lib``: customized libmxnet builds +* ``Pkg.dir("MXNet")/deps/usr/lib``: automatic builds +* Any system wide library search path diff --git a/docs/user-guide/overview.md b/docs/user-guide/overview.md deleted file mode 100644 index 7689b3fba921..000000000000 --- a/docs/user-guide/overview.md +++ /dev/null @@ -1,267 +0,0 @@ -# MXNet.jl Namespace - -Most the functions and types in MXNet.jl are organized in a flat namespace. Because many some functions are conflicting with existing names in the Julia Base module, we wrap them all in a `mx` module. 
The convention of accessing the MXNet.jl interface is the to use the `mx.` prefix explicitly: -```julia -using MXNet - -x = mx.zeros(2,3) # MXNet NDArray -y = zeros(eltype(x), size(x)) # Julia Array -copy!(y, x) # Overloaded function in Julia Base -z = mx.ones(size(x), mx.gpu()) # MXNet NDArray on GPU -mx.copy!(z, y) # Same as copy!(z, y) -``` -Note functions like `size`, `copy!` that is extensively overloaded for various types works out of the box. But functions like `zeros` and `ones` will be ambiguous, so we always use the `mx.` prefix. If you prefer, the `mx.` prefix can be used explicitly for all MXNet.jl functions, including `size` and `copy!` as shown in the last line. - -# High Level Interface - -## Symbols and Composition - -The way we build deep learning models in MXNet.jl is to use the powerful symbolic composition system. It is like [Theano](http://deeplearning.net/software/theano/), except that we avoided long expression compiliation time by providing *larger* neural network related building blocks to guarantee computation performance. See also [this note](http://mxnet.readthedocs.org/en/latest/program_model.html) for the design and trade-off of the MXNet symbolic composition system. - -The basic type is `mx.Symbol`. The following is a trivial example of composing two symbols with the `+` operation. -```julia -A = mx.Variable(:A) -B = mx.Variable(:B) -C = A + B -``` -We get a new *symbol* by composing existing *symbols* by some *operations*. A hierarchical architecture of a deep neural network could be realized by recursive composition. For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of 128 units and a ReLU activation function. 
-```julia -net = mx.Variable(:data) -net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) -net = mx.Activation(data=net, name=:relu1, act_type=:relu) -net = mx.FullyConnected(data=net, name=:fc2, num_hidden=64) -net = mx.Softmax(data=net, name=:out) -``` -Each time we take the previous symbol, and compose with an operation. Unlike the simple `+` example above, the *operations* here are "bigger" ones, that correspond to common computation layers in deep neural networks. - -Each of those operation takes one or more input symbols for composition, with optional hyper-parameters (e.g. `num_hidden`, `act_type`) to further customize the composition results. - -When applying those operations, we can also specify a `name` for the result symbol. This is convenient if we want to refer to this symbol later on. If not supplied, a name will be automatically generated. - -Each symbol takes some arguments. For example, in the `+` case above, to compute the value of `C`, we will need to know the values of the two inputs `A` and `B`. For neural networks, the arguments are primarily two categories: *inputs* and *parameters*. *inputs* are data and labels for the networks, while *parameters* are typically trainable *weights*, *bias*, *filters*. - -When composing symbols, their arguments accumulates. We can list all the arguments by -```julia -julia> mx.list_arguments(net) -6-element Array{Symbol,1}: - :data # Input data, name from the first data variable - :fc1_weight # Weights of the fully connected layer named :fc1 - :fc1_bias # Bias of the layer :fc1 - :fc2_weight # Weights of the layer :fc2 - :fc2_bias # Bias of the layer :fc2 - :out_label # Input label, required by the softmax layer named :out -``` -Note the names of the arguments are generated according to the provided name for each layer. 
We can also specify those names explicitly: -```julia -net = mx.Variable(:data) -w = mx.Variable(:myweight) -net = mx.FullyConnected(data=data, weight=w, name=:fc1, num_hidden=128) -mx.list_arguments(net) -# => -# 3-element Array{Symbol,1}: -# :data -# :myweight -# :fc1_bias -``` -The simple fact is that a `Variable` is just a placeholder `mx.Symbol`. In composition, we can use arbitrary symbols for arguments. For example: -```julia -net = mx.Variable(:data) -net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) -net2 = mx.Variable(:data2) -net2 = mx.FullyConnected(data=net2, name=:net2, num_hidden=128) -mx.list_arguments(net2) -# => -# 3-element Array{Symbol,1}: -# :data2 -# :net2_weight -# :net2_bias -composed_net = net2(data2=net, name=:composed) -mx.list_arguments(composed_net) -# => -# 5-element Array{Symbol,1}: -# :data -# :fc1_weight -# :fc1_bias -# :net2_weight -# :net2_bias -``` -Note we use a composed symbol, `net` as the argument `data2` for `net2` to get a new symbol, which we named `:composed`. It also shows that a symbol itself is a call-able object, which can be invoked to fill in missing arguments and get more complicated symbol compositions. - -## Shape Inference - -Given enough information, the shapes of all arguments in a composed symbol could be inferred automatically. For example, given the input shape, and some hyper-parameters like `num_hidden`, the shapes for the weights and bias in a neural network could be inferred. -```julia -net = mx.Variable(:data) -net = mx.FullyConnected(data=net, name=:fc1, num_hidden=10) -arg_shapes, out_shapes, aux_shapes = mx.infer_shape(net, data=(10, 64)) -``` -The returned shapes corresponds to arguments with the same order as returned by `mx.list_arguments`. The `out_shapes` are shapes for outputs, and `aux_shapes` can be safely ignored for now. 
-```julia -for (n,s) in zip(mx.list_arguments(net), arg_shapes) - println("$n => $s") -end -# => -# data => (10,64) -# fc1_weight => (10,10) -# fc1_bias => (10,) -for (n,s) in zip(mx.list_outputs(net), out_shapes) - println("$n => $s") -end -# => -# fc1_output => (10,64) -``` - -## Binding and Executing - -In order to execute the computation graph specified a composed symbol, we will *bind* the free variables to concrete values, specified as `mx.NDArray`s. This will create an `mx.Executor` on a given `mx.Context`. A context describes the computation devices (CPUs, GPUs, etc.) and an executor will carry out the computation (forward/backward) specified in the corresponding symbolic composition. -```julia -A = mx.Variable(:A) -B = mx.Variable(:B) -C = A .* B -a = mx.ones(3) * 4 -b = mx.ones(3) * 2 -c_exec = mx.bind(C, context=mx.cpu(), args=Dict(:A => a, :B => b)) - -mx.forward(c_exec) -copy(c_exec.outputs[1]) # copy turns NDArray into Julia Array -# => -# 3-element Array{Float32,1}: -# 8.0 -# 8.0 -# 8.0 -``` -For neural networks, it is easier to use `simple_bind`. By providing the shape for input arguments, it will perform a shape inference for the rest of the arguments and create the `NDArray`s automatically. In practice, the binding and executing steps are hidden under the `Model` interface. - -**TODO** Provide pointers to model tutorial and further details about binding and symbolic API. - -# Low Level Interface - -## NDArrays - -`NDArray`s are basic building blocks of the actual computations in MXNet. It is like a Julia `Array` object, with some important differences listed here: - -* The actual data could live on different `Context` (e.g. GPUs). For some contexts, iterating into the elements one by one is very slow, thus indexing into `NDArray` is not supported in general. The easiest way to inspect the contents of an `NDArray` is to use the `copy` function to copy the contents as a Julia `Array`. 
-* Operations on `NDArray`s (including basic arithmetics and neural network related operators) are executed in parallel with automatic dependency tracking to ensure correctness. -* There is no generics in `NDArray`, the `eltype` is always `mx.MX_float`. Because for applications in machine learning, single precision floating point numbers are typical a best choice balancing between precision, speed and portability. Also since libmxnet is designed to support multiple languages as front-ends, it is much simpler to implement with a fixed data type. - -While most of the computation is hidden in libmxnet by operators corresponding to various neural network layers. Getting familiar with the `NDArray` API is useful for implementing `Optimizer`s or customized operators in Julia directly. - -The followings are common ways to create `NDArray` objects: - -* `mx.empty(shape[, context])`: create on uninitialized array of a given shape on a specific device. For example, `mx.empty(2,3)`, `mx.((2,3), mx.gpu(2))`. -* `mx.zeros(shape[, context])` and `mx.ones(shape[, context])`: similar to the Julia's built-in `zeros` and `ones`. -* `mx.copy(jl_arr, context)`: copy the contents of a Julia `Array` to a specific device. - -Most of the convenient functions like `size`, `length`, `ndims`, `eltype` on array objects should work out-of-the-box. Although indexing is not supported, it is possible to take *slices*: -```julia -a = mx.ones(2,3) -b = mx.slice(a, 1:2) -b[:] = 2 -println(copy(a)) -# => -# Float32[2.0 2.0 1.0 -# 2.0 2.0 1.0] -``` -A slice is a sub-region sharing the same memory with the original `NDArray` object. A slice is always a contiguous piece of memory, so only slicing on the *last* dimension is supported. The example above also shows a way to set the contents of an `NDArray`. 
-```julia -a = mx.empty(2,3) -a[:] = 0.5 # set all elements to a scalar -a[:] = rand(size(a)) # set contents with a Julia Array -copy!(a, rand(size(a))) # set value by copying a Julia Array -b = mx.empty(size(a)) -b[:] = a # copying and assignment between NDArrays -``` -Note due to the intrinsic limitation design of the Julia language, a normal assignment -```julia -a = b -``` -does **not** mean copying the contents of `b` to `a`. Instead, it just make the variable `a` pointing to a new object, which is `b`. Similarly, inplace arithmetics does not work as expected: -```julia -a = mx.ones(2) -r = a # keep a reference to a -b = mx.ones(2) -a += b # translates to a = a + b -println(copy(a)) -# => Float32[2.0f0,2.0f0] -println(copy(r)) -# => Float32[1.0f0,1.0f0] -``` -As we can see, `a` has expected value, but instead of inplace updating, a new `NDArray` is created and `a` is set to point to this new object. If we look at `r`, which still reference to the old `a`, its content has not changed. There is currently no way in Julia to overload the operators like `+=` to get customized behavior. - -Instead, you will need to write `a[:] = a+b`, or if you want *real* inplace `+=` operation, MXNet.jl provides a simple macro `@mx.inplace`: -```julia -@mx.inplace a += b -macroexpand(:(@mx.inplace a += b)) -# => :(MXNet.mx.add_to!(a,b)) -``` -As we can see, it translate the `+=` operator to an explicit `add_to!` function call, which invokes into libmxnet to add the contents of `b` into `a` directly. For example, the following is the update rule in the SGD `Optimizer` (both `grad` and `weight` are `NDArray` objects): -```julia -@inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) -``` -Note there is no much magic in `mx.inplace`: it only does a shallow translation. In the SGD update rule example above, the computation like scaling the gradient by `grad_scale` and adding the weight decay all create temporary `NDArray` objects. 
To mitigate this issue, libmxnet has a customized memory allocator designed specifically to handle this kind of situations. The following snippet does a simple benchmark on allocating temp `NDArray`s vs. pre-allocating: -```julia -using Benchmark -using MXNet - -N_REP = 1000 -SHAPE = (128, 64) -CTX = mx.cpu() -LR = 0.1 - -function inplace_op() - weight = mx.zeros(SHAPE, CTX) - grad = mx.ones(SHAPE, CTX) - - # pre-allocate temp objects - grad_lr = mx.empty(SHAPE, CTX) - - for i = 1:N_REP - copy!(grad_lr, grad) - @mx.inplace grad_lr .*= LR - @mx.inplace weight -= grad_lr - end - return weight -end - -function normal_op() - weight = mx.zeros(SHAPE, CTX) - grad = mx.ones(SHAPE, CTX) - - for i = 1:N_REP - weight[:] -= LR * grad - end - return weight -end - -# make sure the results are the same -@assert(maximum(abs(copy(normal_op() - inplace_op()))) < 1e-6) - -println(compare([inplace_op, normal_op], 100)) -``` -The comparison on my laptop shows that `normal_op` while allocating a lot of temp `NDArray`s in the loop (the performance gets worse when increasing `N_REP`), is only about twice slower than the pre-allocated one. - -| Row | Function | Average | Relative | Replications | -|-----|--------------|-----------|----------|--------------| -| 1 | "inplace_op" | 0.0074854 | 1.0 | 100 | -| 2 | "normal_op" | 0.0174202 | 2.32723 | 100 | - -So it will usually not be a big problem unless you are at the bottleneck of the computation. - -## Distributed Key-value Store - -The type `KVStore` and related methods are used for data sharing across different devices or machines. It provides a simple and efficient integer-`NDArray` key-value storage system that each device can pull or push. - -The following example shows how to create a `local` `KVStore`, initialize a value and then pull it back. 
-```julia -kv = mx.KVStore(:local) -shape = (2,3) -key = 3 - -mx.init!(kv, key, mx.ones(shape)*2) -a = mx.empty(shape) -mx.pull!(kv, key, a) # pull value into a -println(copy(a)) -# => -# Float32[2.0 2.0 2.0 -# 2.0 2.0 2.0] -``` diff --git a/docs/user-guide/overview.rst b/docs/user-guide/overview.rst new file mode 100644 index 000000000000..2fb2738008cd --- /dev/null +++ b/docs/user-guide/overview.rst @@ -0,0 +1,376 @@ +Overview +======== + +MXNet.jl Namespace +------------------ + +Most the functions and types in MXNet.jl are organized in a flat namespace. +Because many some functions are conflicting with existing names in the Julia +Base module, we wrap them all in a ``mx`` module. The convention of accessing +the MXNet.jl interface is the to use the ``mx.`` prefix explicitly: + +.. code-block:: julia + + using MXNet + + x = mx.zeros(2,3) # MXNet NDArray + y = zeros(eltype(x), size(x)) # Julia Array + copy!(y, x) # Overloaded function in Julia Base + z = mx.ones(size(x), mx.gpu()) # MXNet NDArray on GPU + mx.copy!(z, y) # Same as copy!(z, y) + +Note functions like ``size``, ``copy!`` that is extensively overloaded for +various types works out of the box. But functions like ``zeros`` and ``ones`` +will be ambiguous, so we always use the ``mx.`` prefix. If you prefer, the +``mx.`` prefix can be used explicitly for all MXNet.jl functions, including +``size`` and ``copy!`` as shown in the last line. + +Low Level Interface +------------------- + +NDArrays +~~~~~~~~ + +``NDArray`` is the basic building blocks of the actual computations in MXNet. It +is like a Julia ``Array`` object, with some important differences listed here: + +* The actual data could live on different ``Context`` (e.g. GPUs). For some + contexts, iterating into the elements one by one is very slow, thus indexing + into ``NDArray`` is not supported in general. The easiest way to inspect the + contents of an ``NDArray`` is to use the ``copy`` function to copy the + contents as a Julia ``Array``. 
+* Operations on ``NDArray`` (including basic arithmetics and neural network related operators) are executed in parallel with automatic dependency tracking to ensure correctness. +* There is no generics in ``NDArray``, the ``eltype`` is always ``mx.MX_float``. Because for applications in machine learning, single precision floating point numbers are typical a best choice balancing between precision, speed and portability. Also since libmxnet is designed to support multiple languages as front-ends, it is much simpler to implement with a fixed data type. + +While most of the computation is hidden in libmxnet by operators corresponding +to various neural network layers. Getting familiar with the ``NDArray`` API is +useful for implementing ``Optimizer`` or customized operators in Julia directly. + +The followings are common ways to create ``NDArray`` objects: + +* ``mx.empty(shape[, context])``: create on uninitialized array of a given shape + on a specific device. For example, ``mx.empty(2,3)`, `mx.((2,3), mx.gpu(2))``. +* ``mx.zeros(shape[, context])`` and ``mx.ones(shape[, context])``: similar to + the Julia's built-in ``zeros`` and ``ones``. +* ``mx.copy(jl_arr, context)``: copy the contents of a Julia ``Array`` to a specific device. + +Most of the convenient functions like ``size``, ``length``, ``ndims``, ``eltype`` on array objects should work out-of-the-box. Although indexing is not supported, it is possible to take *slices*: + +.. code-block:: julia + + a = mx.ones(2,3) + b = mx.slice(a, 1:2) + b[:] = 2 + println(copy(a)) + # => + # Float32[2.0 2.0 1.0 + # 2.0 2.0 1.0] + +A slice is a sub-region sharing the same memory with the original ``NDArray`` +object. A slice is always a contiguous piece of memory, so only slicing on the +*last* dimension is supported. The example above also shows a way to set the +contents of an ``NDArray``. + +.. 
code-block:: julia + + a = mx.empty(2,3) + a[:] = 0.5 # set all elements to a scalar + a[:] = rand(size(a)) # set contents with a Julia Array + copy!(a, rand(size(a))) # set value by copying a Julia Array + b = mx.empty(size(a)) + b[:] = a # copying and assignment between NDArrays + +Note due to the intrinsic design of the Julia language, a normal assignment + +.. code-block:: julia + + a = b + +does **not** mean copying the contents of ``b`` to ``a``. Instead, it just make +the variable ``a`` pointing to a new object, which is ``b``. Similarly, inplace arithmetics does not work as expected: + +.. code-block:: julia + + a = mx.ones(2) + r = a # keep a reference to a + b = mx.ones(2) + a += b # translates to a = a + b + println(copy(a)) + # => Float32[2.0f0,2.0f0] + println(copy(r)) + # => Float32[1.0f0,1.0f0] + +As we can see, ``a`` has expected value, but instead of inplace updating, a new +``NDArray`` is created and ``a`` is set to point to this new object. If we look +at ``r``, which still reference to the old ``a``, its content has not changed. +There is currently no way in Julia to overload the operators like ``+=`` to get customized behavior. + +Instead, you will need to write ``a[:] = a+b``, or if you want *real* inplace +``+=`` operation, MXNet.jl provides a simple macro ``@mx.inplace``: + +.. code-block:: julia + + @mx.inplace a += b + macroexpand(:(@mx.inplace a += b)) + # => :(MXNet.mx.add_to!(a,b)) + +As we can see, it translate the ``+=`` operator to an explicit ``add_to!`` +function call, which invokes into libmxnet to add the contents of ``b`` into +``a`` directly. For example, the following is the update rule in the SGD +``Optimizer`` (both ``grad`` and ``weight`` are ``NDArray`` objects): + +.. code-block:: julia + + @inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) + +Note there is no much magic in ``mx.inplace``: it only does a shallow +translation. 
In the SGD update rule example above, the computation like scaling +the gradient by ``grad_scale`` and adding the weight decay all create temporary +``NDArray`` objects. To mitigate this issue, libmxnet has a customized memory +allocator designed specifically to handle this kind of situations. The following +snippet does a simple benchmark on allocating temp ``NDArray`` vs. pre-allocating: + +.. code-block:: julia + + using Benchmark + using MXNet + + N_REP = 1000 + SHAPE = (128, 64) + CTX = mx.cpu() + LR = 0.1 + + function inplace_op() + weight = mx.zeros(SHAPE, CTX) + grad = mx.ones(SHAPE, CTX) + + # pre-allocate temp objects + grad_lr = mx.empty(SHAPE, CTX) + + for i = 1:N_REP + copy!(grad_lr, grad) + @mx.inplace grad_lr .*= LR + @mx.inplace weight -= grad_lr + end + return weight + end + + function normal_op() + weight = mx.zeros(SHAPE, CTX) + grad = mx.ones(SHAPE, CTX) + + for i = 1:N_REP + weight[:] -= LR * grad + end + return weight + end + + # make sure the results are the same + @assert(maximum(abs(copy(normal_op() - inplace_op()))) < 1e-6) + + println(compare([inplace_op, normal_op], 100)) + +The comparison on my laptop shows that ``normal_op`` while allocating a lot of +temp ``NDArray`` in the loop (the performance gets worse when increasing +``N_REP``), is only about twice slower than the pre-allocated one. + ++-----+--------------+-----------+----------+--------------+ +| Row | Function | Average | Relative | Replications | ++=====+==============+===========+==========+==============+ +| 1 | "inplace_op" | 0.0074854 | 1.0 | 100 | ++-----+--------------+-----------+----------+--------------+ +| 2 | "normal_op" | 0.0174202 | 2.32723 | 100 | ++-----+--------------+-----------+----------+--------------+ + +So it will usually not be a big problem unless you are at the bottleneck of the computation. 
+ +Distributed Key-value Store +~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +The type ``KVStore`` and related methods are used for data sharing across +different devices or machines. It provides a simple and efficient +integer - ``NDArray`` key-value storage system that each device can pull or push. + +The following example shows how to create a local ``KVStore``, initialize a value and then pull it back. + +.. code-block:: julia + + kv = mx.KVStore(:local) + shape = (2,3) + key = 3 + + mx.init!(kv, key, mx.ones(shape)*2) + a = mx.empty(shape) + mx.pull!(kv, key, a) # pull value into a + println(copy(a)) + # => + # Float32[2.0 2.0 2.0 + # 2.0 2.0 2.0] + +Intermediate Level Interface +---------------------------- + +Symbols and Composition +~~~~~~~~~~~~~~~~~~~~~~~ + +The way we build deep learning models in MXNet.jl is to use the powerful +symbolic composition system. It is like `Theano +`_, except that we avoided long +expression compiliation time by providing *larger* neural network related +building blocks to guarantee computation performance. See also `this note +`_ for the design and trade-off of the MXNet symbolic composition system. + +The basic type is ``mx.Symbol``. The following is a trivial example of composing +two symbols with the ``+`` operation. + +.. code-block:: julia + + A = mx.Variable(:A) + B = mx.Variable(:B) + C = A + B + +We get a new *symbol* by composing existing *symbols* by some *operations*. A hierarchical architecture of a deep neural network could be realized by recursive composition. For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of 128 units and a ReLU activation function. + +.. 
code-block:: julia + + net = mx.Variable(:data) + net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) + net = mx.Activation(data=net, name=:relu1, act_type=:relu) + net = mx.FullyConnected(data=net, name=:fc2, num_hidden=64) + net = mx.Softmax(data=net, name=:out) + +Each time we take the previous symbol, and compose with an operation. Unlike the +simple ``+`` example above, the *operations* here are "bigger" ones, that correspond to common computation layers in deep neural networks. + +Each of those operation takes one or more input symbols for composition, with +optional hyper-parameters (e.g. ``num_hidden``, ``act_type``) to further customize the composition results. + +When applying those operations, we can also specify a ``name`` for the result symbol. This is convenient if we want to refer to this symbol later on. If not supplied, a name will be automatically generated. + +Each symbol takes some arguments. For example, in the ``+`` case above, to +compute the value of ``C``, we will need to know the values of the two inputs +``A`` and ``B``. For neural networks, the arguments are primarily two categories: *inputs* and *parameters*. *inputs* are data and labels for the networks, while *parameters* are typically trainable *weights*, *bias*, *filters*. + +When composing symbols, their arguments accumulates. We can list all the arguments by + +.. code-block:: julia + + julia> mx.list_arguments(net) + 6-element Array{Symbol,1}: + :data # Input data, name from the first data variable + :fc1_weight # Weights of the fully connected layer named :fc1 + :fc1_bias # Bias of the layer :fc1 + :fc2_weight # Weights of the layer :fc2 + :fc2_bias # Bias of the layer :fc2 + :out_label # Input label, required by the softmax layer named :out + +Note the names of the arguments are generated according to the provided name for each layer. We can also specify those names explicitly: + +.. 
code-block:: julia + + net = mx.Variable(:data) + w = mx.Variable(:myweight) + net = mx.FullyConnected(data=data, weight=w, name=:fc1, num_hidden=128) + mx.list_arguments(net) + # => + # 3-element Array{Symbol,1}: + # :data + # :myweight + # :fc1_bias + +The simple fact is that a ``Variable`` is just a placeholder ``mx.Symbol``. In composition, we can use arbitrary symbols for arguments. For example: + +.. code-block:: julia + + net = mx.Variable(:data) + net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) + net2 = mx.Variable(:data2) + net2 = mx.FullyConnected(data=net2, name=:net2, num_hidden=128) + mx.list_arguments(net2) + # => + # 3-element Array{Symbol,1}: + # :data2 + # :net2_weight + # :net2_bias + composed_net = net2(data2=net, name=:composed) + mx.list_arguments(composed_net) + # => + # 5-element Array{Symbol,1}: + # :data + # :fc1_weight + # :fc1_bias + # :net2_weight + # :net2_bias + +Note we use a composed symbol, ``net`` as the argument ``data2`` for ``net2`` to +get a new symbol, which we named ``:composed``. It also shows that a symbol itself is a call-able object, which can be invoked to fill in missing arguments and get more complicated symbol compositions. + +Shape Inference +~~~~~~~~~~~~~~~ + +Given enough information, the shapes of all arguments in a composed symbol could +be inferred automatically. For example, given the input shape, and some +hyper-parameters like ``num_hidden``, the shapes for the weights and bias in a neural network could be inferred. + +.. code-block:: julia + + net = mx.Variable(:data) + net = mx.FullyConnected(data=net, name=:fc1, num_hidden=10) + arg_shapes, out_shapes, aux_shapes = mx.infer_shape(net, data=(10, 64)) + +The returned shapes corresponds to arguments with the same order as returned by +``mx.list_arguments``. The ``out_shapes`` are shapes for outputs, and +``aux_shapes`` can be safely ignored for now. + +.. 
code-block:: julia + + for (n,s) in zip(mx.list_arguments(net), arg_shapes) + println("$n => $s") + end + # => + # data => (10,64) + # fc1_weight => (10,10) + # fc1_bias => (10,) + for (n,s) in zip(mx.list_outputs(net), out_shapes) + println("$n => $s") + end + # => + # fc1_output => (10,64) + + +Binding and Executing +~~~~~~~~~~~~~~~~~~~~~ + +In order to execute the computation graph specified a composed symbol, we will +*bind* the free variables to concrete values, specified as ``mx.NDArray``. This +will create an ``mx.Executor`` on a given ``mx.Context``. A context describes the computation devices (CPUs, GPUs, etc.) and an executor will carry out the computation (forward/backward) specified in the corresponding symbolic composition. + +.. code-block:: julia + + A = mx.Variable(:A) + B = mx.Variable(:B) + C = A .* B + a = mx.ones(3) * 4 + b = mx.ones(3) * 2 + c_exec = mx.bind(C, context=mx.cpu(), args=Dict(:A => a, :B => b)) + + mx.forward(c_exec) + copy(c_exec.outputs[1]) # copy turns NDArray into Julia Array + # => + # 3-element Array{Float32,1}: + # 8.0 + # 8.0 + # 8.0 + +For neural networks, it is easier to use ``simple_bind``. By providing the shape +for input arguments, it will perform a shape inference for the rest of the +arguments and create the ``NDArray`` automatically. In practice, the binding and +executing steps are hidden under the ``Model`` interface. + +**TODO** Provide pointers to model tutorial and further details about binding and symbolic API. + + +High Level Interface +-------------------- + +The high level interface include model training and prediction API, etc. 
diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 6e40c00b6b15..03be2f7d336b 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -43,7 +43,7 @@ probs = mx.predict(model, eval_provider) # collect all labels from eval data labels = Array[] for batch in eval_provider - push!(labels, copy(mx.get_label(batch)[1])) + push!(labels, copy(mx.get_label(batch))) end labels = cat(1, labels...) diff --git a/src/io.jl b/src/io.jl index dcbefb145859..c8396c1d7182 100644 --- a/src/io.jl +++ b/src/io.jl @@ -118,7 +118,12 @@ function _get_data_or_label(batch::AbstractDataBatch, provide_func::Function, lo batch_size = get_batch_size(batch.provider) data_arrays_fake_slice = [SlicedNDArray[(1:batch_size, x)] for x in data_arrays] loader(batch, data_arrays_fake_slice) - return data_arrays + + if length(data_arrays) == 1 + return data_arrays[1] + else + return data_arrays + end end function get_data(batch :: AbstractDataBatch) _get_data_or_label(batch, provide_data, load_data!) diff --git a/src/model.jl b/src/model.jl index 80ee36d10674..84b581b00b09 100644 --- a/src/model.jl +++ b/src/model.jl @@ -178,7 +178,11 @@ function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::B push!(o_list, copy(slice(o_nd, 1:batch_size-get_pad(batch)))) end else - callback(self.pred_exec.outputs) + outputs = self.pred_exec.outputs + if length(outputs) == 1 + outputs = outputs[1] + end + callback(outputs) end end From bcc90ef8c6d564706991aa0cd062d74e96498ce3 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Oct 2015 22:06:15 -0400 Subject: [PATCH 133/630] switch from mkdocs to sphinx --- mkdocs.yml | 14 -------------- 1 file changed, 14 deletions(-) delete mode 100644 mkdocs.yml diff --git a/mkdocs.yml b/mkdocs.yml deleted file mode 100644 index 78822e3ada5a..000000000000 --- a/mkdocs.yml +++ /dev/null @@ -1,14 +0,0 @@ -site_name: MXNet.jl -site_author: pluskid -repo_url: https://github.com/dmlc/MXNet.jl -theme: readthedocs -pages: - - Home: 
index.md - - User Guide: - - 'Installation Guide' : 'user-guide/install.md' - - 'Overview' : 'user-guide/overview.md' - - Tutorials: - - 'MNIST': 'tutorials/mnist.md' - - API Documentation: - - 'ndarray': 'api/ndarray.md' - - 'symbol': 'api/symbol.md' From 8d56a9f6284789f84a383d172f8189cdc22ed0b7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Oct 2015 22:38:44 -0400 Subject: [PATCH 134/630] a dead simple embedded sphinx doc system (cf #3) --- docs/api/ndarray.rst | 99 ++++++++++++++++++++++++ docs/build-api.jl | 17 +++++ docs/index.rst | 8 ++ docs/user-guide/overview.rst | 32 ++++---- src/ndarray.jl | 143 +++++++++++++++++++++++++---------- 5 files changed, 244 insertions(+), 55 deletions(-) create mode 100644 docs/api/ndarray.rst create mode 100644 docs/build-api.jl diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst new file mode 100644 index 000000000000..1d6be0498e4a --- /dev/null +++ b/docs/api/ndarray.rst @@ -0,0 +1,99 @@ + +NDArray +======= + + + + +.. class:: NDArray + + Wrapper of the ``NDArray`` type in ``libmxnet``. This is the basic building block + of tensor-based computation. + + .. _ndarray-shape-note: + + .. note:: + + since C/C++ use row-major ordering for arrays while Julia follows a + column-major ordering. To keep things consistent, we keep the underlying data + in their original layout, but use *language-native* convention when we talk + about shapes. For example, a mini-batch of 100 MNIST images is a tensor of + C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory + have shape (28,28,1,100). + + + + +.. function:: context(arr :: NDArray) + + Get the context that this :class:`NDArray` lives on. + + + + +.. function:: + empty(shape :: Tuple, ctx :: Context) + empty(shape :: Tuple) + empty(dim1, dim2, ...) + + Allocate memory for an uninitialized :class:`NDArray` with specific shape. + + + + +Interface functions similar to Julia Arrays +------------------------------------------- + + + + +.. 
function:: + zeros(shape :: Tuple, ctx :: Context) + zeros(shape :: Tuple) + zeros(dim1, dim2, ...) + + Create zero-ed :class:`NDArray` with specific shape. + + + + +.. function:: + ones(shape :: Tuple, ctx :: Context) + ones(shape :: Tuple) + ones(dim1, dim2, ...) + + Create an :class:`NDArray` with specific shape and initialize with 1. + + + + +.. function:: + size(arr :: NDArray) + size(arr :: NDArray, dim :: Int) + + Get the shape of an :class:`NDArray`. The shape is in Julia's column-major convention. See + also the :ref:`notes on NDArray shapes `. + + + + +.. function:: length(arr :: NDArray) + + Get the number of elements in an :class:`NDArray`. + + + + +.. function:: ndims(arr :: NDArray) + + Get the number of dimensions of an :class:`NDArray`. Is equivalent to ``length(size(arr))``. + + + + +.. function:: eltype(arr :: NDArray) + + Get the element type of an :class:`NDArray`. Currently the element type is always ``mx.MX_float``. + + + diff --git a/docs/build-api.jl b/docs/build-api.jl new file mode 100644 index 000000000000..9ddf9ff53b5b --- /dev/null +++ b/docs/build-api.jl @@ -0,0 +1,17 @@ +# extract API docs + +function extract_doc(output_filename::AbstractString, input_filenames::AbstractString...) + src_dir = joinpath(Pkg.dir("MXNet"), "src") + api_dir = joinpath(Pkg.dir("MXNet"), "docs", "api") + + mkpath(api_dir) + open(joinpath(api_dir, output_filename), "w") do io + for in_fn in input_filenames + for doc in eachmatch(r"^#=doc\s*$(.*?)^=#\s*$"ms, readall(joinpath(src_dir, in_fn))) + println(io, doc.captures[1], "\n\n") + end + end + end +end + +extract_doc("ndarray.rst", "ndarray.jl") diff --git a/docs/index.rst b/docs/index.rst index 8d0890e0c6a0..6af0d3b6fde7 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -11,6 +11,8 @@ include: - Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. 
- Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. +For more details, see documentation below. Please also checkout the `examples +`_ directory. .. toctree:: :maxdepth: 2 @@ -25,6 +27,12 @@ include: user-guide/install user-guide/overview +.. toctree:: + :maxdepth: 1 + :caption: API Documentation + + api/ndarray + Indices and tables ================== diff --git a/docs/user-guide/overview.rst b/docs/user-guide/overview.rst index 2fb2738008cd..b93de48fed7f 100644 --- a/docs/user-guide/overview.rst +++ b/docs/user-guide/overview.rst @@ -31,22 +31,22 @@ Low Level Interface NDArrays ~~~~~~~~ -``NDArray`` is the basic building blocks of the actual computations in MXNet. It +:class:`NDArray` is the basic building blocks of the actual computations in MXNet. It is like a Julia ``Array`` object, with some important differences listed here: * The actual data could live on different ``Context`` (e.g. GPUs). For some contexts, iterating into the elements one by one is very slow, thus indexing - into ``NDArray`` is not supported in general. The easiest way to inspect the - contents of an ``NDArray`` is to use the ``copy`` function to copy the + into :class:`NDArray` is not supported in general. The easiest way to inspect the + contents of an :class:`NDArray` is to use the ``copy`` function to copy the contents as a Julia ``Array``. -* Operations on ``NDArray`` (including basic arithmetics and neural network related operators) are executed in parallel with automatic dependency tracking to ensure correctness. -* There is no generics in ``NDArray``, the ``eltype`` is always ``mx.MX_float``. Because for applications in machine learning, single precision floating point numbers are typical a best choice balancing between precision, speed and portability. Also since libmxnet is designed to support multiple languages as front-ends, it is much simpler to implement with a fixed data type. 
+* Operations on :class:`NDArray` (including basic arithmetics and neural network related operators) are executed in parallel with automatic dependency tracking to ensure correctness. +* There is no generics in :class:`NDArray`, the ``eltype`` is always ``mx.MX_float``. Because for applications in machine learning, single precision floating point numbers are typical a best choice balancing between precision, speed and portability. Also since libmxnet is designed to support multiple languages as front-ends, it is much simpler to implement with a fixed data type. While most of the computation is hidden in libmxnet by operators corresponding -to various neural network layers. Getting familiar with the ``NDArray`` API is +to various neural network layers. Getting familiar with the :class:`NDArray` API is useful for implementing ``Optimizer`` or customized operators in Julia directly. -The followings are common ways to create ``NDArray`` objects: +The followings are common ways to create :class:`NDArray` objects: * ``mx.empty(shape[, context])``: create on uninitialized array of a given shape on a specific device. For example, ``mx.empty(2,3)`, `mx.((2,3), mx.gpu(2))``. @@ -66,10 +66,10 @@ Most of the convenient functions like ``size``, ``length``, ``ndims``, ``eltype` # Float32[2.0 2.0 1.0 # 2.0 2.0 1.0] -A slice is a sub-region sharing the same memory with the original ``NDArray`` +A slice is a sub-region sharing the same memory with the original :class:`NDArray` object. A slice is always a contiguous piece of memory, so only slicing on the *last* dimension is supported. The example above also shows a way to set the -contents of an ``NDArray``. +contents of an :class:`NDArray`. .. code-block:: julia @@ -101,7 +101,7 @@ the variable ``a`` pointing to a new object, which is ``b``. 
Similarly, inplace # => Float32[1.0f0,1.0f0] As we can see, ``a`` has expected value, but instead of inplace updating, a new -``NDArray`` is created and ``a`` is set to point to this new object. If we look +:class:`NDArray` is created and ``a`` is set to point to this new object. If we look at ``r``, which still reference to the old ``a``, its content has not changed. There is currently no way in Julia to overload the operators like ``+=`` to get customized behavior. @@ -117,7 +117,7 @@ Instead, you will need to write ``a[:] = a+b``, or if you want *real* inplace As we can see, it translate the ``+=`` operator to an explicit ``add_to!`` function call, which invokes into libmxnet to add the contents of ``b`` into ``a`` directly. For example, the following is the update rule in the SGD -``Optimizer`` (both ``grad`` and ``weight`` are ``NDArray`` objects): +``Optimizer`` (both ``grad`` and ``weight`` are :class:`NDArray` objects): .. code-block:: julia @@ -126,9 +126,9 @@ function call, which invokes into libmxnet to add the contents of ``b`` into Note there is no much magic in ``mx.inplace``: it only does a shallow translation. In the SGD update rule example above, the computation like scaling the gradient by ``grad_scale`` and adding the weight decay all create temporary -``NDArray`` objects. To mitigate this issue, libmxnet has a customized memory +:class:`NDArray` objects. To mitigate this issue, libmxnet has a customized memory allocator designed specifically to handle this kind of situations. The following -snippet does a simple benchmark on allocating temp ``NDArray`` vs. pre-allocating: +snippet does a simple benchmark on allocating temp :class:`NDArray` vs. pre-allocating: .. code-block:: julia @@ -171,7 +171,7 @@ snippet does a simple benchmark on allocating temp ``NDArray`` vs. 
pre-allocatin println(compare([inplace_op, normal_op], 100)) The comparison on my laptop shows that ``normal_op`` while allocating a lot of -temp ``NDArray`` in the loop (the performance gets worse when increasing +temp :class:`NDArray` in the loop (the performance gets worse when increasing ``N_REP``), is only about twice slower than the pre-allocated one. +-----+--------------+-----------+----------+--------------+ @@ -189,7 +189,7 @@ Distributed Key-value Store The type ``KVStore`` and related methods are used for data sharing across different devices or machines. It provides a simple and efficient -integer - ``NDArray`` key-value storage system that each device can pull or push. +integer - :class:`NDArray` key-value storage system that each device can pull or push. The following example shows how to create a local ``KVStore``, initialize a value and then pull it back. @@ -364,7 +364,7 @@ will create an ``mx.Executor`` on a given ``mx.Context``. A context describes th For neural networks, it is easier to use ``simple_bind``. By providing the shape for input arguments, it will perform a shape inference for the rest of the -arguments and create the ``NDArray`` automatically. In practice, the binding and +arguments and create the :class:`NDArray` automatically. In practice, the binding and executing steps are hidden under the ``Model`` interface. **TODO** Provide pointers to model tutorial and further details about binding and symbolic API. 
diff --git a/src/ndarray.jl b/src/ndarray.jl index 2156ce76be14..b2d42fda355f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,3 +1,8 @@ +#=doc +NDArray +======= +=# + # create a NDArray handle of specific shape function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) h_ref = Ref{MX_handle}(0) @@ -19,16 +24,23 @@ end ################################################################################ # NDArray Type ################################################################################ -"""Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block -of tensor-based computation. - -**Note** since C/C++ use row-major ordering for arrays while Julia follows a -column-major ordering. To keep things consistent, we keep the underlying data -in their original layout, but use *language-native* convention when we talk -about shapes. For example, a mini-batch of 100 MNIST images is a tensor of -C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory -have shape (28,28,1,100). -""" +#=doc +.. class:: NDArray + + Wrapper of the ``NDArray`` type in ``libmxnet``. This is the basic building block + of tensor-based computation. + + .. _ndarray-shape-note: + + .. note:: + + since C/C++ use row-major ordering for arrays while Julia follows a + column-major ordering. To keep things consistent, we keep the underlying data + in their original layout, but use *language-native* convention when we talk + about shapes. For example, a mini-batch of 100 MNIST images is a tensor of + C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory + have shape (28,28,1,100). 
+=# type NDArray handle :: MX_NDArrayHandle writable :: Bool @@ -55,6 +67,11 @@ Base.cconvert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) ################################################################################ # NDArray functions exported to the users ################################################################################ +#=doc +.. function:: context(arr :: NDArray) + + Get the context that this :class:`NDArray` lives on. +=# function context(arr :: NDArray) ref_typeid = Ref{Cint}(0) ref_devid = Ref{Cint}(0) @@ -63,6 +80,14 @@ function context(arr :: NDArray) return Context(ref_typeid[], ref_devid[]) end +#=doc +.. function:: + empty(shape :: Tuple, ctx :: Context) + empty(shape :: Tuple) + empty(dim1, dim2, ...) + + Allocate memory for an uninitialized :class:`NDArray` with specific shape. +=# function empty{N}(shape :: NTuple{N, Int}) empty(shape, cpu()) end @@ -73,35 +98,19 @@ function empty(shape :: Int...) empty(shape) end -#------------------------------------------------------------ -# Interface functions similar to Julia Arrays -#------------------------------------------------------------ -import Base: size, length, ndims, eltype -"""Get the shape of an `NDArray`. Note the shape is converted to Julia convention. - So the same piece of memory, in Julia (column-major), with shape (K, M, N), will be of the - shape (N, M, K) in the Python (row-major) binding. -""" -function size(arr :: NDArray) - ref_ndim = Ref{MX_uint}(0) - ref_shape = Ref{Ptr{MX_uint}}(0) - @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), - arr, ref_ndim, ref_shape) - tuple(map(Int, flipdim(pointer_to_array(ref_shape[], ref_ndim[]),1))...) 
-end -function size(arr :: NDArray, dim :: Int) - size(arr)[dim] -end -function length(arr :: NDArray) - prod(size(arr)) -end -function ndims(arr :: NDArray) - length(size(arr)) -end -function eltype(arr :: NDArray) - MX_float -end +#=doc +Interface functions similar to Julia Arrays +------------------------------------------- +=# + +#=doc +.. function:: + zeros(shape :: Tuple, ctx :: Context) + zeros(shape :: Tuple) + zeros(dim1, dim2, ...) -"Create zero-ed NDArray of specific shape" + Create zero-ed :class:`NDArray` with specific shape. +=# function zeros{N}(shape :: NTuple{N, Int}) zeros(shape, cpu()) end @@ -114,7 +123,14 @@ function zeros(shape :: Int...) zeros(shape) end -"Create NDArray and initialize with 1" +#=doc +.. function:: + ones(shape :: Tuple, ctx :: Context) + ones(shape :: Tuple) + ones(dim1, dim2, ...) + + Create an :class:`NDArray` with specific shape and initialize with 1. +=# function ones{N}(shape :: NTuple{N, Int}) ones(shape, cpu()) end @@ -127,6 +143,55 @@ function ones(shape :: Int...) ones(shape) end +import Base: size, length, ndims, eltype + +#=doc +.. function:: + size(arr :: NDArray) + size(arr :: NDArray, dim :: Int) + + Get the shape of an :class:`NDArray`. The shape is in Julia's column-major convention. See + also the :ref:`notes on NDArray shapes `. +=# +function size(arr :: NDArray) + ref_ndim = Ref{MX_uint}(0) + ref_shape = Ref{Ptr{MX_uint}}(0) + @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), + arr, ref_ndim, ref_shape) + tuple(map(Int, flipdim(pointer_to_array(ref_shape[], ref_ndim[]),1))...) +end +function size(arr :: NDArray, dim :: Int) + size(arr)[dim] +end + +#=doc +.. function:: length(arr :: NDArray) + + Get the number of elements in an :class:`NDArray`. +=# +function length(arr :: NDArray) + prod(size(arr)) +end + +#=doc +.. function:: ndims(arr :: NDArray) + + Get the number of dimensions of an :class:`NDArray`. Is equivalent to ``length(size(arr))``. 
+=# +function ndims(arr :: NDArray) + length(size(arr)) +end + +#=doc +.. function:: eltype(arr :: NDArray) + + Get the element type of an :class:`NDArray`. Currently the element type is always ``mx.MX_float``. +=# +function eltype(arr :: NDArray) + MX_float +end + + import Base: slice """`slice` create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest changing dimension is supported. In Julia's column-major perspective, this is the last From f1f869fac31a06d02df7ab0e06236e54c3584563 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Oct 2015 23:33:10 -0400 Subject: [PATCH 135/630] API document for NDArrays --- docs/api/ndarray.rst | 237 ++++++++++++++++++++++++++++++++++++++ src/ndarray.jl | 267 +++++++++++++++++++++++++++++++++---------- 2 files changed, 446 insertions(+), 58 deletions(-) diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index 1d6be0498e4a..6bf6b5defb29 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -97,3 +97,240 @@ Interface functions similar to Julia Arrays + +.. function:: slice(arr :: NDArray, start:stop) + + Create a view into a sub-slice of an :class:`NDArray`. Note only slicing at the slowest + changing dimension is supported. In Julia's column-major perspective, this is the last + dimension. For example, given an :class:`NDArray` of shape (2,3,4), ``slice(array, 2:3)`` will create + a :class:`NDArray` of shape (2,3,2), sharing the data with the original array. This operation is + used in data parallelization to split mini-batch into sub-batches for different devices. + + + + +.. function:: setindex!(arr :: NDArray, val, idx) + + Assign values to an :class:`NDArray`. Elementwise assignment is not implemented, only the following + scenarios are supported + + - ``arr[:] = val``: whole array assignment, ``val`` could be a scalar or an array (Julia ``Array`` + or :class:`NDArray`) of the same shape. 
+ - ``arr[start:stop] = val``: assignment to a *slice*, ``val`` could be a scalar or an array of + the same shape to the slice. See also :func:`slice`. + + + + +.. function:: getindex(arr :: NDArray, idx) + + Shortcut for :func:`slice`. A typical use is to write + + .. code-block:: julia + + arr[:] += 5 + + which translates into + + .. code-block:: julia + + arr[:] = arr[:] + 5 + + which furthur translates into + + .. code-block:: julia + + setindex!(getindex(arr, Colon()), 5, Colon()) + + .. note:: + + The behavior is quite different from indexing into Julia's ``Array``. For example, ``arr[2:5]`` + create a **copy** of the sub-array for Julia ``Array``, while for :class:`NDArray`, this is + a *slice* that shares the memory. + + + + +Copying functions +----------------- + + + + +.. function:: + copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) + + Copy contents of ``src`` into ``dst``. + + + + +.. function:: + copy(arr :: NDArray) + copy(arr :: NDArray, ctx :: Context) + copy(arr :: Array, ctx :: Context) + + Create a copy of an array. When no :class:`Context` is given, create a Julia ``Array``. + Otherwise, create an :class:`NDArray` on the specified context. + + + + +.. function:: convert(::Type{Array{T}}, arr :: NDArray) + + Convert an :class:`NDArray` into a Julia ``Array`` of specific type. + + + + +Basic arithmetics +----------------- + + + + +.. function:: @inplace + + Julia does not support re-definiton of ``+=`` operator (like ``__iadd__`` in python), + When one write ``a += b``, it gets translated to ``a = a+b``. ``a+b`` will allocate new + memory for the results, and the newly allocated :class:`NDArray` object is then assigned + back to a, while the original contents in a is discarded. This is very inefficient + when we want to do inplace update. + + This macro is a simple utility to implement this behavior. Write + + .. code-block:: julia + + @mx.inplace a += b + + will translate into + + .. 
code-block:: julia + + mx.add_to!(a, b) + + which will do inplace adding of the contents of ``b`` into ``a``. + + + + +.. function:: add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) + + Add a bunch of arguments into ``dst``. Inplace updating. + + + + +.. function:: + +(args...) + .+(args...) + + Summation. Multiple arguments of either scalar or :class:`NDArray` could be + added together. Note at least the first or second argument needs to be an :class:`NDArray` to + avoid ambiguity of built-in summation. + + + + +.. function:: sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) + + Subtract a bunch of arguments from ``dst``. Inplace updating. + + + + +.. function:: + -(arg0, arg1) + -(arg0) + .-(arg0, arg1) + + Subtraction ``arg0 - arg1``, of scalar types or :class:`NDArray`. Or create + the negative of ``arg0``. + + + + +.. function:: mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise multiplication into ``dst`` of either a scalar or an :class:`NDArray` of the same shape. + Inplace updating. + + + + +.. function:: + .*(arg0, arg1) + + Elementwise multiplication of ``arg0`` and ``arg``, could be either scalar or :class:`NDArray`. + + + + +.. function:: + *(arg0, arg1) + + Currently only multiplication a scalar with an :class:`NDArray` is implemented. Matrix multiplication + is to be added soon. + + + + +.. function:: div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise divide a scalar or an :class:`NDArray` of the same shape from ``dst``. Inplace updating. + + + + +.. function:: ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise dividing an :class:`NDArray` by a scalar or another :class:`NDArray` of the same shape. + + + + +.. function:: /(arg0 :: NDArray, arg :: Real) + + Divide an :class:`NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. + + + + +IO +-- + + + + +.. function:: load(filename, ::Type{NDArray}) + + Load NDArrays from binary file. 
+ + :param AbstractString filename: the path of the file to load. It could be S3 or HDFS address. + :return: Either ``Dict{Base.Symbol, NDArray}`` or ``Vector{NDArray}``. + + If the ``libmxnet`` is built with the corresponding component enabled. Examples + + * ``s3://my-bucket/path/my-s3-ndarray`` + * ``hdfs://my-bucket/path/my-hdfs-ndarray`` + * ``/path-to/my-local-ndarray`` + + + + +.. function:: save(filename :: AbstractString, data) + + Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built + with corresponding support. + + :param filename: path to the binary file to write to. + :param data: an :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. + + + + +libmxnet APIs +------------- + + + diff --git a/src/ndarray.jl b/src/ndarray.jl index b2d42fda355f..6db7546349f3 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -193,12 +193,15 @@ end import Base: slice -"""`slice` create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest -changing dimension is supported. In Julia's column-major perspective, this is the last -dimension. For example, given an `NDArray` of shape (2,3,4), `sub(array, 2:3)` will create -a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is -used in data parallelization to split mini-batch into sub-batches for different devices. -""" +#=doc +.. function:: slice(arr :: NDArray, start:stop) + + Create a view into a sub-slice of an :class:`NDArray`. Note only slicing at the slowest + changing dimension is supported. In Julia's column-major perspective, this is the last + dimension. For example, given an :class:`NDArray` of shape (2,3,4), ``slice(array, 2:3)`` will create + a :class:`NDArray` of shape (2,3,2), sharing the data with the original array. This operation is + used in data parallelization to split mini-batch into sub-batches for different devices. 
+=# function slice(arr :: NDArray, ::Colon) arr end @@ -219,7 +222,18 @@ function slice(arr :: NDArray, slice::UnitRange{Int}) end import Base: setindex! -"Assign all elements of an NDArray to a scalar" + +#=doc +.. function:: setindex!(arr :: NDArray, val, idx) + + Assign values to an :class:`NDArray`. Elementwise assignment is not implemented, only the following + scenarios are supported + + - ``arr[:] = val``: whole array assignment, ``val`` could be a scalar or an array (Julia ``Array`` + or :class:`NDArray`) of the same shape. + - ``arr[start:stop] = val``: assignment to a *slice*, ``val`` could be a scalar or an array of + the same shape to the slice. See also :func:`slice`. +=# function setindex!(arr :: NDArray, val :: Real, ::Colon) @assert(arr.writable) _set_value(val, arr) @@ -235,6 +249,33 @@ function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, id setindex!(slice(arr, idx), val, Colon()) end +#=doc +.. function:: getindex(arr :: NDArray, idx) + + Shortcut for :func:`slice`. A typical use is to write + + .. code-block:: julia + + arr[:] += 5 + + which translates into + + .. code-block:: julia + + arr[:] = arr[:] + 5 + + which furthur translates into + + .. code-block:: julia + + setindex!(getindex(arr, Colon()), 5, Colon()) + + .. note:: + + The behavior is quite different from indexing into Julia's ``Array``. For example, ``arr[2:5]`` + create a **copy** of the sub-array for Julia ``Array``, while for :class:`NDArray`, this is + a *slice* that shares the memory. +=# import Base: getindex """Shortcut for `slice`. **NOTE** the behavior for Julia's built-in index slicing is to create a copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. 
@@ -246,11 +287,17 @@ function getindex(arr :: NDArray, idx::UnitRange{Int}) slice(arr, idx) end -#------------------------------------------------------------ -# Copying functions -#------------------------------------------------------------ +#=doc +Copying functions +----------------- +=# import Base: copy!, copy, convert -"Copy data between NDArrays" +#=doc +.. function:: + copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) + + Copy contents of ``src`` into ``dst``. +=# function copy!(dst :: NDArray, src :: NDArray) @assert(dst.writable) if dst.handle == src.handle @@ -262,15 +309,16 @@ function copy!(dst :: NDArray, src :: NDArray) return dst end -"Copy data from NDArray to Julia Array" function copy!(dst :: Array{MX_float}, src :: NDArray) @assert size(dst) == size(src) @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{MX_float}, Csize_t), src, pointer(dst), length(dst)) return dst end +function copy!{T<:Real}(dst :: Array{T}, src :: NDArray) + copy!(dst, copy(src)) +end -"Copy data from Julia Array to NDArray" function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) @assert dst.writable @assert size(dst) == size(src) @@ -280,54 +328,72 @@ function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) return dst end -"Create copy: NDArray -> Julia Array" +#=doc +.. function:: + copy(arr :: NDArray) + copy(arr :: NDArray, ctx :: Context) + copy(arr :: Array, ctx :: Context) + + Create a copy of an array. When no :class:`Context` is given, create a Julia ``Array``. + Otherwise, create an :class:`NDArray` on the specified context. 
+=# +# Create copy: NDArray -> Julia Array function copy(arr :: NDArray) j_arr = Array(MX_float, size(arr)) copy!(j_arr, arr) end -"Create copy: NDArray -> NDArray in a given context" +# Create copy: NDArray -> NDArray in a given context function copy(arr :: NDArray, ctx :: Context) dst = NDArray(_ndarray_alloc(size(arr), ctx, true)) copy!(dst, arr) end -"Create copy: Julia Array -> NDArray in a given context" +# Create copy: Julia Array -> NDArray in a given context function copy{T<:Real}(arr :: Array{T}, ctx :: Context) dst = empty(size(arr), ctx) copy!(dst, arr) end -"Convert copy: NDArray -> Julia Array" +#=doc +.. function:: convert(::Type{Array{T}}, arr :: NDArray) + + Convert an :class:`NDArray` into a Julia ``Array`` of specific type. +=# +# Convert copy: NDArray -> Julia Array function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) convert(t, copy(arr)) end -#------------------------------------------------------------ -# Basic arithmetics -#------------------------------------------------------------ -""" -Julia does not support re-definiton of `+=` operator (like `__iadd__` in python), -When one write `a += b`, it gets translated to `a = a+b`. `a+b` will allocate new -memory for the results, and the newly allocated `NDArray` object is then assigned -back to a, while the original contents in a is discarded. This is very inefficient -when we want to do inplace update. +#=doc +Basic arithmetics +----------------- +=# -This macro is a simple utility to implement this behavior. Write +#=doc +.. function:: @inplace -```julia -@mx.inplace a += b -``` + Julia does not support re-definiton of ``+=`` operator (like ``__iadd__`` in python), + When one write ``a += b``, it gets translated to ``a = a+b``. ``a+b`` will allocate new + memory for the results, and the newly allocated :class:`NDArray` object is then assigned + back to a, while the original contents in a is discarded. This is very inefficient + when we want to do inplace update. 
-will translate into + This macro is a simple utility to implement this behavior. Write -```julia -mx.add_to!(a, b) -``` + .. code-block:: julia -which will do inplace adding of the contents of b into a. -""" + @mx.inplace a += b + + will translate into + + .. code-block:: julia + + mx.add_to!(a, b) + + which will do inplace adding of the contents of ``b`` into ``a``. +=# macro inplace(stmt) if stmt.head == :+= || stmt.head == :.+= Expr(:call, :add_to!, esc(stmt.args[1]), esc(stmt.args[2])) @@ -342,6 +408,11 @@ macro inplace(stmt) end end +#=doc +.. function:: add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) + + Add a bunch of arguments into ``dst``. Inplace updating. +=# function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) @assert dst.writable for arg in args @@ -354,7 +425,15 @@ function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) return dst end -# We fix the first arg to be NDArray to avoid ambiguity +#=doc +.. function:: + +(args...) + .+(args...) + + Summation. Multiple arguments of either scalar or :class:`NDArray` could be + added together. Note at least the first or second argument needs to be an :class:`NDArray` to + avoid ambiguity of built-in summation. +=# import Base: +, .+ function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) ret = copy(arg0, context(arg0)) @@ -363,7 +442,18 @@ end function .+(arg0 :: NDArray, args :: Union{Real, NDArray}...) +(arg0, args...) end +function +(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) + +(arg1, arg0, args...) +end +function .+(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) + .+(arg1, arg0, args...) +end + +#=doc +.. function:: sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) + Subtract a bunch of arguments from ``dst``. Inplace updating. 
+=# function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) @@ -372,6 +462,16 @@ function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) _minus(dst, arg, dst) end end + +#=doc +.. function:: + -(arg0, arg1) + -(arg0) + .-(arg0, arg1) + + Subtraction ``arg0 - arg1``, of scalar types or :class:`NDArray`. Or create + the negative of ``arg0``. +=# import Base: -, .- function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) @@ -380,10 +480,25 @@ end function .-(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) -(arg0, arg1) end +function -(arg0 :: Real, arg1 :: NDArray) + ret = -arg1 + add_to!(ret, arg0) + return ret +end +function .-(arg0 :: Real, arg1 :: NDArray) + -(arg0, arg1) +end + function -(arg0 :: NDArray) _mul_scalar(arg0, -1.0) end +#=doc +.. function:: mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise multiplication into ``dst`` of either a scalar or an :class:`NDArray` of the same shape. + Inplace updating. +=# function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) @@ -393,6 +508,13 @@ function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) end return dst end + +#=doc +.. function:: + .*(arg0, arg1) + + Elementwise multiplication of ``arg0`` and ``arg``, could be either scalar or :class:`NDArray`. +=# import Base: .*, * function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) @@ -401,8 +523,14 @@ end function .*(arg0 :: Real, arg :: NDArray) .*(arg, arg0) end -# unlike *, we only allow type Real in arguments, because array-array * operator -# means matrix multiplication in Julia + +#=doc +.. function:: + *(arg0, arg1) + + Currently only multiplication a scalar with an :class:`NDArray` is implemented. Matrix multiplication + is to be added soon. 
+=# function *(arg0 :: NDArray, arg :: Real) ret = copy(arg0, context(arg0)) mul_to!(ret, arg) @@ -411,6 +539,11 @@ function *(arg0 :: Real, arg :: NDArray) *(arg, arg0) end +#=doc +.. function:: div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise divide a scalar or an :class:`NDArray` of the same shape from ``dst``. Inplace updating. +=# function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) @@ -419,33 +552,45 @@ function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) _div(dst, arg, dst) end end + import Base: ./, / +#=doc +.. function:: ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise dividing an :class:`NDArray` by a scalar or another :class:`NDArray` of the same shape. +=# function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) div_from!(ret, arg) end + +#=doc +.. function:: /(arg0 :: NDArray, arg :: Real) + + Divide an :class:`NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. +=# function /(arg0 :: NDArray, arg :: Real) ./(arg0, arg) end -#------------------------------------------------------------ -# IO -#------------------------------------------------------------ -"""Load NDArrays from binary file. - -**Parameters**: +#=doc +IO +-- +=# +#=doc +.. function:: load(filename, ::Type{NDArray}) -* `filename`: the path of the file to load. It could be S3 or HDFS address - if the `libmxnet` is built with the corresponding component enabled. Examples + Load NDArrays from binary file. - * `s3://my-bucket/path/my-s3-ndarray` - * `hdfs://my-bucket/path/my-hdfs-ndarray` - * `/path-to/my-local-ndarray` + :param AbstractString filename: the path of the file to load. It could be S3 or HDFS address. + :return: Either ``Dict{Base.Symbol, NDArray}`` or ``Vector{NDArray}``. -**Returns**: + If the ``libmxnet`` is built with the corresponding component enabled. 
Examples - Either `Dict{Base.Symbol, NDArray}` or `Vector{NDArray}`. -""" + * ``s3://my-bucket/path/my-s3-ndarray`` + * ``hdfs://my-bucket/path/my-hdfs-ndarray`` + * ``/path-to/my-local-ndarray`` +=# function load(filename::AbstractString, ::Type{NDArray}) out_size = Ref{MX_uint}(0) out_hdrs = Ref{Ptr{MX_handle}}(0) @@ -464,13 +609,15 @@ function load(filename::AbstractString, ::Type{NDArray}) end end -"""Save NDarrays to binary file. +#=doc +.. function:: save(filename :: AbstractString, data) -**Parameters**: + Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built + with corresponding support. -* `filename`: path to the binary file to write to. -* `data`: an `NDArray`, or a `Vector{NDArray}` or a `Dict{Base.Symbol, NDArray}`. -""" + :param filename: path to the binary file to write to. + :param data: an :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. +=# function save(filename::AbstractString, data::NDArray) save(filename, [data]) end @@ -487,6 +634,10 @@ function save(filename::AbstractString, data::Dict{Base.Symbol,NDArray}) filename, length(names), arrays, names) end +#=doc +libmxnet APIs +------------- +=# ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ From ac39fd6c2ceb153b9862bbd6c00e3c8ccb70f408 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 27 Oct 2015 00:10:13 -0400 Subject: [PATCH 136/630] libmxnet API for ndarrays --- docs/api/ndarray.rst | 279 +++++++++++++++++++++++++++++++++++++++++++ docs/build-api.jl | 59 ++++++++- src/ndarray.jl | 162 ++++++++++++++----------- src/util.jl | 5 +- 4 files changed, 425 insertions(+), 80 deletions(-) diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index 6bf6b5defb29..c8b9aeb6dc6a 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -334,3 +334,282 @@ 
libmxnet APIs + +The libxmnet APIs are automatically imported from ``libmxnet.so``. The functions listed +here operate on :class:`NDArray` objects. The arguments to the functions are typically ordered +as + +.. code-block:: julia + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) + +unless ``NDARRAY_ARG_BEFORE_SCALAR`` is not set. In this case, the scalars are put before the input arguments: + +.. code-block:: julia + + func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) + + +If ``ACCEPT_EMPTY_MUTATE_TARGET`` is set. An overloaded function without the output arguments will also be defined: + +.. code-block:: julia + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) + +Upon calling, the output arguments will be automatically initialized with empty NDArrays. + +Those functions always return the output arguments. If there is only one output (the typical situation), that +object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. + +Public APIs +^^^^^^^^^^^ +.. function:: choose_element(...) + + Choose one element from each line(row for python, column for R/Julia) in lhs according to index indicated by rhs + + :param NDArray lhs: Left operand to the function. + + + :param NDArray rhs: Right operand to the function. + + + + + +.. function:: clip(...) + + Clip ndarray elements to range (a_min, a_max) + + :param NDArray src: Source input + + + :param real_t a_min: Minimum value + + + :param real_t a_max: Maximum value + + + + + +.. function:: dot(...) + + Calcuate 2D matrix multiplication + + :param NDArray lhs: Left operand to the function. + + + :param NDArray rhs: Right operand to the function. + + + + + +.. function:: sqrt(...) + + Take square root of the src + + :param NDArray src: Source input to the function + + + + + +.. function:: square(...) 
+ + Take square of the src + + :param NDArray src: Source input to the function + + + + +Internal APIs +^^^^^^^^^^^^^ + +.. note:: + + Document and signatures for internal API functions might be incomplete. + +.. function:: _copyto(...) + + + + :param NDArray src: Source input to the function. + + + + + +.. function:: _div(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param NDArray rhs: Right operand to the function. + + + + + +.. function:: _div_scalar(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param real_t rhs: Right operand to the function. + + + + + +.. function:: _minus(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param NDArray rhs: Right operand to the function. + + + + + +.. function:: _minus_scalar(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param real_t rhs: Right operand to the function. + + + + + +.. function:: _mul(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param NDArray rhs: Right operand to the function. + + + + + +.. function:: _mul_scalar(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param real_t rhs: Right operand to the function. + + + + + +.. function:: _onehot_encode(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param NDArray rhs: Right operand to the function. + + + + + +.. function:: _plus(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param NDArray rhs: Right operand to the function. + + + + + +.. function:: _plus_scalar(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param real_t rhs: Right operand to the function. + + + + + +.. function:: _random_gaussian(...) + + + + + + + +.. function:: _random_uniform(...) + + + + + + + +.. function:: _rdiv_scalar(...) + + + + :param NDArray lhs: Left operand to the function. + + + :param real_t rhs: Right operand to the function. + + + + + +.. function:: _rminus_scalar(...) 
+ + + + :param NDArray lhs: Left operand to the function. + + + :param real_t rhs: Right operand to the function. + + + + + +.. function:: _set_value(...) + + + + :param real_t src: Source input to the function. + + + + + + + + diff --git a/docs/build-api.jl b/docs/build-api.jl index 9ddf9ff53b5b..82d5d320674b 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -1,17 +1,66 @@ # extract API docs +using MXNet + +const SRC_DIR = joinpath(Pkg.dir("MXNet"), "src") +const API_DIR = joinpath(Pkg.dir("MXNet"), "docs", "api") function extract_doc(output_filename::AbstractString, input_filenames::AbstractString...) - src_dir = joinpath(Pkg.dir("MXNet"), "src") - api_dir = joinpath(Pkg.dir("MXNet"), "docs", "api") - mkpath(api_dir) - open(joinpath(api_dir, output_filename), "w") do io + mkpath(API_DIR) + open(joinpath(API_DIR, output_filename), "w") do io for in_fn in input_filenames - for doc in eachmatch(r"^#=doc\s*$(.*?)^=#\s*$"ms, readall(joinpath(src_dir, in_fn))) + for doc in eachmatch(r"^#=doc\s*$(.*?)^=#\s*$"ms, readall(joinpath(SRC_DIR, in_fn))) println(io, doc.captures[1], "\n\n") end end end end +function sort_api_names(names) + names = collect(names) + names_pub = filter(x -> !startswith(string(x), '_'), names) + names_pri = filter(x -> startswith(string(x), '_'), names) + return (sort(names_pub), sort(names_pri)) +end + +function embed_mxnet_api(output_filename::AbstractString, key::AbstractString, generator::Function) + output_filename = joinpath(API_DIR, output_filename) + contents = readall(output_filename) + open(output_filename, "w") do io + docs = generator(gen_docs=true) + function gen_doc(fname) + doc = replace(docs[fname], r"^"m, " ") + """ + .. function:: $fname(...) 
+ + $doc + + """ + end + + names_pub, names_pri = sort_api_names(keys(docs)) + docs_pub = join(map(gen_doc, names_pub), "\n\n") + docs_pri = join(map(gen_doc, names_pri), "\n\n") + docstrings = """ + Public APIs + ^^^^^^^^^^^ + """ * docs_pub + + docstrings *= """ + + Internal APIs + ^^^^^^^^^^^^^ + + .. note:: + + Document and signatures for internal API functions might be incomplete. + + """ * docs_pri + + key = mx.format(mx.DOC_EMBED_ANCHOR, key) + println(io, replace(contents, key, docstrings)) + end +end + extract_doc("ndarray.rst", "ndarray.jl") +embed_mxnet_api("ndarray.rst", "ndarray", mx._import_ndarray_functions) diff --git a/src/ndarray.jl b/src/ndarray.jl index 6db7546349f3..eb82e06ec294 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -656,32 +656,36 @@ end # functions can overload them import Base: sqrt -""" -Import dynamic functions for NDArrays. The arguments to the functions are typically ordered +#=doc +The libxmnet APIs are automatically imported from ``libmxnet.so``. The functions listed +here operate on :class:`NDArray` objects. The arguments to the functions are typically ordered as -```julia -func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) -``` +.. code-block:: julia + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) + +unless ``NDARRAY_ARG_BEFORE_SCALAR`` is not set. In this case, the scalars are put before the input arguments: + +.. code-block:: julia + + func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) -unless NDARRAY_ARG_BEFORE_SCALAR is not set. In this case, the scalars are put before the input arguments: -```julia -func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) -``` +If ``ACCEPT_EMPTY_MUTATE_TARGET`` is set. An overloaded function without the output arguments will also be defined: -If `ACCEPT_EMPTY_MUTATE_TARGET` is set. An overloaded function without the output arguments will also be defined: +.. 
code-block:: julia -```julia -func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) -``` + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) Upon calling, the output arguments will be automatically initialized with empty NDArrays. Those functions always return the output arguments. If there is only one output (the typical situation), that -object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. -""" -function _import_ndarray_functions() +object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. + +**autogen:EMBED:ndarray:EMBED:autogen** +=# +function _import_ndarray_functions(;gen_docs=false) n_ref = Ref{MX_uint}(0) h_ref = Ref{Ptr{MX_handle}}(0) @mxcall(:MXListFunctions, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) @@ -689,6 +693,10 @@ function _import_ndarray_functions() n_funcs = n_ref[] h_funcs = pointer_to_array(h_ref[], n_funcs) + if gen_docs + docs = Dict{Base.Symbol, AbstractString}() + end + for i = 1:n_funcs func_handle = h_funcs[i] @@ -708,66 +716,74 @@ function _import_ndarray_functions() func_name = symbol(bytestring(ref_name[])) - #---------------------------------------- - # get function specification - ref_n_use_vars = Ref{MX_uint}(0) - ref_n_scalars = Ref{MX_uint}(0) - ref_n_mut_vars = Ref{MX_uint}(0) - ref_type_mask = Ref{Cint}(0) - @mxcall(:MXFuncDescribe, - (MX_handle, Ref{MX_uint}, Ref{MX_uint}, Ref{MX_uint}, Ref{Cint}), - func_handle, ref_n_use_vars, ref_n_scalars, ref_n_mut_vars, ref_type_mask) - - #---------------------------------------- - # prepare function definition - n_used_vars = ref_n_use_vars[] - n_scalars = ref_n_scalars[] - n_mutate_vars = ref_n_mut_vars[] - type_mask = ref_type_mask[] - accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 - arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 - - # general ndarray function - if arg_before_scalar - args = vcat([Expr(:(::), 
symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - else - args = vcat([Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - end - - _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) - _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) - _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) - stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) - if n_mutate_vars == 1 - stmt_ret = :(return out1) + if gen_docs + # generate document only + f_desc = bytestring(ref_desc[]) * "\n\n" + f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) + docs[func_name] = f_desc else - stmt_ret = Expr(:return, Expr(:tuple, [symbol("out$i") for i=1:n_mutate_vars]...)) - end - - func_body = Expr(:block, stmt_call, stmt_ret) - func_head = Expr(:call, func_name, args...) - - func_def = Expr(:function, func_head, func_body) - eval(func_def) - - if accept_empty_mutate - args0 = args[1:n_used_vars+n_scalars] - func_head0 = Expr(:call, func_name, args0...) - _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] - stmt_call0 = Expr(:call, func_name, args0..., _mut_vars0...) - func_body0 = Expr(:block, stmt_call0) - func_head0 = Expr(:call, func_name, args0...) 
- - func_def0 = Expr(:function, func_head0, func_body0) - eval(func_def0) + #---------------------------------------- + # get function specification + ref_n_use_vars = Ref{MX_uint}(0) + ref_n_scalars = Ref{MX_uint}(0) + ref_n_mut_vars = Ref{MX_uint}(0) + ref_type_mask = Ref{Cint}(0) + @mxcall(:MXFuncDescribe, + (MX_handle, Ref{MX_uint}, Ref{MX_uint}, Ref{MX_uint}, Ref{Cint}), + func_handle, ref_n_use_vars, ref_n_scalars, ref_n_mut_vars, ref_type_mask) + + #---------------------------------------- + # prepare function definition + n_used_vars = ref_n_use_vars[] + n_scalars = ref_n_scalars[] + n_mutate_vars = ref_n_mut_vars[] + type_mask = ref_type_mask[] + accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 + arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 + + # general ndarray function + if arg_before_scalar + args = vcat([Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], + [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + else + args = vcat([Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], + [Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + end + + _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) + _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) + _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) + stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) + if n_mutate_vars == 1 + stmt_ret = :(return out1) + else + stmt_ret = Expr(:return, Expr(:tuple, [symbol("out$i") for i=1:n_mutate_vars]...)) + end + + func_body = Expr(:block, stmt_call, stmt_ret) + func_head = Expr(:call, func_name, args...) 
+ + func_def = Expr(:function, func_head, func_body) + eval(func_def) + + if accept_empty_mutate + args0 = args[1:n_used_vars+n_scalars] + func_head0 = Expr(:call, func_name, args0...) + _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] + stmt_call0 = Expr(:call, func_name, args0..., _mut_vars0...) + func_body0 = Expr(:block, stmt_call0) + func_head0 = Expr(:call, func_name, args0...) + + func_def0 = Expr(:function, func_head0, func_body0) + eval(func_def0) + end end + end - # TODO: add doc string - # eval(:(@doc($doc_str, $func_name))) + if gen_docs + return docs end end diff --git a/src/util.jl b/src/util.jl index 1c52fdf3f2c3..b3997dbaff45 100644 --- a/src/util.jl +++ b/src/util.jl @@ -46,6 +46,7 @@ end ################################################################################ # Internal Utilities ################################################################################ +const DOC_EMBED_ANCHOR = "**autogen:EMBED:{1}:EMBED:autogen**" function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{char_pp}, arg_descs::Ref{char_pp}, remove_dup::Bool=true) param_keys = Set{AbstractString}() @@ -63,7 +64,7 @@ function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{ch arg_type = bytestring(arg_types[i]) arg_desc = bytestring(arg_descs[i]) - push!(docstrings, "* `$arg_name`: $arg_type\n\n $arg_desc\n\n") + push!(docstrings, ":param $arg_type $arg_name: $arg_desc\n\n") end - return "**Parameters**\n\n$(join(docstrings, "\n"))" + return join(docstrings, "\n") end From e7ec661e5530a212e722d940b78d94d7bdb11026 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 27 Oct 2015 00:19:54 -0400 Subject: [PATCH 137/630] automatic build API for symbols --- docs/api/symbol.rst | 436 ++++++++++++++++++++++++++++++++++++++++++++ docs/build-api.jl | 10 +- docs/index.rst | 1 + src/symbol.jl | 41 +++-- 4 files changed, 475 insertions(+), 13 deletions(-) create mode 100644 docs/api/symbol.rst diff --git 
a/docs/api/symbol.rst b/docs/api/symbol.rst new file mode 100644 index 000000000000..9feae90c681f --- /dev/null +++ b/docs/api/symbol.rst @@ -0,0 +1,436 @@ + +libmxnet APIs +------------- + +Public APIs +^^^^^^^^^^^ +.. function:: Activation(...) + + Apply activation function to input. + + :param Symbol data: Input data to activation function. + + + :param {'relu', 'sigmoid', 'tanh'}, required act_type: Activation function to be applied. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: BatchNorm(...) + + Apply batch normalization to input. + + :param Symbol data: Input data to batch normalization + + + :param float, optional, default=1e-10 eps: Epsilon to prevent div 0 + + + :param float, optional, default=0.1 momentum: Momentum for moving average + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: BlockGrad(...) + + Get output from a symbol and pass 0 gradient back + + :param Symbol data: Input data. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: Concat(...) + + Perform an feature concat on channel dim (dim 1) over all the inputs. + + This function support variable length positional :class:`Symbol` inputs. + + :param int, required num_args: Number of inputs to be concated. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: Convolution(...) + + Apply convolution to input then add a bias. + + :param Symbol data: Input data to the ConvolutionOp. + + + :param Symbol weight: Weight matrix. + + + :param Symbol bias: Bias parameter. 
+ + + :param Shape(tuple), required kernel: convolution kernel size: (y, x) + + + :param Shape(tuple), optional, default=(1, 1) stride: convolution stride: (y, x) + + + :param Shape(tuple), optional, default=(0, 0) pad: pad for convolution: (y, x) + + + :param int (non-negative), required num_filter: convolution filter(channel) number + + + :param int (non-negative), optional, default=1 num_group: number of groups partition + + + :param long (non-negative), optional, default=512 workspace: Tmp workspace for convolution (MB) + + + :param boolean, optional, default=False no_bias: Whether to disable bias parameter. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: Dropout(...) + + Apply dropout to input + + :param Symbol data: Input data to dropout. + + + :param float, optional, default=0.5 p: Fraction of the input that gets dropped out at training time + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: ElementWiseSum(...) + + Perform an elementwise sum over all the inputs. + + This function support variable length positional :class:`Symbol` inputs. + + :param int, required num_args: Number of inputs to be sumed. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: Flatten(...) + + Flatten input + + :param Symbol data: Input data to flatten. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: FullyConnected(...) + + Apply matrix multiplication to input then add a bias. + + :param Symbol data: Input data to the FullyConnectedOp. + + + :param Symbol weight: Weight matrix. + + + :param Symbol bias: Bias parameter. 
+ + + :param int, required num_hidden: Number of hidden nodes of the output. + + + :param boolean, optional, default=False no_bias: Whether to disable bias parameter. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: LRN(...) + + Apply convolution to input then add a bias. + + :param Symbol data: Input data to the ConvolutionOp. + + + :param float, optional, default=0.0001 alpha: value of the alpha variance scaling parameter in the normalization formula + + + :param float, optional, default=0.75 beta: value of the beta power parameter in the normalization formula + + + :param float, optional, default=2 knorm: value of the k parameter in normalization formula + + + :param int (non-negative), required nsize: normalization window width in elements. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: LeakyReLU(...) + + Apply activation function to input. + + :param Symbol data: Input data to activation function. + + + :param {'leaky', 'prelu', 'rrelu'},optional, default='leaky' act_type: Activation function to be applied. + + + :param float, optional, default=0.25 slope: Init slope for the activation. (For leaky only) + + + :param float, optional, default=0.125 lower_bound: Lower bound of random slope. (For rrelu only) + + + :param float, optional, default=0.334 upper_bound: Upper bound of random slope. (For rrelu only) + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: LinearRegressionOutput(...) + + Use linear regression for final output, this is used on final output of a net. + + :param Symbol data: Input data to function. + + + :param Symbol label: Input label to function. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. 
+ + :return: The constructed :class:`Symbol`. + + + + + +.. function:: LogisticRegressionOutput(...) + + Use Logistic regression for final output, this is used on final output of a net. + Logistic regression is suitable for binary classification or probability prediction tasks. + + :param Symbol data: Input data to function. + + + :param Symbol label: Input label to function. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: Pooling(...) + + Perform spatial pooling on inputs. + + :param Symbol data: Input data to the pooling operator. + + + :param Shape(tuple), required kernel: pooling kernel size: (y, x) + + + :param {'avg', 'max', 'sum'}, required pool_type: Pooling type to be applied. + + + :param Shape(tuple), optional, default=(1, 1) stride: stride: for pooling (y, x) + + + :param Shape(tuple), optional, default=(0, 0) pad: pad for pooling: (y, x) + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: Reshape(...) + + Reshape input to target shape + + :param Symbol data: Input data to reshape. + + + :param Shape(tuple), required target_shape: Target new shape + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: SliceChannel(...) + + Slice channel into many outputs with equally divided channel + + :param int, required num_outputs: Number of outputs to be sliced. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: Softmax(...) + + Perform a softmax transformation on input. + + :param Symbol data: Input data to softmax. 
+ + + :param float, optional, default=1 grad_scale: Scale the gradient by a float factor + + + :param boolean, optional, default=False multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: sqrt(...) + + Take square root of the src + + :param Symbol src: Source symbolic input to the function + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: square(...) + + Take square of the src + + :param Symbol src: Source symbolic input to the function + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + +Internal APIs +^^^^^^^^^^^^^ + +.. note:: + + Document and signatures for internal API functions might be incomplete. + +.. function:: _Div(...) + + Perform an elementwise div. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: _Minus(...) + + Perform an elementwise minus. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: _Mul(...) + + Perform an elementwise mul. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. + + + + + +.. function:: _Plus(...) + + Perform an elementwise plus. + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: The constructed :class:`Symbol`. 
+ + + + + + + + diff --git a/docs/build-api.jl b/docs/build-api.jl index 82d5d320674b..b70cbd94615b 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -4,8 +4,10 @@ using MXNet const SRC_DIR = joinpath(Pkg.dir("MXNet"), "src") const API_DIR = joinpath(Pkg.dir("MXNet"), "docs", "api") +################################################################################# +# Document Builders +################################################################################# function extract_doc(output_filename::AbstractString, input_filenames::AbstractString...) - mkpath(API_DIR) open(joinpath(API_DIR, output_filename), "w") do io for in_fn in input_filenames @@ -62,5 +64,11 @@ function embed_mxnet_api(output_filename::AbstractString, key::AbstractString, g end end +################################################################################# +# Build Documents +################################################################################# extract_doc("ndarray.rst", "ndarray.jl") embed_mxnet_api("ndarray.rst", "ndarray", mx._import_ndarray_functions) + +extract_doc("symbol.rst", "symbol.jl") +embed_mxnet_api("symbol.rst", "symbol", mx._import_atomic_symbol_creators) diff --git a/docs/index.rst b/docs/index.rst index 6af0d3b6fde7..adc4ef8d658c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -32,6 +32,7 @@ For more details, see documentation below. 
Please also checkout the `examples :caption: API Documentation api/ndarray + api/symbol Indices and tables ================== diff --git a/src/symbol.jl b/src/symbol.jl index a1f008c29cd3..635b4a494c6b 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -253,10 +253,16 @@ function save(filename :: AbstractString, sym :: Symbol) @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), sym, filename) end +#=doc +libmxnet APIs +------------- + +**autogen:EMBED:symbol:EMBED:autogen** +=# ################################################################################ # Atomic Symbol functions dynamically imported from libmxnet ################################################################################ -function _define_atomic_symbol_creator(hdr :: MX_handle) +function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) ref_name = Ref{char_p}(0) ref_desc = Ref{char_p}(0) ref_kv_nargs = Ref{char_p}(0) @@ -274,13 +280,16 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) kv_nargs_s = bytestring(ref_kv_nargs[]) kv_nargs = symbol(kv_nargs_s) - f_desc = bytestring(ref_desc[]) * "\n\n" - if !isempty(kv_nargs_s) - f_desc *= "This function support variable length positional `Symbol` inputs.\n\n" + if gen_docs + f_desc = bytestring(ref_desc[]) * "\n\n" + if !isempty(kv_nargs_s) + f_desc *= "This function support variable length positional :class:`Symbol` inputs.\n\n" + end + f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) + f_desc *= ":param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional.\n\n" + f_desc *= ":return: The constructed :class:`Symbol`.\n\n" + return (func_name, f_desc) end - f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= "* `name`: Julia Symbol (e.g. `:my_symbol`), optional.\n\n The name of the symbol.\n\n" - f_desc *= "**Returns**\n\n`symbol`: `mx.Symbol`\n\n The constructed symbol." # function $func_name(args...; kwargs...) 
func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) @@ -349,12 +358,9 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) func_def = Expr(:function, func_head, Expr(:block, func_body)) eval(func_def) - - # add doc string - eval(:(@doc($f_desc, $func_name))) end -function _import_atomic_symbol_creators() +function _import_atomic_symbol_creators(;gen_docs=false) n_ref = Ref{MX_uint}(0) h_ref = Ref{Ptr{MX_handle}}(0) @mxcall(:MXSymbolListAtomicSymbolCreators, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) @@ -362,9 +368,20 @@ function _import_atomic_symbol_creators() n_creators = n_ref[] h_creators = pointer_to_array(h_ref[], n_creators) + if gen_docs + docs = Dict{Base.Symbol, AbstractString}() + end + for i = 1:n_creators creator_hdr = h_creators[i] - _define_atomic_symbol_creator(creator_hdr) + ret = _define_atomic_symbol_creator(creator_hdr, gen_docs=gen_docs) + if gen_docs + docs[ret[1]] = ret[2] + end + end + + if gen_docs + return docs end end From 9c87c61dde1ac236ed55658e70fa7673d6e357ed Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 27 Oct 2015 00:26:31 -0400 Subject: [PATCH 138/630] fix mal-formatted param type in sphinx syntax --- docs/api/ndarray.rst | 66 ++++++++--------- docs/api/symbol.rst | 165 +++++++++++++++++++++++-------------------- src/symbol.jl | 16 +++-- src/util.jl | 2 +- 4 files changed, 135 insertions(+), 114 deletions(-) diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index c8b9aeb6dc6a..3f8404afd7e5 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -367,10 +367,10 @@ Public APIs Choose one element from each line(row for python, column for R/Julia) in lhs according to index indicated by rhs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param NDArray rhs: Right operand to the function. + :param rhs: ``NDArray``. Right operand to the function. 
@@ -380,13 +380,13 @@ Public APIs Clip ndarray elements to range (a_min, a_max) - :param NDArray src: Source input + :param src: ``NDArray``. Source input - :param real_t a_min: Minimum value + :param a_min: ``real_t``. Minimum value - :param real_t a_max: Maximum value + :param a_max: ``real_t``. Maximum value @@ -396,10 +396,10 @@ Public APIs Calcuate 2D matrix multiplication - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param NDArray rhs: Right operand to the function. + :param rhs: ``NDArray``. Right operand to the function. @@ -409,7 +409,7 @@ Public APIs Take square root of the src - :param NDArray src: Source input to the function + :param src: ``NDArray``. Source input to the function @@ -419,7 +419,7 @@ Public APIs Take square of the src - :param NDArray src: Source input to the function + :param src: ``NDArray``. Source input to the function @@ -435,7 +435,7 @@ Internal APIs - :param NDArray src: Source input to the function. + :param src: ``NDArray``. Source input to the function. @@ -445,10 +445,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param NDArray rhs: Right operand to the function. + :param rhs: ``NDArray``. Right operand to the function. @@ -458,10 +458,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param real_t rhs: Right operand to the function. + :param rhs: ``real_t``. Right operand to the function. @@ -471,10 +471,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param NDArray rhs: Right operand to the function. + :param rhs: ``NDArray``. Right operand to the function. @@ -484,10 +484,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. 
- :param real_t rhs: Right operand to the function. + :param rhs: ``real_t``. Right operand to the function. @@ -497,10 +497,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param NDArray rhs: Right operand to the function. + :param rhs: ``NDArray``. Right operand to the function. @@ -510,10 +510,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param real_t rhs: Right operand to the function. + :param rhs: ``real_t``. Right operand to the function. @@ -523,10 +523,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param NDArray rhs: Right operand to the function. + :param rhs: ``NDArray``. Right operand to the function. @@ -536,10 +536,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param NDArray rhs: Right operand to the function. + :param rhs: ``NDArray``. Right operand to the function. @@ -549,10 +549,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param real_t rhs: Right operand to the function. + :param rhs: ``real_t``. Right operand to the function. @@ -578,10 +578,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param real_t rhs: Right operand to the function. + :param rhs: ``real_t``. Right operand to the function. @@ -591,10 +591,10 @@ Internal APIs - :param NDArray lhs: Left operand to the function. + :param lhs: ``NDArray``. Left operand to the function. - :param real_t rhs: Right operand to the function. + :param rhs: ``real_t``. Right operand to the function. @@ -604,7 +604,7 @@ Internal APIs - :param real_t src: Source input to the function. 
+ :param src: ``real_t``. Source input to the function. diff --git a/docs/api/symbol.rst b/docs/api/symbol.rst index 9feae90c681f..a645d9a13dff 100644 --- a/docs/api/symbol.rst +++ b/docs/api/symbol.rst @@ -1,4 +1,17 @@ +.. class:: Symbol + + Symbol is the basic building block of the symbolic graph in MXNet.jl. + + .. note:: + + Throughout this documentation, ``Symbol`` always refer to this :class:`Symbol` type. + When we refer to the Julia's build-in symbol type (e.g. ``typeof(:foo)``), we always + say ``Base.Symbol``. + + + + libmxnet APIs ------------- @@ -8,12 +21,12 @@ Public APIs Apply activation function to input. - :param Symbol data: Input data to activation function. + :param data: ``Symbol``. Input data to activation function. - :param {'relu', 'sigmoid', 'tanh'}, required act_type: Activation function to be applied. + :param act_type: ``{'relu', 'sigmoid', 'tanh'}, required``. Activation function to be applied. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -25,15 +38,15 @@ Public APIs Apply batch normalization to input. - :param Symbol data: Input data to batch normalization + :param data: ``Symbol``. Input data to batch normalization - :param float, optional, default=1e-10 eps: Epsilon to prevent div 0 + :param eps: ``float, optional, default=1e-10``. Epsilon to prevent div 0 - :param float, optional, default=0.1 momentum: Momentum for moving average + :param momentum: ``float, optional, default=0.1``. Momentum for moving average - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -45,9 +58,9 @@ Public APIs Get output from a symbol and pass 0 gradient back - :param Symbol data: Input data. + :param data: ``Symbol``. Input data. 
- :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -61,9 +74,9 @@ Public APIs This function support variable length positional :class:`Symbol` inputs. - :param int, required num_args: Number of inputs to be concated. + :param num_args: ``int, required``. Number of inputs to be concated. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -75,36 +88,36 @@ Public APIs Apply convolution to input then add a bias. - :param Symbol data: Input data to the ConvolutionOp. + :param data: ``Symbol``. Input data to the ConvolutionOp. - :param Symbol weight: Weight matrix. + :param weight: ``Symbol``. Weight matrix. - :param Symbol bias: Bias parameter. + :param bias: ``Symbol``. Bias parameter. - :param Shape(tuple), required kernel: convolution kernel size: (y, x) + :param kernel: ``Shape(tuple), required``. convolution kernel size: (y, x) - :param Shape(tuple), optional, default=(1, 1) stride: convolution stride: (y, x) + :param stride: ``Shape(tuple), optional, default=(1, 1)``. convolution stride: (y, x) - :param Shape(tuple), optional, default=(0, 0) pad: pad for convolution: (y, x) + :param pad: ``Shape(tuple), optional, default=(0, 0)``. pad for convolution: (y, x) - :param int (non-negative), required num_filter: convolution filter(channel) number + :param num_filter: ``int (non-negative), required``. convolution filter(channel) number - :param int (non-negative), optional, default=1 num_group: number of groups partition + :param num_group: ``int (non-negative), optional, default=1``. 
number of groups partition - :param long (non-negative), optional, default=512 workspace: Tmp workspace for convolution (MB) + :param workspace: ``long (non-negative), optional, default=512``. Tmp workspace for convolution (MB) - :param boolean, optional, default=False no_bias: Whether to disable bias parameter. + :param no_bias: ``boolean, optional, default=False``. Whether to disable bias parameter. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -116,12 +129,12 @@ Public APIs Apply dropout to input - :param Symbol data: Input data to dropout. + :param data: ``Symbol``. Input data to dropout. - :param float, optional, default=0.5 p: Fraction of the input that gets dropped out at training time + :param p: ``float, optional, default=0.5``. Fraction of the input that gets dropped out at training time - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -135,9 +148,9 @@ Public APIs This function support variable length positional :class:`Symbol` inputs. - :param int, required num_args: Number of inputs to be sumed. + :param num_args: ``int, required``. Number of inputs to be sumed. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -149,9 +162,9 @@ Public APIs Flatten input - :param Symbol data: Input data to flatten. + :param data: ``Symbol``. Input data to flatten. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. 
@@ -163,21 +176,21 @@ Public APIs Apply matrix multiplication to input then add a bias. - :param Symbol data: Input data to the FullyConnectedOp. + :param data: ``Symbol``. Input data to the FullyConnectedOp. - :param Symbol weight: Weight matrix. + :param weight: ``Symbol``. Weight matrix. - :param Symbol bias: Bias parameter. + :param bias: ``Symbol``. Bias parameter. - :param int, required num_hidden: Number of hidden nodes of the output. + :param num_hidden: ``int, required``. Number of hidden nodes of the output. - :param boolean, optional, default=False no_bias: Whether to disable bias parameter. + :param no_bias: ``boolean, optional, default=False``. Whether to disable bias parameter. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -189,21 +202,21 @@ Public APIs Apply convolution to input then add a bias. - :param Symbol data: Input data to the ConvolutionOp. + :param data: ``Symbol``. Input data to the ConvolutionOp. - :param float, optional, default=0.0001 alpha: value of the alpha variance scaling parameter in the normalization formula + :param alpha: ``float, optional, default=0.0001``. value of the alpha variance scaling parameter in the normalization formula - :param float, optional, default=0.75 beta: value of the beta power parameter in the normalization formula + :param beta: ``float, optional, default=0.75``. value of the beta power parameter in the normalization formula - :param float, optional, default=2 knorm: value of the k parameter in normalization formula + :param knorm: ``float, optional, default=2``. value of the k parameter in normalization formula - :param int (non-negative), required nsize: normalization window width in elements. + :param nsize: ``int (non-negative), required``. normalization window width in elements. - :param Base.Symbol name: The name of the symbol. (e.g. 
`:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -215,21 +228,21 @@ Public APIs Apply activation function to input. - :param Symbol data: Input data to activation function. + :param data: ``Symbol``. Input data to activation function. - :param {'leaky', 'prelu', 'rrelu'},optional, default='leaky' act_type: Activation function to be applied. + :param act_type: ``{'leaky', 'prelu', 'rrelu'},optional, default='leaky'``. Activation function to be applied. - :param float, optional, default=0.25 slope: Init slope for the activation. (For leaky only) + :param slope: ``float, optional, default=0.25``. Init slope for the activation. (For leaky only) - :param float, optional, default=0.125 lower_bound: Lower bound of random slope. (For rrelu only) + :param lower_bound: ``float, optional, default=0.125``. Lower bound of random slope. (For rrelu only) - :param float, optional, default=0.334 upper_bound: Upper bound of random slope. (For rrelu only) + :param upper_bound: ``float, optional, default=0.334``. Upper bound of random slope. (For rrelu only) - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -241,12 +254,12 @@ Public APIs Use linear regression for final output, this is used on final output of a net. - :param Symbol data: Input data to function. + :param data: ``Symbol``. Input data to function. - :param Symbol label: Input label to function. + :param label: ``Symbol``. Input label to function. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. 
@@ -259,12 +272,12 @@ Public APIs Use Logistic regression for final output, this is used on final output of a net. Logistic regression is suitable for binary classification or probability prediction tasks. - :param Symbol data: Input data to function. + :param data: ``Symbol``. Input data to function. - :param Symbol label: Input label to function. + :param label: ``Symbol``. Input label to function. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -276,21 +289,21 @@ Public APIs Perform spatial pooling on inputs. - :param Symbol data: Input data to the pooling operator. + :param data: ``Symbol``. Input data to the pooling operator. - :param Shape(tuple), required kernel: pooling kernel size: (y, x) + :param kernel: ``Shape(tuple), required``. pooling kernel size: (y, x) - :param {'avg', 'max', 'sum'}, required pool_type: Pooling type to be applied. + :param pool_type: ``{'avg', 'max', 'sum'}, required``. Pooling type to be applied. - :param Shape(tuple), optional, default=(1, 1) stride: stride: for pooling (y, x) + :param stride: ``Shape(tuple), optional, default=(1, 1)``. stride: for pooling (y, x) - :param Shape(tuple), optional, default=(0, 0) pad: pad for pooling: (y, x) + :param pad: ``Shape(tuple), optional, default=(0, 0)``. pad for pooling: (y, x) - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -302,12 +315,12 @@ Public APIs Reshape input to target shape - :param Symbol data: Input data to reshape. + :param data: ``Symbol``. Input data to reshape. - :param Shape(tuple), required target_shape: Target new shape + :param target_shape: ``Shape(tuple), required``. Target new shape - :param Base.Symbol name: The name of the symbol. (e.g. 
`:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -319,9 +332,9 @@ Public APIs Slice channel into many outputs with equally divided channel - :param int, required num_outputs: Number of outputs to be sliced. + :param num_outputs: ``int, required``. Number of outputs to be sliced. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -333,15 +346,15 @@ Public APIs Perform a softmax transformation on input. - :param Symbol data: Input data to softmax. + :param data: ``Symbol``. Input data to softmax. - :param float, optional, default=1 grad_scale: Scale the gradient by a float factor + :param grad_scale: ``float, optional, default=1``. Scale the gradient by a float factor - :param boolean, optional, default=False multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes + :param multi_output: ``boolean, optional, default=False``. If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -353,9 +366,9 @@ Public APIs Take square root of the src - :param Symbol src: Source symbolic input to the function + :param src: ``Symbol``. Source symbolic input to the function - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. 
@@ -367,9 +380,9 @@ Public APIs Take square of the src - :param Symbol src: Source symbolic input to the function + :param src: ``Symbol``. Source symbolic input to the function - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -387,7 +400,7 @@ Internal APIs Perform an elementwise div. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -399,7 +412,7 @@ Internal APIs Perform an elementwise minus. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -411,7 +424,7 @@ Internal APIs Perform an elementwise mul. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -423,7 +436,7 @@ Internal APIs Perform an elementwise plus. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. diff --git a/src/symbol.jl b/src/symbol.jl index 635b4a494c6b..eb29f74594d7 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -1,8 +1,16 @@ export Symbol -################################################################################ -# Symbol Type -################################################################################ +#=doc +.. class:: Symbol + + Symbol is the basic building block of the symbolic graph in MXNet.jl. + + .. 
note:: + + Throughout this documentation, ``Symbol`` always refer to this :class:`Symbol` type. + When we refer to the Julia's build-in symbol type (e.g. ``typeof(:foo)``), we always + say ``Base.Symbol``. +=# type Symbol handle :: MX_SymbolHandle end @@ -286,7 +294,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) f_desc *= "This function support variable length positional :class:`Symbol` inputs.\n\n" end f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional.\n\n" + f_desc *= ":param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional.\n\n" f_desc *= ":return: The constructed :class:`Symbol`.\n\n" return (func_name, f_desc) end diff --git a/src/util.jl b/src/util.jl index b3997dbaff45..c51ad598bc5a 100644 --- a/src/util.jl +++ b/src/util.jl @@ -64,7 +64,7 @@ function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{ch arg_type = bytestring(arg_types[i]) arg_desc = bytestring(arg_descs[i]) - push!(docstrings, ":param $arg_type $arg_name: $arg_desc\n\n") + push!(docstrings, ":param $arg_name: ``$arg_type``. $arg_desc\n\n") end return join(docstrings, "\n") end From 091c2349b8a5923f8f0673fe4e4b7ee336be2158 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 27 Oct 2015 00:28:11 -0400 Subject: [PATCH 139/630] add title to symbol doc page --- docs/api/symbol.rst | 6 ++++++ src/symbol.jl | 5 ++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/api/symbol.rst b/docs/api/symbol.rst index a645d9a13dff..cb73a7d35677 100644 --- a/docs/api/symbol.rst +++ b/docs/api/symbol.rst @@ -1,4 +1,10 @@ +Symbol +====== + + + + .. class:: Symbol Symbol is the basic building block of the symbolic graph in MXNet.jl. 
diff --git a/src/symbol.jl b/src/symbol.jl index eb29f74594d7..3c0109c35e17 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -1,4 +1,7 @@ -export Symbol +#=doc +Symbol +====== +=# #=doc .. class:: Symbol From 54e621e79b309d0c0c185858948e7f920a67782a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 27 Oct 2015 00:39:00 -0400 Subject: [PATCH 140/630] prepare for v0.0.3 --- NEWS.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/NEWS.md b/NEWS.md index 53c3f2e5418d..84858e3aca13 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,15 @@ +# v.0.03 (2015.10.27) + +* Model prediction API. +* Model checkpoint loading and saving. +* IJulia Notebook example of using pre-trained imagenet model as classifier. +* Symbol saving and loading. +* NDArray saving and loading. +* Optimizer gradient clipping. +* Model training callback APIs, default checkpoint and speedometer callbacks. +* Julia Array / NDArray data iterator. +* Sphinx documentation system and documents for dynamically imported libmxnet APIs. + # v0.0.2 (2015.10.23) * Fix a bug in build script that causes Julia REPL to exit. From cadee3af2eaef4e41df8b9d3069c3d919066640a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 27 Oct 2015 10:48:01 -0400 Subject: [PATCH 141/630] better type annotation in docs. 
--- docs/api/model.rst | 111 +++++++++++++++++++++++ docs/api/ndarray.rst | 104 ++++++++++++++-------- docs/api/symbol.rst | 205 ++++++++++++++++++++++++++---------------- docs/build-api.jl | 2 + docs/index.rst | 1 + src/model.jl | 206 ++++++++++++++++++++++++++++++------------- src/ndarray.jl | 5 +- src/symbol.jl | 2 +- src/util.jl | 2 +- 9 files changed, 462 insertions(+), 176 deletions(-) create mode 100644 docs/api/model.rst diff --git a/docs/api/model.rst b/docs/api/model.rst new file mode 100644 index 000000000000..266e4162ef33 --- /dev/null +++ b/docs/api/model.rst @@ -0,0 +1,111 @@ + +Model +===== + +The model API provides convenient high-level interface to do training and predicting on +a network described using the symbolic API. + + + + +.. class:: AbstractModel + + The abstract super type of all models in MXNet.jl. + + + + +.. class:: FeedForward + + The feedforward model provides convenient interface to train and predict on + feedforward architectures like multi-layer MLP, ConvNets, etc. There is no + explicitly handling of *time index*, but it is relatively easy to implement + unrolled RNN / LSTM under this framework (**TODO**: add example). For models + that handles sequential data explicitly, please use **TODO**... + + + + +.. function:: FeedForward(arch :: Symbol, ctx) + + :param arch: the architecture of the network constructed using the symbolic API. + :param ctx: the devices on which this model should do computation. It could be a single :class:`Context` + or a list of :class:`Context` objects. In the latter case, data parallelization will be used + for training. If no context is provided, the default context ``cpu()`` will be used. + + + + +.. function:: init_model(self, initializer; overwrite=false, input_shapes...) + + Initialize the weights in the model. + + This method will be called automatically when training a model. 
So there is usually no + need to call this method unless one needs to inspect a model with only randomly initialized + weights. + + :param FeedForward self: the model to be initialized. + :param AbstractInitializer initializer: an initializer describing how the weights should be initialized. + :param Bool overwrite: keyword argument, force initialization even when weights already exists. + :param input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. + For example, ``data=(28,28,1,100), label=(100,)``. + + + + +.. function:: + predict(self, data; overwrite=false, callback=nothing) + + Predict using an existing model. The model should be already initialized, or trained or loaded from + a checkpoint. There is an overloaded function that allows to pass the callback as the first argument, + so it is possible to do + + .. code-block:: julia + + predict(model, data) do batch_output + # consume or write batch_output to file + end + + :param FeedForward self: the model. + :param AbstractDataProvider data: the data to perform prediction on. + :param Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory + allocation of the :class:`Executor` depends on the mini-batch size of the test + data provider. If you call predict twice with data provider of the same batch-size, + then the executor can be re-used. Otherwise, if ``overwrite`` is false (default), + an error will be raised; if ``overwrite`` is set to true, a new :class:`Executor` + will be created to replace the old one. + + .. note:: + + Prediction is computationally much less costly than training, so the bottleneck sometimes becomes the IO + for copying mini-batches of data. Since there is no concern about convergence in prediction, it is better + to set the mini-batch size as large as possible (limited by your device memory) if prediction speed is a + concern. 
+ + For the same reason, currently prediction will only use the first device even if multiple devices are + provided to construct the model. + + :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` + + + + +.. function:: train(model :: FeedForward, ...) + + Alias to :func:`fit`. + + + + +.. function:: fit(model :: FeedForward, optimizer, data; kwargs...) + + Train the ``model`` on ``data`` with the ``optimizer``. + + :param FeedForward model: the model to be trained. + :param AbstractOptimizer optimizer: the optimization algorithm to use. + :param AbstractDataProvider data: the training data provider. + :param Int n_epoch: default 10, the number of full data-passes to run. + :param AbstractOptimizer: + + + diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index 3f8404afd7e5..d3ca98574596 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -323,8 +323,9 @@ IO Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built with corresponding support. - :param filename: path to the binary file to write to. - :param data: an :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. + :param AbstractString filename: path to the binary file to write to. + :param data: data to save to file. + :type data: :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. @@ -367,10 +368,12 @@ Public APIs Choose one element from each line(row for python, column for R/Julia) in lhs according to index indicated by rhs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``NDArray``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: NDArray @@ -380,13 +383,16 @@ Public APIs Clip ndarray elements to range (a_min, a_max) - :param src: ``NDArray``. Source input + :param src: Source input + :type src: NDArray - :param a_min: ``real_t``. 
Minimum value + :param a_min: Minimum value + :type a_min: real_t - :param a_max: ``real_t``. Maximum value + :param a_max: Maximum value + :type a_max: real_t @@ -396,10 +402,12 @@ Public APIs Calcuate 2D matrix multiplication - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``NDArray``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: NDArray @@ -409,7 +417,8 @@ Public APIs Take square root of the src - :param src: ``NDArray``. Source input to the function + :param src: Source input to the function + :type src: NDArray @@ -419,7 +428,8 @@ Public APIs Take square of the src - :param src: ``NDArray``. Source input to the function + :param src: Source input to the function + :type src: NDArray @@ -435,7 +445,8 @@ Internal APIs - :param src: ``NDArray``. Source input to the function. + :param src: Source input to the function. + :type src: NDArray @@ -445,10 +456,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``NDArray``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: NDArray @@ -458,10 +471,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``real_t``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: real_t @@ -471,10 +486,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``NDArray``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: NDArray @@ -484,10 +501,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. 
+ :type lhs: NDArray - :param rhs: ``real_t``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: real_t @@ -497,10 +516,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``NDArray``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: NDArray @@ -510,10 +531,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``real_t``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: real_t @@ -523,10 +546,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``NDArray``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: NDArray @@ -536,10 +561,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``NDArray``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: NDArray @@ -549,10 +576,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``real_t``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: real_t @@ -578,10 +607,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. + :type lhs: NDArray - :param rhs: ``real_t``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: real_t @@ -591,10 +622,12 @@ Internal APIs - :param lhs: ``NDArray``. Left operand to the function. + :param lhs: Left operand to the function. 
+ :type lhs: NDArray - :param rhs: ``real_t``. Right operand to the function. + :param rhs: Right operand to the function. + :type rhs: real_t @@ -604,7 +637,8 @@ Internal APIs - :param src: ``real_t``. Source input to the function. + :param src: Source input to the function. + :type src: real_t diff --git a/docs/api/symbol.rst b/docs/api/symbol.rst index cb73a7d35677..914535f210de 100644 --- a/docs/api/symbol.rst +++ b/docs/api/symbol.rst @@ -27,12 +27,14 @@ Public APIs Apply activation function to input. - :param data: ``Symbol``. Input data to activation function. + :param data: Input data to activation function. + :type data: Symbol - :param act_type: ``{'relu', 'sigmoid', 'tanh'}, required``. Activation function to be applied. + :param act_type: Activation function to be applied. + :type act_type: {'relu', 'sigmoid', 'tanh'}, required - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -44,15 +46,18 @@ Public APIs Apply batch normalization to input. - :param data: ``Symbol``. Input data to batch normalization + :param data: Input data to batch normalization + :type data: Symbol - :param eps: ``float, optional, default=1e-10``. Epsilon to prevent div 0 + :param eps: Epsilon to prevent div 0 + :type eps: float, optional, default=1e-10 - :param momentum: ``float, optional, default=0.1``. Momentum for moving average + :param momentum: Momentum for moving average + :type momentum: float, optional, default=0.1 - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -64,9 +69,10 @@ Public APIs Get output from a symbol and pass 0 gradient back - :param data: ``Symbol``. Input data. + :param data: Input data. + :type data: Symbol - :param name: ``Base.Symbol``. 
The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -80,9 +86,10 @@ Public APIs This function support variable length positional :class:`Symbol` inputs. - :param num_args: ``int, required``. Number of inputs to be concated. + :param num_args: Number of inputs to be concated. + :type num_args: int, required - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -94,36 +101,46 @@ Public APIs Apply convolution to input then add a bias. - :param data: ``Symbol``. Input data to the ConvolutionOp. + :param data: Input data to the ConvolutionOp. + :type data: Symbol - :param weight: ``Symbol``. Weight matrix. + :param weight: Weight matrix. + :type weight: Symbol - :param bias: ``Symbol``. Bias parameter. + :param bias: Bias parameter. + :type bias: Symbol - :param kernel: ``Shape(tuple), required``. convolution kernel size: (y, x) + :param kernel: convolution kernel size: (y, x) + :type kernel: Shape(tuple), required - :param stride: ``Shape(tuple), optional, default=(1, 1)``. convolution stride: (y, x) + :param stride: convolution stride: (y, x) + :type stride: Shape(tuple), optional, default=(1, 1) - :param pad: ``Shape(tuple), optional, default=(0, 0)``. pad for convolution: (y, x) + :param pad: pad for convolution: (y, x) + :type pad: Shape(tuple), optional, default=(0, 0) - :param num_filter: ``int (non-negative), required``. convolution filter(channel) number + :param num_filter: convolution filter(channel) number + :type num_filter: int (non-negative), required - :param num_group: ``int (non-negative), optional, default=1``. 
number of groups partition + :param num_group: number of groups partition + :type num_group: int (non-negative), optional, default=1 - :param workspace: ``long (non-negative), optional, default=512``. Tmp workspace for convolution (MB) + :param workspace: Tmp workspace for convolution (MB) + :type workspace: long (non-negative), optional, default=512 - :param no_bias: ``boolean, optional, default=False``. Whether to disable bias parameter. + :param no_bias: Whether to disable bias parameter. + :type no_bias: boolean, optional, default=False - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -135,12 +152,14 @@ Public APIs Apply dropout to input - :param data: ``Symbol``. Input data to dropout. + :param data: Input data to dropout. + :type data: Symbol - :param p: ``float, optional, default=0.5``. Fraction of the input that gets dropped out at training time + :param p: Fraction of the input that gets dropped out at training time + :type p: float, optional, default=0.5 - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -154,9 +173,10 @@ Public APIs This function support variable length positional :class:`Symbol` inputs. - :param num_args: ``int, required``. Number of inputs to be sumed. + :param num_args: Number of inputs to be sumed. + :type num_args: int, required - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -168,9 +188,10 @@ Public APIs Flatten input - :param data: ``Symbol``. Input data to flatten. + :param data: Input data to flatten. + :type data: Symbol - :param name: ``Base.Symbol``. 
The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -182,21 +203,26 @@ Public APIs Apply matrix multiplication to input then add a bias. - :param data: ``Symbol``. Input data to the FullyConnectedOp. + :param data: Input data to the FullyConnectedOp. + :type data: Symbol - :param weight: ``Symbol``. Weight matrix. + :param weight: Weight matrix. + :type weight: Symbol - :param bias: ``Symbol``. Bias parameter. + :param bias: Bias parameter. + :type bias: Symbol - :param num_hidden: ``int, required``. Number of hidden nodes of the output. + :param num_hidden: Number of hidden nodes of the output. + :type num_hidden: int, required - :param no_bias: ``boolean, optional, default=False``. Whether to disable bias parameter. + :param no_bias: Whether to disable bias parameter. + :type no_bias: boolean, optional, default=False - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -208,21 +234,26 @@ Public APIs Apply convolution to input then add a bias. - :param data: ``Symbol``. Input data to the ConvolutionOp. + :param data: Input data to the ConvolutionOp. + :type data: Symbol - :param alpha: ``float, optional, default=0.0001``. value of the alpha variance scaling parameter in the normalization formula + :param alpha: value of the alpha variance scaling parameter in the normalization formula + :type alpha: float, optional, default=0.0001 - :param beta: ``float, optional, default=0.75``. value of the beta power parameter in the normalization formula + :param beta: value of the beta power parameter in the normalization formula + :type beta: float, optional, default=0.75 - :param knorm: ``float, optional, default=2``. 
value of the k parameter in normalization formula + :param knorm: value of the k parameter in normalization formula + :type knorm: float, optional, default=2 - :param nsize: ``int (non-negative), required``. normalization window width in elements. + :param nsize: normalization window width in elements. + :type nsize: int (non-negative), required - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -234,21 +265,26 @@ Public APIs Apply activation function to input. - :param data: ``Symbol``. Input data to activation function. + :param data: Input data to activation function. + :type data: Symbol - :param act_type: ``{'leaky', 'prelu', 'rrelu'},optional, default='leaky'``. Activation function to be applied. + :param act_type: Activation function to be applied. + :type act_type: {'leaky', 'prelu', 'rrelu'},optional, default='leaky' - :param slope: ``float, optional, default=0.25``. Init slope for the activation. (For leaky only) + :param slope: Init slope for the activation. (For leaky only) + :type slope: float, optional, default=0.25 - :param lower_bound: ``float, optional, default=0.125``. Lower bound of random slope. (For rrelu only) + :param lower_bound: Lower bound of random slope. (For rrelu only) + :type lower_bound: float, optional, default=0.125 - :param upper_bound: ``float, optional, default=0.334``. Upper bound of random slope. (For rrelu only) + :param upper_bound: Upper bound of random slope. (For rrelu only) + :type upper_bound: float, optional, default=0.334 - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -260,12 +296,14 @@ Public APIs Use linear regression for final output, this is used on final output of a net. - :param data: ``Symbol``. 
Input data to function. + :param data: Input data to function. + :type data: Symbol - :param label: ``Symbol``. Input label to function. + :param label: Input label to function. + :type label: Symbol - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -278,12 +316,14 @@ Public APIs Use Logistic regression for final output, this is used on final output of a net. Logistic regression is suitable for binary classification or probability prediction tasks. - :param data: ``Symbol``. Input data to function. + :param data: Input data to function. + :type data: Symbol - :param label: ``Symbol``. Input label to function. + :param label: Input label to function. + :type label: Symbol - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -295,21 +335,26 @@ Public APIs Perform spatial pooling on inputs. - :param data: ``Symbol``. Input data to the pooling operator. + :param data: Input data to the pooling operator. + :type data: Symbol - :param kernel: ``Shape(tuple), required``. pooling kernel size: (y, x) + :param kernel: pooling kernel size: (y, x) + :type kernel: Shape(tuple), required - :param pool_type: ``{'avg', 'max', 'sum'}, required``. Pooling type to be applied. + :param pool_type: Pooling type to be applied. + :type pool_type: {'avg', 'max', 'sum'}, required - :param stride: ``Shape(tuple), optional, default=(1, 1)``. stride: for pooling (y, x) + :param stride: stride: for pooling (y, x) + :type stride: Shape(tuple), optional, default=(1, 1) - :param pad: ``Shape(tuple), optional, default=(0, 0)``. pad for pooling: (y, x) + :param pad: pad for pooling: (y, x) + :type pad: Shape(tuple), optional, default=(0, 0) - :param name: ``Base.Symbol``. 
The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -321,12 +366,14 @@ Public APIs Reshape input to target shape - :param data: ``Symbol``. Input data to reshape. + :param data: Input data to reshape. + :type data: Symbol - :param target_shape: ``Shape(tuple), required``. Target new shape + :param target_shape: Target new shape + :type target_shape: Shape(tuple), required - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -338,9 +385,10 @@ Public APIs Slice channel into many outputs with equally divided channel - :param num_outputs: ``int, required``. Number of outputs to be sliced. + :param num_outputs: Number of outputs to be sliced. + :type num_outputs: int, required - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -352,15 +400,18 @@ Public APIs Perform a softmax transformation on input. - :param data: ``Symbol``. Input data to softmax. + :param data: Input data to softmax. + :type data: Symbol - :param grad_scale: ``float, optional, default=1``. Scale the gradient by a float factor + :param grad_scale: Scale the gradient by a float factor + :type grad_scale: float, optional, default=1 - :param multi_output: ``boolean, optional, default=False``. If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes + :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes + :type multi_output: boolean, optional, default=False - :param name: ``Base.Symbol``. The name of the symbol. 
(e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -372,9 +423,10 @@ Public APIs Take square root of the src - :param src: ``Symbol``. Source symbolic input to the function + :param src: Source symbolic input to the function + :type src: Symbol - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -386,9 +438,10 @@ Public APIs Take square of the src - :param src: ``Symbol``. Source symbolic input to the function + :param src: Source symbolic input to the function + :type src: Symbol - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -406,7 +459,7 @@ Internal APIs Perform an elementwise div. - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -418,7 +471,7 @@ Internal APIs Perform an elementwise minus. - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -430,7 +483,7 @@ Internal APIs Perform an elementwise mul. - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. :return: The constructed :class:`Symbol`. @@ -442,7 +495,7 @@ Internal APIs Perform an elementwise plus. - :param name: ``Base.Symbol``. The name of the symbol. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. 
:return: The constructed :class:`Symbol`. diff --git a/docs/build-api.jl b/docs/build-api.jl index b70cbd94615b..46adc3fa59ce 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -72,3 +72,5 @@ embed_mxnet_api("ndarray.rst", "ndarray", mx._import_ndarray_functions) extract_doc("symbol.rst", "symbol.jl") embed_mxnet_api("symbol.rst", "symbol", mx._import_atomic_symbol_creators) + +extract_doc("model.rst", "model.jl") diff --git a/docs/index.rst b/docs/index.rst index adc4ef8d658c..58d0e43c38fa 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -31,6 +31,7 @@ For more details, see documentation below. Please also checkout the `examples :maxdepth: 1 :caption: API Documentation + api/model api/ndarray api/symbol diff --git a/src/model.jl b/src/model.jl index 84b581b00b09..f21e42ed6f2e 100644 --- a/src/model.jl +++ b/src/model.jl @@ -1,5 +1,27 @@ +#=doc +Model +===== + +The model API provides convenient high-level interface to do training and predicting on +a network described using the symbolic API. +=# + +#=doc +.. class:: AbstractModel + + The abstract super type of all models in MXNet.jl. +=# abstract AbstractModel +#=doc +.. class:: FeedForward + + The feedforward model provides convenient interface to train and predict on + feedforward architectures like multi-layer MLP, ConvNets, etc. There is no + explicitly handling of *time index*, but it is relatively easy to implement + unrolled RNN / LSTM under this framework (**TODO**: add example). For models + that handles sequential data explicitly, please use **TODO**... +=# type FeedForward <: AbstractModel arch :: Symbol ctx :: Vector{Context} @@ -29,6 +51,14 @@ function _split_inputs(batch_size :: Int, n_split :: Int) return idx end +#=doc +.. function:: FeedForward(arch :: Symbol, ctx) + + :param arch: the architecture of the network constructed using the symbolic API. + :param ctx: the devices on which this model should do computation. 
It could be a single :class:`Context` + or a list of :class:`Context` objects. In the latter case, data parallelization will be used + for training. If no context is provided, the default context ``cpu()`` will be used. +=# function FeedForward(arch :: Symbol; context :: Union{Context, Vector{Context}, Void} = nothing) if isa(context, Void) context = [Context(CPU)] @@ -38,19 +68,21 @@ function FeedForward(arch :: Symbol; context :: Union{Context, Vector{Context}, FeedForward(arch, context) end -"""Initialize the weights in the model. +#=doc +.. function:: init_model(self, initializer; overwrite=false, input_shapes...) -This method will be called automatically when training a model. So there is usually no -need to call this method unless one needs to inspect a model with only randomly initialized -weights. + Initialize the weights in the model. -**Parameters** + This method will be called automatically when training a model. So there is usually no + need to call this method unless one needs to inspect a model with only randomly initialized + weights. -* `self`: the model to be initialized -* `initializer`: an `AbstractInitializer` -* `overwrite`: keyword argument, force initialization even when weights already exists -* `input_shapes`: the shape of all data and label inputs to this model, given as keyword arguments. -""" + :param FeedForward self: the model to be initialized. + :param AbstractInitializer initializer: an initializer describing how the weights should be initialized. + :param Bool overwrite: keyword argument, force initialization even when weights already exists. + :param input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. + For example, ``data=(28,28,1,100), label=(100,)``. +=# function init_model(self :: FeedForward, initializer :: AbstractInitializer; overwrite::Bool=false, input_shapes...) 
# all arg names, including data, label, and parameters arg_names = list_arguments(self.arch) @@ -89,57 +121,6 @@ function init_model(self :: FeedForward, initializer :: AbstractInitializer; ove return (arg_names, param_names, aux_names) end -function _init_model(self :: FeedForward, data :: AbstractDataProvider, initializer :: AbstractInitializer, overwrite :: Bool) - init_model(self, initializer; overwrite=overwrite, [provide_data(data)..., provide_label(data)...]...) -end - -function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) - if num_device == 1 && !ismatch(r"dist", string(kv_type)) - kv = nothing - else - if kv_type == :local - max_size = maximum([prod(size(param)) for (k,param) in arg_params]) - if max_size < 1024 * 1024 * 16 - kv_type = :local_update_cpu - else - kv_type = :local_allreduce_cpu - end - info("Auto-select kvstore type = $kv_type") - end - kv = KVStore(kv_type) - end - - update_on_kvstore = true - if isa(kv, Void) || ismatch(r"local_allreduce", string(get_type(kv))) - update_on_kvstore = false - end - - return (kv, update_on_kvstore) -end - -@defstruct TrainingOptions Any ( - initializer :: AbstractInitializer = UniformInitializer(0.01), - n_epoch :: Int = 10, - eval_data :: Union{Void, AbstractDataProvider} = nothing, - eval_metric :: AbstractEvalMetric = Accuracy(), - kvstore :: Union{Base.Symbol, KVStore} = :local, - force_init :: Bool = false, - callbacks :: Vector{AbstractCallback} = AbstractCallback[], -) - -function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, param::CallbackParams, type_filter::Type) - map(callbacks) do cb - if isa(cb, type_filter) - if type_filter == AbstractEpochCallback - # epoch callback have extra access to the model object - cb(self, param) - else - cb(param) - end - end - end -end - function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_shapes...) 
if !isdefined(self, :pred_exec) || isa(self.pred_exec, Void) || overwrite if !isdefined(self, :arg_params) || !isdefined(self, :aux_params) @@ -158,6 +139,41 @@ function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_sha end end +#=doc +.. function:: + predict(self, data; overwrite=false, callback=nothing) + + Predict using an existing model. The model should be already initialized, or trained or loaded from + a checkpoint. There is an overloaded function that allows to pass the callback as the first argument, + so it is possible to do + + .. code-block:: julia + + predict(model, data) do batch_output + # consume or write batch_output to file + end + + :param FeedForward self: the model. + :param AbstractDataProvider data: the data to perform prediction on. + :param Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory + allocation of the :class:`Executor` depends on the mini-batch size of the test + data provider. If you call predict twice with data provider of the same batch-size, + then the executor can be re-used. Otherwise, if ``overwrite`` is false (default), + an error will be raised; if ``overwrite`` is set to true, a new :class:`Executor` + will be created to replace the old one. + + .. note:: + + Prediction is computationally much less costly than training, so the bottleneck sometimes becomes the IO + for copying mini-batches of data. Since there is no concern about convergence in prediction, it is better + to set the mini-batch size as large as possible (limited by your device memory) if prediction speed is a + concern. + + For the same reason, currently prediction will only use the first device even if multiple devices are + provided to construct the model. 
+ + :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` +=# function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = false) predict(self, data; overwrite = overwrite, callback=callback) end @@ -209,9 +225,77 @@ function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::B return output_arrays end +function _init_model(self :: FeedForward, data :: AbstractDataProvider, initializer :: AbstractInitializer, overwrite :: Bool) + init_model(self, initializer; overwrite=overwrite, [provide_data(data)..., provide_label(data)...]...) +end + +function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) + if num_device == 1 && !ismatch(r"dist", string(kv_type)) + kv = nothing + else + if kv_type == :local + max_size = maximum([prod(size(param)) for (k,param) in arg_params]) + if max_size < 1024 * 1024 * 16 + kv_type = :local_update_cpu + else + kv_type = :local_allreduce_cpu + end + info("Auto-select kvstore type = $kv_type") + end + kv = KVStore(kv_type) + end + + update_on_kvstore = true + if isa(kv, Void) || ismatch(r"local_allreduce", string(get_type(kv))) + update_on_kvstore = false + end + + return (kv, update_on_kvstore) +end + +@defstruct TrainingOptions Any ( + initializer :: AbstractInitializer = UniformInitializer(0.01), + n_epoch :: Int = 10, + eval_data :: Union{Void, AbstractDataProvider} = nothing, + eval_metric :: AbstractEvalMetric = Accuracy(), + kvstore :: Union{Base.Symbol, KVStore} = :local, + force_init :: Bool = false, + callbacks :: Vector{AbstractCallback} = AbstractCallback[], +) + +function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, param::CallbackParams, type_filter::Type) + map(callbacks) do cb + if isa(cb, type_filter) + if type_filter == AbstractEpochCallback + # epoch callback have extra access to the model object + cb(self, param) + else + cb(param) + end + end + 
end +end + +#=doc +.. function:: train(model :: FeedForward, ...) + + Alias to :func:`fit`. +=# function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) fit(self, optimizer, data; kwargs...) end + +#=doc +.. function:: fit(model :: FeedForward, optimizer, data; kwargs...) + + Train the ``model`` on ``data`` with the ``optimizer``. + + :param FeedForward model: the model to be trained. + :param AbstractOptimizer optimizer: the optimization algorithm to use. + :param AbstractDataProvider data: the training data provider. + :param Int n_epoch: default 10, the number of full data-passes to run. + :param AbstractOptimizer: +=# function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) opts = TrainingOptions(; kwargs...) diff --git a/src/ndarray.jl b/src/ndarray.jl index eb82e06ec294..db2b300bdebc 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -615,8 +615,9 @@ end Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built with corresponding support. - :param filename: path to the binary file to write to. - :param data: an :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. + :param AbstractString filename: path to the binary file to write to. + :param data: data to save to file. + :type data: :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. =# function save(filename::AbstractString, data::NDArray) save(filename, [data]) diff --git a/src/symbol.jl b/src/symbol.jl index 3c0109c35e17..14adb56f0473 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -297,7 +297,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) f_desc *= "This function support variable length positional :class:`Symbol` inputs.\n\n" end f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param name: ``Base.Symbol``. 
The name of the symbol. (e.g. `:my_symbol`), optional.\n\n" + f_desc *= ":param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional.\n\n" f_desc *= ":return: The constructed :class:`Symbol`.\n\n" return (func_name, f_desc) end diff --git a/src/util.jl b/src/util.jl index c51ad598bc5a..4ca613cbf7d1 100644 --- a/src/util.jl +++ b/src/util.jl @@ -64,7 +64,7 @@ function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{ch arg_type = bytestring(arg_types[i]) arg_desc = bytestring(arg_descs[i]) - push!(docstrings, ":param $arg_name: ``$arg_type``. $arg_desc\n\n") + push!(docstrings, ":param $arg_name: $arg_desc\n:type $arg_name: $arg_type\n\n") end return join(docstrings, "\n") end From 1b62ddf7b4af8b5a0322b13c34a686d4b50feea2 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 27 Oct 2015 11:10:27 -0400 Subject: [PATCH 142/630] more docs. --- docs/api/callback.rst | 27 +++++++++++++++++++++++++++ docs/api/initializer.rst | 27 +++++++++++++++++++++++++++ docs/api/model.rst | 21 ++++++++++++++++++--- docs/api/ndarray.rst | 4 ++-- docs/api/symbol.rst | 4 ++-- docs/build-api.jl | 2 ++ docs/index.rst | 2 ++ src/callback.jl | 23 ++++++++++++++++++++--- src/initializer.jl | 20 ++++++++++++++++++++ src/model.jl | 21 ++++++++++++++++++--- src/ndarray.jl | 4 ++-- src/symbol.jl | 4 ++-- 12 files changed, 142 insertions(+), 17 deletions(-) create mode 100644 docs/api/callback.rst create mode 100644 docs/api/initializer.rst diff --git a/docs/api/callback.rst b/docs/api/callback.rst new file mode 100644 index 000000000000..56ab1fbb7074 --- /dev/null +++ b/docs/api/callback.rst @@ -0,0 +1,27 @@ + +Callbacks in training +===================== + + + + +.. class:: AbstractCallback + + Abstract type of callback functions used in training. + + + + +.. class:: AbstractIterationCallback + + Abstract type of callbacks to be called every mini-batch. + + + + +.. class:: AbstractEpochCallback + + Abstract type of callbacks to be called every epoch. 
+ + + diff --git a/docs/api/initializer.rst b/docs/api/initializer.rst new file mode 100644 index 000000000000..63e1a231b187 --- /dev/null +++ b/docs/api/initializer.rst @@ -0,0 +1,27 @@ + +Built-in Initializers +===================== + + + + +.. class:: AbstractInitializer + + The abstract base class for all initializers. + + + + +.. class:: UniformInitializer + + Initialize weights according to a uniform distribution within the provided scale. + + + + +.. class:: NormalInitializer + + Initialize weights according to a univariate Gaussian distribution. + + + diff --git a/docs/api/model.rst b/docs/api/model.rst index 266e4162ef33..e9a8ee5a0a63 100644 --- a/docs/api/model.rst +++ b/docs/api/model.rst @@ -1,6 +1,6 @@ -Model -===== +Built-in Models and Interface +============================= The model API provides convenient high-level interface to do training and predicting on a network described using the symbolic API. @@ -105,7 +105,22 @@ a network described using the symbolic API. :param AbstractOptimizer optimizer: the optimization algorithm to use. :param AbstractDataProvider data: the training data provider. :param Int n_epoch: default 10, the number of full data-passes to run. - :param AbstractOptimizer: + :param AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for + the validation set. + :param AbstractEvalMetric eval_metric: keyword argument, default :class:`Accuracy`. The metric used + to evaluate the training performance. If ``eval_data`` is provided, the same metric is also + calculated on the validation set. + :param kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients + and parameters when multiple devices are used for training. + :type kvstore: :class:`KVStore` or ``Base.Symbol`` + :param AbstractInitializer initializer: keyword argument, default :class:`UniformInitializer(0.01)`. + :param Bool force_init: keyword argument, default false. 
By default, the random initialization using the + provided ``initializer`` will be skipped if the model weights already exists, maybe from a previous + call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When + this option is set, it will always do random initialization at the begining of training. + :param callbacks: keyword argument, default ``[]``. Callbacks to be invoked at each epoch or mini-batch, + see :class:`AbstractCallback`. + :type callbacks: ``Vector{AbstractCallback}`` diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index d3ca98574596..8ac5e9bda8f0 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -1,6 +1,6 @@ -NDArray -======= +NDArray API +=========== diff --git a/docs/api/symbol.rst b/docs/api/symbol.rst index 914535f210de..604fadc0f43d 100644 --- a/docs/api/symbol.rst +++ b/docs/api/symbol.rst @@ -1,6 +1,6 @@ -Symbol -====== +Symbolic API +============ diff --git a/docs/build-api.jl b/docs/build-api.jl index 46adc3fa59ce..29f99369d431 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -73,4 +73,6 @@ embed_mxnet_api("ndarray.rst", "ndarray", mx._import_ndarray_functions) extract_doc("symbol.rst", "symbol.jl") embed_mxnet_api("symbol.rst", "symbol", mx._import_atomic_symbol_creators) +extract_doc("initializer.rst", "initializer.jl") +extract_doc("callback.rst", "callback.jl") extract_doc("model.rst", "model.jl") diff --git a/docs/index.rst b/docs/index.rst index 58d0e43c38fa..cf44dc197b2d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -32,6 +32,8 @@ For more details, see documentation below. 
Please also checkout the `examples :caption: API Documentation api/model + api/callback + api/initializer api/ndarray api/symbol diff --git a/src/callback.jl b/src/callback.jl index 049f19432981..14406add7006 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -1,10 +1,27 @@ -"Abstract type of callback functions used in training" +#=doc +Callbacks in training +===================== +=# + +#=doc +.. class:: AbstractCallback + + Abstract type of callback functions used in training. +=# abstract AbstractCallback -"Abstract type of callbacks to be called every mini-batch" +#=doc +.. class:: AbstractIterationCallback + + Abstract type of callbacks to be called every mini-batch. +=# abstract AbstractIterationCallback <: AbstractCallback -"Abstract type of callbacks to be called every epoch" +#=doc +.. class:: AbstractEpochCallback + + Abstract type of callbacks to be called every epoch. +=# abstract AbstractEpochCallback <: AbstractCallback type CallbackParams diff --git a/src/initializer.jl b/src/initializer.jl index 502baf2189e1..3f830860f586 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -1,3 +1,13 @@ +#=doc +Built-in Initializers +===================== +=# + +#=doc +.. class:: AbstractInitializer + + The abstract base class for all initializers. +=# abstract AbstractInitializer function call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) @@ -32,6 +42,11 @@ function _init_zero(self :: AbstractInitializer, name :: Base.Symbol, array :: N array[:] = 0 end +#=doc +.. class:: UniformInitializer + + Initialize weights according to a uniform distribution within the provided scale. +=# immutable UniformInitializer <: AbstractInitializer scale :: AbstractFloat end @@ -41,6 +56,11 @@ function _init_weight(self :: UniformInitializer, name :: Base.Symbol, array :: rand!(-self.scale, self.scale, array) end +#=doc +.. class:: NormalInitializer + + Initialize weights according to a univariate Gaussian distribution. 
+=# immutable NormalInitializer <: AbstractInitializer μ :: AbstractFloat σ :: AbstractFloat diff --git a/src/model.jl b/src/model.jl index f21e42ed6f2e..f28fd0d9f5af 100644 --- a/src/model.jl +++ b/src/model.jl @@ -1,6 +1,6 @@ #=doc -Model -===== +Built-in Models and Interface +============================= The model API provides convenient high-level interface to do training and predicting on a network described using the symbolic API. @@ -294,7 +294,22 @@ end :param AbstractOptimizer optimizer: the optimization algorithm to use. :param AbstractDataProvider data: the training data provider. :param Int n_epoch: default 10, the number of full data-passes to run. - :param AbstractOptimizer: + :param AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for + the validation set. + :param AbstractEvalMetric eval_metric: keyword argument, default ``Accuracy()``. The metric used + to evaluate the training performance. If ``eval_data`` is provided, the same metric is also + calculated on the validation set. + :param kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients + and parameters when multiple devices are used for training. + :type kvstore: :class:`KVStore` or ``Base.Symbol`` + :param AbstractInitializer initializer: keyword argument, default ``UniformInitializer(0.01)``. + :param Bool force_init: keyword argument, default false. By default, the random initialization using the + provided ``initializer`` will be skipped if the model weights already exists, maybe from a previous + call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When + this option is set, it will always do random initialization at the begining of training. + :param callbacks: keyword argument, default ``[]``. Callbacks to be invoked at each epoch or mini-batch, + see :class:`AbstractCallback`. 
+ :type callbacks: ``Vector{AbstractCallback}`` =# function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) opts = TrainingOptions(; kwargs...) diff --git a/src/ndarray.jl b/src/ndarray.jl index db2b300bdebc..347346e6751a 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,6 +1,6 @@ #=doc -NDArray -======= +NDArray API +=========== =# # create a NDArray handle of specific shape diff --git a/src/symbol.jl b/src/symbol.jl index 14adb56f0473..4c656cfedacb 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -1,6 +1,6 @@ #=doc -Symbol -====== +Symbolic API +============ =# #=doc From d19f28fc3d14c9c141b2bc9cc9c3e5c899e2d227 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 27 Oct 2015 11:25:01 -0400 Subject: [PATCH 143/630] docs for built-in callbacks --- docs/api/callback.rst | 65 +++++++++++++++++++++++++++++++++++++++++++ docs/api/model.rst | 4 +-- src/callback.jl | 59 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 125 insertions(+), 3 deletions(-) diff --git a/docs/api/callback.rst b/docs/api/callback.rst index 56ab1fbb7074..ac1c8f36b862 100644 --- a/docs/api/callback.rst +++ b/docs/api/callback.rst @@ -25,3 +25,68 @@ Callbacks in training + +.. function:: every_n_iter(callback :: Function, n :: Int; call_on_0 = false) + + A convenient function to construct a callback that runs every ``n`` mini-batches. + + :param Int call_on_0: keyword argument, default false. Unless set, the callback + will **not** be run on iteration 0. + + For example, the :func:`speedometer` callback is defined as + + .. code-block:: julia + + every_n_iter(frequency, call_on_0=true) do param :: CallbackParams + if param.curr_iter == 0 + # reset timer + else + # compute and print speed + end + end + + :seealso: :func:`every_n_epoch`, :func:`speedometer`. + + + + +.. 
function:: speedometer(; frequency=50) + + Create an :class:`AbstractIterationCallback` that measure the training speed + (number of samples processed per second) every k mini-batches. + + :param Int frequency: keyword argument, default 50. The frequency (number of + min-batches) to measure and report the speed. + + + + +.. function:: every_n_epoch(callback :: Function, n :: Int; call_on_0 = false) + + A convenient function to construct a callback that runs every ``n`` full data-passes. + + :param Int call_on_0: keyword argument, default false. Unless set, the callback + will **not** be run on epoch 0. Epoch 0 means no training has been performed + yet. This is useful if you want to inspect the randomly initialized model + that has not seen any data yet. + + :seealso: :func:`every_n_iter`. + + + + +.. function:: do_checkpoint(prefix; frequency=1, save_epoch_0=false) + + Create an :class:`AbstractEpochCallback` that save checkpoints of the model to disk. + The checkpoints can be loaded back later on. + + :param AbstractString prefix: the prefix of the filenames to save the model. The model + architecture will be saved to prefix-symbol.json, while the weights will be saved + to prefix-0012.params, for example, for the 12-th epoch. + :param Int frequency: keyword argument, default 1. The frequency (measured in epochs) to + save checkpoints. + :param Bool save_epoch_0: keyword argument, default false. Whether we should save a + checkpoint for epoch 0 (model initialized but not seen any data yet). + + + diff --git a/docs/api/model.rst b/docs/api/model.rst index e9a8ee5a0a63..7593e6e68f55 100644 --- a/docs/api/model.rst +++ b/docs/api/model.rst @@ -107,13 +107,13 @@ a network described using the symbolic API. :param Int n_epoch: default 10, the number of full data-passes to run. :param AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for the validation set. 
- :param AbstractEvalMetric eval_metric: keyword argument, default :class:`Accuracy`. The metric used + :param AbstractEvalMetric eval_metric: keyword argument, default ``Accuracy()``. The metric used to evaluate the training performance. If ``eval_data`` is provided, the same metric is also calculated on the validation set. :param kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients and parameters when multiple devices are used for training. :type kvstore: :class:`KVStore` or ``Base.Symbol`` - :param AbstractInitializer initializer: keyword argument, default :class:`UniformInitializer(0.01)`. + :param AbstractInitializer initializer: keyword argument, default ``UniformInitializer(0.01)``. :param Bool force_init: keyword argument, default false. By default, the random initialization using the provided ``initializer`` will be skipped if the model weights already exists, maybe from a previous call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When diff --git a/src/callback.jl b/src/callback.jl index 14406add7006..ea890ab2c269 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -37,6 +37,28 @@ type IterationCallback <: AbstractIterationCallback callback :: Function end +#=doc +.. function:: every_n_iter(callback :: Function, n :: Int; call_on_0 = false) + + A convenient function to construct a callback that runs every ``n`` mini-batches. + + :param Int call_on_0: keyword argument, default false. Unless set, the callback + will **not** be run on iteration 0. + + For example, the :func:`speedometer` callback is defined as + + .. code-block:: julia + + every_n_iter(frequency, call_on_0=true) do param :: CallbackParams + if param.curr_iter == 0 + # reset timer + else + # compute and print speed + end + end + + :seealso: :func:`every_n_epoch`, :func:`speedometer`. 
+=# function every_n_iter(callback :: Function, n :: Int; call_on_0 :: Bool = false) IterationCallback(n, call_on_0, callback) end @@ -50,11 +72,20 @@ function Base.call(cb :: IterationCallback, param :: CallbackParams) end end +#=doc +.. function:: speedometer(; frequency=50) + + Create an :class:`AbstractIterationCallback` that measure the training speed + (number of samples processed per second) every k mini-batches. + + :param Int frequency: keyword argument, default 50. The frequency (number of + min-batches) to measure and report the speed. +=# function speedometer(;frequency::Int=50) cl_tic = 0 every_n_iter(frequency, call_on_0=true) do param :: CallbackParams if param.curr_iter == 0 - # reset counter + # reset timer cl_tic = time() else speed = frequency * param.batch_size / (time() - cl_tic) @@ -71,6 +102,18 @@ type EpochCallback <: AbstractEpochCallback callback :: Function end +#=doc +.. function:: every_n_epoch(callback :: Function, n :: Int; call_on_0 = false) + + A convenient function to construct a callback that runs every ``n`` full data-passes. + + :param Int call_on_0: keyword argument, default false. Unless set, the callback + will **not** be run on epoch 0. Epoch 0 means no training has been performed + yet. This is useful if you want to inspect the randomly initialized model + that has not seen any data yet. + + :seealso: :func:`every_n_iter`. +=# function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end @@ -84,6 +127,20 @@ function Base.call(cb :: EpochCallback, model :: Any, param :: CallbackParams) end end +#=doc +.. function:: do_checkpoint(prefix; frequency=1, save_epoch_0=false) + + Create an :class:`AbstractEpochCallback` that save checkpoints of the model to disk. + The checkpoints can be loaded back later on. + + :param AbstractString prefix: the prefix of the filenames to save the model. 
The model + architecture will be saved to prefix-symbol.json, while the weights will be saved + to prefix-0012.params, for example, for the 12-th epoch. + :param Int frequency: keyword argument, default 1. The frequency (measured in epochs) to + save checkpoints. + :param Bool save_epoch_0: keyword argument, default false. Whether we should save a + checkpoint for epoch 0 (model initialized but not seen any data yet). +=# function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) mkpath(dirname(prefix)) every_n_epoch(frequency, call_on_0=save_epoch_0) do model, param From c051c2a456ffb21fa86500e2953ccef9f0cd7305 Mon Sep 17 00:00:00 2001 From: cjb Date: Wed, 28 Oct 2015 14:09:32 +0100 Subject: [PATCH 144/630] fixed joinpath problem in example in README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8e148c2d7529..2c28bc58f15e 100644 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ mlp = @mx.chain mx.Variable(:data) => # data provider batch_size = 100 -include(joinpath(Pkg.dir("MXNet"), "/examples/mnist/mnist-data.jl")) +include(joinpath(Pkg.dir("MXNet"), "examples/mnist/mnist-data.jl")) train_provider, eval_provider = get_mnist_providers(batch_size) # setup model From 0132f19a17b31bd9e40d4aafbdf71fdf30005117 Mon Sep 17 00:00:00 2001 From: cjb Date: Wed, 28 Oct 2015 14:52:29 +0100 Subject: [PATCH 145/630] added ADAM SGD --- src/optimizer.jl | 1 + src/optimizers/adam.jl | 64 ++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) create mode 100644 src/optimizers/adam.jl diff --git a/src/optimizer.jl b/src/optimizer.jl index d01f92e7d735..c2c8ba4c28ef 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -30,3 +30,4 @@ end include("optimizers/sgd.jl") +include("optimizers/adam.jl") diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl new file mode 100644 index 000000000000..b555773a49a9 --- /dev/null +++ b/src/optimizers/adam.jl @@ -0,0 +1,64 @@ + 
+@defstruct ADAMOptions Any ( + (lr :: Real = 0.001, lr > 0), + (lr_decay :: Real = 1.0, lr_decay > 0), + (beta1 :: Real = 0.9, beta1 > 0), + (beta2 :: Real = 0.999, beta2 > 0), + (epsilon :: Real = 1e-8, epsilon > 0), + (grad_scale :: Real = 1.0, grad_scale >= 0), + (grad_clip :: Real = 0, grad_clip >= 0) +) + + +type ADAM <: AbstractOptimizer + iter :: Int + batch_size :: Int + opts :: ADAMOptions + + function ADAM(; kwargs...) + opts = ADAMOptions(;kwargs...) + + new(0, 0, opts) + end +end + +type ADAMState + current_lr :: Float64 # current learning rate + mt :: NDArray + vt :: NDArray + beta1Power :: Float64 + beta2Power :: Float64 +end + +function create_state(self :: ADAM, index :: Int, weight :: NDArray) + return ADAMState( self.opts.lr, + zeros(size(weight), context(weight)), + zeros(size(weight), context(weight)), + self.opts.beta1, + self.opts.beta2 ) +end + +function update(self :: ADAM, index :: Int, weight :: NDArray, grad :: NDArray, state :: ADAMState) + lr = state.current_lr + grad_scale = self.opts.grad_scale / self.batch_size + + grad = grad_scale * grad + if self.opts.grad_clip > 0 + grad = clip(grad, -self.opts.grad_clip, self.opts.grad_clip) + end + + state.mt = self.opts.beta1 * state.mt + (1 - self.opts.beta1) * grad + state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) * (grad .* grad) + + mt = state.mt / (1 - state.beta1Power) + vt = state.vt / (1 - state.beta2Power) + + #@show state.beta1Power,state.beta2Power + + state.beta1Power *= self.opts.beta1 + state.beta2Power *= self.opts.beta2 + + @inplace weight .+= -lr * mt ./ (sqrt(vt) + self.opts.epsilon) + + state.current_lr *= self.opts.lr_decay +end From 6e0c5c7a8b5d2bf8d309d3937a762ecf5ce042a8 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 28 Oct 2015 10:11:29 -0400 Subject: [PATCH 146/630] some doc for symbol --- src/symbol.jl | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/symbol.jl b/src/symbol.jl index 4c656cfedacb..ed7e2d77c411 100644 --- 
a/src/symbol.jl +++ b/src/symbol.jl @@ -23,11 +23,22 @@ end Base.convert(t::Type{MX_handle}, obj::Symbol) = Base.unsafe_convert(t, obj) Base.cconvert(t::Type{MX_handle}, obj::Symbol) = Base.unsafe_convert(t, obj) +#=doc +.. function:: deepcopy(self :: Symbol) + + Make a deep copy of a symbol. +=# function Base.deepcopy(self :: Symbol) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCopy, (MX_handle, Ref{MX_handle}), self, ref_hdr) return Symbol(MX_SymbolHandle(ref_hdr[])) end + +#=doc +.. function:: copy(self :: Symbol) + + Make a copy of a symbol. The same as making a deep copy. +=# function Base.copy(self :: Symbol) Base.deepcopy(self) end From 762c45e544d431bb1f171b096567af33ca0a2836 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 28 Oct 2015 23:27:47 -0400 Subject: [PATCH 147/630] refactoring and naming convention dmlc/mxnet#407 --- docs/api/callback.rst | 12 +- docs/api/optimizer.rst | 104 ++++++++++++ docs/api/symbol.rst | 14 ++ docs/build-api.jl | 2 + docs/index.rst | 1 + .../Prediction with Pre-trained Model.ipynb | 48 ++++-- src/callback.jl | 57 +++---- src/model.jl | 32 ++-- src/optimizer.jl | 149 +++++++++++++++++- src/optimizers/sgd.jl | 41 ++--- 10 files changed, 360 insertions(+), 100 deletions(-) create mode 100644 docs/api/optimizer.rst diff --git a/docs/api/callback.rst b/docs/api/callback.rst index ac1c8f36b862..2bda7477094f 100644 --- a/docs/api/callback.rst +++ b/docs/api/callback.rst @@ -12,7 +12,7 @@ Callbacks in training -.. class:: AbstractIterationCallback +.. class:: AbstractBatchCallback Abstract type of callbacks to be called every mini-batch. @@ -26,19 +26,19 @@ Callbacks in training -.. function:: every_n_iter(callback :: Function, n :: Int; call_on_0 = false) +.. function:: every_n_batch(callback :: Function, n :: Int; call_on_0 = false) A convenient function to construct a callback that runs every ``n`` mini-batches. :param Int call_on_0: keyword argument, default false. 
Unless set, the callback - will **not** be run on iteration 0. + will **not** be run on batch 0. For example, the :func:`speedometer` callback is defined as .. code-block:: julia - every_n_iter(frequency, call_on_0=true) do param :: CallbackParams - if param.curr_iter == 0 + every_n_iter(frequency, call_on_0=true) do state :: OptimizationState + if state.curr_batch == 0 # reset timer else # compute and print speed @@ -52,7 +52,7 @@ Callbacks in training .. function:: speedometer(; frequency=50) - Create an :class:`AbstractIterationCallback` that measure the training speed + Create an :class:`AbstractBatchCallback` that measure the training speed (number of samples processed per second) every k mini-batches. :param Int frequency: keyword argument, default 50. The frequency (number of diff --git a/docs/api/optimizer.rst b/docs/api/optimizer.rst new file mode 100644 index 000000000000..97b152375bf9 --- /dev/null +++ b/docs/api/optimizer.rst @@ -0,0 +1,104 @@ + +Optimizers +========== + + + + +.. class:: AbstractOptimizer + + Base type for all optimizers. + + + + +.. class:: AbstractLearningRateScheduler + + Base type for all learning rate scheduler. + + + + +.. class:: AbstractMomentumScheduler + + Base type for all momentum scheduler. + + + + +.. class:: OptimizationState + + .. attribute:: batch_size + + The size of the mini-batch used in stochastic training. + + .. attribute:: curr_epoch + + The current epoch count. Epoch 0 means no training yet, during the first + pass through the data, the epoch will be 1; during the second pass, the + epoch count will be 1, and so on. + + .. attribute:: curr_batch + + The current mini-batch count. The batch count is reset during every epoch. + The batch count 0 means the beginning of each epoch, with no mini-batch + seen yet. During the first mini-batch, the mini-batch count will be 1. + + .. attribute:: curr_iter + + The current iteration count. 
One iteration corresponds to one mini-batch, + but unlike the mini-batch count, the iteration count does **not** reset + in each epoch. So it track the *total* number of mini-batches seen so far. + + + + +.. function:: get_learning_rate(scheduler, state) + + :param AbstractLearningRateScheduler scheduler: a learning rate scheduler. + :param OptimizationState state: the current state about epoch, mini-batch and iteration count. + :return: the current learning rate. + + + + +.. class:: LearningRate.Fixed + + Fixed learning rate scheduler always return the same learning rate. + + + + +.. function:: get_momentum(scheduler, state) + + :param AbstractMomentumScheduler scheduler: the momentum scheduler. + :param OptimizationState state: the state about current epoch, mini-batch and iteration count. + :return: the current momentum. + + + + +.. class:: Momentum.Null + + The null momentum scheduler always returns 0 for momentum. It is also used to + explicitly indicate momentum should not be used. + + + + +.. class:: Momentum.Fixed + + Fixed momentum scheduler always returns the same value. + + + + +.. function:: get_updater(optimizer) + + :param AbstractOptimizer optimizer: the underlying optimizer. + + A utility function to create an updater function, that uses its closure to + store all the states needed for each weights. + + + diff --git a/docs/api/symbol.rst b/docs/api/symbol.rst index 604fadc0f43d..457b462982ad 100644 --- a/docs/api/symbol.rst +++ b/docs/api/symbol.rst @@ -18,6 +18,20 @@ Symbolic API +.. function:: deepcopy(self :: Symbol) + + Make a deep copy of a symbol. + + + + +.. function:: copy(self :: Symbol) + + Make a copy of a symbol. The same as making a deep copy. 
+ + + + libmxnet APIs ------------- diff --git a/docs/build-api.jl b/docs/build-api.jl index 29f99369d431..abccd31f1222 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -76,3 +76,5 @@ embed_mxnet_api("symbol.rst", "symbol", mx._import_atomic_symbol_creators) extract_doc("initializer.rst", "initializer.jl") extract_doc("callback.rst", "callback.jl") extract_doc("model.rst", "model.jl") + +extract_doc("optimizer.rst", "optimizer.jl") diff --git a/docs/index.rst b/docs/index.rst index cf44dc197b2d..1176f927174d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -36,6 +36,7 @@ For more details, see documentation below. Please also checkout the `examples api/initializer api/ndarray api/symbol + api/optimizer Indices and tables ================== diff --git a/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb b/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb index 5359f41b299e..0f0bd945c723 100644 --- a/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb +++ b/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb @@ -22,31 +22,36 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 106, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAIAAADTED8xAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAACAAElEQVR42kz9WbMlW5Iehn2fu68VEXvvM+Rw761b062qnrvR6AEQ0E0QFKwhoQEZZaIJRsr0Jj3pp0lmepBkMhpkFCROMooQSKoJoNlo9Dygpjtk5hn23hHLBz3EqTblQ+bLSTvn7Fix3P3zb+DP/U9/qoCqqqokACooWUKvKgAAyBIRsCiCirElAEq6+/UyMiUyPSkcFS5Q640ssFTVtInSzJp1VbVGterWtFHEzEzVWmu9NxZEVESaqFes6+rbtl7Wh/PlPDxHXS7rdh1mUBMRoWYyqiqVkIJE+NbUjktvXU0BoKois0pGcBtjG+eQzCoiREwhDBYsM0UUZSwzQtRTQsSEzVAiFE6aEu4+MlE+mLGum29rVWl4RaUqp25tMumhjdKMEpTUkggSHSVjBJJSQKKqSI1yEds/56oiCSCGswBRjwIYESwgSZKNKMnMQkQEsjpbpQ5eqkA0k9Zm2Ak2yWGal/vj6aDWaK1VAUlElqa7RxYA602WzlYAVBWzMPH84cPT0/Pzh3j66vr41Xl93lpYh0DUEResAjW3QFW2TAeTYelRGd2gNMmV1CKSSTKraKomkFJJEgWSWpFVJaZZqhRtUgabOU2qk8mxlt570zZTuxYUWSJ0VG7p11zP5WtcnmI85vXpcn5cc5QEkUUVEckYIEOAEhdXVRYAj0pInW4WA1nlBBRAIVkKFuv/7/STRGaKElWAqO2PSsysNdk2z0ypQAGwIodfqwpFFXELUsnRe7Qu5qLKTVZrS+9wC7PIjHRvrYmUmSWILIWMRHjBBV6+jYrM9AiqvRwXkACYSBYCqo1KL5R7gEKrQlaG59jgGVVUqAmTopBKzUJVAXRPsFiRKCUr2JUiBZFwAshaK9KTYwt3j+DY6IOZWZWUMDURUBIoVQMplQJmIgMsJ01gxUIlBCgWIKUsJIqkCCK8igUBQQBAugupZhAACEkgCFRCTEQks3JdWzbARoZzVaiMRhXv8PUS00JVRlqjNQune7XWpmWSqQl4mOab+xvtssErY13Xeep9Xpo9qzJrOEdewpGwikqLjtLIzGCKE0RaFSlgwN2LyQQ0IGw2mRC10kKkxIRUFoogWRRPFFUoFAkUSBGBEAAiX+5fQMFKRCSSiKqI9GLKGJHuompmbapg1UhEJgFCzbwSqCREpJiQMhUmgNTeLJlEGcnKzNR6uYtIxU8qw/4vQAAiAiAiqgoQkTQzkh6XcCEE2IAkpQr7J0Qqihs8oqypmbWmqKiqXobKykhjlbQGCJCRsX/nikowI0ZERQRJsCICAPny46kZUFCYNey1CclilYCZhQRHRAYLBEWVRFWgEkWiKjNIhoeaRQ4CfTJVMc3wyCHliYoq+tAIjjVHSIxKR0FEoSyiRAvw/eQyq1gsIimwTMmqqkBp5MtXAKGieLlqsgokARawf+iiKYSqFVAGaSJSESEigGSxhLGtsAgXZjLLM1dHDcg57BznjvMlb+5O05TzzJzCrDdrfdJ+dzicjq2pmc3zPM0tWNfr+ny92LamkErpNbjGFNh0nMf1uqXT0EBWk6BnBKAKAYgsgISSUpZ746CSCohBJi0CGAJVahUJUASAA5UVFQAaDACE2pspW9c2qViSBYEkEembxxY5uF0zNmRWpReTpKq4l1eSEuUFkgQpQGaIkAKyuqn1SZuYwiksFLIg2F8Evpz73E+YiJDc/87c+x9BsSrVZK8SHjNLwJFIpJBa5RTJFAIizABQkMwaiUBpMfHSEAgAJQnJKKiwGJUR9Nzf9hij1nWQFCGSJYVKKaEppbSZZ5ClqgCEBsmIZCJLMopQIFQNQEVFQSn58loXySqIYGwXVbbW21z
NUgV0JqMqfdUx0mO459hyeCKRiWIqm5iI1X67qIpw/2OVUimRQtGMyCySBdsvNsCzUCgl9/uFVQQoBWtAZpKt1ARkaGhDlJimKgOxl2dopVcw4AEvcfhWvm5w2DPK8PTueX0zjreHw02b5jje1t3dab5ZptNhXpbj7VEmg1RvWklMkEUvF43a3Le+TYfTIjq5e4zqH7brh3VcAo6XI+GWlSRBQIUCJZRIJRPNxCSSKd0oSqaICclAlQikqhqqIsKLomo2qSUyCTGdu4hIIfeWT9jNLEhLlNPLx/AIVGpGIlnF8qrMzNT9LhFSiUqyur0UFhGA5NRuXt2aFIoAKlAle5kFQSKrivKT1pPcO1SyRAxZ6UFSlShUce7ToA93VAKNVNUCRKWBSWYlPQpSXpE05palgOE6WhNlQ1SWFwVKASNybJGgZ7nnGFtViCigJEypzcTILkCWlKhVlYkWiyQrhVWlGYwIAqIgFcjMYqqY1X4pAyxUzlFn0VwOh76gLdmNFQqiilrmgx61Dd+uW4XtzSsgun9GomoUKQEFRGSJQTS8sgSQqiBE1SpZSBEhMmGJQNV4eWBaSCBbbykFKEhVUWMxAZSUmDJSNKVJAeEukmXVFBWg00dpI1vLNcYYKOSG9z/+6nq9Pq+H+djYT6dX2Q79eHfq82SL6aFRoZSqstWqygfmSdelX7epr1PmWVQ29ZNOvdt6GZeH7fp+SJjEfn2OAtWUlcygSCNpQomQlCZqZKUWQRoJI6GZKC8hBFEAqZOKVGZkJPenqdIELiJAAkhUVonpdLCqovp+OUpZePEntbTJT9p4YaBUFeYpIMVAFrRrO8zt0A1ZQu41JzMDxZeysTcwBJiZJKsIe2k8WFDV2jsMpkgRkfAAWFMmUNm0V1UxySIhJqCJoGTvqDKcaRgjkJuVIaKZqLR0B5CZ7r73Wt26nOR8PqPQu82TWSvtEGMQJeIxRPeXF6IaCCUqMwEzrapAvNSMdMrekxQorEoUS7OGiByPergJO7hMjGBtpFCSvkWW+ojt6j60nBQpUETMhFollawmWqzMl5mWezcPUVX3yEyRBFRIFXF3AAQKUNUqeoQ26b2DfzWAiZimeFUAUJUSqqrDIUlj62jVt/CI0JRsqObcUnQKoXegmm6owMOH81Nsd3KzbP2WiK5hyFYwtKmXFCJV1KIGIjMTJa3sUOZlEnmlDofJbNZP2g9T6Xn7/JIBYYhE0UTSlCihoylLqqqEalDNFJZSKsF9UmKwBAgIzQQmIgqke6ZASmJ4JjUqARGKNFTWy1OVQtJqmtUH070iQVdIsAAECvmCKJAswgQwI9USTdSm1qd56cveEiQAIfbHtbei+5i7P6EMgCgBoyjg3rSyAFQKWa0TDC2RMUcgIpg0aYAnEtgbDKoqkHvDk1HUGptrL0TmFfO8ACaSIVXC/buw0Kz3U0/44TDv3X9rOs2iTZIwulfSKsr5UgNXqBSlPF+6SRCQTHpAoVlIAsXKiChqphTkMs3z8d6mm63aWtCpHaJiOJ08Dx+brmuMrYgGoFCqIgpIiUIFSgIoIlGAkKCQARRRJAyIvxoO8idvCblfelmVfW6ttZLKDCGLqMyqYgmKkGIWeM1GEtoEWlGeBK0MqFGIUMnIitxKxdIk1AFGWsEEvZU2QMrTE0EyIsYYbVoyfd18bO5+TURUrj5GPtOGzqKyhUO2Sgs6zfJG7Dl1/SoqUiiNjeKiIQQQaKpCLQooKFYSBJEZhKhI7mOqVEYVSoVgBVAFgTBr28b5kpOUAkVRKRKQBECkipWISPWmsuioDSEjWFuSEOE+UGXmjjWpGooiYl1Npfc+WWtqpqoREREQeQFVCpm+X2/c625lFVgZSVGoyQ6uECZiSrUesqC1CWp+9XUdvuUYgyosyxBEVRVZKFRSlIBHoRLhUJZwYJV9aiwVUVDLTKT6fq2LiCr3ykCpqOGVhBQuWZVZqAggWWwoug+BkKqVXhpMZUpVVohKpkq
mAxBjnyyIYZMeSm6cc8zzHFv1ZKq/T4WgVIYji2osdzFNAnAK1ERMBRDso1MklEVJJgIl2HtMQHWKIojAqEqKogRklItUM6OwKqSEZPDl164qBiuLxkQqnTqVkvRKd2yiKvvRkOQQwjAhJ0oSm8TVMUltPqmc7o+3t7fTYUlIjG1d16YdqZmX7ToycN02v1yv69O6Xq7ndVwu6WezqNDUDBVVYc9RFZereB7ue5O6PkauKQJRsqgAzRzR2Ix7d5Sg7qAAhAVUMUYos5FrFUoqvFStNzQx04IksKZP0rSrqEoxc6AUACI807dQdtLJ/VqJ/czuvboUCCZZEapSG4iiEV3YTCYrk41pYkpSlBFRlSQpEgOlWQFIZTnVyCpElGQgk9baPiiwsk2iXWXuy8H6rIRl9ss1zk8R7sNRXpnIAWzumWXaxaKECRYQiMqoQVICJmqAcNpPr3aS1DIxqjQxCkHyfD6f/XqJcyYFUVmshghlJiqIxRA73h5SESVV4qYVklQaZS1l+e001dw8x2dv3kh/9ywf2mwfvV7Wy/bjd1/d33/y9fXj7z99HzmAiXCwEhWZJsomaiKSRFCtDGBFUbIoyEwWSbAxan8RVEj3FYDKVAhUVhaR3VTMClCNqih2IVgVEYmiGoRV3kiVOdwRPsqlobFDGFNplqqOdC+nqaSVJ2VoiUCLoovgUHKSaloYHsft4mde5zRcZIuxuY7rdn58l3Fxv665eW1mWkSam7YJiMgRwm1jAQ7bQpCjnFoWohUpWdWAMBqJqsHK2qt5iWQ15RbuIkWpqCpDAdiCZNXm6HNvS0MrTDV1MbOpmVDTq2mG1+aMKqx1vcZ6uawbtuuW2/ryfilMELEfZcgOqUUNiVZa64AIjFkQwdRpmdGamM3uuV63MUZWqIjsu6WsYhaz9uNUMiqVYEIo0kobZMrpMNnSlpP2JtOs2mS91vMZ6/O2nmtbz170bbgkXCszUgAhM2vsM8IOgUhZFkAT0oTa2FszMwgFL7M4QGSaSSdTdYvyIIuJDO4IbAMkib0jqkhT3TGoMho1Spzo5DR3Oc1lnHK7e318Oy9XvGr3h+f1x6/v3jytsWJd5CI05IVxBSycBqZRpdSEmkDt+zsgM0NVqRqZIraDTpkECkKhpgelWBIIVr1AIUro3iXs+EdSsyiRVTvGwKqsGAFPV/W9sVBLZDGoBaBaCpru1ZWIzBiQrWSSLZMT5KT9VkMGiSyOsc6m56fL5SJ1jcv1+fky1qeLjwczg7nzSit3jxpoRWVUUZyoaVqKyNwEbWujq/n7zOdEEGKSVJYKCyPhQmFJJsBwoYhVum/BMkgJIqMchWCgZBGIsstyO+lsU1NrJczM9EwhUVQ1X71QHrVtEYMYJaUFlqdk0TR8kACQSArTQ4XpI5OBEmOfe29qFCuMTK3SaWp9mnyM8/m8rQN0SpFKyIjIrFRjCUhP94hJxSbT2aR7m/KwpDU9LG1aWuucpuxLbXM997w8T+vg0CZYsfnYGFkKahPj5JujKE0CGRFiUhmZSIWxAS9gsYhm7pdvRrkzXhqLGplRBc9wbF6DMUynIFBQCpWEYkf9k1QYKGCbzU7WhJNhkGs8nU5f/3e+9+++v/74P/+9Pz28/uh7X/vuH/7p75+vH1xMcBObm0vKgLBR1EAWhaoiYpWoQTYrorDDbJqQl82JCIDKpEKoGVFJIUHsVXefi2rvE6WJKQKidI/MEWuEu5R4VkqlhhilgV60ohVHVgFt6KLMlunMoSIpPTC6NGmtTapNBF4YEbKurAyVed18fVq3p6fz83N4lYS1i5ont1Kn5sZRXWwipMy6qCWvffZClpSTBmArXffVtppWIau8MliKEkCrEpSCRoQSVRKejuoUJiJkZOgsfTIoRFtTnXubu5mp18ZWmwz3fZuSfZ58G3LeJENSIAgkRCg1xvB9TIgKVkFoYOeIRKSSRmwZFpGUFNVPfvpeRFVVBNZknm2aZlEdlV6Rmaw
iKpFEI6hAwfdFTRLt1KZFpy7LwVrXaenLIststujcVRWQqiIoKM3MyH03kTbptMwwtdZUxZlJINPMBFX0UQHA1EQoUo5AFphUT8kgRo2oEbENHyPC4VtdxFK0QC+qqTSxqn3Kp8DAHacstZpvlpySEjdT//TNx29Oh+v43KbpF77xq995/a3t/VfixTDP+el9Pn+5oug1TFSUQFqbhCWEqApVCEpBFNQkKUbRBNRUVKiyw6ncUZ+fbBeFsu89SSGlXgBoLRJMHzlGjLXKS1KYLArjpbTXDj/L3vaqgsUoAUX0ZWRklRRH79Zn08msydQaqaKsqpHx+HR+fDg/f1ifn87b8/XyeF0337anwqiUjJFYSxwMSIau3aY2taHPxW2HOYXmGfTUeNnmKVpVZWaMQgmhO6q4nxgTKBAjt0hUr2xeESG6tPnQptlsnqmwxj6pCkW0qYqhTa2qSBQcTC0WWKkVzMhwz0BlRWaM3IGgApS2l899gyRqbMKmbWrTcZoW06/99BtRUoJCU2lTm+bWZ21mVA0gMhMgtYqVVbI/XEOIY1AxL733bksdlkObrc+tH+c29ePxZjocI2345uFRXok+zfOhtcX6ZNo7lTaJNJQAkEqwpBhVUVlJipRIhAQlqYAUFQlcx7au13V9Oo/rFr7GmjWSwzoFsCprs1IL+55OTBQUF1FFaNHEDjpbE2Vp/uK3Pvupm49ubj96/Orp177+G//wN/5XP/u1z/RJ3n/+8NUXX3799Te//LcPOxjDQpXb3EmqwlQBAUM15IV9pKJGiojY1Ptk3M/A/pPszXCAVQAIIRNIKVCouq8s6RjlGNuoUVraKIRgX2OWJHci0Y7gkmpUq4gkRUukJJNgkalu0vrU0FALrbE1y0JEDa/reX16OD98uLz/6vHp3dN29XTkdqGAqtpaaUKSksIhfVLth75MU9cJIkNFMxAoL5dIA5zuDg3FgHvVjn/vYyiFrNKsLIZFclQQLSpDRgD92I43S+9zaxM1rZFKIUdEInfwJyLBqsjwACcPJAylBkPBtxprlY/M8kigAMhehcNDoCbaRLpYF+2ija1Tv/Yzp4SrSrPWmomKiLRpmqfJmk29m5mIBpBFiDiRrMoiBRUjwlo7HHpb6nBYjgfTg+nUjnZYltNyuhVtLIwR27Y1ldvb4/G0LKe5d6VpmyxZAmpr6ZXhmVkoIl8IIQK2YlcBqZUYI3PEuKzn6/VyXcd1rCO3TIekyr5LVWPbe25Sda8gYEJUm0qqqQlE9ND6ocntcV6or+rm177zG3/nF3/rtR5uXn1889Ev/cLP/dbX7z79nX/2T3Bdg8u6YZIOIDJ0EiHMSlRJVqU2UpSYiqQKyWnq86GrESwKBUIClZXISO7YfxazdF/A/4QXAFTkXmWTqCY7KF0UiWJUwZGVTIUTkKAXSwVUiJUYqIQIBKKpqmbCpmJqaqwiLTy3NS/n7fp4ffzq8vjVkz9XRVFUskyERmslbV/isfdpmu7ubt/eHu7mabJuRGZlZl19VIV648Zy1CaSlZUFsJixM6Oo1AJFLDw8wLJwZGDfeHlqVLZ5nnoTkzb1rEikmoiJSDVT94iIzIoERUnLSEmBi1eMKvesgVw9ghlpkMlaiUApZuxqTfsySVdtqpO1bv3Q9JOfma213idVVVWKQFTEBGI29W5qYq3PU+8z2VAFYhAlTJLhqKo+83BaWpNlEZ05tfkwn9RmlTa3ZpyQUiWtt8PBlsPSp2Zdp6mpCYVIZFZGjHEN30QSQsjO+NoGPGJ1+Fbna11Xv25jva7n4dvmmeXbGCTNlEqqqU0UE1XdX2coRArKKisYtZSmRnDq0+nY527L4Xhox1/57Fd/7mu/sET//I//8PIXf8CxfuMX/tHf+63/TTxczt//N4wVdiNzt9aHX1sryn7LQATsVrAEqYShTzofJptV9sW1kkURyciIRAkqXwpCCgUqiEImMxOFgkRV7oTQQkBLzZFVWZkK4f7xp4A
ojWK2JrCSRqhQVIzGkiIIVqmayaSlW65ZzBFxqffn9fzhLI9Za25DCGksiqVszbI10IZriPbb0+uP3356e3Naej8cTjfLq9mOkcjKbQMG6G1cKy7ktpcFLyAdmqoGkX0pK9qaSHqGZ9AldzylOIYwxGNrJn1u7dBSUk2lo6tNsxLmkSVIVEKjKob7GJJVXiPcURmFKF93xNlURERKUE10mXqT1rR16820yzS3aZmtm7757L71Zi8rATNrapYFYRKVCCpMm5Im2UTmxt7AQgEqQGF4BKIZtYlMnKw3O7Xp2KyZqnDKgujUDkubmnVbltl6t9Z6pzZ41tgiPce2jm1grCMzMjO3a54v43xdHzZ/utbTsz9u2M7jfNku1/XZaxu1eaT11lqnYOoqpqoiQlOliKgIhWUVIKgsqinMfbATGZ/ev25JL/yNb//GT03f+eov//J3/8W/+PM//u//7H/4gz/7g9/5/l/+N9/57l/79d/8xz/zzW/94Z/+tz98fP/p20+m3j88fdFa3wl5qqCxRBwhmhC1yeZlmqZmvSm5L8hU2v6SR1QGQCECRUIKQVZUFTTTUelBeYG99lUyq3KvDdz3qYXITK/QYId1pVU1UkWYJtLEQCmUlKo2CkV6KQpVoTV43bb12eO81Vp0EfSIjRJijRpqUoKy1DbN0/H1zavXd/c3p9f3t2/nw83hcNt0AjUSsQZ8Y2KLsXllaG2oACAMMTNhAK7K6dR1CZ3mdqCa+tD0LGKNTFDNSgmiH0wnzJOZ0IxspiogMuGxFekjI0eOuoxt3VwCNQpZ6Ts3kdIUxmxkE7VsXczYDjbNRiuZqbPAaLP2Q9PbrzVTmomaalM1pagQKpVSkZVZWQ5m1l4PoUKKqDbwBeEdW2aKmYq1w3w69KO1Wa0rVLSbaVF6M+uAlJiKlXXIJEHZthGxpbtfY13X1YdfuW4edvXYPCN0czurbtJG5QgfHheKgEomTPYL3Vqpqs4KK0tQrUmyoNp3dlTtbWE1MFjq5ajxjdcff/2jj1+Pu9/81r/zoz/6w9/7N//dw/qejuVwfC57/v4Xv//P/uO7Cd/5W//LX/qV3/yd/+z/8OMvf3z38Scfnh9KCW3ampioSOlebITCqdvp5kanpkqRLKKKBc+gj+GelRQEUNiJfxk7w3CfgmonBr60Q9x7YVQRidipQ7KT1UOYAjahlbKhFVrthDQUiVBRbaJNhSSiCpFIZwXGOcfFt4vnOqpk5xgIILYkVhEWA4q+nF7fv31zejXdvDoebk/Hu9vjq6nNUWXaJWpLjNxip+lnRiQieirVVQiCLQGcjoflruuxHQ/9eHvgyfrNJJ1eEYQ1scVsatMJot5677OWBKFlzoK7J0tk3zxlJisKUVIWKVlSUYxkUVSgsI7eWjNr3djEZmuLahfpgqZUQmFdpnnS/pGWkFpiKiKonT8nUSAL5ZWeXhnIqMyI8AytF1o0hEJBFkduAjnO8zTPp5s71gtJgCJmXbUlQJVAUKkqfTr0PvuI9br6GGON63Ud2zo28TG0p3UCbN0OJ5xuTBeqlXZVM20iZqCJqomqgWK01nqqUmy2vl8bO6NVMnbWpzBFcowRsOkwTRlXq/Hzbz/7tW//bTxefvAnv18jTtNNpr66ub+ZGm2Ktf3gX//Xdze33/yFv/vzP/XX/tP/6v/yOCqaVJPWRbrYJGxVpJpRyxqWuS+LNSMkgJ2CK1FIxzZGOlRMwKwEUVUQRYGQRCWquK/kWdh/ft9v/wwKFeBOkIJYgckSlqmRXpqU/XoKSJGKgiiBqkQmPdI907ldwtfL9pR5VUahHFICiVEWZq2KLi3N6rAs96/f3n70tddvbl+/uZ+XuU+tJCGCApC+xnCItVltZktPXy+qEOmKrgghptPST9bmNi3z8dXCLm3ufZkOh2NbejtObZa+aOuTNUqHNTObqAiskcPDRwzPUSg1Q6qkVFR5sXZ9U0XUtjmoUjvcKtZVTMRUm0qT3vW
v+oKCtN5aN1HR6UaQgqyqRAnBysr0BCoBB0t2UUlFREXFyLQdb/KxywxSrTISolNvvfVlXvq0IKGgiE66TDaJ7kKIzHRVsWUy1W29nM+P6xbrJS+Xy/V6HRsPi928apE+zXo89uWGy0G0SZtIlLLpzglFy9zValQznWjNWmvNmrZuKipCFYpWie6SqjSqChmsFvj47g3dfvb4jV/77i//8e/+988Pj6fjrZai2eWymWmbl8Px9n2s1z//V/fdvv43/tEvf+/nf/+f/59+bAjNpReasCElCIaQFdOsx+Pcu4mKqVQlIJAeEZer+xiVOzUILwPEjg/KvuOTAD1dwMJOJdv3GcliRILKyqogZf+CfGmKFFIhxWYkKM62LzuZOVTlJ1wuzagcFQO1jXFFbOBOLWOhFNCAW2NvLTJs0tvbu/tXr9+8/ejVq/uPPnp7Op1UhcqsAiojxrZtEVW1tHbTT01nkusY41rpkBo29cPtPJ3YDq0tvTT71I83J2uqDe3A5bZNBxNNbRSjdCsplaACEpt7VCTTY3h4VmWkghEVm2PEtnlElCIDFSVNQIiBjdDSLjaZTa2ZkKRKiYiamKlJ66KHux5bRuzzVUUiYgBZERVZyUqke4RXZlVmVkZF5nB/Wa8iSkNV19yaNRWByLIcunUmVSeTpgpTBgpC97HPKRG4XJ4+PLw7n9dt5LZu4es0692rRTRb5+EopzvtB1gTGlSIAsUICS+UVgpFKM1aNaXqYjZbExNtNvfWd1puEyFSgG5mJnPXtK2b19g+++Szv/3Z3zj/8PMP7398c3yFKpswH7pvo2xqmqp2Oh6fPqzx8Pmbt8dv/uLfP8j1//m7/6SaHeZZVaCVggKc0ZrOy9Sa9t72q4QgRbNkXdftPCKCxUpkReyvL7DjP1UV6aKiprHXLJQIdhkGwAJRYBUrWWQBoFBAsJigWC+mULSxZIhQAO5XVKIclYgRlYVkrDWq9uVDelbsSwy4ZG/CQiJ00pv7+/v716/vXr356KPTze18OBShyoKI2LatT+uzh7NoEGHvtvQ+S+q2pV+uCrSlT4c2H2U+zjK1ZjYtc5ubTW06NZuEUhRYkzZTm4q11qG9RAAmdYfRXmhRmZUeUvBIonx4RBYlc9fGclclShNrKko1tW7WdIfmEkHVHX5WI1l6840pqjKZkZmentsY4Q7ntm0ea6bvgFdEhkc6KmN/B7JGwaFCbfuwHxEBlHBp07EfkBrYaQIjJYss1hZjHRvK13U9Pz8+Pj9sq2/XQfdlstu7uTURwe19P5x0PkInoYTDKbBm+2/rxc1dyBLQVKR3aWaiJqLWtFlr3SbT1pp23eF1agPN+8SmaFbzXG/6/a9++9eeP/+wXc9N2rx068bMyYSgUddtPTSbb99Ua88/+BeHN7c/9bf+ox/83n/xJz/68/bmtTB1l6gyiGiTzlPrRlP5iSgGlRxrbKtvq1dUvUgGWLs4DXxRzxW02+3NzduP3izLvDMUwz0TpOzdEvZdDKSAQlF2sExUFE13asauhaWESok0gWZ6JQGtQBUqmV7D3auaqLVOokaOSoHQKyOpsMnu39wcTof7t28++vqnN3cHnSjK3ptnhucYvl6vl3X19bpD2qqWkeVpIruWyCBSyY5+WrSbmS7HuSaVSZbTbF21N4jSmnTTbtbZFVNvJRJA2dq6mlK43xE/2SzsAjIC2tq0TK3vC0U2FRVRocK69MlaMxGSLztqElSKSEVVBJH6tZ++EbHMcg8fSM9y+jWuV79exzZ8G+4R23AflSGVEuHukRGZQRFtbV/QCuE+2PQ6VkTcnk4JeqbHdR1nr6DI5sPHdlnPvp193S6X8+W8hlesrqz7u5vDcVap080yHbks0idhQ+QVQlVT1czyRMaL3KeYqQIxU+t9r+ZCa9ZMRERFqU3LxFo3bdm6svF26lOzudtf+/rPnsZxOz8dZCqE9oNEVUZvTZAA29TG2DLW17dvS/CDP/5X3/7F3/z
ZX/qb/9l/8X9+aFs7tEowMOBmOpvNU2tmKFSBFCTdc9tivbhv8XIQE+B+DUntZ7nApje3N6/ub9+8fv36zf3t7W0zZda6bfkizAaq9rEYFN813FUsTtaiZZWrFrRKqAqgZGduF4XKkozKispyTwpPy3SaZjXT1syYmeVAsEjnWG7a/avTzf3N64/evnrzqk+m/QUm2bbr8+Xy9PhwvjzGtkqFaqcZhU2lq0QGtswUH464CgGzNvXDMsvc+qHPh2Wemgj7PLXeYPKT1kVEQGNSSkssiTRTch/TX4QSWbmzT3ufeltYbM0Ox2U5zLd3N8eb+XCYe29m2kx3agF20SbAKoFI7WLu0k+/d9eamJFgRoSXCEXkul4vV79e/fLs2+q+ZXi547qtPnzfxu0diAohWXCS1J3Uko+Xx+V0uLu5iXEZ4zp8eIQIfIx1u66Xp8t1jcz1EtfLlutG+GFph5s2tb4cpM/VZ/TG1ltgZK0glW2/8BKSIQrbr9MkIU6FaiOaKK2Z7tujKpEygZm0KVVi6jpP1lrNE24Ptz/95qdfr6fnz79soM1zmUqVj3WeF0dRtaT7iEoMXt8cPsnzdr385bd+7h+JXP/r//Y/wWES6Tk2UMz0ps9Te1naibRKZiFczpdxPY/wF7k1qvBCUqmdfqvNDofl9eu7m5tDbzZP0+1pvr05zIclkdfrNbN2AGQX6OU+Pe9qVbDAkgBZrBfaxa6ZTtlZAGBVxc7RQcHU2tTuTof7+5vbm9vD6TQvfWqWGU/bRq3jfbNj3bw+fPzJx29ff7QcDtZEm0A1w7dtPD89PT5+OF8e3C+QbNYhCE8pmfvcbWop28gYq7IkSyjTcemHZtO0HA/NtApipl2bSZvVTCBClTLdN3pqIoCYklKoXVWyE0DMzGxuMCmVLBFp3VqzZV5at9774XBYlvlwWI6n4zzNZkpJH7mv0irSrJnaSNdv/PwrsaJ6MLS1w818uJmmQ2uTZXpEbtdtW2NsGQ4fft3W2FiObVwQMJFCkiVKQFlkZbpfI2Se3ry+txojR72gloHy8/n56fxwHWPb3EfmqPChwvl0sK69SW/Z5rSuvZMiWQ54wgGByIixjULt4jSvrF1PXvtpEjWqUJA7zJhCmEhrNc3ZLKVBmfevT0vrb/s3fv7tr8UPr+a+rddpmaVhJ+pN3frp5vH5WXJAatb2fHl+ePb7jz85f/nHxz7/2r/z7//L3/m//fEPf6jz7RbPaq2pHDpNhSgVK7zsttY11mtUFrijPLsEb2cyVElJt+PN6dX97ek498mmrofeWnvBMYp1vlzGdXtxUsELdY4FvpjYSAoBL0rBKKCiCJHSnfikun8vERTKrE3dDofD65vTcry5ubl9/fr+7tXN7eGofaqe02SnV215fXz15vU3Pv309es3fenWSIGq+Iinh8f37z88PDw8Pbx/Hl+hRNiqskqQmsPhUJpy6qyxrRmRlQXhLH0yNopIFkM1idZMBOho1l609ajhnjHKsqJKS4QgVFVopn2xucvcRJXatE299WYqJqIk95ZHKK331ro1m6dpPlhrHbUjB4XdlkKp3/zrbxIxMppNd3enm5vDsszLcjje3t3dLofjrGqbj1gdG2obLzw/L8Su2d5pLxqVIklGZgqlzxM1b2+XeVlSCIEaKJEVz4/P56eHaziifA33KkltOM7LZKYTdKrWlOLSQWvXWD1DTEBW6rgWUhMIKVUVmHtpcO8HACq0oAI1ShOIuhlU2E21iWp2yZL89P7Nr73+u+1hGg+fm85mWlAG4E6T5+u6LLfjmtt2OUqy8nDzpp+mh4cfv3n9cX744enbvzgJ/+v/73/uhyUvQ0QPNs+dBEQ7iCobwAjfhoy4bhwmu/JWSgMIpFAmYC315djfvLq/nZaJ6L2rWWv9cFim3sYY18t62a7uCahJq8idJha7D4OwKcL2iRfWmhjYU3upQdv24megWcVWbdJpnm7e3r89vr69v3l1f7g/HOa
b4/E43x0P8+3dbFNC8s3b+zcfffzq7cfH2xl9mBhlQvl6Pj88Xj+8f3j37sdfPX5xva4o7rp9yWTJ8IqAX5Hb5p6Pl22M4FrhVdpL4NhUW5WkDm2AElaFylpRycK6XkDHfrZq32TvpEMqtIsKKoqpQk5N59a6mZl1qa6tKcW6WbOqSo9MT1SjtiatSeua4PAXKEc/++XXQE3TdLw5LIfDvMzzMh8O7XQ63dzen46nZZmsMTI2v3puCqsQVL3A1rLTYUJRgDgpJssyLcdZDW3G21dHNUCiNAJji3G+Xp/Ol9gGKit2BaSrZV+MAp1VBEyKkGZb+GW7jMoSRNIHq4zQKkiYVQeBJJIZuY0VCcJEd2nqLuFX1Uk5QRy8sPHmcKOoy+XyvU9/tn9JPD6INqpCikgV6G69sY7T6dW2ObRFstLvbw+Uev/+cZnm+9tX3/zFX/vTP/jd3//T37+fjstk0jBLiU6U9mLwQdkix3YFUkqKSBZUMguAmkKyQr1KlHc3N6fTjU6td+2Ttd5EOE2tTW3d1ut6Hdv64h4ie62T2g1YSgqNL3pvh6T2HSj2QihEbEKxChRtqkvr8+lw8+r17enV/f396e3t/f2rpU1ouajpQee5H47LfHN8++r+sCzsOingmhHX89Pjhw/P7x4+vPvyfHnYtufLeo7VCaoaCz62zPQYVeXn8XS+XC5XdWW284j0DFTvs6qQbNNkZqoUCqlII3Yp2ItcNF+cN5BFT0clBVUCNmVlQNmEikKp5E46UlOh2ovXmJpily/uMINpaw1SntvwqyP1Z379ralO83w4LctxmabpcFgOx8MyL/M8t957a603m/YFMXJkZkGlNaESQlGoiBBR5pXzbDe30/H2YItoG/OiOqm0grr78HB3v1xW9x3bBrIqHQIx2txEo2pX5mdWPl/P63ap0lHIkOEFWhVYomUZOYYTWgEIPDLcASGkdnsJqqqaKukFz8xRNeks5RPn++17848l6zwiCDYzs0ZTLxfSr353+2Y6HVqfzfrw6/n5wzQdIvLHn/+44+H19/72dz799D/+z/6P7Lg5vEamTgcKyZ3CjaT4GIiN2CUsufeygCb2Mwkt8VKPrRtu7++Px5ummBdtbWpNs3zzVZTWNHIA0WezDmmUBhWaEUhIUJTI3WMuUaUlVtxVOwQQIkpQgN77YTkux9Pb+7dv7u4Pr47TfLg5zKqkGU3Z23Q7f/TRq2U2MT1MvauOjDG2D199/vju4cPD48Pzh+en9+fL03V7Xq/rdb2OkQAyR2SFR3r4iu2S1/OzX0Ow+MZ1O+/I03JzEIOKqgIKUY0RmVGsQnrmtrqHR8a6XTKyihGxT8FCI00EL8BOJZU0K+yyu13Bmy963r3jyRebNxGKJhXWDKqJ0p/9H30iqq233qd5XuZ5OR4Oy7Is82GarfdF1URKzUynXY1f+6KxqfzErowgRCIIsM+8Oc7LzXK8XabFwLJmVAdjXdf1uoXH2Mb1WvtucoxttwnZ+THkTkPfvLYRcblexhYozRSkEhpZmVCYigHwUXTxQO+TiPjwLKkEIvfxkghwKw5kEbMzmGVX+ah//Zdv/2Y9fDjHemhdjGPLXd2FciYiiiIj4vHDVzenRU2vm/tWonI8HR4fPv/49u7tz/zmD//wX//RX/7rw+tXrU2UTWli1RopmcDqHrn5vteKZClLC0wioypMKSkZOcbww83pozf3SzNrrfcGQ5Rvvl3WazLVjCZs1iadFrNZtO19XkqHWnM4IFUsprRsk5i1nRJGyZ0bTIrZPPfjab55/cnb00e3N69OzUqboRlEVPyK0Q/Tq8M8ebzR6ZTydLnYFtvj+Xo9X87X9PH09O7dw5cj13U9b9uIdK+CSBYyZFt9u/rYhm8R1xpXzy0yM0tX94jRD1OfJ9Yg4xpO3YWwYydkRNbm2xYjM1C1W++YiLAJlLtzw7542FERM1DDMz0SyErPoIC6m/u8vA+qUClRJFK0dZuqysQ
UuU+K2q0dj4fD4dhaE5qIZErvXTtVOv2pIkuGXsXdq7irXXajh0oUMrNiE5X5cDgebw/U1OaJ8Ijy2LZtXSs2KQ9hRWKMsW4XADO6avk2YqCwavOq9MDYMkvgzF3MApDarUkyR8ZIRvMRplrJSgo7ioBE1TqCXLOiRLoYSDIbX1+v+b23n3xt/mw8PAevZrZdvC82tz4i3QMFCm/uby7Xy+F0q6394Ec/7N3u7t6w5Hp+UObc7/7s//Nf/vTH3/kP//3/9e/8wf9dLWkL4SJMYbFQpDh1g4q2jFEtLIKBKBQjK1SojiSrwdZ1/Oirzz/+6P70+lVwXP0JAg+HpM3E5jLXQaYJyiwgo3zbrplIN5RFpG59bJWZtKoa7tBm4aDCoITuPiU+Bsg2ifbox7Y002Ybsjo91vO6vlmWG2+v/fC1ab6Pfj2v26y51uV6/ZHjDy/PD9fHL7768YoV7jEq4Cksf9ahpTcSa1a51C55HRJo6iO9gEGPuvD61Q/e397eDo7xnJgm1a1MRYssaBVDm/Tsa60NbSdBKRvRdi+wQkVGVSJ2f7ytqmUiYjg4qRBoU5vnWVjUbFooZkUBO8lWoM3qZpmsBLsIWK2sYZrazc2h9UVEBOruYswqX3M7rCOWLY/Ei6cNALx4hGTV8HH1AnhvNjf0ztaXJlq9FSXW7bz52Lb0VVaPGiODY91iJFWGr2JaYNaguOdWhTFkHZlCr6HVqwopKJaWJHNgbOruJlrFbd32Kx8imW42i2lRipaQyCatgTrLTT/qF+/O3/ruLa8tfWvVxYwZtJpmg3CMHNulqswO8zxPh9O2XWVc/XppNh0Oh8enr8a66aw//K/+r9/9h//47/yt3/p//+t/MX26AHNl7nAzioiLwPWFFAYa3GO3hJMSowSQFTtlCpJfPnzxx9+frXGeoioKkcyi9FkP6FyxjSHSTRQZXiNyBqQC2zrWdXjMvmVErON5kC/+CLpbldm+PvAYw70QTTsSObwtjYqoTdzvq1WcfgGvvqEnEemBoy05FYRrjYue7zY+5oc/ef9HQp6wfFifUIDIGGPEFVLoypGZGJFWbVQObGu6u0paVaZjexzv4qvT6XD36WlgmBcz+jKLABqAC8fUFRG+iSgydv+yoKhKAzQzlZJkeIytqjwqPSM9vPLZQ6HSbJrOh9maoZtGyRjr7kbT2qQmHpuoG/f5A4Bkm6Qv2uc2z936DKaPls9cR7Sp9blP2zTPs3uoKn5ioEuRQqxrdPbWrbUpXYmumLoetFNkZLpvWwUiar3GuIyxbT7ol9XDtWuojAjAsgLpVZFlmUVY5e6kIJmIUeXjmlvTXslxHbudtGdQCUh6SIc13UmwomI6dZtb782OrSZc5dXXXs2nKda5IlEyVp9n86rIrWezss4pxK+r13YOhx7vrs/Ps0X4dX18b8qPX70J1W199+7dD7724fm3fuN/8Tv/5r+xOVnqDpSjZMt1zS0otGRCiREjIaB6VCZIjeEisF5NOffD4Pjw/NUPP7SP7m5VNWvsWgbrdtSDmNgQlE7dVOeqUpl2n9YxxroOH7WuY4z1fK01W5lHXcjazXeBlEpRwHZiDZj9/HR5nJ8oSpePng+vPh+3l6k5Ro0Pj+8vz89cZhYk89Xd3duPP1mO84/j8XdTJ8zXzT9AMpG78STkvF5R7+HctgxHPsu4VgQ8eN1WHSXQgitsffIf/cWX/TTLzKC7SuCiuheBjIGInfIXuycod0btLprWUrHy0VuXfsCE3fobW5Y2um8eVXE+Xx+fx7zY6bjcHA40e5EkvVjXbECamf70r3+klACs2en2Zl6O1vvUWp9Vm9B6Ro2xDs91g/u2bSMzzJQQUZ3miaYQS7Xe5uN8WKbjNB2X5STaRQyqplqhl/NlXZ98eFzL1+15rNdzhiN3xxZkVe4WCFERUWNoJVUMbkyJTZDMLfw8fEspI6oyhIaq/Yil5+7q27vObRZRUW3W5nmel6W3aeo2lW1
pn96+fbPZPC7nS9U6dgwuSdsJWfkiCuzNhLoss5nSpB9n65K+Ssntq1chxGZ36V//7nf+5Me/9/n5Q5+P59qCW+XY4tmLHlo5alRsGKO2DdcNI8srKsMoU5fp0GWuZlzmhYrEamoQjUJmQWiqZkqRaZ447fQi09YPh6X3pk2Xm8P98Waej6qihj7bfDgUi4SZggTUKGQkC7Cm/e4wTTcHPcwQlQ/bJ9+vj//t4Pvr5d2HL5+ent69f356QrO6buP58nB5+vDuKxY/++wzbfL5ux89rI9PHA/xkBVQBYU0CEs8gteRdN82Oa/wrXAObppOVGpWgVGSTDvyeNNVubGSIyvSMTbdbcK2bYtwoShVQGEjVESsmZn03vvcp95b671rn1Rk75VKyNZEOrUXZada6XJcluVwmBYjwz29ICRFf+bXP9pda+Z5OR6Os7FJibU20cyYNbYxho8ttnG9btvYtoisgjXtTc20TY1ddm7Wcjgel2We5953GoaaECN8retlu1yetmvUZtvq63Vsl2vGgGgpdyMd7GyPEqKhGMF1q0zWoAfGNWJjOjMyPU1apoyxDY9KRURalNbUmmgTk9abcudCtWU6zXZsdtOmvvm2SPvW4d7er8USEsNH7DY9jowSLxmq2tuhJJd5Pp1OBSSaQJsoIsa22tQvl+fhT29/7hfeHN/+zr/451vPzX3lqC1Xr3S619iK14ix+cC4MDfIbnQlsC5tEjHR1tAJRrcWw8HIqoqsGoXISoq0qbW5mU3adqGJUqFN+zRbb9b7PE8UtnmSppPVpBpAwRdrQBUSyooi9GBtWe70/uZU/e49vva5fP0HUe8e3/kHBDQL1nWeu/SojKrZpvL44bsv4lrf/dovvI/HP3r/p1+Oy7kC6ASqPBBQ7Bp2MhIpQ5U9Pcbz8AsqtEYVLMpB2lztKPNhkh4piEpUZUX4KqIRta5rhJkuTCB37jelT33u8zIfl+lwPE3zZM2s67RM09yW49Q7wQgJtNRJWlfrDYouebpd+qEndhK9Ib2JWaSUwkx7M91HDEXKGtVqjEzJzEwvDMHublW9yeCujRRVlkjXiY6LX4haloOJwdU3TJPWagO1rpfLZbs+Y7uUb9f1um2r/2SAJksySMB3t8XdUCoZGzIKyAzsfPoazID77qrjGZGZO89jOenuNiwCAcIzNNGkChkC9KnfdJ29yXE+eeLp4bGezzm8qrQ4NpFwlS5oLHMfpaWMKF639XBzJ2vVOI9thXvF1ppw226Op+fzh6ff/aOf/pW/+7Pf/af/7A//+XR/g6t6RgrGyPXsNWKsOTZZrxHO8paJXZYpzUJ2znhSV6FkkpSHh+friKnrMps6hgnrrvdmNO0ibDnKUyKzKNpEKWPEiLSpT4f5iPl6fn4+Pw3BWJF1qQxP7wmrMGGzg5jx3Tr/6Pwdb5+09pjn1eqeN0PQD8fhyOvWhEXpE7lu6HVAfPnl5996eN58fPnhR96N1vyyiVaABCUqcfUW1cgobxU+0pydQamAoDJRAsmcelPGdr1qn8BKS2eZKAWZvvtkV8AzWMXcmaGqynme52kyXXq3TJeWu/fMSQ6ZOXy5Xk7vHz48Xy+bb6xcepumGVmbb0X0Q6upx2VNwRjDnGMnECgLFaCWWqav6yoSGVy36zYu7usW15Fr5RYRfDGD2vXmAKr3aQzftgFymmdVZcp2HQJS7Xz1x4fz5Rqxruu6Xq+embudfDpZTKYRpbXtcswMKS0yS30LCWQhRlXsPSWqPP7KbhHQriKiWSPHEFftTK7DFaQ5YrXtuU3HNs2zTtKsj4x3V+1t27b1Ovq8bPWU6zZ11T5BxdpMcmT6GB/ev/vqi68Oh8Plcp6mab1cjocufVJp07LczK9+8Kd/9DPf/u5v/83f/qM//ldn9Ofi5sNHrteoDXHVbWQM8ZE+IkdBVK0X6ekqQLmMfVZgVolGIS7XM1Ib5wgBzLdzblNWp2RKFWy9DqBUEGa0EpGit65
m4tlynsBqk52f5HwRH+uGUTDFvMy32jW38bUP/qvt+Dan82V78tVEqWbCy/WqOi2HQ0YsMEFu6ad+aBd73J7Pfp6l9d4m4uGyebqRJppehSyVbbhCUVHCSCKV2ltnbOCg5yBpXbRZkVWVnioB3T2ZTamArGMrAkgWACnJpma9995UlYrpQDIrSmAqEFMz80RDm07WbnCzHq7XLYZPvROa7lHJim4dkowKiqfo9/7GRzvLYmq2TL3PE00LvnPZfYzn89Pz5fl8PT89Pzw9P43LcC+AKoKftKM07uTcEau0dry7FbAIrw0lVXK5rg9PHy7ns4/h7h4Yq1eC+xQVQBCFGFERUkQgI4GWgdgSXrFVDI+xE5R2R0FqMctb47S0QmBkJbPCzChIEC9WI+XhQl36sYs2wRzy6kO0hPt4vmygtkXcfaxXoUMyqyIK1m5Pp+Px6OvapikitYmK+4jVx3pxj6oxTq8OPS+ffOe7lw+X3/3zf/O+1hF5fvb1GnHe3HP3dYvIXedFAoqSrMydqEkUaZSJFI+ozBwDDpPetffWBFAxRUNKelwv23YdgSShJUxCBBAhutoWHo7wYcC0TNIOyLisZy15Nb+N28Mc+Zv5+u+8/bm3vHnWfL8+WdZhPqLoLFXr0ltrxZQIZrTZBOGJ8/Z0/+btyPzdH/wP18rzCC8HKyhMk2QJUhUQZsRgrJZX1lq1BkZg7Efaj7ft+Gqxg7QZras1YVOzJrp7IWAbI4YbOwFVsybWupmpCqS1uauBWm2yeTI1qpJGUWlzp5LK1uywHA7HpXURLaO2ZiroqpN1UfWq6xhmJEQSHFGeY/U1t5pNygo5xubX9fm6ni/X83l9uozn7ZomKjZn7hqFrJLYPTwkrNV5fbxc7qY251jZK4b33guBKt82hO/mmlXcLSKLRdWsQkqSFhiZEE3fXd+QUbFrMr0yMzxRJTvDX9EgS2siO9N9D76p9XKFIMaQLdR7IyrtistTPfbb3TF9ntDSB1jB8fwYco23H71aun7xwx/per053bW2xLZe0O5ev755+/HY4jjG5fnd8c3rD+/eny8P2/X8+vbGx1Yh58vjcrn8w9/6j37n3/y3l8cPn4/YRvnQWadrnN03ZFBEpAEshIAVVDF6ZVK7jnVDWQb3HyrDr7nJ0Ll1CQU0N3hdIVMiLufr6ljH9XRziB7NtJ0mSosUYwmq0hEvROh76jjdLRg3yWlpy8a/PX3v3/v6L63FR+IaMfU+VVuzTGyxHrSuLT1MGmwXVtaIbJbT3GrDx3ef3s+3f7l94caezd0TJSa7kQvJ3TCrdjqK7UA0X1rpsp2p0TqWg2mvYjq004Da29+IyPSIYdLI/pKKArg7lTWGjzYsijJps247jJmoAkIHoTNvs21ZoxDuNZIYE3PkNgQ0a631RJ23s37v197ihTZfu/PULjSKcvdxvl6u1+u2bZfL5Xq5jHUd7jvuny/JMTtHrYAKjwp6ZDGtWdTIyM0vo7aRl/Pl6Xy+RGZERo7IjIjC7h/74huNQkDGyBgIRw7kYA2M1cNrtwzZadw7ERqJZqb7S7z7LEiq7UoB+jbSB1G7qXYiI/18vSimnzp9ejrHNi7n6ybCiM3X7fn5w/2ru88+++kvPv/88vS+CefllCWXy2Xz0N4//dY32nyCTL3PLG+my6zLzd3z2U8HG09Pp0+/e7T5X/7e7//5F19K4O50B/byIMT2IJrdcZ5lKkZTSnJXuhGpY9R6vvrlGmtsZ1yfnyO34+mwLAelya7eGrFdt8saj4/Xzz//4vJ88Rjrul6v5zEiM4RBxVjX2hvMrA2RETcyL/1okH/35jv/4Nu/6tneP39AuFzdZCpaE+t9FuouQgMpQt11VSImlkFkqk2ffP2zf/nF7/3eu78ogSp/ouiskmJjIBGBlEyWW21ZVxln8XPIMCHZMN325Y3ZgWoqJm3exw3drfLc04cDxVJAd2PwXRgnRjB39dVOlntJqlERoyiygNo
tkjJliEC6qjYBxMT6LK0BCYaKZJR++1c+hrB1qrH2wCJG5HCPMWJdV/cxfGzb5sMjImKU76zsLAap/Ctha8I9UBmxQjwR21gvvm3jcl2fz+t127Z13bZtDc/MwT1VIrl7luyy1+tIH4i1asAdPhBRdGzuAKr2XBZUFRUUqvClA5YsiZ0CSRqF7u7uw3P1UYWxjXU9P6zr4tMvvvpsqRiXyzoqU7rAoyjyxY++WJblZ37upz48fPnlu6/W8IiRcTWz4/F+3Twzrk9PmaHC0HZ+fJCSnNp2ftC6HO3u07/2a//0v/wnf/LDP/2Zr30v3R/OH2bI1CbV/cnskTI7ZT/J3Tci13UNr+265rjG8PQUojgCW5+X29P9ZA3ch4HNx0Dq0+P1+z/40edffu7DY8Tz09P1cs64ekQFxhoRY01fwy2q02LqN1f924fPfuun/ka7+sPjB81CVFsOoIhJmw/KllJzm0REu2WuXWVqlukkALtenp+u548+/tp/98N/9Qdf/PnSZgLarJjgQAWhYuLY4wcRTjiwEZtyQIdFrnrC6aObfiu64Hg4TK0lsqC66+yF2xjh1bWTfY/2MYqw2tSsNdMCAzkJe+QAvU9mrTWbREB2M4U6LKUZxZAQMRVSG0srk8isER5MWqYrdU+Ky+zunumwNkvfi05GZaaZTYclCZRuHAikRwRVLgWTsqAURslKMqnP57M0YcVu4pg1Nl/HtqVvGVEBoe7WMXv4ko8kNaqhrpXwUS8FFYjdQ4q6p57uoiDuNqWsERtKUdWoaJoSSIfIiNi2EVFqynWtKyZz9qk0jwe+bXNwc9Ara4SwQ7aKujse/uJP/3Csz5999gsZf1adp9tb7T0Df/pnf7Be/WuffBzjGVmHPmXl4ebN49OHj28n97xcMR6/P+Mb//N/7x+9e3yg1+Xp3LQFRM3nmkNAz/BMYSQiK2vNsecc5jwZUVFok1FE5TrxsMa2ruu6XrrM4urckOkZKh1Abmtero/5rs6DxpIvp8N8/+r1/f39bDaQK8EcXRqUh6G/8eqz//FHP7++H++fLwADukwzVKUZklkQ42yLJqiSLLgqhVSBJsuwAYjVFTj2ubXlON9dYhu+TkKarOFBoqBWYsyV0sAlePHVtBr34ITldmkHFc2pWTMBEmx78KJYj6gMUWm7lxhQTUTVao/RKGtNMzOHr5FrPNuUJO/kqNYp2hSQCrFSTWSObWBNr4LFuDIZEbGNiowx1hzWF7HGRFLUa6inqiLqrKWVexwNibaL3dE0FUlfnY2eOUY25aZOajF3t3wpZIxMzxwuexgL/8oelCVUqQCqSb3A3SxBhKCssAUiZEQZkpF7eUkja6hqoviThDlBD3NksFpZK0mUU1UqRqzukq7lQ8F1DTbk0DL/9Pbt3Xz4y/MPM9jZV1w8nbGbafJ0c/PFVw/r+P7Xvv7Z51+9GyFYyS5vXr0G5HpZD9Ph6el9TBMDHy6PrPH+/fs3r14DuT48zZ9/+Q/+wX/w+Z/92f/+//WfLPfLEvZue5rUUthFa01QVVhrwDnGGKutY520z0u3VpVjOvH8/L5ZqfU+zOtp+DUihoyKRFZUDtmK6NpLrzm2D+/fG60sn85bbNmlnUWa6pWp4Wvyl772zV9+9cnPnD46+/O784Mnux6mLpBWVTG8t0XRFKSD0oDKHFQpwN1VGxjX7TrPLSk+6vZ4e3e6a9NUq4io45nwxkqVMgorfUiJEpsXZBLJwRj0+WTTqXEa1ffhs0pVuyYwUCoY5yEhqg1AjiFmVRARbdPUtDXzKiZ9PJ59Bafc+rNkl6tJa9PchSXoXajIiiHGZOQILW99XFcAqi1jLVRT2v2rWxGMMTLTaJUyooAU2UoEgJmoakZVayJSuUbEqIx9m5cIRBQQ8ZOAVUBRVcnIjM0TKIFERZFsxsiEZiazarfOTyJLVEdWhQCSvmUiaZWoClVFBKClKS+RksUahYumRBU1UYNBCoKbi0qE6ECq+FQ
DA2DEtl7aLF1qrE+X54dMFKPgYiqV7l4lqtObNzeX8/rw9Hh7ezpvYzoeT4cjgMvl8vr1zc3drT3c/PgHf/rR6zcKXM/+8OF5sWm+u7tk8Ud/cvvtn/vN3/if/Kf/4v9hx9s/eTgvN8euMdKleDvPRflwXrM8ISyR9LfzAfNMrLc39zYF+EEXq2rWKM9RYUissfq6IVlpEa4NsZ4zr4FCkWUDWXtUxPrYcUjI/Uf37/zLefW/9a1f/3vf/JU71oenx6frtQdFyhpU2h7e1uYutP22UwUZiGjIopAc40rR1tty9wqP71tEbuv7y6VSFFyWggfScg/DxGCWlngaKjKZKTn2uERpB5tfWbtrs8lMBWzLkBaQMFi6+9gyKcUuarBoxkalmMjxuEjvI7wLPa6UG8ppdwAI5/PDtc+LzekwCE1U5AVelybuFKFRmD01cgcggcrUv/nb3zsc53ma90Cln0RBkihVba2ZmYiovrQkWSWiCt29/EQ1gdidUvfxNFF79m9tufOwXsK0SsA9o49VuhcLFHbzmxIKK2IX7lUWMiOKwB48KCBUxUqNQIo4SW2EaFjBSClrQdscgxIsUigUlVZhLCTyvK7bZf17P/Xr37l7/cMvf7ytgoiqQRqrosrEWAQ5LUeUfO3TT9+8eXtdN79u7z68i9zW7fLh+fHrn3z6/v0Xjw8Pb968bq1XxuV6PRxnsz7W58P93e3bjw9/8W8fzk9fWX00HbqZAnd9/vh0s7mf1zi2KcoJReTtcfn06x+vfLKjf/T2xtqYl364ma64lIzGFFqJuK/bBuxiQR/X6/Xp+cFjCKVErPeTHbaxxbZpVveS2N59+YO//51f+ce//Btvw56fzmsOoiTR20RQpSWEZtPhSGpktKZGbmOTyorBPS9R9hQoZfBpe9Su/f71P/3B73zZL8fpuLuGUyVKIgOMykAplOnuI8pbbahV0qM3uXt7Wu6tzdIm4aTs1EkEqk0i3CNUtVszaKPNfTod+rLc3My3piK7x3SEsmRnDmXueprdTI9MUaPuS7QUpRjJiswKiEik7z4dTLAYMfRX//53em/LMs/TDKAQFIClYi+nX/fE1Z8oLSorA1moyErfnQqqaico7CSm/ctYYO0yfwV3bTOrtFIpBmgJC3uy10v1qGJJRrBQnpWJSmRVhkymzShFSVG3LiKWlDTahNYbCWtXsyBMCTVBsaAjEZUGVNaomKX/xrd/6bM3n74/Xx/eP1dQBGNzVfMYrJynlpWFUrUvvvjSIzz8/eMjhV++++LDVz9+fP/V5eHhZ3/+F/74j/7g8enDPE+UOp8fI2KZD7fTaT0/Hr/3c/N6/W/+4F++uX3zVuxoy5vD628f7046Pa5bbPnZR5/EGB/efXj7+v7q5zd3x4/fHs/PP/zOpx+9evVqUmlTbPUEwZ5pT6WvNa61ByWQuI7xdL4wyyhVWdC3x7vz8zU93t7dzdvl6cc//u1f/rv/27/12zeaW67Xy5qbT9OOuquIOYSi1htKq8oUhErknmkKloiCrMiuqoKiPJ7fz/Ny+Pqnv/Pwh/1mmjAPKAUmgsptbJ6VqJCqciYqVUtlIDaycjnY6eZgx7IDZKFNbLtrlDaPTMDUjDq3PvV2XJa743GZp7mfzDpqt+UUZGUxd6Z/JHeP4mooM7HerXUrRMSg7DmeqqoKicRLdkcmArnlGKv+2t//KRU1UTMz1WYaUT6CL9kL2qyp6R6XDWFEIiQLXhm159NFRXhmZsUewL77VzBVbJd/7wED1D25MiEqBdllj7tyi5WeVVovHoysksoKD6Ban9gT6qTQpHeIajXCoi8UHaKiJuSGgpTuFkUsIOYMSmWtmm4i8fVXH/2dv/Y3bzj76hRet2ts21/ZVBGcp+PNq9eHw02fljZPfZ7vXr+iye39q/tXr8Mr1vH+3YeB+ujNqz/8N7+X8MpQ4HhaQsRUZsTyrW/D/MNf/OFc8/0y3fXDT3/8ta9Px4eHc9n0ts8
fn47ivD5dv/X1T6+xXnH59scf9V7a66Ob+9N9u8RDbdtuGOpp1w104cWfQGBjjW2M67ZZawTCq8KX+fZ8Xln16rjo5fLXvvO9/90/+A9Pjq8uD18+PabHPM3bFkSHCm1iazd3t0L45ktvSs2Ipo1NkkmUirBETaapi9rz9ujbdmjHulv+sj6Mgmoray/Z8CKZtUUM+ChvKUgLbzlQV4VLMz3MCxpkrn5oYlQRUaQhyO26aXHSNtMOvR/btKiqSNdpTyAsVmZt6S41aoQP960q1/VCBqUm03lS5aImYEbtFr80ZRPNRL6IED08cmRFDV/t5WS/pJLTrDeNMLBSoQqt2jNqAcCoXWbnNej14qVeEIZkxQAEyt3Scv9fUblzHKilBkFBBTSERCMcimqq8EgwHUitPdxgB6bCM7NNxknLoDrUjJpiLCVb7GHuAiOzpIKNkgQjogIqXYqWVS6eAlGTZia9HVtrlBSo0oIVoIK9L6fD8XQ6SZuTHMMPx5siYotpmh4/PNzf37/9+NPcBqS+/2d/8au/8ktf/8a3/uzP/uTV/WlqdjgcDkeuNZ6fzncfvn93+9FvfPrt3/vqq4TNN8dvL6dGRI7p4fHuk0+/+PDO3nx0b8ui7fbtcm2Ph4Hbt9/cND599c0P24/ubm98I+uxcSx62C6H94/venAwAb+Ma2SdJhFtj5crvcpxPV9m45brjeB/9u/99m/86l+f1usXDx8uY3UfYv0aQygAQgDTeZ63MVR4mHvbT1lWcrfnTRFhAshp6ptfK0q9eltsOfXeyya/uTl4+HPFaMAwzqZDdesVAqFnOmOEbywPEG1qaj0koXtMTA7ZGxFf60pRVhnQmyrYgcr0csJVSrrJrqiuct/Kr8PhnmRt2xYRIha5rdcuGGXoRxbKPZtpVZm102lSbdvzJSPCScuMQdL2Rcbe+qvqS8QkSq13bc2mrAgEiu7OLEbsXoCevhtTQjMi3Xfs2iAo391opZLJrH3RUyKSKJqRYi5SiMwR+RK5QVYhd+b67g5LSGtNGlJGM9Om2rw0XNlmSc0CSylawkwhTbJQ4Vr07GNVjDS2Lco55i7Hw6tPP/7MNh1tZIPpxHwQQQ70rtJMTG2e5tNh27bb29vwej4/Pj089G7v3n31xY9/+OknX/vkk4/+7ffPl6f1z/7iz7/7Mz/34fHheFimuW2Z16enm/mQotvzOn3yyUff+Obj+Tl0WW6XWy8x+6k3r2/mw7qO+ea1d70uR53mt+sIvnp7e/uhztX0m+3Te2/WkfO7V+3VV59/MbejtNs/G4Nz8uk8RrTWJzOTMp3rGk8tTI+3fZ4n+8bbr/3dn/3rv/3rf2e7Pj6vH9ZxrcLU+suMR5bV0k/ae2amh/ROQUSIVOsS20Dk1GSs2xZ1XA7rukYOZKmallfVdHNjT6eF1c9rX61rhyJqZT0BQO4eCXuitQK1C75pyk4zy+JwbxNKZctUQY4RuU0yU4pKVnkOgVKQtRKtIiClGqxNsUGgFttYGaGGdb1G+XVb5eHh9nQ5xLL4ga1aU2iBGrTjYVZtrs1kQj17rOvmvR2sqqpQCVBVLZC7FSNJ1fZCm1at8q7dywWQFM2dXY5wj0IBpTsTYVfiQhRVyMqEF7RS9rWvGcvAiFYFxUpElYhVZAiE4VUkMxIl5L6ETNGkDSqlhzbZVKoHE6CpRWm57iFQQWqVxRaTHBy5bePsWyZu7+bT4djl/tNXn/WmUUVp07KHIAhjD9uAr56JU8b9/X3EyKj7+5vV/bQsh+P0F3/+bx/evxezpnOb+5fv330jvv2tz757eX68vT2FX5/G5Xv3t11dCuRxPr46dcU0HW0a41wje+DTu9vrJdnl7FeZDjd3tz9697h6fPub3/r+0188X9dv4TZO09MPfxzLN8X73enG0m/vPn18fLys6ze//tH3f/T+9e2rU7dYt5vT6xb4gw8/umvz129u7k3+g7/z9/7mN392uz6u53dXbNE6/39
E/emvbt2al4fd3Rhjzvl0q9v925+eAqooqiibzkAAxzEGCaVBchwl8sf0iSP/HcmXKCJKIsVKHAgmIGOBCZ2hCqgOquqcU6dOf97+3Xvt1T3NnHOMu8mHuQ/5vqS1pDWf+Yxx37/fdUmx6cjuJEWkW212kIqEqVsqBSJMNaG46SKfgbDmYeEOcBpnBGWhYFSdwzxJV6OVvqxEMaBvq6mauXALRGZO6s29LQV8MDdzwmDuAgFAJbETNCA0xKaGQODWmluoSKMmgV1ZgUeEgYM2c0DhjOANmnqtWmdQ18mhmjYzm5s+jIAgJHR/vLmMJ1s8W3Vl6Tu5r6hPZlayQNWSupo0Fn24uUyjNsOu944A1AWg48yJMCigBbBVAAwEWY7x1c0RUKiYQDLTcKvh3ha3eRhRkpLBY0lsYpCaMytGWrqcyBQLsCyciCopELgwzkjqBBaBjGA0cw4jCIoQI5Igcw7O3iFjiFNVbpGYGeNNzKC51YCeqNdwksQi83Rcb8rbz5/uhp2eIuphkknpnGoKVnaojYwcpjlQjnV89emHBLHdbnePHm3PNlJ7rf7y5csPPnj/8Qv/+KNPQDW85pSmafrJx5/8gT/4c//yn/3j81331otnh4fT4bQ/3/Y47uH2+zO03fpsNmytMaVpmodu1a2GdEaqremqXA2qh20uRiWlfHX1VddaSrHo4PznPx/vPjt+/JWrF/uHca/xB65ewNxO1i6u8qPNdtWtjk1Tvz4dxtN0vDrv3x3kj371l96/eFbrwzjvpwZ1DCGweqJlDJJYun6aa/Ioq54qmFYRIVhSJACBzGQVWrMQJ4honooYGDsYykO9e8xJy4YItoRTn+RUclp7PZg0YkYVIknss9UwQg22AGBOIkYQRpQCGVwRKZwNNIysEQGOoeEVciGaRTRRZivIHH6a2iHC5lbnWo82tmjhMzh4DZ1tMWdPGgB5SnNQMEfCXYfZiSE5GExtjgjnoGSZ2aggTGYmt/uxlGaRI7tJEkIWQcRwNlsQKBBuRNKWLJqZ+8L2QARJiRTYY2IAMCPilNLiWoKIpgtT2gAXbC57YDQnAggGAk5QKDU1jEiJ69JYAtc33S5mCczGCVRcCuHCkGFYBFlESBxAysjohJFb03DMlLxinS3cH11dPHv86Ops26VBtsNlN/RMEjOhjq1hSTqNRdLxMNc4WXMCZsbj8TjNp+uXpVlsdtvVavX69etutWrNbJ7M2/XxZpVXt599evf8xR//E3/yB9/5zXVXLtfr7/3kBxmfln6N8Yqbg3Srdbm5uRnB53nadWW7GhAxcAVBGctpzpdDf3NzY3XablbI2cxzN2wuzy7u/KpdPn/05BXf/d7HHz6/uOiG/tvf/d47z9/yUc/PH83ot/vx/fPLZPX5s0dfffSVf+tLf+h4+nz/8IBQZhvDYJzHrusgQrqSV5t4o44GD6BUyJA9St+11kBnsKZoFuARTIwBi0FdXdvi4EMKd8lMq0w2QygIBmoko6CudbOr+aQRPcoEFgScJEsSI3c3FHNXgyCjcFBubuEK5izkwRDGeERImcHFECeYskK1aAa1eWutNWuxSDmrhgMimQY4smPVNjZAOiU5iHTMzGzRQseGpthFzpkyWU9ttpQZg/jya+eBCr6AeNQh3kBEA9zB3RdfERFHuJt7eGvqas3UzCPIws2NEJJIylmYSy591xVJb2zEjEiwqDNj6dFgRhRYAmHEyLi4Z3QZsjqqhaMtmjcUA0FMKMlJjJgdPdiAnAiRnNAZAUFc0YwIuwDWiefZhpxfPH787PLRZrVed8Ou7C4250/KWdKoZsdpGoaVZNY6GS2IyX7o1+uz8361Xu/O12eP3KxONTw+/MlH5+fnb7314vNPPy2pUMCk87Mnlw8PNy+eP7tar159/vFQOiJx1cz88OozbDOmxCl3fX9zc3d3f9t33W63a61N8zz0q8NxbO73D3f399eMMXRlbjWXUnKCOiXCQvz8+Vurspb
ml5tVO5426+Ht7VmPtF1vV5JWq7UkOWd5a/3s/WdfSeUwz7fjqVJO6mzz3K/XbuYWqaym2lprhUVSYiQkZslLwVyIVNVtnq2Z6TIX+WmWqi3pQrBglO36vD5dfR9ehauD1cmqNwBzUwBjAmYQIW/VakRjcMTgePOsglE0cCRXb3MdqzVtS+MNI5asILg5A5vb3CaD1rTOXitYC9WwgCAia+HmbhC2RLbDHTAQQ8ygqaaUh9UgIgQ/9dgukp3m0ziaNq3aZuPteyuRYBQGMKsRQYEU6GFuXutsphACCBHWWlv+Pl943rE4HqJ5I8Yky8WZc84d58LSpYyZ3qQdwgGA2IkWv+iyXECLQCYmcDSQRe4tS+7N3Q2VMiIT5mBySYSCCwUfDJFiwf754iR0YO4gRCeFyTPBbr15dnV1eb4b1kPXDXkom7KCMaaHaViv15vNeDiUUhyW9DwRJwvQ1jhJWQ0o6fzi7PLq6vziEbp/55vf7Nbd2dVlPYwXjx5vdv1Z3++6/PH1Z7lkPO1fH/ab9RnApLU+3D3knIZhUPWSystXXxz2h812uz07n+b6+uUX4zTeHx6Oh7v94dZN3ZyY1W29WR3H48vrmxa6PtuV3c4tusR97tj5ydU5IgX4OufNet13Q0FJDRPnFy9eYAmtGI0VvY61ELbWwKPvh3Gqc9PSdbkrROhzizpLSrlfmVZXBVwuYQSBADFO0ziOqhWX2TNhAJU0PH721su1fnf/Uc7irTWdEEyEELC22bQRspAQlzAHVdAAo4iEjIbL9pSQQEHfLJWW6zUgEYjwEh9AD3VvEbNOLZrhIoc2BweHcDcPRAlHr2S6bIzCGxDy4jEC8Fy6nLtwXOD+rTVtOh4nndp0PE3jOM/Gq7d7phAQgGhePWCpzpupalOt7oZBbq7WWpsw6E0exdzN3iA+GJgwiRRJqUvLHjuJ5JQkCRCbofvCs8c32hbi8EXevswLHAgzJ1yWYhgIGAGORgLExAVZIIQIcwSQEzpEEBiipwAMVPMAZFzI+NYccbVeP3tyfn429F1XhjWkyD2CwenV/mx7cXZ+Fk33+5PkMj3s728OD3ev5+lwvH99uH01nk4Pd6/ub2769S73w9l2e7bd3N8/vPvB+0aw3aw2m441NuseEg190TZ6tdN4Avbx+DBVr00vz3bjWG/vbh4e9mW1ktJbYG0zUty8vp6ORwBLpRyPU8lDq1USB8Krly+Pp8Plo6vVqtNWH27vLs7POGFOJQsdxiN6nO92q76fJ3398nbaj8+/9P5uu57up3k6eSg0dNM6uWTklDQgSelKl/uCCOxBsVhlDD2SsGRp2mozJonwcRy1qbkyLfkAYABzTGV19c5bP8DrHx0/FqGms4IRLs3WQAb1qja7ejM1DQgUFgJkFAABWtxuFGHLvhacWISIBGzJYC/cH7No6oowoxlZsC8M5sQkjAxmwW5hCosvzMxcw41Cl/OdTdNcqwpl+Kl1sDWbx1rH6m461+k01qny8GxgYWECcAdzgDfdNvPW6ptrQKC5am2wnImWNPTCm0P0BXoPmCUJMzPllBPnnLqUJAyXCPByWofAJdztTurmAUiAEe4GQAxEBEgGYBGLFDpQkAVAINjdEDSzMaACkrtEQzD28ADDCFdk5uQcCE4oiZ9eXT66uBhWq1yyMAXPNMNQ+0fnj4Nhs+r7fsVE06woYjadDveEkZmmw1F1AvUFY3q7v099v05DyfLO73///uXLq2F1bIeRlA4Hr9PmydNoBiGTWZ1H0GamJdHpdPz008+s6fnVo3BaItggsF1vbNLgfH27N+L1sPLQhYL/cP/QDd1bz18cD8f9w/7u9o7CAaD03TxPNy+/uDi7MIim8/F4evX6/tnzZ++9/z6oPuxvPRScx2kOB+47FJrMUhn6PCTghRFGzZGQZS0sHM3U2lwB3C3m+dTq1NoSpBUk0lYzp5TEVLlfbV48++bhJx/Nn2G4qs6zhqqqOZhGMxgJ3aL
Oba6TWYMwWvbDi0UGKWAx2nmE4pJ0JvDEgkDgIJQRMci5IDIEARIuzAU3xGWW5Bq+oOAiHGEpmRgACJFEgKpZtVYtAgSQwuaq2rTWVue5TtVVp3Ga55mHpz26pCRMjhBhbuaIvtTSalW1CLBwr61GLKxJ9ABdXDBAvuDuIxaBRRIppXSpS1mW+zAvPisE8wBgJkHkZi3cEJwQF5b4cq0OVCCLIA8MWJbeEajIhp7BxJuhYcTSt4jF/UgYiBQO7iFvtBNMzH1HF2fD+flFWXfUEbH4DH1dfeXiPRHqV90babVCpHQ87fcPt8ICKalC7oZUhpQzOIyng6xKbQ3m2ubjxZPzs8dXL7/1ve2T9dgOp+vbh+Phxdvv3e+PYvAwHsPa4/OzcRprnQDi5u7B3YGw1nZ2fmbgEZ4ln06TAc9qpcsCwSSTzo+urvZ397vdWcnD8XT66OMPIQCR1kPPzJ98/Al4lFwOx+OpzgnlyeOn7331AzLY399WNDDSGqc69UPHqXO1Lg/uHOrgCMxIKUsWzlRKLt0bWrjqXKu5Qtg0TfM8qmpOKcAhsCslmIn4bPdo/d47//LT3/7B7Y9QYxqn6XQ0natOVWdzd53CnJgC0Kq1qvPYdAZXAAvX6gAYpBCqjZALsbkJCqEQACGlyAEBpEtJigGEkTkCnSAiwDzMMXRRoxIahfHCpViMLYhIIIziHmYtwpkFI1zNVOs0W2t1rnOdT9OeV89WdbZElgjA1c1C3XRWsKY2z2C+4OTMLQCX7EOYtQWwHsv7H0FNwyOlVEru0iLmoAVyj7IICx0gwsLUXSFCTZ08CGJx8wFF8JKVEw9WV4vqIYgJWBkENQuKqpoCukTz0AWCqEvVlICF0GxOJJkLZoJ02qzy06unwyrNug8UOnJ3TB9cPnGsU53ncbw/HupJp6k+XH+xv7n18JxXKF2/Gdbrs0NrOaXLR5fnm/XTi7NgI/KbDz998ZW3yvnq+sOf9AGBDCJpKFeXj1+//HhuD13ucuoawPE4e7O5WepkfzogR78ehqG/GLbH+/3D8eTM/bpbd6WkfP36evfoqraZEQjTfn+c6vjDH3zv3Xffefzs8TRPr26uD4cDkHTDalJd+jQXVxcNY26tzlMCqbON89yXgUhO44mpMCVCIuSSulwGlgSOyNwlDmLuViywTDgYgrnUVk+nU9dlEZ7mKUkqqXNJjvBk95SePfrH3/pn33/5fQbRycy0tXGuc23VWniry6sYDBFSBM/TFLXGbGFokJEIQtyDlqABBjKxoxsuXXAKZwYidg8BBDQEUgh3IETwCDNXAzVrjibknSuqerizMKIREgYhMEDEm2o4MeE8z21u4NbmWVVrG9VnXj8dzJsFopMrtKamas28YSguSrb4NxcQx+ZBSGbuSxgIYgnomztEpJS7lBdHOy/CpsQiQkxAqOZmoUstUsEUm4ZahBsyICLEkhJij0CK+qb/brQk0z2shQCCkTV0awzohui4aNTeqMAEnC0xdmQdc0p52Gy260StnG6nK7h4b/O21uk43V9/+noe23g83N/tx/lU5+N0PI2nk3u11lqN1dnZl778rmJ8cf3Fbii79apkRrRWjy8//vSdn/3K/rObdn+IIQfQ7RcvH18+vjvi1dBRspcPD+e7iyxye3c7T6N7AOTd2eMlDnJ5dXV7f+NmrcWwXW36fjod7k/71dBrtYuzJ9e3Nyihcx1Ph3fffw9d2qx3d3f7/UMWIcachZG1VrPJ6+zAYH7a76fakAmYZq3i1FASIEkGhEAzQgEWAARwB/QQBGJOXelKNouqNUJFCIjrXAVh1RUiMTImefb0vbbe/LVf/Ruf375c4cqiQqU6m6mqVg8N9AB3h6azWYAzSwKLmMkNjWfBYujuc7/4zPtOgIhIECMMGRbvkKMGOgUgZURzNABABUByjHAl6wv2TOyOaIqYgoCAAAtiWjDrKJRTlySDWasa0eaparUAN1X3MBt5+3xFSNpaaLi
HNm/NwsgdTMHdECUiTG2pI4YuyGloqgHhvmSagRwglrUx5ZIXlJmISJLFH9PUF5KJq2lzV9W5aXMPDFx0igsaaVFVIRi15tFC4s0j7hGhgY7hmYKtqdYAF4CGCAKZlk4wCKAwRielH0A6Hta7nJOpyUhP8pP3r95DjZe31w/7/fHhOI5Ha7W22sZpOh1bM20GHsPQOTWw+b33P3j+/NnnX3y4P1wvgzJGfH33eXt1942f/bnf/fEPURuB337xUi0uHj8fD6+7JHPz0nVqfndze3vzKufEmLdnW6KUU6YU97cPzFkkW+i6L4f7QxmGUnLfr0/j/Or27uLi7NPPPn3y9Mnl46txPB4OD6fTOI6TgSNhM5Ocd9vtNJ4YKUPZ7/f7w55FRHIzZRFwWHKOiOABpmCGKecIj+bIISzqbG5CVHIBiqajVsiCVauHlS4RiXAB5H7VX7z9zvXh9q/+1//Z3bynCnWeW53m1g7TwawBmLam1qZa1UwrWENcOL1Opt60BjC6siCRZ5EskoVlObIQYgAgquviiAREoAZhizecGQGwNUtAhbehZBYLA6C5IyESAsgb5zimvuu6oeeFkB8B7mFe52nJ2c91Vqs8PC3gYLV5VTIwtyXPsEwhEWihT8ZPZQy+jH/cm9vSy7KF2LgEms0hIom8OfwTMS5hO1yWaBERjmGual7dZw8Nd3MwRIbAf3MRbOrhDE7oCIFuBO4YAC5hblbdmusCjkRZEEWIQsCSGDMiSgZOuCppu1pxyVXH4XhxIeeXu11muX99Z+oIdLo/PLx+XavqVLXNqi0szBoLvnjriQh+9MlPrh5fvvP+u599+tHdzct5HFPqEvhnH334cLp/74N3Pvnoo47F23yc91ePtvOkOa8gzE0P+/v96eawv+/yjjtmofOzc49otbV5Pp0OZ7tdncculcPxYbvbch5OUzVtEF5Kev367urxE49Ipbu9u71++UXXDXOdkpTEaXGSW0RgHO5ur+9uzs/PAek0nQIhKLF0gSkc5jqaVYAoXde8NWs0JMzJKLggeJz2x+NxT4Q5szCD4zSdkE2kL2VVvSYqVxcX3Vtv/c4Pfu+v/8P/8niadLTpOB9Ox6nVh9PDaTwsyJup1VnnVltTs8qhPp+m6aBmDYjA0bURESTKCQpCKalwEHlCcY8Ac2yLP8G9BSghuy15TXMDwcxRfOk+m6tBBKEhUBIoy7kcgIRTyiWnlJjJCeBNTmfJfUKAVaut8erpKmLpRXmYI4C9iXN6BCKRWbjF4po1WyLPoI4QvEDZFv2Ug/sCfY1gokVYnUSInJHDQc0QwZq7RmtqLbxFm1SrmmlYENKbYJ2DGyyHrlj2ZxYUiB7gYIoeFL5cPgqQpmXbxpTSG3IMEjMxZgLGVe6HQrmAa3m3/5nLvKmnh/OzTavuZoBx9/r1/e2dV0Bg9WmapwhAdAAoXTo/v+w6/vCjH7nBk8fP6nSo8zTOIxi0aJ9+/OOLy4v1sH59fb17dLE/3iXudhePtE3aqlCu437S4zzWWXm9W7HkzXp9Op1MPbyN+0PuS05pOo3VWtd14XCa5r4UaxUNh9VmfbFRU0n93f1pmufEeBoPfbfCpbboYO5zq3d3r/vtOsz3xwMnLikzSdOYTlML9KbhQCCubdKTtpkZCTnA3BqS55Ka1qlWRg60w2mc57lI3m4vAmB/ut+dbzabVT5/9J0Pf/Sf//2/s9+f6mT3x4fjaZ7GqlaneZrmmYiAUV3B3S0gsgAnkPnQxuNsRsBk5iLYD5zYS8IksUjdChdw1JgM9E0kMypGmPtyf8QgJiHjCCBAi8UWSIJIPzVnBIGpMgsRI5EIY2CY24LM8cDlg1XNzLUqr66GsGWKgwsNg5wW52AERqBZvFEBOEAwOIUtuQMCCyYiQASAFmaxXDdEJOX85nFkDGBA0kUSoNGqa9VoTdXGaZqrxfJ5X8oEBKoKgAC5zQ0W0pUGLHpnC/MgFqRgJkmceyy9i7BIkYQ
kyszIQYSEzJmplK5IlwvMw9fPvv6lx2/X64dpmrCj2pQd7+5v1Zs3qq151JTSPFVhSl3Xr9aYCT0y+mefflTneTucmdvt7UuhRASS5NMvXj55+gw8SteVYYWxSiXf31wjxGoYxnEaD2OiDplW67PVsIYQInLTOjetEwl3pbhZbTXnTtUO48Fq7buuqvabdcpJiE+n8dXLawwbx33uU1e6aZyYGVEgUKe5255z4OHmzt04pyJJmzZr682mDH1YhDkAjodja1pSEfN6PMz7g05K4VrnnJKQMdHpVF/dfFHyqu92SHY67RH53befM0l68uK/+rVf/i/+yf+XqUBFa3Mdl6NFi5gtFAlyt9TNkCllWmVIEizRE9A0zc3MhVms77HrUuEAaoiQMgsKQDhbs8okQMEUgkyJkRzdQAEVwH0hX7FiYBEgAHRyDjA0QCRkIpYkTPzmCrysd5dJv0I4tKZzNQ+QACMIX9icgK4RuFThZfm2kkQR6F6JISUhFgglAgRHRGsCiAu5P9w8KIzcUSNaLM4nB6gL2oUJhSPhcmx3Yl+gKuHsCorhaCQIiGYaPnsYAQIEEYFBOBMgSiAbi6ckktGZUxJmRoSIQEwighKAQaIRMrdp1u08xwpwEGKHYbs+He9rbVmSsne5a8NmgjY9VDfzqpvVGsg55wBobVbzTLgZ0t3NF17nt99+zmKvr19tt9v5cBLpPv7443fefvvlJ5+9/cF7bKVNbWwaAN40LFqzOs3d0GeCeTy1Nvf9KsKO4wGxdr0wpc9vXzrOK+uF16ZxsMOwWW/WAyDPk7Y6Tqe70NNpf0iC2/Vam+ahbC/PTfF4eNiuhob5NJ4IDVgg6HCacumvLs9JVqeTgiShnCiZs9da0pYTgk7Y6jzttU1mXpi3m/5h3t/ta5d3w2pYrYZpOu73+w/eeZclYwxY0ne+9x2bYlivHBp4nxKxo9UZRLnjqZ1o4lKGQBKiwoTBc3gpadWvam23p70zHE/KOYY1AioGUwQRIBgnI/dEbB6oAQkNfFHvIieBwHCRQpEgQFJCSwhu6IQCFG7N3SUVVUUMFtJmvNDhkTCQUMJbBHoLtEBEQUQIxTBEjIW2SeIQcHJITBwYjIiuTgzgAWzC9IY6HeQWyxweFxwRoDZvs4ZGqJsZIgHqUlnInBrq8qWRcxeBfedew5TAwNlDQUdAljD1mGlBexNSCoAIRSRoCMjRlVRKwkzEvowQkJyZkqSUiMWDDAozL683Ok0j2vT6+vqS1scYMcOg6WFqTWtKiaRbbbjZNO5xnKsk3mzW41TRqxm3cTSKgBCOOh+++PzTy0eXZnZ/d7sZuu36/O7u/mXJj5+/ePnJZ1/+8pfnCZyRcnHI01gp0XRsVNP+8FrDk5RS3p6rRljOeZrGLz67Do0QnVs1r2belQIoktPh8JCoI/ZpfBiPD4icc/FGTNKv18ES7tuzzc3167HpbjVsL85Gg6axXm+kX6vGdDzU5oSdpJxL36/PE8LyWxJvFPcahyR51125tanp/f3RHM83T1JyAJtnBZBh3c/Tcf3o2e3dy9/4rV/lDlOHaMA5g1jzxoAUxTTAYppP6sgCm7QCrKpMRMi4IOfJo83VDQ/iZ1sbMgMSIVprLEAJYQ4mADcQRiYgd7XFFkwUhMEIGVYo6iAUGEiCCKHGGIrNNMwI0MyYnYnMLCG1cNOAJX9h1lpDDEbk9eUKdDlPBoQjcEAsnjpCREQ3C38Drgogxrx4+QA5nCPYFokBwJKZQ0QRGUonjAUpfBmnEjiEoTXSiilJJqFAcK3N5moeGG7RWrSkraEDBRMCI4pQRAC5MAcHCkqm3AEll8JJhHjRdHvuqeskF04ZuAALdrIKUUQpBHrirV4+v7joWI7H0TAE8TTOk2mtR21a6wyAm836NO9ZCMIyy2rbtdpqra1NhEGE43j67PPPt5urdbcyMEF66/lbx/FU51a6bn946ZVX26fuBn4yMA9KkllKgD3sbyyilJW
7QSB6TON0ON6thiJpYC6Hw/1Yp/X6LCdq2sbjuFmtT8eHH//gu1q9Hzopq6FfoYgjqQUh3l6/aq3tNmsz6/r1VNt6NayGoWokLOvhbLvZ5DJkzlOt4zi3cR5Ph3F/mE+HaTrprKDqaqdxvrm5AYX17gzJofnxYX9/dyslXT2+UIv1ixd/75f/yf/p7/3N7dV6kESZSIzFSRCZkDMSMZuTq4bFRER96sEJjdnYnU5tOtW5TSHgQJZdVlIMLTBEEqEHGQjGAoSSCApOkgQRDRctIEVKuUAmBAhiEEJAISSHxQposWBEwmlBNcDSTas6naZpmuvcWtMIA0Bi4d2jTQR4NMRgWOSS9lMkkYUtsR8KBwISQkQkDMQMKD9F9FCARwQGMBGipJRKkS6J4+Jy+ik7zikMIxzcUhJGaurTPJmGVUZlVwMXACGWtPhCKByJIQECJ0aCtID/E3ICEYQUGETsuYPSldSl1EVOESkCIyVFTNGkk1RgeCRP15QBnMgP93ttRsJNdZqbh4KDqSXh8/PzXCgVqjZnQWEKb63NhJiQs3AhvL57terXV7vL+VQD4tnTZ7fXd+fnj4L0dKhXj97JQ7q9edmXToQlFVXf7s5vb27A0zTp5ePzruvqMQIaYDiqSLq/26cub9brVcnW7P5w64rrfvji849efvbJbrNZb7bIpZQ+d3m2hkg2T6HzejXUcSSksdZqrfQbNUwlMXdplQ3NXU/jMdDcbZwmbZYIicLfOIbgdJwOp72Fr/ozKaJVT8fDeDpAxPZsu9l0qRuGyyd/7e/9rV/56LfW6+3KhQo6mBAQL/2pQAZOYuggjTpw88K547UbszF6zCc/TdXqFMxgjKpSMGWiQMAqiQgJIILMMMwjQAlj2SsJQZYkIT2tEWAZFwUALFoVojDXCA9bPORCpLWFujcws7nZm4PJkrFkSkmYkfur5GHmhsGIGZECGCG5xZISAEQCRiIAoKXMzgAEiIyA/wblgAzkwIwkxIlkeSsQAXAEuQliAoDA6mHqLiTMubk1C21u1cOCGAk9clDBVFgSAQAEMGIQASESMSdOiEtERJAAICIlTplTV6RPlBAEmHJQVJyEJPk6z/lJfvpMHmcsHu5Wx8P+4XCIiDaruhNynU8AHt5KH/3AQ58RKwIg6ul0yyIIETCbW+l7YpjGE5I/e/HiVOt4PLz/3pccdD2cE7vqhMQJUzvNw2aQbnWa6/pyo9o6FEYo2/NIXe71tD+Zq0iZx9PhcNysd6XvgfLd64fjwzUXJE0PN1+MdX9x/oTL2m1erdfdcH6apq5Lp9NECLVVN5h1nqztzs7DYZ7GeR7V23E8WCWRTBJRW5aCjDpPbj7Z6GbWFAPH8TTPp+1qq5BYEDhOpwO497vN7uJsyGl19ThdPfrb/+gffOuz3xtWkMmDgBc7dBJzo4SUAkUxGWVwEnVHp4QpKkZVbdWa17GNqmiURUZ0B8iUgCTQIgiFnJZUNLpbTjkFESAGIiCBMAtA0gAMRyBYDiOAulhf3uQ6iIExyNvC9dQIqE1hqZlZQwpO9CanM1ysIt6keiIs3BAwYCH2IDiGIzMREhOFh9CiB4bFDoAAHosCDoRQmEkwZeHETCJSALDOGgEiCQDdFrotIjIye2CtzeamVWPJvXVAGbmQpJClmokIGBBAuEQmAImYg3jZPVA4IQIQYArJIMmRlJPklBg8dCl35C2tz61nYQfb395PpxGT1Glq86ym1uacQMi6TABN20LfR2aBwNNpX/KqdMUhcl4hCXLenW3DfH9fHz95NtXDeDpAyo92T8Y6mXnVVueZQp3AAHeXVyJpHsdSinolgOePnh4Otzd3dynlJNzaNI51e/FIcrm5ufvJh99fiTl1q7Oz/d3NPM/dZpdltbs8W+8ubm9ORD7XUaRTnXSeqlV3Xm0uVWN/f68613YUpjJs+/68mbkqGt6/vj/e3Y/HY6uTNrPmoXWurdaYWwBhPwy73Vqt1rDS96WU7Wq
TS9p+5YPr24d//K9+9aW9ZmopCwBFOAtwYuQABhbAhMEBhIbgCGCGHuSsc1VX4giI6hoa4sREro0JKVviFA4OgUzECQk4EbHQ8j8GJkR3ROIIimBQC0cPCEMPaPbGK+wRYYRBYRHL4ugNnwFwEWaHL1+5JMzCAssAPhDCINrynoYwBEBgAEQAbW42BuScxQPDgSwSuHAYLQ8nhIMLARKhMAou4yCNpt6aikBKCYLdUnggNESEEIIqxClR12OzaOHIb9QDgogEHGTmtow/39xKEBCACTzeGKcdQwM5TBsG5pKRgMgIM9MGyE7z4X48fjLHOnd5ldqRvFnKA5JRpvBIDBmkK11t5KaAwzSfaq1EZN4Ipe9Xkmjo16rd0O9SSh7zMGw4/Ob1yx98/1vPn7/lga3ZXE/b7fnDwwOxPNxc77rkzVNHfc7j7JeP353uP3/56raUMo17n2O9Xs/jKXEAihF16xU6fvLj74ZPuftg2FysdltIHUnfr3brfpty/uTTV+FSUktMx/1DbYcsJVPnmKZpMp+YnKmszs4BRYLmaY/IOtfb6/3h4UGI3QLC1ZwDTFXRJPWZJfdDN/A4n47zaXOxW2237XjCMCk7SP0Xx7vuaniyP5/nwV3b/FDnQAbhCAsPfMO3dG7NCN0wKvjME3FCIWIUi5XTWOOkEeY53JFaQPUQtZyzNUuMmYUQkDEAmgZSICCRqLk1dSAKNyNGAEf3cAtHNPXZF2XoGxSnmmIEkRBgYCwKTSRbsnqLXog3jwZEClu2xwwhZhhgsbTbidwNIFgICSE4yH256UKEmQf+lNZPAcBJck4JBZGRYDEBm/niuUFcPoTmFghEga3WVmtEIKFjAC0ouAXOHZTIAcKc/f+vCnyToGYAYI/QJbDqgESYQgrlTliYMFiyAzVDDleNh2nusb8oF0POrY4elomZWHIuOW/Xa0DLWbq+Tyml0okkD2jVUpJuVRCg1pNkYHZAHfeH0nHpu0dXT5CYCS8uLkvaKkJJCKBmgQ77h3ti2G3OSuqCsC+Dt9Onn/zw4skzLuubmxvC0Ho8POx3u8eb7Vli/vTDD/c3Xzx/+90X73+tH1aDoM3T7nLDiVLqbm5vWTKzt/HgdartlHLpVtvpWFXHuR3dqjXry9Yj3T6cEKUvyU2Px0m4O7+4VHewCE6uDUybOqYEoMK4Wm9O88R9d355gQgm2A/p6ury/NnzH3/+8nc++93vfvaj69N9nzkJuDBlTlQiyBwMQjIBO6YIchJxMEB3h4x9JhQKAaIwiGYeFqliYJeRUsqJCyMHvnEFGzMiomsDZoiflmCamwUaRoA1t+bNMAwNfNK51eZvHhGP8DA3DW8eEYQYjhFY64Tkkog5ByKSyxuYYThy4JK/AIjghYqKuNDf3V0oKBZvSsUAaot+b7EEeLAEkjDz8mPuvgx2lgREM3NFL0H85gjkDm4a5ozMLJxD3D3AXCEANQLB0AEAjMEDwxHJAyPMEWwOJQNCd1JVFgwAymQ15tEBRFgg1B1VAzwpehNQr3Ozgx5WfTfP8zSPZ9udpAwRVSdBsRrMCbm2URkT+Nj1KcJLGjYXq5cvP6+jnm2G1WrVut7MoukX95/studDGp48fu7Exxo6XUcEmq/yMOHRImq0zarzadLWJHecUp+LO4gMzKf7w8PxUJ8+zY+vrn7y4Q9fvvp4vTu7uHzx+NHFaDrf71dl5SVU52k+bbdnEHV/+2ocZwAathsgvr+/BZubz25IOAz9RgPG+/vUD7mU2to01aHb7LaXL1/d3N0fXjx/Olp99ZN71Jb7Iffd6fgQzvu7oxTouCBCs7rBIQWUftiL/JMf/KtvvfzedbuGnnXWXkpIqtUNtc3zQq8DBBExUuQAx8wc0FqzSlPGJIICSdR1SM2iRaoNEQwB3cksJCEJL3dKCIJAQgDzJUxvvqR0IjAEoJlqdQ9llAibbAZzhuIU4UYA6LCUDhYaj/tP0WcLxxAAEQ1M3IIYGQP
RkYAE0EMbhCIygUcENAVkl0SIaK6oVt3CCXnZfDvyYuRlQQpzwyACdSIKRDKrrVrDMPPF8QQAod5aVbdAWFj/uLQQkBZyKARAAwhy89Y8XJfXPhFEUGsaFICsgGqO2MKkdFRHYFZCpiQW5pC8mUARBp+NJRODq2vVi4tLDiR/U/Q3BZGU+26aTmqt1dENkKLknlmOx6NwfvvFB7c3d/PkKfl2+9i1blary8vd9fX19WEadpsvf+P33e3r3cs2ja9rm9jp6upqijmY1MfVsKktDg+f1dG0YS7w7OriBx9fT83OL6+aN2Ca2oRJnrz1/tnFpav3fRlvD/1qfapjneL88uL13e315z/Y9P3l5aOQ7uZ2b/XAUMf5GBFnm8cspZrOY12tt2dnFxEWmDarNQVf37z67NVn24v+ra+89eEPf4wBXDpKMo2neVJadTnJZpDjw81h8rOrR+cXT1lr3/ff29/+4P7zl/bgObbRj4ynNksQd9AUMQsaCCKiRyiiuLoIgQdjyiTRamSgnIQUXFLomvLczFyFVBJFWKvBTCJAwuHRzDInohTWINDNVR0ihcESl9RqVnVuEU4BHugE6D4vCbQIxGUGCmG2UGuxNWVmSUTMHuHg4FWEOUIDnEhIkHhRWQkw1lnjp6944UQQQMYsZB7NNRBTZrGAxdnMKSgilsWwhZMDI1kQYRcxtVYXYihzEBGo1VqnVtUUAJaNLyL8NAuHC0g9zLVaxAK+ckAOIvfm4EG0mJfAXcMroNY0jWYITqYFmSA8xLA1M4JtObvcPO27TY+GWk3nlAatLVQ1NBFoVUBD8DAnFE54dnbZ5sndu6uz4173D9PubJ2kO46nNh5XZ4O6P758EZA//uTHH3/+o8357q23vzwdjoe7a0A+zuOOUPLOw7p+kzC3dqy1fvm9rz168tbDfP9wukmprFeXfc+PHl8wJ4cY1punT19cXV1U8CI50ai1glMZ+tM83d5+kcrq6dtfrqoff/KqcOo6cRPp+kRpHtvhdAfsuazneX51/QnKar0627cZDOaTXV2cvf3B2/M817H2w6pqvb+/m/YPqUsFi0eM0zxr67ddGEDXb7cXgfN4uNvrfbBnlmqVSIQ8UFtTYGLgwGV00yAgzIUJiSQXbC0FFKIkzBiZVTmkoEQwGaSgXhw90Ah4bkrCEogYEuFYkZKQWNg8V18AaxGgNmuzFlatNkMnBDZwgkALLgmBa60AgILo4GoAi53EF1xVmAeEZJau4/4iEQqQcHJiZRZEZEZAanpyg4ycO8kdclJiJJyFiio0nQkgUV6EqZiR0+K4YeEMwACQkiQsauCmphrNogJA8tqOdTxNbZ5mqDPAGxZKM9CGERwYZhoNY0JvWJtnUHIGz76U+x1CGS3CFAzRAJQhUoAhIgJjBBq4NrDelaY4iXbvdM8f7Z70wypLQotaa5gJAmeGwAg3tbmeVBsh5lI4yWq1Es4R+fzRo64r8/G4GoZHjx5Lv97tztfbHZBfPH4seXV9fXP76vU41/Uqq+vd7V1oPbY5qMsl9R0SpYfb22Ykq9L3NJ/G03EsnAHg5vbV2fl2KcJeXT7ZXl6y8Gq9mY+n+8PnNzc3gqtguLu/Pbs4u3r0VI0+f/nZdp2JoAIkWWnANM/H8b5f567fltK7YimZhc0ijCAs9Hj55JKG4faL14fXD69f37g38FnIcjc4lWgTWnDJYI0M3v7q7xveewuq//qPf+2bd58KSKta3awaajQzs2UBasiOC08GnZCInEvOQfJGfohdHrpEnTfV0R1caT9O1QMSkrASuQRxAgxIRGELoc3DgzD+DZokEjppddPwuUEQLmFJDPcApJREWJ/2jcMAAIAASURBVNwhkRABE4lIOOTcEyGqLZ2TCOeBuZNVPwiFc/FADHzDGCQkgxmFUurm2UEsZWLBnDkQwknYTsdqxljQrSIiQ5ihahRJboAdEdnC4kNyEYpcIsBqm+roVQUwrM7zrG7EkBJ7mFp
FpWgKJoAJDdUswiKCgdQYyYEagnMJylybm6K2EmDMbObzNHKS2QlVQ9PMVUIgaVn1gz95zJu3Hz0/79dtPoHaNE2AsV53WWhqNQGBJ2PgJBHOBCklAra2GDHV2ry7vHr61ruvX10/zPHo7CKl3IIxmGD46pevzs+2H/7k029++3uU9Btf+kByvpn2WYY+pHSpKbH41KpI7rp1gM9z63J3c/OaWZ49fU9nRJyfPn1mioykrqpKROG5lF3qu2Mdd7vdlruXd/fYb54/e+vm+rP7h+MwrFvU6XTfWjvbXUmi4/GIMOVcxrERVxGzmHOXNhfbeTrxsWvz+OrVF83bfLI6PgxDl0sPEF6rh/S5G8dmduo2K0hPceekAgBTzK1pKDrj0m30aBAkQguWoQEgkmNtXnvtMAkGUDhHCCiGt5gBXSHe/FsDBQnAQ4CFAKt5gAUxKzkjegNTFpYEEcjaIhq4oxsGZPOGJAThqokppSScMJwCI5CZ3M3d+tIhpimC+9UyFcUU0smw6XJOIpmHYWDx1maIjOiUWSA3nXnKiKhROUnpIGVwCHA2bxbOWMKghaWUwMENTUO19QMTG7IAkFlQmKQkqc85W60Pdw8PD+PokNzNKkSYAAG6a3Vrjj4ahCxAaQ9abvyJGAO9NUrKCYcs0vNx1nnycNeKqkYMesIZ3KraFFaFhkjkrc4G8mTYvn/23uPhCq1N497bydWYubW225wJ5+nhgBhLYyGlVEppTYUzBh11zIxgen/YO+Wrp89zFvAmktt4fPXy0x999+HRbvXWu+984+t/sKwv/u4/+Fuff/bhz37jD5QyIBQu7JHn5lkcEDnharM+Hca3n7/9e9/5prb57OLifPu4ttPx8PD40bOH+5OZrfJQp1kgwPthvQ6rpXTrYTOe2ubiSbde333+yXSc+lLq6eF0OnGJbugDsWkM61WdZrVxnkxyx2Krza7vs7v1XT8djtP+mBAmtXracwDnzWyhh7uSUTmHc171z56+K9IHAuScsK/TeKiza63zOLqiO7TWqiNYFgAHZDQ3QLDw5tG1ikwcQJ5ESIgYzN0CCBmIASAMIAM6uSAxRIQZILovqmQPECNgCg9wIQ9XcI0wMHVGIhJXh8AspevycvSHJf8ZCB4AS25AwKzjCAoA4pxR0EhZQFil9DkLp+KlSGupRcVkbkDIUoAmjGBgzx0xozOAUpgAz17VLTORIzgguoIzYORcmJeCDC4woEE4C0dP1TvFmOv9cX86ztURJAEvn7Jm7ARq40wABqREGAhCFIFuQRERLoybde43BGwgvN508ylO99HmarOFSgNwra1R6DFOKJ1sV0NpleaHpvV0mhgV0FQniFjnM8F888XtelhJovu7o7u36n3fM+eUuqXn2ZXBVes8hR6KlMrQjnb7cH93/er+9au7/c1Umx2O69Xq/Ol7f+xP/zt/+S/+9//q/+3/XNtv/cLP/tKpSffo8ni8S5OtOUfEsCkBmohPh2NgnJ+fHw73wl0SWg+rMC8dE6d5spzBTJC7qR7CmqRhrrp9fLXaXpzuX+ciq8324eZ6qveUZbvadN1wPLRUOqsK4IyRCUWS9CtOyTXmOgmlOkedp+aWWCrN6/XVanv5+u41h20uLoCKElxeXDx+cnaaX/XwPAzv9ncPrx8eXKFWbbW25q1hGGggQ50cSYKawhQLIZNkMk3SennTFFwKvoGdgbJgMEQWd2/g7EGmgAQsyGhN1YFZABEDMUDNIUidKQwI7c14RhEJcTG1YzgTp4AAdHcngYgQplwSYyBBlwQ8jCkPnYNP5mbNkSUX5DLljjjJw2mOSRGDSAOIM7C4G0EgEbB4YFiqhCsR1OZhEOzuoUwdOUYk4QgD1AAA9K7riagkWfUZRGoYsZOF2mgM0RwSS3EOtWaSCDUog1dbdNLhEGbMjCQBFRnLWlbnTKVFxOXZtmlAdV6Tlzwep7mqTnOWrpce0aD60K13Zf3e7uzx7qrLm5PWLqr
PjVG6fqCUCamZvXz5eb9aI0Cd55wzc3Ijlpw4ap3MTShT4dbmNh5eT8dXr2/raTzsxx/+4NNPr1+3cKGyW9Hw8ndeXV//+T/37//lv/hX/trf/M83P/rixbtfuTuMHXGW3FpDpPE4XTy7OOr+7v6hX/f1MG93Z6uhS4m32y0RUWYS4Zwwxc1+bgFzm/b7/cVVvrh4vNvtxvmUxBDs7uZVL7EadtL1QIU5dUPKub+5edX1CR0xOQqENa3Zw8BhgtNhf7zb3589ubCppu7sfPf4eJxtPr54+0ULJYaLR4922/L6eN9/8cXwYsT+8jjh3c1rLVxHtUm1za01ImCBcCJMKBI0veGWBGLj6rUWdskA4GABQEQgQZoIghkpEwIKExEEvpGqZGBERgWPAIS2pNAAHNjM3asruBeEJIvcBw1goTEjEJq28JDF6IIBECKSJbXWaGGWFCHBeVYKUHcwln4jJDNlyIVzszoZAjAFsnsTSaEQAIKIEUrojgZcAxyhiwDwoABAJ8wsiGSAoRZAXsrQ91kCS8lp6CTnNeKu73rJxnW/f7h5vQeunBzRyIE0BafeWtTl5bIsIDTIFucTZNo8KWXbKEOfh0Rl/zBLCkZoHOLkEAK0Kt269IbiqIyRUKKl6VgTHI7JEHpQFU4AhIinw7FOo6ve3d0NuSOiUgosNU43Yu67DADaYJqhaTxcHygXyOcffvj627/7o4fR1Fa1Hcd20pvTxUB395/f3/3NP/fv/tk//kt/8re/8723v/S10/4IfX56vr25f1nn9uLRIzs2CMNM2eiorVTvzlM/ZIW4PL86Hvel61EwIvaH+9B2miqhdF0eNutq7XB/PR5ubz7//Gy3ksyJOyKaDKZqpe9as1RyKSlAZju2aSwcrkZECK4aieStt55O1Sc9Pn787n6v43TYna0piSo8urrIOe3ncZsud7IyvaGcuq4zszbadJqjuc82TxNmGUrmLoLAohEJBSvYMspGTtWjerDP5EAoJMkCESPQIdGS5CQBYTYGIARUd00gQKihECCQVJ0ZA4IIAWPJOgASQkJsy+S15MwC5hMSUhAAmjXiRTpkkvLyXhbBkgIxPEVzI4SgEBmIJSW23EOuBAeNYCdgMpbgBNaWeIUDB2BkThiJaFZoIoDI4BjqgYyZjEdKWd0QmDiInFNZDUPOWXJKKTFuSumd/YtXcKwPk8/eiYgImVTVit4rrQgAWgAgIxcDa2YEsTrv85kPO+q6Qg5sMSWWNBskwaCmCct6fd7lklIaKKmg4vHV9Y0ex0fbzcXpZMceL55upBcSDW/HfT2NdZrcVdsphs12vTuepoAGoUNeMngiIiyZMGFZn46n+7v6/Q+//5PPXr3/la89Xw2nh+mwv73evx5nPe2n3/zRFx/e3u3rf/2n/vQfe/Hes0nnZ0+e13a6q+1UbdcPZgZuFFCkONb1sEopqVvpVobgEdvdpYYG8HRqq+Hs5tVnD/vx8eOr3e5yrqZuOXcqw7NnXwqxKSqYoMca0R8e2lzbPK+7PjxYJDGrNq8n7KRZZEwpZ2QKMIr25OnjTz66PRxazrkbZIx2cXl2dX5uPKlKnyA65ry5//x7/+o7vzLV0AZ6MreGChhUhBnY5iZDODmDMCTFBoIBiGKK0CKFMZl5MDAVQBNd4H+JKTE6mqKyFCRjYfBAR3/D4kRYsGmGphMEmaE6EjmhI7lXRYYiaaH+uHtKmYAjAoBNDQBaa00hSyRmRLUGIpKIGWkZM0nKwcxA1Vmo58gQRhzq7lKcBYgTYlhYEvblu8dl2CAnh2ZhAZAIILiSFMkcFIt+rLW5SWbpUkqlF5FUUp9T1+duhrFZPdnd3bEyIZCUjVC1w8FKIx4wrwmR2+hEDMBzM0+Quxg2uFozUZACgQmHFCFOoJ4ASWNzuepKjwQDClNuUEa+GVbd+frJo67PhFODDgO5AbRax3mcIyKiAfrpdOi6rvn4sL9m15kkUfZQZCDMm4tnz9760uX
T9d/5u//wk89f/+E/8m+V1cCEf+Iv/amyvfx///W//eu/8s/KZd1uH3/7W/96//Ctt99958k7zx4O+69+ddvR5UcvP2RC9JmoN4hoMwbc3x3WXXd+sctdZ74wCeY0rAFJrZeutYeb24fDo2dPn794gdzfP4zry13fb4G73A+n/QPVY+p6aHZ4uCMCrT50GRFrw1XH/VBQnK1FRoQkkSiX0qVVP5Dwq9fjrMd+GAjYrD46O39yuVYfKWKVEkhEIoD+o08+fXVzOB7382nEyq6QUlqvNv1qve1LUz3CQ0DjkE541NkjIxb345v6ObEpKRAAFKGanJVNTQAJRZMxGYKnn7a+AiEmwyBADg9EDA1oZh7NEyATOaIuAf5FM+puP7XFE0RzCwCMQHebJhNRIQlRETEPZTYLRHY3ceAX31hLMhYTRkZqszMxOhGSB7uDNy0FVucpD5QIEIiIiD314mEcOXDKneQhdQP3PQASWlQ91HkOyB33XZ+HYVNKVzruenKEAEHiU9sjz0whCMxNugUjRJsd785tvc5UiFJgZ8N5161LXyD3VnrGiOoNoYDkcHJsjhMw5q7r+7xer/u+6zerPDCkmPSEYJthWIv0sircJYRwdQ/VYACoOnoj6ihIa1Wbx3HvHtNxOp1Ojhpgm7Ord7/2i+fPvvLZ7enXf/d3L148PTs7+7Vf++2PP7/7T/7T/93jxxd/9f/6n/3uj370+ublk0fnX/rKB7/73e9zxJfeev/86slmuxnWHYTa8RB1XO96c5qPx+PhtutXfeklp4vdpRpzwvV2havd5H3u+3FsR5ve/dLbzx8/a6p3d/eqcbq/2+9vMKfjaVznnkKQCqeOCIauL1lASnXNvahqAEpyt4BmXemkrIf1MGy3JW9yXq82w/pip6ba5rPz7ZOnj1u06g0jUubd+VvD+x+gH//h3/+7//DX/sVeT0PqqCEEdauyLv2qrNerbjNc9mk7xRzUaujJWyAKSABlkIQ4iCRo6wSrLgmlCLQgMx5rnsIkYcrCREGBRGAqJgICYQARSOpg4RCEQUzIQkAYAOhMJEgmQhggLCUlouoRhEqh5CkCLKoI9p1IXkDMTmgRoOamYcFC3EQKCzMhZhyGoY6mAQFL1ACkQy4A4Ik6FJBFrQwWzfuBJgdSpmwpkxBLhEILFDc+HbVNp17GeZxg13LuKYkhcOa+H9I4DsO6RdfkABZhKEJpl2CjOUseOkrOtatTqjrnHsCHAE3dkcUcAJq7VHexYnMdVWrqC7NB0jLwqvQMaIzVpzX3hcGpGSW3INIMKcLbNNc6g3liGrCYYmC7vb9jZqIhE1ea1A5G66uLR8/f+32rxx8cVL797W+71idPn7bTw/Xd59/61g//wl/472r4t37v25n4s0+++PTjj/9H/8P/8Gs/80c+/PSj29uH7eUFJxaUvuR7tzaeCHAoopS227P1emD1tF7PbSylpFXPwznIrrAweqX917705en29ru/+3tffPbhw+GLNtf5YSbibt332+6DL33j6bN3jZwzswpmysOK9vceLQKaW2KmtLbTLQGHOQ3OOWXqQ5tGONBuWHdvp/3ZPmWe6wER+7w2r+6eh8Ilf/G973/3u998erH+yuWXPrnd//D2e0li062TkLBzcObgnHtfjebCyibq5EyZAABaM2ftc0ZK5uGkgO4QQJgydW82uMiSISzcF9Il4ptYP2AEKCASSSyldQZwQAdOCBSIyZfoGzROCEhCrr5YcsGnpq1No+uGJSoFI6CHYgB4qlOb55Mwp5SZyJeZY7dKrS6yVGYIyeYBVIwQMSBR13XNAs2FhCtPBuaWOHeClIVEBBCNBBDq6XCcbtf9er3ajOO+36YwCRCIRgApQ9+vTjVbQM4MligEkSHRaigeVIau6/KRTh2sWfrwpuZJOMnCYOynqgETZ6YETEyFc8mZJbEkJI8ghCzIltCNJQKahpuHIWvVphNaTHNTgs2wmvxU34DzsKTu1GK
9Xpc8PH/23qO3PujWjwNgf3vz0SefX148effZs3/+T3/54bBPQ/dP/+k/NaTdbrOf7v/Mn/3z/+P/yX/4q//sX+y2xzre//Czj1+8eAYaKCGAJISp88qpb/2QwHk+nkrZUGROnjqPLpX1Y8au+Xw81reev3N//ek/+gf/1fVnR2vlVHkeeWpGjtu7k8jdpx+9fPvLb/2+b3xte/XWvB0EoR6mnLAxnY6TawvmHF3aXZ7miXNKyB0mV6ttBAug1HXdsOZ+uFD1aZpK7ptGVcurlNYl9vcxPXzta19ZX15+74uXx+MxF4mqhYckRAApc8oYiFmoQRaB2XSCEEJEI1cIsCgWC8AvA1bwpVGl5gpvzLmL5oHdEQkJCHyJkDkAECVekLdgQMpiqABKiwAYgFogUEhCycYIXmPBN89tVG3uboFzqyVhECAGU5jPBOjNrLoQkQghhQcSQc6csrXGjBDhRCHCKVFJQFCJexRAB2AohVmEOZhKphW4IYIRpMiub3Tyx7vT7c2h6/KwxrTqV0OnrTFjrUpsORNLztFnIsSEzoxCKfqUKWPpejcCr+aClDwaumVhITY2MHF2ZBWjlCMhY3ZJlgiQGvOAHpDMWAh5gLyVoda9UuYGYeR1qvOxYFqv18Bw2I8aTWsDgKYzsAzDmnN6/Ohi6C7XF5eb8/PTQdvpYZ7Hx289v351e3d7NHfEcvFoHRGhdnu8+9N//t/7j/7KX3l+9fj/8L//PxrK/WnsV0OHzAHeXPU4pBTQlUGPxz0aIbAn2pxforXT6frsaserjTazSqnvbNLf+OVfvbluH73U0WMCEj5Lq+1pf3j5cNrI5iLi09/7sd492M/Ho3fePt6Pp4e7gOZcUvGUsmo7qfV9GRKrTkBxmg8I3Pc9MnFOscQrASJitRlqrRAwrPrHz9+S3RnUdhhv7vcPv/693/3s+jZttk8v3rq/eV2nfT+cp5Q4UekYAHKl2SEWAgq0wgkJXDXcXZVEhFFQBDGS8+Sg4E2NCAkJPEUgs0YQpohYyljuhhji2R08nAUlIbEjG2ZxNUAkSgs0SNg4jBmBHZuC0zw3NUBOEOgarTGTs+BPtQQjU1AkATDmjJwFMUDZQjLjPAEgGoFTztJ1uRRTmKsCOiF1qQTxLCmI0pA7RjCXlAnAAHCZFaMlqLZ/PW63h/t9KasppQnczMIh1BTJk5BAl0AwMnIGoCScMG0Gwky1zZ3lqgRA8wRETuBmwJAUXDKbUoQxA/cByQiJIyCBC4Qb0CwJgKlDgXBJZIbu3rwxgIh48zrNWCAXoUbBjuE4JAQZhoHAEbHbDdPoz1+s6nTKWTjpfjrsZFO1jcdKMSBQ4CgiEPL/+Zt/4z/5X/zHv/DzP/fo8dPf/u1v/oEvvVtWgxCTYc4duj1+dMEid/cPfbedp5OkfrPbOWEikUjinVFuudnptC79N7/9zdfXpx/8ePz+3YOcd48uH+8uLnR+2Dw/h5Db6/HbP/rx164udvPmd7778VcN18NKcjIgNm+mREQuhiAiXZZx1KZR9bgeVpk4sET4IiZpOrvbdPLA4KHf7Xb9+jI4/eQH3/onv/kvvv3pj8b97aasuv6MwMHn+/kBMZiR0ATRGfrSmfvslTEQ2CoJQ6ItYgVTgoSISFZSLxQOVNDmOrVa1RsgRHWDhsJvdL2OpugGiAQpWBDDS8KcEQkIyV0bmPtiH4UkLGTChN48nDhQEYDcgDHcwYM8FNHB0B0RCSlYghgkyUACxA7BZghvlCsWAB4ZCPsVdQNzhvCsjuhIEcKYBwTAlBm9BYRgYWEOnGutWrGFNqRE4jHu2/39cb09gEffr5pNVScFcPfEIrhCCxQKEMQeAQidiIgZdJFlzOrhgQiCGKgKwYIwYHiAs9GQlutytCKYKYXGTOQUwICApBiRYAXbYjlrSDCAOfFCQPXmCz0+5czdDj1EpI73jx5dcaZC9vTpU6BUGC52q8uLJ7/7/Y/+zB/7E++8+/6PPvr
44f6W+FxSHus4DKuPf/id/+n//H/9pUfP9w83t7ev5und3e6cF/QZ17Pd4wgc2yHlDB7H/f7q8VPBzgmFyBBovUFKYtGv+3rdPvnw81/5rd/7/OS/8Of+6Fe+9tXnu6/+tb/5t/+bf/73dDz9O3/0T37j9399/9aT7//Wt/pKP7d5cjpY1xkR2FxtnnJKgZ5SX8DcdZotycq1CRX0PNUmGToqEGhuWTBy8hbCmTLmoZf1xfT69ld+7b/5V9//nbzpX1w+PjWeIJDTdnNp2A7H1yjrNg2nlFBaYu9Sp1NlzAMxB5hXYCeSAiVDIiIiwrBhGKRPrSLl882k++k0zsdpquYqocBOAeFASJ0kU/BIpTCjZonEkFiYourMkOuErQZ5EkGEeeFUMUoRIkpnkU7HmThAWoAiObGaKYKEpYhCzCmLDKttLmNECyf35dgjKfVmcyBSIilUekAKceEgoQirENylFQp4hlbJLRgdSVF91mbq0WhqFROmHr2maR/393sMiggkG0+jgzccI1A4iaBVVYOUilmLKKE9JRMyxNHdwxxSJO4lEQrY5JRSGDLMKJaZcyokbCDQKEyDKhNITi7mFmABjjq3VWICZgkObpqCNSJ0rjZPAHG6r6XHrs/Mvt70HhpAn378WT7bIb2VpHt0dvELP/MH//k//60vPvv8/S+//+lnH55O33l9+9pKMtPLs+0f+P3vlZh/8OGPv/eDH4LqrtuNp9YuYNP7eJiZ8tR0s05CIT6t+r7O2l/ltNno3f328TNYd2bVZ1fnjz7++Nvf/+yHn++/+od/Vuf27/7p/+Dmlf7eDz6/fj1Op8Nf/xt/89+7/2M//0d+8eu/9As//q1vfmXevP/lLw+DHQ5fGIwAgCQEKkDTNAXGaj2YGaCXMhAlCxd3V8u5uJqHI1DOXFIqfZ+HC9isX/74u+O0f36+PYbL+bbdzovnwdLgsNnr3OJQQ1OIYENsWXKWJC1TRJe8BlfdZxx63vYiffKUEgoSwabr5hKY+o3JbPowne7u9uN4mOZDm1WIE4twzpLQJLjkgsKek0JMDDNTCKUZCBKHQiAjKAOHV8qQpGsK4CxdSkAeaggCkxAmwZRwrjMAIpaUEsHAP/PHH4sAcWHKANaq1hqtqptFMHFaDf2wEV7OzhjcIyEhrlNfmAE8EuSFRkdspnUcqzZtzsc9uNF2u0uFgaN0fZLkWtGjjqO2yWM2VJacJQOQKQinrtslIaRgEghq7cFMl2R/Ji+ZMualNemRxjZV9K6kPq9S6ogEI4OBMJVBkCvIFDELpZVsV9Sxh86u4eBxmmszc/V5ns21X2/WqzN3BKSu64mlzZWIGrQPf/Q9bY79xVyn9x9fNo2//y9/+emTi0ePL1f9ENFyF++98+IXf/5n//if+GPC6V//9ne++Tu/++LZkz/7J/7I491mt94kYp9P4zx2JQ9dSeDH+1sWOb86x9U6DatxfyjrHeQNBpZudXx1951vffO/+Ee//qWv/uzji/L3/sE//r/83//WX/9bf/373/8tUxkPh/v761bbarfaXJzn8mSq9etf/7qftIPJ2r26grbQOp+OjNCv+tJ3p9OJBUVkaqPDBOGBDAgsRCgeABDIPjx5Kz3/wEtq93fXh9cPt188POxXq83Qd12ggEuPkV1xBom+y6UEoxF7otTcJ5gZvc8CZB36qgxM1HW07cuqT/1KUsopJymZ85ClpMS5z9thV/JKkoSjayTMXS5Dzqths+6H9VDW6261KixBCZIQAQLM4W4utc6JISUJbIuCS1AEBZ1K6ggTEUPU1FXJRhQLpKd0HZEgJCkdMRm6c8pE/XgkhEbomHmeFECYObFDkKkvhicWypnIQDJjSXUGQQ0P8zr5UZVMOXQRvPg47Vfbp9v1usfMFQx9cm+Is2mHsk1FzZxQUCY7qHcrzkjM1Ajbgv3NqUGAOxExO4IbcdFaEYOwE80pupxWJJQoAEtUT0IMFkJCjNJCp6pzjdS
ZA2NomqYx5qlWF8acGGkTFA18vV0TRCZh9BC+f319ebV9cn754Y8+jLRaDSUNm//eX/yTiu3Xf/03P/iZL7/1pffXl+dD5rOzs5SH7/3o+jd+7Vu/869+5cXz53/5P/hvX56txtNDqxevr291OpZMQ8fqTSMo5xTYpRVzH+ackkLqV1ubHMZ6eHX3L37zWz/+7OYbv7ht7WY+3f/mN3+jdKuz7W7/+tWf//f/0n/6v/2f/W/+V//L737r++fr59uri4eb16/uH56thttPvjjef9iOY/UxEheW0g1qp9bOLndX0zQdjreATiFAwEmIiBeOd9Nm86orabOGsiHwYRi6Bptu3XVddVfJ945r7maPhnFqanjvXgl6oHuKLOiM7i36Doece0DsMwa4B4uTzEJY+pJFmFJIz569SWuYvG8dStenvl+ttnU/RauALeeUBXOSrmdOE3AwnhEzo47H6l4rGoai8+nYIDR3WOtpWHOfh7A8Q4TDsDrLOTfbtfgi46RxEqlTgAP23XayEBER8UQsJNVyW3XjcQ/E7CpUmDkVZ3F3kQV7whoOBtEJM4BQZzyrhkVVr4ANCQD7VpGQSNo0Hi62m0ePLlWVSUgiGGbXMGBPDOJ4UqsRDEwes8OJIoMXxACeJDFGJgJDAvcIcguLIEpmEQ6ClFLpcy/i3pyoUwRiT50gBkY1AkMNnVqjBwNokTyLzW0+UTALh3lrc8bCaTYPlqQ2a/VQD7fTzcP2rfXTq/6T7/7ryWl19ej3/8wf+o/+0p9963L7G9/65t5cuk4VP/zeh9M0ff75XVL8j/8Hf+EXf/EX+rwZb1+/vH613pSr8y2Bi5CZJc6tnkrJQ9cpONgch7lfbWDoyZUI7z99+cNPX/6z3/idy8vLvksvP39ILo8uzlUKSJrnw9e//sG//W//4p/6M/+t/9f/8//xS3/E3Gyufrs//L63nj5cw9Sm8f56aqe8Xo8OLIdhfXmBZZSHeW7T6WDWdtvLROV0vCOMzfo8563kJI0w9xFuoKh6d3+TxvlqWF3buL95tepwtbs42nQYK7P0Xa4qhOpYE60X8HIS77Mn5JwLUhKcBDszI57cAVIYQVckpQFk06G4Q6u5ap7NEWcAKClrPqK1BdlAgLlLnD1lAWyEgZzRWyq1tmxuWqNVrzUinFKQELiVwhQpTAlWl+dPhGie140KpZP67Wm+A9fWqhTcbLdCBF3qSuqYGWdd9V3botlxnBpCYkYiYxImCq9EQCJN4c2RBwWRchZVrXpEIhJOHVozQOPEoXpxcbY726xWXWvm6jlJhQlBhZCREnJIttBwECGHaqZApKYSlQSSdK4tYE6Srbq7IHL40sQHRGbwzJKldGzLBFM4E1HpyGOGpbvJJMjjNB81BsidSNFAmBrA8TBqm9frLSsTB6ipKwuq+fFwEsbjWG9+cPz6V76y6bNotPn+137zlx9fvfsHv/Hog3f/+PX1w83d/n5/4ovtaiibVX+2G853qy+u9/vDuNusyNNcT3NFBjztD5uhV62qmjkCEUqZpmnoOgOkecbxMJ785ub21c3xZIUI1Pzs/Enq18CvOka2lmT3z3/1tz789AtxeP7isYpNp1ksCQXm03vvfTDf39VjW+H67n7PBNt+fTydjtNn2/v95eW5cIeQ2qzuhwqnruspSN1SKUNaSwDUEK02PdTbL1YX3eEByt4uu3KEada72zodvVHuV8xpylZAYyY9y72n1Oox1tYxUUkdyeRKmQUzeFASaDrR3M2rUlYlSY5ITpxMZA46zWHAkuosmgDNI9B0JAzmYEEWMZ+BlRGbOQMyVkL3AHdyg9ZaQHSFABugdXnd5w3BpkgPWK1Vp57ZNttH6Vhw/xpsnOpnZ9uVABqnIikLJXfFruA2E8zXd7RvjSRSZmJEhEJi6oiaZEXYZUk5ZzVE9NKhIZlra5UTS5dyzpVstVu9897b/TCkIgBgxCyeHDsujM6CyMgLRQIQgABMtVJm89q
sJUZc6DBAjBYMYUaQCIFQuiJTI2+VBTAMgJEzODIVpqHL4B7hE0p3+3Cc6mmV1itOyUAE0SkPHWlExGq1kpI92CMBSmBrVUW4dGmapkxop+P3v/v9r3/jZ590fUn8k89ef/LD755uL59cXD3a5HcePRrWq3nGaZqQ/HS8f/X5nqDbFTkerkPH8dBgl6tCmysPa3e/u7ndPX+03p5NwKrzbCpV11nq/c39nR3n2qXy7PLJd774yf3D9VdePH/3vWff+/DH9aB9H4+fPvrtf/3rv/DzP/feW+/87B/8es6AEcfDfLla9086fH3x9lsf5KyH2/u5unBMMyCHYBBBhBGJKcx6IqTcFeKi7uJuzUrmwIiqYMGtXnTlOpehu5znGOP0cHiYYo/pvAMO0o5ljG4fTUpKNPdp2w0XQQenCaOloigOqWujMUZXCrO6TWQcOGDHWViwgzSMrQmcEiByoXHGmBHcGxIosYJRzpkFiBsDxCIEZorGBJJYE8uxeVgguKsDYspifqLkq3IZmtRORQSx1zolWSFMRCCE69VqHPF4fOA/9GeuSteV0iNJRM68JRB/I4jVnHG9zd3AyBaoQSq8hkiJu6GsA2DW2QIEI6Xi4a0FMRKBOULQ87euHj960nVdSh0CizALaAsIyoU4MTBpzOGKgGZeW4tQphLhJMpJAav5ZF4JBYmW1SHiYtZhD4vw1dB1uQRGYDAV4V5o1efImRTV1GmOHF3S1FVqVb0aB5r5rG3WlvuuW23yUKT0QiKckkjTJimzZKtaurxe94fjw3oznMZ56LrHF1tyRSRtOE9VEm4uzp++8+Vutbs/nObjaVVkmg+vXn5+PN09e/Z4Oo03r292211mWQ9J5ykcN9vz3PXj/c00TZv1tmzL/nD46AefBZfd7lyj/pN//C+H9eWXvnrZZzo9+GfXPxmn1mYEP15e5C9//WuPHr24uHz6+vXNz1xe/uGf+/27ty9immAaOQeXohpQx6HfpMIQARDa5jrVVCRllJxK2jBlTstiE0vp6HzD51dtmuonP5yODwYy+dxscggiFuHt5qIUZgoWrKyO1q140/dZzrpy3q97zgThKEokRBIOdW5M6jEmkdx13WqdVlvKQzd0XdeJYGA1aCktcE4gRAxnnhFaRDCuAA3lJMnCW3gQsKtYDVMIlfFkENQPqXQhuZXSS1JOKsIISaS4IwmmRMRMYhb3QEbEGFLrJIvAHZAQc0rMLIyJ4rIftqthnOa7xGOXABCnSTMn5i1oSpyZk7oHAkZCiG7QEGhNzAxKW211t7l4dPGkT4W5IWgpvcdoBuqNEubcKToyoc0RIwN61LneYWwtn8zCjobcOFWPGUAjxJURGIGBDCFaQ22OCAwoTOqNyDMLAxEEUE8RENxay5wzbwi6nvP5buiCpFrL02y1d7em7lTyAIA6qaoBzgt3z8LKegCjafSut/vDw/bskRsU5ot1Tuv07K13ADtOuV+fl25z98PvTYc7gJNZPT5cH/dffO3rX9+U9OMPPz3brnORgDbup2hVU5lrw3qbCKfj4eHuVeqC1KOCQwyP1/+dP/9n7h+u/87f/5Vf3+x+7pe+8Sf/XMI0/uSjTy3sS+//3Fe/9sHlo6vt2fPXL++2E/2hP/Sl9bMBujPfvp752Ev2kjkl2Q0WPh6rSD6NszmcXW6H1RYAAUJSVzJTpgqwLplIAgp4bdc37XAQ7nfrjarSdNwmmfrd0dpYTxPUU8KZvKbIqy4P3SCPSLCUlPNqvd7eldub/bXHjMF9x1B1GmsujRMndcLsmjTxZAZslFOh3tDn46kvwkglcc3QpjSHIjrGrYODH9iByQBHCyJmlJYLT+K5IGHHjMiV5f9H05/1WtNk+X3YmiIih733mZ7pnWquYnWzmzO7mxQpURzUlCEBvrEF2xIMG4Ig+8rfwveCLwwbsD+CYcCAIEG2IYuQLMqSmiJ7rO6uqnd+xjPsvTMzItbgi108n+Cck5kxrPVfvx8
QNoLs1lt/PSQmeolERKyWglwSs0v2AomyjENxMe9m4U4pc8rFGxAk2t/mFmPSpnPwe5IViRISBBML5RvmpH4cigCPtdagrrax+DhRa+AeJdF+zDNTyoHUER3ZhOj8eGx+TsEAE5OwZNVN9dSsOrTAtrU1D2dErtVo7cnVzMwU3VRRBJKwhdMFUYeRiBDUrAIYBIE4IQpxZhKZClloHgAPfJfzzlfNyTCgtQWEGHjIg2dXdKAkQkmKqdx/OFmtGHZ9tZtGeXqswMO4O7x+c7/0+OzjT7ZzTUK0tHeff3777OX903Jc/sU8jL13jk2m4fx0Oq/Lzc2Vbuu7dRHsxKbbORD6uopI0346H/t2zjkDuLf13S+/enh/6gvu8tSW96TL/+bf/Td//MnNf/Zf/skf/HP+7Pt3/6N/+9/66vMvQPrt4eXdi89Ssj/6w5/Rh/W3f/KDmx+W3fe+4wD9fB441rotT49DpjTdPa1nbBjEverp/kmGcd5dMY2mS7fOHZNwSQSMnoeUD7GuaX0w1GgIfRm1suQK5ZHPvcVSTYQkibpN2K1gKfupTJI540SUxgRTPgDl9+8+BwCkXkrWbdtaB+J5l9QEjAkEgGtXCYegQZKJoHSAKDmXhGdVbSfADYEY0RUdILEBqqMRk2Rz9ZRpGNIGHNEvSB5kstAI7bqBHxOJZO61A3awttkjcs1DuArgWIT4r/zrLwFwyAWBKDilWXAMENSeedhN41QyQCA2STqO05T3g+w4ZRIUYWZr9qjR3AwxE+eIDZF3824slCXJSJS4dQ6MZtvD0zu1bT8/Ixo4cU7Jvao/WTQA783cI+cxJQL0gEokFhcdeQZAgAu1AZlYRBwhEafhVxk3AR7KVIYroqEIJeEe9by8W9v7qqfWV4vN174uZ4Jo64boOeWcSx4GckssHvrw+GgG0zAnwnVZHh9O5+VeUhCyaSzL1qsPw/T2zbfv3rz98P6b9+/evv72q/PDN8vpA0JoM2t9WVcFXL1/8/b14/E47qacWCLAjHJmyUMetu1MFEDQe2NksvSnP/tZ3R6f3e1yWns9Dow//M5H3301pK5v35zvTw/7/UGQlnNrT/H1V1/OZr/9w89+87d+8oPf+nXsev/mz/2bz5f7dw+nh3o6QUKRISKbewQKyzgOZq1bU62SgUEcwANK+JSn/PKjOBzs3bd2eupNH9ejrU8EUDgFwarYVLtwK1zZlCNlR6JOtJv3Q7qa8iyJCWUer6c8Lkt9enoH4YQc0B1WcBzSCOO+jBNPBRNBNIhwA7fN4mz2BO7hqF3dkIBUwV0DmmoPgIjNLzb3cIyLDTITZo9GqXLaSJSFkALJ3EK1d+vHp+PWHljUfFE7Cg25ZLrEJ1H4L/+rLxF9GGZm2dpS0m4cb3p/AsZduc6lDNPMnHv0SD7nfcofEZechTlHYLcHi+UiekR0ZiaGiC5USi6ptMziNrW+mp9Pp/OyHklwmg6SMgGzUFDVvrgpkJpSeBKRYcyOFQAAulm7uCsBiQiYOKdEyO4O0Iix0MDCnECEmTNBCQcnA1b182l7/9jebnZsG5omRCySEyOA1tXO69K1erdee9XWthoKJOKoqis4dusX91suOZc8DeP5tAzT+OKjT57O27DfYSrD7gAiDtwNlnP16qe6dO/eZOth1qY0XM030zjv9nsFaY58mTgOiLb0cz09HllS91rXh7mwJNKUbvbXu7s7s/XlIf/0Oy/HdZGtQYWbJDPZR/Pw1376g9/4m3/pO7/2PWimH748ff2nx29/8eH169O6eGKAYTfuKM2SACAICgut25mQ9ruZMVNO+2mmIjxO8+0LvNpXdd6eCDqYRIDaIhFAWAGVzACcU2RGdEGkNDkl7SdAfn77/ZxLkAmXeRxL3o95cm21Lu5G1DHUXEvBYd5T2achAziBI6oBgNVYzfqxra0rqbbokWjIPAiTo9a2uB2HQZl7RKDPiCAyMhYkcGgRgQweKtIRkyvZxXIabAG
InGRgQQQgKjlzEkJUABRkCITu50TZvD2dv2QYhnyrViNwGgdKlPI1pn5cgWjHRAEJwHNOgLasyhiZseHizhBGrCyKVIFH4mJgtX/YWmXGquvW1pSviM855drcXRIfUrrvfu+GuTBS59TMgYIvbuy1q6qzB1OAsTDknAmlteYuIjKM7FG3tjoGkiEJ+YGhWHSPToRCxJBLnnLkWhuwL82SRdcFHJMMtvXWq3oQY+1L71ZNhfxqvrq+efkcnx0fjvcPp5RW6+urZx9/8cUXp9Ppo48++frrr8c5ffzJK9P2cP+u1/N+Hk6Pp/BtW9fweT/tMu9LnrPsZdx3IO8bY5gFMKPa24djXc5hwAOnotM8t7bdUuyvxvV8f/Xxx9//rb/1xb/479vrb/76X/1EW+K8dwrK6eZ2f3197ULH95+/+/yXT6+/iPOC5kE+5ETlkDh1U/UGADlnGdKHD28tNNBTHnZz6aZb1NtyMx2u8eWnuDvwm2+juYBARrVagSKCGDPSLiWmCttZiAvjqZ+c8YbnJ5S35/a0Pr64/hiVJTkXZh9v5pfz9/cfnt598fXvnc6PiXgoXe2+tw/mz+s6ug1QsnonCMDsAraNXc9JUHhs1N0QIhW+TpxTzKfltfYnlkBKxAxBDp6HMIOdDWVIarxVreeaDqn1SkSE3aAiIUaoasm5Q1OrphlFCIGE+G/8w08DFakzMgBrP4fHbnpmGpJzKUysSAbkEA5wsUQzEQR24tbs0WwDCtNqcVEV/apsWXIWSuGOhK1tW/uA1CGCmeZ5n/OAxA4qnJBa93uHSmRAyJyFMgaZuzGYe9ss1BGJORNxSiKcUkoEgkDCDakG9sDK4swlghyUSAHPtR2tKXuRXkpMU55JPSubhWSG1nXb1NXdwPzx8X453bsqBo1lit6HkknILMKQKYhx62pNv/72Cxb74Q9/sJzO63IsgvMg8zAgAOXMlLvBaV1Titu76+evPrm5+0hZugYjZ5KUZsnlfH7o6xKhTmEEhKXwkAjN9O7lS8oC2vKL5zff/RGYOegG1tvbu9vD9fVsvmzL/Zd//ifvPv+zxzdf6oe3EF1HqRdvltrpdH/ShQnLwIJJtQN6GQphFoiUC4gOeTjs7/KLj9rdC0dOT0eqR6+tbjW0YgQERViADykLAAQ6IkCwQ+KMMmDIuR1Xf9xfXSWZLxrwnImiueYx7+bpFkKRn5iRpYMAiuScOGdHY+oE0NVCrW0BhtE00wiatKPZBQuCwiJUMLJbNF0ByI0CutqGKOE0lIEJhZhwuDB5zD2CESUcIiBcESTCzDfmlNPIlJiZf+fvf4IAjBHechLm1PoGGEO6JmJzD1QiN61NN4Om6oAIKbqdFBaL8wWsTpDCIsAAxQMRW07ZLrNq+VpYAJUpBURAzWkYyh4jIwQyEAdgmJ8CAjEJT0xFrQW5B/au2npKxMI5ZWZCREIWEjcxD6KVUrBkAAM0ohIu4QjQVWt0B0u7fH1TXk3plsKEU6LMgNZ7183aGt6Wuh6fllbrBeVbZOxd12VdHt/X09K2bV3Pj08Pp6fz+3f3VTdiHobx+9/70Yvb/fH4er+fX756DuHvjyfO6Xhq5+rTbpfSsL96fn33aab5cH3IJQNmSmMei7N5Eo6o2xNCSsMdAWyndyUDsu6GabjeP71/z8vy+OaXE/WrNGdC9HpaHj68+7Y+fvj6Fz/fzk9MsNsd8v62ajvdv9mOT8S0KxNCCZKUSkrD0/Hpw+NDV23dMIyt17VRwrv9i/H2JdxeYRapWzy8i3qioN57XY91XbRXAMu5dNettiBA5oiIgO6xMW0SgNKhEZxJAmk80GFkljylNJpp107MZSiAgEnBgj0NeYc5IQIHqyXX8BYUBUzbeqqnBT2D79yJuBHZpbfDgO4twLsqXtDPnpgmZgpQDCNIYQWR1Lw3J8pJBqZMQMzJzIki6HQRjSGM4Yn/xt9/gYBEYXDyqIgU4dtWJRVmsOgskDlaX9e+hPW
uF0r75tHUFgPLaXJDQBMuCEyQmQnQAEl4Kkmy7ETGYZhTGtWbxpHES5qYExEjOxMBRLeViAICnYnIyA2CEOTiqifAC2OehCBBhBu7kSQhuTAwECECHELCS3hCtNC2rrW3mhDJsJ43Ao3wAHA3bWvdzsu2tNbWZdnW1lo7HVcPbmbH8wlLzsJrbU+nczDncVyrrkvL43C4fnVz+1wbjLtp3E0a0oxk3Odxv1TT8A/Hs2k6zPPbt1+//urz+9e/QGvjmHMZpAwkDJww7zbzrTbBPmUJsHV9AID94SZxmvbXfHdLeYr7x9d/9scfPny7H6Wv29Pjh0zU60ZIdzd3hHE6Prx++/X94wcmmuYr4eTuqkaoaZhbxZx5dxgCJYnkBAlTGmk37K5ffJKfPwcIWJ/g/Ih9QzPrTohuW90qYFDExdw8YgJCd+PAAgnMK3ZKpXBCYFvPW/0AyaYyj3zgPLgJBNR+6npiwTKVHoHREUI4B2cItubhGKbRzc0Aums9nU5djYgBAdQll4sKt61HdyVkdQf0CMFIiCSJszCRaI2uzcL+JQz90tbJTEKYEZEYkJp5NTeCIQj5t3/3FUmn1ADcIwAt3MO49T4kQXGgDta6rc2qqfVem1YkRXKzHhZMCYMCTGgsMiQsCQtDQZqmcS+YIhyiiBTOEGi1b246lCkPCdE9Asm7V9UzUvNwIgwIdXRDhnB1dEYphFQSljwiJL+QV6gEAFFH1rgQdkMjOGy0ixwcvWu35q6YYDeXGyzZW7PeKULXpS0bGHjr29oCAYFKmVKZgfFw2M/Xh6WrCef5wGUEZM4Fc8oyHG7vPvr04x/+5CeGjGkkmX/++de9w/Xdi+vbjxzjw9Ppi1+8/eKLPwd9f0gW/e27b79+99XXx3dfPb3/8uHta+sdTLtlwFwSjQmHgajINN7kQsSblKtyuHGyQf3h3fuz99b1uKy7+ZBkVIdAOh2ffvlnf7Rtx6vd9d319TjfCedxHIIuplCD8CIJo3VdGbCUJMKMhRMdDtfjzS1e3Zl6Wo7ezuBMAdu2uVcENwtzC9NcpKAQwOodwUtgIAKhAAzN2DgMG56a36s1KTPKCO5mUdvZLDwq8vliL/fwFpeVboxI2BSbhnqY18sksy/a160uTVdmxAggYFnDj9p720AhnLZQJEyAcQkYjGUOJ1NVg9ZVZGAamdM4zEyFIBMJoF/iLh7d3QGB0Pl3/vFLkibJIghjANKuFTx1RQ8thZHATavVrjXCuvVuG3DzaOEaGOAsyMyZQYrMwskMEMckU5biBqq1pCmlpL7Vfg5qxMw4QUCwawc1DWgePbwSJiIhEQwBJ74IkSAhk2AIRUqZqXTrCACREAKjE5qBhYeHAiTwITy5t96amqpuFGVML5lkiGHMEzVHdeEUCOdlpYD9za2kknLK0wgQ0ziot9PxFERZOMDX1k7bZs1ePPvo5Scfvfj4o08//v60m3b73dXN7e3dy+9//4coMB5uFGhtrchwWvub1+9O70+Pr5d5mqYxH+/fPb193c7vc3YEOz18/e7DNzwcDoeDhALC4Xr37O6WhzSl5GTDfJUOd4tDRuGAZV2n3d2rT18hBLg/PH6IaPM87cY74UtOPjVda12QM6eMgK21arWHIaRhGBHImhHC7dX17nAYd3u+vo40sBlsmy2raSfTtp7XbRGmJIyhROhq5F6ICKH1FcATyQjDnHaI4gJPvoQsUZ+6kvXStlOtdWs1oHXd3NeIaq5o/qv914m5IETT1cLDGxCeT0/aFvCmtjV92vQRsQMtCCugujez0K7mJwaKYCZm5lJG4nRBSWMgCSMREw6lTPNMxOEBgAAkicxbwBbh4c6c+e/+41eAxgScCELcKLDH5cXCSClSEnNYtyNAA7TeoHs32OjCNwlMWBInRmESwEzEAa7q6CZEdJkGYkiZPHBrR8lngN47cCoAEKDqm2oldkJB4EASSeDAgAHg6AQoBEIm5ECIlHpvph0tuStGcDI
Pc/NfmWlsYGSzbt7UzmEqMGfcJ0k5ZTBAN2ZGYDQfE2qCp2WtW3N0xw4ArXVkHOZ5LPtAWratdZ33N/PVx5znT7736a//9Nd249W6rvePj93gtGzN/O7Vx9N8vb+5SWm4nq7TMH7+5TdPb8+/93t/sD0dX7w6mC15GvO8P2347ddPtS6nitP1bV9Oz/bz4bAzqxxGBIwy757T7oZePKMXr4bnn2YRW5at33/55S8fPrxJ0m+udnpS6NG8Gu9cQgCG6Sbvb+arq/nw/HD7Ypxv0jjP13fXNy8DUmvNrKmeOdHNdDPmwa2DCPCA22qn16316BXDh1wi3HVLyA4klGgo+zKo9arnAcM1EMQzBcGKa8fwUKfH1pqbb1tvXVvTbVtrVe09omtvGMRB2swxM47hXXELdVfQqL1aPS3WVoAmKcKt+SlAk4QwIaKjMQL0FmEA4AFEOSKllAMtwJhHgAuSl1PKQxmI0V0DVGSIAPdGrB4GEEyToASTICsCE5tqQBCAImaSpTYkEggB7EQBQIYerq49JLvncMHEUqj2hpCzHLquhFVYVHXZzvO4B3SLWpsF8TiOJLdrnFZ9WDuJ7ZgZjN2NsDogIbNEhIuk5s2thzkzMwv/SgfTzZ66mikpPxBlFwAFQGytA5pIiiCNCOeI0A6Oq8iHpQvDrfdtxikn1taXukRvKedEfJWvxqEIjwiyvx6n3fVcrnuvmyEK7w8HABkPU57G88NpzAFYOsF4c6uPp69f30/T7t3PvpBScs4/+tFP97e30xV/xvkv/7W/+vl0Z325HuHd43k3wLOrK5Kb/+o//73Iw1/4td/44Y9/M+3pruhHn73II58edqftJI7D7av06V9EHrVmV8yYx+c/LF++rcejNtbA9+/7vA+7ugv3rGs9b4/3xyzpxcuDjDsDhHBEKfsD66SqLSjtbmZCqAXhFWXaikw3e0wD9g30bNtaO2y1JrAs7IAeJCIIho3QA9wWcHQ88NgimGyzh165M1I0VsuSTfKC9yeXHPVhe02R2QftweLTNObky/rYnIJFe+3ba6ALaErQ0UJCm8YKURNnDCmjRqwYzaJ2W5kzS4XI4pPVioDmFgBgW86AZGArkJeBseWIztmcuzUwM2QMaBDiEImh5Nwrbe3Ef/N3b4hdmBEpgsxAtbsjkTAzov1q/EohoLtBtyByDxPOANIaYBAxmoIbI0HOORw9OmCvm6r27rXb6rgAakrDfvx0zLfmfVsfzXtc5K4BZo3ZEQGDL6AvCAIAJCeCxMBMiAbYI0DVPaK1djHcBziE974GUHhBGMLIbAmP5keHDaGBMToTEJMQDvvrm93V/urm43F+Pu93u3mc8762bfXz49Pp+PDheHp/2tbeWvemoKfl+M03X7358ltQu9pPuaRxmJKUrgHIz5+9lJS/84Pv3756+fS4fliXtVph/LW/+NPa+5/+wb+4HWger+ra1WN/9+Inf/nv/vbf+bdvnr8cZhn28NOffjZdX+N8fVo7ycHoCspOrl7E9BzlgIBuK+aQnOpywsBWq+R0Pj4e3351fP82DKI/9m3RcPXqrZ4ePjw9Pbb15Fqtt7o8RTu5LghxXk5KbXcYd8OdOWIhdY0eyUkQQGjMiQDUDNzAm5kacIRBmLo7mGrDCDA3V1TN3gydAZlayBIIahxRu3ltZ60NAJGMQJgHD6tb7RaE3b25aqhGN1c323o7WmscQGHhxFxEsgggbkDdvImEqoZTIG3diDMiIrGaRqzmzUIhSJgup2dB8ubWagCadSTnFIA95axmtT4KhJgqgjAzBDIrc1I3wN5auBPiqZCoqoe6g0e/IMdqreEXBXHDmoVmAFrW99mGy44BAMJD9zV0gW7EY8lDyTvBw5Chj1hXX+sWHCIumBBHiMYJwj0CARUJPLpZzSVdNN1ABEbWUa212k2lYwNKnEi9Qwh5sghOatEcKwtlhNN5Q5wOu0KQym7/6bMfFp3O52NE6Hk7Pnz7+P716fT
+dGxlGG4/fn5ze+edHh8f/Xh8Oj4s520+XAHLIPPNzVXA06Kn73/8ats2ijTNh8++930Kurp7UXbD9fUL+BH/6S9+Vtfzbn/Vev8Lv/4X3/xrf//3/8n/Iy25n5bMw/nUvvfZ87IbH89/zoV+8NGnGYina7y9vdrfFIXltCENCBBhXAYAWe+f6sOb5eHtn379FfQj1vv3n7/JnKfDtZQhYZSrZ1e3ubmqNkLfT8NpOUePeg6lOEwzKGzrmtNw2O05yX7a764mxcKRrW29nsAszEcZ23Jat9W9QxgHMWWLBoEBoL1dWD3gARDknIUi8ZUEoKtRb4gl7QlO2iIYjBgZ8YJfMW2AkNW9aUtFwSHMzQxBiIv7ol7DEyKHeYBG4LQbmLtjp6LoSDSWSAnByLFTKLPIMO21L70/WWj46OFCyGxmPYwAovceYSTsGOgKkVTZUVlMupIwBw6AIpQCKyGKuGuHCEaptfb24MZA1cJ6uEeYATj0vjEgp8HAPJ0AyKN77SKC6B5+ERZFJEDCGKb8CQaZn5jHnGbmK7WOpAIpECRJgJutSI5I4Qk8MBQ8rCtnIEZiCcBAN3d1RZJLmLV3QzKiMGMhUFsMqrsjcUS8evbZYfzufrzNctXP6e0XD+LvT/UhPCWDhFGmm0bluy/k+e3dw3H7+hdv1uXhm2+/Tnn86W/85l/49e/vr1+WgRnw7bvXMuCPf/zruyH//h/886e3T88/+mT4apzSDJJU2q4c9lc3h+sD5vHdh5O1fn24/d3/yb9DGf/k//tPvNOOrnl4oYA9Hm+flWfPXoRIa/z0xVt4fy7jeDTN034/32EacL6KIATMaTo9rf3+A67Lu/fvrq7m+cWnx/t3uj6FQynz4/Jk3pkxp2nBhBSJRdK42x02V3efpskWfHh8O5Xhez/4i9cvPrOZY0xRaYgSLG1do8yn01OYa9+0nURyyVeIQbYFUjNFiiJJW1iClHMxqM1WtAYdQZOnwW82b5q6oPf+RDgiZmIx8w6W02RhJMXaB6sbM4ODdndfAx5AyYEDL+I6UG3gjXPkIohT+JmTISycc6LAMuQ0bcdoG/kG87Q7bab9aLYyiUVYXy54PFNs0bwHxwwUiZypAGhKGDHzX/67d4jENIoMgiMAh2MEmTojqru6LdtJwbsv1Rd3gMv3HOiGrmQGHtBiUTMMNKuALcLNe4S3rqZKGO42DKWUnXtHbA64rUvXSiiEmYgQMQLUnyK6u6mG28UFDwEhyYio5IGQVdUtulrXTShLskvcBdAIi9BU27b2N/M4vHjx2ctnn+3GGwi/f3dejmqrZ5N92edhTCJZhlYD8vjq4++V3fAnf/onP/vjP3x4/9U0yr/69/6Nv/Lb/9rV7avWjn/4L/7b3//n/93jun36w79wfXvXq37+5VdLXX7tp79OXJpZq7b1Fmhd9enx2Gp/fHza1vX49FSrffv2/uruxdPj47evP7z40Q9/8Jd+4+7jZ4frstsVbJrHbN2++urb1+/evX3zzfnpbCetFbwjJwYG4iBJCVnWZb4ba22tWt5dyXQdMiNnZFYki+CQ6AyY1OJ4PLp5HvI0HUJ7zokEXT0P4/721XC4CUgQyk/v7Xg8nc+hDaxZNEqUy+SewAysNt245CJCSDmlxEIkgWTmrbfACOsIRgAOqVIGJHCv3mvdwAgiEWVgEkRCDoRetfezaQMwIlRf1Z/M1DsFJI+LVhoiolsHB2YSYSDP2VOycJfYcRoL7TEyU7ZuARwk3Ra1k6oChEV1hwDstjXfupMZM6FIMEVgY0bwgf/S33mFUBiLUEJkQkB00w4AW+2m3HytvVY9dm1+0QCGQqi6moU5Vq3qtXcFgG5q0YDNwdRMLVTRlRDdzdS3eb4Bl96bW9feEaTwjoIxzB3MIeBkdlEOoimYIYAjQk7AhImLO/Wm7nAhvAediYzFiC9NMz+e3uci3/3or3zy8ffC28Pju9fvvm7tacxjonHeHcpYuioEhve
tt2l3c/v8+f37t3/6sz/Lkj/79JPf+Kt/9ePPfno8t5/94e/9f/6T/9vP/vQPvvuj7/723/7buZT7h69/9rPf/+//6R/M+8Pf+Jt/nTmP81yGaZx2eRg++vhjkRIBb9+++fDhAxIvT6eHNx/evnmXUqpray0++t5n560JD0AulEeBp/MSrmXi3aFczTvKQ5qmy58zjyWset0iA4/TKCVI8+46zdM4DuM4XF0/nw+3424/39yVsusa3WpdTolpHAaG4IQIQChAmJjneff8+avd9RVQMAHU1Y5Pdl5tWawu2s5b3ay2koZxvoph7EwEKCAo7OZhFghIDKamGyEUgsTi6ApuQE7chFrT1jYHMq11a0S7LBMAIlAYoZMQWDMCl0uNMoIwmXmYa7RamzuycFe1jm7EQqlwTrnkjJC8s1rGyAgDEopI+AQktZ6bLrU1QGBGM2IqRNQ7qv4qxpOSEF38uMbM/Jt/+7tJJsYEECwM4A6rea/NtTnhEOREol7dFSEiCJmsq5rWrq1uFqpWL34o5AD0rrWrmtvFZox4gXFlj2rRmbm3pfcNwhnSkK4I2b2qh1mobu4OqAHmHhYG6ICepRCSX8zg7q5gHZjS9c11ySOLEjUzbX37+OOPXr78zBq/ef3F4/EL03Z9/XzMzy4LQK2ttXNEgCOhzPublPLnn//Cev3pT37zuz/8sSX8sz/92Tef/+z3//l/czyf/9bf/Xv/2t//t0oZ/8k/+c+/+vyrd+/v3797+Nt/5+/98Aff++WXX7Lww8OjcHo8HcdhXJft6fGx1m05na8O1/f3xzB4/vzatFdt33z55TyOZb9/OLfe7Onxvml/PD6pwXnZ1nUtwzzvn427w3zY3bz4NOfROSCLULa19naMZA14nPe78XBBsmrv63IUSaGGgcLk0bVtHlbyMM27cF/Ws0MAQKjN+3G+PlStsGwYCzWNNONQJHEqmRFdN1Dr27nVI4TN02F/9SwPV2aEVJBEDWozCBBhYFT3Y11PrV1GyCNoQ+rgXrm1trYPVU+9gdABgcM0LIQk5UlkTDJDsBsQXbqlsrXe2qa9uRshEYzWCWEIUAgU3pWcmcO8qrFpCM1uRDhSzBgpydhaILC7uTdGyjkDuTbAEAYeypBKQvQAw7AkyH/xtz8FF8RgJklERIDV3U0RfMAL6Fc40d5jQeyc5VdWU/cIAw8zxbi8+gHYzRe1LQC21WurFs0M3QHRAajpav4gTBFGmBGIQgiyB29Nu2q4BawBG5IjB4BFeIBjKFMGQEd3t8sNJedUhmEcx5LJY2MaP3v1V8N2X3z15x+e/rhkmFLJXLIcgDnCgpxTHuUw0K1QGcacaXj7+v3HH7189el3Wt9++cufvX3zDaCty/qDH/3k7/3r/zCo/Df/9X/1//tv/ot5zEMZj0/tf/6/+Pe/+6NP/p//yX/81/7aX33z7v08lvN67q7LaXl6PFetfavI5OHeNM3D/elxW9rp/uGf/df/tJ1Pzz7+7u75c8SutTKntZpkPp4eT8fltOqx+vFhHdK+pNw1elumDMGA5uKKuSAzBsgwKicNqMvZ+7asjxYA5LvdNJRRHVLOTIEIWrWudatPErAb9/PhbvfyVZqe+TDZplByKiMPgwy5nZ4+fPjQzqfWKlAPb9gU3Hpft75SygZACKWMJU0gbMweUqNphDEjgprXLmSDWTOjrfbT0rd67K2P+brI5I6URFIKz+adqYhMAbhpRQhHuAw/RWDApVqXEUZTAKyqFk4izgShohZmGWxIuFfN7sYhEcy0QyCEIDZEyakEAMKEnjMlSoQphiEBNABnFv61v/USLDML86Xw0pG8tUaYDZI6uy+JxsIj4lmhCQ0p4WXG1DTMwMyQUCQcDNADAJDX3k29bbqt3bxBEDgIFCaPaEBJ8Fp4KHnUQHfW7hC8LEvtJ8CNxQKBiCToV+JuV1cCSuESwY7EUkZhoqVIJ+RnNz9+fv39r17/4qs3f0ggiWb0ISc
HN0d3FOKx8CjM1hvGtrUNQwjo40/uzOznf/7n7799OByuhjwQjj/46W9+9J3v/P7v/Ys//Gf/NIN++uknzeLc+T/4D/93kvn//H/6P/5P/53/2R//0Z+7Yu99W5fz8emwv1LtLELCvbV337xe6vr6m2+++uXnT+/fvf72XUIkkd5drd/d3pacAWgcBxSap/H67plTmcZ95rF3b72uy0IZSx59aw5KJTOodxcIyQUScsrT7iDgvZ4OeQbTQN4fDlMqGNSsaTfDdSpjTlKmfJiv99M15QnmKe0nyxPwQGj9fKpP5/V4Xk8PBJaE2dC6GgZ6t7ZoPUN4WAVw9dVAgYmTKDR1SzQwpe6dUBKOKKVVPOqHZnrarG/BkBAySw6QkkfhjGS9b6odMcLN1SMaIpgugeHhiJhoAG9qbhYGOg7F1d0aEXRXVW0LmObehTlTcEpjBEXo5T5CDCCLeSAUvnSXgDEkJZTkgOFuAMF/6e986gBJikV1vCdyADbzAAMcTZEuXyNbQHMARGHO4eTu2rtqIIJIkoSEgoERoN3dQFXXpbVqYYxIzBhIhIEiAMA4SgATCu/MIcARvVntdrIwZCMOBEISIA7HgOyOSEAMWnsAD3kWcskGYM9uPy55+PKbPzot70u+CcsBCLAi10AJCIOGthvSDsMZU6vdFD/9+EdjGd69+erbL792TB89f9VqH/b7V59+dHw6/tnPfnZelrvb2/3d4dzIY/h3/71//3xc/6P/w//+P/jf/od/+Ps/W07Lbp4eHz8cj4/X11fzPCzLKXH66suv/tl//3tbbXXTra59q9rq4TA/++jjMh+O51Ndz9ZrTmk/j2br4+MjJy5lGubd/up6dziUaRymqUyDe2cmTgjWxem81uhba74tj7YcW20qfHjx6Xz9nfv3X9fH9x/uX58e7nVZgCPct20jb5nwcHf73e/95u1nP4qbaxCJh3fL/c/LuvpyOp4ezw/HdjxqPwfYthy1NaJILITILEOZh2HPwsKcEiMABlrvdVusbRzAgO4dKcDBICr0Vbe1H5f2dF5PCkQ0sucAbFYROKUSzu54qaWsa22tBVmAdt/gUmGNIKK4nISthA9judpNV+CpdY/w3tQ0tgUiEtOQ0x4wAYBrRJiDdasWG6KJJMLEXNwRGVMGYuu6AToh8l//+z9wR4bU7YzpQSQQGbx5BNJMOKSEEYpI5hUCiNnNPfyidXWDgB7YEDMipVQSiZpHmKr21rUzhBCTMF56FkRsoOZbTkk4hRESRARgCJG7mjdHTZexB0AEAnQA9Eulx8PDqmp4jCVHyKtXnxD1Nx/+mfOS4Ht1jW4bsmdid1dszSuCiw9oqeRkmxW8+8kPf305n37xiz9C8v3+2c3t84e3j9dXd1d3t1998/XpeJqGcZqnPMyPp226evb3/vV/+8vPv/q//l/+o//Vv/+//PKr13/8B3/04x//cFlO7r4s57u7uy+++KLW9vDw4Y//8A9N7e765vmzu/28S5LmcSx5SOP+6u6ZpMwE5+Px8fHBPZACkRWI0tiaqnYZMoBLYhQch9x6G4aZQIjZQB/WLfry4f4dRpC2h/u3X3zxRR7GH//G3+RyhajL04enpyfrJ0aXPCELRJpTZia53o83L4QPGJ3WM9SWw/p5zZzA9fj4YX168tCcUiJyd0lClx4TEiJFgF8kvAGqXXsDDw+tfTmuT9U2Q93Cjl6f2qZ66qDnuoZF4qJGZpCY3TsCmMblbTCLbavrVg2bgxl0pCAUpAhojAUxCc9ZClOax0Muk/BAmLXbtp29W0DyYEC6fBWq5t4QtcfZooowUyl5FhYiMddcOKAiBSETMv/OP/gBgQC6+SLZWIBICU3dMYYiE0RtvXmQRQ1yBEIi91CtANy7mVWIgEgRwEzCbNrMzUIRJQwBmIVyYmZuql2NGCw2i0AQEoYQASQgsEAOIHBvRJqEL7dqRnSDRICBiAAQVauausPHrz5xjPf3Px+H0W3XtVk
nD3BSpM3sbEAQ7E3Fpix+Oj0VHj/7+LP7d/dff/WL26vDfvosHL76+uu725ci8vnP/6yUoQzFwK5u7tZz3c2H3/iNv/L2/Zv/+D/9v/+b/9Y/VLXf++/++a//2q8/Pj4y49u37z755NMvv/xKNbatdqvzNLx69gzdReT25vbZ82eXrPaQ5epq3u1KyjKOo5q9u/9wPm23z17wMDnykDOAJxaIMGtzHlkSE67rtjs829RSlnmarR7XZXn75sun89Pdfj/p8c/+4L/96svX+7sXu/1QpjLsD9Zb73U3TLvbV7cvXx2ef7K7ep6JsFcL9M1FcpU4L+eZcwA0a2HKmQkcAUUYL1c9QDM1cBZmuTyAS+hKmXEYc0KGgDwUESk8UC7AQ9u02hLATQGDEEU9hSN6Z0lde2tba5uqr8u69eO6HTucUTZiADTAxhKIGmCX13eeZ0Q0tVwA0JiyRW3tCRkDSbu6N0R3g6ZHB+u9YTobLADANAonZAIUCGJhZiZCohJO/Nv/8BMi5ARBhmIkjpepe0hmyd1a62vryAbsDk6CRKzqCAl8qisiAIALZQRABPCLOi8AEJzcIIIIsORhHEcPU6seGoDmm4Uxc6bMiMTWbVW3ALeoEZoSiwBiMBCjuCtjYsoe0Fqcjk8vX90iLa8//HPizZTDBzU0jUBk7mpL1y0AiZLAzOTn0+NeDj/94W+9/ubbZVlvrp4xlG2t796/efX844j48z/7o5LFAVnk5uZ2XSpz+cmPfv30dPwv/+l/8dt/67devfrsP/9//1c//Qs/OZ+3/X7385//+Xe+893Hx6O7MdM8TykXAiKI3TzO+zlAEb3p9vzu+sWL27qekuB+f7i+vhmmUVUvedXpao8IQ87TWERoSGWahmnaOVAuxbSZ9/lwUKPzu68ZAgOsHtvp/ZuvvyXA/W63Pn7z5pufA3QZh2G/313deKRxLM9evry7fnF7+2J68Um6+rirAi02X2kZYn2q9+/v3795fHp3Ph/dsACHtd7qZavWcARMSVLJIokIiQiJJHMuCYlE2AGZk1xkss3UCDCZQyATswZ4UCAJZ+Hc7YxIEWjWa11No/dW+7HWs+GKvEgKZoGwX7WFcGMacs4AmHMK7EyA5ABntdrVERmRwwLMwTnAa5yarj2a4xNxIPIF+otIhIUoESEzAhAiBzr/7d/9fgA4avdqWJGMwJNkRjGn7dRqbwbKCZghIFggvFBMQplx5JBEucihpMyEGIAQzOQeXVut6iaImVByKuM4SObEgBitrRGOhGZRchlyBu7NNzOtfjLTABNB5iyQiZAp/yp+ghUCn+77Jy++l3n89u0v5pkDajgHCMHQtREj4KbePCKnnPgKUNf29VX5yQ+/+6988dX/4K3f7l+15Xh8fGzr8uL2Y639m2++fPHiuSEAwfX1DQaa20effqTW/+TPfvbi5Ytf//Xf+M/+0//XT37yo9779fXhl5///Nmzu3VdT6fTfj+nlAFoW8/b+TRP0zCN3ToTEvjVbuemZZrGaUJVDM9jub07DIUIA8gp4TiU68Nuv58lldu7W0Y8bmcIkDIMQ16Oj0lQhhJhD2++GnZFys1EeTeItp5Ynz1/Nk55e3qigDDdD7tPv/djuXo25XF/feDnL2wqIEnKtaSpguKYOAmeTc716f6N+UbaODSlAQkBADGQMbGMZUglu3lr1VQBCQJUVdXMAQFbq9u6ACiZYQASs1AYb6Bn6xrdobEgJVHdTGuSjEiqql2JAUiJyehIrES/antRAHMggakSUYAxh0iuVYdcAKy2c2uKIgxDkaS9tt4UevPVo1lsDiujmF0y08I4EuzdBUEQhHEgzIDG/8q/+d0AtFC1DbEjOYEziTlBpLbBVjcLJUZmEE5MJLhHGLwjeWYcGG0sOQubmxsQATM3t9Zrb6BNKAbGQSRLQqaUkggXQOoNXRNCMrNcRsloCBZqvrXew5GFEybChAiIGIThFuFPj+tHtz+8mq7fvX1
9uBkzEYJFuMDOwdW6e/OoSB6gCCmnIdx35cWPvv8X/+SL/245P0y8Qw1dTrbqy5vvgsObt+8++fijnDKQSCnCUrf67PnL1tdt60+nh7/4a3/jX/wPf1gK7Pe3HriujwCGGI+PT/M0sZC7927LeqRwIjfvta1a28vnz0XScnycr+Z5Nxchs21/NQb0KadxHm/urqd52u/maZzHcdrtdimlXIowRQBGlDQlSa5LQst330XGxw8/vztcld0tsA8cEC0Q9oe7/bgHwJubu3DM43y4+6jcPKP9deCcJak3ZKzrVuaZpquMJXuAbg5mmw6JuBj4EOjMxEyAzEjE3Ky5O2IQ/ao3jwhEpBaIhB7uqh7gyJJdZG1+//h035cYM6WMocgISHU51daImClHRK1r1wXZWBTSxskuZq7EiuREDNQDVjMvZQAgJHZTBEuclu2x2VHYRZglzBfDBtRapaqPASe8yNtpQmrESFgYd0IZIhDIDAmTx8Z/6x9/jChNt6YLQECPhCEogNkUwmPdNABRNHESSVkO5JM1ImAwCo+ciMgtFhIgZkCwwAjyAIzilhmHnKZ5HMY5lzIjIhohT4SiHREFIhF5Hkg4HAIsWl89IuHEEAhKUQgpoJnZ02m5uf7senr5xec/m698nke0GcEjQAQtVN2YLtCUysCCo0eHTj/+/u98/fWfrE9fPZu+X/yAsFlP0/Bsf331+u23z+6emeL5tGrT/eFmWU7TNKhhEv766y9evfx4OS/39x8++853Hx/vd4fd+XwiotPpnHNBQvfGTACgauC2GzOCeuhuHHNJqh0sANS83dzc1N6JoOSyGw7z1dXVs9vb27vrw+10uDrsDyUnREzznMs4lHHalc22vDugIPaNest3zx3i6asvOPH4/KWHAqya5gQpX92WUvIwzy8+CUPtLY9X44tP3AnUINy0cxI8L/3+vdBEwGiVAFRbD+2dWl85cxaJQJGcpaBwmaZShpRzGcaUS85Z1ZpqTqOhm65Bdrn/ba0eq64OD72dXSGNgEXSkNPsBuZPta0AgZDDyWPbtgUpSLpFc++5OJERXjD+gWABTsSIHEBdl4i+nM/gm5OZniC6ZIZwJGnW1lZbde0VEHO+jA0i8gYQEAwxhDlxmLkpunnrR/6d3/1OhF1CAbX2S0eMkdXJVHu07hGISTyLCE7JiwMgi+BgtiGypMv8QB+GVPIEaD0aBCGgG7QKETKUcR7H/TBe7w9JcoAz8yAlAFrzEZlgkySAkCGEfbO6ViUcCAnZETJglsTrep7H/fXu2ZdffMGJy1CEx5wHQAzSwAhAMCcJChIcxvRsGK7r0j/75Cfe2sP918/2Lz3YYDPQteLN3ScP7++F87atj6fHWuvN7bX1BsgAqW56Pt/33r/73e98+dUvP/30k/v7D4fDrre6rsu6bkSX2hZGYIQTAWFJDGOWbd2K5K3VaRxFsqtSipRKGcZt3Vqrz57f5rJLpUzzVZ7n3dVhnEZJiCIy5GmcOI1BzCOVLBCYxsk0fH3E1qcX343E/f5b5OnwyQ+9OkIM482w2+2vX+bDQcYx0zCnibaGuvHhQOOVbrEdn/h8wu3Jv/ni/bc/R2J2WO6/9djO50foOQ/s5s26Y3BOJImQE/+qzHKh6XdTNQtEi+h1C+8hQCyhfKzrB12dBVAggEsBcowAHlzM+qraulK31i/FcmyqGyKEb4AOYCJJDT0gABwQYQRMampe+7q12rd6WuojEYoY0EZoKY8KDh7bspl2dwJkpDCvHhtzQnB1RWQMEmSMar333npb+W/+o+8G4NbWblvvzUOHPFxKre7QVd2BGAINach5T5AghiwDErmh0MDiIh4YLIgM6tXBAdjdEZIqhWPJCTGYZZznaTgIFzWxpkQAUdxgyDnCJBWiDVEMUL27ucDEOJk7SEO3kuZ5d/ji9S+B+LB7ISjClEsiAkJqzQhKytMFvpJ4P+WXCa6f3373Zn/31c//bM6oGF0X26rA/Pz
Zp7syH98/6La23pB5d31gSWvdbu7u3r//IJLevf/2xz/+8bfffptSGcex1rrb7d+9e2/eEImIcs6qegElEHFOOUm0ugxlrL3VWg/7Q5hv2zLv52EYVTuAm7V5nsZxRJJpng9XV3WrwpIk55QBIOUxJSE2sMycApXSnnmE4s0r6zBe7zJI0s0tdq9+GJIkglnGYS/THvMeHdLNjONYTwv4RtPA82HIWbeVXUqZ1vtffPPH/4wlV/SH+9dZLkV+6L0zcsoJkdwjAIIiSb4ogGqt27ohoVAiRETOqRDCtlUkSiVzGloQInhYB3O2ZluYETijI2XT3NpS29K8pRQIqTdDbuGAmMMREdwrEREMZtw7uIf10AZaL+HgFl6HYcjMDoDozG5u2mHbqroGBAIzpfBMREk4wNyAKZtW88W8dq0Owb/9D76jEFuvvfW6NVWVLEwIQeEQAF2bRQcAAM5lSFIYRowS7utaT+dzxMbsgebRHHqABZKaqXaIFP4rbMuQR8IRg8ZhLjypX9YREJJwNW855SyBABisXjVWiCxxQzghOTIg4tXV7v7hfe0t5zlRKTymlDgBUQrMtZ0Bfcw3CW+YZiIWzkKHFy8/ev/hW8JgnIrRjscDfDrxxy9uv/vtL7/t9YThAVByHsbpw/3Dq1evzsvSu14frolhnndfffX1d77znaenp3mej8ejuzMLXaZ2EMwMEUXEXYcykAATofCybleHA0SER+udU7q7u1uXc4RFRB7K/nBIw5xyIWYASCkxYCoZIpDlV+V4kEvFInCIEE8tsdhRSQSHgwD1vjmk/c0nQTkAmjYnT3mKklwJ5is+vMLTKZ6+jaTRKVSTLu38lOYpoZw/vNde3fogWQQtkBBzzlkyIDIJCwJGuPuvRk49pZQkJWEipAvkHmkcSkk5IYGjGEb0p7o+1WM3Q0oKttUTJsiyYxx6b23zZTUHA9faFo8Ix0CPMEQKMHcHyNrBDF2TKYaBh1/mgHtriEycicFju1z2XEs947qFXzR4NBEOl/AeUTIDN9PYIFTV3QMZ+Lf+4WcWbh6u0LZWW2WOaR6yJKTGHNpdmwJwGDFhykOigVwC2DzqdoLoRAbY1RqQEUMEtW5mHUEYEwQgyDTvE48SidlzTixea3UPDUPo2htioKzgnGVydPUONmQ+CBdCU++7/bTZg+maeRdKiJGHgsHBJkk80MMQKPEV+TzKPObJml3Pz7EPfTnOAyecB9qP6bZtueRkbXv/+ptMoSGENI7T8bSMw7Sfd1999eXt3W2r6zCWh4eHYRj2+/35fL4gWi7JbQBEhEuHKOUUEaVkJiEWFnYLYpIklzqGCElKRFxbZUkaMY5zSoVSGeZZRNRtKAMx11pTyaYhQh4BHOYunN06AEJTDKQioKsnNpgG5rrdQ4Dsb8lWZFxOJ44Ybl9EpNiOlEBuP4b71/r2TVCHtp7efbm8/UVr/fbVqwTt8cNrFiEEAAqCVLIkuQRmiVC7AkTORYT9X/4ggmo3M4r4FbcSgQ3V0SKSelfrII4QBGv0sz2JsNAotMucBIQhbbWdlpN7D7S1KpAICxG21lvrqu7G4bQtDjCAEyBwIotwZxEhSimRu7pbQISzdupb3tbonQEyogA4ElwGDM3C7eKiSESDuTET//Y/+sHF2gth7r1ry6WMQ8mJiBUpwsHU3YCRhRJiEZxKFklFJAE0tJBkw4BIFG4GvVtsW3UHdAlTc289IGI/7gk4vAZ44rJ533q99MnCcKurwZaEheac9iSFMYhIpDAFp2a4BnZm5LjQhzIxQXSklGQEJAIGLwIjhAxlf9i9ZMzovj0dRx4ERSh55G2JDvX5sxfQiD0VHkNwGGYHBI/rm5s3b96o2f6w//qbr6ZpcvfdbnepfvTea61mhhgifHFslVIAkIhLHiwMiQkTpzyMORCQkCUNQyplsHBAyKUM076MO8mFUt62KjmVUrQrIvbePZyFwa33noekreu2EXbvSCmDNyAiQgr
HNECvqcBmRwTEstenczwdt6fF6rncXQMjPL6L5Hb3Ez+/L++/9ixOQwNYv/mz+vBV3h+Wta3HJ2Y0YBYys63WICLhYZiGcUqp+MU3EtFVkQiJWERSiktpiED7VlvfuhkFE5ljJ6qwPG33j/Vk3vbDnvIuuoBpgpy4NO+nZenaHYkQ3ZEwh7N27I3cWNV7t3Bn8pQxMFQ1AACh8BQuzJlJ3ClMVAk8MYg5blWBMMLcnZAI2AzCHSKIEoREXK40mX/nH/3QwGpbPdTRkEiYcsYsxOyXloGZqYY7Co/MI8eUWKQQAjg0M2eOlAECWt88uoFvtZpBRNpq7d17h/PpNOScZXAHRFRTlBRoECGIgFKbq/dhnBPtJe/zkEsxMzdNqSAnW+qxFBEWt2CahIbMlBJ7SDhcVNVojFAy7zxiv79Jab8sZ7NzQJhht1M/v4UtbuePr9LL+/f3Dr2vm1OogjsAU87ldD7nUoAJ6eK/5JyLSEpJHh8fL0tgKQMiMctlKxDhYRhEBDlKGQOglMxMnHgYxqGMYxmkjCx5mveSp/lwLWUCRBGJgLrVknNcLl4AddtYgAi1GwExUj0fySI4eUYBhQpEaM05hznp+UwY9d3byNepJKFtOuyPH97I5mW+9ccv8f1D5CiHcfvwjW0nlwLz8+38+OaXf0xIn33n++fjk7oyYdduqoA4zXPJY0oJidbWa2sByJLKMLKkYZwkZSR2BEI0bapVLRjYzDtG7fbN+d1Xxy/vlwckJiAi5lzASFt3BVOqvW51UWN3FCaiBMAQYhrhbObmFsApE1IDMARwx5xmpkSWTLltkdOOI2lzj4AA62ER3Rwuaw+yO6gBuACgeyOGPAxmv6rk8u/87vc8LpuIeygjY/iYIJcQNoJAQDVRZe3gRomFhYmJGQEdcEMId0/ZIkLNAdFRa93CmaCYkyskEnAM8N1+RyCS2L1qWM4SvRPJPO3DyZwIr+Z5GOeSeJfz6HHuujGKee2xkLSSM/FgkZBERJh3bm6+sQThQDSWtE+yJ0reOaccsJk1COMwqCEIQmXCu1BUWyIgD5fp/h4QwzhdYFCUpDdDoOvra1UdxxEA7u8/+EU0TpTzoGq9t4hARGYaxtK1J0ngMA4jAgpnIkl5LOPIaeRU8jDmYQZMZSgA7BHgUEq5nKyGYfiVxRPZwMI859GdulYB1NohyVoDVEE30A2Y+7ISynpc/Wlzb+v6/vrVp7ZWPZ6vP/kuUqy6Yhm41dAjEAkOx8+/rsuHkQPy2KKtT2/LmGWaTuum2pIUFiZmU7tEfj1MKHHinLNIRqQhlZQHRGitUbhuzdSIWVIBREU4te39+nh2M+CU2QWPfW1VC48Yg3kN16Zt2U5Vt7DLqEokTu5w4eIENYsegSkHoDMjuiNEkSHhQFGsU3heFzUDRo4wjHC18KTu7mgO4EFMgAGggI5AzA7ozJxSBuiIC//tf/xDDEcIEujaKbAwzIWGTIjg7ojSN+pVAqK1ypzSBCklRo5wJHPwiAa4RUAEIrFC61oRMuMAIRQoAEXGrptkvDncUbRAVN8wdJBdREkpjePEUFjKVHb73ZDyIdG41futPqi22lcqR+RtkDHRGMgRjJiFCxIBmUXzAIJB8o54TDKFuelqvvZWQ7ekNiiHixnfzXemjYUZpEOEd+vh7k27RxiAG0DAfrdj4Tdv3rx48aLW+vBwPwyDuxMRgNW6MlNKOSJ2u/n+/gEAkyQz2+12iMychceLAA2ZOQknaVWXdZEshNTadsEYppQuy3/O2ezy5on3DSXlYdrqmSCW86lBL3nfzls/vfdwiXw+Pq3nUxpH1Q6xWa01dvPV1dvPf4kW4+FKMsR+H7tDd/DPf5mR0m54+PrPCsP1Yd5MEeD04UFSBkQiBkYWDPDMOSdhiFIKESIgE4G7sIBHb6u2RgDEMAwTj6ML17WqmXFsW21cSpkR+djOj9tT7X05b4lGxNztXPvJvFVtEJ6yloxCA+Il6FWIimM
l6cQUaMKEhokL4+WNymRiuuudI3LrGoGuFhHhKYIiSI0IBEkAPBdhgZQREYg6kqlXlobY3Rv/zj/4EZInEQjUejaPJCUlZCJ1NxB3UvNaPcBDOrBkygMTXfqUCGpbt5qYk4iZI0vigsYBIVkI0MOSDIUHdGf3w04C1aEPAmrGZZiGK4yym6/KsGeSVPI43EoiIN96X+r9cXlE8cxVQ51gGmfCHGDk5AYgCsjeSXsluLxOY+KBAnvrrmfEJWzrrVkwWAxUxjRua+tu1UybtfW8tQUAEQmIkNjQEWmc53fv3+eUxnH88OFDShIRy7LM81xKbk2naSainAUATqfl+fNXiMychmFAjJQYEVQVkYWZkCGw1uruvel+v+9dtXcS1t5TSrVWImqtCaI7n44fgmMYOAI3w9PDBzyfaRwYCUxbU5CShPu2hathn4adLP3161/w/sVut+v3v8iSeBxtKPh0HqzBLp9Oj0kwmb/7cE+w7lO2poFs3QQCgNxUWFLKampu9KuLb5CgQlcDQgEkYMqp5CQsREkCMDwIiVM6914dCiUQOtn2/nj/0I7mFh5P/YSurdq6ndWrRtv8kcVTlpSQizLzIKOIIxFxIWxCVggzFsSCVCKy9qEbmrJpIGBoa7p2JHVBQgVwV0aQNDsAseWcipQxR2Zz1e6r+gOSQVDtlf/mP/iRQhMhBN16d4oIZQ8PdzWwMLO19mYGyACM4CUnZAFuiIqI5q3pKjkN4wCQASOXnGWgIII08RxODJQTlyQlYWYTpgikJGadYtjtnu131/v5bhrnQWaEHAg5UUSufevxoHZeN3PcSmZ2IHBJhWKnG/TmddMwj94cAEncQ60L5ySjasVUUbTbEtDdacAdqIMTS2ERvAz26KJaXTWAUp5609rqkIf9/up4OkpK7t5ay7nUuhHRfr9HJBEhIjM7HPYfPnyYpvnq6hoASimIMAyFiC5WD3edpt3lsNRaG8cxIsxsv9+vy8rEl06Cd3UIZl5OZ2E6nY8s0tV103Eoy/Gp1QqCZRxZiAFVa+ZgZGTazJ/efH17OPjDl8f7D8Pzu+LNLDznHDlG7B8+ZE88j29/+efQz7shn46t5IRg6iuXACKP8EsdFFHkgjlnulRbUiKSIpkJkiTGIA5khAA3Iw9BRiaFcIQOttX6fn16WD6sdtKuusWq9rAct74p9q37povh4mEIncUNUNIwlpk5mIlYHVYmSJQIOfOesWBINLKWtELT7m7m2ltr3QOJ5fLvlkScOTNBzll4KGVADGIADo/VdPWoQbXpUtvKv/27P8oCAC0wulvrHRVAqYeCOwZpj7W1ap0olZwDGogTk4VHmDC7Qbgie0rZ3EhkLDkRWVfBkoUuAWaikJwHEaGeWZCpA3iAbrq/ut3tbwoNu3EqMhho7cdhGIR2VbfaHxPNblJdhaxA6g4YkWDoXeumrZr3retZQznNzKluhsZI7rg4tN6dwXaShthrjykN14cXW4ckLIim1qu2doLAcdpDkJmzlKvdrgz58elJRJKIqhIRsyDSfr8joog4Hs9XV4cIf/fu3SeffDqOk2pLSZh5GMqlO5ZzvpQRe+9mJsSSZLfbmZmIJEki4u7oceHq5ySmflmJc5lSHr22kmkYB6u9ZJGyezzeD0zWq9YTuW+95v01Ht99ePNt2k345ptl6LFlbtvIaA/vdC487tvPP883+1x2H95+ftjN3WNZjtc3NwF4eZRIKJyGofxLgTwjIjMjsuSBIRG7e7PeERAgHAhQiNPlGSMRIarqw+OHt08fvj5++14/bLqa0uasxuh5tbXqsesJXcG7UOBF6sY85KssO0kSEQ4a3glmpnALMEEvZBNEAgPgZuEB5t7Dw4MgQCQYgzmNhRgICKVcZAWMiO5EAizozWqt3Wv3Tbvz7/zDHwomhB5oFmCthyo6CDEReXjvpuZh4U7EJIndgBgRs7VISIIcoECasjADURJKhNANHIzYNBQQSTgUzC1lzzkZhYY
BodmZU3p2911GFlLmBMxrPSHpkEZwbluPLomzQpidL52yrhaW3ei89Nas96qxOgULM4t2XdfVvVqv6F4kZQ7T6j3v8t1cdixz75EIwKP1BtBYeOC9RQKAYR5yHnpXIF+3mlOapqn3flm/d7udCPduqpZS2u3mDx/uI+Dly5cASITTtCNCZgEIZimlpCQ5l2VZtmXd7XZJUi55nmdVLbkMwwAAjMSCl12lDDnlZB1KljxOicCtYy67obgjUAlb0BoRL6eHpw/v6uMHaHW6/SRsTR5NH+/fvc7jmERH8Loey+sHubleJI5ffXn13Vd63J4eHsapfHj/zfHxEQHq1olS72HWzP5lmh1Ata/rwpeKh3tvBkwimUmAspSJxhE5QS6Yc/TWW23akkiRGTidI87VHur2FCsQFRyZ2BTMGkYkolL4UlAdhyKIAoiILKn1jgLAwBdPkCJBUWUECSA1VauqrtrMlZCZA7ELQSLJiS4uk8CQBIxIQIgmpAC9d2/NmqmbIyT+V/7B95MUhW7RrUFXYKLAgMAIUutdO4SBsakQSUkjYwkXZMcAbxhUWywQDojECdwhhPCASN22qjXAARFJENCsB6uhKvRASlm4YN3as5tPyzC5BTIT8+l8Oi8PJbFAasumtlJgD1ULM0BAU1iru+JS+7JufnFziHMihCCM8HANCp6HkcApaOBxkBm7gVM4123VttXaFMJtcwuMjJLzkCTLpq03HccdC43jmBK3Vs0853xzc2OmZn45/avqtm3DMJRSUsrTNJYy9K4RTsS73e5ScxiG4Xw8EdE4jtNuHoYh53zZWzgJiwxZEkvKKaUEALv5ICzoiilR2FjkXDvUhom72cAgGF07Bq6PH/rTGzu+jd3LQ4aRqXKG+6d4fr3bX8XDIxSw9uhPj/l7L8+vv22vvxKOx/s3hfmjFx8/PjycjkdCIioi1HoFABFhFne/HNvcI8KAgtOMMhNLAORSUJKDmZp2beeln1fCQCYSHsqwm+a97AfaEyZbbauGEV0BILLMiRMYZM45E3IwIQUAhAcCQgCodYuVeROEUHC/bKjq0c1bV9UOgB5hlzkpoShFABAikMHAgBwpEg+JGQLAMRw6WNVaa4NAROS/9Y9+nAubm4c3dd3CQTSag2l473bpZNUOZjTkMVEpskuUELq7Ne0aW4OtawdElEAXwcmdAzElbq7dNnNFIhGMsO4QAZicnAKRU67rypwOh49aw+6Lu671eDo9ONgg8/l8qv0IYV1rdwdM7MVDLMIsmnptysyXVSkJZE4USYIQkKMkSmNJIxdR5kbW1TZKNDIEhUlOCIGuHoyElAhFgNmDpmGaD1fC0lpV7cMwlDLs9/vLDKS75ZyGYbhcakspwzAdDrvLuXnbNoBIKQ3DUMookoahHJ+ORHQ4HFLOpZTLNxMe5lZKudrvmYgQc8kpJWZhiKGkIHLt4D0AtvMTWQXGAGrLKYEyYHR7On/blnfnp7eA0zQU4sIUHGbTTV/bmFrVDksf2jbtdl6DqKVBHt68H+fh1ac/bBoMxpyATVhyLsxsZoickkQAkaQkKac0jJIGJAAwQFDv0Lr31tZFuwWTmiMhEJgiEoHj2rs5OOWl1WM9btaJ05B3QxmHRJIwgLr1ZhvCGI50GUkn3GpFNDdIiATg0QFR1SAIg+3iQEJzMiZixgAnhjyIR+26mAKgCiliiBAhEVymc8Ad0Ar41BryX/9HP0FAIq6q5obOtVrz6qGMGB5u3rp3pfAiNHAw4jAOQ8K8bbrZ2REutCz1xmyZZvLsph7QPQCgtzWoBbqrAiDSYKHMAkiIHdxVodaYhxui1HXdWm3tuCyvz8s7yUVdj8u3hisJWHRmFJoJxwiESK5MlEsp865IRgC/+LETiwQPPA4pZ0HVc+srsKAhhOzGnRDkPIIjYVhv5kQMIJDSUJsjpaGUlFJvbd2Wq6sDIpo5AGzbduHS7/f73vvpdBqGaZ53t7e30zSWUtxDVUvJ0zS
IpGEYc87upr2LyDBcOmPDUIacc5LkZh66n2cmJMKSyziMAFh7C/c0DOBmrQ7D4NZsW4PYu4tuy9Ob9elhvxt5TNZOgvkU49tv/vT27hpwtad7GdnaxsfFiPJUsNn9h8frVwdEgeApyePpzXh1s58O1hYAAqRhGB28toYoklIEAFISGccppWzee9tMGyK6O4aJcAByTsN+x2W4kJ6X9XzS5am2p7r1dq5eGztLskAPoMhSOGci4ZzmnGYh2nqs2xbhBGzOapgywwX/pysLAMHWcFvVLNQUCBDRw4Mh5UA0j96jpoE92rK1CEQyZu29mfWUMhI4WngwjAyTdeyN+C//vc8wiMUjFGEEyqqtLu6uKWF4mPauEZYJi/BENFIumQ6ZJiI496VbTUAXlB0SFRqEBg1em9bmrTW11uzsEZc5N3VijkyFEzNiYmeSeokSDkM4qS1rfbtsXy3t9dP61mnb6octFmIgFOEJQC6QCQYsaWSZCHEYZRhGQgfvRJwgCdLAKQOzIxglZHRzgFF2o5TWtoeHh/P5EaxLylIyoUzTlRoAYxlGbTrvDogUYUjQmhJRTsN+vzPTUtK2rcfj0zCM87zb7/fTNF3OQdu2EXEpebeb53mnavM89962dZvnOeeMhLv9TliGYRAQScJMEI7u67KM08ycUmIA7O6pDENO2hYRyUXujyfziGVd3n8rYu/ffft4/+bV7fem4aZ5uXr+6jDA1tab3f7+7bvFlut5evv1F0o85MFKNqtPb78eMB/ffygDItnT+w/iPO4m5CC41DSj9W4el0mqcLhEvtethXehECkBlJNkZr/8WmVCZHFIGNqaMJGwUUTUp3r6UOv7p6fH06OCEQliF7YswyDXTIVF8vB8Hp4D+nq206OaJUqZmRhTkFftGu7EYalr1KYGARoOUWsAQR7RY4MwZHK/8AgTSAD3i87RFIgAIBCCPEGdVFPrzTX4L/zWzNJKnjBKhJJkpNKb9nYOxAi15hiFhb13hGmcXzLtBDonwJy003JeAS8jyUhBtS/EjJj61qxHbdu2roiVpBFlAGFxJAFsGefEBdxzHrPsEo9TGV09wNft/v3jL1s8LfW12tlxNezMIjRkmVmc2RCTQ5XsKQsFJJFhSiKOCIguhoJEjEOapnxIktU00LFbrxrGy3lJA0+7mfM4lIKUmPm0nQEpp11KZZqGcbc/L+v5fAbwaZpvb2/3h2ldN7MWwWY2TfNud5inPRIg4jhOqn48nqZpOhyucr7EJXi/359O5659t98P43DpYIvIbrcLUCaZpx2S5JK3rTbtwzA+PNxzOJHrVplimMv5vIRaInAg257WD1+dTvdJRI+LLieLk5Kv7bwbJgjfuj2/vf328QOW3WHY1bpp7+F2uL0Rzh8+fIjQ++M9ggy5dOvNXCiNOefE3exSN0Ezd1dXYTJyBxvzMMxXwkOSBJwoJ6YMLKpqtTFYuHZVYZqmcZ8n83iq7dzW07Y9tO20vlv9LFkST0nK9eGm5DnnYTfeTOOrZ4dPrufPslxttbuvKcNcriMAwps264FcyHN4AkaA1Fa80BeGMXEOAwMwuygDMFpvSA3IgC66504AFhie3YsphuPWjf/R//hvrNsjEhAWjSd1QhgI8bSttQYgdGOAYWBKkfpSp+GqlMngTIhCYq0tW9OoSMjMgNyttthQRE2X87FrN1egyikAIqfEJKlIpgyBhAxAETTNhyIThgRVD+xen+ovt/aBmQM6ogP2JFlSYuLMBcFE3APMex6QJTA4cREyht6tkjFgJmYIdtC1PRKBWsfAkoYp71IeUx73N3eHq2eOhBHLsgDzYXc1lHGa5mkcP9zfn5d1GIZpmuf5sNvtlmU5nU7unnO5u7sdhmG/P5Q8HJ9OImma5giKiHEabm5uELHWmnMexxExVFVEcs4550uKTkTGYUJkkTSOQxIuQ2FKiHh8Wk7Hh/P5AUDPx4dWK7ltpw/Wtvnq8PjuK9RTVwv1/VzOy2M
pu3Ger1+8fP/mGwlwgKf7+xeHG9jNhDKTnNbT6eGRAOerQ183dA2wbdsQaBhnYoYIYprG4cITZ8KUxN2RUASFRVIiogDgVESSX7LQNGIghFFYoAZFyilTOS/LYh0I1GmtlyBmXdu5a2XMIgVQUpnH4Woqh914dXd4tp9eHua7Vy8/en73ijiFgVsgCwPWCy8RnVyYMKwhJgIJU0B3i5TpcqYmAuEiPElQdPNuSSS8rbY5kHruiuHglWxTEON/73/97wCmHgtGd7AL9CQa9h7mK4KE58RpSEOhtK1bbTVPsyMDWaARmQauG1hUEWHirqt6M7WutepJu0eEqgEIkucCIgNEHqd9SQXchf3Cqcs5S04UjBgkQ7d1bQ+AwCSAW1xi6BnNNvJEII4r5U1tQYGSBV0ZVchV116bBQGQBIM7sQiKNZ0lH3YfT2U/l4mBx3Eqw6hq6t5byyntrq4R0zCMkuV0Oq/rsr+6FpF5nsZx/v/T9GdNk21Jeh7m0xr2EBHfkJlnqKru6gYagEAMhJoADQNJWNMISqIZdYWfLZmJkCBA3eipTp06JzO/IYY9rMHddRGFv7Atduy13N/3efZ9LWXdtp2Zvv32u2kaRSSGYd+rSDgeT4jk7syUcsw5ulsIUUTcPcZwj1XfS1XMDAC930d4bGZEyIRENIw5hsTsRM7kIuJdy7pA6307f/npx9KtLS/nl98c5zkQASJFUZK6bTHFw+nw429/ezo9aG/t/Tp9+wmI4LbnOW/b/v7yCu7DEIcpsbCgnI6PzpRDZKTWaqnNXN0dARGAmWMIbqCmpsrIQASAzCwxIqLDbl7cOiMCumoHIHRet23TWnqtDbZi13q7tuvuTaGTZyIoe221Hw6neXh6OD0/nj48P/5qnmdVQ+AQgkGv/SIchJJDUHXwFhhIAAkQmAXdgVmI0FpH10ghpDilIVE+TQ+Rs5CQASqqYVd0jgzBGnhHIOAA/L//+z+TkK2DhHsYtYIXcN72brAFziMNgSiwBHSwuq5rsZbT2HVDRIlBKEY5QC9qHdCIm1nT3mqvQPBfCSmInjhAjB7lCJ4BKPFMBA4lRLFu5pjyKEwOFmQ+TB9a68vyZt6RCSG6EwCYWms9hhQitP5u2NxKIkBtap0YALS1VpXcjJEZQpQxSB4oJoj7an1X7wiuHLiUsu3bvYudUkAOiOIEtXXt+vTx0zBmRAghrOtK7Nt2a01//es/yjmr9pyzdmemjx8/qN5Hn0gMx+PxjtWJMfXeQwjMtG0bEd0/Aojo7uM4rstNtSMCmhOjdSVEkWC1lr04eG+l1xvovpy/Wr1aa7UD1EuwXvcWmY055GxkTU0bELqwbMv27Tefat+/vH55PD7pXta6nk6Pw5C77jHIly+fswgRO7irIlJM6f16bqWaOoDXUsyciO5TUXBAQJFIEuBekHHt2gA6gpu5dUVAUwXXrpUDVTNDC0Oubu+3y/tyvu3XvW9mLeXgxstaUsofHr89TqfD6fHh8fHD84eHw3Erl62e97p0uzbd3JRjFhlMDVwphiApYUAAoVBb4aAiId9slwAAgABJREFU6NBDkHGYgnGCIJxSGLIMoNDUDcSdwSn4gJYdPGeex4H/t3//Z3WvtW0d1NEZFaAYlFZu++5TimNOBJxFoKl3VW3X2wZkOQiQiIRDenoYD0JQajEyTm6oxE4A5DFIZoAYAiA5KhiijzEGQXRlQFZ0iSMBtboR0TQ/W8OUwnF8SOFxWc5L/1tkAku9mzsiBqYYYxinEQnMdu0VujKKGzkqMbTetk4OCAwkqfXe+g2d9rWrbjlNQgmIem/v75cgQqR130SYSAB43XcJ/Pj4nPJwvwDGGM18L7fb7fqL7//wdHwqeyEiRIoxPT4+5pzVmrm2th8Ox8Ph0Kq5YWtdhEXI7Pep6TtXOEoo+x5DSCnd89Xgjo5m3rsSM3u9vH9dry9vn3+oyyvUvZe9rzcSnubDcvkSSFKcSy0VHAhGSdP8KDy
sy3lMfBiPe+vT6Wi9X94vrtVNS6nglgIiYM5pvd2G05GH4LXWvaR55hjbvph3bX1dl33fWmuqnYgkxiAxpimliUV6V3MLUVhGkYxOpVXtnYljzByEZGCPW+ubY3G5aXnfb9fr+rrttdwIacgzgO7lMqZxyk9jfjydxo9Pp+f5cUy5+H7d3i+Xt27VvXRrCIBE2tycIqDwnVxBQZwcJSBy66aDTBln70wKA+bk2bzfbGlQxIZgA2JAvD/4EKPzv/v3/9zcu16uy0V9RW8OW7UOagx5iFMIIgECduhbbXuzvpS+tjaP4zQdiGKkIUlOaUQgVTWjEEYHRPckg4RM9/AU5t7NzFQB/L6uiojkzGDEoGpLqZcwhnl4DhyHHAMnh7Du6215B3R3VO0hhDEfSIQJogRB6t3cOcqEzoCKBB0IQEqDtezmHb0hGDjJkA4h9opCyb1v+zWGTECff/7d8XhsrZTSatMhj8+fPmozJ3cnkZBSPJ8vpezDMDycPoQQAbDsJYQ4z0dmYSZTN3WzHmMa8qTqIqGUql3neW6t9d6naSbCVur9GrCuq7vHGO8dAGFW1W3bRKj3kpigl7effqbetBezuu1nIfr47XfWmhoM0yGP2cDBoe0rK44xA3RhEQi9b4D7h6dvnXG7nuuy7vs2D+FyPmsHYh7GcWv1NB+tNQcszRhD10buzKKqpRQRSSmbubkRSUgDS0QOEmJOWVLGIOik2s3VEB2DGtaqpXZwr+Rvpf10ff+yvb7dzufl/dJuqrs2SDwJUSlv6/6epzDGYwgyxDGFSUL0YMXq+XrbzhfD3nFT3ZmJgb1bQCIiRCCmKFE4BWb33bRCS2M6oTMaUhdwAgJwt0bWGTyypJxyShEIWDr/d//u+94rArn51r42fTfbTTuFPE9TkANjiFHY1fqm3dbSb62ZI1r98PwhpSetLcUxpti81LohkKkxIYpRIARC81ZVVbqi+33chxIyubgTuBMqQmt9WfsZID49fj/ECVEQrRUWeKh63vcLeFCzGCXFkYGZLFBEH/dWCIR9RIjIDbmTiOAwDYfIA5sFSJkOh3RgEF2McZjHUy1rDBzTeL2uf/THf4wS3l7PccgPD8+S8743ZlT31g3ALpeLqqnqw8PD8fgQY7her5fLeRxHAByGQfWOEbBxGkWk1hZjQsTWCqAdj0dVvd1up9MpxrBv+z0zl3O+XC6llHEchXjf93uGdFlu6/KuvaYkOY99L+6KiEKyti2GLJymwyMIc8zWe9tWpEi93a6vQMVBrXchf395u+3bPE1JJARa3l7AdZgP5mZmXe00HPdlp8AoASjspSD4EAdCBIRpmuKd0ZLi6XSMId0P4BwC0P2OQCQECIggEiXEIEJBILCISAwwpA7x1nRtbVtW091oI2xmDszAxoLrfltul5glpqH0HREdDJRa6bf9cr1+vWxvQDWAo8MQslBiFu5CLr17q87MiCZRgHpf0aylGFS1NwDCLGH02Wos4EZIFNlxGEJMjA78T/+nD70tvRfE5t5r20pZhnx8mD/k9CwUAwhjdPDrdt21GLKqcHRsHYEOx2dvkFLOYVDbq3aFTtiYiCi6sVlza4gMgObgxqaGQMQYOQEwoHJwclBrRd8QUpTTaf4oPCDavldtNqS5lrbsV/NOCFN8iJgQu3DsaMva0XKSkYUUNhFjAYnHIY6Z8il+GughuEx0TDocH4/TdDJDVR2nyYB+9etfk9Cf/8XfDMM4H45qaCZ5GM+X166ecl7W6/l8UfXT6fDhw3POo3n94YcfWFCEp3G6X3HuRZnj8QAAX1++HA5TrRXQ3N2sE8v1ckH0lNK6ba3W+yColIKISB6E3ay1GiKHSHXf93V7O19YsNWr6g5OkdL0+GFbVzfLOQJzqe7dWtnQ+vH5cS2lLKu1qlZTCOPhuF7O7rDdbtM4HIa0tQ4A1/0WOByengLHulZ1ZwmRowFoL73U3uq6LXvdzZqDiQQCDByZ2RyIAAiZxQj
JENwMgEMiEASgECilINDAi4JhoHFSycu6LOWt6Vr2pauFKDEkJGEO1+VtWa/j+JiGvO83gQYFtaXr8vnn60/n5TNBO4aZOQDCHT9oJqokRGDOzoxOiCzRUFuvte5EQiAJwyA5UEyc7jEk5EghhkBxCDHM/C/+7QO67vXWdHeHtvOYHz59/JOH+QOSMAm4uffd9mq9maq3NNA9uVT6mlMIw9QVjWvtCzMAVNMaMBKRm3lvzIJAxDsYWwcAQwBvHUDTEFgcvCEioG3tZgLEs1g4pBNHvJZl2Xd3yFPet9teXyl0wjjiya0bKJhpY4GILgiBY+YAjBbClCDnNKIF7Y4EA+bT+Nhqvdze67L1bmuvp9Op1/p//H//42Hgj59+OQ4TCwDRtuzIkMekpp8//9x7/fD84dtvfpXTZNbfz1/3vT4+PuU89N7v0MzfK8uJzHzdlhjF/PeRodfXlyEP7r6uSyu1t3b/IEzTpOrmjRzQybVVvS2XV2g2D2Nv6+12JicGL9u7EBLIcJqsW9mWvS+MJJAaELm3voPzMIwhiLauit1szoM5MFKvvdc6HaaUs6TY3ZZtPYikcRw/PNVq2ot5C0RqXlrR5tpt226ILUape1+3fa+3fpehEKQ0IBNhAxDnQMPkxE7ghK5WLpdSFjI3kI56vS1vSzm39/P2ee1vN9+hO7rcUxPiLMnPl/O6bM8ffjHmk9aOoCFCgnHr+8vttWyXcZwPh6eQxRHckXTf2qK/P5gJQFMzZIqRkOy6rg5yGg6RSd2ZQgox5SFwIIopHyNHIY6c+V/82fdE1LS32upev/3wq1//6h8c5g/z8BDjUGtb972Ucl2vwI3EqxdEjdHiMAAPFVqKubfS+tV6c1NANd8DgwC7VoOm955bc4WNuAkmJEM0EpfkAA0cGBwJVGnf/Xh6aqYkPI7PrfXl8uK+h0ApHvay7bUgAGhzs657LRujppjRYu8sHIOEEGIQxWhEvG19aztoe0rTuly/vL8zmSAxx2k+iMhvfvPXTDYfTqp2vrzebhfrlsYx55FCKqVeLudf/eoPv/vu+2EYhjy+vr1cb5echtPpJCLbtuWcEfF4fAghmHlr9Xq9PD09Et07Q1jKjsQp597q+XI21ZTSHYEfc6x7RcdIoZbdrNayvr+9ZLR1Obu3KJHAXXcCzykrkjYjkd7qerlEkTxmJy7abS9tW6Z5QBYHjDF2L4CEhL3X1gsT5pzVIechBIE79ElCDEFrdTAHFkEmWq9Xd88x3E1AvRsjEyOLUAgO3I3uUAIFBAIE876DKaGDdlDvddfenOJe6q3sq9X3/a1CNSitdq3YHIX87gULlETC+fa5m3/7/Ms5PqtayJzzccYHdVrrrdRynD8+nX7xMH1giM2bArj3ZkuHW8jEYuDgiB2qB1i2mmQc8xyYhV1EwpQDp0BDiGNOMTC3WvlP/6dfpTgihd41xekX3//y0/M3x8PjN88fH45Pt7V8eX+53i5lu3RtFgRZiJUEjYOkubbdVQFpaytoBW0GCq7iiKbdugNgTGNKCKywU6RACVFCwhAdoBOhqYEDIrKk1nYReXr8tncNIQZJ1+Va+iqIhOOQTqa83TazRaIiiurVVKE5SyAkIknpFOUpRjLSrWzM0hsc0kO0bB2Px2lI0zQ8pzwSwrKc9/VGRAiwLMvXrz8z8fPztzkNW92WZXl7f//22+8+ffx2GAZE6tre3t5CCNM03bNApZSc8zQd7jm5lNL7+/l3v/vdhw9P4zi6g7vX2og5pdR6bbUSMxOJyL7v45hrbYwYic3vXWplNCjV3az3GEJvFdR7tZyTAW7XGwaZUtZaL7d3sHZ8mEOe1+s5MCDA4XASSV1biDzkIaW47VspxRGfnz611rd1AYeiRkRk0EohATPT1mvdt+Xc6s5IRBhiEAmIICGkNM7zY0gBSZGRRRDZrC/XS1tW2Hcsm9WC6JKCmK/Lcr5er7elaOtMHaw3QPFe277V2pUBHMjczAG
R3cvL6+840tPjN2M6lbZSwpQGwhQ4akP0aZ6ejqePx9O3QziSCbi7r6WsXV1CyJmdkjoYgJrV3mNIQ0gJo3DGFByQAIYx5BhNO5rxf/tvf+nOMRzGOT89fDPmecjDPDw+PX1LELzjXvafXn4L0NDFG6mZmhOPjujegLqDE+WqveuGBrXsjM4EHQsw39kVh3kec0ZBswTgLCABhRHvbVkhwogoRN5pNaMPp2+DpF49Cnf16+1q5hwdIY7hWThtZe3Wc5QYhB1aOxu2GB+FnnI85GkOOat69bWWGi1FDtApUYqIMQ6AwQH25Xa7vDn0+XR4fvzW3GLkT58+xTj89NPPr6+vps4xPj1+yDkTkbuVsrtbSsPpdOy9L8siEmNM8zzfuXGqqqqXyzmlcDqdav1919Hdh5yZ8Hq9EmBK6R79t95M+74tIkbkdzaNMCECU3DVXtZWVkJijqU3xK51cWjklnJW93K7bteLpDCN47aXnAYCHIZ85zuQYJQYJOU89tJba9M8CEIej3kYt20H8A79Ll0jwrZvdd8BwFyXdeldgUCCBImECODWOxgysZp2M0aJLHQnH6WEIWKI4EBhDDFurZ5v58t6fd+WatrcOm7kfSt72SopUUiErG5uGimWtn99/Xkej8fjE1EstTJwDIKRhvEBIArLMEz5MGd8GFJqdlO1FKdutbp2UODIzqQoJN1q63WQYZABOhRoft+KgXpwdet153/zf/lHIgNTRu4hhDGcTvM3D6cnc399ebtcl252vX5l4kFyQCbKIpkAQxhjCA5eq7l762vpq4MTKklXbJuuEifhEbQ4Uog53mN7VAIDYVQzUwLnzHHOxxynlCLjYPWaMuf4TWvYekXU5fpStaWcmCn4kIcjS1rX6r7GQMIRiUt19OOQnplzylOICSiykzeFpq7NwQl6q/u+lm1rtWznty+t78Nhfv7w7XJd12WfpgMh//T5589fPo/TPEyD8PD49JxSBsCU4h0pllJura7rmtKAiPM831+AWquqqfZlucUkOWezOzOC/ytRC0IIX798CSEMw5BS0tqh91q3UhcE1961921bQDClWLel1mLa9n1JKYUk1gHQGKy1jv+1fMzo18vrPB85TkV1msfWyuFwbLW10sFRm+aUDoeDqu5lC4mFhjTmaZrdvXXdarFu2o0lMouIsFBMkUW0N3dPISOSAzAlQHLoTgqAIUQOgQJjCJACCDPLfcokwimnIUZAPm9lKU1JK7SuDV0DJnRG9ERM5OBKEvLA5fb+8+evHKdhGAJz7RoDAkDK03E+1VrRgIWGmM2rmtZWGXvIDoTdcGs7A6OiKSJT7armEkJ3Ne/OhIGbtr01Qyh15//5f//nh/HbIGOru1kPgd1xzMf3r2+v18tW/Ho777evOVIIkZhjIIkokYc0TjmkPCLYtl3I3YCa7SGCozbX7qbWCYVczAmQg0TEBqwpRCI2gG7OGBPkWXKUxJRjCK7m5vPwaLo4aWt7qV8JnGkgCOZsyjFFoqK9IVCKh5ifrY+lQB6mGCaRUTipO8FAQA5uJowUEIu6gx+GKTBFGT9++O50fHh/v1Urp8OjVliuy8vrlzTG08Mjc5im08Ppobae8+AO1+uFma/X27ou8zzf8aCn0+k+y9+2rbV2L1IN4+8rkffMDwAQkWqPMZrb5XwZxzHlYOr3zO2yXIYcrVUW6Fq2fQlEKUvZKrhq38w0hAjKLDHGSZ2WfXOrriUwRuFt7w/PH0lAXTEECpIkEEop1VRrLSFIzLFbr00HCZf1hgCn42mcRkNels3Vr+t6vV3N7c44IaAc0r0bGLKMh2MaD44MQIhJQiK5D7IR3a1UaB29qxarBg6ltWVZt65LL2tbO2wQkoGliDFEAhIKAyZCQCFiDpRj5Pfzl69vP0vEIT0KsxbLMTpBHNKYx/W6QtkNzYmL1tKvpb47eAqI1pzE3UHB3RScYyrFFu09EiE5uCIoem2tlaKu/K//1384xuM0jN1
g3W7q+9vbT5frzTy9vV0IrdSX6/qZkhsoYUSshCB8Ly/fy5xOHAUzc1JTJDcHRSPW0m/qznAAYKAeJAI4OiJAswZACAFUBFgQcooOaIAOfW9nFhAPvWwOa12uXXsQjnJEk96aeSeuMU7CA9NxyB9jnNd1a17zMFkFJrhDjtU8xCCctTdomvIwDkfq4kqHh8dhmrblpk4fP368nM/vb197v1XbWrU8HJmjxICAxBxCeHt7CZFvy/Wnn36UwCHEEORwOA7DQEREdLlcRGSapvf3t2ka7pm5e3vYzFRbCKGVejeduRohUGRCtN5qK4geyBEd0bHbtty01RhiV0dAVyBkxKYOzDHE7IzaWttLqz2nQWJS05xjktDB3ZzV7+1eNVXTtaxIEDj22jHwmIbL+aKqIcYogk4dtJfSu27rptpDEEJMKY55TNMwHz+MD99KjE6dI+U8hzRgCNZar13NSAQcdK/3Qf+tbi/X959fXn/7+vXrcl1LWVstuIJ4YhDBGCKhuDJSVA+mFZ2Ec0x5vZ3fXl+c0hAfY5zUGzP0fU8SDf318vV92zEaRzOvVZujdS3NKrv03pHcsXdoIbCIlFIMCQCAEaD5vcGy171X/rP/679A9BgjMrzfvp6v75/f/ubLyw+1rZfL19fzb7pdqlZH4wCOYI6BiV2JDSl0a2Yagzize59SRgYi6r0ZFKag5oQODqpujoImKN1s74vVFnEGT2qVArCwCGJbq+2b1q4bge/l1mqNHkrdumoQZOTesPembjHNh8NTiE/EUUIE5Je3171c3FrRhVBUfd13RnKqTffYYZLxTtI7HE+U4vvb6+38Mh9PZd9//vEH082pd/WPH3855vn15adpnubj/Pj08PnzZ2Iw67/73Q/btjCHp+fHeT4cDsfee865tXa7XaZpBoD397cYBRHvZ4xt2wAMEcu2D8PAgOuygGurDRkDsQiVtr++fmZ0cqh7jUICuC8XRw1BwB2cABxFEciaqtYYJaU45wFJWrd7E1+Yg7AAgrp1J4GYYspZ0d8v7633eZxN/bKt4ziHEMuylHXbtg0RCEH3LcdwmOdtX9Z9kSD35loaZ8kjcwAC8GZGxAkZACJ0N21uiu6g2lrpqmpk4AbQOxTXrW/XZVuWssmVgyRMLMDixGTsW3etVdEIXTuBMVO63m4/nX+LDvP0hMi1d0Ta9gUiuOr78tb7mgNEjsN0ZKSyF7PG2glUSCmaU++9poBRDJu5EEdOARGpF8XaDYn/l//7PyVEYu9I1+X69va72ta1nd+vv7vW162/VKs557rZtl7dnYh73zuqAqGptuKAYCBeQyDmFigH4SEiaDWLQTDGLkLYxEHMUMgR3LtrB2tgYF219ptD7b4qdu9dqbR+035rrVvrFChEKXW7j357sd5VUgBHIhnGMXMix0wTOb9evuz1uq/rXhVAxEyrumCWNNpoFGMYUore7P3t/f36hdymYbq+X1sr8zy20o+HD998892yLfPD4zefvmtWl8v5+v7+8dM3v/3px9oqAR6Ox9PpIYR8p1m11s7nc0o5RjHT/yoSh/tB/3K5NG05DG9v7yQyzVMrtbVuDlXXOYe6b0kCdXh7e2H0ul62dYlswojuvW4izMJuXtueh0yECArQ0SCl8TAfXVLTIoTgXlujGJ3QXD0yB2EK4zgDeNlube/ffPMNgf785QuRnOZZRPbe97Jp11abOUzHYxiSILCTd6dAIY0BRXVxU8RkzRA6jgOmDCzh955pJBEAdMDebS+rMU55rqXf6t7qulvtWN0xxwi0FDIDBGtt37ujMWnvpni3jBLS7f39d28/A+PD/ETGdwLVXtZdW/BWt8Wqo3vkIBKBOjESAzgwIZO7kzcBVAIMEilIjiFLQvW2b2AeEPhf/d/+BIHMwE1aXc+3L3u5llp6NcTOvOQgH46/+PbDn6yrf33/21t/681q9bWs67ZWrWrVoSDgPIwhC4CHEEOKIBgYY4AQNDEmyYiBgHv
fHbv74Aa1+l6LAmzlupRLtdKhOSEgEJKZV6sGAI6M0dHW7bwt27puwJ0DGXirNTIKRVcH8BAzM19u59fttu8F3VBIHUzllE9jmomiqtay7OutbI1RDtPctN2ub6eHOQ8DUfzuF38IANu25ihvL2/X263s7ePHj137+/u7m6eQjqejdjDze8vx9fV139fHx4c73PN+iWTm//pxKMttE6YQZFlvKSZCXJarBHb1UrdeayAGNK271g7MWnvZbkSACNaVGO89AVe33gE6IQ95NHc1l5DjNBKCIwQeEFFNk4T7IqL3JhLieHj6xfdEvF2u3fo8zinl3uq2rRJDDJJZmE17B2t13yLh8TjP8zGGMUoackQmJ2II9zg0iGDIJBEYMCaI2ZAdPcYMzda+ddRlL7elLrW8X8+17xgJgZ3AxEBAW7fuRTc1rkW1KnTovZupNQGQana93S7XN+SeI63rtbayl91KIQJT751AIhJ1rd53Ao4uaKJGTdWVHLgrFCVAHoYxxSGIeK91rYDetfM/+tefANi0k3PVer6+b3VprVqnyGEapueHb56Pv/7m6e//wS//vsj4tz/+dlvVAdZlXday77X31axMw3Ec8jTNxEQEKc05MeAlZ8pCQpwju6tabWV1R4XQO3XV++xIode2GRiImJv1FmN0tGq3pqqV1F1ha7WW1bd2a31VYwO1jr00h2LWyZ2AMx8UwqVsbW9au5oNwzFBJgjT45BCWLZL2RdrLUpCDNo6hfDx44fj46dS2+HhUTj95of/crl86dUI0d2n+UhB3r6+CBIATIcjswDgPB/MrPcK4ETEcld5o1o3s3EcS9n2feu9A7ianh5O27btpYzThOiXyyXHqNb3dW1lzZmu54t1Y4HTMGzredsWZGbmcRz1vlNhUa1gigwIlPJMJLXVqi3FZN2b1pSzSFjWjQnRXVvXDta7apmGh8PhQ9W97C0Fmae5aFv3rSzrvq1p4CmPwFRa7a062OFwPDw+K4C25oQhT8hBzUkCs3gQdHVUFcHh4NPA4wgUQDhkMYd1Wb+eX7+u7+/rtfTeuxclJeu0mxijeSuAwB5KpX3feuuGUDvUUrV3RYwSSlk/v30uViTIUq4AOhAByzAdJWW1zd1due8GBlpqLe4au9LetKnvrTUl4cBR7q+4qoEzGDRQ/vWf0hBnBAAK7rzt+7betBuqhNC+/fiL7z7+n+fhOaUUZDgNv6jVrssbEYBL175uRc1OUwiDRJlyyuMYHaM2FypIK8uas4TgQi0EUqdW1cG6YTeqvZiBu7EUAPeGaswkKQiQAZkQuKkbF7feFR1rhW2p+1q8gyuo3lrfl/1LEJrjHIm9OxNAw/f3q6MSq1o9zo/CGWkcs3jX9bIRyLbut9t1GKd5OpHkz19+XrdFJLy+fH0/v03T4dM334P77Xo7HI/LvpStpBifHh/VVdXGcRrHqda99/7w8FBrbX0XCczs7veYZ+9tXdfW2t1/CIAhhF61lGpdwQ1MxyG03i5vb2OKh4fjy+uL7gu0LQm13hEDIA3DWGptVYdpkBgMTM2I4zjOIhERet17sxCk9c1dx/k05GlZlyHnGKetbkHU1tK7TqfTMB3NoZdVcjw8PkHX1tpWys8//bjt2zjN4zha17qXptXBHRqGKY0TRsHAEAK4IxGlZB3AgJFUO4cASL0XxCqIiBSJOtq5rud1PW/NfVBmEajNt34LmQNH8OBoiNDatm513dq2bdZabbU1MDNADEFK2XuvMTEH1K4dXCQe5gOg7evqquhe16ZudW+9Wzcs1Xv31sydooQQQwzeWqsKbKTNqjf+o3/eA2WmAZQRqZuu663W4mpqu7D/wTd/+uHhV7313rzVXrflWq6t92mcAXDdW4rjlA9EIi5THCRmQGJU03dOilTVFiYmZAohp5PwtJVWStduQNR7b22V5Mi9Vo0sUUaBZAACjaGIBBVxj73W3tyctnXXBr1Bb9tW3679vWPzXhPEo8wctNe
yrXuxpmgpCgWOY/j0/Inq5N1BWSi+X25d23fffneYHxDw7f3ldz//5TQl4cGBHh8/DcNJ1d8vbzkNdd/2bXf0HEckWMrycDox853s0LuJBNWeUrrrM+4IrbvgFhFKKcuyxpjuEFxCcLd9v7l3tBruUo0ol8sNwQk69Gratm0nCikOQx616zCk3nrvjYRCSIQxxdx7uwewc8r7vjv0+TBve12W2zCOKaZaIaQA1pbLNQ9DJHt/+ULEpVe3VkohksM0xTQQSy/1tm/LtnpX79paRQIEE6Dp+SPFaNCJgCgAIQVxEiSu627rldpGrUAtDEruvTfsJgY55xAHMtkVlSKnEMYjYbrtK7MchiMhO3UG7Qqtw+XyUvYdNJIHrb2VbRzGwzxI9qWtrbbH41Mafq9VZqQgEJnXZa2tIIFqVG1Fb62rAbsGcGTBmHhK6X7tdnPr/Q7s4T/5l6EUiyFbd/BurqU2haIGWrnua87y3adfmxKAKbT315/flp9QLBARmrMSMuOYOU4BwMBAwBWhOHrvlVCRqyq5RUTI+WHKn0Smy+1ctLXeiToLtGqIXRBBmSU4GFp3q2hGQTqDoGIPptLNvRqqhWCC0Mw2uxERIVDrYmjurWpR3LR2aHGQnGnfvgaSx/kX1jQPGdEV2nEYEw5drbT1888/ENLj48fn50+IsK63rs3Nh0N2s23dhjGv6y4cQ5RhGmJMAPj49LAum0iotR6PhxBkWa4hRGautd6Rb6odAEppiDCOo7u7ubadUBl1vd28ahYJkZzg9ecvCVCx5mlyQ+EAgAQowq3veYj0e99JAEQEIAbz36sriNkMJMR5PrjD++vLhw/PhvR2/t3heBSZl+2Wj4fhcLhdv/Zal2XrpS/nt679eHjqTbf9dno8AfK6rWoNAYeUCIkpcYgiotq9dQQEYSAGjoDACKAFW7WygzdHIJl4PgiGttzW9epqCXjMydCrgfIAAXov6DDkwzRPgZMDA/XubV1tWwAxMrv1Ns6n54dPQ4oiHhNrbQjy8OH7ORwRhAF6W5mRUbZ9rbq5WbW9WUcSxkCGQBwCDOPIAYTykMcUHIHMXdH5H/+bb2t3BCInJkOCqsW0Oba97L15LQuJMvPb5a3Ubd0vS/spBmRHxc2hkAEhJ84i5IhNm3ZruqsVQgWoCEYcjvNTSiewMcfjfHhgHr++f9nLdchZkGtTAmVkYDdrwkGQiJ0luhMJCHVwri1t247uQ5DTGIaBhzHHELU7Wxhj3rez1lb7vumytaVaG4YcEUj7XmtIKecEoO5gpsv7pVx2ILrdLkzhD3716198/3e+fHn52x/+c+/bYT4+PpyI6Yff/PDdd99fb9eX97df/+Gv3ZEEm+rjw0PvWsqeU+bfu31Kt55TBMDee4zRzEorEkTVtbd79jVIKLdr2a4piYCcz6+380sv+8NxLtva9r3UbduXw3gaxhyitNZ774AmIikl4uh2RxX2+5KhaUHAex4bwGPMUaJ530uNUVB1ud0Ox8PT4enr+bK3/SGP1rT3nkMIQQx9r9vzw2Ngvl6vzBEIUQIhuXcHkDimEChGksDI5g4EiAxhcARk4pARmYTMtLcOABYycQbzy37d9r1rkUAxJnUpW9v6tZNO6TDwIU/HYTgEiW7afKGg7uCdYjx8fH7+xfe/mqfHECLHOA5DTulyeSvbNuYJzFrRXnTbrs1uLHttu+reejcHwoB+r8kbUouBOPGYD4MIsoE7Q5CU+J/+D98BcYpjDDOzhoRuVtoGSKBTLbqX/nb5adlvL+8/vLz+5aZfwHugFsSZUEKX0NHZkXdrxKitLst+P76BrWBmOiJE4eE4/eEQPxCZBBrkgwNt9WJK1hXIVF2C5wEoYpAh84CEmDgFEVQmagplt66NUQ4yTYI5hxCGIRxjmAljcAHv23Ir3XZfSm0sPATOAhLy1vrebnl67N0EhlKs7wVVS605D3/wB384jvMPv/mbv/mr/9/pMH3z8RNRCnH+L3/
xn2IYH56e//Jv/urv/b0/QeCvL69DyoeH03yYf/rd747HE5PcSfGt7eYeQzBTdxjH0dxVe601xqC1uzsSDWGAbtu6btuWJRwO09vb15eff5gCzPOpmtfthr0ScoghjNGZDJyQAYA4IiIw3JNFrezMTsSgGoQcWq21V2XBlHLZTdVSFgdflm2YhsBYr4vt4ADaGiIic23NrO3bcpxP5rDf1hhSTgNxADBhCndgyjhCmpyIBAgQnFwiSjcHxwCSlCjkg0gCq32/kSSZj+r+fr2e1+vSq5IYBWttraVIPw6fnvK3jNN8nAK7ILe+o/BxGhjaGPP33//y9Pgw5jFPMUqOPKaIHNvl7efz5WsIXsve2q3r2ut+VzwAORMAmJupAyGaq4RK0lmYxJkpBUFEAEY1/mf/+o8SDZFhCDlyTBHvTxuMUhiwn+q+rWu9refb8mWr54575E4CzQpRmQaexznlANq3fSsdiAfv1HsD2l21OeV8BOO9bCzhOH9jls2s9x4k9MaX24UQBAI6SMA09nkcAFibAyd3YvEooXpTU+2m3QxkEhqhRGIJE2GMfHyYfxEpg1lpftsWA2CnIBhHEhFGZPbrctt1Oc4nbMMQSHtRbYcUjsfnvdXbZb+efx6Oh48fv/vpdz9spV0ut7Vcv/vu43Ldpjx9+vDNf/6Lvzg9zPN8Oj0+ff7pcwrhu2+/e3t7vyfkCPl6W+fDwTq01o+nQ9kqKBLBHZ8/xKHt+zwls91V9+s1IA45RJay3y7Xt0D4cDoZ9o48TKN5F2ACigKE97OOBg58b2QhAcI9jddbL6XmNLvjsp5DkKenZ+3aWnOAIQ+B8fb+RubjNKKAoqOE0ntOgxAul6XUvdUSxzQdDnZfJiCGHCWEQVg40DAgCpGokFojA5fsYbLegJ3jZAbYC2AlTwxgpRACQ9gNd8aX9fq+ree6fqlLB5x4jDGN80MMAwGxUBDpdTO75cnSMOfxFIaQh5xTZnGhDI7b/k5oLNBaKbV13S+3l1L37lbaSggOoOZqWNuuauauWBp1RwCsLBJjgN97nV3V+L/9l98/PXyXU1KrEpkZw8BBInQa8/BweHRtt21pzWtlQ2FAMyMGiTykGITMAQCjBKG4llq6InFX3fbSAMCMkXM61GJ72URClIdt1WZFu+5lVe+AHHk8jBOSjSkQIRNFieYEICwMQF2LQzcNa+l73QIxE1GkNJ6CnHI8jnkaUgoYaqtbWXv1KMLEMQcWUi8o2qGJ6pRPHGP3xkJBAqott01VDSoTjePxr/7mL17fXp6evs1xHHPSrrfr7fvvf/n5y5dhTPPhMEynUvd1Xf/JP/4ntfTL9XY8nlIKtVZTnca5tSYi43Tcty0EykO2rtZ6TNysIbqrai+kfVlfwNV7SYG9t95bSBJl4iDkEqMQ3OvgfifP1b2W2kIId9oKk7gjYUgplVaut/M8jyGEZVnQ8XQ6bevaWyt1jSG4w21dzREluAoRifC6bYiQUt62ZSvrbV176zEOrRXEPuRDHmZhdcrhcEBhJHJkdEZkYIDMIvleE0Oo3hT3zbbFTKG3fnkz7U06kRPm3Vop+Hq5IsGUx240juOUp2s9e3cWQvBtfTe8xgmBwNFSTIRRKxJi13WvL01LSmlMc21Q9lpbXdZraQUwAKhp36u2pq1U1YoEgOTdOlYKioTgbA26OqC0zvwP//vvnp+//+7T32Ecm14BIGAcc0qJU5pP0+NxOvZme2m9Y6mVICEIIuUhzVMSQVWo1RHZQUgA3XpTA29OTokRu3ZAYcl72dZ1Fw61luV2U3W3Rl5CYGLmSGngyEPOIswIEuJo0M2K9tbca1mYFaCVvSKgiKtpiqNQTjEHZuwWxNXrtlT1LkxBJESiyGYVEKbhgTo23VKaycfAgtZKLQRGALXvIcTPn39a1ssf/OpPvvn0i9vl68vXr8zh06dv1q0EkdPpuJV9PAza7O/88d8Bpx9/+p1I+PTp075vZV8R6XA4xZg
AnFjADcCH4YBO2ru7phBa61HEVRH99fWn3sttvQzDEFIuvQGQGR7mIUjqvZhqCJGQmcM0zEyh1WZu94AdIpnZtq1Ny2Geay2t6eFwtG7n81sIHGNclgWJ1r0cTw8xpt6qtZKGbKbLtrZeLu/nWkoMSWvby62tW287MaSUnCzlkIYDxyRhhnE07bhXSBkPB/MOgJRnbH5fvxo5CiAjWF+Wde/lfb9ebq/b7QocYxqqw3LrxRViIiPmMAxj29Z934OICHXTps15c145KgD01oW1tovBBXkPkXIWd2UkR2Hmve57NeJQa2v3BmrXbd/dBCAQkAM5AiOjM/Pv/997VSDkf/5v//6cnp8fvxvTg4Pv+2K+Cevp9Bz5lOM0DhlwvK2L4bVDI4eUJAQhInQHYJHEnrypO3dsRCKMHIwkRg8oU++4t6Xaba/7dVlu29l8K+2ybmfTxurMEVkUNAlJYAN2S109SCIMbkZs1rt7BTQUUsPeEcCwt9pbCAkBhsTuvZat176um0LNOaU8kIhECSGZA8cZvKr1OKaY5oGnMSdE2ta6rmcE/Pr1xbT+4hd/+OnjL76+/vib3/7l6fHw8PhhWZfe/HA4AuAwHvI0PB6OxPTl5evlcv7lL39Zyr4sy5AGIpCQRAQAt30nhBjDneQE6K1UYc4puakQ7vt+u1xykt46Y5hPx23rSDEE79qmaSaEUndwiOH3shY3jDkiQu2tqxEiMyF566XsdZ5O7qjdUhqEw+VyuSNvm5qEhMA5JjMz7QomgWttrZQgsi7L7XoLMZt1LesdRt261roS0eHwHIYRRJCR1L1Xgo5Z4PGjtXAHN5kZRSR3VO8kkmYM46a+up7Xfeu0IijHW63v26pOQgLObpCyNK1b2bU7C8hATSt4Z7Km4K051G6r2d61AzozOythRfYYxiC5m7k7gDtQ0Q1V0bxV3HfrZiQIhEwI3khEQmaIfa+q3RT5f/x3fzLnj+hExL336/7F4KrexzgN44TKKU4hjute1vaGoaMZmcYg6NIL9YagHFFcBQ061Y4QKTIRAQkiGFsPvffWb6XXve9bv5a296Zlv2nfkQVAmCRxIAOUEVwAyaB222MM5g2AhymmMDKmJFkwLGXftQFJa4tZVbNmXrWo9a64lc1aHcdDSqecMhEadJOGQUWCG3dHThIo9NL6vgH01vv57S1y/Oab72IYz+f3l5cfP3z8dp4e397Pl9v1OD+cjk8GPabpcHj67W//Wl1fX78Ow3A6Hd7f30/HB0DqratXBnKwbV0ZKYXYeydxb92tpyCmpdWt7HuK8e3r15zoYX7Y1xYjg4GrT2NsTcHZUYXZze/CbQeVFFgYhM3d7iJR7yKUU+rN3UFE9n3btj3nIaWhNYsxWO/buiIAknQFRdlrV/PT8WBVa6kxZiTc2y4xCYuZmyET5DgmyQIUUobH5x6jgyqzoFgzm0+cJ/SOAdrbhdqGblqb3a61lPT0YTycRkUzK5F35ZROzsPaLk13gmjKtS9OhYOVsnYHI1C91fZmoNrYK/Ve3FvvaiaAsavV1gBsHFKMYwiht/sIuCEC3V3Oe4VqURk7ABAKEYJJkQghYIgRzFqtXdt1u/F/8y8Px/nZVYLkrZ1f3n8A2IccDXoek+mIkEkcHLsWAGASVwMjBHJHcyKIosKQgwQIqlZRzQGBoys4ILowSUzJAYgIzM3AuiJUBhVmBwA0CQFMWlcAdVMzV+tESqwGLhiCDCnmlNKYhiABgUBVva7boobd09771lZz32sx6k8fPohEcMsRiLz7RtxjDAh+u65gkHnAJpnjvm/b2jLzmAcwLLXvpUzjLHG4Xc/LuhLSN998L0HWbR2GfD6/E+Pp8eHl5eXh4bQst2+++SaG/PXrSwyB5K64bgg+5XFIedmvDAZmoIbk2lurO5o9TMe3t6+1Ls9Pj6W2urdhnACs9eruDr7v650AVcoWUyZiCoJMKSVCBkRwW9frul2HYch5KqXe1w4Avm0
7AEiUFOPxeGitvb6+Xm/XPORtXeu6aO+APh0PXXXblpiiEJdamQIRAWMMIhJiDMwQY7Zh4ocHYvFqDTsFoS6/t3UCooG9vXnrHEGw7ev2/vK57WfQ5uSNmHFea40SxyHc1tfee7Nlh7cGi8ECvCpsDq21um+tN7cK662sa6lFe6XWqfVmZq2b0JDCyDIigHkh0tqLWQNDBkdA7GpamHDIiQIobRRsGFKOLIiotK71fF325vztP5Dj/MgcWq/d19fzZ8BLiqDutbFwvp4vtRWiFrgKUBAMLGZdrahWRJ/inChHBE6hWDPviK1qJRQoYdM9pzHnKcdjknGIM/W70J0JiEECMkA1qkjd2N2w1lb21nTvtXmnMR845N4bEYkEQYoMMcTEQQK4S+APKT2Yc9dQum5tVwALUVLMeWR3ogbUzDa3zcndQatqU6aUOYHWUppAADN3kBgdcZwO0zBdrq+Xy1cR+uNf/0mM6Xc/fc7jaRhHM/vjX//6/HZhljwM83z4+Pzh5y+fhSUPg7buvW/LMqQ0jxOg3g+s+7oQuhC69bsOSHttWpil9q613sF3IswxCBOzANjtuozD6GpddZ4Prtp6124pJQIA8CHHVmutNcXxHlMjohhTjKHU7Xo9l1K76jRN7v7+9lL3JQki2bYu19tSyjaOiQCW6xUMcs4IIFGIHTFyEkcDAw5IHAnI1xtrhXsHEioqdmY9LxwDJQR3K404h8dHInp7e3lfrte+N+ghHXTXaleVfbm+gC81vBhfEFvRc+lvAJu1tZTetlBvvK5627BsUDaoBddbqXu/3WqS4zg8WwdCQ1Lmilxbr6VuagCEYiCIAOrYIXQIFaQLGVOLRK6+1X7b1rXYPHzPv/qH2W2PnJf1fanvTfdWX4VTTuOyrdt63bey1ws6uLF7D2EgJiC/b30BMQUZAgPLrovZZqoOJAhaSzV3yDmNYx5D4BRzwJyHgcAQegByt04dmMjM1IkQWLR7a7psl77vgUOQMORhiHkrqtgpKQUwVQREiMfD8yl/CwbdXD0Urc1WkVygI7RBEiiYmXprvqurW1VoyLHZXrUEOblKZMGq4H46PUVJKad8SOtyfv38ZRiffvmLPzgen19e31Hk46dPaUp5mLdlqbUfT08p5I8fPvzudz+q6dPz0205awNh1t6GManWXss4HZftFpgIupZdiIi8tWVdLmYlxel2XtwrgWrbAT3FcDw8tdZDYHQEw3GMe9l/327p3bQb3BcOtq0lp0lirqW5G7GN09i7AWDOsbV2PZ9vt4WIjsfTOEzLuoDjOMxEpHW32pfzdbku67Yq9Noa3AscQCych4OEPMQU8kGiOAJ37V35eDQWW6/etxCOOAxwvXEzEPYhGmeSKR7mIaf3y+uyb7XV635tDEi92OvW3jd9W/yl+KJta+Vca9cOvWBbWyvtei2v7/u6t7JrV+3VrJo5l821Y4qJKQAW85tDq2Up/QJAhKwtkylZh8pAySOgdOgFDa1Lb1y7VrOOHOXh+0//gP/eP3u2vpPUYm2vizASBwWTmJj9dlmX29p0V23oPaYpygCMTuTIHEBSFBkEySV2atve72tjdOulmpJCTDHMQ84hMkYiijikMNa+VVtAqmkMcZAwmLlaQ+jAaF5bJYacZDSlO5UJWLTvrV+VOiMIEIVT5mN3dLhvKBeFKwVF1kgVqQGjw77Xt+q1mjqZ0252A9mL73UvI41RMphHl9M8T6cnRGKi3vjLzz9KkO++/T7E4XK5NW3PH5/GaQIIAFa2NY7TfHwY5/z25Yt1/e4X3++Xa9nqaZ5f317n+ZBTeD+/IPE8H1W3yIjm1/ev6C2GQZj2968GcJwftuWmvZBQszpNQytWyz5N47osDw8P3fpt3XLKhE5AXbuqau+MnFOuta63JUq82+TMVEjyMPTet213N0DU3rZtJYJhzDlkdHxbLm2vcxpqq4AeQ9jLzsTu2mtliQElpcxEIeec5xwDjTOKuDDyiDS
RsEMkpq4dchDtsCyASIFZxExpLzKOx8OpNV2aOfcXfd/bbd9fipaX7fPtcvZWtlLeL8u2mLUDaNbuvWvr/bzv+2rgdMfoI0VmCpFNu7uhooMirF1r7Zvb1YmrUwKraqVJ54AhNrKlGvRWCUwGg+DKASamUXgcwyP/o3/2S1BF0jyF1joAjPnR3XZbQ3zozdfl0rqhEzGFcGQ+9g4kSATGRBLHNImFZiWkrJj2ukEAdDTFrtEpklCWiADE0QyYAvhoTqW/qoLIgBBTeEhpROQGbq6MrooOnOOYRKKm1rx39gbey152N4gclAJYZxJERsSOV6ebmgLuFB3dyOJSz7f91a113dR7CAy0OhTgTpa4jwIYIRzG45w/dUO32ns7XxYWzynelr13jTE8Pj4lmW636zxFwdBaH3JOY9jXFR0eP33Q1i/v7w/zuNd1XW5Pzw9IsO/b08PjNB+W9V1bBevL5SzsKWU0ur7/gJHTkLfblVRDGs287DtGKWUNgZmp1sLErVdwDxLNDIlSSuiwrkspZZrGYRy6OjOKBHBat33br8OQGMnUsiQm9N72/ebWp8MMTExyWdaltUjUe/PAmaMbdmvunmPOQ+Ygd4JLSiOHiDkjshlRDk6dJKII5InC5O83Lxe/o+LKBl2hN3Iry9LAG0BptbEv29vn9x8dKnNsFdet3q79cm2vr+deOcZHkeSq6KhQb/XCKAIsgvdpz12pACDm5ozdSrd9rbdWqzkWBzMD1w66gzdGpBagB+E4DnNOmVJiEUlugTEMkZmd/7t/9QcpsHAOSSTGIGMMLAHNmqsC+V57bUg0CiUEiVFEotVG1CUEcxBEQjSlbp5zcrR1PROouzok5AAu2q3U8506D+4GYLD2rog0jiPjg3ZiCkRJeGAiVe/WSn8H0ofpFGBC462Vva5dda+91R3Q3IEZXMNWXtby1g0Ox+nx8SQS9l7ExQ1L8+aO1AC7mhk4UycyIBBO3obMHx6nD1OetPfz+afr8rovN7cK0LdtT3l+OD0dT4co+Xo5jzk+Hj/u21tvdZxnA87MIlK1bpcruqaceitBJMbYe0FvHz98WJfry5efj4exlW1d3g6HKJK3tWzrl9u+Mcm2LndVLiC9n18P0+xutZRpmlSViYc8xJTMlJiPx4Pq3WYXr9fr9XqNMcYofv8FAIiIKexlzylLDHdJ831wva+7de1FyeHx4WBt124I4Xa+dW+UWDrGnB2USUKMd8FrGDLliYXNzFEgToys1EwYgTAFar3XG0RgCQaAyCjsqtf19ecvvzuv7xi4mP98ffv59W8YcEgH11g2//zj8vq2dQ05PQ/8kGIQwsip9bJuZ1UNAZGIBWJmEWGkwBBIAqNBbeWirZhj62KIjlBKJUTANoXwcc7fPz1+/3T67mF+nE5TjoKMxGrc0RS2Us78p//TL4fhUSg4IIkhakwSQyDvpd1UoTZuWoko8EAwppiQqPfVbRdhhlRb67gjRnQ2pZRGQa+lGGg1IwfhwVyX7e22LSwBlEq5qu4ORkTT8HgYPuidt23CADmNiGOtvteLt2bmDkyARFjB1ma91e5r1RVaQ4zbuuzbAgRjPk3D06en7x5PnzKe3LC2JUgmj9frrbYeOLh3AiAMgCZBMx8Tfcox1tLO71/3/cwcCLC2DZw+ffr+8fmDGrr55XI+noaPHz9+/fn98+ff5GGI82HMg+3b2/ur9pZDiJEAgADd++12US3H+XC5vC/LNYV4PKTPP/8EVgA8pbnUzawTMBqZK5CLCCIMQxJARFI1+L28kkiYmCTGfV1rrSGEbd8khOPpVGp9+fp1bYUBg2CrxRyGYRSJiCQcQhREUHdTHMa57j2FgL1vt/fjYYrj3BW96bZdt+Ua3BGN3Ls2U43pXrKPPE4s7BAo3gGdAFPi8ZNxaOsbIYiM6IDCLpl5RNaqbbutL5e3L7evm5bOoTuetxu6xBAIad3aT68v13IpBofpwykdIzIJITE4tFqaqUs
gTshCjEw4hDykaYiHO6SXQFvve28da3dV7dHTINMvnk//6Ntf/oOP3/zhN0/ff5yP0yASgFid99bX0q7r7XJ525ad/9m/+nUOcRgZCNUdMTBDiIzs+9721bQFV+u9pRyGdDQdCQjctSsRO4ZSaq2ViGMY3RiMcjowTbVp8xokOZM5tNrWpSEAUyi11taIPVASpjGNkU97W4yaQDYLzIfj/CFL7rXXfqXYHaF521rprboV7V2VGFLZilltzdxknh+n9Aw6aOspTqfphEraNvC2lq3WIk5qzhQYg/kegqV4UEUGrL20CsfjKcRxW/fA6cPHb2IaX1/fe28pj6fj8cPHD+/vL3/11/9Je/j2V3+c53x9/XJ5/ZqHeJynGNhUJVIQup7PrdXDfAgs18s1Rnl6fti329evX4JEV09xaLUGRlNjCSFKYCbB6/X8eHogROIghMys2vdtM7daNwBjhNvtaqaBAyDGPB5PD0B8ud5u13eEfphPUdK27+7uiExs6mYQ01BaX7bt+ePTMEYl37dy2RcCR1cOGJkRqLWtls21IiGjhJxTTAOlOAZDonjQmEDcGIwD0YgYuFZkUgciNEQKSdEMMUqMks+3y4+XL1fXDoKQx/TL4/AQjFCDg3x5e72Va1VHpG9PzwjQrKurUem9IqbIc+SQIkX2QCHRmDAPlEgoyRzDwQDW/ebehHn09BAffjE+/tHp+MvHh4eHw3QISdy7X+u2btuylrfl9na77Ldr39bSnP/7/+E7JiBKDaw7pxSQgFkcuDUvtZs5ALoCow05BQpjyJFj2ataATNyol57vUaRlAbiCKgxpphGVKxWJAAAaTdTQpd5PCny5fZlb7dpOsYQGTnKw9rflvqaQ4hyDJKYaBpP8/xk3BVa861502bbWm7rufWdMTpBK7s238t1vW1g6TA9gQsDpzCcxoenw1MMtK5ft/1MAOjQq+yLIpaUmGmIYQAiayh8mPMhYOjdjvP04fFD6/b15XOK8fH0+PD4cDg+qPoPP/zlXtY/+ZN/FObxtz/+ELQfxowBh5x7rdYqEZa9rNtyt2Yw8Xa9TkOYhvibv/7L7bY+PnwwMwcve2l9WcoSQ5imad82Znp7f+vWRcK2rUG4tcKEiLCvV0bw1nIIzMHccx4ohLVVdZumaRzmVvfr9eKAKecUUtd+3ZbuykympmrDMH79+vV2vRALx5jG47bst5cvpVzdIQ/DMI4h5ZCjs1CMUx6DRAnCQjGzoSF0SCPPHyHPXr2vV/IGIk4s7N6679X3K0fEOJt2IXo+HNMwdyA3vu1UK378MM95aJsW7V/f367bTpK0tUABQZdyM7BitWgVDkNAIQtSY9RAjK7ujZljHEKYI08xBWGWhgeWE8ePFB9zGLMQE1O3fb+8rS+X6+u1/PT6+npdbktZ161vJSLMY+I//TefiCOAGmJt4NCFA6ALC7mXUtZ9CeIpZXDLIYQYokmIVvray4bYEYl1CdS6vXMKIkMKkRNNeR5Ydl3VvWt3MMZEPqR0EMnrfn6//MQSDuM3BCFGZ8FSbm59yg9RsjsBcBrmcRrVWutn1JaBvcCyXl19ig8SUozkTqq91r6sm5kJp3mcT8NzrRvQFgR7uem27qtfa0ULrbnEPg3DEOc05CAHwhT9lOh++ovotrzfLpfbw8PDhw8fmIQZUh7/5m/+i1n7g1/9XWH+6etPD6eHbz588+Xrz4BAjgRQtW3bdjufp2F0MHez3pbL++PDVMt2fj0z8zDk3krMUnv78vKzg8/j3A3cvZey7ZshWq3q1sp9Xa/DOA7DqGoI1NViTikOyBxidKJa9/V2S4KfPnzK41S1uXmOGQkVrPd6T/C32pblNk9TL21d1nZbiCCgr/vSzUHd3ADVMAzDOD88xukgMQ05pyGnw0TxxJysLiSDD0dglCAAytet9xWtowTPAxGAVlgMvZnV25evDeDDd7+Sji/vZ+BxrxvYLUkA9b3vr+eX99uCxO593zezvuz
vVVvrXlthVpZdqRmQIxtvHWtzNuJ5OpEkr0joYxoPIU9QHxJGh+7QGmz7dlvO19v6u9fyV1/Ov/n68rvl/bwt+7qw+8MQn2Z6PgT+J//jJwdClK6goNbdYZUooJWMBSeySMDCwcmISThrgaVeFd16V9uMLAgFEqXSnSXkEJhjIBw/HB6m0+m22m29qnXhw5QPMQ5I2Frdii3rMk+TQGLyIBQT1r7XfjO3yA8s3H0PRGOeUFO/XEO9Jtgj0hieh/RxGKaUc+u91BUAW8HLednrxbQAgekSya2X8/nrttu2Y9nNPQCASApBUsZxPHA8MQZUhh60921f12Uh89PTcRgm7a6qh8N8vV7B++Fw3Ld+uVxyzH/yd//4z//yPyHR88NT2/fb5brUcr68JAnTmK7n92FI1/N1nkYRvG2rdX94eFRYupaUhpTp/L4ETixB1efDQa3X1pLEthdmRsQQooRoiCEOKY85DxBE3U3VEQGAmbRVUrte37Z9m44P43QspfRazL1U9WL7urVWgQCJVDUwai+9ln29FC9I0vcKZCEKUuCcBZGR5vlRjqccx2E8pvmIRGbG89Hi7CmZdacsMrgMTgZ1pbpRmPvxieYjNW9vXy/vb+fLzz+cX3+6XN5vb6+X7X3Hbd1TDDlMt9v72tdLebuuF0MFMMLe2n7bt3W9oGnX5l6dSu/dzen3mR5256baYZ+HmTg4igCK7UwlEwLO696v1/Xrevntefndef3hdX9Z9lttG7SOHbwPSY6Z52TTgfmf/MtPtdXmoL17Y6POdBGyphEZj+lw4Ec23Pve4WJQGFIDqR1dVSL3uAO2CQnRAb17RVJwDzCN6XmYnj89fBckfz1/WcuWcBYaOVII0ZRrvS37167hOE91WxiRCQDOdX8rZdEOxE6o2inw/Dh9nyLfzj+XyxJgmA5P4/x8mB7TkJf+ftk3LZ2qtKplf7vcLqMEDqnp2nQv/bq1i7aoxVx3EgpIg5ziOKC0QCzUte1dQ9MGrhmG4+mIUfatuLcPH79Bx7fXV1N4ef/aepnG09/9+/+n3/zmN9u2/vrXf+TqX19+eru8EMjjw/M4ZLUaorhbDPzwcKx76a3GmHJO21YIhBDNTIKcTh+3bTEvzCBpWPcGyNNhIufulqaBhFNOpVZiliBxHIlnBUhZtGsvpd6rwyED07rfwH0axt765fze1jOpMiAilb0exymm2LyrOQfspr1pihGZmwLFFIIMEkOKTVutS2bO8zGMQ8wTMNZ9VxZ5+pYxkFfsHWjUFCUJhUmJ8PaOAJ4e4ZCY8/L1d//vz3/ZHP7ib/7qz3/716/L21/9+Nd/+/oXKBpBz7ev13q7XM7n7SsFZzG30ro5MBMBsbm0rm7sqODGEAyidhT2kPauG6cgMZSylnYTKOKbk+3At2LvdXtZy9vuG7KRABBFHwKTdnAg53nCeYQhMv83//xbU8VO4qh+T5VVC5VRxzQO6TjmmMf5srxv9Yyubd96NQPaew2RUuhgHchqp2JguBlQs1ChPZ2eRzliG0Kc1/6+rD+jUsBBREKUIAOAubVtvRCLQyzt5rSXfgYCbf769kNv13F6DDy5o3h4HJ9zPl5v+9uXzxLz/OHT4XAMKRbdlvXSqt4uTZSQgyORYZPrz9e/ei9/C7wBiZsBAAUA3gNhTk8cCHExqG5dMWN3dkuSx/xgqutyCxIAadn31/P7vm2tlzxO4zR9/PBcavvy5cuvfvWH6Nha+fL5s5l9+PBpmkcOzATuEEI4no5RQqu1lDqO47YviO6gx9OhVAtxSjnt+x5DMndDckcJnEJUI4qxW2fh1nt3W5faDaBDtyaB0KE37aW0XhARYzpMxxwHNwXiFAe7m+7au2uFXgnU3CXGGAckZqAYsoQQQkg5xTh00xAI0FPOOQ+glVynYYhpRLJumKZjVyBkGh68me8XFAIJpAqBKH3bsMnrjwRNc+DhMH76fnl9+e3rb0Mefvrp5998+e1r/W3d32p
7b/t2Wc5rub29f73c3kGcTFk6kg1hGNLASOQsgRMnwCgysiRVtI4SO+Y1hB2gmLdl20urGN3YKvFt86XjTfuuCiQxhZCkgxo0EXcopntKMk7iYh4D/+N/8YduLapnlpBBWTfdPTaC18D5MHxzPDyNw4OBXK8vte297rWqORoZoBLsUdyid2/doHl1ogZY2sLuh/y8lV513cq72i1KJ0IHtk7mIIRCZrCu+6KGe7uZNrW27XvrtdVyuX4ptqfxFPmBANFTnj/k6fl6296W6/j0MByGIGHZlsv5a9l9X0y7pjTEey9fnERab7V0EmExSZApm1fmFIeHICMjGtduzU3ERChFnrXq7fI+5gHA13VrvYWYkOneN398fF62xQxExMzmabhe3n766beHw+njxw8AkFLc9v308OiAEiIgvLy9DimYOQLV1pB8mo+qqN3NKhEJyzgdendViyG0XiUPMWWkYAbmmOKQ8mBG75dbt45ErcG2NjBnohDEQ9rXmiTkgbvVUpsgxxjM97JuwjyOsyMUbcgMDqrIRHdLOXOkwIjgShQCUhjzlPMhpIkopRCpd2tKw4mPs3mDOFJ6Aqb99iUQY5x63x1ziJ/awLSf+esZasEkz9NweV0/v5x/u/z44+vfWq/TIIrtvN7Ot+22l+uyNzAkNPQ4DnkcU47ESCDCFFIQdCYchjFLtAZW3LlBXKkbgbnXrqKGCtUZO4S941ZrV+u9a1NzMMDWratJQCYlghAFAyhhA+R/9Wd/ShAzg9sC4pJduRgoWjez58dvDsM3gfOQvlnWy/n8BTEChiBjYEEw1cVRMTCG2m1X6BZAQoCqddubwqVd9/altjfvhg7V1ByhgzsgV6DCqFX3ZSutNu3mTtqg9WaOzeH1+vNW9uPx05BH5KE3VROZnov15ls+TUnCvtzOb1+u5259aBWixxyHGMZEhzEdcpQGa/MVEDmwhd7J8vwhpw+P8u0kc6VVcQdXLwI4gXJdyjQMwrSui1l/eHqUkNyRkB8eTj/++OMvf/HrZb28vL18++03puU//+f/lzB/8+n7cZ5TSrdlzcOU0nC53ESkq7pDENr2nUVUjTmqqgjv+8KBwDHEGGMsZY8x1FIcIMfMHIRjkJTSIHlAiSjRTEhoXfcYBgNs6o6kbs7NzPZtN1dC1tprK+otxjANxxiGNJ5kGIjE1AJ609JbBfDr9WZdY0zo7AoKFoUIgTiGPKaUgohpR2SQSMQcR8uT0cwyRrF++9kghvTB/IpmXSaQYD9/1vef/fZWCRzSbVk/v/30+v7iyCLoDNV8r9CaEM/Hw8d5+HCcnw/D83F+fjh+OA0fxvBhGr57PH47TkdBygCBwLu1rntrtbv15tar7yA4pDGERDyaQ6t1WW51a702VayGACgcRJjIJXJKEdEBPOUA7vxn/+u/BpcxZiQ1aiiMiATaHQDKmI7Pxz9AEubBoJxvn1VVJJFHMC9l630lQgQ0JCBrvjJjDpM32kp9W5Z1vSy3rwhNOPXGW3GSNA4TsQPt3crp9OHh+KFs2qqiG7MQRG1UezcXM9n2t729UeQUDqyx7Nvelig9Mk2nRw6xb+16ud7ed2tkzcraneV0OOQwubUOm4bWSQEcoVa7AfJ4+MUUjyOJxNCtVH9nbGyzawyOSRJz2tfltlzm+SAxb9seghzm429++NvH54fT8eE//H/+H3/06z9097/567/odfvu2+8fTk9BpPW2rus4jr1rCOHe0wgxqakBkAiQxJi1N/Bquud0J/R3IEdwInZ3lsQckPnuj+AYQhqaQjdCkWmY91vpvc+ncdlWjpGIXZvW5mbMpK25KREoWuSU8swxdwdidgdC6GZm3u8fx17W5X25vqEX9B1doWtgAm9gNRCxREoZ5lmGERF7iMIjEwMycjbX/fYlRCbeHQsbUQgwpde3L9fX143hdbue9+u6rUutnAJjJgzIMcpxiM/T9GHKz6f0YYyn0/R0mk5zfs78IYXnYXg+jN8
c5uPTeJiD9FpbK61ZbdaaBXK1FehGHHIYxziJZyt+u71ty3Zb6tasEdbeA8OYJWbi6DFyIARTJhBG08b/7n/7X2qtpTQnvHfHHIDdOnaEbs2n+SHG3PqquC7lrfRLDAEsOsSm1bomyQDRkAGwt5o4z/kJOXbH3g1rDSIx5t4bIBgwIE3jYFa6bQ8Pj9N4nIf5+fh8u12ulxUhCAZG3mtpamM+Bkmt19t20d9vZxjqDfp+fHj+7le/TjTtCp9//nJ5vUYUM1vavu015XCYM7F2K7VqL0qoDfba1LErxtPwgQmr72jv0G4iMdIj+uCmAw+t9rusSUI6n7cYJUZ6+fruSH/3T/74P/wf/89ffvcNOPzmb/82CI3DeBgPLBERz5fznYw7zwciRMR+J8BRRMJhnNxs2/dxHAE7mOZ46F1bb2YuEoSjcEQiY1SkPM4KwJSEY+/uZsM4aOu9tt/+8Jv5OAKTto7ovVHf9hgIAUWktaKq83RC5NJ7SMmg7/uaYxAJtfW611Jul8trWXZTc2/7dt3XHXtHU2u1tgaAzOTmLBLnkzNhGPowIrGX1fsNDTHPkgSXi2OAEBgNsIE8yTB8ffmq3JYK/+Ev/vw//vY/llKZghEG5ma9NxAaY44MoNpJGCgys6nV7iGmkGitq+01YbCua7nWWra9llaQOwfmcIVUKYxEo3qr1cruy/V827frdueSODowQxwoJk8DBSEhZAADM7faKv/P/+7fgNvb7XzdLmAOrECLNiPMCK6wAGgacu+11OW6vtS+BmSwXCu6iTVUo8Cju7eKASN7FpqIU2BmIFITlphj78W9IgMFT7kiNbV6Ok3CAR2mMTmW1hS8l9IMST1oxzyknKcpJfR+u7wsdXFA6rvr/ukXv/746VeoVNz+/G///Pz2HjmpWzcEAO1VuAEUkW7atmVXr+aB4JDioLoS7sBYvZZ+q17Qx3E4ARAiCzF06K0Ipe6OBimIaVvW7dtvv7+cz2o1x/DbH368S2uYxIEQgUVaa/X/z9N/NcmyLfmdmKslQmRmib2PuqovWgEgAcOMkWM0Gh/4dfkZSL6QD4MZ4wxtBmADaHSjrzxnq9pVKUIs4e58yIPJx6gKs6rIFS7//vNah2EMISBiirGrhhBVLeXBzZZ1a11Pjw/uRkwSsgPc1/HGmENMZmCuABjzEOLgjkCCLObOQqWU3vuyrR8+fko5PR6Prvb2dkaiGNjV8jCXbvM4mjkzA6AjgrsQufVSas4DBX55+Xo9nwmBELZ1DyGO42hu3hW9mSuQEIeUUkoBEAhA2E1FwhPEaH2x6wu0jZOhBEzB3cwadqXe0Ip0Lm3788ePX67Ll+XL6/VLX9tNb0CAoKWv2hEM1apa7163ui9LtW5mdds3U5O7qTZLgPt+vW0vZjs4mKvRHqPGoMzGgsRoFVrFrdRay76VVorV7l0JAZgo0pCRBIksEPdWt720Dq2p7O0LYGm2XJZXS8xONIi3CsUoBJ76ZTvT199lft62q0OlAAi76+ZgqMlNWte9VisF4K6U8ltfxvzo7gxgWRCRwMGwNW28hAgGs5MiwuX653fPf82aartOE/72L78D4x9//PK2XA22XWtq+TilyBxlvur5x0//+Bq/vE+nSDHHzE6IzN0UFnn01gqhj4oYOae6rF9ZjhakmTl5b5BocnLmYT7E3m9f93+sq0Pj4TAxx618CThyeOjaGFHB3WoHIceyeu/Gwuvttm3bcUqfP30NKYlQ0/10+tbMzH3ZVgR8fHza993df/3LX+37zsjjOJWyM1Hv7kAxDU27AhJHJ08pEGJXpxj3vRqoCCNEkaRmIQQiaVXByd161XXf19o8xG3Xst/nhFKxPk9TXZcOziLb3oc8GDiaEYAw916RRUv5859+nB+Oh8Ph+vZxuV6fju+/eX+63M4O9Pj0jhyYmZmRhBEFwBGW1ritwWYI1L98xIdJjrG30Za3tpZw+MZowLBT2dUWMKBL+fTx4w31dPi
LP37+d2+3Px/HyCbrbVtvW4Wi2NR6t50jIcS917013ywnyCNbI0DJOU1xysJNovaVfLvPD8bIZqi9m0bs0G0zfQueTMHQETlwJAdXN3YAB9VtW7aYlSQFdC291ta0NiUC/m/+L39V+/p6+XS5fnHdGR3doPVuCqT3CoF7b1ZKW1vba9vQQRC1YzclZiQwNgjqrZddA2ZzUlwJSc3ZmSw5cScofV/K6628OanCXsvtdquqalrcmgjMYX5/+GGYD01bM20G27aehnEaDpE5Dw/tVi4vH8DteHo/P30T0qjd/vz573/8+PdDxCEGGTAdKYgngOnwNBzfCUTt2lvH4o5J8sA0Hg6nnFLZLut2a020JUJVKgoVnVkzKEUkU9a+m6mpbfs6jad12cYxrdsFUY7HAxGDQ8qBKI7joezrOI6t6fV6eXx8PJyeXq8XIwsxGUBzdUJ1U7UUM7ijuzA1tdo1jSNz6L2auyMDCoXAIiEEpFCbOqCZXW9bLdvl7ZXcD/Pw9fUrkyADuqtpyEkBBbFrA8IUo5qBA9jdOylLePny8qff/ZfjFN+/+3UpVfV2mA+Pp8dpGFhSiMM4nPI4gTUh55SJAqeU0yNzUGiotb58Vtfw+GyOdtkwDCxHcCB28M7Ebdk+fvrxzy/nOurab3/48R+Wdr71Zd1u2q97QzPRWrsjS7JOWnfQprq1vtW6KxTHfdvflu0z1cVq6bCikDkYFBftgL3uqj1IBuhWjeHAksGFi3TtpRYGYxIjDMFJCFzvTIF9r2bcetm2XTXz//H/+ou9rKXrsq3sVRgQUDtW64DoAFGYkEyRmR0VTK11MGIeqkEIgwO7K7MxEyKYmbk6NAQgk2aKHoCgQUOmWvdedVu6VQ3h+brVHz/8eD2fmW2ehtPxcZqeTo/TmMfbckHiHCK6TvMh8kQojn1dLoCcp1NOxzSkrd/+9NN/vFz+OAwShhAnnw8QogcKD88/pHACQ0Gvt80N1XCeH5McUVPinMJslrZVS6kGzVAUHQXJA3vMFLG7Qnf3ZV2tQx6HYUyt7722aRpSTL21eTog8sPDw7IuxIwGl+uVWb797nsD3vYqzMzSmiESAJha03Zv9MY4kHBtxiHGNJoTsRAwOGMIIUTiwBxr0VZbCFG73a631tbr9XycZxb+/OWTSBynQy3bvt1ECBF7qUzuYEwUmAF823eRkFJ091bbx59+v12vT0+H+XTM+aSA97GE6XjENFIYKETOWeaZh0MaT+N0DPkIFGur3ZTBcN+xUxgnHtgZKEyAJ0PWbrDeJGZS/8cPf/inD3+3rl8NfIe+7C/eb0X3ujN6JokxzMLxPvXvQA7gPbROhJFIhDkPs2BGRIrshE27gzmxmZS6tlaYo1AinEKYiHMrWPfS2n5f6A3kOVNOMiQMjEMSVevN3F04lE1bBf5X/9370pZlWwAMwcHJiN0JkAFiVzfXnLJIdCNwHBLnFHuvwpJxQCVArFhBlYMIMwV/eDiMOfTWrO+lminFHJjBTQXhGJ+TPwT4PsdvUzgu1/3l7atjiSEdpl88vnuach7j2LXe9mXKp3kYc44hDtq99ELEHFByOh0em21b7XtttVTzPQ0cggvaMMbp+Hw6vs8hiYSmutUlxsgQmkKgjC2SC2pCe6zN962V7oDMbMEC9SRGbGLKW7nVWgBgng7jNCzrpfeWYh6GEZEBsLV+mI85xU+fP6gquIeQpnmWPIHwsm2EzBR6U1NwhxgjuKcYGDmE6MAikUgAkFh6N0cMIZKMBsySzLGrxyBEdFuuvev58uZuMabL7bys23x4lJCul+u2rYd5imHc1+KgZoYokXiYkqnXWg38elmYaBpD36uDUghxOEqMgO7khIRxnKaJI8eQTg/v59NTylOe5lZ7J045CxO7Bqu9XVyET+86m99eSZRoQs7UFlwaY/yyv/z4+ufb7RKFSVOUFOiqDgjZ3SVADDmEgICEbIa9ce9qXogIAUk8yjjw1LopUSXroEr
Q3VtVbebN2Ikgs0xOESC2pn07m7o1tQ4ing4+3PWDkQABOZJk5sSUiARQ+Zd/c/zZ77gxsyHeZ70QIkZsjtdtK/2G96VraNBbzBwimyLDEPiUxnEaI3FXWw9Ten5+nqdTJtTeStnRopOwUBKJkVMIQdNJ3j/mH6yFIRwzTdvyirAfxveZv314OE1DRHI1XMvabT/MxyGOLNys17obdUo+jeN0fED3uhlTZErrsre25oAxUu815Hc5jswQmaC2QJ5CJMzrtte932FsZlwbEAlQdxdwDsAZ00DjIAeCVFsDa4iQcxLkr6+f9307nZ4kxBgPOY/bvseQHk7PX758LHUfhjHEmNIwHo5IYoa32zLkFCRs2waA9ySbGFMMIVDX5gqE1FsPMZqjWidmCQOHyCwxJzNwdxLatqWUbW99ud0QoGvftg2cjseH1ouVAqY5DUOIrezgigrCIcZMRN26ez+/vvbW19vtMM/TPKm6xJBSTPdNjyRMZLWD7TkSEzBLFHZvagVDNANteyD2OCA7mToSDiO40P6C66vvX2kcME+wvb28vpy3y8vlx8vyqspsU+YJxZU2B0QMMUZ3k+jEzMSM5IqGG1JnQQkQY8pxYpbqvncvfe1WCSITWWvQhRDdgWCI4eRO3dys9nLxamBmrYREcYYQHQCYOcQoMsY4xCDjNAZJCMCPv46qtSmCCxIaGCEHnIHJoBp4TKNrLFsXj0mGmI4h5CD53fHd99/95W/+6l/89re//fb5YZySegtMd2EwQe/a+l6GMDQFIOSAzh5iEI1imKdDCKdIYwysYbPQT9P7d48/HA7f5cCl7K2V5fZy25c4TXOanFh9RzAkI/YQYghhzCc32a43V9j3dbm9uYNgDoDdhQmLm2knWw7TMA/PINxBrtd2Ob+ZmzuZU1djDvOYhMVVgsQ5jwEnBEKvrqCmbS+363lbbymPeZwN5OHxm9tyu62377777vPnl/Plkod8fHja9n2c5mk8mqEwt7Yf54MIq2pKqbW27Qsi1NZEJOVhXTcHyMOATNqBY2AJZsSCEhiQ9rKZamvttiyAWGvdS933FdH3dQ9hSIEul5/YLIYYSKxXgs7o3q31SiJ5mK7nV+0bKvTSJFBrfZjGYRiFJQpFxhACM7t2dq3bWtYNkDhIN3O1xCGO74JM2+Wr1xKPz55HUrReEFeKo02/JCjt80+Xjx8YUR0/fnn7009/+unjf/56u1zbYJASh87keFa7hZiZMqBLcOHkQFEkJCA0gw5gIiGGQ5Sho6l7LVV7dTPmgIjmql0xMImQZ/Lobu5dba+1aa9CSliHSfIcmRsxk4CbpTAc5inHOMQcJLZWBJVr6QrK4m7J3buppRYDa7Xv3n/3L/7637x79z4EqWW73W44+mFMDKXW/f27X/zw/W+J5fVyTn+Ia9k//fST1zOhF/F93yNQtHaiuSqt6+oRj2nMIZnupb8O03Rbb80vEZHDMck0Ds+EiSBo36zdWJdedd2305AQIiFM03POx6YXIK/1soUZceQg3ro6Bhx4D3ULkkY+tM5Xpcg9RfQU45AGG/BmnnZu2r9etg4cY5EYCEJvIAE5ZATWJhgCoSEhiDhoqY0AUgpmVvZ2fDxcb18/f/n47ft3f/rpD7fbOk2HkKblVkhijLm1DkAgpK2rKiOEJABmXt31dtuZmSgisTIjirMgiXojl1o6gBNAGJOqWXdVba2llHq3vb6YIUJcbrdPnz799td/WbZ9vW3z4xhzaqa6b4g6hNhrK620to9DqmW7vn0+TQ/WaxAeD5MpSACRaIbdjRz0zngKkRVVATCoIZrxOEl+Z+qUwvj8i+v5T3I9h8ODzpl3s7JTUopi0z9P3z++/v1//2//n//3Hg5A/vHt03W5tVZ6WC+35vnQWcEmDj+F0L033JlpRLTARm0mIh9cfa91ZYiMpNCss5mZNTVXYqyd0IkTR+RAzEw+3GeITa2bIUMaiB3HaaAUKAlxZElNvffOkUl
kHh7m4WHI02F+keP0VO0CYGtZEYLQUHvrbRXiX3x/+Ntf/cu//u5f/vVf/ebp/enr2+2yfDlvX8qycLAdKgfBXoTYLbhJL33Zl13dHVvfcNOHdMiJa9NipfSNKBZooAKt93Y1/HGttwbr3i+Izgyq2ra1pkwojOk4PfHLn6+XVyadczqM7wI9u1q3eW0fHPatf2E+eALtjYMchocBxau33ZrXohuLi9PII1ZUWkOS+ciEEsSu50vdKmANcRJMBt4qxJECsXZ0NgY2TPfOTAihO9W2JQIEu769XrfL+3ffqPrnzy/DlMd5UINm7RCn3sy0pzxeble498LMJMRtW8zAzPa9nh6etPvb2xsJS2aW3Hs389Y6IjCwo6jCvtfWmllXVQBalqXsTbtvtb18fVUHJ78uNzdiFlU9v70chtjrXiSQ9WZNvH7+6Y8SqDc1MxGqtQ7TGIOoNkQlEiQmJAdFhGbKOQ4hIQgzzePUEL7czkF86Ft694tx+B5ezv52g+cA04H6qOXCaWxhpum7b377L//x91/+X//T/6f6h3Cwi112W/bbjj66r0BwWZdKPXOLCYLPYIpQhccg2I0Ng7Tcm4MHBzKzZqCtlb7XboxDQEE0JI55QFREZCJ33Vst2ok4RwyYyWGcBooZhAVJna0WgKbAEqbnd7/85vFXbjHLr/h/93/6rVAijAZa29oVxKN3KH2PzL/87tffPX///PAcUljKpftm2m/bdr6dL2+3shUFP1/Xz1/Ov//Dnz59+NO2r021FOwFk0sW9g6X5bLouoOSBEK2ihmz1Wut54a19BXVy20TjEN6QGtGO6EGGgFlWc7bdnVmRiQ+HoZvJA5EVtprw53EWrXeW6s3wS4sAhIkQCAgBCbEtKuvoRujew1sIWdiDyw5ivWGVJHWQMgiLAO4qvYUco6BKTAMrr3bZgBNCQFiDGp2vZxDCI+n5/N5W9dyOM4IzJRSHE07ALau13U1tDwOrmCmKY2uVmu73dZxmMdhPJ/PgAZOKaaccm/dDJgpxkgsiG7m+77v+84siNSqXi6LOS7L65eXn1rZH06nspdWd/ciStq29fxlCIIAt7evkbzutykPe63jNPVWEf3h9IjMe6kxkjpqaYjauwNgIGEUpChhYggcMaQcwoySwbs2hd48eIoTqyNURPYYKDAaY1BgAHTI4ymnH18+/Kcf/+5Wfty1XPbb29t5021p58vb2+evn/JwOhzfCWMQUlNTDvQtw4TUHVqv7IDIxiIG2Hvdlr3ue1XrKgjuAJtWQCUJIUQkKl23UreyASoxEEx5GlI+SXgSHgWDmWlfHXpKASU9HH6Z80NZt9Z2IU4cg+k1YGpQzBWJSaSbffxy+ft/+I9zmgrUp/NxL8vL+Vq0mrfrUj7+9J8a7qc/PpVGn18uL18/132dpuM0JXSA2gUVwGqxq/ZqRZFhJUqstQfsR/IuobiTSQcni+W6vX791Oq8w/jt89MxZJweTqeny7Z4qxvneSLJCVEMNgDobXMTMGi9dt2ARXutWIQSUiEmjtlZetOy99Wrj0Jg5CqRY8YdOHDG7ga+9xLYmcCsEgY1a9CFzHtXqK17b91bZ7RtW2NUdhhienu77Hudc9yvW8wHJ3bXum1A+Ho5D3n6xXff96q3cp3ykMeh976umzscpnHbNyFiFwDovbfWeu+IaGZEpKq1dhG5N9SYWVWX9YrU3OptXbuWmES73/bLwzxj1yZopaJZ1RpjNuuAgmC9dxbet348PZf1dd8Wotzb9XarTBnUwDtiBQvGgk4yxgGHwzhwpobWe5/inMdwWzdEg69vtioeEw6sW/NlxXnCw+zubisTQtfb56/ocDq+++nlp2q+bLbXGlrYy3q+vD4dv3uX/2bEAOED8623M3FDe+3wjmBCck49wWDeDLU31L1XrZuqmWG7XgrGGImoNHxAwZaMVdvebsW6labYYziy09ScWy1mRthB16Y39Wtpodr6+58c8JJ42KxJq2v
kSILaDXggD4FjFBwptN7/y58+fPjp/0GRZeJhTPP0MI0Pw0ja63lZ9vJ64XNf5fxyu8E6DAOym/dpyHGIvW4AptiDhtYhCTvVZltbAWAfn9LANnFcfLx5hylpbZevf+z9WwmhjyaPNMT0zcP3y+v5S/nJdtbWTYvD0vXWjbRLb0uAiCa12ra9MgeJ0ryrggBEASecKILkbl5qU28xojqadcWKyWIe1bqXpr11uLAQAKx2pabAOKAzAUMzUEJQdUVbl9s8jGiw7mcJaVmWmAciqm1db3WW4eXDl8u6zL+cVXW9rtfbWd6/3/e97PvtdpumqamGEFS1ahckkajqZqDaQwjueL0uAODu95V7ANBaQ8Te7e187r0jsaqt17dxSDlna3qa5uu+tq4AkFKoWWKMTLG1HnMCc+ueUzq/ve3FUhRgwaE26+bCSL32lFIIod+2xXoMLcoBOCCV2q7WMyMYBLcd6icrDxRmEdGMphUkG2ap1f3Wv5x//+MfPy9b2fq+9G3v2w7FYKN9L2uk+Z/94m9P+VtpN5qOu50prBSI+mvfCXAKIYQQGLoC3cVIVVpb1tYWNxQUc7zUQhJjlWTYArlab/u+l9rWzlpsDCkyqbckIiB2azeD5rAArtsGLNtWP7f109PxVyTIf/Gvx24NndBib8YMOY8xpSgp5QFBXl7f/vzpx58+fPjpx4/7vk9TCALs0UOs1i1wjNmgMzIjDIGTSDdDIslJxZ3ZjIlxSEk4iWQkxACSaIwxpcmInME6gJqCgRjnNMR0GMYcR4CwbOvr9TOgOYFDC9IR9+W26B5QgTxYVUYOHMfx8XD8Po+PRhjJp5QyDENIknJ30F73tS5laXst2o1ciBnQCRmwe0MCQFdTd0fiyGMUxtaxaW+FWjEtzQw7sNxXJUCphYVEwjBOW9lbqYb48vpVRIact20rpVxv1xijEJ/P51JKSomIWqtmpqop5Xmee++lFHdPKd1ut1LK/dyv66qq9xfAzC6Xy3K7OXjZt1b2Xus3794nEW3bYZq39cIMKcVxSAhdQgickTRFZiYws07MsZbL9e1T228OlQDBqJZ1vb4hqDBarwiKCAGFQUIe9q5r6YKAgRAK9CpOXs3QZIyeknUhGchBX77+l3/4L//h4+9frp/run19e7leqzUWjoGEKfzmF7/95vExBUkhNV2u+wtgu8MrwEnVAJBZEAMidu2m1mut267NTMG698Ztd92c3E3NVaE3773utTYl9hAJ0TgAcnXeSr9s/exYiAuQAUJre11uaJlgZHZp/VV8Aj9YFwYEa2qNaQAA7TpNU0ppvh2+fPpyeTu/fHz5i1+/j5ZSHErDtrtapRin4zS2m1cI0CPmGnCzXWpCHgkgjpzBAIxIFLyLkTURsDjvDMQtOmkGIAegSl7aeq7LsRzfUZopvz89ff46frq+gHDKPsR34B5FZQoRvu29m7QgOeZjHg+OcW83YbTF3CggkchSN+172ffr+a3WSkRhGA+neQ5kqkS0NaulS+yRYpI5cOjFNq0hcTQ1J1CutatXZGYetHnrZ+IY88zubn25ncven56ezuu56C6G5HZ9e+1uIYQ7mbmUcj/05+vlXnZUtxDCPc4xsxjjvu/X65XvxGyAZVnu2lJE3Pf9drsxM3fsvatqCJJSIjMWL9slZjbN1p0gMEVzBDLiuG5N+tcYY05HRIpp2MrXUs+wVGs9ShOhTnpb3kpdTsOU87Mb194kR+RRCLa2vJ3fhilPw0jxBGoghUh0VxoCy+xl8XVvt7p0fXvd/vxPf6x9zcORtxYBh5yQ+PHx4XA4Irrj/nndXi8/9ngbDjQyOK0cxvtmEOLpDhYkBtTie3dFRmGCqmrduSMrOGpxiwicU+/dDUNKMaFwwegqtruSVuPCyTLFxGNvbNiikPk0pCnnsZfIf/mvBnQWDEzRrdZatSMzBM78c/U6jsNwOJ5CjG4Fvc5DTimJkDYtS2PJMaHzlmJMIGWzYowgZBE
7AQBQIg4MkFNmCtbB1dypgu+6NwcFqYaNnSKNeUIkZz+MYUDqve1929fXRc8Gt2kK8zBlOcX4IDLn9JAPeZpOMc6nx+PDw3PKI0c1retlJYBACtSLbuC27+v5cml7BTD3Fmh7HnhmNtJtK+t56WULOBzy42E8THEUj9hItQMYA4EHQDdswmLNu1pOkZB6t4fHx8v1bRyjq5XlWmt9mI/M8k9/+sM8H6dxctNlWXrvIrHWptZPp9PtdhvH0R2u12sI4Z4M1NZqa2ZWSqm1Xi4XZgYARFyW5Xw+p5wvt9dat259yPnh+Ljvt1auoO308ACILCGG2NSHcZqnATrcrlcRiUEAK7EQRkEjFABmQhYPIaY8ioQoghwxxDQO8/Exz+8wz3HKrtrKJhBCyHFISEhBaJwgTxAHpRMznX//n//zn37fIL18vf3hT//04e0PQJpiIFB3f/fu/bvnR0F0wmW7/enj717f3gyAOQbIBs1wY45mDI7u3ru22vrem3ZtDUsTQAosDsGQAqUQhyiSRFiQqIMiOUYEVgidWJmgWyeWwJRQhMKQxkAJXVxDiOEwH8fhIGChVVXcohAxEqC2vi17lKTNJAR3jxJSHOf5eLs+1uvn27XkbGMc359+Gfj6slxIte6xmB7CBJKFRBgSexyVGa26gSlWtYVIpsR7rGbayBlRUUQimllDJwXzTrYsy9vL13h0iHlnD6fj94dv1nJLaQAsFC4RD9TJfeUg6N0MKIKydOPr7fXl5XNrW3RtHJNQCtb6BrAeT0MpMAwcmHKwGJWhUtsDrmTaam6IMFnynPMskdrOvRNzRVQRWdYSNAQJ/WeAOJiX+fDQekWHGPLb21stxQCZ+cuXL8IxpeEeu3/68vnx9DAM07ZtT+8eiWgaD+Mwv7299d5zzvfl8qp6j39qrf/blWVZcs7btt1joVoKIjKziLSytbJo7xBDCMmNydRMY0zgLHEMIITeG4QwupdSWo4pnt6Ped+bK1QkM0DvDmgSUxyOMQ1pnJCFSGJKe9+Z+TCfWi+9d4boQREZiCEKYCBgwEjj/D//x//4hw9fK602LENhtaJue22Hw+Hx4SgiTfttXT99+LHdSpaD3qwC32YNg3FEZkcyt90hIN49a6GukdgluDsiKamLhxhiHmKkEEHB71tRHQiQIDA56x42VqIh+cDaOgFHoMFyHnM72txbt+v2IQ0qXnVfAUhKqN4dEYYcAwYAEOJ1WXqOmDAlJvfjfJT5aF61uRIIhYfpueywX7+uuzL5PJzycGj95n42WTBo5DGDd8o785flz9BLDocU3HEHJ0YROaIbDpRW2LZ6A+NOqvgTejONaehYwzSF8E3us0QFbnu/an8Bp5xOpQSSvdZ9+xophtr87evtdi2hsBJYAks1UXHuDvFSJExI1qKkFLERI3NCTNHzQOUCr9fbOIXH+TuiUYIDB66IHYkVWht8IDxYdwlqXnvteRwcatnOh+m9K/TWmsN4nGtp5PR4enp9ff3++2/33i6XyzRNAIboUZJwlDFer1cRuRd/SinDMLy9vRFRCGHbthgjIt5/h5m3bXOAfVsCcTeMLGB+PZ/HxIGmNBxUtZbNahnHYRjGbdtSD6otTkNEVrMUDmRVArJnppAPUk1dO+GdCY0o4fnx+eH5u3iYOMZhethK37eVu8kwJozUwZpSHjwwULjvYzH7s6+ax+cffv23/+9/93/7+PIf8sBzHkrT8+W8l/aLX3xPqIjcQb98+bRe1tFjCGnvrV6gaR9NQm84XlIChWiFVNGNiEi835GZ5s7uXdzQUbpTG/Ipk69t66ZQeyePmCMyK5ZCTjgPAmuj0CWn3ny9lR41x3HM2UBUfd+/ipKXxq1uOWPk0QBVeh5TlDiOMwh++fJplevhdBzHPHBQDt6JiFRw3/ecc/J43ohhikEBoxM7Wmub0jmYduyWhHsXhDHGDV66vOaUQDACiAg5OjzV5lLWAGfD2hVWCqWe2+scwxSnYQg
YKA5xwLBwJFD0vX99+ezX1yGLWFyW6163qr11IDsQ5gFOQmmIXKww2zxFigEvdV2jOvUulXEzQGakEGQeh9pZl03fyjqXK8TBWYRJRCJBKQXQp/HU3fbr0ryBtXCHKLinFKPgst5UNVFIILfz5f3T+5dlkRjMbN0XEem911r1XtED2EtxRzNtrc3zfK9+llJijMxsZszcar3dbvM8l1L2fV+2G96JhyQG3k0D8zgNZe+BwrbcyrYys8QgwoSwnL8C+G7w8PRAgGg+RgYgRKLhJMEyQC2diVLOACCM6XDI0zHPDxQHawTWRATcW6/TfOLozdRReBhhTA6ETanU1z99/Mcvrwb+m1/+5u32Yb2da1u99uulvP/uYTwmMyXAclu3ax/4MGdW1QOn12277u4exlmJV2YI9GgCvWxGa0jdVEi1qFVwcDfELuBkjwGp96YdmpICQx2Zg/Ts0IGDD9OQpLuvpcZSm6EoAOiglm/bqixRhFAS/+ZfpL05Anpr7IxMqjrPw5gyh3w4HvZ9vazXqktptQMnSSHAdb8WNe3bupZKe23Ltn4e5nw4vsvxVNv+dv3p7fK2FW3qqUeoobTirgCVhUVSVDAXaiSYG0jRmnsXahx7TWZerWylrkVbrYbmbsCMIgn7OMrD0/zPTse/Wc7X8/mLtqV3ut28rL1rMG0z+qOMhzx2bU7daAWuQUb20Tz11kgxcZQ0dEM1AGZni1OfDxwJtlqAo6AouiOCI/TOYMjUenFvSZIwA5B1zykPIe/rNbAHQEbYt5u6YoSt7t9++12rdXt7Q+jTdIgxv75+zWM+Hh/Kvocg9wmveZ5vt1vvVkqNEs10XRYiMoVtX4kIAF5eXnpr7t5bcXcEU22Px8ccE1JTr2XfokiOaZymUouD3i4XLTWxu26mpdbFFYUiR+YoFHIMMQzJmFhwPBym+TAcHymMBqyqX96+nq+vhIWFGZmQRBJLJBGbB+UIEN0CO5L2/+8//N3/+vd/dyuXyLz2cllvr+e3h/HxN7/8PkUbx3kt+6fX11ZpzIccpKtXtx3rVnYUTTEwIkmn7A5KgA669mqldbdGuDeoqxm7IiYMJ54CoO4NDBxUsKURQ6QRB5QxTVPwHDqx2r7DukJdzEpjjKrYCry+3s7nbb81/vW/nAwpZOJI6oag05TGFJ6fT4giPJ8eTqq3razbWq7Xs+kqQntp5+vr+fr1ulzn+fj09ODIh+n5N7/858f5edvq+frl8+cf3160nKWuu1XAgrXurrTa3KpoCWGfoJA2vNayrRuaEhNqBFTTpg5GaAy1ay+we61VE85DPjwcfzPz979895u/+c3/WeLpp09/PN8+ag3rdXBSImGaZuaBRoFktDauu4IgRg85ppBHBTcCcCxVeycDoOBjwiHGKQ1qrs6RM6FnZwFLFCRILbWVOqSRmdbb0vcaGQPhul5QG6hb63EQtTqMMUYZc3p+et6WZduvOeeH4zdq/nb++v79t+7Ye7uXfQhRe9+2DZFEpGu9H3Qi2rb17jR677XuvTd0VW1uFd3nKQsbARzm+Xp5O83HyCzChFxLJXAEZfTWtjFO7lD2EkJMw0ghAHMKEmKIwpJC7Q1qG2IeDqeUBzUwMFdbbq/bdstpOh2fkRADe4wuAZghJMaM6/ny8Y+fL+efXs7/7p/+w+cvPzboBrq3PkT5zS9/GAeOEaz6x59ebtddIHIglLj22qAgkymACwdndqQi4kJEQO6szWvbew9gDdpa94qFZ8pjiNlJgNi9W0dWCIiRMSVFoRCZkBWomhW11vbbm7GGYUKMvdt1K7dLvXzW5VL5b/6bB46CzDFGQHMo4yDoJWaaDw+9yzxNMaRtrdq32i7nr0utrfX+tn66XF6HcRzTu0N+/NUv/uZp/uZhfhznqZT1erlt+9Us7a96W+t2a7pVJBKA5nG7wX6DbCNhhi5FpXavzYNDig9Cw3pr1o3QGI0x9qbEGEh8t23rKT384rt/9u7hu2+
f/+rXv/pX33/3l+fz8vL1z83WVgmcXSGgJslMsVFbdd97r73HkBMPKU4kBIDadN9r3YE1ZM5zPAYIFAanESwSgHRiwhRTDqmXbq3N4wgA58vXwPL+/VOKYdsX8J4Dr/stRZnnA6EKyzROzw/PkXhZryw9ShjT9Pr6EgeZp/l8uUiWbl1rN7N1XYloyAnclmXpqkPORPTx40cAa6323hChlMJoqs1NpzwGZtV6mGZB1ro/HKavL1+CUK0NkY7HubcdrRJi4HDXV4sIilCIQQTRmYUQEJyRwExSng5Hd9trd8AYk1pbtxtamB8eghAE9uMJY/LakRhJQG//4e/+3f/4H/7X3//0D79/+d1lvSyt7htMQ/jF99M4YJ6Cu376/PrTT5/3wkBiYMakAMV2dUNiROxeACsxETFzA2imgE6tQa8U1cndPcTOI2DMxHQvL3kHpQgUGZg8CMDASAkg1I7dQFUSh5HyHCimvVettl19eXPdQ7fO/+y/mYaIOQszInXkHjMcH0Pprzk9BD61Wphx38uyvmnzbl62bV22favrtZPnb9//IoXpcfr21z/89uHwnbus2/Xzl991veY0nd/2L1/q7dJquY5pOIWjeOxKdV8UyPKA4ZHlhBD6ZhmzoQR5FJr31hxgkFGQwHczH8IxxXnf2svLpzSN73/4zfvHwzzwfHicx++bruflT3U371ZrEcgpUMxBwlyrbWXbizVjIWEeDdD6rr1pE+8hGEQeA43esVTTFgYeJIQkU8iDste9Yu2DyLLezue3eZ4eH08xBWbu1oXF0cdxODwcXQWJAofT4RDQe9vRivVbIkbv27aMYyZ07erurnZnB93NfM55WRY32+uGQLXWWvd1XXrvzAQAXSu6mzlLYEQ3n/KUA3rbrJXz25dal3EY13Wd58NhHrZ1gdanIQuHbsaBgsTeLQgPKd4XOgNgTsOUBgzBWJiEQyC6V3jd1GMQAYlBZIxGiTDhmDEObS2EhWj++unt3/2nf//3H/7LbVvrtmMrx8Pw3btpnjTnZAhfXt5++nR+eV3Khg7IdCdpmoIDGrMKazcFHQgzMwQmZlftraq1jrtp68Q05CkxCWGKFAG9dlBDNhZkRAoBA5GN4i692V4ZTEYOM8QxItPaam29LE3fWAtt2nff+Lf/6uDYHQ2wi0BKItE4V8S+7fsQnlvpZb+6oxuCoUHrtda1aZvA4vVyzVN8fv4m0/Du+YfD6fF6O396+fHL6z8OWQ9j7sDLddfdhkSH6IcxTDIWh7fm67Y5QMiPw/AsMup2rXpWTHdaAnLI+fQwv5/GUcgRRMHdIthwPr99Pv+5tnbID2k4nJfzui9qrrYVPdcduyFTmIcg5CKRYlr2basFlczdgZraVjatvVVEwBAiU1DtrXpvoLUzB44phFCs9d572am3upd93+Z5fnp4ioRkbmoI6OAi4TCfmEVY0D3FfJgm7bvbjr5TXwlo229ECEZdtZROIKVqraX1sqwLsxDitm29964NgVprtZa9rKqdGB2stdZbJ0IAQEYRHwOhbvt6aXXTvhEYIY3ThOQOmofBqgKZROlaVXtO2bq79SDBiViCSCIKgHg4PHAUBCSOHIe97OuymSGSD4linpkTxICM6ghpkCR+eVs+nl/229v5djmXdr7t69d5pPfPeUguEhzH27q8nc+v594q9U1DiDEkkdytAAlTRtpTlBAj+oAYlJuDuaO772VppXnFdd3UDMkQOYQYQ2DvaGbeHRtId9AOqA6gdO8LU+AwICcgNkLcdN1KvV379lb3t46sOK35QKKFnONmXSIl0hQ8RGjaAgTV68vbP4ofazXA9PD4zGLLy+pGtVvduwgh4T/+4z/keBr+8nC+vRJRLev59rlBeRznhPjd+6C38gY1SmJHx33k+mD0xfC2d7by8IAhjhHI4u3z+VMKSqreVVUPcjiFBwoQQIhue4d938Ecgr+9nv+n//nffvrpw9/85d88nJ5LvXlv7x++F7b
f1a/XdZWgte4u015rmE7fPf2VfvkHYp1iru3qHrQBURon7FXdoRHU3vtewA2JVBd3Q20EgToMhM4ecnoeRiIiTCn45fxKBJKkao0pD9MMTrVfsTcENF3cm4AJA3LaSxVWBTi/fZlODxzGVmoFu13PTds0TeM43m6X221NKfXWGOV2u5SyM7Nq6//1Q0wA2HVjiyiAsEYJmEZwvZyXVjYEPh6PpW5MMD9+Iw+8bueuysy91LItEsfe6/nyMuoDEQVJzdQQvWoQnh8eMA23W/1ZhxdlzANaIWCXkRwAK+dRW0XzVuE///i7//zH313OX4JvcYD30+M4BUJFK97C6/r6tr3tvXEWKs5oMVFIkQSxu9be3DEgSiBSTurMZQ+qdcyZcRQcR5EywBGRXFiU3PrWrJtaiikkxu7VSYkDSOzOkkYOIYQwjZiEhjgEyWZwqvVyW9dY7GjwqxAixEkxR5EWsTb0UHondnYnJndxcjS+3T5EvIX4DpBRrZlFFgvzjkW9ecPDMcVIv//j33PEOMj1ujTdr8ur2q6aXOaI9XGa8EGsKOLcal/wMiT5DvEfil8g/Da+e//0PQG+rMtyPZVlq9BrU0ocaNi5zsc58FHMToN3xLdbk/iznOZ/+Y//9t//03//7em7d+8P0zzEYOPh6fiNty+YbEHvADTEoamlcPru3V8v28cYXDgxhUjezAOGQvte1s16jEQOWJuzK7fmFaEHzRNHUAJmQvHmBIRuy7KrWwjxfHnjEE6n0zzPX7582Zfz8/Nzq5v16xAzUSSSFI9Dr8XK63Wp9cY7JfcCvTnst5t6F8Z9z8t16b0DHEopAHBvBpt3d7/LRVU1xaFqzTGeDsdMOiUnhMv5Ukphjs6uvV8vr2Y9IGmpFNNIB+01EhJRaU2iEZG2fru+AXhASsMQODt0wqSq7J7Gwd2vt8telhSGMQ6uigwmrChRTcpmRT/99PKPv//Dv/1f/ofP2z+lsOVjGokVi1k/v/p2ey2Ehd2UI5FmceHjw3ycD1FQm/TmEsN4muKQVFtkUfC9ljHy0+MxholgCj4hUgoZNfa2l/3r+fVr60WNDodpGIZe27pvZkYSRSLFMAzjcZpTZEYMaWSJbmxmZblps9qLEROiIFAGeUiZXHCtlwUrhbqVuFkAKKBJAqEt24soGYyIuO/A4DlGmwOHtRUlsGmYFervfvx7MD0dnoHref1z25eVVxXGygn5IQ89ejVfFlezCT1wT+I4zO+PP/zyu79FhH67Xb67BNGOAABgS0lEQVT+E5S3rrrfFt0ihmAqb5dlPE3T1MwuFDgmWrYtppBGlnD48PLh7z/9j19exl/86vt3D78iHh7mR1LfzgvIXvorQwoZ976H+G4OgPpVJLqVkGIv1ZuGzBYI+9KtUYgBsYt1hPVWtl6ZUk/TAQfBjOzIyMzEuC17EHn7+uoIv/n1Lxzs86efzudzoppj2Jer5HGIJyIKg6iTN9tLMYp7aWa6b0uxrTb1os2txHI+vy7XdRjG8/l8l76ZWe9drREBEbmhu3fdmfH58fn9w4FV0evXt4/Lvnzz7j3a075e2ragtxSE3fq+ME8cYwixt8rR1ay0mkLUqs127AZ1f//DD301fB6Oz0/rbb2+vSkkraX3KkFEMmEGVqhXPnyPcdCy4Nevnz98+fH169flRr1PdEjDQNRduzeoLaxr1RLikMbp4Na1sM9TCvx0fDodpiiBgYVzGsd5nnIeOiBoL3VHoCnlwzSnlERiCCHHlHIwZ93s9fJ6vr0ty5mFpnkOIUKTdV2v25UF53EaUg6Rh3wQlG4G7CyGQOjEHUopi66ttQhDkDlPJt9MgwCOXPpbX9TcfOttJzXAQkuSrI3W8sUwtY6MlAMNh5lG5cV8qxyAsz2MJ07Hpa5ff3x1XBgqIBbdE788y+PeFTxkpN6vYGI03GqP0Z8OPL5/fvfdt+/mZ63bS5pQiUwD4PvDg/LgRjkcO9TWNA5DqRq1HEiLkXd
TKTnQdw/HV6uvX88GfwSgIR+j8DE3WwV7NNzL+mdM33SUbnXOT3urHV7zwN6n1vZqb0FO38xDh1xu5fV6u6KySqTQwPetTzGo8HR8OnLSuo1pDCFeL69VOzFRgF/88BdJ5r//x/8foDPC8fQEhjGmlBIykcQYo7sbQRpiHHKE9PL69WVZ9+prre2mwCoD91styx4Q9roBA0ojYEBV7+jBWm+9IQNzejo9nA6nyJG4rLdL3X2eZ3eY56NDM28EnEnQDRHZLXkCBjN1RyJqffPSjsc5Vu/1usr19bM/Pv2q92pmKQ29mxjV+7pOMyAOORBnCGhxcHqmjA0//Pj69eN5fRjnf/m3//q6vqoWB+UwEIay6/68EZEwe6u9F3WQu6B+HlOIpVQJ6XA4HKZ5HsYghOZIgCDIIUS+d8cBLOccY8w5M3Mf+zD4YeRtG0XC8XgMIQDQXUvbVVNKQ45EVGvt3QAYAO6Tk4zi7rwL71y5EtEwUB4mYdRo29NsG4TrpwYUI4mrAql7uW6FenJQ82qKTqQwCvmQJFDkzEbogb/77i9++5t/4dB/96c//u4P/2ldtxhDNAaXLXYiGQ8HYdbFtO+3a8GIA1uY0zCBYNWit5u+fN3Pb1sm7AQt1dPTI0B0tMN4XPFq2sWjEx6OMsxBDW7tuvaLKU/TgQHbevn65eXdczTpgnEYpnLukQlDX8pHJEUPFh6m/Mttxb28DQMGTaDJveR8jHnWrOpc3m5OHDlGh5CBcMghO0JrLVJQ8Ov5lbodj0dv9eH9Y2D53e//YVuXb755yjHGJGaW05ziJEGQIefcWnfuTpKAjkdsap1Yz1vd+62sQF7WpqoEuO1LqTWNgyspOHESv58GJZQgPAxpnufW2qfzJ2EQkTiMgfB2/hJFhLPEptYVgRkUStRAwZFloAl52vRCiEDdwJ+evvny+imiR0Ltt4ynvl0gTGPApexEg5GA7m3bPA8+jBjFVBk2LM2UptPDQ6U15DFPQxrVyjRNz0/v5/lUS7+9nUutKNxaO1/fVDWIJBFg8q7koGDay15Q2y4k8zCmlJxcBImg96aqd73TOI6qervd7uKo4/E4TVNrnTnEmHvvRJRzFqG7oPDecTeDGGMIQTgQAziaWQhBRGJriJhzFhFZvaOo0BLjYAZ7aRpMQhpg5BBXrWsBM1Q3RATG4ma2JJQgQuwsJU/Hv/5n//u//ct/FbCf5ue3t7c/vt4cJEYBZ115eJzmMMaGOhy/fLXrstlowC7BHNt6e/3Mf/50Pf/uw3/+8PEP748Pcci1L7uc56e89gtVdOxuu1uVEJ6evpnyadvrT68/gbeIumxvOWQ1KPt5ucLxeDDGMSU+ZuwWmLReq73mPPV9Ps3fH8f55fqf9rKinrz7OLVxGMZpfis3chYKBhghHY9Ty9bWHjxoR0cjx2XdUhoPT0evKxK7448//f58/TLPT6fT8yGl18tHbTqNj4gcY0QmVYsxqoI6MUNKaZ7nDl671VpD5GVdL5dLCCml0Lq5IxqDUxoGQ5CUEBHU0IHAx3EMQnW5fn39mOJwOJyQYdmKo3R0IgQnpNAc2AG0dt23AllG4QAA0+FYezTr2K2a/vCL31yWC8RxOjzOp0cP47Z5NVi6RCGJOTEyuDFwTB4HqpvjDTt0nHKGp0O367IUzyFN09Pj4+Pz0/vj8aGU8iZJRCSGrdmyba4K3oBcVdfbsm1ba9VAl8t5W3Z0T8N4PB7HcRjGdG+BxxiPx+PxeMx53LblfvpTyncBee+uqvu+E1GMUuvP8xWltFK2WquIIHrOERG3dSe+k1GEiObDiMAAQETyejU7DdD34tc04vULeh8zKnAMPjymmcq21gKI7uimxWo3NCfLu0h1s8d5+v6HX75/+t7r7XFY3p2++RD+0NumOnYmBgxIQH7z2tp+W7dtb9Rh1TaRt3l5ffv4urRP1w9fyx9popuW3CTkcS8Ay2pha60hU9+uSPXx9BxtOMQppeHz9Us
rVai9e39Sq73Svu2O19LYhBM/5DF4927eq75evmD48MN339ayhCSPj7/6+voTEaIeXRc3qRug876vt+XrMAyCMXKWKCOKhHGUOXZAwHenIQjclusYcm310+cPEuDd8w8ppWkarKs7xjwwM5OEEIEAwBGBQAAdAptZSimUPY8p7iWX1LRqr0QEnlVVFcBxGMZxOnAMAGCtIyIhklvKUVtzLYwEhtu25YSBcHz/Q2Qo61licGsssW51CuzQLucr0gOPMxgDYErZDafH0bxwmn7x/rvI8fj0DPOInUOwvSNA6/sS5mG6q9NhcENHIAHfVDWQjBhacUcrwpDilPOAiLflspe1lNKaniKnFMZDOulkqswYQmhNt21b13Xf91r3Wuu6ruu63geAkNy9qRIzs2CMMcbIjHdb3nsXkfu5N3NV7b0y876buztC711bR8SUooi4+77vjrspBOS7yjCEEFjcvaszs9wWrQBpyBVbOrSpxHJV0hASCyUhHgYtVgDIHcH/K4a+165LHt0tHudv37375jgdb9y+nD9HgtMhvl02V2zcQyQAi0OqGZa+rXItgZMPrfN29Tj11/GCUpfzn9r2JUxIpE3XEI4yTo5A1i7bTzmMu+v58qKdjuHJ2mrsDj2MIAYi5NBT0DQdum7qO9C06J7RDWEvbS9Yy3j9ch3C6+P0bLVDGKbh/Xa7kotrsjXdoPpOiZ8fx+fjcDwlGSQhJo8poGTgQCnlyfvt9etPU0611s9fX8djen76vt5qTE2tlKqARJiG8XBXVWntEmAcQu1uvSO5eUMEIspxGEc7v93cFZEQubbSWhMOIpKGfDgccs6muq4rEsUQ0FStq/rlckkhhzhNhwGhN9t779Bwyo9L+RwE2QkhOZEpmOr17TWyME8sgcgBvHkfYnQmkfj0/j2Ms4KyKbFybVya1rKca35+DodMZtCuRNUkosZyWy/ruW67t2rqMaZ5npnDtu8vLy+qSkQxxhAZC5M2dxeReTid5gMw7Pu+rnvv3V3dvZT29vZWyobIKQmS39P9UvaXl5f7VvC7LBzAW7tjMvT+YebW2p0ZY44ANI4zoiOiuwFg750phEgphRDCkDIiqmqt1R0xRln3dtvq8YHDNCeuT4/96q1ssK0hHrMHlvGIu6Ov4GpOKcRuaoplj13b4ci/+e4vvp1/IMC3df3zy+92+/L49PB6vuzrxnbAKQNGQp5Sqo9Do9MBMhTGtmMaFh3i3hHeynomVhBVZu+q1JEBkYMoWb3t2+60d/nzh5dtKU/HMTHVSGkYkwTCHjEYdCICMFUnOPZ2RA/S+cAyH4fHoW/jhUrWhSJN3EycYp8VwLlOMqkTpDq+V/OYOQkrMlrFpTVoGzHREPdW2u2KGLa1XW5fhykdT/Oy7gQY49wUz9cPrV5Px29yzq3t7r6XdQ6jK7l37R3uwQzYGNnMB4nTPF4ut3UrLFm11bpDGty91rqWHRGtNkHq7rU1b52lb/suMT49ProTOJStEMD29mU+jHl4RBvDwMioe40cJBBCqmXbtx5yS0EYwdFSICJ6eDrGcbqtZRwShaAVFNC9B5SUufRrr2vkI46hobGjrjVYULB1b3UzZg55CCGM4+zut9ttX4sIIVLvpWyblrJvpZky8+n02L75fpyiu/VeWi8p3aetnEXKvm5bMTNwJMLea9nb5fzp69eXcZzGccw59d6XZbmLw+8OAQBU7wBtAqB7iM/MITARmUHv/WfYNbqIIHKt+72+TCSIKF+/3oSCNXrWOCY2Rj7Ap7psm5+O0zAcRPKS6PVtA1AkRG8hCEqsTcr+9d379HB48N7Odfnppw8fv/6hlJ8CTyK4raubOMq3D9+ElMOI3+UQJS2tRAi66w5COXatqIUjxzAjJet98/1r/wKND/iw1627Vey4DSd61NDdGfQphDDEFPMI4G47AHCQGIVIoCFUMg7mOEwTWyqtb/uZx6M10V1jGpMHAcfsi5emIWicYoz5cZiyiFj
xqrVp3Wwt66LdMYy9eauVw7htpaw3hkIctAc3F2GgBNDPt9fHwywpGOg9c+29u4K7r+ta655SQgcCS0wFnall4efnR/vytiwLQicC09Z6bVVvt9tyuRLiHetXSkkscwxMNs7zbbnkmNzRwXLObTk77Ov+GiklnFwAIjCEGIJ7GIZRrbr3XrfpeBQeHLQq7Ht5/u5YGpBHcClWbudbMw/dgTRnQmvUAR4fMYiVgpev17eXt1b22rWDmiE6gLVWAMCsE/1MV1e3fdkcbN0LAKSUzueve13ncULErvV+kXABQOKwbdv5fN73GkMexgRgRHwP692993a70X00dBzHlFJK6R7kpJRSynfY8J2e9HPu+1/15/+VrIH3AeumvWz7PXUuZZNitSnYBvKCj3PqcdutHA68ewHCObxrBBd5W88b9BYTVg4se6LsCoDmhJ/PL398/bM4f/7zn14+fLR4zqGE1LddO657sY+XkMfv3g0PyLGM7e364bYv7EgMU0SOWrcaDNiFQtjbHhV2XdiQIQHLCZ9OCBJikDmO05yOGUYQcEJqTd0cn2JiJoFijJR5CgkULXFMcVIs27btAQAscm7WTEHYAyGiZHAsSuaRMOXAbtw78x2ZCGbGRBQYyAlCHvh2fdOqxMw+MIzo6NaYcxzkxw//BEDj+E47uHsIYd1uYF7rbp3dW+urBEBMRARMwCqCrbV1W1Lm2nbr5g73OTBGIRJwAzAR6W7zNEXCvrcQ6e31SyQ9HcdWVVtxQyC3xkvbeGZ1jS4pDa21pj1OSYTdpJtr11J7OEpOg1Iuzd4+/enwza+MB2pbasvNO7ZqvTrAEA8UUf0C9Ez4nmIvsP50+dOHjx9fthWMmCnGxMzbtgLAVnYgdIR1K04oAZ2AhpDTdEiDlbbdlvW2AICIDMNAKL0XVSMiMyNCRHdoSMwUUjrknFury7LcbjdVnedZREopd2BMjOneLXE35gCE3Xrdt1K2EALiz6Okvddta73fl5F77/1+VytVRMQJuxk2PveuCnFUHphStL65Ve27NUagteyo3Sx1LhLIpLHZu6fHh+F4uXz4w4e/Iw1/+vwPvfu69oW+hpBMem/FXT+9WMJEFnmA7ky7UPGYD2M6HNMhi/i8gZfMiTntqfsBAY0kCk/iEpHFAzpwkGGYpvFwHzNf9qW4JZN5nmOM1+t12+pSa0klpMiMNCLaGqPkHO/qYhahBvu+d0VAAatmvauLoCIty3JvwcYoImIGAPchhJGZCbn2HkKgw6ydJ+YU4r5up4fj48PjH/74T58/vx4OP/9t6ECIl8tlGjIjllKjUJMMLuYWQrhpAegiMkyRXrVthZGqdgEUYkJ3oceH5yhUywZgZjbEvK9ba6vberu9ffvu27Lr7fJlSnlZrsQRSYWE0Ep5pXhC4Bxjc2s/V/3moEaZeu/7esvj+4EBYk5xZhKI7P0AAz2IlkUBViCMh6MksYho4E5AE53ep+my+uvr5dN+23POz09PIUBrjZlFQkrZECR54GkOiQUL9DTkKEFjhUp3KME4jilERARAZr+jfg6HQ7c71Bp7bSmlaZpqrSnle7h/r4qqain1HiUCwJ2dEXMKIdRaS6m9NiLKMczzPM6Hw+GbdSvL7WJmzAiEZnovQy3bIqiCFPdWFN0WeCAZJ+/UgowIvGuJMv3w9P7wr/4PRBQpFEchUOgk/PR4eHyaD1PibXu77K7jafx1KyTc3h0ffzmnunuKMVBIQQaZchi7tcdv3iFADvk4n4Y8hhBcobUmSIgMdyEUtKq9FDCDLCwYuxZVRYfeCrirNtSeOYyHMedcay2t1t7U7e1yzjkPQ8op3QNBRBaRbduwNUJnvqPCupqVuoOjMGuvvffbenP31Ia7CalNa3O1moeo1s01plFZmkUnPS/L82nOafiHf/wP19vrPL4fhyhC4BEwnS+v23ZlalEoyKmZMgckQXPt7go/I/BjSimdL0vrQAwhxTTkcTiOKQ5xyINIgHV
dg2QD6r1fb1/39TKmlPP49nY5jBOAu7aH00kClK3GNJay9VLTNKkbmbdW91qt1jjkYZy5sbr1fQshxjTEYYY42vGEzrTl1oUHF6kckMIMMUMgw0TIDhZippSPD08AcM1vEjmn8Y60IAzMEmMUEWBi8UFCongAMiETwsBD41RbjDHH4b+eYGIGEUkSOAYiaq25OwDcI/gQMIQUQsg5hxBKKdfrFZC113sefMe+030fGyIRioi3XgmN8HA6DMfjsXvbH9d1ZUF3u5u5Xpu2LthBe0tZGA3Jd9+t2Gl4GvLzw+H5af7+ND4+jPNdph0kCRGA7V3VIceQssjA3drTCN8//eLt+Yd1+6s4xEA8SFZA7hgCd9+GYTpMRwBwkMQkZswYkoSQwKW1VrWqNWu91o4oCrZib60JA3l3ImRw6LVrbatZJ6IhD5Jk39fr9brsi4FL4GOe71ie+5DhnS1lZtu2qSqLCLOqruu6lwJM8zC6u2BwRgoCiBikuXnvjo6C6t5MhWOIA5i3ps5h3RdC22v9+PFT12U+HrZrTZ3ccZ6GUspPP/0UEwNxM22mgBxiAAD17uYA4L2Bee8WYxymqV5vYOCueZgen57nw9S1Lsvdd0uMqdzWZb+2pozycDxZNxGmIFrL48NTCrKs5yhDzLOq1m6PMSCA1m6miKja1msFtXE+EJAieCbS4q3dSZKI2YZjDqNbdUdnAQ4GiBAQmnvHsq3ns2kZs6Tnd6dpvn9rrXUJnHJAYDC3rnXdqrdbpBxi37ojz09THoYYs1EgBySy3kspROTA61Y0erCfIQD3cdA7MM/cY4wp5+PpFEO4g5VUVb3fc+JlWa7X6+vrq5mJSM7pdDrFwzSOY8ihu/VtJ6IgcphHdSvlDqHpIclDeJQA1r2Cg0xBskOAx9P3vzj++vn03cPpaR4Pc5pQJISQUoo5pRA5iXdkRSIzMHcovfVeetifhNze033LBnmzXTzkHCVTpBg4Eom6oVvXqqqGwDGIiChll/u+SoXNHYRkgBBYVNW7c5REd1ha27ata53nWUKqtVprvTUzA8IQwmGcmPl++q/XK4uklO5tlNvttpRd9d6J9JBiDtkJjdBZ1BURkURCIhJwZca7LhcACAWAat1ZvLXarYfg1/Wm3nIet7U0KzEeA8fa9rLchiGP4xgiC4dtr+M45pz3fW+tEjGi36PSXmopW4hy/17TECXFZn69rUTdEUtpzGwK3ru7O8BhnpmCmQHA2/n8fDoG5tY6o+SYt3Wv3VIKpdQcE6cIvTNhCPFuI+8JIrKgDFmiJ4Ixa2ltPYd8cI4YBoBEAO5AaHD/7wHfPn/58NNPy7rWVureHEqpuzsw0ziO0zTdQ3kzU2tYHCFuxX789JPX/iv7dX6XICqadTN3b6YYBMxbrcyMRIC4bdvlclnX1czuBdDD08P8eBrm6R4mEVFkcWJKQ2ttGIZ5nu+H8x4O5ZyHYRjmnxPlZVlur5ckAYVFpFu/J+uSIgUREZmOKWNQ6pJxHPPzwzffH3/77eGHbx6exmkCBmA01OZuW1NvKTwBgHkDckysBVophB3dU8g5Juu9bLt7U7UkEdEl4pxHcjDrjJ6TlGZOZO6AqGYB75s9PaWBg4zT5Nb3fe8CNIKqtqp7q621Uja15mZMwRVulwXIg8g0Tc5UWh3HERHvdkJCMP/ZOZq7A/Ter9erqo7zdO+oC7G67bWodyLKw0AkQcYYE9HPG5/umdO9zQ4AMcZWOcbo1m/LBzQ3TWZ2PD5Mh2PvvSzLYRopZ1UFcyJCIO3VId59kaohBgrRtjWItNa2UoU4yDgfH2Mar7e1NJ/nGchfPn8BoKenhynlXoxFa+vb5ofD8e3tNScZUj6/vrHE58dj2+tW95CAQ+putdZ5Pq6+qdr9yTDz3ZBpb0ce4mH2p3cQjpJG62C6kXfz7AgCho6A4OCICB5EwvXt/KfPH1NKgeTTx4/X88t4mE+nRzNY1/U+zp9SGlMmAgamEH7
13S9L03E8IgdVdfuZCRCIJEittbu7+zAM4zjGGO8v9rZt7t7dcs7H45GDvF7Oddu1d3fPOQeUu1Fj5ufn5+fn5/sbYgpqLZEAeFv3223Zeo0sd/Cjuw0pE+Kch/vt8m/+zX+r1Q3AuZjVh/z+2+dfzfNRhEGsE5h1uROh3K3obUG+WW0NkuQ2E4i5jSlljobm2upevBftlljUAAG09X2vDI4ELIRukQMRCBkAiBCimxmim2kKmZnd3QwILcYIALXtcIVtW15fX2svp9NpmqbWWt1biAwACt5aE0YH3Xp1VVYhZhEhortjJaJxmn49TYoaQkhxYOZSynK91d7yMB2Px+PhMOQJkRH/t7sQwBHpji8nYFNww969rtu2bTlEZBaRw/w05MnKyszDMLRSHbS1EqOkYXSFXhURQ4jbWhS7iDAFRooxXtaCAEMeYphYJgBwVEBZlsvL61dtZmb4+DAOoTZar+fjMC23cwj27uGBwHtvvXfBB0rptp3ruqXIyLFzaNqnabpc3mptIQgi1n0jgCApJMHHRxueencIzA/vvRbVS/ANBJQFCBkQAADQEYb56Ye/+Kvp6VHb/vblM6AZyOvr6735lVLKOQNASjHGSErDGPM4sIwpSKnn5fNLN5zykFLqqkjOyvu+q2rwVMumvZZSgsgP33/PzK33Uso4juiwLevXr1/X642ZY4x4nxl2v8vgAODeLAMAB7Xed29EUGu33sQBQdEdeq+lLOe3+2G4R1ny3/6L/447N7OPbx+uy9scxyEkJlPRogUNzawphhQBbd2X3is6uHu0IVjJA4ZJpukYYyxlaVWZEpDXxgQIbmDo7tfbjRBzjhhC3TVQAAZ3c/dScNvKfR0ii4BTV1XVvZR1Xd1dAsUY0jDOalupsG15msfDoe0lBQOwtZU77VOAtr2KCJLsrW61iEhKiZmBKY2DpEjws8wY0Hr32+12N11Dmsd8GIfT3VL2Wksp1vvdaqq2u4KFCFGb1t6qNtMhnxJLTtM4DsfjwbT1Zt379Xp9OJ62zRU6IhKRSCi13p/7MAx763vbkHycwrzN161qh8Ph4MRvb5emVUQGd5Ewj8fb7WbmvVeLUNYyxNHMtu0yDTEKg+vp9PDy8qVqJ2Qk76VYbZ6k6M47Pzw8mM2IyCz3KALACVr3HqcnGL4FxW6KvHBg6qRvZwpXPj05j3f8BDgDAk6nH/76+Av0fn75u3//b0/lgDO//NS/fPrUWnv37pkhbNtW3oq7PuTjkz9d1mXdF2E2s+v1HGP87a/+ZjxNt22t2kII5JgkTNE+fFjWdQOAw+EgQiGMhI5g67L01pCIAccp3wEZ276UHe8J3r0NDOZl2xFBYiSAvW69NQBAogiUQmIiALDeLm/ndV2BcLhLrb779ld979u+bLW06szQbLdi0pE5oAMiOmHblNDd3UvpgGY29ZoQLYsb1H0Db9AbIqaUYoyqQ+8dFDqqK8CyaHfhjIgkAQkcoRbV3pm51Oru83gIIRTd7xCEfa91b9u2EcPp8RiDDCke58M9XGTUNA3etda+9K1pFaIkwQmaVjC/t59qrXcByc8sTtV930opYM4h7LXvtdy9thCnkMZhzinXthu4mQPgvR2zLLfWKjOtt8u+XRmcg0RPSeR0mMHA3dZlQcSAdl9PrWq1tiFnAGkVMFopJQ1ZtasZgAOauzoUDjROMxhO07g2//Tl815u29pzzv/8b/7mm2++6c3WdRFqtfgcaZrGWqsp5JBr0ZzCOA61rNu6S0wp52kQ2xUDVO37tpQckcDU8jiSMAvHIaeQRszaKuAN+BhwgL7orV8/v9a2BPa0run5EYcHAEYABEMkQHUQOb3/9d/+67fbdj7/0xiGX3zzAxENQ4ox9vFwu11LKRZpb8Waln3fEUnYHZdl+fz15T3DttzWdb2b851546Bgd4XPuq632+1eDOUgYC4id1UIupdtiyKltX1dt1IAYJ5n93BPbdUa7QKEZV/P1ze3nyW
ftMA4zCklBRum8S4vvdeOpNfFwAA0h/h8enTvCtW8t0altLtzERGzn8vJpFq1Q1cbhiBpiAmaNm7WooQUUrrbGLNYS1c3cf2ZeW+WUwIndzT1VjZXCyljFHJQVQjsCOu69t6JwKFxhGM6MLMIrcv1/iMWZ7e27h323vuupq0zOJIreCtdWxckF3I1EWmt3TWD9+b53lavHR37tjUHbSZCgOZoElIIgRiggTuo9t47EPTavKtIUFcQdiD3XmsjkGGcat17be5OIOOUxpxrXZv127qAYZIMjKXeBKNpabuKRFUls8zBUHdkBAXUYZolstdWSllK7cUur58fjsd3796XVvdtQa3hMfEUwKjXfu/5I2JX9K3mPNaiijsTjeORkrvDGGZHa92YGd1UPR9y7725f/P4vj8+0eGgDWz9kmICnlHb7376fevLLw9P0BwZhjgZBUQAJ8eOIAgGQKd3v/nV9x/Wl8vKhQgAIIowM4AdhlFVb3vZthszz8dHpiABWquX69tWVtWHh8OJgVqv5r3stUpPIYqEbdve3t4ul9sdFnY6HXrv1+sC9JOZ7fvmrofDQUR6tZTS4XDovX/9+nXbF3eVGIQCorempTYiarVuy66tbPM6TYck4W4KSbW11kxl33cAQIKHhwPzg0Lvvar6uq73ROSuGr2Tye6tBEbqvevtdpoPQjwMAwcJLCFFjgER75psBwVwQAREEQmIDHjdbuuy30ni8/Ewp9CWVWsbhiGEIEj35B0RYQUEzTkjOpK3qq1305+vrGUvpZRSkCmEkEOs5kAoziIYIhMgRkQS894cDKHWvpW1tyJIROjIQUjI3B2JDofD4TilHO5p7n9dWqEhRTPr7oTYuxLwmIey3DrYlGTbL4SIADmkw5DmKZ+vX5dl++bxW9Btq5cHOfbW2rZiTvM4bqUAYcq5lMqmElKyLlIINnc3RfCAIGwcx9HdP335omoiIY8DoFp3SUl7k0SIGNMhp3y5vqUAgN1Ybbc85KZ9HIdeO3FgIqLQexOhprVu9Xg8RuHqLR6/NfkmQGr9y/WnjzKtnYwwbKuepXhOoWhfdzlkd0REdHQEdAIEAHv67hfHP/w+W9v3vfUiSQLhVpsDDOOIMRFzTiHGaI7ujmVNZQjjUFrV+0JZpBjiNIYY872Mc7m8tab3+k8IIcYYY17X/evXr9freV3XEKRs9R5lHA5zzkmtff7y8cOHD7W14/H4q1/+8jiNSdJxmmOMwLSuq3tS1XW97SRDTPN8TGlIw2RmAoSMFKOkNIjI/W1uTS+3a2vtTvS+TxJs2wLgIeQc0050O18+f30Zx1FiCPdMSYM3A7x3p52ZBVHNwF0BWmvXfS+lqGmt+7qtHEhB18s1SQCwrW5jTDEld9y2bV33OyCWCLrWvfb7Q7u/Xa0VZMIooEoAMeeErKrMGAOHIOiOQFU7gKRg5kgk45jrtiNiHGOQVEq5XC6llBDSfBhzTogownciuZnfZ5FcLWe9j+T23sk9xijuWlZJwIBR0rdP76LQut6W29VNBGNt5+PpAITLcu5tSfTe3T3GZd+ZQmDpiijONSYJOcq27Yh13z3HFCWoA5AT/py+EyFAH/LEBq31eZ6IiFNetttWbnk49G1FxBxD2ZacH10dmfa+jGFkzgSo2ogQTLWXFDNxUKioBZDD83eKQ1le+rq9ff26llWnSZDSEJnxLihwAARGb45813fn6WmtbVtec857LV+Xi4gQ8TQdnt99gw6X61trRVVrb0wy5qmVXrZtcRjyRCQphofTUwwB4Wfx8/F4/BkdiQgA7ioip9Pp/fv3y7K0dhex0TzP83FGRED0jjHkPEzX2+evX98O8xxjfH54PJ1OMcZSyuFwqHV/fX39+vWNiFpSdTydHqYp0/+/qPfosS3L0sO2d8deFz6ey8yq7OousruglghoJHDCgQBJ1IASQOj36B9IP0AjzTXhWKCaZFNsVBcrsyrtM/FeRFx3/Nl+a3Bele4oJhG
Be+8+a629PocQkVwso9ifsDS0nOWiKDDGWuvz+bz8V8ZYCEFgKhn3MSSGD81ZG/Pw9FgWhRBiWWNBCFNKEH6OslkeBuf9OI4xxkwpIQSE0DnXtW1smuWeOg5DPwycsbKqQghN08zzjDG2VsfkjdFG+xQjBtAyCuHy3iFCKEWfIAAxLobCAENMMYCRMoExifMcgk8pCq5W5QrC5GIAAECYnI8dhDGCokLr9brIKwBACCHGoLW21lBKsyyjmEwpEYIjSIRgnZLWmiDgrAcQSkZRSHVdAhxn72KCIQTOVQAGYCBYNU/eOo0AcNYSSgEA1lqYfKbyBUANPhKES6WsGeZxmExMKMQUrTUQBO/jOA7e+xj9zS6nFHutpRAUU0Rw1zcpWoRQDHicjLPm/ua2t3YeR5SQBw4Br6MD0NX5BhDqjKvKkjA8+bhmBWA0JWemTy7SbH3LVgSeOgD++PaHP4oY6rIANEGOQUIAAggAgCAaHSnAMAcR8EzdvHn59psxJDhbs98/x+RX9UZlRUqJM17XZYxRa30+t9776Lw3emw7AuusYllZcC7/PGAvdS3LMiklQkhr/XlIiREhtFpV63Xtve/7LiVwdXW1Xm+dc8PQTdOEEMaUMsabpvn46Xl/OGy326urq6qqCCFCKC7yskyMKmPMcj07n8/jOFJK8b/+V//94kBvFx+m5TJlDEJoGUWW5BIA4pI2zCkDAPgYGOeMUARhjHGZKbz/rHVYLAy01sYYp00EKcWojXHOBe+Xi2mI0Wnjg0eMAgSddYs2YhxHa60PFiKAMPQueBcggowSzgXjXAixcLtxgpQQhLEQgmDCKKGMEkpiAnoy1vsYgfXWOw8AJBB775wzgolM8hSTNtpZFyCoytWq3mKMUgII4a7r+raBAFDKlJLWOmud90EbgxBOi4sbgJRyxomitFY5hXCc+hQdih4QWJUb7yZKUgLwsD8SmBhBEOCEIEQwBO+c41ImTCZtXLTWOO9cirAd527s+qlv+2GeBmuNMa7t2qHvnJk3q7ws5KpSKucgBTsMBIOpHwqZgZiGoacI5plCmIzzzCVnjIMACYDB+9navMyLLDM2VNW63t2n4gIqDiCnUEJrm6f3uj9zjBGCv/vH32o7397fCs4RYkioBCBIKYKUzBz0SIRMEAMIpKD68xFESkmQ4DTNep6MnvTcB+czmZd16b1rT80wdJggkMA4DIxSyYUSAqTkgvPRp+i1nkPwjNGU4pKgAyFeBAB5XmaZWti1hGApBeeSMUYIFUKqLM+yvK5X9XoDEZ7N3PbtOE0+OgAJCEgJtV5vVqs1QjSlSCldun0Iniz2G84ZY2eEMSHMubCIFRBIxpjo3cKlXhoFZSylxBEAMXGZYwAhhD5FCKFzRmuNEKnrEgAwDAMAgFJMvKGUOmv7vvfeE0qllIiSoiiMd3Nwbd9lTCxS16HvYwhS8sU3HEK8dENCPrO9Q4ohBAQAAgBj/HnkipFyRilNIRrjCKIARAghSMRFyBhDGHZD571fvi09zwBCyEiMEGMKInAuUIqMMfM4zXpkVCjFlncUQpjnuR96jLHg/OriEqYgc5kLti4EieDcdGK1A95BrzfC29lGhI3W1p4JcSkRRktI2Ww0YwxDGKwzxmAuGSPaBkJRmoCLwTnjnIsBhBBCdCBC5wIAERNYZCpTilKKuYwgTeOppHwaGuCjUmQYBgyByCShlGBgrbFuZoyxbEtRyAQ30bfDuaxfKlHNk6vqgKpNhAVIIDBF2VaM3x+eP3hp19vNF1/86qd3f+z7rl5tXDxVQiJWBRAwRIFnqO0Ca5DcAICKYgMIPTXnGKNgclVfxmStnZ3Xk/HjrGcTykrN8wQJXK1WXLBPT8/N0COCGKPeuxjj7LV1DqS0gPfe+xgDpawsSymzheIWY5xno7VZkOxhGPb7w8KfK+uqKLPtdpsQtNa+fvHy6fh4PO5DCKt1tanWCBAAACFswYmNmRfU4vO5WrhHnwVpICG
EMI4xguB87/ulKlPOlhGIMSaUJIRMw2jHGWOcrDfW6uAgTH9S7rhlXTOOI4SQc7lwxJc7K8Z02U5yggEAGECBKZWiyPLFPUEu9oAJAACkzMqitk637ZlQmWUZpTikCGKilCwxchhjPc7DrINPjEFMiPdRCJHnihCMEJ5GO00TxGAtuDW+77pp6ud5RAhRzjBXLvhJzwCkLMuMsc65vu+D7y4u0J96cWi6xlsnpSQIbDZbySmiKJdKCkoIu/oyz/McpPT0+HD68I9Qu2EyhKuyFGM/AIi5Wtk4W+ud8UqpRcfkAeKEZlnhwwCI9WFGGDDGMMVlXiTgjPMgLg6KblMVnFNBGUxgnmyp6jD3wdl6te66bhhHgpNNWAOKAeSyphilyIz1BiVIU6HKCFXbT7fFRVELHwwODpICAW/sCUOMUQSYdEMrlPz1P/2nNsxD11jvCMJuHjkrEEQAIISpA3D+8FjcSShzgJiQpQ0+ej32A8b05vaiqgoAUtM00zTs9/sYw2az4VyEGGKMSvDtdsOlJIwaa51zhJOMYms9xphgaowex3HZvixMh77vFyp/jB5jzBhfLLUXXnRRFF3TGrdfHhVvraTkxc2t1ppzzikLIabk53lwzo/jiBDCeGFfY845CclzIWHiCwS9gFwAROssBJ8JqM65xZ2CMSa5cMHP8zzPE8OEIYwpkZxCABYGzqJzwxhLKUKIKSUIyZL0lmXZAhZGkKLzISRCIMeEc55xASAM0ReFkpIjhCjleZ7nWbk/PC2DoBBCMg4JhBAstTykSBCgglWMTv3UnFrCsHPOGTvPI+cMY5Ii7ftRm6GuS4IFJgRSkvEyuiV9Ohozp5AwxZRSrWdrbdu2erZZVhKCtZ76qTN2EowLhhBIKbiyqCHA3qTOaZv6KngpRF1s1BdFWa3Gw6d6miGN54ePMEGm0qF5gpRiH0GCzoaUIMEYADgHDyFOCANCI0II48269AESnAghs7M+GK9nDHiRiyLLizz3fYcISgEAiPJqM8wBRuAjJQQyVAi+M9ErIQlmjCMXhtibfhggIOt1LZk0g1VlLqq1RxDAHgORUpjak8Rse3H1dHqe53nq+5SCC+n923eXt3crSgCIEJCYAALQcDB8f8zKHRISQCxFmWUZhpRTOY7zNE1VVUmVH/b9h4e3KTmCVV1vYozn81kJUciMYjSPfcMYQUhrjQPOCsU5R5Awxsuy2m63wzCE4Pu+59xaazCGUqpFHtD3g/c+y4qyrJVSzrn949Onx4+L3ohJ8frF/eXughDWdc25a/thoAwzwmOMIKE/cSKA9x4hQhhVkvOUgvWfn4Foo/cxhARhIIxmRb6E9kCMEgTzOHVD3zYnTigBmDOSZyXGeLAaI4AxijFRylGKzltrdQSIQgwSghgs9COECMTAuxi8TSlhABNMs5mjDzF6JqSgnHDGGAMgnJtna3We51IIjCHAn+Et5xylFMe4EMcjx72e+n2TUmKCW2vPbTOOo/GOcRlCmuex6stVWSUEF2abyFVKKYTkrIYgIrSUf2+9I5DgpOexsTHpoQ0hRZBoLhDjCCWfYtO2DOOIQF6uFM2jB31vm/5DVpBcZfz21bWARs+M58350B6fu7EjYEAY5Kqw3iEErDeIgeSB8wl4nHwKKSKSxYRDHCFRyHlvLMCYqZwCmGWScdzr1rgZTgHlWYSIICZLWZWbFFE7t0RmF3cvmFKYC0xIsAYO2heTKGtGeZg6KDFVNLgYCCYAhkgiBBxJkMXxcM6Lzf2ONe2h1Wejpx7Bh3cfs1wS9jUAJKWEIEzRM5eK7TrigCEGIMY4vbi8TCnEiKLbj6YbBpCp4s3L62CHh4d3CIZJD1V9t15jAEJiJKVAIArOYMaUUiEBijKU4mhn5xyndV5WWVZO0xSjX4ovpVQphRBqmvM4NcPQFfl2t9tBiIZ+djERxiKMhDPGyPPxQIV8/fq1KpR2tu/GeXATdARhznnf90opKRUhLKZEuKDL7JV
l2RJFuHQWLpkzflmBSS5CCM77GEHwQU9znpVVXgTvg7MhecH5rtpxRhYQYJomq2eEkJSZFCqTOQCAS+GTX5g5PoYUgLPBB01QCiHp2RCGEOILuG2D7/seY7SQecqyRIiM43g6naSUIYRp+qwNNcbYo11o4mnJ33WYEEIICcGbacYJMykiZVrrnpA8zxeu0cJsSynFmKbJBp+WXJalLQxjk4Azzs7Or/OSIHQ87evV5mpdM0y0mZHMynKd5aWUUnLBMGv6pu/GwTdDd4zBrKriZntxud7OL984Hd+++yGMDQgaUaIEG/vGzTEADBFJMAYIMaLGaG2GepUrIYO3JRcRAs45CongaJ2PAKSEZJ5BKZOP9eXN5eU9hKQuNofzk1Tq+sV9lmWE0JjSPM8+11QqoThNEBvTjGcMgKQcJJEwQkjDlIdAXDNG67778Q9SleucJ+8Aoce+l5IqIaINkEQAEADg9O57f37cvbiFZQESiDAIBnfrDSFomCdAU2VLijGlgRD+9ddfF2X57R++Oe6PgmerumSEhwQiCZ9rrrWcQ208xnS7rjCj4ziOgw4JMUYgTBFEQUWe55QS59zzfn86nfp+SAkKoYqiWk4shGm9XnNBjTGn8/l5f7QurFarsswXWMm5EYRAM75QW2OMxhkELIieSCmtNiklyujC5rPWQggJw4xQ6x1G2Fs39UOMsdxuaYYzJRYe8mjtZLQNLqJwXUglhY8pJdi2rffuxd1dCKEu66pcjePIBMdsgVGT0ZMxRkoMYQZi8N4SglKCBFPrDGNMW3c+n7NMFUURY7TWLQ+G1to5xxhrmmbh8AxmHoYB+kgQopwb5+IwUM5D8CCmqiiyLIeIMMbGeWr7DmO8BM4tuqSFQ+ucC6HDBGLEnXP9OB/2zfHQSikjjQNAECaqKko5kyoTTKoqLzdFWVPKMcacEmt9Xa+bLrXtyWvTNof+ePQXw/b6sq533mFRqfPzwc4NJhADRvlm6JquPTkAPUwhImujtZYxjggzekIIWOcgCpxCxiWngFBKCMmqVbWqI4ibzebuxQtM+DiOCcLd9jrPFSY8RSRFZlOIEVAhFWcIQlyUmMv80BIYADIxGezmhDIAIURumob19W0P4tvffQN/8cJaP8+zHqfrV6+FUtaMQq0ARHbuTvsPcTJk0JuNSgkgQK8vrz45bXSPcZCcI1VknGlnnDHVqsiKryCkP7376fn5wzSpqtzGGDlnlFJGxGKIYq03xoDosqJgjJl5nsyUYAApWW8Ywev1dolI6/t+no01oapWQogYfQgBwqSUEpJyTodhmrWmlA/D8G/+zb+5urp68+YN5/xwOCIEsXUAAUqQNkPQkRDkrSOMEQCi1QtfGEEICSHaGmstI1RK6X3UZtJmyqQCwQcQMMYgBQhiWeZK8Wkemra180Qpp4JPo+667urquq5WwXshOJes7Zt235SrWuVZroqyKE7753HqIMQgYcJkSUiMIKXEOPUx4hA4Z8sqSWs9zzMhtK5rKWXbtm3bLpeKEIJPkTCqMo4hmo023iUfdPBOm2kYAABt2zIhLi8vMclOR6e1Xha1RVEsm/iUgDFG6ymExKjo+3EYBm1d33ZFUWGByIqXZV7k6vryoqzy5IKSlcoyxkhV1SklM09lmWvr8qoWJOiGEwTN2DndHh4aWbQQ5RZa5yLiO6FICEYgElGcw9g186RHY0zw0Vr9dDzkQ353dQ0A6vtGZcxbmyhDiMtMMiZysUKMKsWLqpYyN9oF79uhizGOs7hTL0peaK0hBiC4GANkggmRqIxIqIs8wWT1AScHmIRQQoB9ikxlU99hABOkv//D908PH+ahjy7G6G0MThuhW4CJOZ9TQIkJbb01mjKZdI9BMtZ++PSMExRZhjPqYnDBBxCyskAJvf4SUkHfvv1x6CfBNQDQe5dCKLJlGRDGcTTOd117fXvz6tUrEFPTHpwLKYF5tu1op2lKCRpj2rY1xuVZWZbF8fj87t1PKYG
qqsqyBCAdDoflYEgp5yl9+PDhdDoN42itnWdTlmUKIQWPGRBCUMIpFpBR/L/863+ppIQIhRBccHrW8zxbp1MAGGIIkTWaQEQpwZ99wXmKcRx6yuh2u6mqinPBKEsAzdYem1PXnRgT69Xau2CNxpSootjudkKKEMI4jSkFQYkx9nj6pM1IuRBCIIxDjLPWC1ywVHfO+TzPznnn/CIzt9Za+/+HhFJK86IoiyKTijOeKAYQKqHKqipUhhDS1kAEi6IoioIzJjiLCWitF2mItdYY0w/D+XwOLnnn+7E/ng6H4/5wPBzOx3GenYsRQCnV7e2L+/s7LgjChBAOUYAIIAgQRCoTRV1KKUOMnFIpKUiRoECAGZpzfzozwoObIAxC5C4CSDBm1KdovdeTG3Q/DLO3YRrHj09PWZZdXVxaF5wznDMEIYSIC1nXmwQRADHLFCVEShlCHLsphoAwVrmKKYUUs6KYjNbOcalUmVORQ8khoTCBhAgAjGAFSQlQAQGKMRKaM0zmw6dPD49YZc/Pp6eHjykmjKhUuVR5JgSK4bR/ev/u7aCnBJwe+v2nt8gZOx7a06lpe0xEkRceeAAjRfj5ee+dl1IlGPM8V0qmhLwPC9YrpciUUpmkhC07cSElItjFcHV5+eb1myyTMYZl+0mISCl1Xde27TzPUqrVqp7m/t27nx8+PhBM7u5ebjZbrfXxeDTGUooIoQjBPC9ijE9PT9OkCSEpWmuMcwYkEBwIHoKIhciIcw7E9GcobqEAAZAoISiBrmkhSquyAlF67+syz/PSWlsUxZJjLoSs69X1xU2WZe04/PT+p8PTE+fi3A+fDs+//OrV9uJi8Wa5ubn58PDu8enDYf9JMMKoiIFYq7mcrUNa6+DTNM1/NglbEDsA4LKyTSn2fbeMZ4tIYHlBHwkhIQbG2CaTWhpOKKNCaw0QhBgJIZTKjDHRByHEss5a1C0LSk0QVSoXjKeUjLNGT0137oZ+ms00u3meJ62VyhhXEOJp6FJKhGEwoXkc+7bbbi9UduGtbbuhbdvdqsScRR9wtM3z4cP7B1Vs797Us+0ZJVxZChTEufeeIM+xpXQAMYEUjNEAo+vra6XUfr/vxy7GKPguq9ZCiLKsAcQEI8Y55xIlwJjAhPlkvHeyVJLxxJKUKsToQ0gJ8iznRQYQTwAAyCGAACQIXAIeAgwiThAiBEACow8PHx9//vGHD49P524euzNnKC+K4/nkv/12GjpZ5NM8DF2XUrrY7DCX89R+evc9BPr9/pSx7P7+PsFweu5Lurm/e0UYP52Op/MBgMgYQ5C9eHHHGDmdTkIIITjFOMuyoesZ5S/uX5aran88/PY//+6Hn38WTGg9D4MGAYSQ5kkPYzcM3fKnQghd1y1XOCl5URSLCcp2u0MInc/nae6aplk2/atVVRSF4IpzEWNYOEiIMp+AEFLk2W63IxiieZycX8wPE0iBIYgxwxhFH6K30zTRAHYX2+riwlr7/PzMOb+6unLOfffDDyHE6+vrdVWDFFfbzXq9bs7HWduP+6fJDC4GSilFaR6Hfmg+fHjX971kPIQQoKeUTtP09u3bLMu22y1CRAjRNGcI4WazWQBjpTIAgLVmIVkopRbV3LJRnabJGTPHCCGMUiqa51J575+enqZp8sEyxrIsQwgv3NoEgTc2QvBnF9UYAaW0ltU4T4zQPM+llNvtxTjMbdsunzVjAsLYNvtpWlkzMyaChy4GYycAJiFUWZZLjHtKybkACaUyqzab/ePP3/7w7r/6r39J1dpnqqCZjTNhHFEVjI0uG8bZRoAQWYYxAFAmMgKR1noa9eJdSQnHiCPIMCQ31zdc0Oj8MEyH/Wmz20aQICYMC4QRhJBgSCjaZpvkA0fETxoKAAjDgACAQEoJxJgSRHGJBgMxARS5Kh7a4f/5+79r2pZnJSHeTSBAP5lmGovn4ycs0c16t15v87Ku1yuCyHq9HvT822/+w/fff/83X//aeu29X5Wbqlw
LwW7urs7N4fnpOM0DhGG7vdhtbzabDUIQQtQ0TXAOAVgU5V/86ldVVXVjL7n48suvunPz7bffci6VEkIxRFmmEmMcIzqMXdd1WmulVJ4rjCklHEI0DIP3HsJkrZ1nbXQQPBdCABhTSlJkkgkps7yuACbTNMx6RAmUqxolgCEi0YdlFzlpHWNMPgTnQUzWWgRgJpXkgmMiuSSYdlPnvZdSLj4tUkqZScYoZaTpuqDnEALFAHJ0s92ussL18/7pYbPapgS7rplnI6jgXGRZBiIEThOCpMwgxNbElEyIRmViMVRbnDBCtEIIqbg1flGdhhCKoqzr2jm7pOp6a12IxjnftIhgbb33nnEiMSuyPMaorcWMYkTbvpu6cbGnhRCDEF0Mfd8DACAGCCQAWJlnWSZtaa6v1yklo522JqWgGHXGRgABpNCDiCAieJ5niNBC/aiLcpoGhBCmSHJpkQJQff1X/+z+q18BICSXNOMZu7DWhmAtBIEQyGmIyIeEEDROa22VzIO3WltKUJ6XhKBZd4uIpaqLlKDTjhKe4ry8ECU4ApqSc44Scj6cCeX8IoMhDl3jncmqGmVV4h4kDCGOACEoFkZnAAgDlADAhPzmn/03bdv942//Lsuyal0DF4ZhcH4WimsbKlJsdtdXV1cARuc8ZFBxsaLldnPLaH55fesxBhiXmRqn5pvvTpQJClmeF2VZ5Xnuvev7Tqns4uK6aU6CMV4UZtb39/e77fY///73H58eb6+uV2VJATTGYExijMMwYYzKKtvttl3Xf/z4mCIS3FJKjbZKZXd3d3VdL35YjBGlcoJFrli5qpepaRmbIYRCKEIpSCkSDCnP87wuau/jME1k7NuFvhadn61JPiAICUSUcs75xcV2tVotMYaHw8EEt95uOaXDMFBK7+7uLi4uy7Lsum7xXNDa7Pf7GGNd10WhJgzGXlv9adE4C0ypoNbaaRh9TN5bgFHOynPbaPMMAVi+aWvtMrcxtnhZu2nUznlCKBO8KEop/TzPhBDKOYaIUkacSwlqraMNpcrU7iKAlIIzRkefIMGLNxuI6eJyjTEGCUkmQVVYa5e6IqVcfi7L0ns/DIMQQggJEmKMIQyM/0z0CMhqBDml3gbBuFTch4ASADFRygmFEAFOM4fM+u6FWF0ASOUqTyilCDnLKJHjPPmhsdZDEFVWhqenczM23RBjGlNIPvgUEVoU4jjGkGVlnuecSQAixMQnD2C0szZmppALJYQkMUGACCowSClaF7T1zkgoQGIYwpRcAsmHESaEIAfQQYAwpAvBOQK4vtj9d//dv/ri7vYffvf3AKRqt95stiklQjDnYru9kJJzroyZj/vnLMtjHiklF1V9f3WjlOr7PqXkXdTatm07DF1d7f4szl6IN2VZSs45Tp9iOJyONzc3EeGHjx8JhZvNGpA0Dh0AcGnsXddhjDEWXTsac/IxyEzc37/UWhNCCEF5nl9eXmZZZoxBiCyeA4t7SgihbduFzbmstlNK09gv3zLjMkRgY6CMlDQnbpwJIYxzmuUQQp10URRZllHMpOQXFxd5WTw9PQ3TGBBQVSmE6NsuL8v7+3vnXNs2CMHDYX8+n8uyjDFcXOyUyvq+b5oWgDSamVJaFMWyWULWLFhBCAEhgDHt+iYGxzlHCDAmKKXjMENAl9h0re0CXwuZjePgrSGY2uCfD/tMKkEZYSQl0Pc9QkipZbM2EIb7cbZ64pwhhMau74Zps9msNzUntGkaCCEXFGAEES1AlhcKQdL3/eKx8edrRtd1C1l12SQYY2I0GGNKozEGgrRoOFb1pm2atm13uw0AwLuAIKYsv7x5/f79e8wFJDjhBCIctEEI+GRjskulJ1j4APbHZpoMACC6ycWAMKYEQUistc/PU13thFB/5tgSQo7H4/F83lxeEIiCi9o4kQmM8cV6l3Ext/1kp+3mAjGZOIuMgkAQhAgg70IIPZIUIQEBWGjOCMB5nN/9+EfE4XZ3qc00aa1kfnV
1JTgFABAsGMPJhyovIATWOhjT2PcAIZRA9A7EMI39OI4JQQgho58F+E6bZ/1U13WWZfM4mnmepllSThI6Pe+bppG5LJWEKFirhRAxgqWrr9frEMIy3xtjQghKZYSwGCNjTEq+HPQ/Cb7TYjW32EEvLotLrM7CiCaECCGWi+s0TVpPQ99+tpvGFMcQz+ezKkoAAMVECVnmxUKDOR6Px+PxcDhEkCjG29UagGStzbKMMTYMw7fffnt1cbHdbkdCjvu9c+bu5Yv1emWtmSacUkKURAhGPUcIRKa890tvwgDKPEMI6ZOllKYID6fjdnNxsbvibFr0KMbOAEQpF/W62+/3Dx+PlPCyLAXjbdv6IpcoaW36vu+6brvdMsaarvUxLJEK3kVCSNudtZ4JAjCuNQZL1MKpbSjheaGWDwVBsnziS41RSi1SoaUbhBDW63U/DnleLncJlGACQGsdfcAIEUI+Pj2mlC4vd8sH7RJgUnGVVav1oTnvT8c3L18RAubZxhBghNbacdLtNNjgrbUhuBDSZ/cyGJ0HCKHDYb/42TPGlFIppXGesiyz3jHBy7KECYx9n8u86wZrbV3k5OZKj/bt739fVznLKaQCAAExBcE7PVIKAZUBALCo3VNKIECInR4ePvzQj8eccs7KmcwIkU8fP1xdXb1+/ToluES4ntqOMQJAGPU8z3Py4XH6IKXM8xJCHEKYhznLMsmptTbLVFkV57btui7G2LatHue6rqUQf/O3/6Wex3cf3s1Ge21mrcd+8t6v12vOxSL1WsaYruuW944xNmamlPfD0A/DxeW2KkrnXFnWIYTFkM9ajRBBCF1eXsboj8fj6XRaBAZFVSKElntalmUuuH7srbWkWK3mcWrHAVsbosMQYQC9sZRSTiiBqB36eZ6VyqP3epqttd55a93hcJznWTB2Op2WmFtrbTt09scfT6fT0E/ehxACwmlxcuScLxTrBdOGiGRSpZR2O5RSenx8BAlTyoyx1jqAoPe+awdMYF2vhZSKO+9iAkgpVWQFRfjT81PTtX3fa62ttYvk13uPIRr7iQkqpQw0EUKyXC4T84eHd7Nxi1TCWi9VbqwfxxEAQHFaBNeU0hjjQmVdjMpCCFkuIYTWaee4MTNjBQawn+cY/fF4vr6+vb65OR6Pz8/PlxfbGHyCQCp2PreEkCzLQorfffPHY5ZfX19+Tv4KIcaofTiPbT8MDGGGSaAAALA4qXCWOWecN3/5l39RVYVzBgDVNM3Hx0+//OoXUsqiKKZp8tqsViuZFb/9d78Tkn399dcgoWBdwDDGCGOKkGAAPAAYxuTnRDkgGEWSQPq8a4PQOffbf/h3BPvt5kI3TXD+anfVT+PT06eiKBDBBDPnDESICWq1maapWpWU0/3THlFifXQhAAA3u8vFjBZhgDxYJIRXV1dKKWut1iaE6GGqNuu7F68+PTzM9jtjTCnL3famKuYPHz4cDserq6u6ruu6JoTkefb09BxCmGc9zxpjbF2ggmNKECSztvM0lWUJIey6bskfoBQv/E5r9QLsLsJGZ6wxJsXonJsBBADkQpYXl2S1WimRSZkNw6BtTCGeu0Y6yblACE3Hw+Xl5cXFxUI51lqH4I/Htu+7hfuZFcWfHiwghKgJhBAOw6C1ZYynlAAIf2YUhxAWkjNjzHn/8OFdnpVKZRDCu7u7xeFwmiZjDKXUWjcMY0ppnt6WZVnlWSYUWWFIsJRy6WspREzJbrcz2nHOg7dd1ymVY4zLMld5tjyZKaWljR4Oh1PTLVbaXKplEbTciZdiv0Dry2th4BGElVII4nEctbbTqBdWnwtOa+29r6owz3MV49XV1dT24zgAEDAlAPClBfdtU69Wr1++6trWrNfOeD311plJj1rrtmn2+z1CiDHhU0QEwoQowjLPtAbXNxdlWU+TjjGemuPT/pMQijFGMRFSppRkljHGxnF0Znzx+lZKCZx1uttdbWVZA5gDTGKaEOQAJuCSxwExiCBGAIK
YAMIJAEqRkuK7n34fAUgBXF9fMykqSv7iV3/FGGvO3cJALvKcUjZHQxlDAEKEV+ttkechBOdM27bTNG42G+/DAuOEEGMMwzAopRDC1to8z0OKXIpvf/uPv/v2G54zKVVW1S9v71fb6p/8k79++/at1lqpvKpWizvi7Y0cx/HcHJcaKpUoiiIr8mjcNE2C82Uzbq1dCPMLURTCtAjBl7lxKY4LyDtN+uPHt0KIXKoff/iJ9N1IMFZchBCEYJTSEHyC4OLiwhgzz/Oihxzarutb7321rtab+ng4d12nlIoxJgi0NSFElatClTEE55ySudbae6eUVEotDJzz+dw0TZ7nWZadTodxSF3feGullBfbzdN+v1hmWz0HpyWXVVU65yHE7el8Pp+zLNPGjONY1zXjBFPy1S9/seiVp2laRGSccwAi52KzWS82G1mWcUqXmQohtFnv9vv94XDYXOx2my0hxNgZQjh10zzPSqlK5YVQy5xGCIkQLM2NS+Wcm/T8tH+GGEmI+rETkiGE5nl8evyIEKKcHE9nJnhI3vtIKWWUHw4HBPHtq2v9k/7x/U+KyvPhqJ3Vxr37+e2P3//Qtu3idCmE+BPoAa2dCSEgoa6dry5kgkDrkXP++tWbPxtCSSkhhBGAaeiKorjYXkYXASOsyuPJQkYiwSAaiAAABkIaGY8pKgAjDDBSgFJKCzwQfv31X/723//f7x8f/uqvfr2Y61OKCQRSCklZil4KwTmP0VtjCCEQAiHkapUpmQMAHj6+x5gqRRa3/rquzTR777fb7TRN799/IBD5FC8uru6ub56Ph/dv33315Zt6t5KEDdpop7//fo8Qub+/x4haa6fRAgBAIlJSrXUMYL3aXl1fSCnPTUMShJw7axcDdIxxlmXTNJ1Op4U8towbxhjn3DLoIYKtd58+PmGMVZZxzl2MEWHy/dsfLjdbmBDGWAhOCAkBj+PIOb+/v99sdj/++OPH9x+stULyoshOp9OsNUCJMLpMz1wKKniatfdecqHtDABggviAFBbr9XqhHy2VeAGuCSEx1kVRaG1BQimBh0+f3r59CwCs1zVBeFOVMQWIESGcAoTB4nkEmrF/enqa53m7Wa02a6Zkcm5xrFimAgwRAKAsy3nWTXO+fXFV5JUzC5YMqqJwzoO0loLNRkdvq1V1PtuUUpHnlJAsk8vuoiwyznmMsdcTjpAQAjESghln26YZho4i6L3fbFerzU4NXTf0kvN+GsZRq6IkBC0bW4RQAPD50GwQCBD8+MPPiko96kN7bprmj9/8/ng+QJRiipBhwgjGCCFkvfMuQBjmea7rDaXYmNk6m2elEOJx/zzOI8s4mhesDwAYPYBaa+MNd7Ys67ltp2mUNUHWA4wAFClFXubQ+jCdEM8AJn9y69A4OgTgzc3dvjnWZbnAroLRBajB6AwAQARhDFJKueJCqdkOzfNBkKxabyileVYyKkJ0C3lz8cl7UxRcCIxx2zSLGMU5t386fPPHP9zeXRdlOTaDqKvorfX+cDhored5XHLvlnuwMXaahyzLrq6unDeLxbfgXM/z4neyNOGlVC2/vuiJEULLKLFMLkVRLNj/zfU1hDDLsqqqFlUGWW3WH58eGcT39/d/3mddXl6GkN6/f3h6fu66jnBKCYQEOeCtXmhqheR0aXYxJsaog8ZbN6Th46cHKeWyPPXeNk2zaM8XGIsQsmwYl8slSMgY632ECO0urgEALMtISv04+hipkpiQ6BMGOKUACX51d7ur109PT8fjMaWkp0koSSnFGH12qJRqIXuaeX71+gWlWM8jpXxXrrthCCGPMd3c3MQIjqfTfr//+PHjer1mjLVNX1T5okoTSgghYkxN00hPq6oCABpnF7M+htG7d+8O58Zb1/cX46Qfn6+qTF1st8659+9/xhjnWRlCQggAGEOMIYRxHAftnSGnpun79g8//vHp8BSMLooqRh+TR5QQgiFIMQVOmRCSMaYUhzg9Hz8ltEspESx
Oh6MLPiF4OB0lppeXO87pPI8xOO99fzyH5Kv1quDy/HzEDkICAaQIzkQob7A+nzMGvdZkLRFiMXoYRxBN2x1/8Ytf9G46nc5XVzeLroNzLmRmrYcE5kLGCKRUKsu6rrNzmEbbBX/uBynler0mFOnBSCkZE1prxnlZVUzJRep1Pp36vp+1di784utfrtf1MHaUcBsihigk9Nd//ZsY436/r+t6oTB4bxGOw9B1ff/y5cuiuDqdTu/e/axUnqkiBLcc5XmeT6fTPM9SyhjBMAzLjL0ojJfDsNAfCWHL3ZpzPk0TQCnPc/h//O//6zxNL27vKKUAAy4FxYRzfjyfvHVLgWdCNd0ZghTcHFys61pw1XWdc345fCE6jgVEcdTj4ia0uFwsexhjLGOMUqIy4V1cCo/VOkGkjdNa51WplKpWNWPs8Hw8HvcAgDdv3gghuq6zerLaweQppRATH0OK0Do9DL2UcnmWYowBpDzPJaOcMYYRwMjH0LVD17b3N/cvXrw4Na1xNoW4MD5SStaF9+/fx+RXqzqlBABcrWopOUJk0R+FEObZYggxhIRx51xMxnubAPm0Pz58eD9N0zhP9WpVl2uFhR51N/eXF9tNtUsJztZEECJOLthCFj6kj+/e2qkHGD4+P5vZUk4IZwJThFAggGcKxgQBIIQQKjBKMNgQAkacURkhyIS8urqDDJ3PR5jAbr25v7shhDx8+ng8Hr768uub62tKcV1mMYJ+1IwxygSVmEEMEQEA/fDd95MehWS3r766uP1Vgs52D9DZtu2AS4TR47lXSsTkrXPeOYJFAqhar7eXF8/vPyjOAMWHwzPHqCgKXuYgxNPpNE9TURQLu9Zpl+d5hP7Tp0+MMW/84XzyIXAlOedvvvwKU/r9t99QhIs8l1JKlVsfCaPRp+60pwwTzMZ56tojIaSq123beu/v724wxtZ6RsXh+LhI2hlj06TP5zMhZJ7HJfkLIZRlcpGwOxcuLi6urq7atu37NsaY56Vzbhz7cRwZVSTG+PLlS4ZJ13UAxIUpCSGQUjJCPgPDJITZeW8BSOPULzL+WY/W+N1ut1pXi5uXt55STij33gvOvfcQYGNm78MXX3wBQOq6Tkqx5J9BjKWQQqXnJ6uU2mw2IYTT4Xg4PCOEXr58KaV83j+G4ClljCNGJec8AuBsKOrKWsvO5/P5bEOnuMrzvKoqznkITnAeQtB68j4yKYidDu2+/c/9OI5ZLglm0zRlueRChjRf31w2zfn5+XG73XIunTOMo8VMTirOmXz69Pzp4eEXv/hS5rJpGkLyPFeEyqurq1999UUMoOnOj/vnaZr2zb5t++jTZKaPn56UyMaxV7lElDztH6EHu5ubpu8wSEKK1W7rjKeUGj+vqvrV/QtA8Tzp5Ly11iUAkvfeR4gTAs770/7Z6okCBCJEBD49fizKNcJ8tm/tOCefemvef/yklOKCeu+HYTqdDpvdBkTw2V/V2tOpsS51XXfujq8+ffzn/6IKID38/D7jbF0XJniG1ctcLTuDyWjnqJ6tECoFe3h6IgQwyTARRb5KfgIgHp/3nDFrTIx+Qa+qvEgh/vz0SAk/NAefYp6VZbHGGDJOhBBumnCWEYiOhwOIkFAJbRQqAykhONar3LnQNA1lDAA09JrhgWPy03ff/6f/9+9vbi/LohYiCzGeT4dFpL6M/hDCPM/ruq6qylrLmFBKee/fvXv3008//fDDd0VR3d7eCimbppnGkXNalqXzEf5f/+f/VubV0HZLroG1emFs/3lzYoyVMpv6IcIoBJ/1OI7jYkK9ENfKspRSLpZGMcHlGno+HaZJr4o1UxRCWNdlSjBFuAATn43vMG77vm3by8vLoiisj4sYlBCSEgQAFKXkksMEAIAUoXEcV6s1Ffzhw6e+7znnEKVx7oPzGMAU4+IhgwlxzmVKjZP2MC3OYafH0zybu9vr+/ub7W7NGIkJnk6npSE+Pj6+f/9+s9lBmDjndb0WgmEExnH
eP+5/+P7HX/zyi9sX1zHGlCCltOu6GP3d9Z1zoesaY+eUkvUREC6E6Jvz8XhmhAnBjTGIkHme3717F0C6vb3Ns3KYeoAgTCBGoE3/9OlRyuyLL77AETptjA8WRD13zkVrIkhoMuM4tkbrqR/KYj3PY3M+AkghI9M0rIvVy5sXeVUbM+aFWK1WgucQpYeH91xyQmgmc5VLENPQayboX3z9iwQIDP7i9nJV1cZ6qx0jpHeDs0HiQDGTUo6T8SlKmWVZFoLzIXHGFn7KPM8hOADAMoKXZYkJ++abb9qm+eLV64vN9tOHB0ppwujQHqty8+LuDkLYjc27d++M9X/7t3+LMf7+u+8QAq9fv44xHs8nN2nC6Wq1zrLi229///33P3LOIUzrqtbaYoxVkT8/nbr+lGVivd1QzDlfAoP94XDIsuz29nZJ/qrrumkaABCjYtmOvH//Pldie3FVlmWCccGni6LQWhOC8OPjx7yoFnC4qMr903NwFhHadX1e5lmWDUMPMKirIsFIab0wxvq+F1KOw/Dw8HB1ecOYgNBhjPM8996mWD8+/rHI67vd1axH7yNn0kXfNO2frB45AMDHuKxpp2naH89FUSxglnPBOVdXZQSh7RpKKSdCKeWcdc5yQRivpeRKqcPp+Pz41DWtd+7u7o4LMU3TZx2Q9/PQE0Yvd1sQU7COcyqlLMryeDwuFhV5oQgh9/cvfbB69qvVijE2z/M0DQTDxc5S5lm93i62Akv3y7Lsw4d3x/3+xf0r5w2lhGKylQohggisJK6rrCgqzun53KYEY4JXV5dN01htSyU3qyqACBEOIYytiiY45+ysX7142bdtRJAoMevRzt7q4L2nDAIIrbXjOHdd1w+TqorNZuVjOOwbmNAQbBpPPlifDBGyGUYpJc0y66PkUsiMS1ll6s3renuxATBW5SZbF/3+5LTjghCQrI25yBFLMZl5nLpuWO12HNO6rgmGwZKEQN8Pfd8TsiiKbEhRcA4AmCYd4nSx20kh2radxwkjxBjaXl7sbq9/+P77v/u7f7verRFC1oWqzNvmVFWrlJL34Xw+zfM8dN2p6T9+/Hh9c/OXv/paKSWlZFQQhg0AJMsk45vNpizWjCNCUN+PCYDwmU+Abm5uECIfPnxcGKBt2z4+Pp7P59vb++3mIiXMeTZNQ0r4cDhRBtfr9RK+9PHhPVmCloRkx+ZsgplHeD6fOKVVVTGCk/NEkOZ0CCBxgdfrGiT2/v37cZh3FxsAIef86dOz1npbXhCiy0wlEAhk5dU1xlQbN00ToTjGiADghAIlrbVt37VtJISEEGECjDEp5aRtURSccx+sVFxBNk2TykRVVNM0WmullBjDvu8hTFVVEkJmPXLOf/nLX57P53mec6kW4wlOhbF+ITgZZ8qy3G0KrXX0vhtaemQhJIQQRCkEFyLhjF1d3vzww08QJgjT4fC8QOvH/WkYhvV2u6RHLitnjDFFuKpWzsyEomp73U+j9x4Q7GMYjx3EUDBmzNwPTQghz4rjufHOX+8u9s+H9x9+/vLLL8uiOh6PTXNys729vJJZlkBAGMhcDuOox2FVr8mKpJRc8J81ay6kCBknUuVSZhgCF7y3ASUwzINPtiiKGJIxzlnddR0EuK62eUaUzAXnUnKZSQSy3dXV4fT+4aendV5CgBAAsMhxgoohwrkZdC4tQsiBCBGy1psQEYgpRBhhmZXjOEYXyrJs2v756SQkgRC3TX9zc3N9cdk0DSHkeG7bab4W3BmbZdI4RRi9v3s9j3bs9z/88NOs//D69WsA4t//x3/48ccfv/76V7/5zX/xq1//jZ4GZ9OL+zdCKETgbMLbdx9fvXlxOh3+7b/7t4KRqihvrl+8un9NJB6nfhrqpmk2m83vf//7//Sf/uNvfvMbrXnXdZTS1Wr19PTp7vbFxcXFzz+/c95jhKQs908PwDu43X769Ekogf+n/+FfQAjHYcQQ6XGyxux2W4SxsxpCsPD4irqqqoo
xCiHSWp/Ox3GYVpuNtQ4hKJWKKTEqEIRGTyF6a103ToRSQujxdAjeO23GcUgpGGeGaeBcCM4JIYyxBMES8pHlBabEhxiBxwh471KKi1MnADDBqI0OKQolMcYY02EY+27U8+ScWwqztmbSxltHCR2neRo0ggiEwAW3JsQAIkjeO85kUZScs6JQGGPOBUhgGObj8SilKop86UhSZqdTSymWRRaSq8uCc5Zg8sFTKsqyuL66JARP0wwwoZRp57x3MUHKGKH83LbBu7Is58l454TgYz8xKZgS09SXRTGOg5lGVeQxxa5rGWMAQkKpMUZxgUDq2maeRh88RmRo2hRjXRQv7u4EZxQiBCEi0OmBQF9klBKZQtTThGJSXFxud1+9enV1dbnb7m5ubwmAxhlKqJstwvjdz999/933QmWEc5Vv8vUN4xJqPespesg551Is1c17Z4wGEBIMVZY7F6w1znkXwnq9rqpSqkwptd3scqWM0fv9ngq+2e2c1+/ffmibgZKMimK7u3bWztNAs2x3cQMwffPlFwEkY83LV68ur65Fxsdx1kZ75wCEKhdt25yaI8bs6uo6k0JlYl3tplFPcxcTeP/4vm87JSVjrOu6sq4udhfffffd+/cPZVkJwa+vrzHG7z+8naa+KGS9qpw3xkyvX704N80333zDuaRc4P/5X/4LQsjz8z46LzjfrFf1amWtDQC4EPtxIozXdbVer2dtplkDEDNVZlnOOF+In8vKCRPMOAspGRcSJM64GKIU0oc4jxomgjDabjdCSUxYtV6rImOUZnk+z/NnMM95PetFtey9D+GzH7X3njGKKbbGzvPsvbc+xBSXhBvGeN938zwv9KSyrILzmGBKCQQgBY8xIowUea1kZrQGELx+9cVqtQIwMEaXBCRCYd9PznmEYAi+LGuEYAgRQrDe1FW9zvMcweisSwBCQgCGMMUUY9/34zRlMsMQUYQzlas8QxilhJTMBBcIQghJluUYI6uNdf7q9noe52UMhSlZ71NKS7wkJSQB4KzlnC/BBUKI7Xrz4u52s1lTgvNSLRQsPc+zHjvdO2+naZqGXtKCULbdbHa7XV3Xr169Wa03ZZkb67z329324uISA+TcPPTHeTQJorvLl9vrFzQvojbeGQ9QcH4YO0ww43QchxC8lMI5q3KFKNYmcJmtN2UEi8u8ZoxACLqub9tmmodvvvmdc6Yoy4eHTyn4q6uLFy9uIYofP77d7OrZ6m++/cP28oIJdnl1acw0jv3lxUWel9b4T48PQztO8zhOQz93b99/eNofvLN39y+uL1+em/N2u9qud+vNKiSfEtlsNymk/f5AKQshIoSlkATiV29eU0rbtoMQ3dxcCyEeHh6Op713PstUva7HSf/dv/8P/TDUq02R1/h//G//uTEmpKgyxaWIKVlrHx4evAdMSMIFgKA7t3aeEYYRxGmc1qstoTzGVFU1QjDGRAidph5gzLhMiAopGKVKZjDCrMxTQkqoVy9fSCUgQogxlatZaxs8odTM+nw+M8YSTARTjBElrG06a4IUEiAoM0kJjSFmWV4U+RKws92s67qSUhBGBOec87IsKaWE4PVqvTj0M0rKqsqKzDmfl2q9rut6hRDpuj6EYOxorQWJ+mAhCgn4YeiVUplSxujgbT+c61X1T3/9ay4zIRWBCULMVQYRDMkB65Y8MZVnUklGGaWUcZliooQjgBnlPljnLOc0xnhum9N+DzEq65U1ev/8nABo22Gchvv7+2X5HUIw1iKMtTFK5lfXV1VVzfPsnNluN5yzx6fH/f4RRDB2PYiWML6ur4ps++L+i1/9+i8vLq7KqqrXdVWuOJOciZRSij7jHBHSns6ntjHB7vcHmZe//OKrgACXNKagTZtxhDCnnAvJMcLOh5SAFBJCZJ2PGMm8QpjGFLph4IpTSjDCIYCmOZ/Pp9nMGEEp2Ha7QZjEGDGCGDNn435/fP/uHSK0XFWAsFzKjw8PlDDGaN+2IYQIwLt
375XI/vpv/snN9Yuy2hAq6npVlavd5ur+xRuEMSEYE+KDk1zFEMtK1fUFwTQFYK1/8+bLerUpsvLu5h4itN1uXr9+rZQoiurm/kWe5d9//5MZ/dd/8avLm+vf/+GP//Ef/qEoCwQBF+r/AwHU3/3CQv3GAAAAAElFTkSuQmCC", + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAlIAAAJ1CAIAAACdFImqAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAACAAElEQVR42sT9ea8tSZIfiNniHhFnu/e+NZfKquyqbk6zSTYHA4KiIAgQMPog+k/fQR+QACGCEISZbpHNYlfXkutb7na2iHA30x/m7uFnu++9rKppx8uTcePEifDwxX62G/6//p//D1VVVRGJMYqI/TmOYzkfQogxxhhVVQBijBJBRIJKjHEMEmMcJcYYhzHGGIPEEMKQfmP/iyIigKoaBUQgIqiiKIqI4NRUFREBABEpN/vKTh4dOMflT4b0FQCQfSocNYKhHCtQORZFABCkcn87j9Cki1XLZ31BeTqR3U2qi6OqpmfYwEIUERAt4xw12HEaXpG6t+XO5X2JgJkds70vETlGZpp3jXPQeOcbbDx677qGnHNzr8zsHTnnHCIzewfM3DhiZu+QmR2jc65hYmZ0iIiuHnlVACCGajwFAFBt3AYbPVUFIFUVJFUFpRjjaPMeQUSGGEQkDD7GGIKEEIJCjDIEiQF3YVChoBIjDiHEKFFAVXtxZe1FsFWqIiKqiFjPFwIDAMk05YrVUBJN84IwLbg4lPmNoOUYmcoUaz6pqj0hADASETEwIto4MCoRMAERMon3vmvIez9XT0TMzMzOk/e+cc577zwQkWf03jvH3ntHTETULhDRridyRKRkU58ehYg2zoqgqjsJABBFbGRijCGqKgKksQ0hiECMcRzHGGPYr8dx7Pt+HMdxHIdh6Pf7YRjsz9APdjwMQ0ibmcr+tfVpzXsvIsMwvHz5kplvb28RMYRA5OoNUsYwxmjLyX7OzF3XtW379vvvZrMZEfV9b7clohij/dCoQd1YAxE5YkQkRma2dQsYbCUTESIgYpqjqhvlJmU2y0HZjyEMABBURCQGNWpm14pIiJpImYht1QiNTg1UVQkBIOZ9jIiK01oijGfpWNnpzFw+EbFRR0Tk0Dlmh96z68h77xrnvW/atm1bbjwzO+eJqGNv1I+IbL/bWHlGGzciYgIi8rbeiIjIubLjyTtnHQAAozCpb4AAwN4V4nw0ngBQFkkhZYYdEkYRUYkhBAlDjDEMvR2HEGIYQwhxHEIIOG5FJI5hHMcxDCEECTbgAQAoTaWU8RTqE10HACUAUMRCz1VRVYNAnu3UHDPbjNkL2HSWNWF/FgSyqSaiiCoiCEpEgIKIKoiIooiIIAYhiIga06pCREywlP4BIOgBfhzBydlWcDGjiRSwNGqWLvjQfT6pHYzZhTO5J1JtqmkkL8HeGMejjZep9MGqOjs+eNyOzxxt9UuXfXDMETET/0uDMy39mpqUb+uTqSFoemOV6VZY7lbfJB1XsJQ+cVoPRwvjg/OVflK9I9Vjy1RGu4Y9pkQCnoA9x4ZzxMwkVFOxdE8URK7n5aifmRWciCkZ3/GxTWwkVWP+8yK79sTSemLJGVGzl7LVW14Qzu3iow4YnxdCWK1WwzAMw0BE3ntVDSF8cEHWt83k6/ir043z9FayHhrzauydkqoqgapqjFL2ztFPnmhPv8gTcwGHK7Z+6BFMni6hT2offAW7ACuB5NL5D97qj28/7TVLJ0tXnW8401wUSf8gc/dZCiT7JyIqJCLBRREJAjFG5MAOMXCMEUlCCCSRiCCGGEmGBAOIqGIjBYWYnI7mE6N/dHE5U05KBm9VpTQTp2MAcCjn5eOTS41fQKgp79Gs
p0er2hIEVaPlkDZkVFVMhCyqKujENRsmxsRL6hFdOMWts5u2cCT2fT5M9IiIEI/55bOwd7alp3/KCiuvkTjfmgtWzLRY5RABy8VQuO882OeuUlU1BsvgAZSsox/ceGKrDg52bzWSFQ4RHjFPCZAqVtehgyz1OoIK9tQ550ySJkdE7JCZ2LCOFIyPLvOgQGWkNQIgQiK2qoqSBG1VBZOkIQIAiP1AbFFP+F2tZgJQhahql5WV8yHG6MwygENCbzh3BHtEpHpAo483X3UfE0Cvlsvb29txHNu2ZWYTSZn5I2nCyZ949KAjVLjE5NmcqioiqyqX9aTKaEKM8SLTEOW9f7F7l8bhlNU4vfIslpRH21AzIzEQg0Piiact1+fRQAFEALZjTOvCzoOdP2JYAcAkGAW19QmiUBHbI17zLGFM36qgHmiwjkfpcAQvzU7+lJNr6PxtJ6hDADuodIQmzGpWXxThlLJSyJSfRbQ32GOJMUYy2IsUQgDSGCNgJCKMARGVkIiCJH1FjFGxqKfShJxKe6er8yy7cRYFAY6J1CkZNIlBq7FWeIpWRtCElFod1PdJgiaICgCQTNKeqhTYMzXdKeyVpx+RHoBL4DS9ctkDJkwQJdgjqlXEWuuKq8soX0xPAGG1yM6P0qEYN8GeiBqlUMFD/VjSKEgEVZUIUUUEVUBARTQa5glqWix0FvZqhVLdN3v8KWXRajOc3VFEBEzVKEOBvfpX8QT2CJAAEJUJmJEJiWSCPXDMzM50bmlSHE6qexMc6wmtB9/G0kAPEQG1Ut2K6tHekfxpBEIBFFCq88ftaG08vQy0UjkWJRBmFZHdTeRgYOEcZS+wp6pEi7Zt7Xgcx7Nz9ESrV8Ola86i++kFaceKIKJoku2ISETLJ+ULPtjP+n31sBtn1+cHW1mYtmGZa373dM0czrJtCgU8oJHTGMKJsqSmulr1oT4PJ+fr3z71LgoAQCBnae/RixzR+Uvrys7ZFxkLyVCiXDFJe845m2/TYZZlVMOeEVZbmioUY6RIQswSY2ZZgZSIFAGDAnl7TFAI3peeiWG1MU2550fjVUjAUUdP5+PMYMGHF5AecAeVbQ9r+Q+LSKCnmJfkxfo4K5IxvZGCGiIm6FNVg8NK962VRFRm90i3eY4ATa9fMO8U9pg5Q5oUdMy3BkTMP5mA8JQo1Dvp8kKmCvkS7BlTWCwg5bNGLeOh8hBAHqiJkAUVBdRK/auAIsVyoIgoEYkwbaNkR8kzkt9Fqk2SuNcK2BQ4wVvFCCBi1MniLNXCcvkOtZLTYM8xFtjzzDYLHomTzpMRwRFxBj9C5CxiEiKhEiojISKBMoKiUpl0FVBSSPKnjTLaKQBQSavWSJsCqIKkfwQgYob0eDCzH2pEVNvdz1Kls+0skTo6aci33W5ns5lz7u7ubhiGtm2998MwGDt+cRdXmp4a9mricArbZ2Gv7Lt8pxrdJ5A28piBcLItPckzf0DwPerGWUKnOPE5WLk7MBIjESBBshwmNR2bklYoIRxO/9KfQInwJV0AYJL8ig5ONa0m2ySK1U2kMlNV/9A6nFQ6oiJQWxBMaza9nABW6odKIkTrFGpRhigIVhu8OqjYOD2W+XDqs72Z7YpE/yfYO2oxxkzL0pQzs0l7zBxCEBEUIiKBiIhEcSRKL1qxCA4m6ceEaqJiU0kLC57kDsqt8JzAd7oZDhbNBWlv+qus2wsb2dRicBb8Kh7H6KnCJJsUeq9H2roa+VSlokT1BnDOQbVXjRvA7NpQvsrqjiekPTgU9Y6lvXp7Y8WPTGcSiXliimy0SVVUUUFVMj4J2knT8WZpr5i+NahKhGC7RCga7OVf1RhZAardPyKiIhpbVpaHXiAxRteoHuQEKRWMMROl3VaPAVF9w3S9I0JgE/UAgEDZoSNkR46EiJwDZvMeMH8iosShgyMmtGkCyh1J00pqCtZqbxfuVUhRJl5NAWr5T0glogIKKAII2DHYzkuE5hQGjnDuSICAQ8Co
/yzzUtaniCByPez1+J+eV9Xdbjefz51z6/W673tEfFrDebj9p42F55Sc5eAU806RL680xEraq3afgi3x7IGnT1pnju5/1Jmjry5xEobnSbNOWgwWJ3MkqmhiSgaD/EQT9T4kEGjFNKgaezVpbsoQ1RcfnT8ivKfHkLaJmCmfFC6pPs8ORT558Kxq3CqEtZFITo3pvzJZ1iHnmaCiyRPsZTlMRBjBbH6qJJK4HoM9RIx2R7FDhwqIAZQUEJBYOdM7IS22euvKmTF9YiXpOSWncf2aXVqmr9Jlp+Nng3cs5+VBra0kBBVHcTTreCj+lzsYu5DJelRVjQIACoJZ1ZdAMQkwiUxo0ocU1sx08YmZUsi78EQVXiFY2RVPKazqzZytgFp9d2LsgY9plMVaE3JrlM8AJqhi1FdEVUDj6bLLP4yVs0L+V8RnA0UFIK209tmqpNMSn1wfzbUXNW+b8naTstFQKBNoZ9IGT0JwXhM6wUMFe6jCDj3TBHusROSEGMkRGqQyIjuT8JSAyNxxVRiIFAkRRNLeSK/N9h5KiMgIQIkvJjX+AgWATCQwGpcopRbJTzEfY17Ol5Cvpib1uXJ9WfIF545gr0hpR7S+iFPl/nl9xHEcvfdt24YQbME8LerVBEErh6njjX4CdXAOYI6Ij7n4mU84MOhk2zsAwvIKH+wkfrSSs+5bResmVbZd4JCcCXgoCGKGNyQBIUAgm2LbyEaWkq7e/iJKco8mz0NlEEUCEAXChHkGfpBte5BUWUmN86RtD45wEbSQLAI5hboi5xEIVevTBtg8I03RAwCgJmdX4Hcg551RjZqLISImmBKB2rZ3QHuq3h9NMxHHGDXZ6lBVWQQgAjkAYBERIiVmZhWTFE/5x2q+f4rzzxH+TTMBVT8/buf8hEcfrdejKbdhra1ZZsMzCRjr86oAYN5rh5N9vC3Pngc4ZhXxQy1fA0e3PN2K9f0/PCBKOtFiSRCVzgc7ebLG0lYRxQhmzUPFpC+OSR1wEDGSIZUA4sTfZ65/2n5YdeyUra6G1A4ICU78ZgscamYgyn0ojx4jIXDyj1dAJMfoHHkmpEhEzEpEINP1lH2OGAlACKAOuamWk+l+EUWVIgCoEBLbeURWgOyimShjZs8U0zESSEyCgiAkme909cLJgJxFQTjUtdi3xRug9m25tISONss0zqDDMABA0zQxxmEYQgjOOZGn/CBOt+QHEejpHTTNr8EzJNrPzLa0jb3+afQKftLmOpImERFAkAy0THY3sSYCEIoqah2vZWa8YtUDgKQST2AoycnlsJ1i3tE+ggrqTl/hCVHPxLts1YNoiDhh3nnhrzANHzF6mO8EoAQIiKwCWcQ6mDvXel/rkYryjb2HytWltCAJXQkRY7Q/ItEYI4CLhaghMJIimRLVTIMoSmR4ZB6Mk/XoCSYo9Yf5yXc+tu2lbXOq5ExYLlCi9JIXxOSwLkDlVkU/XfZrpteSz2e6LAIAnkhVgUizzV9AEFAlglEgVczqXVUNIcAFwKsX2dGbMhcP+UkjZGo658i5pJapmZjpVwzOuUMyV2v0pp4kgiICAEiTRilhkPnlAucRSHqsFFcnk0Crqlnjm1xaahar8AAxSogmAgKAebgUeYITqyYxB9EoovmiTS5biDjGcXJstaEjNKtzGQrJgU2GRmUc7KwNVlRkM8MhaWUNjTJCwjlEyKI2YHFpIUIkZSQiZebGrgEkQCZkZkIgBXZsjgmOkQkI1Xw4CR2AiWwICsm6R2ZmobTO0uQoKDKpxcOiiiNAQSWKwSQ8xRQbqyoBNKpIcq06XF1FVa7Maaseejccyu5JIFPV5XJ5d3f36tUru6dp5i81zEJhuQ8AoOp+v1+tVm/evGnbFhGHYdjtdl3XnRIBAGA6duk6Wq5HrSjAsZLsT7dYafb6UAUzSKU/KPuovMXhHWoCdcZQV5MLOGS2yisURHmSc8XkvSmKoqACRKTCzDGORMTIiIpJy20GDizCE6Empk5F
gBCB0UieAqpoRDF/YmQgVQgSKcf5qcREJgBAxQyB9s6QVWloevXs1QAAqFFAVCOiIima1TIPKaMCARE4xkxgJXWVSCmAipEfMtkuma/rpVXsAgyEoBSy8aisAmPExSkUac8MJOUTKh7KzthOICKNaQJM5gMANk9FVRZhxDBZCOTstMFHcDpPEP0ig55Zux9957QK8fj+9fmjXXd0cOl8ZbRLzoiaZZOyPaDeIpeFrUvtLOd46ef1Vvng4H98H04aZUJMJohU9jnJ7F15fVINYtYnFQASiKIgQAJRBcG8mlVVYz3eedAmRJeJccqo+zHjdoGUQPUnHKqkSrP9YrDHwBnzkFA9IbOFKBgLoszMgIxUnFo8MaIiKzMRILOhaba2ZoMHFou9+RSoIKBCREQzH+d3ksyFxKTrQ1EVQFWIqEksUIikkKnJh1fOKRN2drvVLH/RYR5deHb/nuIuVPuamY1XPn36kbxVwKbsr7MzfnZ/PSGpIGYP8xOB+HS48FP8Ti+1mtqck6JsWiOAcRUHghoqZEX3cTMuCZNtT0iL5lMAONn8Tt9IzxPGs7N26V3ONrMxkEI01r+S/OCcc8Wnk6MpaN3MEQCKyNnLcKIPznsuSicRU7hPWvhMwZEI7FtCjBFGVHOlHREFFEDNhscSvaDF8DACEJr//AX4+8BSOOKA6rGoBvf8+Uu2Pam3UApFQADzL0UBKlKCWoBVkmny0iyP0PPno6mSLCZPRSE5ZoidT8RaC7JSDls+4hGklkJw4lIdnNmKkDjHs5orPTlzSg4+KnTy0lJTlaRA02Lhq1vxFwAAFEUBsXwrWRCs9KKZfZHsDpoC1tLvqciaEtVWuMmCAKCJI0sjXCzdZviiJMebYrPy3bAZn4YSlZJFkIiBquA6Is05SNJcoEPTdiIiCGfYI0aHCfYaJERkhw0TMzITkhIgMyEiMXhKsJe5YEvsIKhMCghJ2AVRzGpdwmQ+TbobFQRJDmOiBKKipKAAtVWPQC65EeCFZpKsHnrtp32UlfkGeBZyV2tl9Enzz9FOn9YKoqkiikvd0dItPwGAbGw6Q2rPLnWAM1ygHqvspj2lTxoOjp51hAof4VR+xjekjFjZMBEFAEmVD2+ImhwgSYHUXDeZsu9n7dELRQuGtTMnmlMemUnQvlCEHDFkJkNTmxIAmcIBgEzNb0E7062h+FVQNi4DSHZWn6bycKYkR9cIHq1MTE5Y2ZEn/+6MyvMJdt+6ZBex/dbm7SBur/LiPYI9QUy6KaMQJkHashMRZWUFVWVhEWFJuYjUdCwn8t8TKvKzjE/ZEvU2qNdr6u3heTgHe+VMdXOZnlIvvtMIv9rOpOfPQ318+F5nv/0gE3o6nU9QqHq3F7vd0UY92rEA59nhn9DKypHkkWJgNgGhxeGJiBnzRGI08UQ0AlrMQwSSyRf6CEQPKMLRCslnpDA0R1/ZEBzFqJltD6phlAoEi2HPBpeyRImIpJC0l0mHCUTIaGHpwJxVhkiOyIIZmJGzAxFROl8mzhyrplk4iCaOiJDz3GVDcdIF62Q8FkyJgcx0AqIQzRR0tBovEfGz5L2+phwX126DKOdcSWd4dv8ebZazKGjXmzJ5coGu7jPdrVbCn8De0Xtd+rNcrFmnVf8aAIgmfWOiZtn5q7rujEPHMQrW5O5kk51lC2oKbItBNZp3PwAQapLYbJ2cC9kmBUAxxThmF74iYJ1SADz0ljviBo76c0R+65mFQz5mepCCqBJAUC1WvWTwO3zxumNwyKOcMwJNbwyQrHom+yoYQ2xGz8MsLZzcPdXmuPS1qDpTbgICARQRJVRVr4yiI8a0hwEdobDZ8slRFIKQA1x+Aj09YhXhwj45On/gV3nhoXXMTWbJjERbbk9By6EHOfdK/YgyJZDj9k7OF9cCoBREZvxPUVlNtr2zK+9TkK8+UxwyT668hJTH94RkQP40jU3aDII1Jh2hlESVOJmFcjMgzGJfTt9W2HeBlMwlrWYFTb6f
0yBhFW0JSgAoGECFK6WXCeK2u9Krc3LahCp3AyIqYcE8ILS0nAaEioZYib0lPeYVjlgQzAF5lr0lQWP6FknBpfkCc6jDbCPBDNsInPRXCggMogAxybVmC7WRJ1WVzFhLcuKU5M1JmtKUEQhIBImnjgxnF0cV4pIsHYX9TWOlSkTDMHjvY4ze+91u98T+hctQV3iLckHS+GZW6fSG+coEe2buPdr4+KF2dhw0GztFBA4GBIsN6OAmhwk3DJI/Rtp7Qvw9etMCH5oVS2mUqhWOYAsJTaA3fSYcgwVWl6vlL0l6Tp3kqCTnqaoqZVoHkJ2D+dgGecTTnOI3AIDESrbTs+9OKqoSJ/+sA5KoB6le6mOqPo/HEKa4PchxigoArhiZoULp/CQtzE5FFrMwS0RgTlzp2AkFpPDk2jrlAk4vOAt4p6OMk0iX10d1/pKSUysDdZHzAEC0DkGtRuDEn7A++OD5qavVMZ5304JLfx59dakdhpedc1c5mJeag/6jjBOHi+fAqncChJNuU6sUsSKiSKoQk0a0pGgpPOYBv3k67Glu8/ye8oxlFR2tzEtDXQuFKaLcNBaZ9CCmgNpKxj4SEy2vB2JOi2xzwQCQY7BoivQ9dnGcVjGiQsxa2xPv1sOdawpnW9E5fFbgApE9iwQnyAc1EagvIyKT80IIXdfVsAGXGdbTnpR7FpBDRHPmrP0Mzq2647VxdMOz74UXmEsD2ppXhDwOxp8dhfpUFPnYDIZPSHsnb1EurglaTdNUoypXNxSoMU/hEsQmq97lC07HwXZwIf5QvUjhSOFJAnW0LOvPYtX7+HY82vl09bD6wZTD8SGP5+TqmBlcAADHblKAZnsBAAAm/h1UAQUtKVP6VkEsb0sFe5izWtDUt4NQ/LNk64mlAIfYcEQXqjt8mm0vTWryt7frk7wLACbnYf2sT7TtHTh3mWFGJ5qbuY9al5V8jGtams+k81Mw34mYWBZEtSePxuSSbW9avpcIwce3I3CqRb2qoSpGBdEjw165vJhqKvZIa9vetIVShnVMsiZi8s4Fzg9DVASBlBzuEunUc3bNQuiVTriGpEOxT0JEyra9Ei6ZHCNROYdYldyJhGTahGQRTMqSqU3gBpBj1Rkg5UgUiACAZuNUNb2nqBKo5KQ2ZuFLfuqJixeLlzhjGLnguXPwwud4hQJ7qhpCKC+eHEw+TtSrb4VVxjJE7LpORCyS7xRXnlZynu4OPORy6nc5/7sK9o7udnzbMy5Xn2DbO8K8Mm4TkcxTer6vWRBDBVS26Tb15gQAB8fHCcpqOa/uGEHSpKdrKE/ouc6X/h/dBCAZIE9R8GNacT198qqLOTkhDx/UtNdg71TaO+p3oeOYMmKYqGfoPVnsPjjB5eCDsAeHG+bsrc6SKv042q2qoFAtuKqHhzc/lSousa5nGXDI3ixgKs8TERA+TjF7dlKPLj5CuNMHfczJP6YdiR1wYvM4vJJMY5l+IjiFFF3mkE43j6RZqxCdqhnRg6mcaPG5ntuoHI3wEd3EbAusAxiwIpSZgpvvNRIRapUH1WQ+mmoXoAmDh7l68UKs0hG5Pzv4+RYpjWedLwngIjwcrYf6rYsUe+6rJBOYIR8yfw1P7t+zrb6bERzzOaCjTGBHL1Lv3HMvdfoIOIHAU/KNWXWpJ9h5hP2XnvXx7QyiH97wY0DiaXnuIwf/6Y7BhVX3Qbbm40Hu6R7+UW93QotcHdZ9sOxyOCpZ0lNLVxajs32qIyg4BQFBEARxjBKTB5HlNWTAaMXPkgnTqagKqQQVLWkpERQhqpY4/BL6g9VnlkdhWo75BRJ0IdZyGgQzth9KgTolNBKzZwKApaeyLOVkIZVIU8YnnaS3+qAuCQZV3II/1OwVeTOlMEmJ9CcBnLL2GQETk44AAOySq9HEvKfosRZTSnSMEBkZSMiRYEAioKjACpaB2Ak41VHE5sqZ0M7KUcwlEaMIIpiPvSAoIFl+ScAkdGr2G7Rcw2h5P7LHiSpG
8+EE0FTvTUGRdLDloiAqESFiDBqjxkFJQKJqEB1iiBEiAngOMUTQACpKAUQUgkpUUTZpD6KCIAqiEKqCWLx2Vk0kWx1AhAaAk/uJaVxBo8Sm8YTkgFCRxgAZeMRRgSgyyxkhgDl0kpUNAiCIoEBREcAhIhAQUs57H1EBSJzzzilKAAQmNjZRcK+KoMzo2XPxIIsyIiJZEtpqV7ejJ0ph8oCokn2AnVXqEUQk74jILKUwJJOmKhCACKAio+9jwJTPkVQsIEQQkGKvMVIMbInMwJIpoRAjqXhHJkJHEcdBxTeKBAoS4hgliAggOI+73bjZjk3rFKJ33Xa77bpuHEcM24IciYBYROAF8kegw27r8SXG3oQMg7rH2zfX19c3N8u+7+/u7vq+b5pmNpvt+76YrTC5Fukw9t3sCnPwQ45qJSIabZyRAM0vKVMUzolbNQtnCqApMozR0i0lEGLmXHiotnoSM48aABRz+TVVUlEA0ixpIebIE9M2x0kbnolyIhDlz2xdEwVQRtQ6n61FigURy0Ybwag6CCqCiIuRlDwwkRIpqTplImVQInRgTlh50yM57UmIwFkEcOqn4ahGjZzf2QpWiqULgkOwLJNbB38nX1+FWOqLKapAFJETN10AEKSY7NzFA5lQlTSqKFs29km6QbIELhhsHSGgoACygoA6Y6MV0QydKqn8C2iAtIePFmLScU9FGDAnXD9ifKDmgPSYYayh9Ih5P+ZAEY6Gr/72LNRfYkaOGKWzfFN9cHoTPDEifryod6md3hMuMKTwoVb3+RIXfCp6nu2nXFIQHD7og+LCE88qkt8TP88LBI8MV/W3R+4wp4N50uGDun0AEELgVJuXBIkRlYDRhQMXp5Rxv1yfrTtK5IiA2VsWPoYcoCbR8j2zglIwP3NUYCTHRkAZS5i2oGS1v3cesUTNV3M6ntE0CGjrvQUL28SX0UgAQJbPTRJ5UXRIgrlSJkTzVSw0K91Wcg3oqr502ftE5JwbN1v7FnOe2KyQRBul/X6/We/atn3+/Pnr16+/+91vplD0KrL7iSWdIsQPV46ZDLfbraq2bWtEqe/7FCAhlRXQO8ttZh2z3sYYNWV0OHQFPKfVP7MxKzvQ6U/OikdPt8Jwny7XJ7YeAEiq/l2yPaSyJE886JPO/wnb08ThiTOnoPDH9uQo7lCPFfyuDvI9EFMqZ049tGlPzt8XstYevV5NuaqD8yhY/7DWQlw6f8QynELaWaqdlSQ1tqWb1CIaVDpeeBJOLrXT93oa7U41TsfDkicQs1tK0Q7Vgzn9CYdOp1OHJy3rKTh9jLo4PyhFKRwNxcEFcLAAjkboZMTKczHbF860i/yKRlRFQaLklSpqvpMAiStOmeZRMaYkSdmGnRJ/pqWBJvHlf0xIqC5rxc3rk1NEIDmC1rFjBxI9oyNwqYfmDUdWk6rsGhF7BIoqMU7TgWxs6mS4QQRQiaCCyQ4tRR4FRlVFgZKTdtoODkkoO8MSCIMqeJPqyJwSQxbpD9aMLSpHTIDYNFYAzzlniygLOt7scPP5fLvZ932/3W6ZeTabDcNgpdINOLXSgp5dRd57y1VUT2XbtlYFvmmshHi72+222+1s7us1hoje+9lsliSGjIWqymmop/jXs2TqaZY0j+oZA+dHNs1svR6y1DX/cXSmzAgiahRFiRFiJJFJaJ4GQVSrzDVU8bIHA1X7aooi2PL+o7SjT5KFYyPFWTr8xB0+8mJrcu49ysjXzQbK1XT2LL2uUdCSPOFhK/7HZ0WryVf90IvvgN7BsY3tCPBOsW3qMJyxrJ4C3ul8HN9Hq26fzNbp5J0O0RNeZ3DCUZaTeM5R5ek1MWEDmXKg/uoURapvLy9EOLf9tPLWqbxcDwfwsGOpib0spYGp3DLzt/bnYaWFcj67t3xwuZ+Zi1Tj/hgaMUcsgJKACCBGEQu1Qy6V8Apda5rGITnnkqemRhIiCJySdSsjMYNDZkJGUIgtWQoWYAESwYiqqpbwTEnF
dDZomdEkWkICCxwyjbICAOMU7q0ASmi5Nfb7PZgClhAqvtPiiwBIQFAUEZBAo+QqMpJVcGoJGEzf6CrmNxfsTRu83teq2nWd1YMNIVjNTVvqq9Xq/v4+hDCbzZaLq+12u16v379//9VnL4loHMc6ev1oKR5sAYC2bS0tZ3lxIrKTfd8T0Ww2MwQ1cTPGqNEqGroS257qs7ukeywN8TgH2KX9hZkATXsTD3aoyeXwEbzptHcqwMsnn8K8U/A7bZDTFIHoWSPwAX7r9Jl++8cZAj9qD16WOE+J0tmfHKHmh0hBCkoEk/Mg1aI49BXLcwoIAI5q2IMkABV6Zw+083TohV8fnKKmVnVr6+NEAkVlwpcDbqheDWfXwSVEfGIaDs9cmp4qBPicVHcJCI9ePBr7DKo5LBALScMpF4ltKc25X+GjNSembsrS95H3zYFsVOLeVFUurJ56IZpBgauv6hG+tKzzDfAwYuEQgwU0a2lUXNFWVhcnnVv5XVl9NXt+lmwdT5DGlF9eVWGCkJCkUnApLpORAdCxI8RUG88i8xgUET2TI2Y2kQcR0SEQYZui2y0ODxwBMxBqjOpAGdI0MKiVSsg5YRgAYpAYk8HQswOZ0mnYDlXVUUeAxNAoIUqq854FJqTKtw9LmRkV8/clQAZEhDEGFQUJEG0CIM2BikJEUQs9JCLHgACBgyqpOhQdIIhiFAVUiTkZbwjjGFLRTdXl8irG+PDwsN1uF/OVc261WgFA0zSqOgzDOI7FtvfEejZk3W63R3u8MNMGaWaxWywW+34dQiBIQp6qhjBafVrnHJO3K+s8A3jSagqm50S9Er15Sdr7SJnvkF6dIUenxK2cr8eBLD3ZBZpjiVoygVBEsxRq9gKGchnmxCuYN9jHvMVHtiNqWUPXKd9zdMF08NGWo9wqbeVhfaWa88Cc6wXRQU7yNs13DTNlji9DyPFrHGHeQWRyOpkOipNZJnOAOClU4RywnT+PxwsXK+SGQ0guT0l9PnnxNAEnis0nSL8eurQonAeYepCfOPPBTTU9F6dPQZCUDVO51Ag86jAAAIhqzBUTSEEQOGPEpGfT4nSTuZ8LPamQrRJDk5WOtKosVBvnMkxO20GFJOVuq8XEnyLtQR38UKVmMOsRoRMGRkdE5Li4C1Ku/uoQEAlBWZFQG1LnHFvBWCYimjGaYckyXqJGCy3whJ7ZM1vues+ezAglAuoAUETiEHLmcZ21HRwmUlSNANSQAhCQIrKSEjp0gMC+dUTOSg+JBomgEBE4plIMhV+wuD0UNXNdVDBFGNhxZcsrc2FVzs3xBNVynSWT0nRZIQh2/PDwwMzz+Xy/39/d3TnnZrNZ13VWIXY+n5vZryZq57cDQNu2d3d3NbmHQ3Vl3/dWnKjrumHcEhEjNU3jnCv42jTNETLZHXzDRXg9m4r6SPFY04f0Q0xOVUf+DR/cpxOZqvRYekJm9YKSs2b4pikWMRcRELW6SB/YG2I+hhV7nfU3H7+/PqmdJZLVEj2AQD0HckcnP66f2aCQQvcKn1GUvmTUwGDQHQ06ntN0n3YdTnDuCPBqYqeHdh2pS4ZWisqzK+aD50/h7fTiSwMKB695UDN2WjYXbHunM51GBg9mq94hesjNQSXzYdKeJO80LaiWT5Z/ERSNjp5Ie/VzS34TQYig/rIBNR3gdJPTzVwg5HQtHtr2TOA76kySAku95ZMlU+u9YTKvAYCSXlholyhOrTUltOpOVj+vseRgyKRIZlkTAcKoIoCMwgjRcuswauvAOWw9MaNVy3OMzDRnBNEYzRXZ6qqnCgwegVVCGCXKGEZEFJEA6L1a5tswBFP9IWoYLAKvLskkqtB6AiUltXqrzJ4cErkZdsxIQKAgVpwYBQGj9nlXm7cwqooKVjk5hSGpSxUMlM3/UQgkC3PR5oYInCNEj6IQBUVHSk53xUJhf97f39/c3Mzn8/v7+7vbh/1+H0JYr9erztt52+ZHBR/OrsCmacZxPIIBq0Bk
42IHRfmMiI7YsoD2fQ8i3nvnplispK2tcO5IZwDntsMR6zztryfrITz9akdKziPSBCeYd3qllVkw0JVQPJDIZ+dYKzkExfsRLVlPIvBk+Y3su5SqFgGAVNM/y6unQIe1RvM4ZLt0tlHr5Zc9e/zT2inyXRrtKSsWHpwsUgHmYwRUAWUCk/aeuGndj0so/QT+1b864i+eRvJLXXqS3h0rDeAjwO9Urfr0RF5iT34Sh3LQPlJtctDD6lOy0GYCnJ0/hue8FKSSFE1A/KjHPalYNo1qrpxFlvemYJ5lWi+YBQBJHJRJ8ZszTSd+FJTMNFwvp6mixSVVJxwsOUyO4zRrPVCijFGjRoxxJOCuSa57ag7tav4kOGvnjtgzOQJEIVVWJBGHrKoIIiCU4/MYcbfbhxgGkWHfm/xhRL9ZXrVtND9DCSrRkj1QCGJRxhnxgv0k9AAAFsDAzM4Fckw0KoJzgYMvWbssQDCGYNKhvW805JtcUwvhVlIBBO89IkqSHARJLQG+SDDyzswAao6UIjJWKzzGGMKk5yxKSES0FxzH8fHxcblcLpfLGGPf933ff3AvmCdnKVpk15s1sRCQYl/03pu0h4h93+/3+9azxbZD9nu0jjGz975sriOdSiEXH+zex1zzwS3zMd/qObGvfJV0ZSHGGCWQekUAlKTIP30GilKpn2aFZPH4fN5uf7L2NM08e/HxTz5ZyQmTqGfEzQBazcpd5W1RRLQANXJ1jpD8PAArq2h53LNhyriBAOcBrz6ZLQHjMAwhqB0Mgy1ciFFE4MjJxn4GAKBPRd2XZz1h5nl6uGvpTaqfE/uJrFcO5XKh3OXp+cwfmoRWrebyglVu62KbKfOGhwfpMkrW6KSIV3AgiBgYXZaTRMQq/hIoIYhIIOVDLiOCOkg5wEREAGM03z+LTkHEUkrgQIy2+cyjhTn6cIIfQhZJZeTLD498NgFINdjBkSu2iCpMiTGJ0IL/RESy0Fbc6wHAPDL0RGta1oNVnwMRJGLH3jdt23rv1+s1IBuWABMDAyoqxCEyM6E2nrvWzdvGN64hBxhQggYBQvbsyaHFOeo4juPYD6Y1zXowDCG0bcvMjgCLn4Vq3w/b7c7kj9b5pmmcYxXdb3eAMtV3SL4StB123ntGDiHu+1R/1Xv/7v2tuel3Xdd1XdM0VjnC4swMjGws0DERgYBGAYlokZegCCohCCCAmBBAgIzUelA2kIshiGlAY4yImkN2kwq0adDycI7j6Jxbr9eGcCq42+02m81sNmtc94//+Ie/+Au5vr5+9uzZZrP58ccfF4uFOaekvZZ8WSWEMGua29vb5XL58PBgdkHvvelILQtM2fvJyOfhF7/4xa//4b977y0jKDMWcbC4s5T7O5wEvppv0xPVy+F+PW45VE/psNU3QcTKlpSIQrWbbEdXlVU+2qJPMHW76J9jjEIoIlyqPqkyKoEymCUrWshvKqwHlBMwW8A0Ui4woAd20IMKhXlHSxnDI5pfaEWtDy9EUj+xwaGBrIxtyVpQ51cREXQeYAIG41/zpJi5Gy2MPNmDiEjpuCzkeQSuQK7u0FE01YWWLsimglRWFOijZAw9p8984jL9CMn16ccBTPZC+DSm40/fnnivqOqynVtTvGex0mkEdXm+jHIFFW/ilUBEJAAHKMkWmAXEvJj1QmdOJ+LwZBL1zICY029TLgVuxdatEUwSnqklD9ga08VDztYBVThNPRqFgJbVaJ573vumaZBJVfb7Xb/bKQKCSkSJIyIqs/eeGRamzGz9rPNN03gmB4Ky77wDBPMdohCGfhdDiDHe9htIZIhU4zjGvu+HYSSF2WzWNI25HLZta58AjcowhG2MccS+bdu2bZxzz58/H4Zh6Hfm64+I3rOB+jiOSWRBBAATsAxHTXgyVsDUeiY7OnLkSCFKBEVRZEKxlIE2OYnIElT21mpfg3RdF0IA6DU7QAEAIi6Xy/1+X+CnovVguNK27TjeaY7uDRqIwOIZbBZM
CixzV5NOExPtPlDJVUS0Xq+7rmPm3W5HRIvFQlXv7u7+8q++FpGu6/b7vcGbibgfiR8f05Lw/ynXF4/An7C1P/JiKAmIMjwUiaVg3sFvrBw8pr2EqieJyUD1yAvkz9VOqehZiPnJN68USJa0BBFByeoLUipDkTTOJLVLyxFRqxH4LGgfgduBobxqIUw29BhjLun3US9TU/yzv7H5h0/RWhwM97nzR3e4JO39udvR65QDpeylaPpJI1oigoBiqrfE+5kVMKqARIqEiGSynZkPFWuzWcy7qMDeNPiVleJ0eaBiVm+WJXjGgyv7qjzt2VXfvODo0TVJ+DvyDyysGHun2aMBM2uMGsm7hGvMoFFEIPYg2LVt67DztGjIe0SIxprtxx2KGkpADOZMP44jYzCT0jhEcyqxTu52+6ZpGt+Zbm02W5jej9qFqpqrPSkMw7DZrMdx3N1sEZGzA2cIYbfbiMjq+kpEogAzMaUSPCHEtm1jjGGIEvo4yuBHm+e2cwldGAEZCFRRBBCIEZTMp0ZAlS11SOL0U7lRRLUyf54BIg3mBShB4hiDxBgljsxs4Ldeb4Zh0OR7uReRYRiur6+Tu5B1NQbvcbvdhhBevnw5m83att3v93Xl2DL7iDibzWxgDbckB1E0TWPunWYmXK/X3vsXL140TfPw8NC27f39/Xw+JyIJk0T4kVY3gPM2hWRrf5KAHD0o/5kSRalOGdou7Ws859Jy8XFF/XOZAhORWfhEBFKNPYWUbGiCw8zhaLrGwl1MiYeQFgZOGYERqlA/PYj8+1Q6Bhd45T++2ftJ8mNINSVAQXO5k5QFCdGSwCOdeHLCCdmqBbvaV+UIEZ8W+wocFnI2ETh7aG2lqtQOp7LOWTD4JE39B2FPNa0Y/ZBe/vz5P66aQX3/s+9VJsiE5kDCYFW5LWcziIgRu1N+xWAyF5KEiFb7/ACGRBRzBjI8V4MCzjFGCQGTQS5OICcJNA8ZuqTztPC+9Klq2bIst1PJIVOM+BeQsgh5SSFjJiKLGwMAR0BEjnEx60TCuFvvY2TC1Wr1/Pnzq+VKtm+987OWGo/MCipRY4Qo/W4MYRx60+mN/TCEPo4SZWdGJiLXtu1yMV8sFk3T2biBkgioKhFFFRAiAOdca5BLHELo+/0wDHd3d977WdeYR6Jp5urXNCGv5Lq04naa/TvMfBhjnM+7pumaZmyaDhmIyDY8IWiOvcsB+MleaNPKzFllbUZBQdIanERGERmGviTiMpjZ7/eGUqqa1YxsXRURjbFpmr7vh2F49uzZYrGYz+cGYEfcpN22bdvNZmMqTfuqrkOUFhai9361Wr169erh4fb+/v7Fs+chTIldikfukfLwg/h3lrxUP59cWurOl9uewt7HSIqnmKcfkvxUlZKSLkNXlOLvBdESIUhRBmrRn1j3NJv3Kv9PzYT4ie79adspv3sKioeEYvrV0/cEgNqNEwAQGYERU3K1pH/O8+UsqW5GnKoEg1XtkpQwX2P6d5o444PIV76t183TS0PPKffqoTkyBR/9/CdMm8hx4SH4SVyJfuirowumCKyjgwpBJxNgSdebPUjSmFu4MYIIEFEUpUq9OT3lcAyL+cyuDKqERGkccJIvTzpeL98J81SqKgrnmmDOjHeU0gUSWmtO+KnFBTTnbgWAqhjQRKxjLAQuEfRcygfEsrwSoTLAw/u3TeNmbbdazBZdu1wsFotm3uDMLxHIMmiqxDgOYCkxNMrY77eb3W43jiOIRIgEdPX8BhTNgATEKjiGsHt8sPmIUWPUcRzDKIrAyPPrlb2kc27etvP5vOs651w3a2KM6/WDDb7Z7Zxz232Ph7rNpmmapul7c5Mx0wuJaAgSowz7MY4wjjwM4hy5xru2YSaJgoAIlBVcSgBI5BBFMDs8WZyhqIIIWtybNu0QQ9/3zPthGNpm9vj4uF6vnXPz+aJtW1XdbrfONSbtqepqtXr37p2Z4hCAmft+UIX9fr9arVar1fv37/XQ
PiQiJhYj4jiOIsqcqL+B6OPj9uZmFUJ4eNg9e7b867/+ayL65ptvtruH9Xr38jl2XWujxCdQ95Hbv0avY1Q7IlbnKEyxfolVJMWjO38U//3BrtrioXPqt4nAImkSeQQ5GsLZDgJRzMWyTMor+Xaz9DaZyiCrPf+EyFd3+/T86QGecNUfuH8a6EmNmcgXEQJZsl1EJHImCCMxnEp7p309ATM8UmxaCyGEEOJJK9KepjwmB1kbzkp7l1iw07VScysfKfMd3O3kpdMT5RNqSpybiDPtU5eRXnovrGfHZD6MiKxoUVokQgQ1z1GQkiqBTzT5VpKIpArDUD5KgoNqMM/wX/X4T+CXge3ssp56U5XWU6X0Z7EvKhX1dZEDCkUrt6mtQdb6cSAi55gAAYVVSIUQbq5Ws9Yvl/PVcj7vWkYSCf3u4WbmVEMcYohxDH2JA9vv9+MQ+3EfozKR7xqzpd28uFZNVXOD6NCHEMIwhOurG1VEdoQuxmiyCDO/u7u1WSAiGcdxHM1Y9ezmKsYYMMGbiUcA0C4WJruIiAmsJmk9Pj4aOpa0zqY4VTEPMlQdYmQDDmwxWQerwEAbPYdsMqUxS2VZpvRgSkUotIRkMeBqtTJbWhHLXr58uV5vVXW/3+/3+1evXr1//74wJWYXIIKHh4fFYrFcLpumKYJsLdWZXBhCsHADM1iaE2nTsGHq8+err7766ubm5u3bt+/fv29a8p5U9fr6erfbxRib1huIfpKoV5HIg70GUwDfJO2d/rZulAqvT9LEn1DJWW2u/L8S+lzRZ0QUmPITZbDEEk9lvyWFetdoVmyWPfVnkvNqulGD2enB2V89fVvDuTwx5iXmKgGciBwi1olZXD36Z5tUHuQilh0uFlfmcK6Ziin/VdDRboWmlHp6dE5FvbNjcQp1H9RzPgF702/xo+bjz9ouvVdERTxiiGAS+wRiSs5/jmWxJJCJ4y6iVYIhs/VZuV1RKV5qeK5jE/DkDGT1KoKTJX7pfH1b08bpgVM14WEcZLlDkfOKZ5dp//oQmVVEPLPz5J1vHXvH/9Ovvt5t1/vt4+7xXgd/vVytFl3TNLs330A2DY5jtIUbY9xvtwDomNrGGdjYaLx9906TUOLYNb5tmq5VxaEfAcgx+6Z1qjiMIsLkP/vsNebKAI1LDjgiQgTeN81yQUSik+2wLHtmNs9GVR2GIY7Jd19VUZPbjnPOajKAoKhKjCpjFPAhkHPIybyfslgZ7AEhqhwqEkUkjkFVo+XZkZTQGQD2+73Z54hov+9NqzmbzXa73gZku91+9S9+8Q//8A9pLgSGYUAE7/39/XaxeHjx4oVl3Sx2O8jcjNVkL+454ziayne/33ddt9vtrq6uvv7669ls9tvf/vbNmzfeeyKdzWbr9fr58+fb7db0nyEEy1L28aLeUTvlsI++LSu8lhHLxvwY8a5e6h+v5KwzsJxS5lra0wyKSdUBhwlZTn6e8rkoTEY+TeHtmJHkj7ftfWqrN/vHEOE8bgX2ci0eS6Nk3reVbS+ZQp7uwdGDVTVGiZ/SiienHRTYwycdOZ/mO06//Rik/MgRr5EGntwJf8zsfnyXLr3X0ewUtDhaN5dgD4DtxzK9e5Imj0YjH5+3SJ89PoW0S29XlKIAUDmCinmuQQakOupZckicYYnNVwhhGIYYYwQEgIHAEXcz3y0Xi8Vi0TW//vWvW4bFcv782bOrq7ln7ne728dH3m2KpNg4JlBmjJHm85eGPab+3e/32+227/vrV8+zdo6GMD4+bO/u7jbrfTufgbqmaZqmi1G22+3QByIiTlPAzItZu1gsZrOZ936/jUSW/4zZobl9dl1397gtHiImORkH2TVteVMJ0fCDq6phIqImI8YwDLy4WjCiTkmriiyCeMjpWvdMxlLRGC2YUout9PHxMYTQNM2rV6/GcXz//v133323WKzsfn3fX19fQyUDjaM4h865IYybzcam6WhRJSbcue3jo4g0TQPZrmxi33q9
+/zzVz//+c+7rvv++++/+eabcRyXyyViNNn35cuXxvp474tHzBHsfYzABx/iletvTzGvCHg/AXE/klgdbbHTrp5lLp/eklMHPmXD/rT2xPY/fTu8gHyXWiZQkJd2gj0FQDjWEpXRdsUImP17LJFRsu1pzKrMEMI4xhgl9BqjxhFkjHGMsQ9xGMc4hH4McRjHfhj249iHMGiMBIOMASSSCikggRCigEJ2agfIuSoyXY35JJbP6s8yQkk3Kjnv4kcyW1yPY8W9pIowlGTj9EwAJ9PoS5WGW2oYqO5vZWvgUIsCh9V66/PJzzjjjWT14HRB0aUYt049gYiyggI0KYWkZToGUEARCYIEKk6IaEjJRCx4i1QxqEDQkcQjqSVhN+9OZUQQcrZsiCgtJhVVILLM96aqovLeArYaoqIqoqBEUgHVqIqgKAIaIQYNVm8vwCgAI2oAHRWiagRn6fJFUWJQheSrGs1qqQ27UXS72wLAbDYDpv1+TwDztnPO9X0fxhEBGHAcw3zuttttO2t/+fWXr148j/129/iw2/WvFt2rZ9fPbpYQhoe3v3/crL3n68ViP78xxSYiNI2fXV+ZUPX4+Ljf7zfbHQC0bTubL+eLlYj0ipvNZr/bGtUWwbZZ0qrdbDbjuL0fY0nTtZjNAeBxs++62TiO9/f3b9++a5qm8w0APL95VuDWs+u6brlcdl239J3l9GoaJqQ+9ON+hBjfvb+fzWar1Wo+n0fAsR8GBU8cNRkRTAhzzgWJ49jfv+u7rmtnnXOOlKNAjAqg1KDzjfn+xWgV0TTGSI4ZYRANEoMEVQ0iIYSr2U3Yvd+sd9GpDvrs2bP5q+7uh/ey2Xrvv3z2fLfb/dP/73//V7/8xX/9r/910bZ3AzrXSYzbh+Fm3sKIv/uH3/zqy6//3//5//Py5Wo37B3R+4fdauXH2APJMO4Xy9m7d5urq2a5mm23m9m8VYg//8XnX375JRH9429+/fbt267r5otuu902hIi0mM322+3N1dXj42MYxlnbFXqlmHgmC2n0QAhIiqxImutqGtdfSVEAKWWrIgECqwqiEESFIFEVYi5cTgqW6YYgp0G1HURmuI6oCKqENQKZjMuIWJ56TAcutBEGc+hVIiBAZAWCSKpAEVCUoiJEIiTpCSgoKZBlgrXkKmEUFWRmFYyQOKqiDsUQiZlRSABAAJHIm8IIEQGFiBQIiYAsPawCFBOGUUICxCARECw7UEphpKhArJEABdkYrBCjmAMxYAQMiAFRFILCKBoVWIaoMWIQMpJipUe0Ypiy9Q5dQQtJxRIRAYEAQcRcu5x3LhnWzB3IUnU4+PQ2icknEvepYu3wJ4Z2n8AT/THS209rtZRTn/nnapdGoNpUxTIHtTOIVru6ViuaWGgRO3lWDsza9XPPcpdQyX+HEufxiB2pSC/d6rB7tZJz8uxYLBZmAzNdWdu2jfeIIDFsN+sYo/d+1rVtg59/9vUXn72etc393fu7t28agmc3Vy9fPkcJt7e3JIGJVtdXjpAQ4xgzgIklxDKz1mq1att2Pp+rqmk4U9k5cqra+NZ7b0lAzGhlubKGIWn49/v9er3u+17RmeTUdR0AxBh3u52IoEIphspIzrmHhwfv/dVyZko/06x2XWd+H8w8DIMJXiYvAsB+vyfvitbX5F0BNXlxv98HSfpDxFS3oa5s55AiopVjHUJQVWZsmsYEVAoBVdfrB+fc1dXSrJIxxqvl6q/+6q+++f1v7+7ubm5unHOi+vrVq7/7+7/f7/fMDQCIKhIau7xp3DAMTQO73U5QowgziEjbWqQgbDabL798/v3371Xhr/7q57vdruu6r7/+erPZvHnz5vb2VnMGjFpOhT+dxuVInjtapVhZ++BcpPmxfKkH/p/F8vRBMfQJIKy7dEk+u0SlDgetio7FP5ayHb1RrbDFIjsQEpCAsdHEzCCCiKNEIgJ1RKLsSAA5kqJEFEABVCRRUSTzSSU+KCA1PV2yIYYQgcES
dyMxOWDHzEjOLHyJ0Dk8E67+4SbJpnrUjsIVjvzOtfgSZR+yc9g3XWOzhDml3VPqgz8RJqYeionIH1YO/LTnnrVja2VkhhPp/gnV/6TZAMj+kIpoakBKfEgyvSUgTP7N+eeTtVtAaXquwhnDu14wyZ5Ttx65vVAuRQR1oYYD6f3wz9KI3DiOzNi2LQCICBMulwvvfeiHfb8N49g48rP25ubm5uZm1mGM8f79m28f7sdhf3O1+sXPvvzys9e79T3ESOQd+8aRRtnvNpu+30tjGKNVOhhVnc1ms9kMcr6r4rE1jJGIfIPeOyZWVavesNv1zLxcNmY3Lcbv+8dNCRsI/WABbUT0cHfPTN67rutmXWcl5Zxzw7AHoAi62e8et5ti8Hvx4oUgjOO4HwdBMDBj4TkRqjIgRLHiO4mXRzDDutkIm6ZxRES0jwEQCZGRCMAhRiImWsx834/juAvDYKV3Hblm5t+8e9s2M9c1CBJG3awfJI5ffPHF7d3yx3dvZ2GMEu/X67/8y7+aLZe79QaZNErWkWo/xL4fg8Lq+ubdu7umI4jiHAbRVTfrxzCfz7///mG1GmczfniI79+/n81mX3zxxTiOP/zww48/vjEzoeUy9d6DSr01Jgv0n0LBeIR5WtKqn+zfIwVpTYhPNw7U2VgOc/1fog/TnxJB4ax8UvOLhdlFVDyn+EJVKlys1dbAQEgADKVSw+Htqzc36eUgUnbC/gO1XHpPO42ICB5ArFSzIiiSAZWoWrVnAFCMCCyEShppDjEqBxQxexjEiNllCU65jWhUJcGeWg0VZHQekIgIySGiZPMnEuFPgL1jPKhonGQ7XgnUizGahJcGW8GSXwOY2Ed5gA88OcsraWXZutifT0+OUE9sfZ9p0ev5tQsXHGc+pgenSzyPw/mvLl0P1XIHM30BprgDAAFBJItkOJK/YyBiwBTOjiIiSEQqIkqTdJjf9APSHtSgawdSeWbmULwjCE/YppScSbN4mjZtSdqJ09rw3otIjON+v0fEWde2bbtazB4eHrbrRxG5Wq6Wq7nJRg3D4/sfjN9adP71V59/9vp163izfnCg3XzeeScx7Lfr3WYdQiDk4j1o+UQwJ3+xQt45WjxY5hQA2G5TtfH9fh+DlJI3FjqSNyQXZ8iXL1+a5cl7ryEalHZdN7x6XZNCVbUMll3XNE1THm0BA4ho1rW2ba2ygX3VdV3gUEh/DMHcI5k5ZRmLKTrFOZdAHdQU5o4oIAZTfCN2bauqIq2Z1kREgBzS69evzNEVERaL+e3t7e3trdVSv7u72263QPi4WT/sNl9+9bM//OEPu51EFQF1ROZZYdv+9evX797dAcAQAzmOMTRNs16v+8fH+Rxubx9fvLhG3IjI3/7t3zrn/uN//I/b7ZYIF4sFIm42G4M9Gfp6g5Qx/8ntkpdAkdjgxHXztMEZGJjur7V7PZwBTjgLeIeNVAiK245VYDiot1fj61lDY4HGsjVFBD4yIW/VsSdEVQBzMZu89BFAkViiIGpEoIDCqqqE0eDNeRdDjBGDjzE2mrjMQruSluXCcDVKqigIWdRjJUQgZAeISoyIYIhbOan/FCXn2VYTuDSqUcCIm0zcfWHqCQBAct7+PEIAWXF8aKusvj5eDX9En6t8l1aHM+VoRoFUP6d6NfizaVz1EOnrkTxSHUx9UErh6aom0IlIhj2Q5C5oYh/kOIfksS6SEvaVMLsc4Tf1R0+2U/kLKtQ/xLwS/XwhaCHV5Cv3I7Wk8Dkb9QHEogAKkgoKNyz70Pd91zaL5bJx1O+2w27dEC5Wq+fPnzeet9v1br3diHRNfHlzc319PWvabtbMWheGPuy3Ny+eOYQYxvV6vds8qupyuVrOF81uLGFzpjk0we7x8dFwy5xKbOMx87PrmxKZMwxDT6gC5g9CRCkTRIoBcMw8W87HcXREJq32/YyRDL1EJAzF4TmleN7s1nUxgXpYLDknMrAnFBCI+2HnQFMQIQAgIDJqZOQx
BpAIlpZQYhwHjaSq1LVkYQoqqGI56hRJxkCI81l75ZequNvtdv0eRH/5q5+vHx53/f7+/t77lkDevXtzf3/3+vPPvvjZF7/+9f9gdLv98Idvvvviy69++PHtfuwpM75WoTOEsNvvP//888Xym1HG0A+ekNgB8RDieq1fffXs4eHhzZt75+Crr74KIfzn//yf7++33qfqfRYLXyIfpgWU49OfYosx7eUkup0GyGYpJQX12xK1VMZJFkHJirvJvgQAlhHgDAYeeiHkZ2QWBwDOSI1QA+cRjuYakKUdgD0KAJIiEh+5Wlrm1ewVYO6aaIE9gHhIO40Sc956kL81mQ+ra44i5AqcV28KQFQVEgXLkyKAhEomw5HjGCO7GGMUDSxCKbGswxix0rtwZt+PBioTkfwWiibqIRMgI3tbIgAASlLYS3Conw57p6hr9SoZKZiTIABX03xI+CDXf8csVh0sUfv4eFEPPl3aO42pqbSqepCaCE9loErjd8Gl5RIunr5LPj4UKI87O2He0W/rJhGtyrooWGZpEZFIwhaJLEqU/EPQ5bRk9klFHEx5as8962jGz0FaAjaAVCQdjuS/ag1UIHd0bLcVqMoij+PoiZm58a5pmlnrw9CvH++vl8u2cV3jNGwf1ts4hvlstlqtfvb6yhGPMey367BTXSwW8+75s2sZh/UwjEOvqqvlddM0vmFC9+pVgjFENP3Efr8v2kLrQ7LAMTvndmOqA2fKQ0QERYM9Zm6aLoETohWUGFWYuXHOLvbeS4hlkdidDRE1JRAQi8wrDK8VH+j7frfbbbdbc5mxRMzb7XbGqchARWjAHEcNC5HJcDQlfFHgXGUGVFHFERJ5RRz7oR+HwAMwoeL1YtW27bxhmDfLefP4cLfd3HvvZq1XjQ8Pdzc3zxerRQziu/bHd2//4i9/NUps2zYYrxACIynAEMP9/f3Pf/7z6+vrd3fvBEBAF7OFeaguFvD27e1qNR+G7S9+8dWvfvWrv//7v//xx/Xnn1/ZXGw2PSK0rQOA3W7X8rRKy0s9TSKeaKdqyeSncRhyntDsspx3cMbq9x4WNSuXgcrR0+ECCtZ3Lt5PjtETOuYck6ZWWPhChprMH5y1iv3UETvCPD0YSUslMTGwigiiyM5YblIGDAAg6DFE5QgSTZ8JHDFGiYgiEALmHLyZY78QyRbBXBMQUdHCFcyXpRWEXMabXP55FPwpsJedhDDXcT5oxbRQvjINZ0r4ohl1JFXYNLoiOPnSE/CRVW9aOOdahE9rpxyOPVwq2545EmJ2vTwY5T/Oll5jRrnhWXXExUcoWW0hVVVBRbSkeybtJV7K0kSpFiFMRIlURAUlZ2vNy1NQyWj09IJnbXsw4X46o6rwZNzedMMqXZmqGWgOzCaHw1I+lUCjBCSYzdpZ6xACk14tZ6tZ4whVxr7fO9SXL69fvnx5tVjevfk9qBLzoussUDqEsN9sCa1EDs1m3WKxcM71/X6775dNV+deKHvMQiMg73PrWIzxxx9/MG2n957IgfkERr26us6dT8ypyXCu8UTU9+N2uwZJNxmG4fnNMwANGkMIGuMwDAa3ylBC0EyryWwlMlTEapgMRJAy0qA2joiQ2bJSEDMRKEikTKYVVSUoKrF3zm3DqEIKBDGgRI8Ejmw97FTGvo9kF/p511xdrd5++02M8dmzFxqH/fbx1Wef39xcvbu7f3x8XC6vXrx48c0fvu26bhjHYRxFtfHsPcexD3Fkh41vAGA/Dtt+P18t39y9t1FdLBa7vh/HuPReZGzb9osvvvibv/kbe+t//+//9bt378ZxJKK2TQGaifads+39tM14fmPitAAFoOSiPZT/AAiN9OAF294hlXhKjXkWigq6OARGqJEv09gDUMRMkwkUUz4ktGoPBJKuAvP2UAZKVwJkyc90nVLZ+Sp73seF7GFx3rE/CQFgjEiUtItApFmtigDgCAMiAhElrwQixTmKIATKvDCK0BP0kAtpKpjDSojk
LT1fQeU0pJYv8KctkdLY8A+RAJmoBsJqBQhgzpE64Y5exrJ/BgfO+tFn7W1/1l59ULT9iEdjijmHkgNMa3BVhcmYmirvUXFSOVJsQiX2PYH0tdCbgc1yv0BJzjk97sRvpQh5qmpGBzMF1F1pHO/3e0Ztu0XjCUTmnbt6/ur7775xRI2j62X3+sXz589uYowPt+/efvfNfD5/8eLF89VqtugkAolg11pQc4yiqvv9viSEvL29NYmh5FIxB5C7u7vSh9pF6/r62vSZbds2TWfimnMuWKj7OAJA0zSWgQwAxhgAYLvdPjw8xDGY4c05t9lsbFSdc+y9peV0znXLWYFMyBi53W4tWNBSYj48PAzD0GVHGKzyqmjeerv9rtj8BMF8dhARxhHIKStGYQUlYCZmf/twz6DPVsvFYsXemV532Ozev/sBgG6uVwTaevf561fmYBkUdrvN1XLx33eb+XKlQJZvmh0SJu9R73nWzm1Ut9utaSyNOHRd97BZxxhd1/27f/fv3r17573/8ccff/3rX+92O0tY2jTNixcv5vO5hX+YtfXh/btpxf+JAO/S3eozR7IUnJN7oPLkxEPFyZFs9PQrnD4XEQmBsKg6lZEsB6VdXKorHd2K85lc4upjBk0+aDu6NFaa393O2QjI8QgwQAmAYDWjPrIgKQp6ghgRWUUS7RCBC5owAOCcRdbKjBKROXNaRrDEPlMCfUAkYlRwlnxWq2ws9U0Rp4JbB4x/RQ2JiBmYmUSJyBM3zquXGCNEaZ0fFQREVKIAqFgZZzSFmCnJD31x6w6U9z3KuD8N96Wpu1A5wdJuJXnOhOjqPiblmJ3PehKnH04OkABQ0j4dMWuXmDg81ByWFMMQJzmpXkMn+op0TYxa3NHzxCGAMNXQMrGHEiOokpMQAgiCEKAgCmCKlhaREERVkR0RJhsVTxiMoEWVZDlc8s09Im4f+lw23Tqjhi6AXKu2EZHZA1AKD4olL3lWPkTDSEQqLmeiKqHfr+aztm0I0EFs24YgrB/vQMOim796/uz5s5vWu367ef/+7f39/d/89b+wAkASYb/dmHdJ4yhVf0VsmgbJjSGIhFK9r8QSGIZZ2uUi/xnI2XlzvGZKScJs9MZ+MDXsYj6fz+fM3Pf9brMFgIf1vd38+c21RUQsFwurtqOqd+/ef/PNN+v1Zjab3VytYpxvwp6Q2DUtpBQtHbdt1/R9j4gtNcvVol4eEoL3vmsaU2kmQzXicjEv9shoMe997xDbxqsEFXAI3DZWk1Zj+OLVy/VmF0GHYXi8X19dXTnX/vDDm+V85px/++P347Af+t0ffvfbedf+/Ksv+yFw0yyWy1/+8pe//e1vZ4vlrGuGfrfdhJub58vl3KJNvvryZ7/97W8d0WLWfvvtt6vF/P7xoe9HQ77/8B/+w1XD/+W//Jff//7N//q//l++/fZbg/MffrjtOmLm29vb4nBkxtebX/zi7du3zrmu6yyjWxxhNpsVRXSZRxuiUSIzI6Gg2T7Bvo8xphDdIwVGDrhTQstXjGxVLRQRkz3PiEQp11xJn2jJIAFKoEiRgezKNE2HPynHdFgOHkoe2kPZjpm9d57Jqso7JiZiRkfozJ3KHDcIiMBqEEQZmZkUUVRQAMi2hmnRVZWBEYjIhRAQI7lGRAACAJHziIgEgCkRWmFfTDdWUVAqh7Z6KdWDQXM2UExxACIKQIqASCqAhISECv04EDtAoazetAm5hNYaU9XURNgBAFAVib2k3HJUmAIAUCEsfpU1wT2l2rV4Z4MuIk5EREP6E5iZg5SVVxpJzHK5mrALAqofZij+rO0nGwOO2v8BIumpsHXUeZOSJAJS8hHKaVOhmIKrnyQdaa5cq6UKzemYaOXVmaTFgyWoAGDMuAxWUhxU1TnPjCFaWnhFYKurnuM4KcudB/ppZo6qiVPOLI6IQBgamq1mjXcEMajEYdiHff/5yxcvn93cXC/D2L97824c9ler1de/+MqFXcPEgCJj
DEElKoIqNm3L7Ljxs24BhOY2KSJXV1clnR7mvCeaI+1MoiKiEIJlj5x7JyISIVZFxm2pD8Ng5jcLsNtu9jFGZOi6bj6fW6mBt2/fxhCMdnS+UdXiMmqzfH29CkPc9dthPwYZGR05dOTdYlaOySGjQwYC9jESOkyZbSKBcw071wQNVsAdIAUdmyqm32ytw2gKUTaa4cZ9zyqqwgKd8yQ67vYQRo3iGlrc3Mxms/0QmqZ5XG/73XbXDzMAlXCzXL6bz9rG9bstgY7jEIYdEREKKoRh3zXuarm6vX03m81ms/bt21tiePHixc3NDQP+p//0n9br9XxOL168+M1vfiMifd+vVq2Jm7YGLHNb4h6uVjFGy2Fm6V0ItM5Wc7pDD/j1iiGusQdO+M5p313w0ipX5pycT7hiYn2kJ+bzesed3seIakI7QiZwhADmeCKkqRQ1nTxXc9iYglLFpj9BtVQjWmi5BtQ0qlpBuJ5oO/XQJ+NCm5J11BeZoUORFNRsXVrEEkiORQoKCHKRYmc8Mz1nuTMZu8FWV6m89ZnkZKcieT2pKYETu6AgLCLCos45JwBALoYY1bkYY3RRzC9cRBwEVReCiACoiEI0g3EeMwBzRgIzpl3SIV8aUfx0+NQqJQ9msa3Y+RBAPw7LnlCPnH0oVFurLP2Ph81Dq9v0z2BJRDFlKwAraigiqi4dkCsaxaPO2HkRtHpdlru92PnKkkjEAmtKoQCZbCITaTFrWaqaQk0mdi/nIgdIWtDSDQvyjHZ7BkSMEkXj81W3mLmZA0QVFETlhpTbv/rV13Ecdtv1uN85R6vls6urq9V8sb7dFzsioSNP5FgV54vFMIxDDLvdjhwj8ny+dM71/c76Z1q1kn5zNpsZI2xqRnt959x+t7frY4w1Y8HQjft+s9n0fW96k3nXOOeaRZeYPtUwDiHtBIjDGLhnZkJUle1mvRYBgMcfHiCCoDh0zayZzWfz1XzWzB42DwyMDg3yHDIwMLCGESCCKKj5ySmCIIhnJCVRyeoVIFAEaV0ShoAd5AkLGsZx9E0XxwgSurYBjbvtWiVcr1aIaNVXHJL3fj6bXa+uxv5t7Pfru3fLxerVs6uum+/ub+NuRwCM0LUN62IL66HfzmfNF5+9Dir9fvCNu1q2n335xYvnNz/++OP//r/9f7/99m6xwGfPnlmBvWEYdruwWDTFtmqjbeMWY7y/v9/tduZDVCSqkpOz3o+YpQD7p3m/a06kVO6P1f6CQztfseeZT3pQiaARVPBImpnqS6SKAJV2SisahadpmA5pyBEFZkLHZNZcZizi7ImLTfLYJBDMpkQCQgUggWSNSinm0dR+iXLEJCdpIXwRgFQEUTWaZ3IAEFXTMrki6iUlkHl3J5XmwUgCAIEoYAQAEAIVTeqmNLxqSV9Qs4dHUS4e6R0vCSoCB/n3ytBlPrJeAgBZfXhs25uWwqGEV5pF2lseBxb14iKpMnrm6NRzjC56UcuaISJOnYg4Z8pWggBRolpU+Kcg1mX57CfG7R0ffKLcdqC9PDzzMR34+IuPGcMTpxIRU/EX0a2Y96bdKCKIrJCL8gEnFBRz2IZyZaXAzhsbjzufDxQANvsNAACSTfc4hBjGYRiIfYxRItRyZ2KTlcrPy91EhIiJWESCBlv6zrnPX81jjLHfxhgd42w+Xy1edI13rOv79ebhvuu6z16/Wq1WYdjf3r1rHBM5JQByDtX7lhyDYggxBBmGIDKwN/tdg4hmjbMABtOVmfD37t274mYC2VxnBkKotkkJ6bsbHow0O+fatrP8ZN77fRws8s+wE82Yx+xnc8vOrtlXEwCcc6+ev2AkcuyIyTEjIVEch9Z5tEQREkRAISgCKlAQZvHee++MWIjqMOxjVQQjljTlcbPsFmimoBxeGWMQEYjiHY2DaJSW3DiOcRhbcrPZrN8P/TDsdrsY1Tm36GZfvn5FqPd3jw+3t8uue3G1Wq5W33zzDQmwg3dv74nuv/rixV//1b9p
uPnmm2/ev3vz4uXL2/X9z372s3/9r/7m+YsXb35880//+I/ffv9wswRmvrq6+uGHH/IiAXOLPVX3WTylaZ7HcbQhJcKjzH9QI98hnGjlflKo3MHavkBka2mvvoBS4SEs+GfWh9MAhnyEeBgOe2QNqSnMgY6NyGWXQSIijIzEgMRIBFwIdfWrfJNTH9Qzr2ZUIJ+LxbdHRCZjHaJWpb7SGUPdRD0EsaR/KjcXzEZ94wvoMrePiKQQEUgBiBgwgqJoACEFQTj61KSwNa8CQeTkMIkpP3geAUjHqlCHq58VrmtgL7Cnqg4pElllLGZmQBP7nIs+RnGq3ocQ1ImXEZyqB40AICrKIpklkpqI/9T03p8Gh6aKltqCaPZFsA6lnpR+PS2OfarAd4S1H68jra6n4q5SBCnzZDnbNxGwhVakvVpJk0U9422dCGCCyYkblapYzGkLQQAA2JIcpnVi9qTiGll8mUExlT8CSKBb4jiJ2qZj70RkN/Qi0ra+67oW+z72Me4b5uV8sVot5ouu9c3bH39o2L16/XIxmzeNG4Z9GAYAcL5F8oqS7IdhhBhEdJTI5Luuc64xXa35hnRdY+Gxmmu3WjGdvu/NpLRarYoICADb7TrGGEIiu+M4hpCcP9u2XSyW8/k8A2rYbreC0djzrvGBcBxHCSHEkZpGVJjI+2Y+7wCgdb7rugjRERmzE0TiOPbjqDE2XUcAwTiIFACEBDDvumJZsAkdQpAYd/tdZeUCAogxhhg3o6XwTrVtEaFBFqYRozMyigoaQSMj+NZv1lsAmrfzWTPr+9557x3NZ+2Xr1917B3o3Ll2hl3jPMjPP7/+bv24er5YrVbPb541TL//7T+9fftWoiyXnXf0xeevm3b247u3//Xv/m7fD89XTiH2fXj27Nnvf/970w8tFt57v91uS3aber013hvrYMIfZp/bSzuxRruy+8xee3TN0fVlZVrWk1PdJiT/kAnwpv1YXWmXlyPL7HNyART5poa6CfIPZQ/MSdLzFJsUiGadQ3MwRCIG89ssBXioLod74mKjEAEQlKEoYynF8JUOG4dRRD0090vV0nHNmJ+kCY2QbPgIqmLFqHNCLjwh01wsodVnKpqsQAhHn5Z6HhVy9GDJY4CQPXqmuQKIprY5JWRFSKwXh3HfBfbMdUXY8gqqVwzeq+DoRUQsGnqMEQBajJYWRAUhRNCgygBZnfXR7U8s7R0K46qf7KJZX38g+T2Z0+T05AefWgPk0f7UHG+nVfGE0+2nVUC6OfBUAp9qypli1RDzAq86m5MbHHSgiGtWSm2IYRxHS1VrgtTjekvkiIKIBddMfQOglMUa7IFSviq70US9pml26x8RcTVvb25uFouFJx4ljv123rWLxWK1WDJz3+/6vtcYiWi365kiEki00FwCAhFYLa+JrECrlILypsM0ACsaM6OnL1++NNdK2whmCxzHUTWGEPp+tHRZzrnZrHXOzedzY2RNEEG0iFtBhybtlRS1BqKW0rP1TQmqMy/o97dv+zJ/iETUek9tO45jyIPPaIY5QsSu6Ur1iSLbWccMC9E0ZYh2WUOOiExQUFUwFl+1dT6GERU8sYaoql3bIuJuO5pbhFn0CdAhzZrWE899++rZzTiOMY7jGBet++zF11f77dXyGlC2j9s3P959/92Pr17dvH792moVxTiuH4dvfv/b3Xbo5uwcPz4G5+Dzzz//3e9+p6p9rwCpxFIdlqfZ5w7Z24jZ61zahvWWKVFf9VY6Gwp2xJhWGorzHigmPECFVcysuaZJtYMmiOWDepl6eKsj15hJciujwTYXmLwI2ZEj9ozM7JIUaDHbafDsDFSh/Wf5g8wU23GiA9PogRShNkI8EsGTnJfsGikADCCRGtQiRouCJk9GIEABpRQMopXnoJq7P2oS5oAUAM37pUqDNn0mFS9kr5Z8IzmcsRRHgISgemzbg0M9QD2j1hw7VFCnABAAnHOWxr0hUMHGamGbJUMEAFqKRm5A
CSikmBeUOEaAkiflvNfln6+lybC5k08wsJ251Z9TyXn0K8guwpknxYRTOW5PAI0hy2TCJdVlYkinW50If0etejRUpyrVWbbtTcQFTCmPCABt24ZgIUExBo2h+AlTSoVexTPYz4dhCGLBUMKZUs/bpm3b65ur6+trZh72fRQlwpefvTbk2u/7ZPgxs+Ko3pvyxzlHTdMpolmaVdGCvgXUoguYebvdlnSyRraM5lrA33q93u125jFo13z++Wvvfdd1iMhZW2p170IIm/XO0miZ26H3br1/KMEPxhN0zbxtm3k3M53qMAwSRlXah9D3/YtnN5bJ2ipLhMxu1p6BzIzoGIkI99vNOI5DGCdDl4qqXl9fE4FIEBGKKQWwxuB9h4gAIlEsb0sEFdCZW+z3PRJ2bTP0I4J2vhnHsevmJtSqqkYBwBTh7ti1zcsXz969ezefz96/v5237XLWtS9v3rx597vf/VPr2r/91//mL37x9fX19fZxMwzD493jP/3mN/PF4uHh7mrpFPHxoW87NPWyeX42Dex2StQXUaaoBI3+WMYWzLkFSkneFJt/gnxVAtiLe6putcOLAErJtECpcmG6OxEeAmdRj3GWO6odVMEenNlpk5ykyb5VpDpCKGyHsTJZuIfqGJmZCYnImduiFWywm5zkkklPVMHsSo3JzyAZPABAJACAFWRFCKl8cS5meyDgmsd7Nm0W2LNIpEJ3ErVBshwVB8MDQJqdXjTiJA+oRU6qKl/QN1kWlmLDm+Yioa4JjzYjRq8c4AVpTw8TYtVDRuYSrVIYQGZVRUcQnZoziwAUlxaPAQCTqQ80JexThPFTA83/LO2yEPlp7SOVnEd/fir4Hd/NOEc4gD3bPoWUTx4kqgBHwugkytebsKBR/SzNwQzVNaqqMSR3D1NyikgfQgy6WC2JRIFEeoqCOHmv1KNRzjjnJOo4jszsWtdkj/zPXr9qmmY+75rG930fwtBk0cp07AZOVqG0bdumnRl1AEglDsyy9cMPbyx5iohYmmZDF0QtUl39yvf393ZgVr1Chd+8+cHys1jkz263W6+T2tM5V0yqfd9b6SLXUdM0y+XSgJCZHRIibtYbu3Kz2YDIbDZDxHEcHx4ezInGykEUMFutVqrH9AsR7x8eAAAoab00O87s93sAGLKHqsXMDcOgfSzSA5IjIuAELWM/uFnrnNvvegBgz/v93pI8mTgrTSsiImG/3y8XCxHxRAzwxevPSOH7b77pt9uNjNfXq3/zN/9q1nU/+/Kr3/zjP97f3n733Xf7Xf/DDz8Au3/5L/8lIooEAWoamM1mFtVnJtKXL19+991b4wZMZi3KtEKzLXU4IlqhXe99KiV/bieWhX0EUbUIWMuCtbSn1R3gkAzCCVaVRxMeU+ga9ijF+RwbI5NdNqsQJ4EPpqBM4qlqB6JacpaMj6Vn9sODvn48R16uzD20FAFanlES1h/BXs2aJC9BywUlClo8gAA0KiKYNvWgets5+vbR7XD25UgFqGDLAwGS/tKlJyNYpoaSvwPT2JKIaAQEdejJ8TiOEdS0o0wooB4USeMoTaOI0BD2gzYIDeLoyG/mo4x773rpN0q9G7cKo4Zm3g5BxhjGqAgeES1/2hBHgANlAtV5GeBYLixeth/ZJgfcLCBn3oQAwNLXpfpYaXGHMqBUL6BY1vr0CQCh4MzhF8UpLdnMs5hY7MVHu+6SOY0IRNCpqQoGZjT/FabkpUzkgARIicD8jac3RkQmZiZmII0oORBBCVQxMoFXVZGYOMXJgZsqeyKKFMX7SAIEjKSqg+g4xlFCVP3Dj9963/p21qxmLa+GYdjudn3fj/c7UAUMYIUDkDw4RGbUfrf3Db64XjQkGvbX16svv/iMx7WqxnG3HbaIOO+c9y1blUiRfrdbPzwg8PPr50S03+914QOiMjVNg0T7YXhcP5oEpojMvB/3Bi2LxWJ1vfKCqaDkkBJGW+u6rlRRMMFi2Pci0mLbuKZxzlBKxjHK
CAAz7/t+F0G8d4gYwuhbaWfdynfz+Xw2XzBzP4a+7/dhFJHZfBWiRMBmfqVAfYxDP4wjfvvd95YadBiGMQxlbcyvnkWJoNA0DbeenQsxhmHUbhVjDHGMQ0w1LKOo0mY7iMg49uM4gmjTNMvV/PrqajtKUX6KBImCQQHg3f2brutgP8hAXUNENPaPFHbjfryazeZNA2HT92McIhGtfINjmLWtw/jlZy9k3Hz28ur6//y/bDab7X4MIQTRGOP999+4cXj3cAcS79a3Iyhq6HX86i++/m//7b+NIXbLzpP77LPPbt+973f7EOJduL1ZdLvdftY2zjmmSWncWEI5BCKSIagqI2gMfa/OOWRABmUQUiEVFEEkEGC2VLSUgSTp1g8r30JxKnc+71Q1u7QYDQSnIIBRQUXAHHgRKdLa9kQhLGTJvtNmNyVtTN8AIAuoMpCIgIBaBisADYGJHBEpgAKKOgBScC40zI3TltUxOBCHkYkaJMfoiB0SIxKSSXvJjjVpRDF5eFrhKVCqgtgElJQQMEUxRVBQ0QAApg6Jmur3ghJRBESIARBVWBCFrKYVAyA7B2ZEVJbENyuoDjikDJ92e7AsiqAxQNYEE6iCekAAGNFqQEoqg1Pb+c7RSV8FU0G5I8Ch9F8FnVvumwMIOTRNYeXsVFqxnDOz4pTm32nyh3bOxZztSVWbBhHRrCnBxGECQpaxZ8Wo5oYVVVLkkaW0wQoM/lQC2Z+kXRLRPvU8/FRpTzNqTtJbsVOiqKJCChJIQ1cJ/vWQ1j8smVJO5VH7aZHtjBYUxUW9YHLuzfTb1eJqjMGyk1iWPEAkomfPnoUgQ4jjqGNQiRCVGXG323VdM/a7H7///nrZfPHZ65vrVRh6kOS2UGl+SFVvb9/vdjsAmM1mhG6/33vvb25uaLEwSc7K2lks3TAMy+XSdJWIuFwurSLB4+Njh857b0XP7WS5rO/7/X5vZd5KHOr8qjVzYEHEIGNJb6aEJcgKGR37BlkB+74XwBCSA5Bzze3tLSIOMW42m7v7R0sc0zTNFy9e9H3/8PDw+PhIjC9evHj9+vVqtYpxLBL8btfHuI1xDEH6fRjHMUowYDCHTlV98/aty6Y9Udlut+vNAwA0iysLAEdE1eiZSxJR0/zHGEWT8tA559r0banKZF41FsXoByYG7zkVZ1c1I9B+vxuGsB+GEEdP3Hb+f/43fztG/fH9WytDuFqtvlytAOg3/+O3//bf/lsrqte2zTAM3HazWWeryNypLBON9aHt2rI+J5HoUCl14ExSa6oOryx7sCzm0+1mFzJzjAcFg8odSjh2tXdo2hcHlSxJtTChJpjZIQAAccIj0gNH+joG2mEdFU12ZvLtJHP4nF520nMiWuDEkcBaXuaUwhY6UxMNO7ITAJDTMCmA5vB/RFRRKoolwUppnGx+NaNfl2o5LwDACTCdPfM0jT2S6d1Z4nt0KVa5Wuo+aa5Lh4gRpvsU5QARRQXmXBKCicgpk3NOe8QxAg2IMUiCPhWwbCNalADlVj/V0fNP1U5H8JPOn52kCzDzVCMDIErXF+lRLR6J+DDlzXGeF6hgT0rJoQr2ah9OIso2ZmNJUSWqqvlWpFycySsYM3Oc2tD3IXE+avWojG4CjKoYowQBjYTAjIqqBDqbte1qtpj51y9uXtysUMNms6k91nIdu32Mcbvdqup8Pl+tVoRuGAZDr22UsR/MIDcVfYyyXW8Wi8X16moYhs1msx/G5XL5+uUrlwMKi9eJqXcsRNpC2k1rau8VdOj73srmmehsQbKu6RyAeeIAqSoyo3PNsN1JH2OMfRhDCCEmRdYQg3nq930/DrFxvpvPuq57d/vOYt69969ev/7ss89ms1mIkdkrRMvKmVSgcYhBmZ2AmimTyYHiPowyhuVyWXYQojnvOFOfas45IiIBwjiOjTP1L8QYk8s5i/kHomPTjoYQstOMxhjNMGn1jowJCGEMITiixrmBeMSkTXVMPuLf/d3/1ofY
dt2//tt/u1qtfveHbx4fH/t+XC4Xz58//7u/+7vdTq6viZkj6BBD27ZpzREZSNsSMpVmlFFBs5sHAJrnesopSUnbJ0g5X+WhKrDeHafbsyLEZRdAjCNOucEk+8dDdro5gD0oIKp1/JkJWBVNSEZuizElshtOsAdEOdyFjVdJFj7nHBMyEztkh0zIWcvtODmwmLSHxeUyJcg+IQgGrgCguf5MHa2RX6loXycfEmQCMMnF7g+qmPJtMgCYYjOkTGQRILsJAEFSluUgK0hZggEg2vGh/c9YsvzH5Jb4pORwID7hoVLtYgBDWRPZCXAKzJwmu/KH9FC5uVd0Mya5UG107bN3kZibENxA/RDGcRxigIimoEElgYhKQIpKgEg5OPr/+FZzgnri0Hh6XuHi9dNiuuCcefrc01ai63LaHjIuyrzh8TKOTj007yxRIsxx5VruWQFqmnrJfOLRW9hOUEgb2+oFiFh0tBrvz0jIDJrcIy0ZiokLYVQRkqgGoEO/G5y7+uLVr77+6tnVYrd5WD/caxy765X1ZBzHcSx3kLZtb25u5vO5iIDScrkUkfV6vR6D1Sswq1jx7Gfm/X6/2Wy899fX15aNWlXnvt3tdgaTkEsiYE6N0TQNZIWJPXrYbkrGMu+MBjVE1M3neYJgjCmjJlH8/rsfEdGIFToGBkAUkcVimWPbxaSZ7Xb7/v37x7s3V6ubl6+eP3/28vr6umkay8m5WCzsvcbRArcljBpCQEwur/Y6Y5/Q2jkH0Qqmm7uDJlXtGGKM4JuCJf1u2MnWCgGasd+udC7JcDkEhYjIsTMiaZpY1QhKoqNW8dpt651bzsY4hHG93d+rhBD+5V//i7/7+//6m39aO/67zz7//O72Hgi7xv3yl1/bCry6avt+j0jj2HvfAAgwI1hV5Ijg0GHDjfkTGIOVnPfxWDgo0oOes4bWG+F0m+hhKxunuDIeyUs17OX9noCnUnImaQ8qUKm0NaopNxgAAEq6T4kFKk65NiOWq4U4LVFHzAQlWMXWal7DUz1Cy8lJlXtLpgu1MWV68ZQip4LJ/HYlpE1V7VMtWg6RICoiIyooWQq0OklkhtKka5roybGLOBwN/hG1fFpAL/epL5soNp0oOc/eiKrSHmVx20muYM9VZVq1+m3UgBjFHkSgBECKzOSQAyEJIhArDbCHAUGHMZqLhjlWZ7/UXKjoj/G5/CPax4t6mEIECrtXDWkWq4pQ/MS0IZz/qqQCOnJEQSxMIhIiJbvhGSeuemXbslaYaFY0GzaAAjCpyT6I6PL7ls9k8hYAgKhRVVP9AlFVXSwWwzDs+mHowygRAAio9a1jikGHYRCMIYwhSIwRRGetXy66m8XMSdiv7/rtumGdLRea0jGHEmNgMDOfz2ezGZOXOBr53m63d3d34htVLXq8snrX67VhwGw2s1RhBqW/++4Hu8z8IxDR0kBnsq6l6GWK0hv2TdPM53PvPVr+aG6YeQgRwOBc+jCWYg6rZy+KpBvHaG73IYTd7sdhGDSKc65pQ4zx4eFhvV6/eHn1+c8+/9lXXzhuNpvNerdu29nq5nqz2e52u/1+L9FgmARcEJEhNEBAkliCvo9xRMX3724BlYg6b3VrTVCOwxB67Xe0a5pm1jXee2M7dputhKjSOOfMfxFFSVE4BQqa5EHojJaZy4/5EBKbTxN678M4ete03gJbmq5pLYTi7uH+L7762c31w3yxYoL5rL1/ePjN/7j9v/3f/6/ffffNq9cvXgP/91//AxOJhq5rhnFPxEgYJY4hMEnjfeObqCFqSMp0FKviBgBKIJhrr5G5UjAkB4dCOqmELasKTsqqGvmwOBDmH07qQYOWvFkEQGtNZd7OlcOL1LsY4ShuD6dPVKWEBLZtlRiouGs6ZIdEwA7zmE+hexbV4ImPhFqiCbSSxRFOYnArLRqrilo198y+q0mBNQHJB+oA0XJNA4oiIwKgUgZ3VJlUojXdQE0hfGA6X03etqY/kpwy5jR8
LzMN9cHlVokAB9hZSXtFmakXdJ6FfJQDRCx5HQGscmWe+0xhiSgIEQUgJSIaEwPiQ8ReiQhVEJEGQFRADkEBAAVDyRVihXM/tQbwn67pBe0wXBTpLoqG9T3hkAepb1hfc6Y/ULyBowgwKiiIKqcM4+buPOn04ZC/s1Et0l46zmmBREQPIqVydc3iiwXJSbR0L9l7AERUVCQpyTDGCECOG2iIQhhiGIYxxgjoLYxdNTpGx8zgCPFqMf/6Fz//7MWNjn0Im+vlzHmOQ3+73oaQFJDM3LbtbLYwr5PNZrOLvXl1Pjw87HY7ZpZDy02JZmvb9vr6+ubmBhHX6/Xbt2/NUaLv++LGQqWwuIhZBFMxINXiTXf1/Lk53BPRKDKOo4qGIOvN2oL2YhRBIHQqEMboZr4f+81mv16vN5vN2O9ijCDx9vZ2tVo9e/ZssVj4hs3qSUQvXl6ZlVEklC49PDz2fb/fDft9b/lviSw1KM6XcxDdD2O/2+92G8tv4hmbrh32/Wa7ewgP3vvlbD5bLhaLlfeD6W93u53E0fxLEXG/24/jGGLTdZ2rKi/GXBMRQMYhIoZJYYAIqZo2hDAAKDOTAqhEkThGIJx3DeK1975pXFRoWg+KD48P93d3189uPvuf/xIR/8c//vemaa6vr1++fNF13d3d3f3D/XzeIZmVJIoooTCzb13Y7o9oVA1I2dIFNIVyU/3tRKNONlrev/VXU3ahSr15wETmKpV1rZJKyVnR1QSHVMmaaEKVOXqI6SHtL2Z2jpzzzNGzSXhsUp8jZmbH1LDzJgsSEJEjQ8Gpe1XFBsstn2oYHRC3Q7vV1LfK0i91fPoky45ITpPPiyiKlXjVeICsYoUiqvuLBsgqIlGpVJ3hUCd8PMtnRb0PasuOfmh3PCPtncqPVAWtl6RBIoIwrSrNOWvMtJrGmlmQQiByyBzIITvEgV0MSNKPkViRwTlwI3lHIQQ3iEYdQ0hWmWS8UnOK/WcR9j5e1Pvg+UsQCJd5jpPepBwdqkRpxUw5CJKoZ3kcLNAm7b7DblSKlvKR1aeTAhPMfxoUAEYFFJ2kOVULQo+QuDMRURFQsMKK+35L6Ihd17bqWx56iNqHGGKUGEGBCR1z43znG+94Meuu5r51OsQRVSAO293+4eFuxIV12znnfds0rUlyKinhyziOIjqOgdm1bbvp96pTPwtcffbZZ/v9fn1/R0SOedb4zcP9+/fvv/75X5i/jNkLS/rNx8fHMmXmVTGbzdq2bTtb82yRgiHGYQgx6nq7N1tdDOqa1nsMIex2u//+u28gA7CIMFE3n7WeX332+WI2Xyxm3ntCJaJ523Zd5+Y4juOu34sIk48qdw8P79/dtu3MbG9hFJEBEU2/CsjD2G922363H8MQY4xxrzHM5/Nhv39Yr/vdRkTm3Wx1fbWYzZ/fXDdNQ0QhhHEIMeydH7z3Icg4Ruu/1XNI7I6AqibDnmAxGxm34b3znkO0bDVjjDL3TApBJIYRmZjb1vPg3V9+/Yvb+wdGaGdz9+bHtz/+IMPw/NVn3//4A0p8//5t3+9evHiBiK9evdhuN8gApAIRGRqzZjlUiMnDyMLJzE/K5B7DBE8Z7Cj5e2Sh/8i2V6pyH8GeyETiLZ9bjKJi9UJRFZVKMBqqAqdKI3WwOZcbYi0kmJKzMiIATLKmOV0XfGWmxjvnnHfkPLnUOB8479g7U7OTYzRvl+R+XViBCvYoZc484RiyuKeQPC1JjYVN3htQAt+S2jNhJBERBMBS60oAc95OJQAyii0p+3weZDH38amYXw6stMC6WDnoASBqlqNSWrMz0vlTsIdVUHVywomHWVrwQBK8aH8qEGjdKITSVXyEViS+MX7QiLJFCCFyQFVL2gRENDp2PPYMcWTCqEGJcBgUVEICPkXSf8aiDZcA6VPPw2X25FP7Q2qWFVA1FpEYuWg1SuHDsizK8SnsFW7iSKVe68e1yIlF2jMzNRfPGtv8VltD22YWgvTDIFGJ
yDvXXd8g4m4UGYNRWBBtHDXOe3b/8l/8cuy3+92jI2w97Xebsd96piGm5Jxm5FBVUzZKhK7rROTh4SGEaAkwS7So5sIIVuunbdvvv//ezB7mkGJZ/L/66isrpG4qTctMZsj38PBQKidY6N58Pu+6bt8/jmMchu0YLLBBhyH0Y+z7IYzS9/0YxbvRe2+he9x1Jf+njEOM0bwPfvbFZyEECWMIoWv9fD6fty0z392/m8+Xs9ns7vbh2zffb7d7BGL29/ePjg2uohXumc+5bfz3b34c98N+7C1PNCvt+3G33W52+9Y3y+XSXFj73e77Nz9q0F98+bnph81tdRiGKMmtKYxjCIlQtgDOEQBYqlVVlBhFlQi9b6y2OxExE2JD6Ar7a5ikwsmRDTSEEIb9fu9urlbOUVTEly9n3rN3/W743e9/+8u/+NWPb354uH8c+/2727t/9a/+Zjab/f6b31vlUWZn4XlEyZvUOWfZDLRK6IPZ+lUEO2acfBwPke9oGx5KGJOmJDOCQYtOtWqJ0UzSElV7p9aHTcUZcmB4fQursJpcWowwoyqiOkfOOe+T16anFK/niD2xYzJVp3POO+fpQNiFE9iDbHzBc5Iunnj0TNTgDOWx/0tG+giIkJRGput2Vp3MyIOgL+kpbBAs4p9yGuvsvUKWNkQ1luQsAsCAikhqnyAIKCoI5tFAChH0bK7OdB6ATWEooISkEECPq6sfMQL1uBBNXrlmYrH4aMwhDUGTzY+Za9gTzIoFAAqBQIkgOsfMQxidIz/GYTBXJYxOkMY4Co3gCGLkIZhdRMdURQWIANhlVlRqk2mtfyhxvkftEhphzmBzNP1EH8DaIxVBuXvmT6qRTN6+CXDkSKo+ivM7F3ULAOQJQBmKUiUJebYHmqZpW995ZzYAysxdTAlkU6BuBGVH5rZHubqeDWlAMNRUUKapbJhKKEgJOWUwFB+tqkmuLQ5KDGiZeeMQLa2Rb9pm2TVNw4Ag6h0tZ/P5rN2u71vv2q4FibvtJgTLthVfv/78zZs32+3+xYsXs9lsv9+r4GK5zOm4YD5fmFYgBFGFN2/edF23XC6t7l0IYbPZ3N7emm+Iubp476+urqws39iHYRhub28fHh7MO9RQ6tWrV8vl8urqyoxU5s8SQtiPO0RUhFHidrvd74cwiiKFGPfDoKDz5cJxMwxjEPFtGzKpdc41beO979rGe795vO/7XmNYLBbd9Wq1WnmiEELTzjbb/d3d3fv374chMDkiDeOgCFEFkdtuvlg6AIox7odxt+/vbx8UZT6bbfseopB388XKez/s+8fN1jQ07WzhZ51GuH9cr7e71Wq1Wq2AKapIwMaTahBQJgYgc4pZ6KKdLfr9PoQwjnEcYt+PqjqbzZfLpemd9/tdqv+uwZZf57yqMshyNndtg4igm97tH+9vd20TQtj343w+f/nLr28f7r+5++bz16+++/abvt8t5rMw9ItZ9+0ffrdcXv2f/v2/+6ff/O7b7/4AosvlrGm6/X673w9t44lSbjZgLNJPN28R0Yxh6EpMNzrva+JwhHyZgNRRDakEXYghRo0SgYCQzFMLORIgAWuOv5aUIeJMgjH7f8GhJNwkScWea0JjBECIqTYQAyXY88QOZ771zlsMrtkv2JGpHxxRCmOwYFxMjq+V4mdyxWdLUXYiG6FSMVKQjYQjVdUQkodhgnxM7i2UX0RitMTTERCjIoAIiEQdzeBlQx2r0uKqOT0ZQIyginKiWyb29oD8GAABUVABVbD06znfPiSji4Llz6k/iUtHAfN9jF6i5Z6pAaP072gKD9Sj1dXTsFYlrw7MmFSlekkFUjHGqMZ/ASIOpo4jAuGgiJGEWBghBGTWEHCMqjpEMWVaYruKAgpORKg/iUT1025VXvbot8nvMcPFB29b7Gdl5K2ZRUoBiYGQgJEAidQzOiJicGjjwyCqlL3FYMpFkMqNxEiAlmxMiuaHKMYK6Ws7ZeVCA9VbhBhy91jVvMpV
FS2QghQENBl+ERFxiCMQo0TfNI64827WNa1zI6FKGPYyjvt9vxv6XjQCwLfffvvs2bPVamUeKN57C1cYx7HvB/M9mc1mIvLwsH54eHj+4rkxyKU2gvmUmu+lud1bChhV3e/3t+/uLHqBmReLhQmIbdteXV3Zr3a7nWF/omsC6836YbMOQRAxjPK42W7WW8sh2s46UNrv930/ENFqtWoXcwtxQ4nceAR9eHgY9lsRmbXd9fX1s2fPVss5AGw2m/1+/259q4rjGLxvHbfMPsbY96NjVoUYFDSoopXiCSG8u7uXIF3XiGI/hDCMPFpdAuq872YLCXE/9EPfO0/etX2/gREs4hYIYgiWPinEuO97e2XvWVX7cdhut1/8/IsQJIaQeYtogXe2ALy3oneOiJzjevH3fb8fB1OWLhYLSzL+OI4xDH3PTdO0zj97du1W+NmLl+boc/tw+/CwHmTYO3f37u1XP//y5aubb775br/feu/bzu/3e7t5cqyzQkjzedu2QUZTbDrnuPElj9cUJYc13UqrGas8W/n8QQBDIYnFLqiqzFh2aJbhppvUO/uUyS7oSERF8wIAJY6cQTHH5znnmJMvrstWPRPpHJEjdkze8k8kMQ+IXXluLe2lAmF6nO5DLyfLOiJWJsygYC50MA1rTvCY4EgBEEgBQadELCb7SlY/aQ7vT2rULL1pzPDxcZ823FlQnj5TkPjprxDkbCrqS7N1NHNY/KJMyVmFyGCWtYmIhJgCkx9pdBwdjc6FEIJ9mkl2dGPj2I8cx4DsxUc/0uCMwZZxJC/J4DSGOOmdVVWnzOX1PP0zNssfolIBXrXUikSJJXynWmWmva6vnHYFgg03WW45mjTmRIQoTWO2AFeiUzX5QicjTSWfKQDGGBXQsqGmPiuo6ohIoM7qZaRFXe3nFJwHAGhyntlaEBkxZQAiQEDyrVNVFSt2YXkiHCIuGmB03rOpa5hQxnE39m3TyDj0w9D3QwgixIiOGBpKBUVDCN61s9ms7/vb21siNp5XFTebnYmz19fXV6uluWL2+77AlWOSGBDRiuo558ZxXK/X2+328WFrZMXOG+ZZHaIS+WczYtLe2/vb3bbf9Xtm7rrOt12nKgrX18+YPTln0pISIpJrmtv370wwbduWEJbzbjnvpGGrYTufzz27/X6/3W7NiabXIKLjOMYIiChR+37c74fHx0ci9q5tmsaqWpo+ll0TxmHXj/shiEjTtDGOd/ePiNi1vm1bBtyPQxxG7IGobxgBYJQ4hFEJ4zBaNsXlchl3++1uM45jVu163zZ9P+73+2FI4fDMEmOMQReLBYA5FqYFZoElwiGFtwOMw2hFkdq2nS8X+/1eVR0SMBFq17SvX766+8O3TA4dScRZ63G5GGVEie/fvQkhLFfzr774/P7h9mG9VY1XqwVEolxxwGaNHAJp23giQsemG0xFx4lM4XYk5BVqXtuq7WQIIZvcECBirm3CjkTEyq5Zbq1MeTjj3YECJis5zxBPAMi5bONEkkHZUo5hiv703jnH3otz5I9se+ycc4zAjM4RO3SmXgYE5yYSXXZwlvPwxBU+SgBQFEE1dZrk2mPmfW2bWLMhEiAQAAhJYXGsJKGxvKUMECJHUUQOGiBH8uWLUzINcx8og4Nq5bD6s3T1EmG/pL2rtYAH5y1LSxHsak5tmrbDB5SL67i9QtEnRqwEixDBVJ4pp5ENxMwUuTjgBudG55hZXCAXwihu4IGH3rthGNlhGIWZhyBkSXeN7ZCpbsCfD+0u3ZkvFHQugHfEK9U20RNNyJlnHXGmkHepbyy0NKICkhIAMXjixnnvXcPEzIzTmKQ405RbDETEdN2Y624bhSo9OY3XqWBPVWIRQ3O8VOo7AKSSxkAGMLnaQ+GSHAEur2caxcifYyIFiWOMIyhFK+UjEYgdsymyrm9e7Pf79Xpt3vObzWYcR+ec8dchiMUDOOdMsfmwuS9ZVCDHj5tsl1J0xmh+jBY21zRtspHk
ZqyDBTyoqhkUrUrRdrtd70fn3NXqGeeS6Fer5s9u0r0AAIAASURBVGr1TAT2+/12vS3Dvh/64f14NfOL1iF6AIj9diC9vr5ePrt59uxZjHG/3T08POz2mxCCoThos91ud9vRDJCqaMHpMcpsNrMexhh2u/1m89j3Y7NcIlOMcdz3IQy4XHrfdt18sZiFEBSAPD9bLgBgv9/3251QJEAFGFUgwhDGbb973G4etpvQD6JBQKXHoMINd0xv3rwxfXXXzdvWOWc1StFyhFrd9irQIwwSZ92i7Tyh8x6JHCKISMPsnLNsOEElhAAK3vtf/fxrG1uK2l7dxCtdr9f3jw8a4e72XQzDsxfPzdXl8fFRwoDQOufa2axtW0ugpSgxxtl8TkTAVItlImIG0WKu01LJobKS1MiXCVeicnZu2st0Ereaf1doUYkXPNrjl2SJVDdAiZJDJmU3TvvnStxe8udMJRfMjcVC95CZzZwPOSmXScPlKQX2jgia1No+ONDTTm9XmflMw1fqSGOpt6AKENXqKqAgsJIisiQeggsZAKSk6TWcwyKb8kH20sN2CcYuwR7ES2IsgcGeHlazfeK+BfPS54E77KRyrA1UApRLxCEiAxAARYxEVh8RCdAl8zPFMSCPwYm5trvReXI88kCRRd0oiDhiCJYUyrzp4kTiaz3tn7tdHq4MdZOt0FQEx/B8BIfHsyZTymDN4d5aFCsKSmQsp3OucSWm1eRsAQAQ1VwGGpL3GqoqkIICo4oqkoqIZut9sZVGZlVw9XLPnLGIgk68jClVICnbpwWjqrn75n7twVIOWqEhRM/cNc6zA21ijD/88F2MUSQAITlGz8CkiDHorFsws8lnBlSLxWK/3z8+Pm42u6Zpnj17NpvNxiG+ffN+HzZl7dXEzjwPC+YNw5BsUe2iLJ6cqXJSL5vTh6U6s18tljfee/ZOVS172RCDRDDZqx8HADBwQlR2tOyapmnatjObU9d1V1dXy+Xy8fHRAiTGcURg71gRRPXd+/vNZmPxGDHGmGzb0nVdFBlCv+u3j4+PDw/rYehVwfWjSY2gNMagSLPFcj5fxjjyMMQYiYldQ0SiKBGG8CgiMqgAMqMCIDEi9uOwWi1XqxUR9f1OQhhCHEKqLM/kTckMAEzee7/b7RARSUtuZEKyUBMcxqhC5FzDxB5QYgiDFZYHQmYYxxhUZPTev371ervdto3f7xfsXdM0Ywzr7fb+/n62mI8S15uN9/7nP/vZ4+PjN998w03jGhvSlv0kK0Sr3C2CyAJsziQAoChW9MW0bsktXBUJQcHSDqOYq5aCAoNTVYgRiCBOapIYABCBzFZEkCuVsyZpL/GL2VJ4pCOd2EpLT3IUO4eKDGaytJRyLjlwkmNwqcYCsktEM/nsZOfNXHQPEFNm4wx7Ux8y7B3H6VsqO8s7rFEBUHN4+7T3czppAEAUzYpEqSKPJdEKUUUgRRQQQeRQ6lQk73JEREFIOZBzZDFMTMB5ensJ9i7R4YvngQHAFXp3RIIp15g9Evtq5CsGHwAQrSo3VlIO6yTlZMcKZGaSSSmaatWOHDhQ4BACm7E6sHMOB3ZuHIMMHJWQeRxDDCFoND+rqZ/lhf+E4PepQrR3k/vy2euPpMBLUyUhQCUgQl4iRnpQwTlC9czcOt+2Zuc3A6pCpZVIyelr2FNVBSQr82huR1Or3+sg1lZTHXARKcqT4jmW1owpM4kRWQWI0bmmaZrWtcXFACkAKwB4Yu8IFVQURcdxJCLfdmzZTAgtH924H83kZiUF5vO5iL57926325nxbLlcWtkay//purTA6rCtomwooXiIaGpGBJezpk3qCpsL0z2aHcskldVq9biHfh/H7c6km77vHzeb/X6/WCyapmm61kyJlimt67pu3DD5rmvmq+VyuXS+FZF+u314uDc/iLZtY9DHx8cf3765vb1d93Eco2rJGZ9C0MwX1DB7vV7HOM5mXdd1++iGYWDm1Wr12eevDLceHh4e
7rbm44SqpbB7N5/RGKIlXiH0Xbuczc3S2bR+1nZEtNlsNrt1CKFFIOeeP38+plp+GkKQCESiuTZCYpuUnXMxyjAOjryVtgDE2cwKS2AIEXKdW+OeMNdlfP/mLSE27NxsLiKEtLi6/vzzz3e7XVRdbzfmr9vNZldXV5999tntQw+ZI6kT8YxhpMwIYvatK5JfvZ0PvBAy0z9lfmG0CNEQAmLhM1VipClN1eTbjGxiZb3TD6gsHIHfqUYHBMBySVsgBhn1S/U6WFJCMsr+nJyi94goZ+kkIrKYPK0LVlRKTqvzV6rfFdHW+q+HkcdlTBAO6CpAKkmd3rR60ahhuoOAVVxBjCJJl5QcCJSQNAt1lCAQIPlZKhGfr6Ie9NPOX/TwRIeaYY9y2Yuaip1iXqJuRTQsWl3LFZl/RVUON0VPFO2f5RViHmKMTlyg4IgDhxhj4OCIo4s8YnTeez8OsQ+j44DOhRD6Ibgo7OM4jsMYQgg0hpjX4pGm4k+LfGfbJSmtbKqzHSg6liOPlWrNTeNYb0uLXEZECFaFNJaINGunkq4mt1shIoiAiKlohAoAxqiYPW5pktOQsFbpEFj0FABhCV5IsJcZIygrmNIm9Ijs2DvnWmfVdnx593mbamGjahyHfr/fb9YpZrxtmrZF50eVqGKB+d67/X5vwq4l8bJ8JUTu+vr6+voaEdeP281mE2Ocz+fcSpmg4hdARDGOlpb6/0/bf3ZHsibpgaCZvcJlCAAp7i3RbDbJ5X6Ys///j+zh2bMz7JmpqqsyAYRy9Qqz/WDuHg5k3uoukht9Cx0ZCES4fE09IqVkLTnnnDMieSFh3Yf/q45a3/crttM5p+v+1+crs+ScVrAMAJVlbb1zhZ+LS5Kqqdp9W5blP7UfliuSwtgvkNFxt9/nnEPOzDyFdLvdXl5eLpeLKffeO+dMURQAoAgm51zTVCKioJ6yKgBAmRVfL+l2u1nr2na33++qquKUCx8/fPJmRhHOe6TXax9IIeTe+6Zpmro2Rs+4kvT7a3eLOVlnyZrEuWp2Od3Gccw5IxKCISJmUBSacE6c9DIKYRyGoSjmb2SEDBhZdLOrqlgP7zwWdQYRiyxK92LG2204v3xhgKqp//jnP71ezpzjw36Xcx7C5F3xxz/8YH+95JynMEzTNCnJEhkQZ1SHRbp3BA1tdUrwHhFhgaPjfey93pKqOaAJusqQkogsajV6n9zz7LzSG5aEG+9NvzfrwLuaBlflFDQAQICGVH5s7W2StcYSWLtqct4bnmpPYVWW0xhHCtXcIOcX04l5XVIzIM7v024BEbFbchiDgCR1q9gIVugKn940D+eBCCw6TbKAX9Y9FU6zxeY8xkMAWUMg3KGt2qMipIQbxOa/+TNx/u7rDPLd1zM60rC3xTK9K0HwLYZz+zoAwBtBrI152yYAyMLp0U/IOSMuqQTONe6aleectVOPmYgSREI0QhijQbIuC9nonLMxhRDAzMqHa+o3J6Xf0O3/Zx6/9znWfh8NZDaGF9ufOiLajhP0LlIpyO98jrXrASci7epYayWOagkuIspegt8pHHF5wsxIqAn+OtuDzIrk3IY9PS/bM444vwfNxlhS7tfM8mZYUmydjdm22RljvPFEJIKKq0wpWSVaWIuAIedhGG7X6zAMh8cHDZmMyBliylEYER3TMAyqB3a9Xn/99Vdj7OfPn6uqyTmrXtc4BGXsGWPK0q/HVouksiyttdtZnYJWtPijzRW+XqK6qVq06ZEPIXz9+vX5+Xnkqqoq53zOowbF/cPx4eFBFa1TSmVZ7o67Dx8+1HUtIhg67x0g9X1/Pp9vt1tiKEt/Op26rjvfbtM0Idm6rj98+PCHP/yhj7OCKBGlFLQZW5ZlCOPqwKdf1HVdSimbXV3XP37+XBTFb7/9Nk3T54+fPnz4MIVhHMccg9apiDh0Xdd1ZVlqh6CqKtWaCdMwDIPy1Yqi2O12ReG0qxxCuN1up9PpcrkhUlVVZVErJKHv
e2PMvPD57JzTUxwje+/JaXRkNdIbx7HvXVmWWrPqqg0gIYTS+b7vkbGpa/3tubvFKfz1r3+t65qILuczLKPEf/3Xf2Vq9EorisI6JyIq5qIEKlwc6e7L/XbWNfP55lb2NlNctRpUiADuQztZlBu2hSOv8ZI3YezvrDnb4IeIqhu3hD0Bnf3QHMOM0YH0LDy2zW5niRZaZMkszPBOJJpFPb/f5DRmTlK3tQHAalN9X8nfrV3vduptl4thIbn/ftjLitwRSW8md8bc6064K8UQpX9off5d6MrvNkUTAFgQ1v8DuaOeAIDzeyw+ziOrt8NhgDmLylm0p7uQYUjQojGQGNhiZuIMwigZkYkSiyXKxmVLkXNKiQykhMnvIGeOWVLyLpFPJhY55zjGlFIZTIwxIkXEgJgI+3JKKU2ZU+TAkhLkmCULWwd3SCujmm8BxKztvnk2S4CLmmhA1FyWQEkXC91wky9uY8zdF3D78N9L7gCAOagePACKUM6iyJyWfM455jRXgcJpFsKc4cDAmPKYJ5w6QsRdRd77pq6895rLC8c8hqLeIQhylqjkzFki0zmLgMTq6qjBjDKiJBQiIRAhyUJCFsiAwSEQEVogO0s7ZFhEXJhzThomEVDVbZ2daXOGXF01CrsgosIWskhIowAhNlVJRJ4GkSySx2n68vX5dDqVVfWHf/4n54qUUuQMDAW5slANs8wIrixCjC+//DyOY1FXT09PDw8PX758AYAY4xCGkIMxpmyrpmlSyswRGZxzVVk553LK/TC8/PaiKthlWRZVVdgiSyI0SXrtiTrrkPw4xtu1u916X1TWtu2hRMTL7fX5+aUfelvXf/j452mapqEznj788Nj4siitR8ycnIViV+6berdv6oIMjczcNx9/+fpVHRsu1xACIGLqpl++fHXOldXj/rGy3gFASqmPce+nMKWum7ohjClnMMZ56gZfVOPIwuhcHWJ+PZ2FQ13X/6//+p/GcYxpjBIPbSVN6bzJHJGo3e2ETAjh5XZTkybny2Nbake0bdsQwul0Ynbt4QdmVpdBNaTtuu7l/HK5dFXj6g+fnj596LouxtEUYis2CBzj0HeQoShqSXjrxjhNnGn3uE85j5cxhEAozjlV0tk3O2TiyMaYPIU+zqub+Ml7Y521Tqyxxh6K1o/jeLqcQeLu4fE//Ic/D9P49evXfhyttSGOdV03dc2M/RhSTsb4wrsVXoeoznI4K/mT0UUNiQQMA3IGACCr0uFLb0PNKQFSZBFB8sYyYEbKcw8cc86ZIQFkBFZhbxZ2br7ONQUHAIAsd+bPe1YxzdiOOeBplxEFjCVH4qwYw5bEGnTWWSuFXUtANEaMFevIGAJkIBK0GYlAEorCI4yleX1bm+SwyKIgziJlyiHW7pgFzjkLMCEIgiCzpJw0MwAW4NmzM6fMzMZsxAtxAzVXvCeziOQNPx3od6Alb6MbLj85/WNhj/I945/rb91f2Sy/eO/AZYywzvZwmdi9K8nXCd/25MnvzPy2OdTaBDNvAoBsqroNanTRqUPEEijnbDAmpIQUiFSZwhubEnvvY4wx5BjjlCLHlDKaLJRTdGwyxMTki5Q4pjQfyoXukIBJ5lHqhlY+b7m1bg17qnGiYc9u+of3Dd50MN4FRbexHdkezG12+abmS6LV7b0E5LxOm+5J2ZKQdt2UUjKAxhjvi6IoCu+spcv1pBc14jzkU/NlnQXOY3B9HQEWwsn8OooxhhmttRbmyXzOeSXbKkyGiIg8gBDcC/Rpmvy8HYW3XjtaAODIrYXj2nFCRM6cc+6G/nw+9/1YVVW72+mlolhNEclJ1bCyiDTNTiGXxpjj8aiiIRrzlIGgi3hVVSJyu928L0QJ2kVRFEVK6XQ6nc9nYGnbdrc7GGNCCJfLJccEAK6k3W5vrX95Pf30t78MQ9gdHv7whz+dzmfdu2maXs+nruuMtU1TXa4nSRkAmqbZNVXhLOeUprDf78vCtVVVeEuA4zgqXW9y
dDqdFLu/nAsiov/6X/9rCKHvxmEY+tPr+n4IZ0RCsmgsGG/LyhpvvddLcYzT7Xy5Xk6E8qc//emf/umfLq83Y4yiWOcOBBlrrfqqD+OkbdLdbqeVa23lcDg452632ziOq1zLMAzakLhcLq+vr7fbDefxp7PWemfKsnSGCm+tJRL421//ej6f+2tfVfVx/6Cr/zRN6fV1GZLdx6vOuXEczeIhgM6sLce+vyGida6qKl+V3vudys0YkzhzTMMwiEhRFIremhIoQUIN33POeVZxIq0jVzznfKsu9y8R4RYuP3en7k3OpSraeiksa+tGkXjbD4cN9MMs1lR6+2ynxduFQhFgGvZUVGkNe2ZBZhra2uzd55Tbh9lYLtxBLsascmL3VWi71n0TalbG4bqy6Udt16vfW/3e/eH9Rbg3UeV3qq7fe7D8Y+8Hfj8kYiRYwt4csDYbr3Q+u8JMcINV+XZvv615v42F707wCp1fUTM6H5ovQSbtr+acSXRJpZQSIeUsy8zPuORSSjnzlCLlPMuPOzExUoqccgSbEoecUpbInCLHzDlJPw4z8jizcEIW5oQyM1fM7KMuRr2JAYvCISJZVPYPot4jaMm8O+vwTeN+e1jsW5DW9lp511SZy7skOeewaPanlDAn7bPJLELwRgwXCJg55GCiISNk0Fgg8IfDQURw1jLXDEsAgPJ9Cxe3LQFgIeXJaHY846wAyHoFnij8dsYji0hO8zkyFg3eWVDGGOdcWVbOOVg0HYiIOamOz8K6NXNSPE2qATaOo/f+6empKEuVr3TO2cITkSxymkQ0TdNqrl2WJRGpHZ33Xn+lM6q6rvUzSeS42+lvf/vt18vlolT0p8cn1RMJYbzdbtqNrOvaeIyRX0/Pz6/nMSbjnHE25cwIXddfu8s4jjEHRLDeobHdy0vhfV3XVeF0idRta9u2qeq2Lq0hxcJ0/Rhj/Nr/NAxDURRN05QV6J72fa/tzaGfhmEIKa5ufwWxSAZislCUhVlGpP00DsPQ9x0wf/z48fHhcDzuS+dTXdd1rT5KDMDMUwx9f0tZ+r6/DT0z13VdV7W2N49tg4hdP45TdL6s6zqldL31RVlbV7DgOMVhDJlht2sPhwNIyBxjjN5bAOi7MeUAmZ0tlA3S933hSiLrK2+9Z5AYI6LMat0x5pzM4k2t+5gTgYiYjIiKNxbDIGKRXFEURVnl3Lbt+XoZhiFOg7W2KnypVJOslC+xxtRlgYhBbZ60C6X6LPbe5MzLtUpECGazfOkmvSGniwjapXu5yeB5o72+TWGJiDgvdFgwBhdZDpoNhb9RhzGWvq32CJAMWiRnZ0KCtdZ5swA752GeqrGs+cTmyT00Mt4DNm3CHM2V3z1csdKU3mI7t8F70+blNRC+naeQyp/IIs+2Lnv3DED+MQQm/x3+/Pf/4P5Ri6AnA0Dm+/IrGyXSLAha7cGSgcoG1fJ7DV95C3XBbyCUa5ozv0Huf7t2wJkZGO8hcAl7ROSAMmULksjknK3NKVFMDIZyzkjZOJtYTHJWaclU55xdTilzFshJQuacxE6jCEhmySnnLFl7h2w3YY9AFBKMiKo0gYu4EZHCEpHgTdSX7TT4m9gGG2jP3w9790sqybz9aUZJUIrKAWdmHSBvv64s/RpFtHpIKRhjnh6PtGz0zFMXFhFjt9uvJ4MBwFoig25xdCYiFXo3Jq9xOS/fjgsgjYhMNgtVAomobVtajAvM4v3mvYfMRGSIlF3LzKoN1l9eEZFB9vt90+yapmERnWC9mxfqV3/58kXrtmUNzcxcFIVGzaqqdDVXybHD4WBQEHEYu5fn08vr15zk4eHh8fGx8H4cJkW+OOfatlVF5ts0PD8/v76ciezD41NZ1uMQfvr1l5T49fX1dHklot2hbZoGUaYxVM7u2t2uqRExTCEj7nbtcd82TWMJs2CawjBOY8whc8y8DDuVZZiVo/b8/PxyPiOikgH0GGpIcCjjOPbTxEJA1hiHQimlOE6Q
ubCu2dc/fH768PDInPquU9NBa+00TcM05Zz7cbjdbiHOarbOu7L0GvI1neq6TmOhjjwVJqOVk5q8f/z4Uc+jc67vTnGIKUeOqePY3245RmMQWKqm/vDhU12UADgNoeu6aYo///LbOI7OGRWTm6aJc7TWArAlcs56r7ZE3joioqYu5u8qvCFCFmOM8aZATClxyjxHBUIwxhi2EoPiA+eZtwXPzJkVhDUHmTXSEN6hvH8/7M3LdL6jHLd3MS+p2HojLAUDC+NWIAQJhEFBFYBCaJDAkEUCQkNz8fk+7CGBI2NVaXN2lyXFrWzr5m3Zh3dUBNxfvHueIm1wpLqB27A3r2Z8X+HXmmd7J26mbipjJvI7EWsb0r6tAv+dYe/f/PBv3r8pNN8uvfcotiVikQWgNwSGteZbl+nfW9zXf37b8FyjJs4E5/kq0S1Z37ZaYBARCN+B5iwZRE23s0GTTSAiI+RcFraJc86JYZ2HZZScxc4e2pAEXMo5i4+RmSWz5JhzFv1PxM/eb0CAhGJxDntWLRlnjgxq+w8ASL5T6n17bu6H9Xdef1dDrw/O875ozIsxYjRr2KPFdGf9Q2NUmsFYa9Cqol5mll9++ck5VxSuLEvvrA4RETFlXrfKCABkvRuMoLBq6wqyMDIIZwaJ3T3G8x3o5nVWikzEbMR7Mt4QmsIVKaWcEiK6siyKonTeGqsKnBogmXOIU3+7jePYdx0AlHV1OByqqkkphRgR8cOHpxhjP40xxhRnLOUWpA6KgFdjHee05XXXoBJRzHeOw/PLl+fnLznLft8+PDxVVYVgzuczM1tvd9WuLKuc8/V6vV6vr9dbzmKcd7bIIJfbreuGW9+9PJ9yzsZ6NXAvS69WSm1VlWVVlgUiVmVROr8/tLvdDgCmabpeb2PfLW7jkhkPh4N66f3yyy9dP+pJTykpBVtFnAVnm/WUUhYYYk4ZvXNlWRe+EpFpmIBl17Rl4QrrHBlgqatqXzfXIYAKGMWomJcwTmGchmnc7XaHw6Fq6rL0xrgQxhCnl5eBiLS3qarc3vv9fn+5XPRQq9eENpNTSqUrIEvOjgymFLAhb7GuaxS21hokSTJNE5AoMjNJCjkkoWvfhTh2XTcMAwECsDGmKopaa8+6VmBRWzpHtnQFIMUpTmOw1hpnjXfe2F3dRM7MnBQZIWCdRURMACCRIzIa5713yj1NPDccEBHQIJESiubxjdyFhYmsLprvEOCy+M7j5rFWBWvY29ySUTSTtAQZAQiZRZ1LFr0SIjDGEoE+v3/sJuwBiiW0Bo0hM/vqab8TNwEP323VGs83r74fr9Ac8O4Jt7krqwnzm27f+nwt0O/NQ2XgzVpSggg4Y1UIcaZCvSsHZIZofj9g/c7L/6jfACMiI8kC4lvamxrwNgDVtQZew952Rb63xb8J+LjYN8DbsPxuKX+TLCx/SESA25hxbyCsJryIiImVw54wJzbGMGSiLAicMoMVYiZAZjY555wbV+ScY86JhQWZJSRmhrlJmFk4afDT6qfQ+QoAChCKASRQh8Sl9LQ63lsVtGf/ZlzYOrN9uA5gZUOE+ea0b4/SNnS9KfsYmXkNeyEEtEY1O3LOJs1V4PpXzmW9H2DV7URAROvea+wuPaVvaLMsgDzENIsFsKz7TkSVC3NfhRwRIhGIEKNmriIAYhGdMYW1hap8rTSsGaoHqFKZZpGx1+V1HIdpCqqfqQUcIioQoK5rjWqzeXqGdfuPx6OIKNPOLCYMzPzw8LAembm9wxxjHPtLSqGqCu/L/X5fVSVzHqdRIJdltbZJr9fr6XqZ+iEIKfo9Mo/XfhzHfpxCCF3XPT4+Pn54KopCm8ZFYauqMhwIBCU31W632+lv1bddlV9iyGDIWku2MBZ+/fVXnaKN4xhj0vK0KIrD42NKKUwpxhhS1CgOAImpNH7n3G53aHc7Y0zfDzGmuq6dtXVR7HdN29aFdYAMrENZXkdKyqCPMXrrdrvm6elBqXM55xAgx1DX
rdbNfd9XVfXjjz+KyO12u91us1iac4rA1Fu1LQsiK6K1YwlNW3oqq2LXtF13fX1+ud1uyEhExpmU46dPnzRp895zCuulWzhrjPHWrna+eh619NQlIuTEzNY5773PBRKWReFyDjEqUo5AbGWJmCzmJIyQcyabAYz3nplJVR30OjcGF9zmfGvIlpes3bx7JTc/SGSj9Lt2+dYw8C48IJIIMtMiM6w67WgtbMMe0Rr23jskmBnbwBZJqz1DcOcqvCGhvpnzrffsOtvThevdsrzAW+6Gsrhpy0lO76q97epNtMVY0rp4z/XiRtVF5O5EgRt5tt+1y/5fpKuF33AN3pYo779RA9J9trf99Tb+vyv5V1UOeFvKwNuG3rpNKisOi3DJ+k68m9MTLu7eRGQp54w5Y2IbBbIwZLYMJJlEKEtCYcEMQlmYmX2Vs1hWrU5kBp9nYJWoy4VkyAlknu2VcRDloLAg8NztFBDhGanhEBexAwBANFvC41yM8t32YvsKssjbFvG67+v9vxyWOazqEm9V6TAlbbHqNCvnTDFqBw+WjNX7pNdSFlYrdLRkAQ+HAywaeiFFWaSSkO8AbiZBBDPrx7BkVIl17aerPRlCclm8JzJqHU0irCU7IFky1jpnvTXOkDVkY4xafnnvLdJcOAKQCqBxjuM09v00DJmjtWa3K4nI+pkGZ4xROcHz+ayy98yaV8xAGGcohMBppq856zRAekuouibCnIQBtOcd09Q01ePTHoRSStfrVUSI7OHhQUSmEG+32/l86fs+z+53TZyt27U3Mc8Uf/jx08PDQ7vfaSyBZUhZIjpjfFnWdeO9zSn04zAMw8vpom1hva9iUr45PD8/I2Jd123bpixq2jdNkyvLlJLTNsVmZXg5j6u3HyLGGMsiG8KicJJyVfoPD4/7fRvDeDqdxn6oHx9UvWyapr7rlNVgCZ+eHvb7tvBWePYiNwTWYOF8jDFOAQBKX5S+6Pu+u96qoiyLsnAeEbVqnPPXDAatAGaJSAbRhBC77vTrT78aSwZBtQKmYfjy9cvXr18Xs6MGAAZOZV0fDruyLJ25O1q7ZTSFiDnGgccwRmOMjhiYSC8qsgYJScQQopv5P4kTAhgEVzg0BGRBqZJaEungGhdUPoDZhJm3YW9exNfB1bK+yQxORFBeusCM5wZEAeVXI7P6DQAyCaOAoSQswopiZCRjNhRsILRIgmC0ufoG0jK3SN+EvRX+YwHvRD1cyAxvKLuzu7oe0kVLZRYJXOLcm8inkRzmIGdVWgVnz1sSQa1D1C5QBEWy7j2iKjmv8Ual7TMAoBgAQRRF9QPMr2/Vif89Ye8fDYd5wR6KrEQInSne4w5vJnEZAADnsEdv9VnwG+Thu7C33bhvN3Qb/7Z5BPyuPeD98x2gQUpEkAUASViMBJbaVlEgAidABsmCWZizJPSGGTkzg4Y9k+6sOBDWyIecNTsuJ0sK/eKsNAYSUFVBRXXQYhOhm5TTm73blr/wNh+ERZ9te1jWd6745nc9ihSZmXEZbsKihznzKZczsp6XRfEZiNCg9YUty9J7O41RIDMntcO8R9Z810dFmgXiELGqCiRBAebETAJz1B+HkKzNgiJIRDy3iGXX1MYAojLzvHLSZ52HmYYF2o/1hXPLRSIphzgOYxfTHB0BoCxLNDRNEzMoXP52u6XE6+eElFbQYxjH9TrUY6iVoiIDFdk7S3whElHTNAobQURDznurd3JRFF033G63y+U6jKO6GAcGj8EYVxSVCrgwc1UVx+Nxt2sQTYxTmCIgeOsAJYzTp4+7/X6/3x9zzq/n6+l0mmJA0OGi90WZs/R933XDGIac5ePHj9qATSkNY1DWIBGdrldmVsoQ0L1G976sqrqqChEZxzHHZKypC4+IvjGPh2PTVEN/e319lcy73S4yq5iZYkFjjGof8fHjk9Z5IU4MUHpfew9V+fzyxbvy+LCvq7Yfbv/9v/93QN61h7LynCHEUTUVjTEsSbXcnHMA
LIzWGyLqh/H1dOYUnTMphtPpRTmRhTNtWz99+INyFpWY2Pc9e4uI4kXbe8YYBrBEOvL0gtrgNcYUlbcqUqIXT2Y1vrGIaJ3eaJcwIM5edEVRkHWRc8qid4hxDomEjM7FFVR1D3vwZl3aLN/3Gu5drfNOweN71Z4Bo/ZDnLNBZBKrZ/y79zuIXcPeXO2hwkGzJWMVnLLl6sG9wlOEJ33ncQdLC37T3QEQXJqcCOswb37Dpq5dF4r1n9vHu7Ud7gOd+a9U1WXzHtwulf9/Cnvb7uN2hrd+zNpxXPbXAIAdhkHXI7PI9cJsQJPNoq0Oy+L7rvKATWm59bfbtkwzwzp9ScLrtRVzopX/J4wr+DhPRGTRAN0VkwlwypmQrDEqEUcgBMKCFZXCmIWZISuXRkQEc87AWYCBhTgjZ4EsIm1BS9hjFEEBZEFd9AUYeb1Q5r0ulhV8g25an/Bb+XYAIOPfEdL1UVincAxZiCK672ovAHwX+hKaxSFDCLLEvLXy9gZ16OW8qeraOcspX68dohhAJCAyAAyIwsDC1nrd2sQCKQOAQAaQlJJIJgNFUVRFCcDDMAy37rCzgmR9McV0u50f9senpw9DNxJp252UG+BsAcxpSvt9a4xZ83fnXF2V3vvr9YyzmFny1ihfwlpbFGYYBjRU17UxbhiGYRwBQJ16knCMURh1TWRm54x2L0WkqqqmqYyZm8ApBb2cytJ7b7W+729dzrmoyqZpOEPXdbYoH45P3dC/ns+n0ykndmVRtTtjDKEtyt3Ly+nl5WWaJiKoqurh4eF4PPb9LWcBYGcICEvvyroqnH9oLSJez5fL5XI6X6dpskVZVsXnz59vt+5y7aYpCkJZNbYscpZxHLVtiIjWWpVGUwp513WZhYhCipfLRXfw8PhBe93GGBBOOTrj6qqdpuHDw7Gqqsv5PI692uqmlF5eXnLOwBLGKYTBe/9w3D88PJSFVzkeZ4hB4jQysyU8tI0xjoSnoZMUd01lDHrvSu+GYRJh55z3hTbojHGYdKQaEDCNKeWggdBXdVE4wFzW1dh3RFQ3ZVmWj48//OUvf/nt66/H4/E//5d/ud1uOedd3fz0009d1yEaZ6xmJ1WFKSWbpqZp2l2r1kJFUQBwjNEp1WHOv4Ws0bK7cEbVpXPOMQSPxltrCPNCIDPGoHXa3BeRwvv7UrhR29sMAbcsHVjMvgVRPeEUrIgxJkS01mg3L6WkSrfazSIyAEbErguC2UigrWEPAAjdcg8CLUw6AgEEi2QQiJD058I+XCu5Neyt6Oi12rtXFyjbRRiWmg9pXvdEqzpCnFs4gEQIyk/MCGTIEhrOE4IQmlnRVFAMIFCWmV6s6x/Aqq7JRCRZ6RQKShdhJrLbdXKTgnx/hrd60/87wyHfwxts67y5l6Y5io5jRbtcBEpg2Jzy94QMeYvkBFALtnstuO1nbnus37Zc371nzSk0R1j/Sc4Kq5SMJWECi4iEaAUISYmoWZABgZkQPJYikhmYOYkYZsViFfPmJmDBnDTsoUA5zdEOWFBEp4bAUlgHAFsDZUYmgcCoUjcqsgosq+yNPpfM629RgNGsKK9tnsgbTc63ScOblOoOdDbGWpsXDR1YqoGcszFudoxjDCECi7IMGMXMCqW01PqzDurbdIRERKUmiUCBPyqD0jTNNF7669DdxrIsfVGjcTFm9QrfN/WubZ1zOQnnqamqtm2naUBERKvGPYSzY8DY99p/1HzZeaMCzXEadUahO+WcY5Gc8zCcrbULAN3O61qMOU56GM3ieKB/qJFvQUgmjXkK7avrWhBuV8Xut0D269eXL89fU8wiYnxBRClymBJA/P/+73/RBKUs/dPT0+PjEQCul9eYZ9HXuvS+KtuqtYV15ADGy+V2vV5FsGma3e4whOl26/ppdLYoiorIdv3YTz0jEJHXVq1zAIDEakIUQgAdZIaccx7DXKU554axq8tKRPruBgAP+50hGG7XH3/8jIjXy6nvewBQst00
Taq4FsbJe//4dDwcDk3TqKKbUi29JbIGUVIa45QYrbPGWU9EzhbLepSmMRvCumqKomBm1d0Wkbbc6eo2zyDAixciKpxXQc6yLJuqLkuv3iB/+ctf/va3v43jaK097PePj4+3220IU9u2RVE4VwDANIwppb7vQwjcX6/Xq+Z5ZeXbtlUqURHKRdEbV5Nx731rnDEmg2iFRygaM4hmqWZCJEA0pkC0bxN3ecvWhe8hOVU+dN3fVdJFr7RVDWpN00tXr+sYbzxh3i7lmxt8caqDufcIBoWAANHMYY8I79Ueyaby28z23sW27xZk28oMtI201QjFe9X7bpHffuC7NxBupn2bhhbDvV5cETbfi2rff/1/8vF23LZ+Pn77ur4y58i4FZjeMBa3A+F5vaY7Murbw7RGtc0Wvd9tfUJvFNE2Q0RBQRAQK6gXBgAQEhmXEQ1SRtI+p1HGBnoAyOroLbLijw0iCoAkyDw3OSWjQGEiwJwSIAuxAGcUmN+/kVdVH2DDZp3bbWd462wPMq8TPhKYBDTX2yY4sPjSwVsmg4jQW9djETFs7jePUvuXgAcAWajwpS8sAOQQU8wAYCyCKo6RkGqyLOAjZsBZUEW1W0SSEhuMqtKnmLs0TiGVlffeu6LJCadpygnLoui6KQzpeNgVReVcgWBiyNPYQ+YUxhhrVZAiREI0y9LAKSAKcxadxNi5ccCcyrLULVFKg4jowq37zjoBXeADOWeD5KxT6KZzzluHiAZJzYaIKISQY0pxViKedTKFVp7sMAzPz685cwgpMRSEFs0wDqqBGQL/8Y9//Pj0iIjGIoKklOI0dMNQet+27WF3aNrWkk2cUgjn80tKSRCMseoRQcYURTGEKcYYhlm9mojqZlfXdVk4WdRqYowaqFJKrix1mjuOY+LsvZ+VU7IZx8Ea87DfsaQw9sbbjx+OVemv1+vtdkWksq7QUN+Nt1vHnDllAGjb9vPnj4fDAXIexzGFsSicdwaFp6EP48SSUfCwPxRFoRWzCJC1KaVxnLz3xpBByXFShI4mFqchwDLXdK7wzpVFIZKHYQBBa8vKmar2zrlpGm63Wzd27aF9+vB4PB5SzorlSSmpIUNVlkREaAGgLIqcMx12AJBzTCkBUcwZEiAiD5JSinEiopgoJqfhp/pwRMQsbICFjA6qUcBZK2RkCSeWCK1Dh1Oa7sviG58ZTaHu+K9to093We/TFTG0pTibRfOsdPO9mReV7XcLMbwLe3PqRrBASwwqkpPnas8gzfM8NAbtVpts0+RcU2QiWmwMcC4e1+99S74CUOOkWcQJCGG2ZnnTiZWFeitbs527CKUREcTMzGaOK6v89LzqKFFkHseIzkgRBBa8p0aK36Ol/2PIT1l4fiKqcD3HIEYEEMG1xF9eV3EynZd8N+xp2/NdhMOl2lubb78XgZdfbdroy+vrcn8PdpuGOgARCACRqsogARIYOzPLAREpiZAICwIYAGCdiwgwQ5Zlcs3ZiBVOGvOAhQAKqjXsqTyPEX0bAAuyrNVeFtHUyLLdbOcmu1mHefae6wFAfguHXv9qhau9O2jbmwGWTjIsmiwwC8LyepqsqZ1zhmzOiRlyFiJaTq2gkCgJ9X5VkaJQRUTUJVY5dtatnAhddKYwENFxf2h2R+snZEFyKcScGNAcD48oOaVEgGVZqvQ7M6vvOQBrXCciAgYibQfoXWrtMrZkNt6oAEcIgRlwMQOq65qZhzCllBKxzvyIyJtZQhOWZIs2ehyzydsyN5UZHDgURXE8PGbhvh+v1y6lDGjQOEQZh2maLteu7/s+Mf75z3/+/PHT4bCLaer7rr9dcs4pTCSx9M1xv3vY740x4zh21+swTNM0eu/LojTGRc7TNKkw+jiEYRj6cULEtm33+31d19Z4ljSOo/oWTWHmoqx31mx+ZGe1MGtt6MbC27Yuq6Lsh0tGqcvyw8eHL1++9H2PSO2+Lcvq2g3XvruNoxIJ2rY9HA5VVSHLFMIUBudM
U9VF4bquu17O0zA2TbM/HNp61iNNOSBA4UtvLIk1BmOcrt1FB4SgM1TvjKmWexZSCuPI2mMvS18UZVWVzhvEPE3j6+v5dDpN01RVVV3VMcbr5dZ1XYpRFuWd0ntjTE5SluVsJq7kTm+KpXfS9SPnWFWlDvCMMSbi6p9cpkjkAFFI13WRzDmLcw7FsLBkYMkG0Viy1ia5i12tTU4RQZyRnGvSv0Qst22urDWcXt66GXPtrvcyj9uWmNlIgH437CFuGGJL2EMBJLIIZuEKL2HPkGx5eyt0k7Zkhu3rvJntobwvM5btuf9zDXvbxf/dT9giOXGmd+sergFsWcGImWkTTXT2ta2I1oz/d4Lb//gs8Hvdyu93Me0WYfju11q1iGzwEUs/mt7kTf+ux/ZCQFzpACIiZjP8nFhWSQFCFCRBK4gwq6lYQEyAFjAxk4jXC3T2NF+esyAKgqPMwAY5o2QRRgGPBQCAZMqzOgQIkQDNHlii3WIGgVnD837it5FvpXLrI2dgBhFx8+206XuAVp8EKAILThEWObvZHRLXpoqR+/xvBSmtSUZZ1MwcY0opZhYgh4ujngijEIDmPytvVdMgFaUBQ4Y8iUiYJhERyEQzUwGEc5Lnl+uHRy9CwFwU1WF3THEyxl4uFwI2SN6ZyhZl6Qu1I3ee76Iqc74nIooVNBadM2XhNUZqhWeMybPuqE77ipRmwtV6HeoHGmOQGJBZ0rwbPCvsMXPKohFFGRF6re78nmheL+MUxiGEEAXo9XRxrkAywzC9ni9TjI+PTx8/fm6cm/rh59ulKFxZOUAzpFFyfDoed7vdrikJ8zj012s3DEOMef/wGGOcUqIMghBien19PZ/Pr69n7/3ucHh6ejoej9b6cRyHob/crsqoY2Yio91pIurGEQAIrdoVqR1gCCGG/j/86c/emV9/+jlz+POf/tjW1fV8GvsOEaq6LIqin8bn15fX640ZLOTj8fjx01Nd15DzMPQCufQFogin7jZ2XSc51aU/7tvjYTcOQXtTjkh7sAAAzilvPYQgzKVKhXlPRGNSTi3A3D9fsn7jiqosqirGcL3dbrdL399iyk1TMfO1uxljSl88VY9hiuM4AmPf9ywoDJxzjPF660MIBoIz1hWu9IVxBAhoyKAdptFmy+AKKlCIRaxF59zQ3bwvyRoDRggwC4NIzJwykkFBQBDKGcCSA56hJd9We0uqfRfZwFkyHtckVVmjPItNvnGWXvuZkkmhMoTaCPvdTtgc8yDLjLGYx3AGBYERwSAtYU8UpUJEJBuVGXxTlm0C4JZy/r7DuYBZtP0IsKn59P0iqrSSl1Gl3s6yIjkV6qnP89zxNoiIkEVm30EEjXOGiHRVYTZESJLWMLmun/8L+5wC23O61HkaYOSNTBojAWLmt9Xeu2j3LVVl+Zq8zQvwmz7yuyf4exqem03fmt0AKZUc5mErGUYyYMAYQGI0BkgQQdAIgyBIAoBt2AMgFCADJEKcQQxxlpwBGVkMFUpc07Cnsz3DAMx455+IAWDQhts9+9uGPYUOwpIV0qJUYumuw6RdkXV6Khs+0HfTAljSrjvmZXMXadfF2WKaphQ5ZQZAIvWL5ZmZyqiaoOuXsGQ9+Ep9NcaQsYgYEGKMMaUYo8zFEwCAMF1vI6J4Z5wtHx4ep2nozicy0BSuKL1TkpFk713TNOr9NJOxSJRmqJphSPPdu0mlZ+hmzrrK4Fqr3W4352ahGIQZzoCIztz7DetgQwngiKgrtXNOXXhyzoUrnXP9MPz886/ny00EAsswTGVZd13f9aMg1e3+qaratq2qejyfx6kH4MIb4AycSufaR//4+GitBcl9dxuGIY3BEJRVmbMYskLQ9/3ldlW9UK16Hx4ePnz6pFDSy+WkqiVjmHTiSESARjb9alg8Iqx3OsgMIXx4PKQ4jt1IyPvDvm0KSbG/XcrKIxEavHS3r88vX18uglDX7Yd9+/Dw8PB4EJHuchmGwVlypQfh6/XW950B
PO73h8OhLD0RISTMiBYrZ5Ok8Xad0sSRr7dzYYu6ck3ZlE1Z2GJK09RPOjEHAGbISURAZQGYeRqjqpueTq8pB+9tVTXHh4aZU2LVWMk5q5iOJad2UUSUpiAi+h5v/bywIADAbOFhbUqTIXLOqGHvegnFaUIRz6UYRkWVCBKw5AzAhAYRmAVZ9Mokt0FWb3J/7TSstNs1j9fQpimaIvhmpfXF1WEJh1mvWyWSmsVPYA0/6wWPK45aX1dghYL7RAlGgkCo0hmzctkiBGiM2VZ7WzWWN43TN9Xe+xVY3iwvmw0D2AybtlMneNvkhLe8vXXFUyjifACR1gmoBht6ywLclrn31f47Yex/pNp7U7B976/WrdLIY7cNorWxuXJZ1th233q6v7LNa9ZU6F14Wzq5dyeqb8Pem0YoWUQUBEESJEAnhAwE1hMQoGEkAUQBYhYkJarwHPPvn2oRFcMJklEyMKNkkGzQaUygLCBze9Mw5BhUng4BlJzLoP0KB2+q5vmJWcrXeSGjnEmBmrTeNqgCS4sy77uwN5eDb6+AbTYHiiHbJHfGmMRa2tLmHqb5bBMKIAqKABJpR0dyZmFAmb18ha3gGqg82xhjDkGhkohYVm0IqWmqttnlLMMwWktFUTw+HUtLzhlvyDpTF76p6qIo+i6ujVlJmePM9yoLD7NuBoeciGbzTO8KNW8DmKVJxmlaD9dsz7Q0SK21lV3HIerVgnEK4ziO/VCWZeG8QXLOVUWpYQ/EXs63ry/Pz8+vXd+LAJMRwWZ3HIdoTK7q9vD44JwfxvHr86sLfdu2dV0KcEqjM7jbt6o9xjlP45hDRgZnrCCgyOutUzjJ+Xp7fn5JKe12u+Px2DSNDiCHrrvdLmq5gGi0Q6tnfwpJW4jMrEQXVXIDQtWqrqpq3zbPX74wpx8+fdzv2u5y6fveOUvOssA4Ta/n7nS9AOH+sD/sH/7w4RERFR4ShmFdX1KMAFD5om7K43FflxVLUiRkCpFzZrTD1HfXPksqXFkVrqnqqimd8WSQc0xhDNPY7g4xpu42XK9d1485s7XeGr877KfpOk1D119jDEXpvC+99yEEPVkhjC8vp9vtVrryeDx6W+Sci2J1ZER97p3EGAEFjRGCyDknQETVmDUWETHHENMkwSCiMYgCnDMwo0VLKtQHwGIMGmuBKAsiIAkgi9LSYVEdgvelxhu+MgAg8rpYa+RbZ3tLX+c+4bPWEst25V3vZd4YeG3DnsF72APJAMqPhW/DnrFoDFm2tDpr4lICLpXft9UeGJRFIhGXCu++y5vFWUdB2yB3/5Bvpn0rfGxbH5u5IcwAIBtexFY0Zj2A/I22/v+SxzawMcLKkVv1WUTFqdfKDQG2UtTvY+MmRG9rNVwInutKtF2v4RskJ8D9xe2T7WP7J2QNzHhRYwiFjEEEtGRtBmORMlolQarjhGHWsLd+l8yOiArxypRJkAlIJIOQc0vYIwGhNew5Q6vtqoY99T/k35ntbbdcD8h8KfB8GWmSuE3KzNZiaskx80IgfXdAvg17GgZut3FJUJzytAVlNmnUDo8eSFajoXkLV0sHIrJm9voqy3JmT/d93/f6hhCSiHz82B6Pj/1wOZ1OP/zw+enH/TB2l8uNOe2b+sPHh6qqkKQfbs61uuMpJU5Bl6qyLGMcAIA55ZwRpCgK/brLueu6borBGKNiOpqVa1dNCLWWlcXrztG9q6zAEJUU0amYcqUBQPHciBiD/PTTT19fXsuyrKr69fWcMT4cP/7yyy+79uGff/yx8NUQwuVyiYm1cHHOZU7TNFSF+/DhsW2qnOM09jFGjoxoysIxwzhM/TCQrc/n8+vrq1oWPD09HQ4H1bdUjWl1lNQNSynlxW9SaQk6NrPWDiHoaXXOJc4iUhTFw8PD9fy1LH1T7euqinFKOYBkYUkJUua+H/px0Kjw8PHDfn+0hs7n8+vpOaXUlOVut6srR0Tj0DVNs2+rqqoIJMYY4hhjpCzazzTGKFWx
aZrHx6OWpCIyDMN4Htcu39/+z/8zxtT3/TikLFxXu6bZtc1ep7PDMAhj0zRt2zpvcs4xjpfLRUkORVGICGSYpsmS217kKt6NiGG6ppSQwHsPoCinvBKlkNEYo1eyjkKdmXUnOWtXE5DAolVHNGctkony/iaFTTq+3nrb1Wl9c85hu+7Thsa39m/0hGpDIo3Tds1cZ4Hrerj9KFh8NGeit65gS9gz97CnywUREcH7ak/p6mr1vP385Rvf8/bejpbeR4vtUfpu2INNpYgLknMJLfcYtkZeeYcOhTcfu/4t/q/rc76LXN+mMt/urx1DNhlsBmPEGjSGrRVrLc1NTiYiAwhEmTMYQ7YQAKBZw1TTAFhbgvhmZwGAtd8NDCAGVt6YMnQpoQATEKAQkEGAbIxaywJ5NiToGC2jUfEFFkQAI2AQrLI0nDb15oEqqOqYQOZoFQlsAQENg4ABFh+SUjwIBNgSC1JGnNGDquGcJK0Zii00Vt1BNzrkWIAn65yPdEgOQeubrB7WSCRIAJJnuI3+NyuUCojVjq5iTThLBszGCAGSiIQ4elupgUTT7G63GxoWZjFZIAtmWZBMelEpWUnzL2IAgJCUCmNgNl/GJAwZIHOfGG4TM1tbVLtjZa2IPB3aw37fthWxkBUjMAxDnkYOU124jw8Px7ZxiOOpswTe+5FfnHOArL2+oiissQjMHAEASarCq3z2NE3n305t85A4T9OkQ5cYJ87Z2SLGQGgQMEMuSrPf1+ol9LVLcQoCqTQOJU79EOLgLX78+JhS7vubtWV7OBa+unTdy8vrzy+/IBpsyufbgEz18QMA3PrpTz/8U9u2ZOByOXXXk0WsPSGGch8BJmuK46d2vz96X8YYpymnbNEUxkCKfB50OCdgysv55eeff75cLofD4cfPf3h4eADClNL53BGRKwrDnLPknMFa50qIGELoh4uqP+/3h3Eczufz2HWfPn0qSzq9/goAj4+PRVH015cS02F/qOs2hNB1Q85CvgQARMNxQjT7ujrud3VdN5X3mF5+e5mmyaHsdk1VeEMCKRhv/vzjRzJgkEHGnGNOkeMIOQ+3MQxDPwYiuzvsf/jhD/X+4F0VJHe36fV0uV7jOKWUOLIw86nrmXkYppTSw353/PRBQUx5ikPsxjQURdEe9mVZhhC6YTIQvHUEGEKIcRRmApNimOBGREMOE1HhimxydwsiUjhniaw1qk+NLaqhrsZg68ham0xFFK21zpVWJu+8SB7DCLk3BAZdFq7q9tY/33qu9w+H/SGj6frhchka+1FzTu88GiOiANFoLW1X53Vl9FTKNw8ASCkJkjWGcZnqaZwzcxGjqFBCQbonx/eO4qLFKUq3AF4NgQgyANiZ/wdEK41BF1JgxAwISGyQrc0IIOw8AiFaAFWJIW0ACYnZhhlYBh6LPguAzEL8Mq+fkjNnyAwMgEBGMuSc0VhClcDJKTJDIjRATCICwsIIjPNAHgAgi0EQBEZgyZmYQZhQInYiGVGpIEmFGJlZ7jKXsH1uFijeu4hFb2VA7k+2oseCCKi2t6DFpY41dT6pTV1empz3tuSmNDaEc1OT5iV42cQ7lGOt+bZJwbbvDACzWBe8T74A5+izrYzn3xISWVW44yWSKjwd4T0yZ77Ivgl7hEIChGD0TCOAgKAYdQdhARBAMQQoCsPQjFKv4LlLSURg3ii1w4IHWyZ2d1FaLe+shZyzWSLrXexmUyluMx2Ed6IP94xp+1hbykKIgkKIjIhG1XY2LQxAARXaUIoFfcPInPNBQ9M4AYDK8P/2229VVX348MEYcz6fvv72CwGUpa2cTTGEvvv8dPzx0394eDimcRDmfbtLYfz55593D3si0uUpb/wiZDm72i1XIRVYUOBrU6UoCt2Gqqq00brf78uyBIDb7RZCmMRVZUlih/6WptEb2u8OztDz83NdN2Xd+qISkdP1+vz8/PJ8Ik9DP47jRETWFIjoXdnUO+995hhizjlaaxFFM+i2
LcqyrMrGOccMXddNY0wpte1uHMM0TcKoSoOXy+V6vd6Grqqqz58/H49H74ppmsYwMfN+f4wxhlmO0onIFEMI4fW1Y+aidGpxfrlcLpdzCOGHH37Q/o+KMitbjpmPT0fvvQrNqCgazFQ09SCM3hfNblfXNaJJKbEkJLFI3hrnjTfWe1cWLoSxcMYU3liMUaZhvN1uwzCEMTb17s8fPx8OD9b5mPnWdV/H0xRzShxi1pPiPYbMzJwwqif7fr//4eOHtm1l4ZzUdf34+KB7ocAi731hZ5+Hla3ojDfG1EWpiZFzDhj6vieisizF2pTSNI0xxrquy7LUKkrbD0pgr+tajSPO5/Ou1KQTNdla9HhzWTVlWSKnsetT5mZ3UP3VON+n90BCi4j5u2pmvkfSoqH/loG3XevWnyIyt+W/94BNc+s+D9IeDNwBJigEC/xyrRfuN/7bRUB/S6iMoTVsw7//sa5F33l925MD2R6f7dL0biXfTvLm0LXZnrSsZLCpGv/Njfy9Ku3bftt33v/vOBo25GRgTkyyAYXYrVW1ZUMkmTIROeVKy1YCdX7AMtv7dsley2HdKv2JM0tfn9zDniCS2r0iibGCBsnNYtGk2pk093ABZsQRWoClUb30c0kABA0DAhoBAgQWEETOzloAQM02JFMWAm1vMs1ZAiPg2qwQ2iqRvwcjySxuN/PTl0t8vmQk3xX/RBLc+xK8HAEQROVK6GW3XmIK4JovdEO4Tk/RqDSeIChcFQCWqeQ9wdEPZ5gHf7ABDQkDCqQUcxYAmOI0hQEI67re7/d9dxOROI1xmm432dfVrq4Pu/0///M/W2PGcVSAweVysQSPj4+ZkrGoYy3FK8bIS5oCajSTc9RlUWVKZAOPVmQjrOJGiOrTdr1e+77XLL3yjplvajlUV0VR5BgBsCiK3W4H6G7d8PL6+vJ6vvWDYRyGkRmaeudtwSze+aZpcozDMKYwIal5E+mS6m2uq7quaxHp+zHGqB1XbVFO0xRDFkGl3KWUdIynTq1DP/Z9n4Wdc+qGqiWAVnuJMzM/fXhQWeecc9d1z8/PIqxkg5QSGVC9FRFBlKapmqaJMXZDf+u7nLM1Xs+nsv0QsSz9YbcriqIbhr6/GQTjZo0652zhTFEUpXdV7TimaZrCZZqmIcYIQlVRf3jal2Vd1zWhmWK4XcdrP4xTQONmqTlrmHMMKYQYQujHPufctu0PP/xwPB5TSmmcjHFN29Z1rQfqer1OYW7evp5Pxhi1oDLGEZG3TjvAaKhwTvNX653k2WeDmRFn40ZrLTMw8/F4DCEo/UN3XKVK+/MX7aBqgAQBQPbeD8NQtjtPbhiHKWdXVGXtrDEhs4gASQYBYDurn8131lvEAYkImjccvm+f01vnuXUwsmU7bGcZ63pIi+qKhgaAWd+SVOV9WTPWsEezqgutc/0ZujKHPVQ4JehasCz23w0qupPyFpyI+IZUsH2umykLhl+1E+UbRN6c8ev+kFn3VPj++ZQRcP4E5RBrwq4rl+r3oqJqZ0jhG5rd+uRtafeOh76+ci9LYHFgeHdA9J+Lgr4KnPM8ibVzqUPMbI3RXiYbdmQEF3rWcjJW7O827K0gz1mjUmuQOamZcTU8RwBAMGCMIJIqlKMBskgkxgiQYlvEWFqqOtxqe8516rdhDwwxgSVmIyCQkAGNMWxISZo8+30QADHoudJTuw17jFsQyqY3vVzW642DGzuSLJmZFXz87nLEe6ifT/C7U7JcOvdRxHo85zCsF4gGTTYLHzSjmDkE4iLIgCDMIMQw0/iXSbuEFEtfhBCu12tRuj/96U+Ph6OI9LebQb3EwTu33+9//PzhYdcej8f+eh7H3hSeEEIIriqOx2Mfu6IoVpi4WowaY8qyAABdyFKaITPe+1s3IKIOb3SyhYjK2tYcXO0a1HW2bVsmp4rWpbOmsIUvUkrd7fbhwwdXVCnLrT9/fTm/ni7DFAVN3w8AVFd10+zUFIkzxBitOt9KLqxz
zhiLZVm0bWspEdE0TSEknWs6a53zX758EcGcZBzHcQwiUpbl4XDYPxw0InZdl2Im5X4AnM/nqqpcUTDzOIYYo3G2LEtjC1Uk6fte3f72+93j46O+v6or/XZErOuybdsQhr7vb12vGmbWkTKlUo7WUlWUdV0jyjRN09DFadrVvrCurLwiHgkQRf1GJIxT19/GcRTJRVEc94eiKMpmF8bpcr3dbv00TYkByTlXMFBmSDmFKQwh6l7HGINMDw8PP/7448PDw8w/Iaqq6nDYhRD6vl/Dkp79aZqKovKFca6YVwMBnYoVRZFi6vu+KMrHx0dnbNd1S7XE6libUlKWZ1VVTdNkVunwMwDs9/vdbpfHi2YAzKwTMrTonLt1g5AxhS+cZ4OcwjQgM4AvEWBFzxIgWVDFrDVoLTcawyJdLZubZW1Xbsu4dU0wiy/reoNvo926Nt7RyBsbalwWK51QGhAkWUzP5vdbY3GlNs2KnUjwpiLEtzXfu+oKvolz24V6+wps1p/ZJO5trblN+vFOY7sXNvj2AauesLyBzGy/d1u6bY7um19998hvn8t3QuCbIv7dOmxVAwQgGRAScowikQ0bMZZmVCciWgMiwsQe5+bAqrS5rsjr/mxTAx0Rz4d4gziSuc+86AQAIBpAo0hOUHMRsgIWEAkpw1zfwDts0kpFWcLeLDCNQIIETFrbIQEJcrao7AgGBBIyCCiCBMh6jNQrWd3V9Zy+wV6KrPkIi4Aw3Z/LBnNoMjFoWoMEd21ozoiKV0YRRq1CYRF6X/IARsjCAMiwmBwRAWLKWayBtdojwaWxKYuNMeHdL0J1ZAgpC6PMligiAMLWusQcc/Jl8fjw2Db7fgzPz8+HIk85I+Khbf7w4+c/fPpUeBtj+Nd//dfHw64q3DiOzprD4eAMPj8/7x5bXe9ijCkFtdLeXtwppRjz7AbBAIunsf52togLgZmbptF2mVrI1nVtjOGUQphEpPAWZfaTana7omrGKV5ebs+n08vruRsmImudR6a22ZVlSag2xZYljf2tLEsAsZasJesIEZxBb6lpDmoYpDFPRKYxppTP5zORlcUM2VpbVaUaJK3LkJ7HMUzTNKnbrd4XWoioL8vf/vLX+arIXBXFYbfz3qcQYxjbpkKBaRhFctu2VVGiwNyKDFFzAiJKnHMWS6Zp2rppiGgch3EMKQVvzWFXzVMxxJSDpJxCTBOfTidF/Twcjgol1Vvm5XSZhuHW9zFmIrLOIRIDXm+3FHkMoR+DOr0j2aIsP3/49Pnz58fHRz01atRHRFmkn8L5elMVaWstEgrz09NHQ9ZYQiDapI+73U6VqUWkABnHgb33pS+tzzmP43C9XlNKRVEcDru2bU+n0263q5tSwUp6VXz9+vXQ7oAGAnbOGdWnTRlG8M6FYaScq/ZAzoaUmSfvPROgJn2cGDjrWmxn/e+Z4iX3uZFZhjICMo8w5iR7XuBZhADnRH72mtO34TIjWe5oEVoXxmWcQ0rug3udtIY9AlzBEmZ5I5HBlaIw2/W9D0lLKUSLud77PPtdZxI3keNNgNygOgG1AXOX5/42/MwouUX38rthzxgHkA3rhCqJCLERgAxJ910EGOfR0upY9G142zact294WyluO4sksvqqE8wCMbOVgg0pWjIzu57U2ZycFjrERsjgXPYZQYMEYnAZ2Kw/cWGVbQvBGcEPgIvqPyySqQq1FySFfQDOcUqHiiJzZa+HfN5unFFKm6J10zNYwh6JNs6ZBEjACBICEBAAZIHZzB1QNWAEVBgN16Myn1qt/HR8+0ahfD3p8z/hfYqnCSBx1j24N4IXBKzIfdApmwcsGFxhkW/SmXu1N3c0gVVhTQAEMjKQWabWDGgUz6Q7xiIIRlDAAGZtQFgAvl2v1po///nPx/2h67rT6RRjHGXcN+3Dw8PDcf9w2CPKMHRx6IHz6+vrWLim8NaaKQYAq71BDVqr/ohC+PREqwKLylxpVbdVps856wQr56xNP83i
VbKEma/Xa04RgawzKFYFLff7/dPTxy9fXy637vn1cr7c+jHkPPsOtrtDXTdENA+HikJdpc7nE6F4b7XOKwrXVEVZ+lk+JczaoTlJ1/W32w2ARCYE471v29nA/Xa7+arUaBRjHMcp5ywgzrn9fq9CXMaYsqydc7e+67oOF+UOALDWqN3gMAzaJlUga9NUu91ORF5fXyFOax2srRSFsNZ10zR1URVhilMYQwhF6Zu63deFMQYRUgySksINU0pKqCh9oR3UGNM49jEmsUXMCEBm1nqmkDjGlJPEnNTxDlHquq7bpizqH/70qSxLZlZRaeecmhFOC+1E73T9VVmWlHMIIXYREQvnlb+BiF+//Pb6+tq27X/6T/+pdP6nn366Xq/H4zFPUSUL9JP1kkgplWXZ971AVoDrfr/vuk6/V0S1B9WC3CRJItK07eVyiSGUKYIxwKwbMOPKgFkAWJiTEQQxxiKimY1WNmGP8I0QB70RN3mDVF+y7jdjnbUk4sW++121t/yhengiyUxRtnMtKCukBTc+KhtD2dnT9Q1z73u0adhUe2uow99pcr7bQW1srrsvtFmgNobktPitIyLL+9fn53Bn7229e2GDYH9byb0PbPD28f5X79+P24HaXP0tgh7rPtqQEhMzWCIhygZJxGYSK+zIMJusaQYSs8kkLDPw2ugEcDFGN4tEzfpYEn5akSugCAtCZBQ0gqwoR5lrMx2lEhKqPTEjscKD9CQtFoqz8tus1T2jIgGA5jyMDRMRGlFrHICcCUEIgcHM82MiAa2XNHYhKiJy07IQIyKLWhmKgGpkz11vtakkjSwqZsCaHInAzPdnNgZVY1MgiVC6K3jPLZqsaSMAA2ZAlvk/AUKtd0XAIDmP1jFSBsQ5SRKZafpwn+St1d56mRiVV9CHKsVkEkgplFV1POyfnp5SiOfzWSB//uGj3H798PTwww8/VIVn5r7rDIgj07Tt6eUZOH04HpqmjmFE754+frz2LzlHkWwMWVsCgDGkmA6d9k1jRMS6akVEMRoaJjWQ6HOlImhZoFQHgLmwMyDGEApzis6YpqqcK4Zhulxul2vX3YaU2BhnLPmyKMu6dBbBcBZDDjnEcdIRsrPknClK15Tlrq2bpkLEEMbT6SaMIghCMeRpChqk9Vr2rtQVWUSmaV7slK3R932YojHGOAsAitlRWJMaGKnZwsPDUZ8YQ+q2IZB9YZXpjyhVVWuZ2/e3vr+VDoDQIhozN+csGet9WRUAPHS3YZjiNHrn922z3x8cjSIpTGkaxizsrXOGUOjp6WPOOaU8TSGHmBLnnIDBNxWgFYAppSnN0S5nQUMWbIFoyBGRDnrruq28C+OgzcycpReJs7Acgw4dlIqGBGgATTdO0zTlEK211nrBOSieL1dflB8/fjoeH0KYMoimRBxSWZZNUysLIsY4jr0CfZk5xNF73zSNzlOLoohhAKCUwiX2VeGapqmcY2bJbIzhxGGcTBZBsmQkR5wXinm8rdkwCdh5Uq5aSLQukcBv2pVrPFjjHy0ye0vqxttosWTG76lf628X0OIsJrpELF7ec/9p8d41QZx76Ws4RiBENbUkXKhK92rgdyZ8v9fkxAVeAZuK7f5ZeP9M2pgxbWd72rzFua94/wrDCIB5DoFGAFSrE+UOddRMIS9e7ct6K2+f3Pfjza/mQeI7rRZa8JnmHUV8DnspJTGGAZwxzCLGSAYrRstlRrAizMBEDGJFy7b3rlTrQdQjskqa4exxDzB7LMzFrCJtRZBJzRvnyAiz29CM2ARUTgsJEZDF+3bfL7XtLpHMCiVEC5JTshFQhXMtgIzcCTsE8/CNZEaCzHskd5na9VBqbbfp4/9Ou3mmdooxxig4iDPzG1WFLeh2lc+R1fLkndwDwlo06v7me5WIs+yOZjRqAQEEMMM4AYDQAgJJnvdIswsWEfPp48eHh2MK8eeff76cXg6PD4d21+yo9H4aekihKIqy9MQ5x3C9nh+P+/2+bZum
aau6fAKW2+1W1iUAaBmnwR5glrQgIsUJK1BTe6HjOCm8pSxL3VOlcGmyj4hqoXe5XHTA46Bg5mmacsz743F3eLheu3/9P//vmPja91qnAaB1VBRV27ZxmKY4WWvrus7RXC7nnHPh7Z/+9AdjkUTIQFl6Y8ztdnl5eSmLvfMWAPp+uN06Nepzrsg5N03T1DtE1ILVOdc0zfl2jTHqNKttW2NMN/TX6xXRKMSDmft+GMcRDbVtez6fu64josfHx7Isx3HMnJVZz8xVVRwOB0Q8nU7D0BHR3P5a4isA6PcC0TiOXX+NY0SLra+ruqhLD6HPOY1DPwwDERVe7cv99Xqds2FGIuucOOdA6OV6I7LknAOTk6Q0hpiYeQqzWpOx5L0rK1/VRV2XKccYAqB47/u+v916Y8xuvwOAvu+7vlcvQ+2yhhCaprHWSpE1XCFi39+66+3jx4+fP3+uq+rnn3/++vULMzd1bYzxpU0pvb6+OucU1Kp4zr/+9a+73a5pK60pc86K/Tke2nEcn5+/XF7Pg0ERqetSRGLfG++8dSmFnLMvK+E8XDvatzoKsyAMYjRikDpCI75hecHarPtunMCFpLtdAHGDZvh2BLW+5x4U8+JneydPg4IAVCWJvo09v/P4+2/YRsF3kePbV/6tb3nzXdtguRa+Ziv8tiEh8JuJ3ve39t8u6f6x1+ndr779XvOf//Q5s7AAs2RVkAQE9TTIwpJZZP4PQP0GdCzHqgHLWUDfoA3LeczECiTFpS7muTOng2hmzsICgMYgOkEDRIiWrBsJwVgwVsiwMguMQTKgeBejI12F7zKg6DMDxizJkSG0QIRgAEjEiJAwCRgAQ2By1r6+RSS1Vie1Ap8/CAk03SIzK1neWeOkmnV30ZnlgAoiKDdlLtu1848iahAPkjQrRFDlkpyzACNBYrMM9lA0HgEIwBSjcixCjAJgjI0xERm0xdCP4xSs89ZbEAFA5wsAQZyNSQjJGmesM9bNGr4GyRhrFHYrwLLbtf/xP/7HsR/+r3/9P+qq/E//8s9Pjw/eWUpd09R1XQHknKIzWFfVrq27W/f08PDx44fSez3bREKEKtKxSDDMMU+XyNPpdLt1zs2k8q7rp2luh+o8j4jatt3tdirLqevCGlQU4yDTAAJFVe52e2Pt5Xp7PZ2nGM+n6zCGmBIgVVXVNo2zPqdU+xoBhXMMYZpGRNzvmqenBzJQOrfb7ZqmJoBxGGKIhiwLxRj7ftAGGqIpilIfzha0mAgaY1KK4ziQsdqJtdYKS9/3UwjW2v3+ME3T6XwehoHIFEUhACEEPbWF91VZMqdh6IWlLIqmLg1RUbii8Mx5Ggdh9s5NY4+IK0PZOVdVZVWVIDwO/TT0ztinx+PHpw9NU6FAf36+Xa/TMHhnd4dD0zQAGGO6Xq8aMr0vjLUsOIU4TjEbK0B91//229dff/2t63pRAklZECEIGzJVUezb9rDb1VUVwjj1w8vL15eXl2kaC++rqjSEOcWhH7q+F86+KMuyVFDuri4RKLNklhATZ/beHx8eCu+7rlMA3e12BYCyrlik9uXr66v3/ng86jWjqM7Hx0ciss6cz+eyLHPO2ha+dbcUU1XX+90up/j169dx6Ou6bpvGO49EKSVgcMZZY4jwFgZCctYqUtKStdaigHOeiCxZbWYZMohkyBALwYI3m8cfQCoLpWIQMPvC6H+yDGnmd+pNiCSa6oL+c/6JoGMWY2dsxGK0RcZYmtc1rStEU390ZGYwAxAqXZ2MGvJteoVrg5G20WBtJ66Ra464mwC2BSQKb2B6C1ZPNgTwb4tXItLhQt7aLb2xE88AShZbpm6KRF8WTs7CknVKh3M79js9228D3hx6lxmeqLU9C8vaC8R54Z4PmzHGgBgQnHl7ACBGSO7NViSd7aHl2QOWmdmwXVj6991eOtcqf7W+vo79EJG1Z8Tq8a3OTwbRIAtAFiRkEAJgzui0WpO5721E+yf0PmJvc417eS5AIkga
7QSBAZlw5iio+iouY2Sae+uwngXYQGaWz4SlQnvfE3iXdGx/u2lOLJKSpEZGb/72zVQPNhzYpavOi2ybAuuNMd3ltlqwKmRD1KBnwyvalpZLcmqQhYWtQecqZ+nTp0/PX75MQ/f50wfvfems1lv/+T//izVmmqaxj0Xpm6rKcfr166WpS+9tTGEYJoG8a1oEl1Lqh16HNworV0llhSzqpaVRUESGYVg90xX0MSt5ImoU0Sfa+VyFTJumQTSIlgFylimkfpxu1/759Wyc9670VVmWlZuR8SAsZMBYL5KJJKcgkmOajvtH5xwZiDFOQ780Wl2IMcY8TVOMGWC26yvLktCq9KKqbc2SKzmrH9vsAh9DjJEMFUWhQ8rF5H1GOzvnhu6qRwCU8iyiZbHS1MjA6+vraqJ0vV6dmdWTx3Gs6/rh4QEALpfLOPY5pbIojsfj09Ojte56vZxOZxkuVdU8Pn4oiiKDdLchhJQzPzw8qq5CCCmkGEPWHuwlDIqN1Pnffr9XuZaUUo4BCdumeHw8tm0rwtfL6zj2l/Nrfzkb4w7HY121McZb3yGi9/bJHYwvyrL0RaVXbH99BcDD4aGuS7V6yHESkeOHx19//kXhqUVRfP369Xq91nU94k0P7/l8VhajHitNNaYwtG17u92897/88kvbtlVVMGmTyFfNjpkJJcZ4vV6dcwKQUxaRFA2SIBjjyAgTZ9bZCGXIRlAIWMTomiAzEuUOzfj7Jch6877753ovb9eEbQTCJZ4ujUEEbXcqsE4AEAwgiiB9pzbaFEz0rnf3P/nA7838NrvwhsO37jK+beS+iUZLcUwbZU6DZn2+Rjj4XvX5j1Z78DY8/5u1rA0xqmianSEtkoVtNkiSlNeZWAG1emf6RR9uRbKsrd53XlCzEfC6/hMZ1VdEFkIygoAoBqyIwhKZBY0gLSwFFEReeZRwd4mkhVyvYWWJUHcFa72GaLYskhkdKawpk8JeCBHn9qAWdNojFqXSzWmcAGVAJIC8GHsBILOwjisXQ0CcuXMgSALCiLKAsmY4MhGJ5OXSAQBhziLC6AGAhbNIZlZZzyyMhnLOSRiMGq7OgsvD2LVtW5Lv+54zm8Kp1r4Cv3BzzRlUyW5FG0lKCUS8sXVd1lUJzP3t0jbVvqkvl4twfnz4yMzn03MKERGrurTWXG/nseunsf9P//JPhXXDMMR+bNqqqipEGW63lIPzBhBZEhlXlC5FNSmNALiYgHMIISUmssbMN4/SFVaXJQ0JGva0nPLel2VZ1w0zxJxjjGPg67W7nG+n6zXmZH1RlmVTN7pE6oXXdaOIoDASWIPO+rLyTVlWVQXIOaZxGPp+0OLSWhNjDlMKIeUkzvmyqKuymc0HxnFly604hcz3qT6oEjGCLMY0GuC1nasUWEmxqOvKO84ph2iRqsIVzhiDliDFFIYxpSg6oErJVEUSJuGicFVViOS+78/nVxB+enr6+PFjU1ZTGC6vr1MYHJFpd96VQHYMrEYW1vmy8tYVMeYpTH03dkO/7Eu+DTGlJMxNWT0cdk1ThxDGFACxKlxRV4fDoWkqkNx3Xd/3Uz94Yz5/+uCcI2MAsnd4dO00TdaXZVka6wAgg7J38NPnH9Rl6XS5csoKkHHG/PTTTznnHAMAxBT6cUgpoAFjqg8fPhBh3/fee637VTXNOTeF4fHxcZompfBba3MWRFJYZVmWliCGMU0BgElq1WLJwJIZEqNCIufJPACAMrUEQDQOGu08AgBkkOU2FoVGrBhrEYHFnFtYYGb9goggC4iQwGK1Mpu84zIjXAujZX3fRDBZGmQAiJqjw8zZU0fzOwL0rY7/LM5NQrDM/P4Ho+DfCXhvI997svn6z3kJXWzO12HWvOPgUEjMfIiRQJ/nbACQFezOtGpL/X3e3ndeX+ZOmuTpJmyCIW5qGlq2F+yqlbwQFQwzZmMQxRojIpnILv5MRASY34W99fm22lt/xUvMIyIWrfbUVA8IDGKmbAQyoFV5
TGFUKw8UyIBAJEAK9H+X/byr8zQQmqU8X44ja49CLZiBxSwfQ4gkd58O2NhzrKcTAJDu3sqwabt/b9b6/irZsBtnOwvKb/wLWSmTInlZ8ef270IVUo1z5Ush4mwHI3D3dwYkQq23cHP5GhTaGAKLZEsGEQpvq7KoSj/1w2HfVt7FMCHk0jtrJMaQYuiHvq7rQ7sTTrfT0NTl//O//pfDfnc7X4auL8vyeDxWVdX3txBCWZf6FSq5iYtaTUpJqVda/HVdZ4xpmmarpq8P7SIOw7BaV2vpVlVVVVUCKCgMuR/D5Tq8ni7n220a4+HwsLpvIwBIFmYEnqYhhMA5OWeaptofdvumKSsPyDHGoICLnJkhpRSm3Pch58wZFIioGFQt8jTmrY0NWJJ6bcM658qi8t4P09j3fVFUiJhFUkrTFMdxHMOUc26LSudeitCpqqquK+dcVRXX61VVuIwhRfbXde286bqu8u54PCLiL7/8Mo5jXVV/+MOP+/2+rctpmrquG8beObPb7Ry6EMI0BY3BZVGiIRH88uU5Jp6mqZvN/oZpDCklMsXT09PT05O1NsRx6PoxTDmGZtfud+1uvyeU/nJapLTRE7RNVe9aRNMPwzQFNFS4whJaX1hrGCDGOE4hxpiz/NLftMQvioIQYozTNOSc/z//7f/9L//yLwTy9etXlnkNYeZm3+z3+xAmZa0Q0TRFPRQiglQ656qqUpRsCOHh4cE5Z0lXW/a2EE46nmROThwRSEbIiQOiMVg4YIZluk4oKMxIkhkICbPMU4yFcvA75QV8U9J9d4z37Wz+HYbzzjfW5FvXZl3j0BDwFvC+DXXbagbusICZpv1tafg/Ev/ePN/uxfuydV2gdGgiGyb7EvqXwve93tT9sAAAwRvu/7cHfHt4v/863L9xfv3vHok57E0pGwMZsmNAFGY2bCyDMeofmIiIFX+PbABZInFWGCcibp9r61TjNxEZEBL2OLf4jDEGjCxMCxY2oraxhGgwZy0Ak4DhWZJspjdoTxSVSz6nXLrXv7dvWjDODXcR0N4xCwEjyAyzuRfmTKJuvMsQT4+OZABBQlxKQQCGO88h3y9EBbuwbA+u7sFa6s0wH2Xez9APEJGkiEwWZkk6FJ0xmhCVu4eQsqQsJkvK8njY912nzSJjjAiLQKnmA4o51iJ48asEAOYMkL0l51xVloVzFsnWnsCdX1+A5T/88Q/euq+//DwMw//jP3xoygIAuttlmgYieHp6+Pz546+//Dx2vTN0OOy8t+fraeh6Y0xd+2maEKmuS+/VZG6y1j4+PuoxTimviHznXM4RN5h+WWxcZtxKzpryry3QME4IJjGMU7xcu2vfC2NR1bvdDoSsQWvQEYLkceinaeqHlFJCFO+ruil3u7auCkQJ46icdBEpXJkw9904DP2lU8pBWdf1zBRknqYYwmwfulodqaRIZFmbtNpq1h1R5GFOaZqmYZhijCyskEi1Ms85O2/a3Qx7UaZjzklbJKtBR4zRWuvKIgmPt77rul3b/PnPf/rjH3/srtcvX36NMVoyh12bOU5DL6ZFtHVTOFcAwDiO55dT1w3n81khlzHNKuS+LAos9+3hw4cPu91uGIbz6Tb2vXHusGvrXVt5xzmGMHa32zRNzpnCF23dMvPQXfWu9o5CisN1rHe7OI1jnxOLiMSsFMzcNDt1kO/7c9ddT6fT7XoJYfzP//LP+/3+cjmNYbKGiqLQdshut0spnc9nlSvTc3c8HnPO4ziWle/7fr/fv7y8ENH5fCbr69KX3mnYAzJEVBTF2CdOOVMiItV/yRiNMdR4zSh1cmeR0JjMkHNWE2cQ0SR5FVbeBrZ3//w7j3er87q4b8FoAMBZF5/8d1bmZXWnbXGDSHMVyiAkK+RuE/BolqT/xwPeupvvnq9cq/VS/xbRivPG3ePTNuypm4piOHkWVCQAyRr22BAIv6nz/rEOp3ZeliO/Ecuc9au2R2Z9vq32iA3M9jTZGKeO2Fvi
mc6QhLXhuYorrgXfu9me/lOWCm99mxhGMkImSTA6qgWDYJiSAUwpzeY9gkDabc8ksMz2kIjeMjM2I7f5KskCs0qUSkvrxSKSV4wyzO6LApsTth6mv98afne1f/fSf3f1b7vBxhgjuA4+hWXFqmXO65+r7yrPduhZa75xHHf7NoagmofW2hwTzFU4k5jlwmWzDPhCCAbRGFsWrixLb61BAcnd9VJ65yw9HvafPj49//Zrdznv9/vz+Xw4HBDgcrl4Q58/f/be//f//f/gFKuq+vD0eDwex/728vJikB4eHojmcZ21NqV0u91SZJX3PJ1O12sHACortaz1k/de4Q8a6lTmAxaJHK0a13AogTPwOE3dMPbTJIy+rLz3CAYJrTWFdc5SDHEah+5yjWiLomiaan9oD4d9UbgYp3EcLRktQ4ksAQlzjHkck7qBK4NChTE1IM0yRUR6JYcQdDUPmQ+Hg5rqdbc+hCAI3nv9bTcMwzDkPINXlfyg8VKLdaV7T9N0Pp+0qL1er1oHM3Pf92Ly4XDw3r6+vsZxenx8/OMffnx8fPz6228hhBAmFNDBVEoppkjSlmXhfZFSulwuz88vl9slhDilkGKOOQGALXzbtm3blmXZ+Drn/Ntvv51fXqcw1HX9eDy0bQ0AMYxdf5WcLFF1aDWXFY6GrAFImdMUsoAlsqU3AIFTDBMLWu9q5723KfHX55e+75WpYgw+PT396Y9/KMuyqdzf/va3v/3tL0T0cDwUReGcUUDT6+vr6+ur1nPe+8NhZ4xRo3akhnnWcqvrWt+md5a3BkFCCt6Cdx6ru4qKQZBZVV5yiBkQUYxxS2MKAAk4AxpmtkTCb9T7vrvg/t49/u5mh+81e97NvdYHLcuRrHy1Td25/Mkdizj7kgJwBqRZP0zmsL0Gqn845m23/93zTZl6r/P0sc625moPv1/t8e9XeyJvWl/bQ/Ttifi919cTIIteDPxdnCqu4mQr/ERLE9Xe5KwKUsYYQyjKslqJeuuqvYY93Mzzts8TCxGRgfWdYjIgoXWYkcSgGCOEQgaBBWNKRsRouJbZS4dpYdjwinnEhe22ORYLeREWZiVzQkl2bscLCAMT3EW/ZGnFAywEPRG9Zea7SLW/ZIaiIoigZBRWaQG9vRaq3f3Ge1eVg7LjF/6pMcKMcxEgLCBZOC/ue6oTowuWiMQYGYQMCUI/Dk1bVlVp1L2WmQgBRXJ0dnGAkoWMhGAAMkpR+Louq7IkQs4xholzDOPUFMWnH36sSn89vQ599/hwfHx87M4/IXBVVUXx1Nblfr8P49gP3dPx4eHh4XDYK93KOVc4b4yZppuCFHLOo7bRyFprtRM4XzPGICJn4DxPxdY8SeXwNYoTzYgS55zqk4mIQT+O4+XaXa5dmBIaa5wzrkBrHBlrCUlyCnEcJQaQXJTV4+Px6cNDVZWWgFOYpmmaBipqvcJTyuMY+n7s+zFMsW13uqTqWE69XmOMzt1b9xqbM0cy4M3csZympV8KogViSmmKEQDc8sAFmMqS9FuMMTnHlILGdWXo62pCBsiAK0vlO+acd/v28w+f6rrsuusvv/zUNE1dFUrozjF5b3dNzdlPIZ5vt9vtdr1eh36KHAEos6Choqicc74olFzhvU9DuFwuzy9fhq6v62rXNG1TOWtjnAilcLMVgjEzvChOQ9PsqqoaQ7per5ySq+qqaWOMjoidI7TWOyKbOCdKu91htzuoR6NIHoZhHPoQQsrjL19+izF+/vz58HCsqkL9Gqdpul6v2g9Qx2BjjFZ4ACCQV7iKdol19wnY1pUhjDFKZvRQlmWYJuYE4AwRkGRBgzSGcfZZdSjGrctuZkZk7T8BIrAg64Lzplr67vr77vn6yraZuc6AttFi/atZUp8V5Yhz6Hsb9gwiiPBGpJDnGAKEtKT4mqf/L0C44EygX1AqsE3c7wEDvzfbgwVvoauu+oMDAG+8ypfVaZ7zMWdEmmd7ArQ4J/yjs71V3UO+3+x9
f2Rwne2tW8YsWpkRkTCsVYghuAutgmzRm7QRanuH3lza94KIZGYGNzMLy6y1JcpIsIKJ0EpCM7d6iTkbmO0XABesCuKS7+ie5/VCAVBmnoAs4svvQFnrf3MHf5sU3Ot62fSL3136smC9lst9+/P72d/2WL/LdNSGYzsxeif6t9TKknMWBGVM68CpLEtv3fV6jTE6N5u8aI8LFzw1IqoRrpYdh/3OOZfiOIxpnHoJ6U9/+kNp3X5fD9fbL7/+/HA4Ho/7L1++/OHTp3Ech2Fo2zbn/MtPPxtDx+Px6empbduU0ul0khx3u51Bul6vaIJ6JozjqNqSRVEYY758+SIiOirTQEKKYnHF3A9cBFxERMGEiKiwHQBQXAMAgDFDP52vl9t1yEjez4pCVVUZQATOMU5jn8ZRJFeFo6Z5eHh4fHxAxGm4hhCYk+JdEQ2ixJCu19v1OoQQhOHj8XNZlmVZbINZSsn7Gpep6io0471HU+hCHEJw1ltr+3E4n8+IZm3kGuNEJKY0jmNpnLY3tcbVXVNx59PpNI6Don7GcSQDTdPUu/pyucQ4HQ6Hzx8/eO9Pp9PlclF3Ot0STlmFx6y1/cTn8+Xl9VX73mVZQqRhGETQe182ZVVVbiHVjePYP19CCDmmsix3u51yA2KMCBqYK2NMCOM0TZkjINRlxSnebllRtRXLlOLtcrK+JETnLAAyc4xDSDnG6H0zTdMCFg3TNKUYAKAozcPDw/Gf/rzf7wVSXddNVfd9/9tPX67Xq7VGeZwPDw+Hw0HzjLZt+2HUue+nT59ijE3T/OVvP4cQDEpTlYiUUop5kszONpo/WZucLVhVxQBizIh6cqwAAwuY+5rwbVX3jsb3ewHv29++iQSbqd5SEsHm5l6D38oUvDcnNQwvZRy+/fZ5YZqrKTTzcvU/N9jDv1seffu2dTdF7nDMFeu+jALXQ/HGY11EVn3O7UF79/g7Zfd3n/x7tn/7sLbmnGPMGRi8cZYsCCGjA5clWdaenOUMCRkxOecQGRc8vTGgvRBt9poZUmKI0BggEnTZGGMBDQtBtiJL31MQhVgMo0AmEYuekE9xbxgJwBgGFCImQSKyYAmQCRwTIy7iEJBoRAFABgZCBgGEhCCOBTliTsTMEA3PAzqKaT2NernwRrBVx4GWlXM3B0ojhAzIkASE1cFAzZhYoZcMyIBZgJktYQZgFsmMwsRiAREwpWwBPamydxQBg+IMNpBjjFNKnENiYYbAmVmmlClbJhRXBOYpZDLOVPvX19Pnz5/b/Y5Bzq+nGGPhsPSu8DYMo3AqisJbBd8nRDwcyz/+8XOYpucvv1lLIEly3rUNAJRleTpdvv7ya1EUjw8fCQWYYpDD/qn0dhrGaeibonw4tG1Zt6WdLq8hDrUx4CHHS+AIyM41bbtXYTMU8NY9HPfn8zn0t7IskTmEYI15aGtdefuQ9HiHEFKIkplTHseRAD58/DjPnM7nlJLKNV1S/vV2+e3lhbPsm7aqCu0TjrdrXdcscjpfpn4oiqLefTTGfPrUNk1BnKdpmsYgIgAeRC7XwXvPDJdb9+vLS4zxcHh4eHhAmQwZJIoxXm9dCMEVRXvYE9E0Td3UhxAY2JTWeGeMITx002nKkZyPIqdLN4wdC6UYd7tdUbgQwhgmVYxExNIZAFM19eGwt5b6vhvHERFvw2ALU7s65zzlwVpry8IUtns5Hdp2/+kHa20a0ngdEXHfPCiO0TlXFCQiKaUxhG4If/npr4uSdU1Et9tNVdnatlXq92pjpM3k55cv+/3+6YeHsiwNilBwnpqqFsgQcxxPicVZU5bW2pqIzjdh5imEcRiFyJDLICnC8+sX7wpX1YiYOOWcAY2zxen6Mk1TGEYlpTRFYdsKEcd++PN//C/OmS9fvsQYvaknycMtNk8Ne/7lbz+N4/j09GRHM30ZtHkQOT4s/eQvX77s292Pn38IU8/Mp9Ppr3/7y2HX1nXNWNwSl2hM3XaXkxAdHvbqEvXx
48fh8hWlAszWG29oDAMm2+4fJcehHyhObQsOaJomA9xWVeBSk/OceR65awTapM4qkjTzkRaVqHfruLCgCC0YGWYGZhSBpOn1rDmB61gsMyzTLxYhoiQJAWu0MOtJCWU0AGqNbRRvzorSyQAos6qUcoiVYbz0vwC2oZHf/GIdGK3/zRZszIoxEDKk+ANhVhXveWQ3hzgAgDjLnQKiAKuusBGRKFGJiEgikBfhRSRnGcFAFkERm1LKKnJk3gyb1gpkDpw0zwvX1zMmADUU0BXdGAFEggyIxgASWQJLQMjKxhYAsL4oV/uPLMiJVRmFLRgwmbJJ1hi2NPd8FjdvVK3OlbRAs9+3VnuZyIpki5YhccpiLZMYECYyhtEaYgROBmyWKIJGcka2nLPJADqzBZit65ch6lwkEpHy7gAWaU2DkkWtblVDjJNkEsachRkkg8w2H+q2s83L/n7G8XvV2++lF+8qv+2ffFvzERHkzXtYmDMolDMzQxIxLAAqdAsMYKy1Ly8vIUxt23rrLpcLSq7rMkxD0zSGQGs15YoR0adPny7ns0JFuutZMn94fHx6eCycffn6tb9dHx4e6rJ4fX0dhy6lVNcftMdVlK4sdruq3u3qyhU5x/tOicCCwPRFoUUSAJhFeFPl9mEl3gHo9M4YY2VW41yRLPq2tq4VvK60ca3PhmH469ez+pgXdblr2rIsrSVjTF2rH+yAKFVdeO/bXb3b7Qqvqlc3VQgzxgiIDke/fPnS96O1Vs0EnCucc7X3AKClKvOso62VqNYc+iHako0h326/aUWrDnDDMCBh0zQaXfq+Vxq+tVbn0Cpc0rattXaaxq7rtK4t6kKJdDCTyr2xlpk/f/6sWJ71EOl9pzPCEILOCwHgcrkolbuqKgBYa1CV8iKi4/HovT+fz1+/flV1UCL63/63/81b55whgJQCCOuEtXAGDThQZKyklIZhyjmfrqzK/3qarCMSg8CHwwEEBSWx5JxCSCGEnOQyXCFD5ghCZIDMzINMTdv3NxXBeXx83O12cQrM/Pr8rEH66enp4eHBWiuZy7LUd2pEr6rqcDgAyy+//KJzXzWy77pORFRH7Xw+a7NdY+Rut/v8+XPf9/OoJaVpmnxKZAsWmaZJtRsAIKVk0BpjQCDnDPj+VoWF1r19sr2vv1u1rH+7XUZUrANggc/MbbolLgKoev38Cap1OQuPaVU3k6YQUTZF5IrOk7ctLvydKvAfrY2+fdx7oW/nmr/3ye/q4G01jJshqCwSZb+3qMqm6oW1Op/LR5UGgzfsvVnA8v3Dto8fYoxxCiEESZmZU86SWQSyCGY2AMZIovnmR4ka9gjQGENZ+3XqiRHdOudDK44EbUpsjEmJDZJBsWTIAkQhi+gIIRJbkGw5GbExU7QTs7Fi2bKGOJYZIKNhD8ggokEiYBQgIgPIyMRglPI/S8qwcCbOLEycRACQSSAleXcCtv/cHuXlLG5PJ/+bQKnl/CyyNqLTurx8g9wnfEhCBjFv57qylJDzXQLAM4tWma6sYL++H4qi9NaWZRnGfhxH4dx1XeFtURSRMITQev/p06dx7F6fX9q29t5fOZdFsds1w9DlZLruSiTH495ZGn67AcjT02PTVgDAORmkqih00oaIMab1htejocWHsVaR5dpzU9xNSqkqSwUlVlXFOV+vV12hKCt6J6cYcmZaNJePx6OInF4v5/PVWutscbvdfv3ly+ulR8S6KHd1U1WVtcZZssZk5tCNkmNbV+p+0DTNw8MhTL1282KMxjgRnMLU9z0ivby8jGN4enraVTUzozVl6Z2VYRi6vk8pW2uLwgshS1JcRkiKQbWIJsU4TbHvgkYdlalExNIXqoTZ34budkPEqi4IRXShNKZq5oh+u900PyCiwjlOaeJIRN6XdV0rS32/b7Wzqp1hAFBbNcXxg9A0Ta8vX7uuSykR2uPTkwbX2+2mrI+Hh4e2bfu+B4Cu667XKzOrTaAxhkBinELMBtA5W5Vl
VRWF8waFUEhYT02MMYUYYyzLpigK5wqdcGQQTowA3tgxxCmGlDgxx7ioikNyxhdlQWSJdM4mBJJzfH7+Eqbp06dPu7YGyVMYWOYWd7XbqVjP6XTqrjcF1mpYGsdRgU6IMAxDYawOgPu+j9Oowm91XSvy83A4WOvPp5eiKJqmOZ/PvnDW2inFYRh81ReNZYEYozfkrZ0Vg1A5x0Zk5XkhoKqirAFvFh/RV7Tug4V1921QQZVH2UY8NbthvZ1nrB0iEjMiZla/aEacUeuGBVDZXKDYQiZAVC4WirnL4s+hgGd4J/xbnUPYBOPfa4/Oq9XvBDDZstRnyORGPGS7tIJRJvQ2yMFi2LTaNmnVJIKbZXOurec0YbbTVrDH7JChgMaZ0T17ralJPJiZ+b36j745Jvb48ccU4jiOYZxijDlq6ykbJMm88HN1IU9EFkCLSrRIDGLAMIhBDjlbcmJJL3eDljMa4xKRJWEjgsSImYASoHWUE7JBYJSssCoitiypmP1fDLPM5txLOgBoCQJmgwREymlzSAkAgO1MOdcSOycQ4kySMItA1jFqBsj5+zTSb8PecmX8vbLv7yUmb3v52w/f5jjGADOb2f05kQBIhswgIpkEslJ6lHwHLIlTURREdLlcmqra7XZcFefzeX9oLJJ+jnP7siyronTOpWv/w6cP1toQwqcPH3/8/Nlb89Nf//Z//eX/3re7x8NBMl+7mzHmePz4+PhYVZSmkHJSWH/lvUFMOUBejZTWOpustZlZV38F9Gs/DRbtAr2stdTTTHkVJ9OLfhVqIaKX59PpdAIAdWM4nU7Pz8/14QkAvL1b52in4ny5IkHbNvv9vqy8huGUp/Uu8t4T2Rhj3/eXy0VrL/Ul0G8vCu+c627P0xRzztaasiysdZGzTolE0BhjyIFQihxjjjEpaPN8Po/jWFXV8XhElGEYnp9var+w2+3KapbctNbu93stDa/X6yybWRSaxsHiR6irvL4+TWOMSSEtGu2810GeB6FhHF5eXp6fn2OMbdvudofX03OOPEy9ZNgfd48PD66wt+s1S3r+8vJ6fnHG//Djp12zTxzDGH/+5f+ui7Jt26ppSw14hChZKTlJMi+EE3VZGpLVYW2MOYxjjCnkFGJOiaeQxhhYr1A0pbfekilsVZZVXRfeC8AMxxX23n769Kks/NPTB2vN5XIlwv1+37Q+pYQ8E/UU4aLwcmOMM0avqGEYDrt90zTX28kYU9f18Xgculvf9+fzOef84fF4Op2GYWiapqoq1Qmaj7BzIadxHG+3G6CzZU3AOSYzm0EmxdkaOwsewga4sGp3bbNSvZLXmPM+kLwfyN2h2iKSGRGAZwKDEADPpjmsohnrOqTl3Exrg8334nadeb/oIN5Xqu1C/23/6d85z9P/D5uKDX6n2tsO/+5Hb8N1nks6WJgP33vQG/zEuoczfUIWAON8tObH4jq3rJSIRtAQGERU3ZvN8i4AYHdPP+YQi2kM45RCSGFKMUrKOQXInFLIKXHKzMIgxJJS1HZ0IiIDhomILJIwCzEwEllDkNEaiy7lZAokDb5a5QhZQEayCJII3SxogqyBP06BjWFnmVnUbnEGmQoRCREr/DPPpLRFmVzyjErW+VsWFOGs/ulZkjYJCZD5fmK2Z+tdt3MT9rS5/14i4U3lh7L+p29fKYMzYQQxS95UiqzpjzHGMyBbMZkNsOXMYHNmAsk5A+ss0RCpEQUt/d71miiKoj7sdrvd7fq6a9pp7F9fX4/7/Q+fPo/j+N/+2387HgqD9Ntvvw1d98c//vhqze16Hm5dmAZ32CNKP9zCOO7b9rhv69JbTBnFGKzKomnrypfCKUnOS4ediGb3CSAB0rpKUYLKl1LXGABQPkDf9ylGDVq6vqzFoiG7Qt1+/vVL13WA0DS7yPzly5eX15MtyuP+QblczjljiQiZc4w5x+lwOOz3rbXGEFtrAXgcrsaWgiBALBymaRiGvh9iTKpl7H0JAJljURTe
UpyG2+1ijKvqwrtyNYXQmsw5h2RFFF+TQsrCqFibRZPFWkvjOF4ul8vlVBRFu6u1XFakWF3XTVOLcN8Pw9BrdxRJpjB76zwejjp+s9Zqw+R26ziDiFirKJjCWqsNnPPp9PLyMgyTtb4oKhF5fn65dK8GDCNXvnKGJMVTdz49n27DbexGV7qHh6fC2Zcvv126Cwk9HY/Hw+HDhw9NVacU+tvtdrnGKVR1YREUTi2SUQCJjDG3l1PwnohiyGMMrLYhItMQsoDiusnZwpez4FxdWCRb+MI6IYwxahXorDnsd2XpRfB6PXddz5wQ8ePHj+rYUFUVEQ3DgAJlWd5ut9vtRgDK97jdbmGcPnz4oPePOj0VzjPD+fyaUjrsdrvdoe9vl8tl3+40NWnbdh3EqN8ToKuJ6qaJce5PAHMIIUqwVFnn1HcTcKYHMKDa/MoiyqjdF164YkzL/bj5iQCyEHPvYMBZFUTDZyYBQMmqzyKAM09eSEDX/hnDiSQgogz2xZ4AQLGfzKAungIgPAv0f18z7F1e/u8OeN+8KPcgt7y6/lii0ALjJCKTF801ACAGIhTDKuokQKrsu9hi0AzXAWaVv1m/TllnM7RxS0oX0v/N4W6uZoB0UUZanAppDn6KOLXt8Yccow9jCjHFKc9hL4Zp4pQ4hTgFTiFrj4slSRCApPpPCQCTAUQBR0a0bM05IRpKhg1jEhImstbq4IfQEhs0DJmRkSgBBkRjKNpoMqcQwkqT0GhNVkXOwCAykUPDBqyO+RDdarKopZ4wCAuCBQHOIJlZDOQsoGb3lOfTtiWOvIt/28xozTk21wr/fbjwtt3/LpHRXSBQxjoAgNUiXryIiNoeLda1kCUzz87LwiiAHOu6VupSW9eIeHr5mtr24bAPvf3bX/46heHhcPTe//Wvf9VJT2Eape798ccf/ulPf359+Xp6fnl8OByaWgWomqr68MMPTVO1bdtU9eXytxyTJVM6760DZM6ROWWOzKzuOWCsSBaclSoBYKblLVj8oij0etUhmTBrpzSEANbpjeGcIzSaa+csv/32W1FUKhH5/Pz866+/IuLHD5/ZmP8fZ//VJEmSpAmCzCxAgQF3D4+IzKzKqmrcsz2983R087a/4H7APd5PXaJ7OtqX3aXbuZ2e7qruKZCZEeHIgAIRYeZ7YFU1dRBV1WsUZGFuUIGosDDzBxDRe6rr2gcnJeecFLhpqraNsaKcR2EgqhFBpJBqzrnvB4te4zjmPHXsFgv4qqr2+72qWmGwqkLT1t5Hq44Ow1CyOBecCwqYUhmGNA65qCC6p6cny4Rsq8w61eC11lRbonvTTC56RoqYG34wDEPXddaIsi6XiOQx5ZJgyoDJOeddtPeLKHM5Ho85sfcxRh3H8XzurAx7+35v7LcQgogcjo9d142pH7pus93c3t5WVfzy+bO1u372i599eLcPoXKEQ3fq+76k0ZNrdxvnMDhPDqXwMHS2mlFV7yeTdAX2BIoEAE7AtySmz0fO9iVWlfceI+UxDd3hnItRO6xaW7UN9OV4ehj6yWrKcK02GJTI8qeprAOw2+2sZ2xODgbJub+///jNrb0hxhicH8dxGDoR+fz58y9/+cucxy9fvkQf2rYVLVNpIWVg9eRAdOjOFLzs94QIUgguQdH63+vLdpkNFnEJnX1mLjPDJe17Vum56OuKrEHaIibDIaxq0Lgpw7OAhypzWY6m1yzJU7Ba4Ryz1vPSkgvqzF17EfPwz+ICngAAgABJREFUOYMC/owiJ8yqkJfI9mpy+1rYw1ewCZtsrV21zgjfuMHMUpuURoxnhhe+udIMINJpeWI/hQQWX4gQHKJDBSIPRDDZF1yCtd/efBDmlAZOmUuSXEpOXBIPA0jhkkqaKp9T5aE4MGJfYS3MktkK3caudsqApMCIRJSJBCU4V9jE/5HIO/bqPPiIqt6glGCUdfSeR6fOOS+enJvsZsvM+TMTMnITuhPVARacdKWNvyAgRuUAUKei
UhBAoaBplwAWntpsi+TYkoC/LlDApRQgz1mkS+SznG9lbwcXTc5JTJ0mBz4AkOkbHCqgB0ZgdDQVJuJ8XhmRFVhVPatMuu+2SpwySEQElJSHMqZh6HLqbvZXdyht2/ziF98Dyr/+y6+Z+a//8lc//e7XJeXr/f6XP/8ZcDnc3wlnj/DtNx/SMCDizdXu5uamroJzzgFIKY6wqmPdVMFhSXkcBskpZUNFeiIKVVDVzElnUSLnnPG1jf1WVZXkYtLMzjkXAiJa1W7sB1PQ9pGIKJXSHTv7oMXOuy8PD/dPjsLuat9ud30B7zUG553jXFIaWXIg3O43gJLHEScpAAYURBhSOXXDuTuXUiRzmWQhsa5rRCTQWMWmigSaS0aV7a41p4WcszUpASCEMKmX5TIMaehTzlxUEIhUgMUyoTKmcRxzGYPDzWZrlrbm8Ldt223bRueMcmDfqcApJ2NfXO93V1f7tqkQpOQxDX0WdoAxOO+jMdmZdRzHoR9z5vP5HEJFgGM/fPr0+XQ6ee+bZlMFXwVfxwAAx+PBhK1DCN9+/GAeQKfTkXP69uOHn/3sZ99++21/ehj7c0qDKa+2ddxut21ddV3nPAFAKpMgTUlJVa9v39nFUnwAACAUkSEV55yxyMjK1OCK8NCPPAqn3KeRU2aYRCqaOl5f7cc+MY9mBm+0DUQcE6tiSuXx8TBVj7u+ruvvvvtuHEctPPSpqqpNu8NJ9QO4CCKG4H0VmqZpmo2IHJ8eHh8fQwgx1qYA0DZbQyGNkouI99H52I9pOJ6H9ry7vuFcRtUYI1WVVZWZmWYYpHUVCElEFi62Ve3s36SmxBP+0bQ6F7SnFF6C31IRVVUwNUK9iLUgzhQ9AKtIgSl8EgAg06Q1CIspn9W9zN5lmpMUJhVQ42a9gWf5v1bkfBEFp7D0dm/va2HPASgAW84jADiV75z1d5aungg4h1AyIIoAyGwnqoiT5apZJKLBWAAFlNCM5VAQnPXBYEmY0FAmEyHdzpuVjn27fye5hDyyQUhzzrmHwqUZlRmklDRyHqewxxwYRaTMKBiXSymJRPOYFEBEHQBYV07Fi2ccWciJC2SUdkYtpEGU5+UWIJHLWEhZQ/GTdw+JIBHThJaZ6taC6NQjMRISFAABmZiWamRIKaBOhQlARVmcmsiskJVZ57BHK8uCdc6+FPFf3+sE8DXIsvyJ8YG4BoU5Zwbv4pxbL7G8KCApOXasnpidkFOvpYAHZRBUBAUPkynI0+Fhv9nGGHMaEXHbNoByeLgvQ//z777bbrf9+Xh3d1fyWNf14emhjUGD/4vvf77bb37961+Tyl//xa/2u21T1d/c3njvVQooo1DO6fF8bpsmkNtsNrtNG5DKOEhOxqpWVcPtGomQtdgMa8gLK/1ZQyiEUEQNdtE0TQzBZn9EPJ/PAGBVUJyJccMwfPj2O2G4u7v7wx/+ICIfPnyz2W1zzjE2RORImEvXncbUR0duU8cYhqFjInMfTSnlXBDR0gITYfBVVFUDKtuG1VW92WwA4HQ6qWpd13WDAJByMVk1YQgxxtCeTl1OY9cPfTeakFEREcnXV1ePj482sb5o0cHsoNs09WazMRjLLA1DiDimMaXUNM3N1fWHD7dWmO2Op67rJk2yqlomNxEYx/F0PHddN445xvrp6enuy8PDw0NKqa6bm5ub3e7q6joQUC6JgEQZAXxwm7bd7bcEdDofh3748PH9L7//JTr8/OmT457IN9HXu22MkUBTSoenp27orK3CKecyAsB2u40xdkNH6G1accF7H41r75yzhUB0GENg5v58Ph6PBdlb3N5tY/R1s9lut03TPJ2OjsgHQkQWMJSs996FymrLT09Pp9NpGAaHZGYgzrmUslnx3d7eXu33ANB1nUmnOme9OW8nwnv/008/ffvtt+/evfv04w8PDw/fffddCGEYjyklAbiq61CFYRjS0PXd+cOHD8euZ8kxxskpQpWZyT+7ite+PC9ylDev/SWr
s/P+MuYZbFMuqiw0lzdFGBWsy0pTmRMEJ8+YBVyCiGR+1itH+IWm/edEsn9vtPvaFPciSVhPnvDqSZ1F1JY0eim2vXigcxqhSgKiinMvywxFAYDMntXMUhEUkCY37SnUEZC3kiiaCNg84S9H0v0//p//L/KRvHcuTmziGKuq8SHWdR2qKsQmhDrEJlRVrFtfo4+V8zHEKsQYQnShsjaE80EReOGjEyo5hSIqrFyksDJrYS2ZswZiUAYuoIVEULNykSxUCUthNvl6FRZVZsbJn6AIF+EiUgwQGLRIKcxFhYWzTmloQRU19oIYthNUWERgNlpaig92MmyYrs/WVJqAeTkHE5dVhC0ALufRwqEtElULIhAQKoias4Jhw3TOQgFAQCfFduGpEWjUfSR0hI4weu+8MwkHRHCE0bvgHHMhML9AAJGSehCJ3h8Ph92m2bQVl9z3Hals23a33XhOf/NXf/nu5ro/HT3hz7/99sPtbR28cEGQ4F0dvCcCFUJs6nrb+DpG7xxIySkVw1ekRABVjLGKiFikiEhhq905Y1mJiHd+gm6K3N/d2YRiJT4r/R2Px5TGzaa92l2BwuPD48P9A4s2TVO37f3dw5f7O3L+5t273W4PiMxS1y2owV+hpDENQ1X729t3p+PTdtNcX18RYR5zyVlYhPnhMDCL1fRzzv04qEiMUQpf7a/2+72hSOu6apvGOTqeHruuG4aRmZ3zzvnCPPTp7u7+dDqfu86sJOYegb+/+ywiITrnyTnyJhoT/Ax/ABPgNgmenHOoQ5HS9+euOyPi9fX1Nx/e39xc100saezO55Ky91RVERFKyU1VE7pxSI8Pj8fDseSiqlLk00+fzqfz0Hc5ZVBtmvrqan+1319dNShwOh0e7+7P3akO8frm6mZ/zZzrUL17d/3th49tW6d+7PszKQaSuqqaOjqinNLQdymNWoqCKJecxjENIuLJxRCDD6BAaBACy2LUOYwxxhBicN4TIZY85jRwSY4g1LFp6s2m3bSbpjGxgvF8OvddPw7D+XQeuj5W8eb6ypM/HU//9tvfHQ6Hw9PTZrPJOe/3e1CwQNg0TXc6Wzn66enp7u7ub//2b4dhIHLH40FVr/ZXwzAeDk+bzUZZcyospWmapm67rh+Gcbfbb7b14+PjZrtNKXWn7i/+8i9///vfNU1Nzl1fXY1japtNiLHrunazBcCSMiF4R96R6X8hqB2B5U+0a1lYhUHAuKdcirIIsz3OKdmfyjI9b1OTIsrsScpmuCLKAirWVbWk0gqhNvejGnodLRFEg7/BtNAHABRZMJcmOPwiNq+BJ+t7RHwWolZ9O1MVg5lhsRR4X6AccK4yyiuF5HkLBBSEzYvTsmR7rCo6J8qioioCqsJJVVXQIBJztuaYVc1QB0iJEL2ZsE4gSued884HR8G5CSJH5JCcRSJC80y0fUdft1tmjqkunEopUrJyLYWT71FFSpZQchgpF+IkhYODUqQkU4sonFMZkzCnoVdmzZlzYc4wa+qY6KVlSlYZLLbHwgIJUJEIMxQChEIScBwRkbxJvJBzDkU8UVYkYyUigXNKJrmMBQUnL6FJMgBBCLQUQQCvIACkQoiGmaIV7ebFauXF4Jir4W8ujmSiiZAqXz4CMzB3KoDOhXHVyb5dJockREQTYN1smknjOOeMNA1inOqeWMVMmafVnABAGx2ROiiOnHfk0DuPDvC7j+9225bHYeiO7/bbjx//ClXu7u6++/ghEHbHw+lwzDlxCJxHBNm0deVdFUMMfk5/AdHVITmL7hbW+qnNs900VrDy3itqKaWY3qP31r5i5uhNo8SJiCVVkzTX7MmwGEdYFLQeWPCTG3jXdc6FTdtUser7XhFCqMZxrOuahc/ns4q8f/8uVu58PlZVAIBhGMTmHQGLr33PC9Eip1RK8UhEdLXbW50TVutxZkZwKsJcmFWFmWVIJWcGAPPhI/QxeiLXDUPXDX46CpOBhuXx9hwAmBvGpD2rCgCW
5hpcc7PZ7Ha7qoqq2p/OqBCcFyRRRsSqqjy5lJIq55xKYUNsp7F0XXc6ndKsxXp1df3u3fvr633TtCWdTsfT6fRE4G5vrq521y4QCNbNJvoqVN6hY1UB9AAEjpgDgootQQunnFIS5lTG4LwLfrfZLmu+YRiQPCI8Vx9EIirKC7rHVut1IAhxc3MzqZrlcexL4jIzUdFIJs45Qfj06dOPP3z69OnT1e37Ukoex2XvDIr58PCQc37//r2tFZ6enkD1y5cv19d7RBzHibfXNM1+vzeSHzOfz2fTf9lsNqp6PB5F/d/8zd/8f/6X/+Xm5uZqf/PD7377V3/5q1//5t9+9rOfDX1XB993p1p1v993wxhjnMub+iLPW88PS0q3fmld0lyaeZfV8/J+EYObT1magqpYMLvkfwgmRwwrFaoXW/KiF/Pm7Wulzj//hs+xmuuPv/7z8ouvrEnfCLdfwUBMkmGkS1hEdAKIztu8qeiQ0BzpQRFIzOENp4UBAiHq9D3TU4hW5CQ1Fj/4yujq5Fx23rH4XEqBUHwMICqctXAuc5mLRdVz0VIn+9PagVKYc9IF+ZkTlyJSVBVwRBsQbMsZNfLFwnlRYJSiwsQooJgGRER26r2qVyfkixKJICEyoJJjKQ7JUC0Z8jRrW+MU1BGBCgojAJsbA07FBFSgmRG3HPrlT5uq1jJ6NlQuQwfl8idOlD4kmdy2rJMHXkQUlVCRUZCKioCSOCJSKkhgxW3EwsZWVxACIVCHogTqqVAdXSpi4cT463Y5AZHlwR4hhOBNFkf0Zx/f55yPT49pHLfX+4CSS+Y8vrv+JufcD0Pl3K7ZGbzeASJq8C5452lWUXJA6KIVY5lzGfu+S0NXSkGAEEzfy6a8MuakACFUi0kpzEw+mEFchokgIi7FYicA7NpNW9WoPPb9LFjsEPHp8WkYBh/idrsV0HM/KGLd7sdx7Puec6+F20293+8Q+MzZQCXKoCKIjhCQmUdBH1AEWEopLOAoNHVs29YI4yosws45j6TKUhKRJ1ItLGzl0Mly1iKfaV4b4yKXJFJi5RcQBAAjQHAhhOCDs32JMcbgiIBZEEQUnHOhiZvN5mq7q6pKOKc8aGED4zmk4CdEKykMw6iCosVyjJTGw+Hw+Ph4Oh1UNcZ4fX318ePH/f6aCEpJY//EudRV2Dbbq3dXm3pTtKQ+OQKHqmK1lgIM6MCTc4BSSsnZmk+qiiAOdNO0dYi+8sG5hTjILEY5XatS2FWLyqTqLBE2GVbvbbkmLGVMY05clIi8D+ic9zHnXFhSHrthfHx8HPu+qSbjiF27SyltWkopqeLp1A1Deno67r7bjeO429UhDJ7cTz9+3myauq43m93hcLj78nBzc7PfXzELKRBR13XHw7mtm91uZ9wGH3YA+O3Hb3788cfoQ7Np89h7h6RShn57/e7h8UAu3F5dH8/dlNMD2qmZ6jmTdInhMxVW/1TEsr3p38xUsOlOX91sErFF1xT2pjW0Fe8AQMTQAIb4xEmAbKqLWhVQYFm8oT2eFBsVXkW1deT7cyLlnwx4r8ubX3t1nicdzu1Q1WKtSZzYqEDERGQNPiJRBRP6MogGWlI4aYeTklMgQqeECKQIiE6wTE5Ntmw3iKISklu7bdOil2YEBlOXAAAmcqUwO+cys/dQo/KkyZlTztmbG1xWYibmYBIbUqQwS1YWFGbJFgJLKaUkESEsoMrMwKJSrJ8rCCyi5JgIQkAfMAR1RORmL/upgQyGHkEEJQZwSIDiGM1BFhFHGXEK5+QIFIhRHSAokwKhFcqFAFGUABxf0Jswt5dsGTuJesyRb70eWa9o0BbyZMPXjvKlyUwIF7oPqoiYWKpVmCeQjuicKLghTRHXeG/eew6xCCNiKSWzNYcYgFRZFdGbRnAGAI8G+SNE/fjh5u7TZ2nCh3f7GH3qjjHGv/urX21C1RWhWMfddrPZRO/QkbmwOiTn0DlnQuGTCg8LAghzSbmkQQp7cjOkUCb/
HS3M7ENo2zanhDNpb5FdLaWkYZKn8t5zKSYDZF59hlM/nU4pjTFOJLaShcgTulKEVQg9BXNW0oeHO5Dy7t3N1dUOiZV1t9uJsPeRHBJ5UGRF1SxCk5qJJKta++CrtjHVEjMGQsTKhxBCYeNyIhfgAtaG42J1cejO/Waz2dTbNJbD4TAMiQLtr3YkeRkelpZbqDPWlzERFxlGC5ze+6oOTdNMlc80ljFVIQ7jWHKqqqqu68qHcRzPfc/FBr9jzn0/PD093N3dHQ4HIx3e3t5++Hi7n7tcXddBGvf7/fX1dV3XIpLSICJEILlkUa9e5oSDlNgB5jGNJeUBlEJ0dWx8IEJv6RFP/hPZqHtEVARoLTGvwqqkktPgwNWVq3zrPKogS+Yix8PR7AwRtAohxhiqxscgDPdjOp1OOXPmQkTv3r1rm+1hHEVkUzcmzXM6nUC17/sQwvl8/vTp02JE7AKVUn73u9998803Nzc3zPxwdy8it+9v3r9/P3bdnO73fd9b84+Z7+/v27b967/+68fHR0PMPgzju+v92J2bzRWpEqAULik3sZqItCt+3noGuPT4VyndIiyxvi3yQ+ss8NLbm2lRoJMCCymAN0ttNWVhpZnQ8CbIYKY3yIRR0NX3X96zzsBexzyd5Rj/jPj37PHLOAeKzydKfIbzXMWeldP6AiFct/dUVYyNQICOUFWQFBDQWbZnSBAEAEMCAngXAIze5hEV0AGig5ec7EmneJZc9U2M7Fw2RzHvRaSUYGfOikfIjM6BD8QsIlV09iqqrRZZmEVKSSMAqNFdJUthC3sgxSpKIALCKkIqADCWrAjsnJBT55C8OgJChoCTU6wJk0/ybzPN3FwBSQEZEBEjFQBwqCgoSAJIqF7RMjwLewAyZ3DgZ7+k9fJkyfCWJ5cw8LpWribdgga9IVXFVdca2QYliQg6MZ13VSUVYiAmRmJQm1aY2dmRl7xUSEopmbmU4hAAxNnSiAKSIjgXiIicBV0u3vumClVVlTRGQt82u+2WCJn5ar/95ptvxsejvTdGbwAWKewId5s9wWIHCMsBsaZqKSnnSSfae9dU0XC8UBARmcT44FVVdeezc87mdFPXXGSclqzIptGqqna7XV2FruuOT4eh652jGCM5l1IJIXgfc+HD6UjeN+2mqipVHM4d5xyCC9F5hwAEHn2g83H0FEJVc5HUl75LXTcOQx4YOReDlXoCgz+EEMzL3hIyciBaOBfOJQvlzCmVlDIXzTkPOZl+8XQ6OBVOIiW4KgSnJm2MujAvfXDOk+U6s4zXZRTFypv2mKqala4tyFgKOaxdXVWVNwMyVEcQKOSch354enp6fHw8HB5zzg5xu9nc3t58++3H3W6rquM4okgT42Z7s9/vjSlxOp3S0NupUYXg3QSkAi2FlYWFZehVMXoXY13XMcbaluZcUillzJPAjS1lvPdNcOu5SVnMDgukkCPvyNZRrJpLzqkQBfJexRQrSURSPwzDMKbSnTsV2bYb8q7vx34Y+n5MJQ3DcD4c//CHPzRNw8y3794ZvWEcx0+fPn377bemxG36AMfjoWlOtsv9uTPCyX6/r3wQUZPCOZ/7GGsiqqrm0+f//vHjx+54+v67nzlPv/vd7z5+/FhSfnx8vL59P6a+biJ6fHp62u2vsqjDpakP5NzcixIQgRm7qVxA2GYzFRS2B5d/MMtMX3RGlkgz8xpgLmBaNVxnOLoiGFmPTHhrjmhqhu846VKiAQgteMBFEX8dfpYy49di3p8MeIu/zWrqe5YDvGgMrcPedLnNtqcAOKmAW4pqz6AHACRFUSBHCgwegIFslaYGhzd6oiKhCZUATPVMIO8tt1MEh6QzhhMBdep3XmgYoDPH3Xvvac5JbSK2BGiRjSkqxIylELOqRmmmbMNybFERFi1lHADAYPe6AFJUOZnUGSOLckFl08ZMKSlAQWUkISeEQA4QC0RYeN0GxkUAy30VEOHiZ2j3OFXA5zUQEGpR8JNWpyXA
ylMFEnIuS0iDWY5hOUnrsziHvWeS6sugQTISGOpUe5g/XnAqP4gUYREhRVUtws45YmVmBhURx7a28MwsGmyFaIVBVZ1AvmRjyJkQJSIKlGjpBSpAjMG1bbupq59++undzV5EuKQY4s3tdVVV4/kUffDkJnwgKfoYY6hDdN5Ous5EKbANcM4pC+fCOUthmIsyxr9e1mgWS6z0bSIiRDTkMo6jqpqSllG/TcTLe9+27WazMbGMrpu424FCFlE1Up3LMnBiF6q6bs2Apu/73X7XtjUA5Jx3uw2goflDVTXehTSm87k/n4ZxzFy0K52qcilE1DTVbrezkJzFROCAFCa3ozHlnDFuQb0KChNzYlZmtSCdUjp1vVlhbLYBiFIaah9stY6IlslZejfrelz6FgsFzUZXKUVyAdFY+cqHNA7WhfJIOQ3DOABAXdfnUzqd+vv7+y93n56enkR4u22vr99vd/XV1VXT1AqcSwYs+6u2aZpt9ObFY6Vmi1UX22fn7LTaPSJG1SqGZvbUNU2vlFLXDTCrYCxN3Evp0uY4FhNSENTdZjuNeSmsqKqOMAan6gCAbZdFOZXCSQRciCFUHrFpNi54FXc4np+engZlVZVcLGI550xNzbJPK4PblXU6ncwyMOf8+PhoriCG/0TEXbuxbFhV0zgeDofdbrfZbLb99l//9V//9q/+2k7Qt99++/T0ZFBkyaWwVJsakA5dv93tVRXxAvC2n55ygHkz1tmeiDDDOpl7sUR+fZun4lemB7PnH05IBVj/ol2FMH0KL9meLTHpZZK3zvZex7l18Psj4mT/riLn697e+kKwitp0r88yPyKaZTBIVWm2imWjiJoSMQYlR+gUUa2QCVPrztGlMjfN0jhTywhpZcM7FzkBAHwMThEQtRQQQXFY2ImB0qy2MDud2pUTSpmyPSu+ijJn0VLVrSorGDaJLyua0YOKSIFiq6QCKqRSSmHQYt0ZIDasCkBGB7MSuc4ENZgRkwBKMpNUTClcyzRmVFVYRVkZFZQIFWQOe2a7CgCxFJp8f746Opc5i+hCczRS7eU95JehRnpZ+NgaoqioklMvImYY69QzM7GKyBz2mJmlgHcoQurVdEdNw7P1kdlVYdJXXSJuAQ2EDjV4cs7VwdfBe0cfP7xv6ggAeehDCFf7raqeng4B0FcT45gIqqqq68p73w9nEJ56wCYzKFxy9l4L55THnJMIL4ILqFpVVaiiqkLAGKOoHo9Hu/Ym4SXmMh9ee2wSzxbzjELQnc79uQOA7XbrYxiHfB4HZuS5pr3dbqtmQ0T9MJy73oCRbVujmbshFuZSyn67I6I85NPT6enxOAwZIRA5VSFyvnIxxpur3W63I9Q8JlVFIkKVUvq+H/vBVEKg2CINZkVfInSO1NYBOY8AEGOMMQiaG5QJU81K3DE6vzS95nYGIiJ6T4jIhEY9QcQQnCMKIUTng5/KoSqmnT3mYSyl/PQ5nU6nw+Ew9Ml7X1Xt7e3N7e3tdlfP5VP1nryv27ZpmsZxyamMQ29H3hu2EnQqr3OxEyFT0RLbqqqqKsSAoDmNwzAYtV+MKe/ishBEo9oKL7Aunb8VVGKY3KPQZl4Ah0pE3WCNiSlfMowdkCMffJQhpXEcsWQAqGM1hOpwemrbVoBubt6dz+cYY3fuU0o5lc1m07ab0+l8c3NTCpfCfT9sdw0RHQ4nIn97c20ynoS95FLX9Xa7TSnd390Nw2AYovfvP/7zP/9T3/c+uPHU/fKXvzwdnkopjvnu7vPm6h2wYPDe+8PhULUbMol+AMLJ48XkUKYF36t/wIqiwIIy6Xku/wDR0j1RWWjkeRGaBrB5Y0lF4LkL+LPy0qq3hwI053drW/Jnwex5wHsz/v05Cd8c1dbR7u0i5zqcrwOhke3Wk+380Hp+TESqjkxuUid6GLrgkJHBMNwKnk3LBS3nQ0WLdoTu8v0TyhFtHnZgLOpVzAMAG65+27TMHHJm5qIiIoVVRLLV3CxkMbtJ
SVVjKSvov3HUk4gAGj2zLGHPwpWrtiDCPEJh4YRFRBMqsBQFKCo8mSeokAMAb2eajAg57c10oEUt7KmadA2rKqXJGcAQUSgiyiQWsIERPE0QF0vblhLcusP3Ypw9WzusFFR1/RFcK6Bfwp6bSg0qIqhTwNDJEGTq9tmrJp3DqxMjIiEE8rgAQ+ZV2yxRzbyrK1ujee/r4EMI3pGq5jQI5yaG7XbrCPq+dwibbQujWp+jlAIgJu0/jmPbBFVv6BrEKdUTkVL6nHMZLwi95VDUMYa6ZmYLe13fn06nTdtaKrOQ2CzsHR4eVdWoC9vN5urqCgBM0MTkMLbbLas8PZ6OpyOAdyGkxL6K+911bOquG87nbhzH7z7cWgX45nofPJzPxzF1VooYx/H81N3dPTw9nlRcXZNzHgGdc9E7s3UNIYxDdzwe27YxVQxmzmMax9E5rKrq8XQ2q6CFaGXnwrT/jV9oB9/FUFWVLVxgFpqJMdpae2EFLVUEQ3hO1QkA51zlo7f1lsLV9XV/7o7HIyk4AlV9eHj48uXL4RjM0jbGut3s27a9utput9urq42IKBTnJqc9Ezk7nzorSNpSw/ZiXYCaepzeG5+k0UyzdKrlecyCiO1mg4hmKzOBp6y7oBdA41xuEwCwtB4n22fRGalLtGdmFlZVdOR9jFXlffX54R7RlSLjOCJZ4beq6zqMXQjh3A8AkFLabreoUFVV13Xv3r2Lm839/b1R12OMlptawnc8HrdtI7O+69PTEyK2bdO27fFw6LrOeKIi8td//bf//bf/9p/+03/qTsc//OEPf//3f/+//q//e922Dw8Pu6vbnHPwoWmaz1/u3tfNSnnimQ7nul+7PhrrPG89b6znFlq5zS3ENqvEvQh7L1KTrwWnpdS1vPjHY9jrV//MIqeC/ok3vPqeJdtbLqVn9c+vQDqX21zTsuYOAKAQKnpVAPSMQKa3ad60cMkHljwPp+qgAwBrqK2Pp73bqwd05H1AJvODIQFV9cwAoIIrPC6o6sjDcg7nVnllV9froaCqvgwATsQtl9P0cSuZvtR91tG9JDYuQ2fNtJvqNsy1TEmGqoCSKmuBwuID5FxUWciRCdKQCyEkREKMk7wnCoDFIEcO3dSbQRKFoqCADt0W162+52fRttxCndXBS4w69QDEXKomQaFSHKl3l763faynvFxdysWhC65lZjGjH5lsGC5jyFkpAEMIsfLeexQVkWZfOeeCdxYSvANb+3A7Uh2Di5pSHksSrGPV1gEktW0Nmg/HB85jCBiIM5+P9091XXPmoRv2+12MMecMRLurLQXKaXDOtaEehyEdjzUiKmyaFgCOx2MZU1vXMTjOyYOmnLyWpm2uti0pHw6H4+Fw4qhhW1XViP54Pj30PYPbbNpxHK/f7ckF50V5uP/y+9Pp9PHjx8rdU/BVrVi6lFRGdbCJrj6f0vl8/nL/eDgcBaXZRPZ8ltO1R4Syrbf7XRTJp7sDEd1e71NhRTgP6Xw+51x83SjiufBxTKrKCIwwmyeq8yQFVIVUnHM++Nk/T+sWhH3mooCxdqHyI0vf9ze7TR6T927XNp5gHHrlwbtYq/fOBw8xkvezbwjQ4eku+KZutn2Xv9wd7r4cn55Kd64Z7zSyQ6zacP2ufX/zbrNtKvLd6aGtm23bxBhROU9MIYYmpJxzTkTUNnX0AVGRpes6TgWkVFW12bZVVaFCKeUkWYtKYRFRAtdU0bwGQIU5p1FVyXJZ9ITkfdQ5EUfvrfjpnBuzubXoUuRAJ151HMZYV7XBlES8ByQW7bcbdzweg4P2KqQ05nyKHq52Ug5Fz8eb/eanzw/vtzeV356eHnf766vbv/iXf/mXv/rrX77/+V8ex/J0eLze7ZrKH0/y6dc//PJXP+uH/MNPn/7xP/ztfhP/9df//P7dzXC8kzFu2vbjx3dfvkg3nD99kd1mu2mb3fb67u5ut9v99OkHpPK3f/fLP/z4o/L404//+rPvf7WJ+8Jy
0zbjw8Pu+qoOIXHOQ45Ng4hOknMgPIoIMJOqVyEUQjbaMaECCcI0OxECkvrZIcEUJpdjCJouM95z2AfApLAFNKU1gFhTJhJCIASAgkCGWnUOVUGRBACUDCQDIuY89zoevw5405TCk+suPv+UWK3O2Ge2urFpFHmK22oeCGqIF+PVWwJA4BTUPpuRAR1RpczIrKWAegAxQyCHHggFCioABVSKnBk8kRZ0RFDUmnmewZwGCRB1ZqDDhba2jp0WIAoiMtoq8+LEKzRhzr08E8N2MovjqaDSWl8HVFUoLIf1BT0FXsUwAHBlxi09/6LCbJAVe9UeIwC55+PgecF6rX0wJWEhKjEhackAoKRCpvrPWooqI6kjpFlSdoGlLZhsP/Ou7Gbr9EkObQGwvQp7uGoHrnd5kaxd5/vLq3YMeKKpqqrG+Qst9lrOxMwqpJcGOC9hDyYjEvTex+idc2T2tn5CdS6Mscm9sozLJec9hVBVsfLe1FonGzwiEuGSk0lHGurSUgr7KpOSijHKLP1nN/stYyao6iJcYmKYiFhVVVVVlpkZMevu4fjzn/98s9nc3d2ZAEdV+ZSS+ZKnzF++fLHUoWmaqqpExt1uUzeblErf9yqOvGPmw+Hw6dOnh6dDCKHZNgYiVdW23Sw0eQEylcWcc8qFmYdxaoCZmtr5fE4JDXXMzDC1HWBZaRHiMtGbCsQ49sFXdd0qoInkB+dps2Hmtm2r4FQ45xJC8BSRlPRSArVTX1RUsak3w5CeDk8P98fD4Xw6dsOQc+bEw2azubm5ubm+3m43dYgOUVU3TWsNVFzBCBHRLOWapqlCsL5sHpOU4pBCiEjBITHz0PU2DJKmeS0FiOhmQjBnE9eOgS5XAiISussAvtT+qfFxCXu66nXtdnGx3UBm55zOhBbvPbhJ4gcAWKSUstvt+m4cx5GIoo/m+f709KSO/vEf/6EfzqWU97fvN9vq//0//8//9//8f6vrJjr/448/Xm3a66uPP/zwg0re7/fDMEzD3jurmghMpR3r8xGBqr5//96Esna7XRqL9Z43WzaxDhOgWaYdw50ta25967a0rF5MVssUsXCl1njF1zniixzoMoU/D10vgtYfv6l+lbf3Zv73x8Pkv+ul17cXe7cURV88b5B4ABChCcBDBOAUQCdVY9S5gw5zoRhmBdElbsilyfecIAgIAH7q7gowMxEbBN2SvHlAP4twpO7FJL4OcuvDN83U4kmBwcqagqIMiqL2WBCWZ+yxkaffjCtLZ34ekYWZvYxIrEAMQECsavJjpbAym0MrTbgU9QQBgyPnnTfSsXMuOO+ci0SOZuYxgXPOO28YR1y3+p6n56/3d4Edr6L8dHnALA1j88I09VjFbLrMivd+WolP4KCyfP/S4AUAcrCAF0hFVf08VyGisUQmnyCqHDkAm3dCVVXRBwCIPoypmx3d1PpwKSVPZIBDizpmptO2rfcUYxQpdgoW21gBSCkBC7A4j0haUhnH0b7ByoxpHM7nsxGubbyZF665ECBiKWW73YYQxnE8HZ+KcFVVTdv6QHXcVE1tVUcRqWKTijw+3n/+/OXh4WHMpa5rC9XOkaFmFucHAVpcSfsx0croyxTRUkqloHnLiYjz5Jyf2PpOENF0Z5yVAQARccza1L5p28IypFJK8bGOPk5xOrjz8VBSaurKFg2Wf4fgvPc4EVpIhHPOh8Ph049f7u8P3blnRlN33G7a29t333777dXVFREoMwgTQh3jsshgzkYLAwAHWPlQxejJWf22lKzMTVV7JHIghW0HVZUA2Anp7M5CztgpRFSAnHOVDzFG55ybkR3TGn+2/gAAseKSIxD0k02BGldSmTdVAwDGwJmURVVNK66OlQs+hEAKDjBx4VLqqyvQ493jo/ceHVbBv3t3/fs//Ph0Pn377bei1dPTfYw/u3m3q+s6Jx5z9+2Hj1/ufgwIqno4HOxQd0NPRKySUkIXttutT9mi6dPT6Wfffky5Tym9u70+
HB66rtvtdgc52dLnKg3bXV1VoZRSmG2tpwCF2XYbiSYbz+W6nt8z0X5WVpQvQsty4U9zGkzZ0fLOdcyDqf9i2Cgw6PoyvbzGIVim8Hz+eYltgefVyBcPvjZ7vxkIX7z5+TN/jArxOqJ/jdiAGJxlQUgKSOgUnCJ5y1uAdIZbTN88ibTJAj0ENB9W0yd7pjmgAAAOLNuzsGfTuhji4jJfP5vBAQClvJjrv3IU5rOupi3OwhCBVVCBF0KuAtu9PQ9KVbAZXhammt1P9recRYA5E3kwzmOoAAspiIBQUiA7XghO0YGaMxF6JG8ZHlwyOe+88xh8cM45hEvCh7rEOVmduXW2Z7Pn6712c9jDlfTccpSWI7lUa2EOeyLiHDGz2soDGABehz023IW7sKmsbesucXkifhj1wtRSLFZVobZiF+cRIEhhs/8oIuYlLiKqZA4PbduG4MdxDCHUdV04wdyAlJVhnvMh51xSQsSwqgTEGSBvCL3D4WB9vu+///7u7s6IWdvttu975mQW4ZOaVwgRY13XTdsG56+vNznx4XzIuTT1JoTweHj46aefjqdzCKHd7pxzfd+r6vX11c3NTfC0AGIBJ9DmMAwpl6ZpqtkD3YKuc87yvCUd8d7P2bN3zgXviWgi1AACQNM05rAjklBZBQjEI/k6OlQpmUAQdPZFJ4vK3htiTZbU6MfPPx4Op8PhOI69zWhVHUMI33z3YbvdbrdbTyAiIOqJog+IoGxVWFEuy1LsetuKCOeS8iAiKOocuhCdzZmFOZeSs/UUGKCqIiqgI2ekT+8tsYOqsjzPe09wSeinCDkhCCaInaoqApl6F4Ci5XPPGmA2PkMIbAKeKojo4yTig4iUSXKJoc4tPx6P5MOQCjNfv3sfo/909+Wf/+Wf/vIvfwUAx+NRpPxP/9P/9Nvf/vbz3elmf3Vzc8PjcDgcbq72dV2dD08CpvmuOWeP9rtgKSARjSVz5lyGbWpDiE9Ph3a7dc4Bcx7GcRx3W6hCyJFNFX25/Jdr9s2pnIgmWvk8fS+53XpyWCaN5fE64C2H68U8My2yV9K/qm8w8F6EQ11VmNYx7/Us/WakfHNW/9pHXrz0R2LesuPrWKeqaoY0cCnMkpIqCKhDp4AKBOgAHQIpEJun3rpXZ8zp2dXPrIlss54ZFC3HGQms7qWqYlkROEExiXDhVUpnSD+65O8vjs6L558dDqQldC33S0iz0uazxx5MUXsdII0Ma9HIpB9sOCAIhhqxgAB4RAV1isxCgo6RFylwBGN2ghJ5IkfknfMODYXnnRFu0RF5RJvmnIG5dF4m2D+84EufDd/lCNDzsPcqF3yZBaIu6SAqo0MSV1QJxKmqugk/ZisaADAg/lSenUVWEdHKm95KrHS5wELApSBcGyKRWQqkPEEZFSXnnMbRuCtpHEvJMcaqmrxbqzo4j8NYSikmQm5hD1G9Jxcc51G0EBKIicFBCMHsrayIdDidh5S9D3VdO+/HlLq+v4qREK3k+O7dNYJ+uf/SdZ3liJbtNXVNCMNwHsfR+8Cg5+Ph4eFhsl7absm7ruuk5P1+f7O/qkMkFAtpMUYgb+VWAKjrWlVNKszAFxYa0zgAADn03kXnEVC0iHDbVBYDENHAU3bOd7ud997OWQjBhcrSI/KYhl5Vo/fBO1sjVlXVNI1dDiKSUjYJ5pzzl0+fhj6lIZOCJyRy222z2159980OEREkjxkA/LREQxNFAim2xFkq8ORcSnnsOi7Fe1+F6B25SQGg5DwaCpGct/EZYkWIpmdxqerDhKKayk841T8BIc3Zh1V8VY3joMFXIsJ6WRxnZmIuxSYXRFLvfPAxADhwDBp95WMAgJIYlRx6zoIKTVvtr3apaDeMKQ91DB8/3CJpdzref7l7//6267ovXz79j//pH8LnT4jnf/u3f/vlr36WCR/un673OwWoN+1YRu+ngrZiNgwqCBfhpmkeHx/r4AuXw/H44f07M6RFdCGYD9855TFWdVvH
x4eDUVZsDbRUmGz9uq5YTqvAucADE49uudINC29mQTKL5Suu5uF18HuxYlgiJc8e7YSW2M2oTSUAkon/Rgv+hoBsYTy/GeeZfx0vLyHpRaj7WuL4Ory9ioz/jphnlcwVNn4qwwAACakCgYISgXMmLY0OFBUIHMIEnJ+Pm9kywJT04SxCqZPl+lor9LKCmIQw1qnnlKDQuoF3qXkuZkivs+Ov7LdbH9OvHdZLVCC05McyPBFQ9apYCjkCZsekIoVQqBChOGSkAkqIY7GBguqQFEEUhMdlCE0C3c65BXJOROjNoomUiGZWw+SDhIu304sbrNL2Fws3NxtzvLm/L/I/VSWVy3FGy/MmjtT0+vOwBzLNVohIOndoiEwNefp14OXiKWWyPvfk6rqOwUsuUsbudAYUB5BKyWlgnvjyQ9dZkHDO5ZyrqmqaeunezeihaZfXVV8bSBZLYoxggh8pmb2AM6/2pvnhxx+dc+amlnOOMe52u7qu7758ub+/F5H9ft80jZnVxRifHn5k1hgjAD49Pd3fHSYNDi8WVnPOm0378eNH0zKOYVpxW9hLabILiHX9+Ph4PJ2t+Ga4x67rnKuMe+cNHKlsUgNrApzBweywW6uyFCaiKsa6bbz3IpCZpRREreMuRF9K8YRm5m4hdhiG0+lgnEWzczZjJkOth1BtNu3t+2vnxFSvPJF54CEiiKICIYJzRGRleRt73dMx51xKDuTqEKsQAWdl5MIgQoDBGSTHAQBFt5w7GzDPWyLTUDP9NppU7Zfe9mKepd6FIqwmYrAUIYjqOlqVfdFfteuijkFEXPDmNWFj0vJUILy5uTmeu9O5dwgl9998eI8I/Tj87ve//cUvf9Z1p5zzr3/9m5///Oclu1//t3+6ud42TTWk8fPdfS6b77/7LnOJ3tn1VUQWg3gRCTF0jz1AFb0vRUKottt934113VYhsEjfdX1/DrHy/mIwtLqCyjIpLwgGmBt+vJr+/3hepcsC4RXF+3L837q9SC3efOlrsQqfdwdfT9dLU+bN8PZmzvf6Dfqnipzr3cSV1hWtwt40HwLJVCNEAlI0h9g58zP1EVpBQ4lQwWjt9kViRrQTu3/aSFRQdKggyrSEvRknuRJOlbfDHi22tl9Pe58dKfrqquHNsGdUlhmX5EXUHA+cc8zC7OwZ76UUzyzgsJRCLpQcKEVxUbxXP0LxxQ1QvFMhFI/kvfM+ovPognNkEvroHJEzywSgSWYXjcsGSORnSIsZJLyB50R8rmGGb+zXi0C4fsnZK8bv96yqIF5VxRVVRV31ABQAwKtY1FsOoJvW4uSRjOwxcdMJiEgYFmcWH5xzKEUABFCIkIFTGmx5yyX3fW/GLk3TIGLh1LZtXddd11VVsJKPiTRam4yI8jgol+AoeOuZDR59CJEVcs79kFJmcsF7Ty4U1q4bzEd7GJIj2u/3IYTj4fD58+eU0s1+d7XdNFXcNHUd/NB352GMMYLq8XC6v3/ouoSOnPeAIiIEuN/urq73TV0jiEphdpMvErPKxK5bOpdGtACARX8rVDHGWIdoYBxRCeCJiDkTgZ9MrzwRWd/LhGVRwSHF4KMPqso5iXBtriTBE5IL0ebQzJozG0n/fDwfDkdjcXgXSynCJYaqqmJdV5s21jGAdp7Ie6NlzksoYXJAiog+OG/GeJa2mmliPdkhOGHOeZQ8zdR1XZsn/QJUKXgxmLys4xRo8r9eSRQRIVHw08R0GeRKiMAqxRQ/RRHRBe+NKUVTfDVZHyJSQlsJTVUHBU/O00TkyDFBP+58LCrvbq8F/Pl83Gw2TVcNY7dt6s8//uQrf3t7+/nLTx8/ftw29X6///z587t31977z58/lzRe7/Yx1MEbbghzf7Za5WazOafJ33EcxypsvPeHp9N2sz+cjs45IDemZDxO3ozOBReDEmZhEXEQyBGFhbCvqA5lMbZWUvIwTZJrzMs6BL6AoBdmnbQzX6yY10ZmaK/O5TtcP7N4rqoqqFNB
dDYVuHWfT//c3t4b4flrofePRL4/Hu1gVSfDNbl1HoqXsEceJ0qY9dMdg/dkbukkFwjL/OvoUUFJFrMIk3o2hZs5GOsyMwuwqrHa5lWMGQtMKxqaIt+cpU6ALnq+4lj2eWnqvliSLJYYX1tNvHhe5ueN7LBQHozgP0Oq/CLuxYjoEpAzeq0QCYGQc7FicmJhT4ubhJIrh8G5gI4IHZAjdGhlHhOhJUdoXVNCh7O06QrB9haNYb3ja+roegy9QLouL5GFSVIREUWdS9LMRlV8GfZQre00gT8RYJLPZXmBh7bnl6zFITkEkFJKKnkkQk8wSil5VGVb/fR9P45j27Y2STHzBLBMabttnXM5PxumiGg1T4suxsmzlwyfIrP0qPEFU0q73c7sxYlot9tvNpvz+fzly5eHh4e2qt+/f2+inXVdi5bHhwdy5MgPw/D0dOz7McYKnR/HEYFsWt9ut7EKKSUi8N5bhVZVh2FgnRS2xnE8nA6IuNlsRMQs0Ylot9uNWRdUQuEkIhYnRJbzjgDOe19XdVVVx+6oq24Nl1xK6fuBCPc3N03TWAJqNcNxHM0aYpxvZknBzKBFmAGgbqr9ft/EqqqCQ46evAs+OIdOQJQVABySqgKCpwlabAdzHEdPLjjvgrOKcc5ZcrKJvqqqTVN774FFJ9/2MK9aLiPZFrILZHE9vEXEuWBT6HR5zitTC2bTyY3BtOWIqB+zIVl8ytmUiWhSK7WxYak/AFi259WllIIPVefcfldEf/x037ZtHfzxePzHf/zH//V/+99uP97+wz/8w7k7/vrXv659+/333/+3f/o/iejbj++Px+Op6//77/7w3c++se0ps+65DWPvfd/3Vds83T8UUefC57svv/rFL51zzgVW4JJwxFxG5owzDGcxSsSZSQazlMg661I1cMulY/d6pbu0GKb5U+nF5LAc6j8SOV6kel/Lrv7o828/eDEVf+3XX3/kz8zwXs+Q62A8j7RVCERSQSF14BQQyAE4RufAGB0vN0OIQAEVlcQmSQRL94xf6lBZCUHMJBqFgRA8gaiqQyACRVSxX6JSimUiYidsrszxV+RslN4+ZEwvm7evk7/n2R6ussxLD8wWjzY9LePMOZcRhRyCYyTvvfpRg0cumkeKgcsApZAwgXrzKXbROnaCNKl6I4liVTVucrAga/gjAgK5Za1HBERWylEAmqs3L26OXu7mMmrfHHOFky1ukIz4pEZXMIEHMNdL5cv3z+hdRHR+qsqSAgQgIrc6OcsU5r0PbvJY0KL9cHp4+HJzc9OdDsPQVVXwns6nY9d1IhIr/823HwDldD6Ya8HpdGia6nQ6WNqUcwZUQ2AOY+8Q7BBJycG5/XZbshjGpOu6Po0AUIcKnVctRdhCo0VTRDyfz6Yv1Vbx+nrfbuq6Ct77oT933ZlUXKwOp+PpdALCWFXDkGRMNm/WdX19tW+axhoqogWUiciADN57T96ai5bnNU2jgKfT6Xw+E9Fms6nrOkZLtrNdBd47IlTl7aadr1JtqsoUzpjLZrM7Ho9aStO23tM4dDlnB66KPg0dgdR1HZ3POTNiXbeG2Pzy5UvXdWVMOWdmJfSHpyfnXFWFQOgRmio2dXCkdbT4AcCQcwKGpcVIiCKSS8qn0VYbqurQF07DyMBiF4j3VNf1zdW1nXFEDDNGAwAaE1fDS0vJBomtXZZu3zJQXaxlVuci8jIrJyC5uGh1zoAXANhs97ZhvnJ+pbG+IKHqBtCKzyLex4enT/v93hC5h3OXc77ebdM4fPftx67r7u4+h+ju7u6+fPmy2e6Z+Xf/+m9/93d/d3t7e39//8XTX/7FX/zwww9/+PGn2w/vCxYirOtqq1vv/W/+5b9t23os5fb29nB4nGRRu8H7+Hg4Dl1fEreb7W63G8b86Yc/OId/8Rd/Ferqp59+2mw2V1dXh8Mh57zdbruuW/ykZJasWrTOLZzJ6qaqwc0qNlNeMWHBHLzd9Fl4Ry9DBXlAFBUw
SQRy6Kz16pGsFgjmRqNAOvfzzKzczuE65LwuabolcXwV/Ja3rRH7L+ax1fPPYtvlnXqRB5lJC0pEtp4zKMSaQUdkAE4VRQKn4OYnjcsIACjPimt+7l5OYQ9g0k+Y+qyCADrtJYDzARW8GXha6LYEWlVB1c10S8R1L8oEdxBe3SvIm88bjH513N8oeK4PnzzP0Nflcrv2FlqhXaugVKBYGjodF0QoWR0iB5cduBFUPCiBte8iLjHTlJzIAZFBZBVRiBxNF/SLtt56wbLenRcrmvX4WAbBumW9HjcOJkSoEaWXUSdw6SLgqh2NOtsKIk69WAWkqew4GzpdklGLLnbjlPr+rFy2u3bsz+PQjUOnLApTLyTG6ILXFWOJZ0hiCJNAORH5cPGTm/PyC+LVmJl9349jLqV4Fw3UZ2dtHHvrnDVNU9dxmNhqebvdXO3323YDAH3flZQcoq9jV0pKZRiSOTyqqp2bKtaGFEXEnFPOGUm99955S1VLKeNg/jnJJq9SymnO87bb7SxKmebDJaiz7LKblgvek/e+jtFkboZhOPdjXdfNZpNz7voBFeq6rkMUlLpunHPDMAhPNoRG23h6ejLREDtcOWcuxTnXxKpt6u2m2bR1VbsY0AcIDknNrVRBxIanm6kLUz7NF7jg+XS0oWSZStM0m03TNM1URaRpgeithqFgXENd1ZmWBq0t7IAmwSeYCkUT1RUReZEmmiQkERHBken/TkM6OHOXhpUcueVPJPTicjYHYABglVj5tkTnXBoZM9dVbJvq6XgwzeXD4fDu/a1z7vvvv//Nb37jye33+8Ph8P/7r//093//97/+5/92d/+4321CcKzICpFot9udTqe43wMiqww5IalzLiAZiHeKNwreoQKlNBwOj+3+u7ZtLbxN28b84tpfBr+stItfTBRTbjcZVMGSJrqVw8OLyPfmMno94eCf6gUuGdh6/nwx7bxISd98/o+/c/XSy19/Ddj5c27r/SIitmFLpEqKk+tCMbDcm7nsNFZJca44KgGKgClykoKATAHJrL69qQkAoAqqJYaIqtOVZif0sv9KArwQDJ7f45vPO+QXceLFkX1xfPPq59Y1cZqVHpcllQ0pQjGmnTonnpAdFAclgAbkAiFqGdHk7QEQ0fOlXuG8Jzdf084jGDfAoQOz5cVF+honc/rpHnFKer8i6bk8UH0Zh17kfDQhZnmCy84AlgIewIy/1Er2tiFWNLZvcxP/UgDATwtqRUTrSFmUqkK0zVTgcez7oQMtTVP1kohA1WhdOfVDHsaS8/XtDQCIlBCc96TKiOocElFKSZRjFax5NtnpoSqIcBa2rZql900YnUygC4tINwxd141JDBjSti0oD8PAafSe3t1ev3t33TQ15zx0J2UJIRDi0+HU933KuViP16rS6K72eyPRM+c89jnnEBw5UlXLSsdxPHWDHYRSio86DMP5fBYRYwhYJHMzogMWbA6S4UVCcJumsmSFmfM4DN1ZwYcQHVHXdXlM27ZuquAQFbGpAgD0fS6szrmSy+FweDw8dl039iMiOkBVUFYustm2m7rZ7Ta7bVvXsYq+qnwI3pEAM3PSogjgnSMEjzCaScQ4qiqRl9kSr/Sjc66qoh3S7Xa73bYm3wUAONdazKcaFUxCfRqBNO+4uZW5SVGNVglfVrCziBfxJ0LEZCQK9DPDYWo4iYvL/OVWpVSljKIkgs6TD+RDzllZaqm5iIDWdSSiKnFPI8BQB7/bbu4e7r2nzHh/f7+/vqnr+v2H69//9nfNVcPDUDXtDz/88B//x/+0v755OByrqlLlpq7syrq9vf306dPN1R4RHQVVHPoUXHRVFNbgAgCIMqB671Qhj8PTw13YfKiaOuc85tRuWhvnsa5mjRWTFEQgRBFSdXNWYCYN5r6gEyhmXS5eSoLLn6CTgIuYiCo+szuw9yO6CTIKc8HJLNZnhDkBOgGdBFLw5fTyOi6+mIEnNZbVk2/OzC9C4IsHqgrwdTD/
n4p2sIrr6IJTRcVieR46RefQiSgAKaLAQvUQABD0c6gDRVlICwps6MvJq2ApRuo0W06zKpD1aKd9UFUVhAuDx2ZjJSTDN/7Z92r6zIhuoSssWM03CAx6KXLSShyBZneI5VVLRBwB5+II1HkVh1yEHXJBzsoFuShHUEYVh4qILs/yLgTknDcA9yQIhEBqPTFwTiec9zMNzxdrutfnD56r2K2zvTdHJE5ZHSxrQ1pTWdd5nmlwPw979hWI5k51AVguP2dVJlUe+yGXZGpCnEcQmVHRoIWnhhNA27b2wUWlxYpswzAws/PO0C5WObyMVecQxVDEMl3PAgCOAhGpAnMx7D5RbRrBiHg4HB8fHxHg/fv379/d7toNqqZhlDI1bodhOBwONtE7xwAKStHHGON+v0cEVV43F0MI5p9gvmvjmBZlgMfHR1vj13Vty3lDVzZVbemdXvB708kyHUtEtDTLNB7f374vpZzPZ+G82zS7zZYI0jDGugJhAIieACTnfOr7w+FwPB4X7Gthc/fyMcbr3a6tq+2ubesqRBeC956CU2BmFmABUD9bfRdOKQ+lTDOUQUOtWbip6rqud7ut1Wyt72uncqkHTIEHEQEczXITF5X1SfrHwht4h97jMtplEgJcbBnIhJSYAADJry4HUtVxKqc7P0PAAIAUFEBFbEGN1mv2TnKppCqusJQYvSfHQdU6GpybGAhgt9mg892QDPebUvr5z3+eSv7d7373q1/96uHh4b/8l//yd3/3d4fDY2Kz454kVW+udl++fDkcDoZt2e/3T08Pfd+H4GoAazYbLMiFYJy/vu9Op9Nms7GXbm5uENGEF/q+X67Z9dREMHfZl+O8Sv5eZIrLlb68c5kQXmRvy7ct64/V/PMSP/EisH0t5LyZw80B+E+Hvbei3etf/HdAXeCSD7yYVwlwgnRO+R84RKscrkGcFx7DTA2ZspGpw+cWR4s5hgGAI1Lw5AB02QFY0PIiunDQEEBlfg9+dQfWoKNn/8zNw5YvigpWkDYau6rYM6SgoCS6jBtY7pdnlj+NP0OEishIxKDEoIQcpHiQAsWDiEoBKSBsySwiRspTbHATcZ2IECa3eSJUIqVJEA/mYYevbtMaehXb5j/5a8cHn4OJ59Fsx9ikyORSDJlHu6rSNE9dTvZkqIFGdV/5QoDYEnEdm80Jb1I5qaoxlcPTYei6lIc8mh0rmzkDIgJISmnxSYeZmN8PnXOurtsYY0rDOPb2NpwufnLOKxAzcylc9HQ6ARC54BYMsQsuVN7Vm80mxtj3/ZcvX47H4zfvb7///vtNWyuXvuvGcSAHnrDrusPhwKBmv8RcRKSK1X6332w2TVOP42hCXKbTZr7nMnDOueu6cRwt97Ug0Q9jjHGzaW03DcnpvQ/RLyAR5QIAiN455wmaKnhPOec09jlncthu6hjj+XwehqGp42azIQKVEj0hyNgPLviqqhTL49Px4enJ/IAAgJBUtWRm1rZuttv2aretgq9qo06gJ3DICMAlmdLAVGxQKSmLAOcy1T7nBYqRAq/bXVVV9aYxzomqijKbdYbZAMyDHAjX6G9dlN5t5UukhOTcItZnr0YKfd+XnEXVVZW5eYiIJ7fMxZMo/rRQs0KKQ8sapwYPIDhkFBGrIyNiQCiIoThV9eSiD0LCrHWsxpD7/kwO6rr2Mfo6py/34ziGWD8ND1dXu3M/pJTuHp42u6v//f/7u48fP242DTOjSikMAH3fv7ve77eb39zfNU3zzYeP79+/H4Yhd7Z6MDIGS+E8JiIIMRIqKJ/PZ8M9Lbp9OFs4wVy/ecZB4pdQlHXYWz6yhIHFw+jVPPAMLHMJe5bNoOAFiGQtngu3D5SmWRTRMj+i6c91z201yV8i1p/M9t58dd7TlzH1jbD3x+LFJXDgqnR8+Ti6xZGCDMyJlxL99BL5paX2DA1rkqAwDT7LQ1FNgBm89f1wgsNeHOsR1cKknbYZI2su92ASL8/v8bIpq3sFcoCC
6AAFHYravU6mgg5RGabHpFD0UvteWKLTAJu9AGFecDEzmPwKqhKhEHhlT8iByRGIMqsU1CnhJYUQHSkIgsH9p94DoPkaT+Q+IgRUU/l+bjO7DOV1qHs+xC+H/muLsnX8wxV499mDFayA5gtpOcxLdxhxkrcTeVZMhrmzMgskJgDx5JCE89j3XSmGKkylTHcA5L23TpgJVSxJQ0pJRKyXpqqW+timWs7nXJiLOUbXYwDw3jsfLK9KKeVcAMDwdaWYZflQ17XpT5Yydt1onkSOKOfc9+dh6EKzV8VSUsni0bdtu9/vN5sNlzyl+85VlauqyhZ1AGDWo9ZiTPOtqirzxfXeG5zS1GcIYWEi285aNtZUwVK9pZdsQ+Xh4SH1QxX9tmkJNA0petpsNn3fM48i4hXT0B0Ph/PhkJhTSs65xazYmpr7zb5pKmfITAfBzGlVShEpxUCGNkhyyWnMzIzomDnnYkln0zS73VXbtjUFY5/C3IVawJM4ly6XBhsiTtjgVWkXEXWCMSOZ7dKyHESMLi5X4gJ4ERFaNbP00vKDAZGQ0DtzBLV513jFgqxFVcG480YRnMxGSjSFICoSK9+kOAwHIn+13zIQ+dht0zjkqs4O1eRPf/GLX/y3f/nNz3/+cyL4L//nP/3jP/6DA961zfF4rKNT5nHs27blz58ej4fr6+vNdrvdbp+GZJqx0QcRUJBSCmV0zhlRwUr3Rlq1YWMH1vtVcXjVmZO5TvMi7D0rJD7PBV8EtsviY10EepHt4fqZy0T04qa2xJi3cB1K38zh4DnC/I+Eveex7bKZr8Pe8nN/POzBKhNYdp/MKf4SQdZRzJJcfPGdiBNlT/EZuWHO/JAmMemVfgiAty/UqTgLMAl8gDk6TYuJiQeBy3z75j68+bwJAE6FTlhAF9MHzFDRAYKquQmHmSlhVfKVNKggEvMibgkigKiOHBMQBRWHwqAqQsgijqZopyIiaFEBIIqzdGkOcESLI69Vj6eTZccSZvzRG13l18/j7Ef1+j2wKmU8q2kowVJKtovC9Fnm6hAgqF7gLW4KfvaMwVhsHaCI6Pyl6BSdBwehiufzOQ1DCEFYT+eTiReDeuacVUspBpSoY1PHirmYtHGMsetOiIjo7fo39KaJSi84iPmSLgBeZ9HRpZTnQ8XMQxqHIS05CjOfzofT6dS27YcPH8wa1Jz5rLRYSj6dDqkfKh96VuZiqPf9dn99fW3UiDQOqhrCggnFklLOuevz6XQahsFG+SJCZuKcFhis8Dux/oERlQAEptjW1rFp6hg9gJZchNl7Mmxz13Xns9Z1vdnUIQTQPEm4qiBoCCEVub+/fzoejqdhHMdSOJXknKNIdV37yhP6tqmWTBpAnAsxekeQc8p5dKLk0BOqasolDWPKRoBTEXFEVdvGqtpsNm27jTGSHf6ZHUtELtAq8plJy6VE730FK0jL0t7z3ovNrbMO+8RYVazr2leVzlLjSBMgABGBLmAWmYgr3gGCIwcT3d2GMU59azIHLg3oQAUhloiIxluVwg5VK1DVU9ezwn6/fzicvPe73e5wPKtqyYl2OxH5/vvvf/2v/z2l9P0vbv/w+7vf/e53V7vNbre5u7//eHsdQuj7vgrx6urqdDp9/vz5Z99+17btWHdc9Hg8h1mVV2fJUPRIBDmlMWd0Dp0bc3Yh+BgVkby/zOarvMk5WNZM67Anz/T9X0wRF3cqgGe1ojeDIgCguZDjmsn3csrVFVx8/fiN0Pg8yH3t8auQ9iryPc/8XuwCvBEmvxY41kouhJOWDQGhAgpMA1jn/vG64rhecNirpLMJoSqAKIGqU1Ayf1bnAcCWMGtkqsCKxwO6Tr3tsfv61r/1/KtX18eCVofbHlvvQVdIpOXP9WlbBoQzG0NQsSRNVQQQRQicBlVWFlBRVYt8YfIjnuhQ04oY0dNK6lkULkuKS3V+PUC/MjpfxsXX9y/WXyb4v3RQRQTJHNvnpsjK8fnyJSt3LovTs7/r
pQFpLPaqqu7v73NKu92mZDmfj33ft21zGE5lvgGAqaXs9/uHx88GCAwhDEO3IIk2252lekY+syJYNh7fcmrg4hQxjiORdz4ufT4LUEQ0juPxeBzH8cOH22+//Xbb1N35bOrVForG4XQ6nUhhv9//4f4kIghus2nevXu33+/7c//w8OAIY4xNU1VVhci2Nh/H8XjszuezqlZVpTqJc7ZtOy3kCzvnbEdEZBzHujL5MV3CoamJEoF9oYiE4AGg7/vD4eD93hJHUHDONXUjkk+nUwihCjFx9/DwcP/4IOBtPpU5+4+h3rato+BokjZFBFXySCEEQi1lFBGPF0bBoqMGADY1V1W93W63u12MUQTGcWxCTURWqgXDOaOIyBRZ6XKl2KkJMSzZnph9sY2W4B0YK30upjlCRE0cY2xinHZnFi+dZLroUvqzQWDoR/tRg1xZ5MMykWxlCrWTrI96Y79wKaUoEIFEqaWqqpCK1hT44UkRqqrixwMiPj4+fv/993/49Hm/379//77ruuub29/97u73v/+D/8XPiOhwOLy72m42bUqdQ7q5ubm/v7+/v393fWNntuu6ru+3bVNVlVHsrQxOmYgoQRrHsaqqBUxgCd86HsjsQ/Likn8dV742T66ntRcZ1Yup5vnyev3qH4soywastxNexTZ4K/97M279OQHs9eT259+e7SZMkc9WZEQkXwk6f/JLYZWE2GPL2vGnPq137E/vp14kUxffcwBYiOYv3l7oj23x65/jPK2qRGQWikFVzcyqKhM+XkVEBVVVFv8qFmYGEZDJpg5VlU1hVGfSPHi0XssEDyEAACacFKgMdzoZS5lLr1SXQ6EMAKhCAN4jLfJgq5qySddb1WdaOtB6GDEA6Apw5cplOIrVgGTGdqqCrKWoEREzlyWwTVxZKaoavev7XqG0bVv5UEoBFO/9/fGhjlXh8Xh/350OWjJLKv0omh7v7q1HOozjbrcz5nhsd1MWhCIlK4g5IF/tNqp6Ohyfnp5EDPmCfd+3JCULEwDFwvpwPD8enrqh7K+u2najCIfDYewHM5SJMT4cT8fHp5SHj7e3f/GL77d19fR4/+XTZ1La7a4Q8XTss4L38elw+vHHH8MuxhjNnL3yQURySimlbVOHEOpYxRhFue/70+HYdd0pkZELF89bO1bn89mqmgbVWcQHmtp4adOEvhAADLfCko0OcTgc+v4MAPuripmbqtpttohuOI/KXMW2brc/fbr/77/7/ZfHAysi+T7nvu+vKBtwtKqqKvq2qn0gr9i0FedSV3G32wXCYRjSMCJirKbrq5Qy5pRzTkVExMeqaZpmt23b1odqGTM+XMTq4DknYUagXHySiajQG5h4nIH1RM9SQ0TUUL2amNZX9EW9BcEBAIVmfXXrSujLitLL9WuPc+nymMZ05iGVPPCYhqErKXdd99vf/h4dnbuxH/OY5O7weHP97rc/HL777ruf//wX//RP/4Te3d3d/fff/na33TJnp/I3f/NXP/vmfd+f923z/v1t33XtVTAxhP1+//Hjx5TS0+HRouN/+A//oe/P1hN9enq6vt4T0fu2fff+fb29Kup9s726/YaqTT+UqtmUwpwTiDpkEhDNwMIEzCwlX0h7nI2ZWkopeVztMouIlkvl80Wx8XmEW6Bwl4P/4pS9ON1TNu8uWASCy5tRVtP7KioXyZcZWC6RbwLQrUArdl9Enw+GOdd/ld3OH3km03FRqyllOQjrQzHYLwIBACNNToboGNT+LACKwDqX1lfpkEzz6uV5wRWQUAAACgjM2d4bt68F7Re5zuqZ/yuM/TeeJzUpagKa+2SKSG7ihUx6MYiopCqEk8mrKqCzTgwLEjAzqoK1J2Ud9qZsD3UObOAQBNGjCuBkaLAAhmjWFIUZVznVjBEBRKxCi4j6J3ZwXdxQeNa7Xlc27EBestv1l8xcqPXQX97JzOQA0TNztur4vHIfx76k0dhyLLkkLpy0ZEQszFa8wtnbfXXe
dUoacYKYGnRQFb13iA5Avffd6VGAkLxS7ofU970whBB2uz0ijjmBNfmcM2RmSoNo2TTtzc1NDMGYbYjY1M04jqCE3pV+/HL/MA65aZqRk5Uom6YhXZKeyrp0JuefS7Z+3jiOhcPSpVsmBQAw6MjS5bqoMM9OGpYFWrKSUrIEzvk4jqMR+Q0nCZDnt029T0cBAH788ceHx5Px8wQIZHLPAJQYY7upze9JtBBURoJcfs4apUaIHsaTnYJSSsqJmYm8eU2EuTe5HlrL7rwIb/ZgeX55p74V9l5czuvJdOkxr0bsmuC8moX1WYKyjGp67jM3Z7rTtznnxDlHAb0QVgXAbJXM6SIXnpJL1eiDUUvP5/Px+LTdbh8OT6p6c3MzDgOKgsPPnz9/fH9zOBze3950XZfGsWLabrfmM9V1namcH49HIvo//o//4z/+x/9BVZ+envb7fUpps9nYcqeAc3FDlZRSkPIyCyJemGnLRf8yXM3J+lKUmvddlo+vy1fLUXprUgVY8c2/NmdeZogXGeT6pVUPD1fp4DNIyyo0Wth7ocAMz2Es+JU66ptb9ToDfpFW/pkJIhpz3Fhkb9Om396GdWXY64u9evU1r352PUev8RXT0wCwRtTQ15L91X+qepGypBl0Y7DPGXfkHU3pHYqAdf4UgESCnTAhsc4SOAVRZ/bDluqtVi5kMgBTn1EAxDR9AWVO+GyDDRKJKmZObxspIGpvI1AjRU7fhFO/bSb0vbHXyxGjGRO7Cm8CqnPLRQDUmoyyKmPCqxKH2XIoIoCUkpxziCAiWYr3HsDlnFGl6/s09KjinONZy1FyJqKUs4iYOdFYOMZqVp+ZKYzzLJlzPp1O3bm38ACGSVMaMztHhJoT9/2QUiLyVVWHEIYhDd0IgnXd1nUNwjnlksbo/e27m/c314j69PR0Pp+rENE7FS7MY8qHw/F0OgP57XZP6JqmqUNE0TElZvbOxRibukJEEDUdltPx3PdDSjnPQ2+B3q0n/fVFaG/LeVI8scTI9nQYBpYcyZdSjsdjSoNNl8aUaNu2DrVchPlxLPmHH37o+mwCMQomJKueMEbfbur9dmPERwBBVB9IhKsq1nUUYeaEBM4RIljdmFVKKZmVyNVNXdd1u9l5712snHMw4wkBgCzdC2HJXxeZlYVmvgw/IvK0HkWXBxaK6PkNVtCqV2FvNTub+QtO1xesJsQ173ad1lwqhBRAVKQAAqhn5030tG7ddv90OJ589opOQeq6dkQxuL7rvnz5cnt7+3B4QsTrq6vfn05tFYexM3iwIypZXHBjyuPor6+vReT3v//98Xjcbrd1XT8+Ppqe5zAM+/3eouBms7PtTCmRG13c2FrNC4cQFXjCa6Ctc0EViGjh0V7C1Rz2YE6j54SGVNVaGK9vL6LdKsLJ6+dfxL/X8WM6p2vD21k1UOfepH2KZwvrF9ne1F7RV9//quHyenueBZRV7fTNUur6YlweTsCcGXOqYGDhS0gzCjoCyFfSrTnwv/wtC/P+RZnxZebx9cer0/xaTe4ZouZNcrtdH6xKAHYdyExzUDU0KQKAyqz/MjcpEQ2G4ObT6gBAWYhoCntzkXNSTl6FPVzzYFb1SQKZmIZTGBMCmAwslmqDmIsfgDAqANrIuJyGqd+24tLBvDZZHbc3cr7LAmI9nqzbhCQiaxbq+lxMkVhlydUArPGgNn13XceQxzSUPAZPoGywRjXza0SemWQAkFJqmoacU2VAJSLvZtywat+dzudzTmW323kfx3Ec+iQiVdWoTIr7CuRccL6KdZtSOZ/6fuhjjNYpLGlUVVK5urp6f3PtHXbH8zgMqOBcKKXEWGUe7h4eT6euajbtZoeI19c7B2j7knMO3jd13bZt9M7yofP5fDweu67juWyyTNnri8pyqQtSY44Q49DZYJyYBIjMYljBpQdpZHA7RNebpmk20fm+H0HFOTcM4+Hp1PdjSgUAHIKoKIAjCFW93+NuWzdtRQqoDGrSqBqc
D9ERUSlp4YFNMY85cxEBg322201dt87sICykIeKM2FSadEeXyq090Bl9uh5UVvlaX7wvimbrwGkHqnwFnzX1pO0xXMqer7OW5RQ8T32WsOeZ2bMH5xwo+8RSmLmqqt12P4yFWSFzkTQTbMA5fHi8e/fu3W636fveAkyM8fHp9P5d8/vf//AP/8Pf393d/eKXPxeRYRi892aFYWRHAKjrWkRub29/+9vf/v3f//1mszkcDr/61a/u7+8hNJPqGAGIDMNQu9jWGxZ1iEIEwKhokv8z1/+yXzCTkVbUyQvGW1VBny0I/kiWsyTNfzzmrWcDfA5pwTW8bh11Vr/LegmHqKsoNc+ZL6MUvh3wnkNLnm3V69uLl56/DQDMQ4YUzNuRFHgOVUig8tavrB6skKuw+jlRAGBhAPArCNIs1zKtztZVuNWxRp4f65x2rE/bapOMVaaICoTw+h5E3awaigA4Pa/GPgEBB6CEkyaNtbEIBJRUkVBAUaf2nLqLq5yxO6y3Z2d7XR+nS23CKA2Cz0KgBViG5S2XAocCCCmoM49koXktNAufW5zmOar9idsiHwfzu1FhCpLz8yiWz60GhZ0XK0eYG9dc/LTDQyCmKjl03eHx0TVukYdO4zgMQynFuO0KrMDkQAkZJvNoTyCCOkUCR0TMknPuuiGl4ogsNliOparv9/txzGlMbEjz4NA5RCxZmBWUvIsELo+l74ec86apPtxcbzdNdzo8Pj0CSF23iJi5jEmeDse+74F83WzatgXn2ppSSuMwcC7euaap6jpGR8ZJNwPb8/lsiB5yDssEdNOZA2NTkoEUFgDnQr7OCkZsqKrKeQQATyF6L6UM566MQxNDFStgAYBN3bRVW/mwXFes0nXd3d0deYeZbQCSsih47+u6vn3Xtm3jHKFoCB5BnENVjnVEVJEiWqyPOw7j+XyumpaVi4CVdne7q6qpnfdFBZEm0pu/3HIpzjk/K0aSaQ8RWWK6TnCnq2PxJVuKZnMYW4qb6BxOtL+5rjCD2l5Oc0r4Vpqy/LmM2zXW8RlYX7x3CkERhFQIsYpZGiWi3dX+3A+IKN3Q92P0JKyqXNf16cvp8fHx3Yf3Dw8PDw8n51ziYnWKcRz7vs9jGvqELoxjPwwDkd/vr1Mazuez936/uzocn9q2/fHHH3//+x+++eYDADw9PSFiKsW5MOGJuo5hFMVQ1UQelRyCEoHItGZ1iCpEBM9EyNbi/pejMS9BnoUoWTkZvT1LrMghbwa8F5+Vufe25Go4M33XYW/JwtdFznXYw6+GvZcb86LG+yL+6b/zRjqVuN7YTVtCgcEuLGN6Fthm16wLMUPWsjWGVhQBAL8Mxxdp35sHel21x1eab2/eaOYCvLxXQ9pcyo9z9XRO6icxronqi5PhGBAtnHo7vlNvz1nlB2wpBlNK7GBh7M1XtccpfxcAUBDSOedTMDcxnCudqgpQFudYQFsymCUggiggTdQIVQHFt9gLz4/qG7ih9XCcPitT1Reff4OqTrotqjrx4gWmDqaUUkyJxpSr+r7PORcqpIIOteRx6FIeVVkVFNjMWo2dhoiGV4RZHRSs34kXTRAiqmLlXMg5j0OeYN/ochn6lEURyTkHbDyTyadzW4VoaVPOYwjh4822rSPn8fD41J+7pmmij+OQmfXz3efuPFbNJtYNKGaR/XY7DE+cCypsNhsTInEwqex3XXc8HM7n8wIoXWbyRfhYVS1Or2trL3pLpvDinLPP2XG+v79n5rqu67o2plfbtre3t6KjKpZSUippGHMqh6fT+dw7H+1AISIhBIdtU223zXbb1tGjsjA7kui9TX8xEoBkY7DbER6GnLOPDAAhhKpqNptt1dQ+BCJvxBLvfYwVBb8Ya9Aqii/zzguNhWW8ERGsipyvI9byDcv89RyJPR/fS/xD/Go6+KxEtMTd9SYhIgB776fKnxRSDXWD6Iah2273+/3gnBtSKSV5F4qUqo5c2Hu6+/Lp2+8+OudU2Xt/
Pp/bpnl4Ouw27W9/94ePH24/ffpyfX095nw+n82+cRhc13WmurLdbu/u7q6uru7u7rbb9vb29suXLz//+c81jwDAYNp7SSkwM+dEnqxbTgDWEUEguJD+L1msTcfWddaVL810SN2zIuc6G15PvMvjPznBLu/HJSosU/SzgLj63VWiKZP84Z9b5BRQ/MoUd7n8nkWKl5AWeB4Ln02AtiPTdM8I5mzOoE6ggKlRA6mo4hSwBS9b+MxSdpJ9WwN5Llvry7wI0+d7OPHDdM7i52iFS6izB6rPj+7qsSoAVPiVhYzyclEtGY5O9U2TB1VFQItyaJkdEoAqMoDKBIyemlxzGVgnXRhFQbIDSC+uajed0YmrwDCRFc2WUFDUgp9t3rJItSyQEECZFJAUkO171eqoiCAKdFE+f7HrukjgLGMaAMS2m5eTj8qTOfDUlp72zQYrTm1GVWUwCpTlxrkoqTnm5jQYtqJu4ph7FwIo9CmZYggAGCy+6zpFDFWlCM5N7OzCiYVV2bkKZvq55YhVVdV1CwDGYSCiGKthTCmVkgXIyBOESAgupQRA0TsiOp378+FYVeF6t99tGs751Pdj35tpDquMOZ37IY2Zgt9d7Ztm0w1JAVLOue8dYhWqXbvZbrZENAxD3/enw7nv+/O5z5nNFlFVc+bJ003VWpg2g5vKGqyQijCng3UTiUi0sIAVGI29PwxdXddW2xRlH1zdVD64rhOAVFIeuv7wdOz7se97RTCrwiIaY6xCdDHsdturq6vohUCVC3NBUudCVQVPLkSfc2YuIlzyxJSIdRwz13XdbFqjTvpQee/Ru0p9CDHUVQw1+UsR0oAeU8FTp1Y/EAGgiUrjsri0eEaXmPRspfDsGrmA1Oj5+wHgQiXWZV1Pf2R2xlUncv09s+IuIaJ64zAAeg1A3sVT39XtdrMb0YXj8azMPgR0eL3b/uY3/9Zs2s+fvpzPZ2O5lHKfM28bfzr3sIHT6fzu+uZ4PO92V6qYUgmhmALDONoCY9hsNk/usN1uz+fz09Nxs9lVVZNSaQlVdRxHpBBqX20aQh37IewigThyqlNGBZYig744hksJGidlt2eRD0GNu/zin8jkK6DPpwiitw+szhPUs6kYJi/2ZTK+RKx5erdgvEQg1nL5UbmEH3me7V1+0V36Yi/O5rKgeb6yeSOwLY+XP1881vXEN++HgoqKmiSpTN0nWWNNV7nd5Fuwcj+1sMew6u29TvXWK+L18MXn2R68hXpZLgb4M26vVzrwTOpEAKzkRpfza6zyqUW8UjkRVXe5yFXVrfLIucjjLKaSOgVGIlWeoqUqgJt8lgBUGZXUTSsGmPlzaGcABMkBCAjNpEgCAuB82TW8RL6vHecXSzwUnRYBcsEar8fKXJnnKTtEMTBN4URK3pOhPPq+NxPzdOgIzRY9CRePIARjZsNVxroOoQIAIqrrupTCbEQxURWRAoakn4nkJrCSxsIsIcS6rlOXMqsiIZKgiROgI/d4ODrngiNVLWMqpVxd7T58+FD7fDqdToejiNZ1hejymFPhp8dD1W4ao2B7f3XV9ONwOp1q08zyRLO4tml79n1v6azONGqza1cIdklbaMeZjWe3hb2wDOntpjKI+TJVWdizyqfNgORwv99XVWX9RRHhLOOQjsfz+dyravDV8XA/DIMiVVWoqlA3cb+tr/etpgdEEBCQYql+FYOhY5YzagsLa4I+HQdTHG3qDRiDM/jgoyLEWMe6DiGgW9pIaIXoCXU5JxCvbzCHvdm36g1IMLyVqK3DFcCFvSBva2zNw/g5UHn5oRe/gojM6JxTwAKAKiAOHYUALGAHZHE5sO3fbTePj/d/eXM1puHp6UGBb25uHh8fiSBxqWJ9PJ632+2p6673VyUzTB1/QUQ7p+PYW/633+/P3en6+vrTp0//9m//9h//43/8wx/+8P37awXWXHLRnasaBcmly8ftdjtzmp/jNvXZbi4v0CsN3ldrjJfH6s0b/tloyfUR
np+9vGphbBp1cqlqCPxpJOf6O9fyHW+mpy/+fAZF/3rAe/UNioCgAggExKgos3WrMiLJnCqtQSs6Q9CXL1/ngvaBZNhyk7B780jiBM16dlaYl1M4Xwxy2Vx8hfsSfmNp8OKH1qdouqpo5mHIJJdiiB3LwwlIFVQZYV5yKiDgOgGnReXk+U2sRjmBgjwqINJcFIUp2bLMT4kUBQIpGIZzWn8AkYKozWKCqDDLwRhOyEYQzofFWoCmYagTMlP1cpqn+upUuyYFRVQoUlYjhVXVgQJCzklErMKJiMCKolkLcw6uSmN/Pp9LyW1VEVF3PHJJro6lgAWJtopZyvl8/PTjjx8+fGg2m2EYYl3VmxacH/qhrd1m0xBZ3lMsOnZd9/7dO6vOrXVzRGBIJcaoWM5dj1422z2Dnk/d6XT4+PEjIv7www/9+fj+/btffP+ztqnS8didTqXwdrut26brhsfD0+nUVW1TN42JQjFoFRtUES7e4367azatiHTnk9kJWdWUiCz7FJGczZYIYdYoJ8S6boxa7pC0cN203vmhH7z3+/3eDEiVxSE5B6oyKbUNfRqG3XbLzMKlbeqqqhw5KYyI23ZzPp/v7u4f7h9LkaZpUua+H8cxA5DpjW03zX6/DcGdj4+NS2MurIXAtW29aduqqpxz5+PJkmbLhMxxnpm/+9n3BmOJseIpjoEAtputc84FTz6YYKtd1kWEAAjmChzaggXm58xda9Xho+fduLkPTe5ZNrbM0BRMue0ydxMRgjM+7LK0xSnRR4aXa+UlG3gRCaZ1hncsBttyMTh1Rn0roWoyi6FRAOT773929/nL0PdVbP/2b/7qx08/Xe+2x6fDdz///p/++dfffPNNSuXTp+PNPt5cXx8OBy18++5DP6Z2Q6fTKaX0/fc7Efnmm++sBsDMu93u7u6uO/d/8zd/81//63/9zW9+86tf/eo3//xP//k//+cfPn0monHoTj6EUpp2ez4edlfXnuI4jqb7o6rDMPhgBk9TP8/Ixsu+09zzW/ZXTZd8rt/Awmt8AWCZgwrn9ObsPPdWV6VFVUTkddSTCyWO9NLtXoc9xbeLnGv0+3oRk8fxzUXVslUvFlLMl+bli5wPVg34y6pe2GZfm+AYFciUplGt9rh8yr7QrdQrlx7l5C805XZzunBZFvgpyLzCWooIokOcRE8QHaJYS8suFtuAWS8HprxWYN5nmPz2/rzb60UNIgK8tYT5egd4/SXTVr1+zwIlmYbK/F5EFQVEFWdgZUASmYvTS+MfQJUERXTh9qFMLzzbXcFFyvu5LMuLfN8kH21QThT1y7psHfZsIAoUUVnn2ZOyU2F2LMoigqIiAigK7BzmsR+6Xrg4UJY8DN0wDG3bWtMrVNHHaPhAH4KNN+sblTxa8c10LAEg5Xw8nFPKMdbee4uLRbiIonc+BADJuaSU6rpOw3gYhu50aJrqw/t3Jmzf970jX1WUM3f3j+duGIakSLv9NSISTX24/nwEgKZy+20bgpOSSzFMf1nM2ZfrTVeOHCwXR2KD5NkJruvapt0QQlVV5p106bjMzoIGcDXQYAgBCZYyqb3teDz2/ZhTscpeyXI6dU9PTwQYY6zruN00wZFTIUFHWkejlnvv/Xa7reuoqqaUbdKgdr4QKYTQNK0FPO/DPI94mlE46DyiMyEVMBkfVFpFqXU8wxWBb/0krOapFw/erHyu87PpganGL5f5n91/Wl+hy4OlNquT9qRJl/tms0UV7/3Qna+vr58eHmP051MO3nFJdQxIsR/7u8+fTIR6UprO+XA855xLVeXMVYWIaKorp9Pp5uYm51xVVd/3pThEbJrmeDz+27/9W9M0h8MkmPfp05fdbtcPOdYt56SCTdMQCKoQgrEalvnaapO4mnbm6eVZJo0vwUGXbtyLWevPye1ghmbgc4AFIvIqCYFVNIEVpGVZtsKMbNRXRU7zdMRX51dnoWZ8jl5Z7HFeBMKF0fwi5r35YNk0vehTTaAJ
mWqAxICKIAIyARz4ddgDAJ4gLZegOBU5lQHA82s0KKz20bCSC6rrArvX1RWkAOAcXc6rzsSWFblhJmq+wnqsj9AzFdAF1AKIbhIYm4yR7GCR6qS0tkoZnxe338IErf/DuTUItlae8JhuvrBV9NLZV1VCAQQVAnJzdV0BnJiRDyjhRDkBAEFAmSRPp2GqIqsRBgDKGWA+dSxT4QGAJ1fSKYypqkl/CfKCwjLgjYW6wokKoLk4A4sWZRUpDqHrzt3pLFK8x5zHvu9THq+ur1zw6Cj46EMQEdFiyZKqICkhjkMehi7nvFjw9Gnous45v91uAeB4OAOhSb1UVeV8NLBJykPbNOfz+eHhAbhc33x49+4mBH8+HnMqVVU50Kfj6eHxkDOTD7FqNpvNMAwA4n3ImbkM3vv9bt9WXgRMmYWLJi5pHMdhmJVKQERykZQ5Z2ZmKcWKls45DECApu6/bTdW7q/rpmkaR66U4sk5nOghymJ1YEdUVxVziSEYMMETBkcAMI7jl093pZR+GJmZhcZcjDW43W6bGHe73W7btrUP3nkPznlUNihKXdebduO97/vxfDzlXHIui5RMCHWz3ey2V+1251yYahVoGpshhMqYeehI0SkSOSIEUBItNgMtEsU2vKeAZzrRdq3Si/noRVr2DAXz6g3WXaDJRwVgVo1Yvmz55B8jPq2XgFM2SQ7A8AvT+wwDV8W65BQjjX13dXV1Ph6Cd2UcnUMAraoKyB3Opx9++OH7v/jL87l3zu33+/OpYxZQ6rvx4elYNRtCz2XIiZ8ej7e3t6WUzWY3DAnRAdC7d+9E5Keffvrrv/7rx8fHknmz2fzwww9/d/0fSuktKVRNDiANIzdZothJAQAg50IUyTgtm5cD8Ibe9CqYPfOWeVEDfD4j/rH4t86Z1sf5ddib08257jdne3Nv723e3pr09exsriBR+qpl+2IFprNdOXwl1dMZU7quIcG0uEed2dBCIAqMqKCCJKCgEzOyCC5fVUy1SnHedV2HvWcqLdOvwNv3iGhJHk4m6TKbewPSLIoNgAhzXQVmQ7jpT5nXOYvznugUK+Z7XB5bc/d5jZeWkGPNvPmFl4Ll+rxK/rUVqCC9oaiiC+FDkRCMDqeqhIbZmogDpvUJQkYemSO0GTwC4RLkFzzVTDiccEc4Be9VXfv5AkxVzUthZh1csjcrbBbNuqgCTu1iNmG2Uoqb993Ye6rKXMZ+yGkwLnseUy7JObfdbkUEZ0x/KpklA0Dwk7+aqvZ9b/qWpunVnYdhGBDJ9Jxy4kmESZgIHQVWGYZhHHs7HV130pLff3j3zYcPDrU7HbruTCGiDyWN524Yx+xj1W521jKca0SMiLtNW1Vh29acxlLKOE6OdyVP3m+2jyKaE+eSF+dxU3ODWZbFFLQX70ArJ5pwlKrGGENQawraM4uAi6qsSX4A0Pf94+Pj09OTKhTWUiRlHvrMRSx9NMnHbVs3dQiESOwQh36wX5z1X0zPujNZS1WNoTYNmu1227Zb5+MlSyN0FHwVvfcueEBnVgkGoLVphuWy9F7uYS6CLcPszZLUs0tgFe3Wj+UZXfoNXPd6ev8j1936Cl1fqradbrpKAAiUAVBjXTGzCxERt9ttXVXbpnXOFU7bbXv/+DT2XR3i3d1nKcl7/9NPnx357XbLzGPfm3Drfr+n222Mseu6GONcGK/btgUQZt5u25ubm9PppKrGag9VyDnf3d1VsQEpgC6ESqUM3SnGGEIgHyclBEAi6rtxDnYvj+36mC8TPa0yvK/lduvYsH7m9ePXz1hRcfpmeTa1wAIjWtUVTZxsypLeIjC8LAm4Z7t2oY2t0KrrQ/HvLXJOjxFAVRRVSUBFQUAFnaKx/pFVzKKO8RL2ZLIVIlW1bI+fHQIAALaWk2mjvb4tTXIrT6/2ZG7qyqXWCaQrdNPlHgAKASkoocp0b9W/9b0STo8BUQQvIW1hn9DEz7PLQ2FOMvHimqsgCK+vK3ie8ykuHgtT
wIMJpTntE80eEfPSy+IdgKi5Xs3TiDOOHSKoqCKKCqLKXEWVlyJANowuYW9SyZtagXb+eU5lVaRM71FlLUbDV1XRsoRAQUVE0+QUYWASUiQlVhAWYFAdhy6XkQi895xHA3Pu2s3CaA4hoHPCAgBu6gTRghwRER8oVj6l9PT01J3PTb018+6uG5i1yIjoAJCZ+3HsxkERYh2Oh6dx7PdX21/84vvrm313PByPR+9cCPWY+PDU9V1yIe72123bClI/dNYxTUl2TX2134bgck55TNbskcLDMKSxAKAnslSpiObEY045cynCLJyL7ZTZ0ZmY5xLzbOYCmMRQYoyOSt/3w9jhBfCpIouh8cTCHIbh/v7+8+fPCxl56POp63MqFOLVZttWsW3qprFuonfIKixcPFEdYxMbUhq7CYzDKUfnDazTtturqyvbSOeC+aUheQNwTv089M7IgoREHi7gFCJZVS9pioqXic+GH6IATpHyK7TiNVbhcl0jTivgWYdF537efKGtg96k7wNfub2OiLZtgIjo0LwilRAIUNp2y7nYcsGD7vd7YL653nfd+f276/v7+6fD4/sP3zqHj4+P11fvc85Zi6o6F2KoU0gp5aenQ/muNn5eVVWfP919/Phx8nM4PBonJ8b69vb2D3/4wy9+8YuHh4f95rurdzefPn3667/623EcY6iD9+fzmVwYUz+ObURn/Mln2JHnJbs393oKLRNyW0lBZLrHCbc+VbKmszZNNKwzkWlZKL/42vXPicA8dJ+FPS38ZrZnGILl/csXvgh7l+CnL7O6ZZmFcy9zXfzUFS/wdaq3bNJSwQIpFvAElM2BB52KMgIjKILA9KqFvbnHDCKyoOFhbhLx3FMSEZjCpAKA/5rt4bJItN1Y7Z6fx/rq6tBLnLNAdPlTUQBQzR1HjYq4zigVceo8AsgEaZmSJpkrmaZBo3Lhzy5tsufAqjey/te3KdZOH5t+TxBQFQF48v9RQJgYAqqWzC05n+qsNWP8ckJgnSvhsES+9ckWVSO5216twp7YgFukbCekpp0XM2TRojqVQHkV9nCGNllGyMzusshi0QIA1pxzRKhTWuMAzaXavGS990LokTw6IgpebE1tlghVVcXKE1EaUinFGlRV1ZxPPbN674/nk/deGXORbkwiavD60+lQ1fHjxw/vbq4QdRg74eyiZ9bj6Xw4nhiwbTZNswHnpBQRCcE58ahc1WFTV6LcD50qzYtBNkwpAIQQhzEza+ZSsqTCliza0TZyetu22+3WMi3DeRqkE+ZM2hI7LqPpVltEZOZSJgdaVbUGWUrpcDjc39+fTqebm/dpLDkng5Kq0Lap9rvr6/22iq6pY4yEyKiiwqpiGt9mlnQ6ncYxiYhzAdF553wVTag6xno9haGJsHjvvX/mYO78otIiKxL0stBebutpBVbL8+dT0rN6yfp+Vad6NnfjW/289atvVe1ezv7rtek6TSJ0Cot3nVr/taqqseRd25ah2263T4+H29s2Vkb/4LZtn56edtubd+/e/fjDT8djX9exreq6agn1eDw+PVXfffedKep1XTcMw26/sWhqZzDGuN1uf/Ob3/R9//T01N9e39zcPD09FU5S0Huf0pBSuXn3XgvncfDeu6pWhVLY0DHT7L7M3a+6VrAwB75yexEG1s/PFmMvv/PZsn7Nq5M1vmCe8eemyets780i5xL2CPDF6cM3ot7lbOrzsueyEX8k8r0oclqvUSyfA1JARWH0jFOHj3EKe0VAJ0oZzPtyKXLyKuxNP8EKs/6WL/z26gyf6fledsa5C5Jz6fABqpvtwC1UwOXamCIAfhVj8uywTk7iMgNmJirDLO0puo4pJlq9fNXFfW5ZZUzLpNWlaFFhgaDNmZ3O/vEKKjpd3LMzL07OdlPOZx+U9fcqwh8txRt6RWnuLOMc/NRO83TjJezZ6mzK7XgFOMZ80Tmc2ohsQxlJUayuXUzhSVVLSYYYy1nHcWApKMycVScpZJxk
cxFAM0sThJyTXMaxV+C2bZ1zVj9s29Y7Z+D7pauvpEqYufT9WESDj4DY9+cQ/bfffvvx43vmfDr1AFBVEUTOfT8MgyC0m81ms2PFMuacR0cEADHGTRWb2osyjx0qo6qUxJlBxNxaC7Nq6vtBBCzslUtLFIx7boUsi3lgOt2zUpdt+cxPh2HsCiecQSsL7cFuVuc8HA5fvnw5d8dYeRQdhuF0Og1D1hlk37btZrOpq1BF9MAq2bTioqfd5so5l1PqzuehGwAgVtH7qKp1VbW77dXVTV3Xxv4iIvHVSkvaE/k5PyOdpcPUIJGqqurcJDLwQpNzmb/WoRQAiNxqllzjKteYzFXFUi42p8+/zaAN9nl6Pgcu3/kSYqav65w6fcF8LToU42Wpj4FTatt2PB/atj08ubaOViS8urpqmrv7+y9V1Z67/nw+769vfvjxJ+v1Ho9Hc+0ozD/99Pn29sN+vzf576enp9v3N+M4bja7rhuspgJAHz58YwIunz9/3mw2Hz58eHp62m2vHNJw7gQJEY1n4mPlqhoneCpMkpuFl7BHc7bwIqRNz8xEXTP1Fp2g+JNaCuiECF/C3lfcz9cnd32uecbCqyrqCnfNl3nmBYHhzbBHc8kKXgCRVnFhnQ4tp9tuq1XUvy/bm55RNKq/KCgCI5rnpAAy0pQLgqrqure3RnIuYe8SVm1mdQhrlZbX0/Q6nsvFV8LBZVV4we2saiSTc8J0FJ5Hg9fj/s2Vy0K00KmosqSTc+4FsGLL/l+5WbpmUVRnSZTZidfI5vNgwtl7wTLFOZoKTCKi086+insvTrMzvt+S562Scb0Aw2YyKbNdJCIis3eJqiU/c0FgCntWr2ARWhoJ1upbtoSZJRfjnAHzOI6+qg1qIaI5Z2FiKv9/6v7sV5rkyhPEzmJm7h7LXb4ldxaXYlX1YEroQgOCWgIE/eHCPM2LNBLQKDRKrWaRTRbJJDPzW+4SEb6Z2Tl6OGYefuPeL4s189CjYCIYn99YfbGz/RbJunFsWLUYI0KROJmmCQU3m413bp5y34/jOI3jNE3T5rolojQa9Q18cHNMp9Npt9t98cUX19fbD+/ePz7e77uNa7k/nsZRcs6h6Xa7K9eEaYzjNIoIN5xzdm17db3zTOPxcZ7n4LmPEmOcpmhByHtvC8w8zyKQJOekJXOrkcMAODbPs1PXijnrwKiqlblUbSUM/1IFOYtIqb2VFRzGl8gSb25upnEaTqfj8ZgTejMD6jqb2zFzCA7zHLMSQtM0V7sNgUsp2ZQ0RZN92YQQ5iltNpurq5vtdotAholvmiaxXweJJWbYiVcut09w4BCf2C9cnIfr8HMRlp6Wa6uY93QdqC88f/TFy//VGz5Dcq6uFqPSo4KCEnsXHM0Am9CdHj56RO/ZTDDiNO/3+6vd/p//2w9ffbkDgL7vN1fXzHx9vSOid989juPUdZ1z7uPHj+M43t5ei8hms3n//r2IDMPQdZ19DUMXf/XVV//tv/366urq/fvvX7169bOf/ey//tf/en11y8zDdNp0VzlnzQA8tGkLAMyeRYmK/S/Ueg4q6O/FMu55wfd8lbh4zgXd+yL4Pb9/0rv7ZHm5+jKr94cn71YWmMtD9qSyPBu4rwkqurKbr8Xwj4W95z9N1RIBMdEOBc05i6oAKlgujAZOETr/yL8o7AECgIvT43k3rS4VJAdLGlftxwEgxNX21UVoKjFiV86Ko6Z45p1cNEYWcueKAwpxKcO0ajsv/7fsNV0m4JA/yZGwb/VsaZCMT6NskYg1h2zLtjAvAZvUBBizedcTYLZDQcACuZpEiAiiSXoygAqqIFpGrMZkIM2gKllAQM8dOZdtMpekOpBVjz2r/qIaylAlSxSRqe8X2EW9Tgpe39Zxm95534DS6XS6QXcaT0rYT/Fw94DM25vrGCO3bbvfu6YZxxFBch4x6n6z2W72j4+Px8eh9TvnXJpBonjY+eCYGDPnuT/dHx8fHx2Hm90+0/54
PA7j6N2GHaR5ytOpc/L3f/uzlMb79ycCuNntU4TTYY7RRZ1d0zaN3ziSNGgcHcyh8aR5u93ud1vOMPRT3+dpZlWdD4ecs2RNKc4xz9M0znOMOUlOSVJWIgcEKkCevfeYs29CaD17EshJlBicQ9+QSPbBbTYdEZ1Op9PjIcYIgoBMzAKSU8wwuUa7znVdg8Cnvn/3w8ePH++ROobNw31+d38/TVNO0TPttu3ra7zajC2m62bXuZkyTNNEIjZTJBfglOZ+mIcxQOj2oQkdOB8ThN2+211ztx8FM5Jvt+BDVA1IzOx8Y7AdBQRgAHTAqKgZRYEICMmTAwAw28wFKw9oGAJaX6GrRYaqatFZfgUFzvIUuoYIqCqgg/I16pjEOFGl+V/EXRENmIacz2FVzssnxDgjItCTCAoAUTwAICgSmoIgOECmlBMoELvHYeb9q8PhINvXeqU3d48pTdfd5svP3/72dz/MWYjccRinP//57/7d3//P//P/Y7frbt7sHh6O7+/ub25u8pH+8z/+9v/2f/2Ph4fvX7/ekY6nuz9//vrt+x+++3c//+v/z3/9zccfDm+/+vzh4WGzvzmNE4L7+OEh8J9++fNfvnv3btO2N7tdilHiScB5h5hnjb1IVslM6ojHOUpKTESMMUbRzMysOecsOYIIi6IIS8o5J23Kwqxqc2LTq5JYa8F68ZfkuPDSoI7e0OrybHu3rLoWKEhVFS79U225ykhFC4pQDHleGuFldmjzRlUlBFUtPM5nHSzHpeYoqzuYTDJ4dmIkZhA2ADsqIs7J/BrPhSCUxmP9UTbkkZJOMYlatSeiAiISVQVmBUD0QGAacSLiLLCxzURBREhRFROqCmZ7cxAVTIrGElJVggEuIS1Pwh6swt65yTnrDGDYjnoGGzA60YIoQVFYxCBq/rHOKC9C0frBIgN9GcRWGe6SJ+qqOXNxWz9zqYGeF5pLlnHxWfq8RXNBK7SqzGjppSnxLJUr96gqWTIqFI73k966gGjWUtJZuYeiURJkiZLq+ZJzVs2qSOU/E2lTyQKoaj2cpZtHgCWIEhDRGGeTIoSanVnUXACQiBiC77rudDqZe47VDaX3mrPk3DRNjGbEM7dtG3xr12c/HGOcr652SDIMsW3bt2/fPun7AyrEGOM4zkSOECuzEKyuCt53obGOq9Wpi1XsNI4ppXGO8zxPcxrjPE5zSkKhWb4kYGnli0izEpuupY8u+B17YGideZ5FxBFbXmV1nqra66BANx8fHx/neVaFnGWe58PhQARNCF0TDMDpvQ/BL1NDmyx23RYR53meDqeCEXWOyZtsMSCavqgZHDKx9x6IpdpHXMSGi3LqSX9pJb+5fvD82vnxLc8v0k9x8j59JT6Ztf+bPnH5m5bGqYChqNlclfzSTN7v9w+Ph5ubm/f3h/0+HI9H58KcdDj1fX+8udmO4wySQ2BySi5mAACAAElEQVSDogSRcRyHeTIlBOfc6TTc3koI4eP9/dXV1d3d/TAMXdccewWApmlM5OhwONgRZOZpnMEzcuNDm1Kcp4lYhJjIrbs11WNhVUstfxPJKqBqiSzK+jkGpjf6da4rkgoqimZZtlgmQYvjiq6tBs7ssjImWrrQAgrKoLnAKHTpHWQEMrTFou5oM5JlYvPCMVsJ+SOiFuFjyAJIphgM5ixoZ6JArn00tPWr4hysfMhiYxqAskWTGcxlBVFJan4mKEiiCZQFRM67DqU+v+4+FEVRzJJVUMCKGFtXUVWTzADgqk4jIlYvDZtjlRkeE5HiudExiQF2z1ssNLKy4qrtuXhOroZ89cqEp/e6LhoXW4lnl/rLKN6XAMDn7Qv65vJTzs9ZTp2LQ3weyT5924rPlFKCqwiU092yW7SCUQVVLJNSVZQMJf0QsHvDvtrhzaW1DtkqT7EHKoAqlh9hNp4EMRBJkUdVBCMSIrkwTZPaSMnzPM8pzyop5qiq4ziO89Q0TVadUzJkv42y5nkGVesNeu+ncUAA
e4yIFn5SSt45ERnH6XA4SErb7daxH8fxFA+GEEl5dkrbzeb2dv/ll18eHu7Pc4SkU7mNu5s3IoJQCPVM5BzZogYA9nFGV5imOaWUssSUR3txTPM8x5RzLlc1ICOy2R6ZOmLbtW3wwXnPzhHjcl6reOc9O0l57E/D6WggF2I2MG6ao2hCBc/Os8sxnQ7Huw8fD4+PkoHIzdPcn8Z5njabbrvdXm02W7ODCKFpQuV4qfeh3WxdCPM8n05D7Hsicr4JIbjQeu/BewTa7XbBt+Q9EYHzzKxIOedFXHqdHa6DxJmowEZyOBPS18FPnooLnt+tTtFqRlL/Wi0ozxlh/cOLseoi7D27+vTimcunrJ9g1efqcssARQ4SEQkds6pXD9K27Wa3n1OW4VU/jNfX19fXj69ur3/7L+92N5thmvphuvv48dWrV3/84x9jhqZpgvcfPhyQoT9Np9Npt/HzPDVN8+HDh7dvP+92+/u746vXb6dpPpxOX3zx2W7aHY+PwbPJq5q23ziOllCeTicX1PlmHEclIk7ctE3DKUcp7Zma+Nb6xboUlo+piGTIWVXjKkwWYlIt/S5bf6CaStg7O8faEm1tJAAw2QRBAMiAAEgF8VBVBYxjmJEQCDSBIoggAQOrwCJ0tyBIDWd+1t9/ujIKnddDRlxEi1WhCiIDVbg/AoikpVVXtVPKYwAUzQpg91WnI1oYSwpZJCskEUUSRSBWSSYDlAsEAiKUvZGtpgAERQFMKQlgVlTFbAWKUdKsyWlhr9RMVQACALAMinS5tOzGWjCEhSHEVGAUUA0tCYkAlQEEkUGeJKEXk4PnV6w53mmRGjPbncr40/MoDk0LXEFBUUlRLu4FzvdKavekBFSGegCkajmIEROXiFa2qK6AL6oAmczJDwQL/kVw2f1VS3M5m6UICSwi4Gd1oHW1pyqomgw2b2pFgIqC7BQTZEVXEA1MLCKohX2siKBZK6LUoqu5BQFAnMY0T4g6jZMgjPM0TRMFb2Xjdru10sq+FRH5gsnWst2onSmZP1/OuWtby4IBwCKiVYrHKXrvRbTv+91m8+VXn19f76uxGYpInPM8TdM05SwGqNOUc45ZEoGaCIlDsmLLuHlTzDFWa8AY53mO0Wq/jEDMBKRTjKoKKDnnDChZGZyF82WwV0oiKgILFk7MaNsqWu/94vZoabi9ltmb/+04jjmpWS4M/XQ6ndq22XbNtttst9ttFzbBB/O3M6d1ZisBc9bTaXh8fGzBOd+0beubzjnnQuOawM43oQMrR71j55lZABHRsWFSzg52uO5XEmEF2pCRF59eWS8+vox8q38uIWedktZ/Wh12CZTHZ6FrCW/4fGJ3rucuJgsVkUh48SZlSWVGRFecGc4wJby5ef/hY5R8c3Oz3W5DeKeaY5xU4f379199/TUzpznVUROkBDHB4+FwffPZNPRdG/747d39/f3nX+6sqWDuDcMw3N7e5hwpRvP0OBwOV1dX0zTt9/umDadhVBlG50JogTk0RJI0e8t1tAz21nP6DCK55H1ZRLJkyRk426KgILkIySmIZk2oZPXQuSoCzJAL5K4WEhlXNQCWA4VnWFCzgBVsaSh8SHIZlMCJCGpx/0PVLGKr4bIFAEhUzH4MoUo2lrVXWJbHCYTRFWUvzahG5kZFZMCMwIBKFatSIuMCGyyTKlOXNmiPjYss3ReVCCAqWSFrBuSyi8FGUCigoDTnckZZRBVAFRTErAmAVFFk0fJXcy9HJRenablOzMK2nKnp7GOCXCb8ADBBAX+jmDrf0q0v+DezLicSAEUUTE/UH3485lnVbLvVAp5W1AkAnLeXx1bEEQApwuW9HSz7KwghAaraFKh8BgCiCiqgvW8xfhAjGhS6gQ0vzO6vkOfASrkFkJVVs1XYWAOXWFWX7XwqVSBURn85HVVqv76YI4lRKUrQVUUyURo7tRVQERVZAbhowSAiIyZATEmIyHxtprEfxj7lGNiNKa5XE1vrTScs56xS
QpGFPZOdJCI1z+9xnMcxpWydN8N/G1d9nud5TkoIIDlHFQnOX19fX11dkerDwzHGhIiaJcU0z7NkMIaApmhpAqgSk/c+BMdIOec0x2jM8yQ1OE1z36eUphRTSoUZg0Tn+X+2sIeILbdd05pzngE1bcS8XoJzjvM8mtGunXWiiYkBVMRAJd48b+/v74chAhiEXcbBrArzbt+FEJgxOGqbpm2Cd0SoAOK9N96eAJkCnIiG7SaEzjdNCIEcO2uNhsb6IFoVlq0T4Zwzc4XiYL6kkUTABAtihYkqld7SrouiSleYgueX20UYe/qcJ9ssXL34zH/rbV1crgsaKmf4yqbA8nHn7JrPOQMSEnsXmtBqt9lutw8PD7v97WbTvXp983gcc4YQ+Hicp3Hs2jAqzPMcQggNpBEQ4P7+4euvPjOnjnme33/8eHXz2jn38PCASDHG77///u/+h7/tum6YJiKKWWIuynwppf1+P0yzgV98OLm2cb7RlBOMWbKCZMgCRU1JKh5NygjDEl3IKqKiEsHQFQW0XZZlMbeAMr+r+AVQdZVGonUHkhXNCLA0uhGrX2KSpoiAQAH9laJQgKy8ts6WKJhlbDXyXrZYkAORkv5XkGcRFqFsc6xK8jN0AyAGLHEHAdH82UQBYcbaJDsXlACaKl9QQER0odwAqoIomriiANp+UUJRyIa4R9BCaZOKvKnua1pQ/8QoAAhCUosZAFQSAQJ0fX9EJSBF5CW2qV1pQMhAaFkzIwMBI8TzhVqmJwQA5AqMpTZhuGi55DOv6CL4wYtp6UrhGxHzckEayUcXhczKJxf6hMoMlrmaKdmgotg9lLIRitOxlXeolu7A+h5Aqw2jaMkcMhRaZCIFgWQi1CKChRiyrvakwk5r0/9ZtZdUUDSDgrHzQAUUwApzlHOTgdT6mSZ6i4oqhAQghIiMMcXGoXMkOc7TmOep8GABYozM3G03zjlF4OCbpjkcj7ayeO83TeuIrZk5ToOIQNYY4zAM0zQx+RCCzcPMgo6Zh2FIKXnvQ+MeHh4aH77++idvP3sdYzycjpUoA9m4Teh8y1ZI3T8cCRAJmLDx3Iam8QUCDgA553lKwxz703Ay4cp5sp2JYGgMuzyQyJo5ljwiM1s1sK7zll6dXRoppRiz/Ypl+Kc610xXDc+Zcz4e+sPjKcYsGUTAYnDOYkE6OPaOnOPAFDx7R4gQ2IXGdV1H6E7jMI4TIm03+7bbBud9E9gF8s754HxgF4AJEIBpUZxBdszMZSkjqN5mTGxhz667xVevZJnVR3Adz5ZratmOT7GdFyFpKbYuwt7zIuyicbr+3OUTnz8N4HIasZ57XbyDtT1LUNezRzl7F0LQprm+vv39t3/cX+H19fVtPx777wmACYjg8fF+u92B0sPDUURCCOM4E8LHu4djf+o6EoBuszscDsMwtO321I+bTbff7u4f74Zh2O/3w/39ZrOxa8GIEGbX4BzLHCXOcR7TNGc/29Q9V0VGEVkagylnLCpClaJQPVNiSlTJwYs4BSCgTZWA1uMZBCBf1HYWR1/zOCkzG6Al7GHhVGwWLeSCuoeyfFgJaG9l35wswRc1GcWlCl9IVst2C42ooJRLeKvBjKqOj9mxLZ25cjT1iVrQ+UGMAMUMHNdzULZZnaCiSCZFUCGLZ7aSg2atvQgAV6W7BEEFqZa5BZSIoGKpIWZQUFIhAHCn4yMDK6nBNRe5OavklIDIgpsrYQ8LaLBknUQlzzBVl/KQiJwVuyx5uepeLPiehb1P/vXp5bHwSD6pMrPOUpe34oUCWC6/coJJGaGdwUWliwArFYOcLewRgIp1jC3smVR0VlVfg50uZ5laLiUgClqqQBGwSzrlJdJClqX8L/1+01yAmikLoEJxliBAAbFyD5VVZ6NwzfM8zyOSokiaRkXoxwGZdtutQQBdU7jqzGwsHO89KqSUJKVpMGkxsBaQiARHTQjH45GI2GOSPKcYc8oqEuehP4HIbrd9/fr1tu0+
nPppiiEEAh6mYRxHBN60Xdu2ACApI6jzzhE6IkOEqOY0R1U1Nvrp1B/6oR/GfhpTFM+EhExs1XDOFkkl5wxKxGQCH8455zk03jGylUYLSkAzIApklWTUK5XsnTO9TUZgtLaTOCRSGPrh4/v38zDPc5znNI2x78d5io5913Wb1m03m5v91XbTNcEzs3PsiUzqGpTmGPt+mKfkm2a3u3LkPTvXBCt2nQ8ueGTnnFMgDt57b7g1BCkK1PWIF166YzzLlZFVf4hYXHA+cR2tY9I6TMpi1LLmSFzM+Wy34QJaeIJb+VS1V7cX/KaVbedLtTB5F+z0Qk3LAIgGRS7dpvNHKLIiKwqz9z7ltkmj399cNx/eT9P02Wefvfv4sWm89zDHvNu602l49erNHDN7VKSsoADOwceP/f39Y9feiuTXr1/f3d33fR/8ZrfbMbubm5uHh7vf/+5f/u7f/Y1ZX71/9z0AnE6npmn64+HUtKFtiAiQRGSeBiLSrmsQTS7O2jQZDDyF+UwYRi2VkYHvCdmZ4161FihozCeyzmtdU7eq2AAQeFljbRK7hDdbOQk2bG5kAKabQ2gnQ+kuaMV8Ug17pJDhzKpaxntFaEpqZWDpISZzpClnhfnSVIAprnp1ZR1Wf5HolMeurKvyZOKjmlgV2LCsBQAgAJQkk2pSoDp3tJMka3XCgtrktLAnAkAV4woqmM3ADj0AuDj0aXVVmAYEAAETAq/Cm6sFNTHimRxLRMR2KZZBNDmrAJWUCzbI0HQlEJber03+QC62LB4OL8a8F9o1n2bvvRj2qErurp+wPiTrYwBLM/oc8+yqzZIzSt0CGaX4J7h1drNU9LVBWsauSxr4VINVVaVqqytARig62Iu+ImIy8SEQBWQkc7ZdRq2iMc4TSHLEGdMQR1To+77pWva+pCnMfd9fXV+3TRNjhCyooDnHaTJwox3TdZpikM62bVM01c0p52yEv7v7D69fvX395tYHrt6zwc6zaYyn4xBCuNrtTRfGpBEbHzwjAZqpQYrJfN6nKQ79dDoNh9NpHOdouErnCLiMzUVSkmmKMUZQQlQuqNUnYN31unw+rCoWVud5JirjsSek1dKGwXGcH+4PqjDP6XQcxnGcpigCbdt2Xbfb+pv91e3NTRc8goBmBjCmYM55nuM4zv1pTEnajd90O9Nk8SE0bbGNBSYAYBcQ0YXgnEtZDVC9/kolWa4AFqpnwmVF9glE9BL5Ls5/fMrYe/ZWTy6ZJXC+2Odcv8+TS/Ivu9VrxKhBT6rD5zEbiMiFEGB2ruu63e7qeDze3FyBaOuDd9j3enOzOR4fnSPnaJHmQQJkOozyeDh88eVtEri+vnl4eHx4eGBqPvv8y2manSOF/Ovf/Ornv/jpfr/vuu7+7sPC4JymcRhOvgmOEAhJJc4jmmuSD7WueuGHCxbvb0EABONHkWvJvLQVuWo9Fd+zZa6JJRYCgLoiF2BB5Jy+cxGrW05dC3uONmCUF0RBWPwRrZQsSw0CCDgjE4sAQJHVX617i8GsVtyplQtcxNLO7HJcmaNSwUWdwYwkDtZIwLokMhX6/GW1Vy2cUGxJlZqUZhQBlVy0RAhRAciv4KwmbmdhL1W1l+VM4/L7EADc6eHDIu4ATBUgUXBi9pdF9QgRKWwZiZy5NtsBKC+sOanVvEwOGZ1jM7+GDMkCGxEQORGzbjkHwhL8nvl+lR1Kn9iO4eJqLOfcUxr+Kn98op1zruufXo3LQU05gRWDqjXsZQLJOZMKqNhsD6WwRGI2WFcWKXBM0IyaS9+8gL5SlgKXVzaCumTJOS8i4sUmu3iYrdoeMc+MxI4cs2cERY1TSrPkmJKiJptGSIog2TPPKV3dXGfVeZ5D20TJOufQNFf7/el0AoDdbicpPz4+TtPk2fmm6fv+/v6xtEab1vuw0AlC07F3eRzaTcfs37179+r66usvP39zewNZeotIUzqdTkQkAsE1bWhVdRxHSckzb7cbRmRGR0wghplR
keNpOB6Px9MwTdGkUkwvselCmvM4F5sew7nMUyLHzrnAbDx055xR5UIIzDzP4zj2ABBCcJ4KPU1s3gxqeyOEpmmY8Hg8ItDt7a1z7v37jz/88F5E5jmeTqdxnBExBJezhuC2u831Pux3G+85S0SF3ba72m6bppGcp3F8PBznOYXQ3t7uu82G2bWbwOjIIZIj9uSd957YW2WATAKKTO2mszPcZGVKDV/oQ+eGp5mRQ8FAYu2PPUkK12f+krWcoxc+X6PLLE1VDXyhZeKIRCTxrMqxfo3Bd19ITJ/MFKt/FgAxG7RjubiKqa8WRcL1Jbn+iAU/ZcvZ/urmcPzTN99886tf/Srn/M1Pvn54OGya0PNEIIjwhz/8Ybu7evPmze/+8Ke2DUzY91PXwg8fPv4ifRM89H2/6Xanfozph5/81c/6fhjH8a//+q/78fSP//if/i//x//z/f39F1988atf/er6zevf/va3f/3zn5lps29aIEVKodsAAIJsN93702PKiIjsKIumnAGh3XTH47HEAUUEFlNoJAD0huwwFLYVZEhckl1ENB4Al/2Z9HwcoXTAGQCQ/RLq6lwPASDW9RCRi0YqISiaSota/xgAuBb3drhXe97CVUoJFnApFW6DqhJWlR9Vm7Vh4dtZYSUAi7iLLdDn9VlXN9OMNxNgpfOSS8U8XFGEVMBwfgigUVlRlc6TqayKwN7O6lJmCNQoi+Z/CataGQCSCgC4/vARKzYMasdy6a8QEbNHd3ZwhiEuAkgAQOhsPmEG2MweEWVVDE4YHZHViGKYVwuDRARwGfQQ1QU8x3I0YAsB5mdb7DmZlIANvQl0RnXa42ULaXmO4CXqbB35npThpYoXNPtYUYUMklGzgEJOohmhTPWkEhggF+07NQqlJpBMpc9jk+xUxYGyQQSXSl+MEm94LRE0eU94wvZgRjqvdPayqDk75pTmOPXTcFJJnlkBZsh93wOAC4G9Q8cOkZ0LIVhVB1niNOcYJWUQzZCJcL1uGv9dVbuuG8fRipLtdgsAfd/3ff/NTz7bbjsAGIbTMEyL6c88z4zkuy6EoKoxRgJwzlULSLSWSc45zvM0TafT6XA4PTw+pqRCTOiQmYjGcbZoZ7FQstGW0NJ5uxndYingRMR0K6n4qhMA2DvY/XpCJiJNaJlZBA6H0+FwGoc5payK1YpBnXPbbfP69as3b26vr71nRhAm7Nr2art1zs3z3J9O0zjnrEy+abomtE3Tet8oEyGRcxw8eUeOyXyFEIioBolzVV01Np+EPWtO1kzoaScTzvXc8uB/RQV2URcuV8OL3C142pR7UhE+ne3hRRn3FCBjOTiiKj6ZgDyPgogMJOQQnXPOAdBut/twf/fq5tp77trQNZOk6BBiLE2ztg0xRsmcAaLAMM7jNIXQNV0Ape+/fwfqDoeDVNGftm2nafjuu+8+//zz0/Hxm2+++f3vfvvVV1+N47jf73POXrJzzjE6BABJcRqGE64EWXK1dbWkrazvKkmymF+2ZxEuEBJAUM6VMyaAAFTV3tgEj6E6BpTTA0iAKybWa+kioxSUi6FY2GB/i1WOsRrQWqy1vhQoJAfAdB7+nQs4oGoYdMbu2cHVuIKoEKgYC08QaZEP5bPXrohbGQ8oqmYQAlByZXaoYgxFQS2mBYX6J6oF1AKgyMSCQhmlyiaAszAMSKoZijbIsm4/O8MRUAGdkoI7fPzeAl61WeHSIyZCYKwsIvMAQ0SgyVYTgyUQkZFwkR3Z8AeXMtExs7qcbTiDpAgZEJkEaQldds9IQJgU1Kd1YFvCGzJdhMOynVwJbMDLfW2pnrcDSA2Hl1xgWJqQL4U9SkksrzGxG1HUDCql51mnepWxrpzjuV4U0VIgmti1gKhogoJpTiIgkLTogdZVhixhgsJ0LF3ichY6JgYkUsAEqipZUpQY28alNMd51BwZCRFinofxFHP23oe2CSEgswUMM/ohIhUdhiFVxrpmOQ6HeZ7THJmoDY33
3vhkgNi07ek0mBFr3/enYdru969e3W63HTvUSSVlyVkVCFmSmA1CYBfjnGKiwCEEAwRlERAG0XmaTqdT3/f3Hx+O/TCOo3NN0zRMPqmIwKkfbcpYVUBBkcj5EFpL14y04L0P7BgQJKEiAROzcxRcIX7kec7znEwJOrAjYFQCmedkL5/G+eHu8fBwnMc5Z+37PsaY0wwAXde9ut2/fXNzc7O7vgpGqGDnNpuu7cI8z0M/HA4HFSB2wbdN27ZtG0LLLgghMzvjtIdA3oHlfCVxL9pNiEB2/ZL1h87DcFOOeMKiI1QEc9BapIHX4efFqFPSqaWNWWVWLJ0q0AymczMKVFX5E3IQz4uzGvaeQGywZpnEKKJY+ve6fE+yJZsKJQfRIIdlNi/mAIYEpMyMwBp8s+lyzle3N9+9++Htm1fXV/vHx8e2dVOaQwOHHnLOY5y7zab/cI+mGaUwDHAcht2+tZzPJrt3d3evXr1uUjPNw83Nzfv3829//y+fffZZ0zTOuWEYEHEcRyO5QsUoMEHOaRzleHigq50WtsIiQKF2NQGaqg7OSdQ8rYizApmQfOnOWRLLYieB4TOLzxwDgBTrNgZAKd1gRGQFAjTxfLT3KBIhVoygro0yAIhpFerODW1EWWkpn1EqoJie6+lrLaeWKEh6dnJeeogK+WyMU9xMQRfgJ5iLKYKYmQ4RKJhesSioK8R5lPKfKqgysSqSZqVqClc+jhey2dqBgZ/18EsG5wQV3HD8UFrnZSZHQHhWI2Ni9gZ/KGEPByLDyjklZPIWFIGQ0KFzhG4pE4mIQpW6p3I2c9XSXfI7Ilpo/TmL1WkM9K/ekyJwNkaeKikpAAsKqaGKSFCgAHbEtmcgBAvBjFTErI3tD0oKWQUVDOWUQaniW0RVNWdUFU0kKhoL/FeyrJQUQKK9Voo9rIChXaxSNP5mlpyzGLwZEgDpYvywIGlqAwQAkEznDwHAWdNElS0uakQVEzgAAGbm4EAlTYWdtttdOed8CMuutmxUcg7sEoCN9ByxI86arfq0+skkEE3oeY7Raqm+71NKx+Ox3ex++ctfBh/t7PCBfWCdMKXZ5DCY2Vf+GVaJS0gxi+ScckySs8lhHI/H0+kEipvNpm233LSSIQ7jPBt8VHOOIuf3seCt1R7PpFLspLSYYpmac2f5aYtVVuM6qwGLXCAROsnQ9+Pj43HoRyMIWgcYAEJwt7fXn3/x2e3ttXMMAIxEwQfnCWQaxmEYTqch5+xdG5qubTe+acl5NeCA944dB2/V3hmHudjAPhW/rfHgEhJZrj4t0/fnldxFMvc84H0ydC1dUyKsLdAFaPCpYpFWAowX/RJ4VmheVqirJamuKutdsJ7cn38aoUNWcO7q6ur+/n6zaff7vUL+6qsvfv3r3zjGYda29Y+nGGOMgt1mw0yo6LxHjOMMp76f533uAID2+/3pOL1///6LL77c4e7xz/cmr3p8PL37+OFnf/XNu+9/+Prrrz9+/Hhzte/73kjrqhkk55xUIWedem73W0S0fo1WdUpLDYmo+DUjJslGQqjiSoXmayxck7xXG8FgIUArAShRoZsjIFsuBISgVQCSa7Qr6QvVQ1pj2+JXWjIsAoT1Qa0wWzVmWMn166FcHPjOx7d0ERffBsaqq3UmKFvnVjPA2fVb1ajcQqqK+uI9kAUsqxkKpKD2NKX2NlVW4Q3VZoeoBeqC57W7Vpn1pwooKSMquvH0sJxYT6ArZZEkZm/tpoK0Vk/EzjERKzKzZ2Ymz94xO2LPzOWNnCOiOHo6l5K4LEnL9X+eGhIBQCaxoJaBgVTQgYV4hqy0bJEa/lCMwA92r0BAoIqoaFsAMhCoJtuuSohMCpZHmG87ANms0bIG1WysGwAqKgTVpwrN2U5BFpZfFY+2wyySAVYyRZBBFSDnHEu1VzJDEUmqaEktgnVBCutEyzpgvR/jp5c0C3MkRSJkBQRBFaPPS4qoSqBZNc5znAYA2Wzabr8DgAyaVKwf
bZMSBJAFYm3xAwmybNpWRECRmcFU5+c4z7OdPMM8RcnkuNl0t7e3X3zxxenwZwRJs8RpjjHOc47TnGLabvYiYiCXUmI6R0YVyCnHZNaww/E0HI/jqW+axtrpAGyKLMdhHMY5qopY1i+qysSmPnZOvZmZ0TliRmasIbCOqwFSitNUuHo+sNWgJoqmql23ReRxHI+HoT8N0xSnaYrRVMq4bcPtq5uvvvris8/eOE8xxnmK3vumaRkpxngcxhgjZAjsfAhd17Vtx761C4HIkQvITOyJPZIDq2usa4dYyfKF5AsLdGWFji4cPiqs43WEWMeS5xXexZ9+vNv54pOxQsBefD6+hItZfz390c8qj8uPQytjliWyrmsIAGJkXFZQhcT7q5v7+3tm/uLtmz//+fvPP/98miISkIL3HJooInMcm7YNXRtP2RMhJBGd5zRNkyV219fXx8P379+/n+fZBz/PM3Oz7TaM4XA4qGoI4ac//ek//dM/5ZyZQHLULDmmCLO1QFQkxck0xInNBsXCjqrKnISZyXkgJPagkFVzEi7m9IYzrqhdRFMRtAXc+AywAH2U4CzIUxuhjMtpYm9TbXptS9W+WfoDBhV51rLW+n20cq0M0gm0FGhPXNfB2QFiVaUSirj8IKjyLngOfhnPYpOqitUVPeeMT8zwSorDUMgV9kzU4o+ONh19hrSHah2nWthtbJCZUneuf7ACaEYgRTf1D2UbwkIAUSqMEEKHRTaiXJDsmiWMKbLp3Vt7k9lhaYeedRGj65g9M9aJXqkUmdFmect2m+5lCkaoYGBgcOiKugrDevv5MZFVe8uW5X693aETFAbOjhHTKtyWo7KILKx1y4uMGIDJjKnpREsGBQXBrApx0eS0fASKK7oCSCH2iBKISIIqQQslrGq1qihsS7QtCKgUnJOSfBRiPpaMSRCRQBEINYOI5gSSNGNKU5znNI7T3GvKXdtsNps+ll7AUm+dw56Izeo8O4t5ZrxARLbY1e5iOaFPp9M0TW3bXl1dMXPXbmOMRuk7HvvDYz8Mk2RCRQT23g/DkJIhRwIzE6qIpBhTSnGex3E0z7oYIwBsd50KD3M8Hh8Ow5giTDmlJK5tgURzEXnQyu/Wqiy6nJxW9jmHzjnmEhFtrGhcPdNIWepCWy/adnM69ofH4+k0TFMcCi09heBCcFfX+7dvX9/eXvvAIlkkOmTPLjhPAP0wDsOAoiG0RNSFpm3bpmmVHZPzofXei3cXZDuqbQ8rbJYfUuaQZNLSfBH2kjUPnzJgLwLNen15HmDKP5/Cm8vK+/R9lgL0R4Ll8+1Yu6br156XS738Vs/jtK6w02I22WdNwTpCU7WsBQA2m03Osd1urq62d/cnIhCRtiVVzjEeh3Gz2TycTjEnU+CKMZ6GaR/nxvnd/ma7PR4Oh4eHhzefv+66jgiur6+JhnlOf/zDn7768vNpGneb7eH4cHN1HWOMabI6I6TGey8AIsnMHIpuuC7WKMXfqgQhJla2Rr3zBrwwfCYbEELR5LdKuYYIQMXrhWyAi7XhjaJkTDNAIF3VyVLme9ZfoSXtqBAYKmHwIuw9LdPJqM5VtqpsXwZm9WzRAu8sax0C2OPCXKwhEACWBVYLTn4RKDgnN0Tn6aF9TQILb7gQzEhYz7cnwtY17FWVj3U4RKFVZwEAojgEcGnuS0sadDFWKACk8pgsRtg/XbCQ5hANb8Q22xNEJmuHeiTHzM4FZs7+itmyb0cEFgKZvQU5IjAk8BL21LVW7TG65Z6AkQGVCneQwbajEjKtG5+MzuZ5NtuzqhEZrDrMSim7JebRsznfky52IfSdIScWYMDm1pBIwKgLRksgi5o5lpdDJoXqhmGEazUDqRr2RCtWClRtRysSEoLaQlmT4YJTQkT0bIsfEGTVpDJrjjnOU4IUJ0np4jw+Ho9N07gmLBGiLLKIeY42eydCFTVoyTSPIYTgWURSTiklJA4hGHmcmQ0tudlsfGgPp2Prpr7vHx8f
j4chZ2Vq2tAG32rOmtR45cF7Zs45GoUg5zzP0zgOY9/P84yIbdvmnOOcjqf+cDhOWZhsaRNkIsJCaEywKJskFe/80q6k6jdEEBF1IWBYaDcv9RoXi2Y0MzrnNEl/PB4eHsZTn6aUpqRJPfG27douvL65fX1z23iO46gQG+873wFATBNkiXFSzUyOmbuu69pNaFtml0ubhINvU1hYQAzEyIwVblP7edayYiPa52wD3TPEo/C0Sm1w1jwqS55elmgvdiYvzvMnseclvvnz4PTjwW8JlnA2X4EiNVLCla2dAmeMhuExTGSK7QW2UAqQqahb0DBwuJiQiIiIbLdbo5zvdrvT4fiLX/zsf/l//5PzcDrNTdcKeJf0eDxeXd+0bZ6mkwgIwDin4/E47Ltm751z19fX79+///Of/3x1u3/9+nXfP3ab9nSahmF49+7d55+9iTG2bft4+GjSDSGEpmJ5iAgki8h4OjhHzm0BbGYt2ZD0hFllnmciYh/IsRGtBY2VV7jpYIglEGQSI+wRIkKhKVXyfmVLOzRmHCg5LlIVQEaQMH0Qjw4AZE3TgvMqp58AKJ0jXI0YpWdbkZznlao2RbVawNXXwsLWx5U9XF5peNo7WbUni8hLLeBqNLUUX0qns27ntUvh6oV0ln8xYsMT13urKdY3EkcKTnPSlS9tKfUqXq7uMcYqMj1OcUFsKhAiMTOhI+dr2HOwCnszw1oR3zAIS9mx9KmWCEThjLomIsR5nR2/sB3P3tNElDDaAfbi69WVKJNVeIgYqxL/clsvCutGtt0vmnuaDZYpKhlFTUZ1CXslcwAwbcaibSAmGFc0yqrFbMXTogBozpbh2pnNhKBcMn1GzHg+gexKcQZZLnSJDFlyjprzmCbIQiwhBGKd+sGklfq+d8615tO96ko559I055wdESPFSg0wWL93IaWUpjTPs3GrrS4049ZhGIjIh9Ykevt+qKbnPvi2a7omdKfHk4ggoc2GETElleKTV0KRDducc96H+8NjnHUcJxXsuq5tdgl0GKZYtUahIo/OGcmq2luiGsS4Xuixug+u4ce56Gc6Iur7/nA4HA/9PCejaiFi23ZXV1fdprm9vb263gHIqe+9591u03DX9/3UD+ZMsVTGm82mCa0LAYCzWWQCIKL99vWNKt0YK6FbCz7R9vNsNdMClVz/Xv0ESPJ5BHrxTz8ewPAZb+9ffdULz3wGFrvII9eHhog0Z3z2ElObRUFCt4wbyyWpmlLquo6IDofD9fX1t9/++ac/+cn/8//1T4g4DNrtHVDwUabHmYg2m43InOJcqr3TaRyv8yZP02Tys999993Xf/XV7avrYTgAgJlNUvB9P7ZtYwkNAFh31IQdbH5sMixL1xSqedMyZY8xmrBfgfvV9pJqqd7AhHVREHDRDS641po6PJ176gplXzQLa14EhoAjPHcIlrC3evxjx1Qr70Ar0H0926NqRbr8FfT8mAoI3ToKsiyna2mgUrohqKpbGSqpFt5CUS9YRbV6MlvbsuzkOtvLAMUGWSGryZedg7fAgg+snw8AlD0AOICxAFFNKC6feWNak0FdEkwApBYQBcczpQFIEdEFRcxEwEzklDkTCSK47wURbfDCrMzCXhFd6CzpBSKo40NEnNzNEtsKgrxqUiwXzJNazVn7qDCxF4TcPBbiPJx1EBwRzY09n85hGG3R9GBTWVWwBrTJE1AEVdBMqihJs4Am1JxTVM0ERXIItOjneDCHvPNFb5pieG6datZUhVmhKDswIZE6A7QZhGaC2mgmBVHxknPOLhgQQ3KOGmfIY8tIrRuGqR+GOUfsuq7dOW4Op+PjMN7cXO/3e4NuOoBNCAjQxzgfjpJSIBKR0+l4PB5zzj54CV6bcIrx7uEupbTZbAjxNBwfH+5fv37dNM00jRsfXu02jiTHof+IwyHHCZtuu9ltkSlJPE4PEUduabvtbvcdA5yOx/FwiDFSAs2aJ4DEvrlqiKaU7k995q04odaH
AEocEVUhtM3xPscY+zjFOAtkdjhh9jnudhtyyIyBqXOhc6EVx6cE2waJFCkJYJ7neZ7mIaZpt22IYiAOwcU0TdOUZwLufniXhpGS+NMwjv0AAFf77urq6vpqe3t7e71tUz86wjfbV85zGtL98IiIIEFSyuJCCJv9brPZKLnknLCJlllHBDKPVHwzCIoqrCR1COjJ+FtESMwOiROwCFGzXccSXRdhiPTU0hoRwTE8u+HC3jMDtLpqAoCXp7M6hRW0r1xh5Z/oAQuk+HmcOzcVFlgmAAAkDE+WHiwgC1Pix0KuyoDIzjHSjJUrpgpc5EcRsQFz/dWUYlZlJgBSyaM2jG1UaNtXrrnrT/Pf/e3f/8tvf08KKK718btvj1/9ZHdztbm/O318/7C9mvwmzifYOIiz07Ade8RX4e7D93/3dz//+D78l//ybew/DJQ/v/7iw4e73a5lvvn9739/dXv1ze6r288/+9P7H2ZN++3u93/+4X/8+//h4+Oj75qmaTYti6BmxeGUUYJz7CDGWVWtkdl6JISUZskSfOuYmCFlBgQuqPjl2AmBMwA7ggLmJdABr8UdmchMCRHIVfZ6hboUKwY1FsD5VeWQ8bL0wyoYWJ29HK/6AHOOemakmEMMqGoxskOzy4YiBaHq2GmdEBnHwF7qsIEKgSkbaRkDgRbHsApkV8ViI0pPpFtW1cjCDAFwqupxUela6jxrgV7aLNtfyY0A4Gz+YQm1VH0/ADAw0jrTrAdoXsQClgEJAuWcF43OMswnQGT12ZRduA78iCZETimBEjFYj5TZ2/NzOMtSrCzTVp/1NOxlLDPIhcRVc39X1USt/VIk1iKUdxPjVzAbbmQpE8lopTVxSDITQLITPItKUhUCUUkGJ7GO5fJ80VQUyRWxtDRrqlNOrEsePda0DSuYxVYu20iAgOCQMikqOMeqIrkUMXGeU5wwJwBt2xapBQAbwgHh1dWVuYlafQMAMUYViTGCiFkoiEiaIyzTJgADPVqlYrAUKxmtOFNVaoov+TRNDw8PNg405hwQMrIjbhsPooHZxoPzOM3znOcYXAsA7BAAYpwUaLYJItIwTP00IrnQGhFwOJ1OQNdzTvMY55SBwHlsmrZrG78CRi1rh/2K4ppEGkcxlr2RHOznHI9HYmiaRkQOh0Pfy/F4Oh4P8zyZPfp+v9/tNl3XVe20ojJnteBy1JxzDtkkxwpvtU60z6gt5gWxeQ5X9dpZDn85mYmISPTlem7peVx05j8FG7k4wf61Z52ffLHlxZbpRTX5/B2WUH3xANEQDfj8N+KnITDrm6kTqGrbtrvd7v3xewC4vt5//vmrP377URVCgPv7+93Nq6urq0Pf+zYzcdsqZZjn+fExdw31/c6zHh5Pm82maeDjx49N2Jis60z6u9/97vb29v7+ngD+6q/+arfbWSPkq6+++v3vf/+zn/3scDh479+8eXU6nRR5mZEbYATF1O4FijQHFXsXBCLiilNFRKAlGgGyIXsAsUiFLBXceZXA8240nl2hWddK3c6y9UEvD7Subedb7SWsmgpPjywvkckALPU4nole1fk9w9NZIIKunk/rN18X/VonN1o6n0/qyyX6PO/APXkfeUahefr44oHtCGdCwLV9hBUvqgT09DVlsc6SbM6qxUu9it9IBliFK3RGEtDkDbpifIbSkgKaR14UW2q4srA3rQPbcp276iK9LvgQMWMo/8S1s6g9dsb1KZkSMxGnWB+jE2ZhjuyWXAmBHZJxIKEYO01ZVEDBhFdEQJKAqBoJXapK56LWcw57Ntpd6C9gw17UkmMX0jbaKAcIkM1joZyVJdjrgsqidRsKRTWLmbJCiiE4ds4U65NkRWhC2G63UE95771jtpgnIsZbsP6M7ZDlQTHY854AFkHqm6trAMg5m7NP1c883R8evPebdts0gQgEhACJyQfWLKCiKatkRGycF6R+GHJWC2n9NMYkSSTGOGXJSTMooCbRnPNpHMYxIk0xTTln
VGCG4Hxw3nvvmDwTsfG9FFFL44DZOdcGBwBDOp5OpzjPRlVExKnvT6dT24WmuUrzcP/xbhz96XQc+yMAdJvN9fX+9uZqu+02XbvZdN67nKOIJEmkGmNEDACA5JjIRL1D07EzJDOzC8xM7oxkVl4159ERFuYorcCowIz11H3ShnqG1bQ3ehJa/oLw8+Sfn9Dy01XgWUcgxSVxe+H9zx34hSO/WkYRSgWABTWPqopFpgKBivnfeSVWc6gGWOo/a5QBUOl+gXMOAVNKIYT9fv/dH/84TeOm3fzkJz/5l99/ZIbW84eHsd2l6+vrjw+PfQ+7jsxhap5iGufdxk/T1F119/f3Nzc319c3f/rTd5vu+vWr7Jxz7W63u9psWuPVTNP0y1/+8je/+Y3xAAzzZWfvfr8XAXSYc47TrKrIDgBMpERVqaxayZZwJHWOzv6jFTxh8o1MDEpVbxPL/lkURkriRAXXDUhIxde1IjzLIabliEOpBQEq4HONaiyPMy8DXVxpqwDxQlB50qm2tpv1roBL2/l8UlgoM/SMGK20qIUtLc26xV7HFUle+weaAS4D3kXoehIIk3wqvP1IOHTOMQCKZFVe7gGQeXF3gcWUAMCO0ipBOO+RjMjFTEdAsWCQclar6iFLZtJEwERK2TsDqTjks84nQIrDRWCzPNeUHpfrf8l5lTo8DwjP/EIr7wqVHtE6pkSknhE5EznnlFnJk2NFhoKPcskKxAqIojxZVqC6TPIiigLmLAqYUAE1i6pARgCBjJCtGy0VRGsXw+oAWJomAMQO0CA5SAXQSQpKgpmRycA6khGt788pDQiiOQEKojpiYGc97JSSyVt3Xbfst7ZtrXQLIThmE3kiIsOS2F7NsSjLWHph0ErnnKQC+DbpLzMu2Gw2wflpmo7H4ziOWaR1rm1b8s7ERrGQj8vKFWOUGPMcU4wS0/39/TRNfT+ehiFmMTnKKaYparvpmq6NSY7HYz9FImhbf3ff23HxHn3DzpGqxmlst53tVUQlBmZ0nrxHqiPMlFLf98fjkQk2m42qmgugFW3jOPbH4/F4THmb8iiSmqbZ77vb2+vrq13XtV0bQnBMIIJQtJpyztmxt30bQghd27Ytu0BEzntmR84x8+JMwsyyEtVceymsEzi46Fu+NMA7d72ebf9E4PskbQ5+9HbxqouI96/WeQvk4WLdWb6/1hnAp77JqlC41FFSVVFxzosIae42LTM/Pj7mmG9ubq6uQkzaz5IA5nlmF9q2jXFMXthQnqIF6S4YQjge795+9tPb29v//J9/++r242dvj8yeAP/2b//2t7/9jfFWf/e73/3H//h/+uGHH0D022+//etf/vz777+/3u1zznd3d7vdDldEPU9s9Thb7wtBGDx6s4pFFCYPi0NmyXipoqwBzfUcuYh51LCHBfaCFeT5BHK/bEezcUBZOnBINWiiiWXXEGoS2WD1pSJUB1MtHHIQBeJi4SIokEFI0XJaISWlDMoKGQQKHw8NUeKWfibSAgBFrT4JNoSrR3mp6jIAF7Gx6nWqxX2wds6KH+ozl3bMhXstWM11M5TVlNZb7DnGz3beESgJqRqTUFBITUZ6Td8+k7gRbVT2tPa0w2b/KYKQCeEIEJn2uihkRBLgatDkABgqmUCBTXsl68mOqKzCGwBodi+GPcAtVBb8udBEtIaQc45KPKstU++IXCZSF9Q5cEEiETn2ARCVnZTqkMvpIKYUnm3CB6IKSUVBI6mRI0EMtFkMcuc1X2QZmNS1oSbIqsD1eUZRBVCGgvJCJUKHaNruGcUQyGQwN3snBUYUAwcpjf0JCVxB2BpUpyh1AcCSC4DtdKgWNgoLj9sgFVL74pqzmQ2hwqbbmLDFpu02bZdSGsdxHkZN2TXOt548lWEzgGP0nkkFCCTqPE5xGIdT3x8PYz/0mk2WJQtYmpJUUMgTuuBtEZlzShmcJbVYiUhEBIhatf9EUDOBekZvZHnPLrD3hbQwDMM4DCriQggh
5DnO06Sqm82GCU6H4+FwFIHxdMzz5Bxtt93N9f76arvdbkLjvPOIkDQnLQAVBckqDhmJyJupQut9w84pAjlPzOwdsy+KlsxQqQul3168FM4gF2BehPwQGBSIz2vik/DzNN6c//lpOvnzSPav3n4krP5IwHsatexz82p9sNWtnICqWg25EZRM0cqKSlj6HAD52SeiKCmknF3bEZGk6Jzb7XZ3d3fv3r0TkdevX3+8e7w/nhig70/jPG233TiPoBhjxiTMzCxgSQzAOE4icnV7kxXuD48fH+5vbl7JEN++fQtAMc7M/N133/3ww7s3b97effggIjmVWsZSqLZtC4CywEYAGRkZAJhyzWkAhFTB8jNUhMUq3biaZUVDk05YWhdQFDrLpvM5s0qZLuAuiFjdxmwdLn3IgoWButvP1kRLZ6q4mJa1ywSACKQESEVCAUXC2ruy6R4ogbHMiRCK19tZxwAAZAVRgUJLBLB06ix/hssDEIDiC1d8T6GQ7otffHVIJQUFJQEudWr5AlCFuO3nlS2FhU8owgDovGcUzcCQZblH0aRV6YwgA6FoBiKFjFVTTszUDqWgFJHKNBUBmSAD4lL3QGFcG/URCFCSAqaCL0VexMbIhJcXFmddrHM+630sQRERs874dNq3yMQQubTQ5JHZESE7z4RMjsEFcE5cYGZkh9IaDREKLoYFARS5pJxFVFpUQTKgYEkdBQzVWUiagJRKtMM10Xc9pLFuQ/EFRLcwEkTNRAFBNTORM1vwynZAJEcIjJpFNOccJSWVhJoBJKZ5t9s1TTPP86kfRKRpOiPVWbdNiiVrtgcmlTkN4/F4NPqdaXQdx9GeYCO9lFIbmqZpYoxd15kd3VD5dqpqxqo2biFA55wPzIAxzqQgKacY0xwlpjTHeZ4jahZBIu+d45ABMXvXgAt+mObDsZ9SJnZtCzHGx8cphOuUUs4x5TlnJG6IXBscATKjdy4E37ShaUJo2BcwZJqm6XA4GOSv6xrnaB5VVYswW5yGYej7Pud8OB4QcbPd3L66urm53mzb0Djv2TwssyQRWXKXwjl1zgeLeZ5KS5Od98zexnu6aldQCXtF85aZF/U+rPx0qjazy/n9Yrha14iwaub8eAD7C7d/Kvgt7h8X1dvzT6+Pf8y3oaITcb0yniNmec/L6c76mYgIID5wykyZttvtZrP57s9/TgCbbft46HMGRzBNce7jF1/ukRtVzMNMAo6YCOOcD6d+mPYxp7u7u+12e3sbpmm6v7/f7a6I4cOHD69fv/7Tn/40z/PNzc1vfvObf/iHf7j78OHrr7/+/e9//zd/8zenw4Oq7vf7cRyD52XxASOuEQKwExAQUkDnkUGMgI3KTLbK2w87F2oMoAbKtCl72Z/ECoBKQIveNKFBoaCs5Gs5U0nn+Z+czyQwuKyVXgY1L9GHkMEEm0HwfHyzzeoYpXhya2bbrsYuANWMYNpxCmYNUgY99uvMqAEQqG5cmpW4alvi8teS8mey6rDc8xK6yhYlOP/VEnyhjEBCQoIZhS7/lzEv94mEhFxwhMXrGrMKGEdKlBUXa8H1fQIkBUGzgMRFBiarcS+Rq6yWEpBCJGVAQSXVKkgKNjkz+xwytraC2bKruYEDrBGkAEDq1lfaudrTaMmMINpIdxkCM3k904QLO1AICB06Fh/Ee+eCsAd2GhslZvLkvNGuanw1hGcu0MvywFIOAVBAKdaw1oHHmRSkCOudlwNrsms9y8/LhI2gGWsLA6qBRSkKxE4cEQTzFc8gOc2FcZBTZFRGaJqGmVNKw2DKlg6IkDlPadN1FrfyXMD9KSUQzUsPk7jxgQDjNJsAm6l5aZbGB3vn4HzXtEQ0DMPp8TD1g9WIt5/deO8dsfVOG8egklKK40SAcZ7nYZzGMadkLJzT3OecyXlmTiLTnACQfZOzmjJLFvA+KFKSOM7gJZm/BCJ4YlJySI648Rycb5rQtL5pvJnLOk/GjOhPx9PxACC73bYNLqcU
Y3QuOMI0p2GcYsxxSn3fS47b7fbtm1dffP726urKOUIE1SyIWXJOyY4RVkct5saH0LStDw0VfQbHvnHBW3uByNXnExEpORu/VP0/tkCCbtHw40WPCgCU3JpDcA4Yy5Zl8IZP/u9Z6Hp5iKefCHvruLKe8DEVOIYW6PEZowxQIObrV0GR4kcFBWMmF62BrKX3BIKl/49VTasuh2f0PJVcvShm2chKVYnA2DIhhH44IWm3aSwbEzGDe4onSQIIkFJSIhUw4AEiS5Z+ms1D2Lvm/buPv/jrn33xxRfffvvdhw8fXr1682p/ezqdrq/3r1+/Pj483tzcPDzeDcNwfX1jQj+GgchZjdlkIwCPhogUEVRCYnTEc4qiOYBj5gwioATinFNFY/4uiUVd5ayZabgX0qKNDGaWRzXVVwJQpUJ1OG+3dXEZo9bjsvAclnlLkR6zs03qLHAJSMtJVeqz9VFfI36XvhAirEjoddynBl/nEreWIeA5CpbWF+Dqc6Ga3lqvEZY6r4pHqv1VxaIGIRISOQRUElshEcFE/clg1GhUWUazxyMkct6ZMBaogRizCYcAI50ddfFcCLPCagvWviuYxoACLULS5fkveTqrirH3S4fA0kOt9NUn0WIVPNYZnxT3euZke3gpD5d0UoggFw6DNZSUCNTAfj4nR6nJvlHHQA5yQGJgry6wd1Jk8gks7BlgqLR3M0BVtkMprrWFYQEEGQEIM1a6TC0WnvqHXYQ9NHkqQMTqawVshjmSUaOZIaNqmmOOc5qnOE0pTiAJCIF5t9tZidP3vfNhs9mFprFPMTkuUxIsV47qQpvz3m/aruu6eZ5PpxM3AaokoxHAm6YhwKurK7OUM7VPE3CJMe52G0anmiUKIRBqjGnqB1QQlTTNp8NxPPXTNA19fzqdHmTy5F3ArNiPw7GPjl235YfHY5SiLprnJIqi6LweHk+IwAQhkHPOEYOophy2bRN8G1zjQ+PZ+YKYs57tOI7DMDSNb5qGCUxXumkaUDmdTv1wssL38XFsWt7vt69eXb16ddO2bUoppVkEichWUqyDYWZyziOwd8H74JxDJgSGM4Kz8HAWMXdCt6xJ6878ov9QevJ8vkby6uRfP6CnBNOLWPVS2MMf+eu/evtUxQnPKr+L24Iv10pMtm+dcrp41b/16y3PH4Yh5xy8Nx8q51zbtnZCh8bt9/vD8OAIvaGINQJQBhPHZJGYothgu2mau/v3jsPNzc2//MsfHx4e+r5/ja/MbOTNmzcf370fx/HVq1fffvvt3/3N3/7jP/7jN99888MPP9xeXyPi8Xi8vb01cTJrOIKqQgZwBKiWzmsGVGJcFHfZkQogle4foiCDWSVXmazSraxSWWKGQ2bPgKDWNLW53Xq7NcUIYcVYwaVHjrj0n8+BDUBK2YHWt8sLIsFK1XpvNtkmPCaFt254FvOlYQDzVVApPhA2GLLJl/EFVaxHqIhg+odW3YKIQmmyAiQFQhU8uyrY4yhAKjZCNL6eIKIIFYCMxck6X2IT1xKAGmeLZppYH9cFx8U512leuejGnMsYVJ9sp1L5WbBUJATkFXxj2Yk20wR4Qita0g283PtQwEV0Hnfr+hrXOK+vw6UcJPdE2m4BpDEzKiKOqBUeroSCkACZQZ0qpzTm6Nk5ch6lAWRxnnOA7Ng7JUJ0uZ6JZLJRAEgKADlHVASUArdb9F11rlq6BarHeGbeSEHTLYM28I4Bih2lQSEWIascUxrnnBKJOkJVyXH2THmScRzG8eQIt9uOAGOa3333PREJaNu2m+1ut9tl1dNpcAoSk7hkJ+s4jiaeWcjdoibcZZKvZvo+nPphGJxzPngicsSbzWa32w3H0+FwOJ1Ox+Nxnqarq6tvvv66a5rj8SSSd93WMQ3HU386ScpN0x4fD/3xJDHlnB8fH0+nk6o2241miTkBCLLf7nzOOIyzoks5xmztHkyS4lwhylIIPsTQtmG3Cd6BZ9f64L2vAE5UlHEeENz9
3Yf7+7umCa+ubzzxOPUxxjY0jvh4PE3TBAJ3H+4eHg5XV90XX3z+6tXrN29et41PcYox2snT970NRhHRe0RyBCiKm65zvjGNTZOWJseiCOQEAVRUKtwAKCs4E+esTPm6JoEjQmAtIwsz3cJ6Ij9JjC7aiZ+KFhdp4tJpXL/JsvEvjzHmN/n8g55SR86T/mmKNcBTKfZMJhhdgU1Ybw+thaEkZ5L1mQkogmzq8sCACTTGWGzXAJqm4YzzOIYQdrvdhw8fnHPHuzsAOB6PALzftx8eRsnJN3mckiOnAiatAeyTDIfTKWW53u2G8fjHP3379vPPiNk34bvvv//s67e3t7eHw+Hu7uGnP/35r371qxC+vL6+Pp1Of/u3f/vrX/+6aVpVbdv2cDh88cUX4BAUCkCsbdg5sW4BqA+spUkszjMrq4KmCEBs3c5FUdMWRTC6HgNZD1MRgNmAryB4Dm+EwlxY4VKG3yXIkTtbWS2nm4Uha28uBD5EBCAh69gJKQgSigopAWRFAsir3p716nKWdQGiBACMClkiVRInFEKfnXhSISWlyy0IxBhFyFjLWmjN9v6e0aA1WMzJgEpIrN1HyxGWeaTkpVhcXxmGqn0O5iz4A+MvMBTlmOVxLcNsIFhgrEuYWlj06y2rMHZ+bHRVrYGu3CvULGMJk6A2lJT6N1jQR4LICnnZsvwVkVX68g6AunLjQ3LWciUlVCIlW29IkZRJhISJEmpG8ZCzxIjIKKwaVJ2IR2ZBJApqOSyRLpJCdqihjlzRTsgisGkHCdT66YzlO0PtZ7AhVkqrk4xtZu+hRAqQEdCQ/IRi6wOWYa5Ow5hjJFTPxKiSctI8z7PJLhOiY18tQ4GIgguIuHj3LAFYVY+PBzNVMXV5K/7mYUgpGaq4eLe2XQhhOJ7MV32aJoRiNe69T/OMKowIKhIlp4QKgd146udxmodxcWOviyMKYM6S1aajnJMkwZglKyI5BU4pjbOmCACQMzCD97Btw6Zt22Ci09p4Dt43PizMOTs/D4fDNE3GI4Rqs2doJgMy2Ebn3Nu3r6+urr788guDs8Y416m7Vt0NMuKN/eecN4rCQssjU6DFQh80+Y6lI2qITV7JuJcItJpAr1WQamJ02RX4/6/b86+9DOqWJ8DT4F3bp6Wpq1WMY1lV1g3YLGLDBUS0Om+73e52G/pgtlqompmxaXiO2YKlIqiiCuasmou9zvF43GwcM0/TNI7T7e3t4TCo6sPDAzPv93sRUdFXr16dTqfdbmfSDVdXV+ba2LZt0zTjODbbdpomreBxS5vIOxdY0MoCKSagWExky6RNFVBqcgzIizakeS2UKSbxUtQVu4ZaEVgDCVhNcVdLOVBHTHVXrwOeRcR0DoQgNj1jrFO1gh4VsqCLCrWRaWEYIdcDZSQNsKGgxVCAs77BKmklhzWykDIIADgEIFmmduYmAECaBcz8CoCrqAsgMqj9gBLgqik9lFig1QfXYBNU7rU+rtvNvs5ZtfEUlnkJGj4PIws7sG7RdTq5PtfXKgBqsy46fx0iBWCy/OJsQmgKpBkqFNUwPIUSaYevSIpXCA1YngKgCAwo1qe1FESlBFYlAEKlqnLvoPSgRQUpZ4SMyJoQyYkS6qzi1LEKAyJQY8sYYAlWWvlJdl4uobysVhbtFBC5QqYQgMrkmbi68NYlzxqPte0MUOgiNjomI7wTaMqYs4iMwwmrgSpISjmmNMdxYiYRYXZt21o3MguQ4zYERCzM9JS1qvDM4zQMg4g03juieZrmefbeHw8Hc+chojY0V7t90zSq+nh3H2OcxnEaRwDYbDZGk0jzRJIREVJMKcs8YRZybhz6HGdrnE4pouPQbWKMQ1abi2TTc9CcFLJgEhClLJAV56jjBCnaYAOIoWl5t9tst23TeOeICYyT7r1vXMPMACSSU5Lj472kuQ1u0waF3PeDpGyB7XQ6pTnGKR0PPRG9uX1zfX399vVtjDHOaZojmhuRYM6GkDbASnA+
+NCYZiaHhp3jENj76v7AxB6QCRl5Bdfkws9bIA9idZLlOKa5a0Xe+r+qPbWEAVvCPsnPu3yI8PKWOpP7BHFPP/EJFXn3coRbv7xMdGA9gCRRUYNAVLksG+ogkUpCRDRZjFXuXEKdqfspigjZVL2Cy9ahlBlDcG3b1gwDRTIihuCywDTN2TxbgQQwZtWUGUAF7+4eXr+6DqE5Hg+Pj49ffvn13cOv55zv7x69a968/qxr8+Hh7tWrV3/6059y1mmKTaNXV1en0+nx7j62yTk/9OPmaosFzKGlkickAnaICzuOlLCS0RcUQI1hAoAITOfZnvHxbH3gEuUAUGqSZE4tANXvAagAWAq+sVj0AVQkZ12Qcwl4lXVua1cdVZXAUasrsDlF/cYl+C3ng5VGZL4HluzZulvqgWwnnlRM45K5rLt8C90TyscDMFbaV/HDsbhROpXVIWfZbnhVwDPmU6HKaRdEKJSSpLwrgqJzHlBQUFEoQ66PDYTCglK3k6CgYC6QF0Blm/wBISoZxcF6xKuqjtDpxZYKoDXfc62vogX0D1S/dJFtX8nhUAHPWoQBVX7qt26HtIxBUQDM87fuG0AGNpo5WgxVtaBJ6lAEiUkzqaNMtiQVR08lQipGmAXDW2pO490wkJbUyQDAaLk8gVnGKJqFL2QiD2z6WQgAIJnIkVOiAvsRzZBBJaMAltQsZRHNkyYVEUZCFUNJkgJkWdagIsdMNM4pa/nnUuoZwcgkJ8Z+8N5v2tZkoIsvj/dlRO+9KZsY4HM89dYLNSa7Zdne+5yzA0miKpIENGdJOc9znuNwPBk97jj0ItJ0LXoHKnFOIpCSCKAKCJAAZiUkzjEPUxSRmHKKEAVQYN+CZ/CNa1ofGueYHZNnDc4HdsGkBpRy0mmepjFabdd1XQjBHAcJsOs6Ve37XlLBsrZtu9/vTaslpRRjsvE8M0uGnBWITGM2hMY3oQmd6bxwCGd7P2YzSyIiMH/mYrxcmp+4op8ux8hKvZdjjCqe53+XNRP897h9quL8kV7rua5dPWe1EJ+nlRcZ9ktd08vPWjqnOceU5uXcNlKpcw5wLjq3qimBIKgjACpWbWLsCDycjnOK2013d3d3PB5/+ld/Tb/6bzHGw+Hw+vVr64I8iNhVMM8zYTGiyjkfiYZhsIrQrCVVNU1zcrTd79i7ovAOoIgCxR0WK9qz7qbaJiraLGXG8zT/seqKSghdqjpUqOWdgX/KqUUgJeVQMIGUpXEMVlnaC3UBttg8ipa0g4oUNKkq5ErQzot3A5LUVR7R+pNQO1uGVShDy3LUaKFpvdCKrJZbpZCoPJYz8WB5YJrUSw/yfJIsggdL2bXCiK7eoeDny7s5T7VaBiQ4PwYiVBJQgIKEkSJ+AyiaoVAWqcCzKMkSl2lBbJKikry43bq6YsBTBKMFkxKwMXbKvfmS22GroWZJYQRAkXjxWzczHmuRFhdhQmsSEKD5szNZAFqSbCXMVLkSpMRAKJGQKRuaGMEGyZatQVVHRzRfRrLc1T5dc8UTM2Je2lWKiJTP9EYgRG/tCGQkBCYktAF3cbfKhSlogjAZJEtOOWXPpKqiKcYoOTkEY5EZYdaab6Kac9ZynaH5hNmKP8/zNIw2ithsNm0IRlfw3sdpPjw85pgMtGlgFgCY+uHx8XEYBlA1T/bWhy40njjn7BxqlBRjikAKOqexH6ZpOjw8ZpXT6TQMYyZQR0huTjErZlFRtm6nAKqYl6CLcxyGnHJJUozA23VAqM4Rs7W+1SG0odm0bRua4BpHXlRTysMUh3Ey4rn3DCCahaC4yffHU5xmg/DcXt1eX19f7W8QcewHXcjUoiIpK6oio3fOh6YLoQ0hhBBc03jv2QXnnDMnBybDYi7oTSBmYjL5RcNwggJUHk6Bf1FVJ6EKaKTlgkS+DDPPQ8j/vm9Y/yudOhU6J4tQGuzW5lBVgIjVUtIKEFrh+p50kApR7GzSthByKmFcrZmcUpzmnDMg2XpXaoWi
WgmwSOu9frVTxNMwhK7tum4cp4eHh5zzw8PDN998s9tdpZSurq4OhwPT7nQ6tW3bdVvbknPuuu7h/v7q6kpRTqc+Stzud8w4DGPbbIjIutcG/rPLvLjYmzgH6dLmzqBYp3dAdgXYfpQC2SvgEFvehRexm3OTE8ugaDHntTYgVPgBVOocVjsfxApCASgV/yoROQ9bz2FmKf4MxVGOEZYZHgAUHZdKHyzhBuAM3bSvKBaJl88FAFWQBd21im0AttZXLHH9ngYjXskIIaqoETysMgU0LArZYzVAjYJzzMBg5uzVx2ix9QYGV4bSpPaYidU02gT4XKoWuh0DAJSZKQNUxPKyXSz8gxKKFH1sLonH0q2tSJpFVuD896osvtqiZ9GyurwYjq7MsMpAr95I/FkXgwqxmBABhAEBhS1cSvE1Fgv0TKAFBWpf36EDALIyDsoQH9AcIRhICFltNL00rKzbQrY7BJEQlYBL3mf6ZmYdDIJS7I4gZciSU5SUc04AhKoSk0omFWJnfQqTDbOBVtKiTmlwxFhvs1kLTZOI7Pd7E2QxHTLvfX88vX//3ncdInrvN5uNc26apr7vx9rbtGH+drP13heikiTQrCmDalJNcRqHwbiAKaUomT0DQgbVnKKKqrPETAAEUDLMSVJGgTjMcY5gCxSh856ZfWhGEC0ASSJE9d5v2866rFba5myw0jxPxUoJAOZ5NnpiCEFE7u7uxnEkorZtr19d73Y7k7tTBSJHjKqQcxZbZICc8943IbRN0znnvGu98941uHIUUbLDXr3xwFgKT7Q3z5isutDUTWsB6DPY5EXUyX/HmPdvrfbWczi7L9jOQh3DZbQMy8T7fCsvBThXFxfAURFRyaqFDL5WPy3lBZGqxggpATEIsApIFgIA7xBRMuSkOenhcKCv3zrnhmGapun6+rrvv+v76f7+3q6dm5ub+/t757bv3r3bdA1VWdTr62tDPCHi0A/b7ZY9WeWX5ugCq2qOCYOnijcAAGIgLlMpG5vAwtjUs0k62khkmeFBMWpA0HNVUpp3Yksv1DXTeqfnnQjLGBCWFAKL755tV5DzgStTGsild1roJMuZmZeehfUhEUkhV7ZZXrrpqLKKRFhCHpbhVEHwGMuxnAZagU6A55nR6lN0gTcai7Ss/6pabZC58CKwlpZ1R6gWXKWqLq7hznv3fIa3BsBcNCLgjE8+Xw+Fal5uTwRP8bxfy6yunM1Lva/r5xv9/UUa7MugapQAl2EPAIDZqn7bX7rgvzV7ItNyLOeEo6rzCwikTGAMTguZAjMZCRCJkFaaeuVDqUgtWLKGjIWPYe0LskIRgDwpkKmjKCGWEY5yTd+0DEekRART+8wpS9Kcc045R5CcZlFVkUxkRQWnOM3zaHVMCAEUSsMnBCYf42DRznqb0zQZViXYok8UQtAs/fFkdkLWvSwilqLjqT+dTkVpV4SZt9vtpttAlbqI0wQpE6rknGKO45SmOU0zKlQTnzYjzSlOOYkKoQMCATX7sRjTNOc56jBKEiAEZRJlUfKu7bqOaFRQJGWHwbF3HEKwgOd9IGJVTSnPc5zGeRxnE0JEhTjNOacQgqR893h4fHxkZk++cc3V1ZW1c733zjWmaypZBdR2KiJbp9eiJrJH5yrrTld9S65CqUDISohMQKxIUqyyVRmXswWUqvIGFYneeiqBKVdWZALCk0IH8JMTNj4Lm//obK8Sr+RTYewT70/68vNrNbJ6vQIoJBCqopLlpxkcTkQBEFkBDP9sywmviBmWLkuJiwBQFik5WzdDlrRcgitCCHZd9/BYTlTnHMAsWqd6IjFHwNApApAIxKgp5bu7h5zVe9/348PDw+3t7Z///F3bbr777oftdnt3d7ffbruuE0n7/b4/jZtuBwCn0+lmfxVCa+p31upnT5vNRhGHYQAHNllABg6BqEwusQru1qSokM5Kn0BhkfhHpkVj06Yx9SU2naJqsQCLrg2AaAGELgelqpQt9Z+ttAvE
txzGBeILUDEhz88XrcLTssi0gJV0iyirjfSMgH15ZmHpecpyotXlfzFWq5nQ0zBc46IhWqGeBmd+IZ1njef7Jx//tGosw6AFAvciquWF7bTeeM7Q9En0WgkjPZf9Ls9cMRvWJhFPpyAX988fkL4c9krVX7acw56U8UsJe3YAiMhVqB0RIeX6lOrwQMhM9tL6AVAzqSXIldwMCvPGVjhLyIgYQVltQk2ElaZYvUJXiQUAA8bayYFsDrcW7WSeZ9vdzOyZEHGeZJomIuu9+XmKqurYe+8JOU/Z6jy7GTl9WfS32y0R3X34+OHDh5zz7e2t77rFjjWOk5HfLVLaSwzAaW9FRDCMZjQgkCUmK/JqKJqTqusaVJxTGqckCD44UCUSySASc5ZxzvMEjxECgGscoZ9TFlEibppWsoAqEThn9uPsA5uuNFTUZS1k4zRN+53z3hPgglwdx/HDhw8AsNvtAgcjY+Scp2ny3gNojHEcppwz+9AEZueYAtXZHpOvrYMz7eTpdaV4Nj2ji+3rQgaf3lYnc+lPQNUfXF4O8N+z1PtfcdPV0nBxwZ7/+WzL01oPVdWKtnotn2vi9Q5Z2pvmB6tFK9zEdGaAEidEJAJQ1Q0RgZQgZzCzLVsAT6fTq9vPrJL79ttvf/azv3r37l1wrm3baRpev379m3/+9eeff24Q0KvtjpmHYcw53769iTGSw7ZtY87H4zFjfvXq1TSPXhms+kTbIUuncn0mFO+9JeLhGd+7lpdb1DmoLKKml1c4IXVRArF1tS5uuNpOFeT58kHRc9tTP7H98vnGF1vPZV9IkT7N7zw/ZX1wRdZPW77Ak+evTgNaJYTPg9eLvwgAHLG8GN6IqP6kJ8rctv6WVX/9k3gd3s63lOTZth+r51SfTPuX68FaChcXCSKSzM+3w5lXxJfP9xVJ/PS2yLIgIqJfHrFbYiqcbScQ3VPAwvnnWLREIaJSCBKAAuUMdSBdaXsIoG4v85zmeVZhR4TgRCRFSMOggqqaBXJKMcc5jjlnxMzsnQsILsc0jXGKDnj3+WdvQwhz3x8O94i6v4GAcRxHTToPD2kaGu87384zAUDTOO8bg1kS0bE/9eMQQui2G6aECI2HOI3ffXgnon6z+fjxY47z7dX19dW2YdDcB0iBomZ5IA4+qKRDP52GwxSnEeaJ5EN/H0KnhKchzqKIDQcvMTc5JQHJKFGHiU6DO8w6R2zaLo5xnmLnddf6QEA84fCOkmy3cNPwhqGhvGmb7bYL2wYdgSf2KADz8XQ43uUUtxve7xqVaZxnxNg2gVDG6XA4fnx1c7PduKurHSKeho9t2242NE0Pp+hEQKhRVKQg3LjQsvfOBXCckYS1aZxzDh1mzI3pGAAhMnlPxWOBwTlAVmJBNMpMaSeptbkXMAuxukV91XIfLq6QqqqJlyuu2FUXHZOUEbEY0i1XueqaDn9x/VcViDrZRwRE0SfPWS5h+kQVSJ+oA2tEXg3hEBRgA2w2kihadR8ZAOpqTSoq9SupqjakTxepUlXkKGajjAJO0ChjOTeCc45OpQkekntIcxx6SbPmTMCqOk85pbLH4ww9z8RE7ATpUSeHiK1TlXSS6zdf/pf/75///d//u8P9P6XTIbzavb3Gj/cf376+vf/4UVL665//9O7uTkSur68FcpL48e7uiy+//Pjw8ebt7a/+l3/+9//+30/T0Xvfdl3wrWOZYSbwmmnX7hFB5pxSJIfeG09PchVfso49oliVTOSs2YOIkGlBMGBR4K9mvFjbv9W3agkJZZ2k5hPH6+Xj61fV1YsnxuUDjD/+/Iumw+LVdbH9U5/CLv/Ip9fvALVGLE3d59tf/vL1gWN+mcDwqVeu//ji8y9uNkF5fvuRfPDiaK0PLTwLe0wvxEIA4Cf2jOvtS2/9CWRu/c8nKbl78v5LtVekGVYeESU3X7nAF6yLrXZ1KonwJFAOw2AqR4XdBSwCpsmyOiznmy0WKUpKwzxOMWYAsuJsnmcT
sGgaDwAxphjjeBwNhNk0jXfech+TMfPe56yPj4993zdNs91unXM5zd77GPM8T3Ga55Ry1hzTdru1Oo9AxUxkgFRzCA2ITtMUx8kKLBNJCSEwcwasvA+CDAC5H2cBjIJT0hg1iiIwMeZcxKts74hk0SSApIAIS7+xqTfvGBFNONR+tcFWmdmmet77nOTx8dHc0W5ubvb7vVkxlHZZzimlGHUR7MbqMYbINjWkFWXFzlhjNBpHyZCfqjZxdCKiKoBs7bpi5p6zjZ6gpm5GPilV+zLEOBd2T9Pn5f5p/nduRi1dl/qE9ctfnBReXF8/fv1ezOqeb1//FU2sxFYgKApERkcTseXMYv/LNUR92ycrA64avIhoB1dynFNe1IIWyQUb0NpKuxgamlK/qBiEiiqCe57nceYYs3POUuGu6/wwee/fv3+/2+2+++67b7755k9/+tOf//zn//Af/sN/+k//6Ze//GWMcb/fPzw8/MM//MN333233XgR8eO4KNNKyvM8O8BqoWhoLLRZPTo9l3a4TLwWprICGLfPSlUCzAwsKNV9nQTF7m07A+vq8aKrdXEPz7Ys2xc6eYEo2hZcnAtAsCQxpGCfniETFGy/2pb6rS7uM8qZora6t/cXKp/O9VMMI2pENHN8KAZulQC33l4EQ8SQpSSQCUixECVf3G4cBVcH+89meC+FSliAOn9ZzPuRJyzV28UV9bwmtVtZmJ6FMY+Xz1yi2ifC3jlFuvjrCzEPinbi89sS8JbPKr939baEy1/tRDMNTwUkQLZueIwRkRnQFK5yUpNvtncjBTMZXryGzQU+xTQM4zSMqti2G6PWmbwyu2InNE3TNM0muLzM/Zc9L5K99+YTO47jzc3N9fU1EXm36bpujnkcRwDIMcUYm8ZfX1/v2q7xpCliLieBKnpP8zCOp+MwnOZ5SjHGaZ6mKbStiKYCEgEANQH7OYkipozTLOMkU4SoCOhtKlOpjJJzFo2EcLWBzWaz6zbbtjOxscaH4LwPTkSGvn94eDidTiLSBt81gVBSjKAUmmaeTuaCe319fX11u9vuTJs0pZSipJTGcZqit7CHRqhMjmJAjM57RM4ypZTYN84VzvumbRFxTsLMlDMzg/NEkhGIHHvnGJiYkESyqEBOpiODxWAFswlL0dJCUQGl2ukyx4/SDKjrIiIQG9xMAECzLsFAz2HmIoTgcjU9ub5Wl+b6waeqPcmpRtA1HgfMXHR17SzvtcyTFpkGrVoUtcIrOk0AABDx6Zcpi0al8NscxSnkSjUzkklc2uyWwy22MCKSs2EogHAJcipGVUd0BFlRBIZh2DTUj4NzTiSmJPvd9cfHj1jtt+7u7n7605/e3Nz88Y9/3O/3Oee+70MIX3z52cePH9+8efPHP/4RwJvbc5zn0DQGIuv7vsPgHFM1tSaybqQSFeMJSwdX+3BBV8IyDwM0VSwhW+hVqOAjzMdBGBVVUA2zKIU8ZQfr4l7wxe3FP6/SxqFaJFxsQTAhEUDJoErGy0BBJdMrNtQaolzckwqak+LTe4baAwFlWziNfb6IlyhQgcAIAlUVF6NBn59j2EBFoz4UZQJDSLy43Yh+52oPPlHzPQtvCJ+IiP+mm6x6uM/j35IMXlR7z8OYw/W1d77+1mSpp+HwhQ7nRXb55Layt12XabSu6tb9JVpVk6W3LjXbJQQkVBFCLF0L7z0AOXSIpAI2qIgxWuVRKpFSYQAALPBLEQtmvm2DcdSG8SQi23bbNI1IsnXB0IwGvFzGXfYNzTPP7Bc2m03btiLShNB13Xj3MAynnIus4vVuv+s23jGBJlVVJQV7IDEZOnSepnma7LuhgiadU5xjjkpILEA5ac46A4LQJDomHaKOGTIonoV4ilUeymx42+vr5urqarvdhhCcY3NHsl1nutt930/TVLjkzDlHRARA+2vOebPZvHnzWcG4ppSKAHeOcZ6meSoAeEHEKFkEDOYwjPNyfMkVrh4RTZsNYnFOYKMxOM/M
iuCbpt1su65Dcgu83ns2jN/SHrC6ZDltltLTniD6JB1c8nGo6aCIrThPztvl+esL83nMW8eVv/D6vRjUPX+Hi4s3r1ikcp4tAVL1o0Z8YmGNl1jN5eOW/W+9EDmjgTAClMtklRMXeG3VAkWs3rmqICqlJiYBJYScYE45ZT0dB1AapjiO49XVjX774XA4fP75533fb7fdH/7wh1/84hfjOP7zP//z3/zN3/zxj38koi+/+vzq6uq77757+/YtQlxIFIjIzDmneZy6zq9LCPtogIWlXj2GAFbAJssTaj6hBJARlFAUiw4GFkHg5R7tHjQhIkImFQAyxZOn9/ridoAnTnVYpZ+hetQt1SBa/WS2MzWDqeE0AViwgct7vRS3O++Q8qCoO1uYQVCDWxLap1rNlgCBIK8f218VlDALAILlzTmrGpDyR7Y7Kr3Rcr9Ijl0AYJ5dHWcszY9fOfiJLBJeinarx0/i0LLleSRzqy3rB+vG45Pw+axpuawXL4ZDfKn5Cc+aoufv/yQ8a0X6SSHlFIDoeSITSi+eRFSyLPLHkARq1ANBKCMjVcEUxS74zWYTQktEqvnUHy3CmT1Q389xzpIBEa09GGOcpsJJt199d3f3+Hg0l2ojbtvFNg7z4/3D4eERjJnHJuiMKBpzTDFqSqxiK/upfxyGYZ6mPMc8xxwjAIQQ5pjinFM2yXDImk2ZZcwkojHJKBLVoF0ECMRkvPuco2J0AN7zpmuvd+1+U4gKjjg4b2FvHsYYp2keYpoINDg2cyWJ0vo2pXQ4HPrjsQ3hZn+zadqkojHiNMcYpxRTlJimOOcxL31LwJRjkmmemT15BwCgRkl3Bhxl5r4fiAiZjBxtYY+Isor3PrSbruuarvXeM3kAQGws1TBB8IuwV9piItZ5JiJgcxtGJCocHjzzcmTV6H7pfDufh/IMGrAOYxehCz4d/56PPJZojS81SKV+Dpb6RQtmtfjqmbqDVboEYFJZRaZZq/sIAFhtR2QEKkOjsF0RiGiqC6Z7l5I1UGERgdO6AoNpOddvqWqgUi1aFYhEdDgdNy2lJPMU377aNj483N1PwwgAj/cPjvibr76+3l/96ds//PUvfvH7f/mXH77//ovP37ah6Y+nq6ursX9QzfZ9mrb13jOokWhLhZTFgJfERESKYnB2U/+qRfuilml7xmS5LLaZ1wBQcZ7HVcMPyhYFVBBZiHYrRaxVc/DF7apiOlpoUdY0tWqldLGdgASkIj7AcKC1OasvfSoIpBe3m2yPMdWwRpHCvF9oCmiBELDqOC+QyPMOhKLbuejLFKYe/tj2YlCw5HT/apPzzGH831bwPQ916/t1tfc8QK4vcn5p44+FPX457C3vfxnhPjHzW0g8F2sKMT/93NKjLzjPZ//NybIcEYGUsiS1ojBLlNVt+VFWr1j0smFbjHlRnmzb1jln5UWM0SgEtrbmnG1ttYw4xmTMhKurq/1+bzWWAa/7vj8cDjFGIjW0Z9s0KJolpjjlORqFVqLEGB8fH81ayJQ8bb5CxCJJVQGZyWekGCWmnAEPY8wZYoaYrLXHRFwV8wBATNSdHWy69urqyjlZMgyTzGBm0Xw8Pi7iMos2TYyRVG28dzqdYoxX+5uu60QUCWKMElOMeYoxJck5iYBZyeO5aicRUE2OTaBARQRFchaiDADZiSpwkfyQmBJmVcIpziICek++CEVudts2dDHPFvacc2tvQouaFyctImYRNoXfLOuAt74ufiRW4Utl05OwJFUa2CQdztKA8OIMZv1BP/Lp5xKNnmAFsdIzSuB5lgPbFVTrM1p95zXM7Ty/tNO7JJWq8zyLSAjhsR9s3KmKKmViuiwjZPZJqoqQFVEkOCD2GfD+/mH35VvnQkrC7F+9enU8Ht+9e/dXf/VXj4/3n3322YcPH3a73S9+8YvT6fTll1/++te//v7777fb7VdfffX+/XuDkiZNzByapuhF1Au2ZK0iK0txs46R0uMz97vyzUuhrLCq
+FVqI7t2KamyBmqvsohN1V7py+vtSyhLsOW8dEGLh47RwasaTO1Pa1o+8el2rdt1/a3O91r9Ry/vrdCtrf4zHw5RSTTXYFxC73JPQFkzKiEKKqrmJfRm68fiAsV8YTsgEtSwB886nBeX2Sq7/ORl8OLtL3nORdSBp2HvR2IeIvKzAHkRRC8eEJ+ffwHCfDG8KfHz8g4qpGX9cvvO7NzFu138zNXbFNoCmpqaiIrEmOY5aSyHQpJo1vXekChEHAJZ6xIRRVLOMQTvnHPeOpkl6s3zvGsbRKyiYt6kyE6n0zRFVTV4CDObZywizlO6u3uYpuh9Q0T7zfbm+rpxLueoWQwGQqiqEGOcxnnqh/54mqYJ66Te2nsiIICKhMgpQz+naU4A0EdICVIy5QIi9kk0SyYFQiAGQvSIIXDbht22Yx6JCEGYwFSoJcWxn/u+B4A0RxB1jbE1YE6RCdMczZKta7e73a5tWwAk5nlK45wMN5uSZLXA2xphI4TAwZuRLAKzd7bLzb3DuWDVWNs1RMTsF7SLkUwYSVXmecyTjP1xPPWHw6H1AfwTVnXTNEY6dME3TdNA45wr5mSESEo2C6221uWcqT0h28l1xRAAqHKWz2Z7uoguolGay8tyWi7dpWpUALIaS+HinvB8pV907cr90h+yjz3TpdGMtqvSrD1fAEDXDD12uvi0adVTBlATVzR1Y2VQUIyK4FyY5xmBnQsANE9JRJqmGYaxoF0WKzuTrbRaAhWKbKOoSFYQginFw/F0TOOXX7x1Pkwx9+Nk2YldLwCQUjoej8z85u2rP/zhDz//+c+/++47sxP5+uuvnXNAQURkimmap3EM3lNJK6MCk/PL4pBTBgDPigrEQMU4zvbkKt4XFZXzgSz/0irfX//+/B4UrEf6whr7ibC3xE5CyLW0wkrkq4/Pf1V169CyBJXle17c4yeqvUIz1GJhVxWXAZWXSSEszVVVIjO3k/UcEa1t+Sz0LvXi8+2Gmz1Xe0tWtSRWF6GrLL7PkJw/Htt+5E8X8cxu6zD8l4Q9+gSx4cXnA8CLs71PxTxEFHy5yblGiq5/rG2HJxEX7PpR03stCw4W7QaT+VBCTJIhxjgPU5xz23YLjOViL9nSbL1KyyuJsfWmIiHzPKc0WwdyHMc3N9dLUtwE1zSNOQelJMzcNJ0hApbZ0jiOj4+PKaVt2yGi6ZRIimgaxllAFEAUMWdNSXJM0zQNw2CtVEInkucYY0ZRUnQZeMppnPIwgSIkdnNOswAimD5qSilq8oAmz+sZA4F33ATXnt0Oio8BACwjSTDNjiJJzIUBoGDcfO+arut2u33btloVk3LWeU7znOKcsmZEavd7q5ubTdc0TfAtMwPR4lZR7KAqqtMV3vo55plzRbvd2Pe0FbM/HU7HR1Aij03T+KaxbxtCuL6+3l3tiajddJvNZuEgutIzNRUNsOG+6dlneAL/xdU8bA0Nu7haL7bXKPUkY13C3nnL0/tSJ60Wh5LerTAB6weyGh6D1nkNAFUjGFxNHJdr/KIle/7+z7LGRXXI9vwwDKbAMI5jjDEnXTu4ngEaxgIkMp+3rJoEximmaUyDxiSBXIzT6Tg8Pj5O03Rzc/Px48fdbvPDDz/c3NyM43h3d8fMb968efv27cPD3TAMP/zww6tXr+J0RESRQiFNKZEhshIjgapbfqBKEhHfeizmALkKHFoVIqCkKHVLCXJLtWcx0uqwF+sq6+EWdeBnVbtJqrxYzS8Z0xIqsDYbrQO5KKMszcYn2/Hcclw/p95nVCKUi/s6ucugRCiGfzCpRwUFzQqKJsSvoqCoCkgvbC+KBqs0bskSXtpu4d+tLw1EXBWdenHC2fbltF4WdPiRVKK+6vltTUhYP1hjZ16MYReRzH2imcmXzcayXWq1V35AvbS4Qlcu32dlCnOx9Hwqsi5vbGeiGAIK1UxSa78RSMk5N/TZORTVeZ7TNJOCc04FUSFHQUTvmxSnaZqIcLvdZpT7+/u2
Dfv9XnI6Ho9I0HUNZJnnkcpQXft+zFl3uyvnnHUsN5vNpttazLDl8vr62vtmmqYY4263I6LT6fT+/QcAU6eU25urTdvM4yBpNrN1A7Oo4jiOZml7d/fgnLu6usk5D+M0TXGKkgCdb0VgjjiM0xB1FkgKc4RH829zCEoqGbIgqgdmlcYjap4naDq4vtrst62k5BsCzYjaNJ4A+uNxnqcYp/1uc3d3F+O02+02XTuNg6q2betFIEv2bRtws9k0PohIjHlOaZpiP0zGMiZy3jkmTy6wD+QCmm27c6Fp2LmmaaDIylsMKtlMynNKKU3GhkYRSdNszkqW2RARiEgsj9MM0zCGtrEa4v7jx9Ph8Pbzz7uue3h42Gw2r169Mnwpe7fb7dJUuB+SswB4LjNLV7h9lnSYftASxnRpbq3zJGYmJjRR1lXbbZkmYiWeEtHiF7NEHQtF8rRrhvViT3N8QvBY4l9BfpKoCCREJHVEVBChRRrL0L2kqux86Q0UkcYnQwpbWgWSLcjWwGRyKck8JQB6eDh8fH/3+vXbYRju7x9izMMATeePh9i1HCWZgGIubpI5QxntseM5y2dfvPnuD+++/fZPv/z5T//lv/3uFz/9xWevNc9xmiZNmQCG0+n4+Ng1DWQhhe//9Od///f/h//pf/q/q+rp8bDfbJsmPD48NE1zPB4B4Hg8vnn7tjQ/ACXlOCuStm2D5JImVAFVUXTOMXvPHHOGrFmLrStWjKXkpVRfw/aKs6kpKuoZuWPK1aCYEWoDdXWvnwhLUGdjeIGStAqZFifV4m+XC9WnRtz66bhQRRbRTsNtYkFRXtwbuhKXz61G22KF4znirL+pnTNQw7Q11cuJWdO58wn8NPY86cmdO3LwUm/zR8LV/8bb8+7f+v5H/nnxgD7R5FwjP9cPCnD8+fZPVJMXRd7yzE+GPYSLt637jVU1J6ilCSnkOOeKMMy1UnSS46wZgKx0GMeRULuuI0LT0Nput13X5JxPx+M8z4g6jrFxnpkRYBlrmVvePM9EZO4BBmK0QeB2uwMAQxjajHAYBjOrM1/1JgSTqpQc0xxRARVyFq0ZtzmYE5FkmPKUssaYYwZFUuUpaz/paZzHBEk5g58hRtWMpUqoxiHgAInUAaGIIwgBNp1vHTnWwIWxZ/OSGKNBZU2SwwhwdrPulu1q51xL5p7B8zzHLHHO94fHGPM4zTkrETl2TQjeN4YFJMchNG3btW0bmsb7xmyV0LG5JdgIR0QaaFJKcxwtciyjwb7vQdSZRQQTiqY0g+Q2tEkRksR+jDHmae4VPuK77X7HzATYh1OaY5JMRHGaNy6ggtrYj2rqtJo+nK/eigTBlxCbL77qxatvefLFRlxN+i+anCW6v9T8JNPcXdqkZcSmjrhaU579WEpvf8XhW5Wq50mH+XJauR9XRWcJzCLjOHrvh2ESJQBISRjBjpdZvSEiAeQioYiqmgEdYlZIGfpxnqaYYr77eB8atmqybdu+701+s+/7zaa1c75t29evX3/33XcPDw9v3rzp+yMzi2JK6f7+/ubmpoCxD9N2u3Gh9ADO5XIWWJiLUj1SURkQ7L4A+kueVY2JbD0xqonFoPNa/uS+tL7hL72Hmsh84l6xWENYNDaiy/PPBX363bDO7Yxj8BffQzUL/PH7pZXw/MT+S8Zqbn36/kUv/jcCWJ53WpYr7SLe1MvmEmOyfnARkwCAIb+8nZcXPg1yDC9vR4Czn7vWBEsrzfzy9inIjOLZKRtWawcR2xVu7FUAilFizM5hSinPUWYprSwAzZIgEREqaBZuXNd1eY73h0Pb7Dbdpts0p9PhcDioqnM0TQOjFnXN/tj3PSI2oWubzeH+hwUJMo3zMAwLCtRIvpvNpmkaM0D/+PHjPCVmbrrG0DGqKilLmrF4QJmzUOr7vj+N/Wl0oUlJUkwpSRJEdiqESuMk/TA/DjJnUI8CLokbc8yeUIxEJQ4KOc0hBEZMygi7rbvZtdvGNYxtQ53Z
HhBpzjEnImob770/Hh8BIHjvuKjEombNCkqO2HtHRFlhHMfTME5j7PsxxRxzylmJPbGyAgP5pmmqH/1ms+m6rW+bYr3EwQXP7C28LUSxnHNIDQCAqIik7RhjvPvwUUQARXIUkTxHFUHEnGYE0KwpRQV1jHEc3p2Ox+O267qUZs0xhLCIHL3d30hsbeejY7CpnIhmO7tWV1ZRINaL9cfWApFsIpCLv4+djebgWDGWiCAGn8uS9SILtIydy0WvFeetBXWpWfLy5CXyOaHlBeYmppBEkYucbYbFvQQzKmRlQC4+pCZxr0m1uLCrtfgQFRkZGFndYEAtALDm/Dynu4/3u91VjL8nR8wwxxwCz7EOqBBNXJLQiYgioOQ5Js88Tykp9P04jBM5/+7j3c9/8mWOSbM0bffh48OXn38xnPpTe7zabTXLPE7Dqf/J19+8+/6Huw8fT599fn27QcRxmF/fvno8HtI0P9zf73a7x+Odc9xuGgDPSARo6pY5CjMjEippFinm8qsl8KK8LqpmsNRAWrZ8CrvyI423l57+ie3rc2aVl+gnPxfO+crTB/+mrwNnUc5P/X0l1GnnyI9vf/F7umXh/ktKPfh0GPu33j7V5HyxyINPhz2qPsIvVmDP/6T48izwk9Xks7D34mvP72/uelhw2MsZAwAVHkEAaDwfA4bkrCnlGJMqqrBNlaxcKzdPItL3/el0utq/UtVpmsZxNuAGs7d0Muds5rGquul2ZjZraixWLRngm6rz+FIwpZQeHh4+fvx4Op08NcG5EELjgyPSnDRHzRLnaB2tHFPf98fj0aIma5CsKeqcsyJl5SwwC/RTGpMmhQSYEkXVOcGcKYfSN0GArMqIxMCgjkESOIZ911zv2rah1mHnyWA71U2Nba4JAIfDIQQXQouIBciK6pwzJKH9RhQVgaGfTqdhnKNN+Ji8803TNF27bZpuc3Pdtu1uv9/vr4286JrA5K22cME8bCGlNKVo4i+QxXCqlhPkOKWUuqYdh+F0OsRxitMU5xmMZDnPC9GwCcEFdxqHh1M/T1Pe7yHLPE72bayZ2QFbK86HEDQoEhIplMoG6Hy1lsLoR0u9pSR6PrBfLnabDlosed7YWFd46wryYvC8vJXoamhHjpQAQFTjWpsXyQoHVROupkrnf9KhrYTVcgXZSc5NY8nHcj6P4zgO083bt03jBTxzTBM0jZvmmYlXHEdYHO4EaZTcCEwpKsA4pRjjptudjoN1+0MI1vawyGot/c1mg4jjOH75ZeHtff/9919+/T8ej8dpmr788ksX/PF4PBwOX3/99TAdIUuc5ilgCC4Ebz1hkWiDajTTn8q7KMt0oSesbnIOP88P30sL9L9tHf7xp68DXv0OL3f7lrY5PC1pbLF/CdSSX9yOoP/G7/Py9h//1Y5KNnfWOqoPPvHB+V+Jxhe3TzVF63DteXgrn1O3LNzV5a9V1NV6lvBy1fWpsLSM3eDpY/xEU3QJexd//VS1B+ZAiFilR8upJaImbVxH31mEEHyckr1WVeMULeG1togKknMhBEmzgcecc943j4/3MU1LT894C5rjPI/H4yml1HXb/X6PiH0/tG1rtnnjOPZ9b9HUe28Pmqbz3p9Op48fPw7D4Jzrmm0bnOEsjJonKYPmceob54lcnKb+eBpOJv7ixzmmJFFUlLNiynmIMkaYZhFy3DAlirMMUVLGDE7U/OSpEG9JCYBRWdUzbAJsWte1vvPUBmpDCecA6hy34f/H3p89SZIm+YGYqn6XmfkVEZmVWdVVfQ0wB0DuggAE+7AQWRFSVoTCF5J/LN/4QhGuCFZWlgOA3B3M2dPV1XVlZhzubtd3qCofPnOPyCOyKqurZ2YxMKmO9rQwt7Dz009Vf0ew1hZO49inOHXtRa12ljRLKdZ7Y0wTmqUNSaIsMcZhGPp+nGJyLoSma1fr1WZzcXG12+1WqzWEEEJo16sa85wNxtkTiMaCqYpx5FQbVVVtukZrwTOXlOYc
U0YloufPn4/D0Ho3DkPfH6MqM1P1UGItUioy0DvXOJ9Ds++PcZxqUTo3TeVcAkB/OEphZfFNEGZCdM4hQLXaqdFE6b4YuDyHcFLdPL3CFSiBp64bnp/C+v6+RkkCXZyjl+0e4gjPgkFvRDgAUJF74M8DYWhVPfEd+WR8L7KIsREAgCGLpEiWKGqpU7tKP1WtOSdZu8z/Xp/RI6Cpzw+zjMNcWTPTNF0i7na7/XFGBAaoerZQVbVOvaH6+uuitwXGOBHwBnKGcYqX61V/ODZN07atiLx48cJ7fzweq3D54XDYbDZVhN059/z588Ph8O2337568UxEYkwVUJpzztOoqpvNBkBjjIDFe9s0wZJREmFCIaxkVTCEZBAN0skB6OF4LQAgdD9Y3ec176KCnG7mhzWhHs+Jlhqo6msfkN79h+t9e2OnqqpAFajyxk89eZW+uR4Y3rO85sZ+rtK+Y/37cr2H2R58v94efmBv77HAe0Y8wruCHzyIKG/8882w9CDbe8+H837eqd4Cj/cC3wh7b5/RG7MwAFhIOa8bKiEQkSWyVfqEeQFKnKfh56k0AllrCEwFSlQr9Fq33O12qjpNMabBmNrtwCogMuelaWetP/H5CjPvdpuKc6v5GTyYiFSJy6qlNM8zIrZt24XOkbGEAMDMmkvOUXMBliSJOVYNzJxZpCo9IKAxFlWBC4sii7DU0rNDtIJaRHOpEraGRQCwSu5Wv3kCrcCwJsC6C6233kLjbRNssPdVnUozIAPjNFeZzWr+Xuf+9XMVaoFFjYWneT7eHYdhSKmEENp2s95tt9uL9fbi4uLy4uKi6zquBkNtE0KwxhtjKnKzDta1qIcnY7Ra4VzeK0JjjFqr4J2xiLhuO29os17v1utqzCsiDm0FwceYZxYD6Jxrm2aeppoNnN9caMV7X20BmDnEUFnPS9N3ecyIiEDvk7C3kZwPH8uHidpJ7PS1aetrzcLTB3zQ43g4IDz8WU7LOfSehWlUFYCqY6ExRhWLcNUgRTBAZ0aHJSI23nuvqtbaOtCfVPQeFtbw7aBbCaaVrqqqt7e3V1dXd4cvEQhBcs4KVc6/6n4onLoedSx2Foy1ouoanCc9HoePLnYppb7v1+v1fr8vpaw32+PxuFqtamZZ4aOIOI7jRx99tN/v//Iv//Krr776xS9+0bSrw+GAhpqmiSV/++23Hz2/yjmVUlLieZ5Xq85YY4wRXtLiehGWrsZ3DaFvp+CPjavfmeh8/+WDUqjzb9/M+dRUTOibP5cw+eb6e6O673c8r/WAv1+qB7W3986O9+OR78OKtY9ne+8OJG8UP78z7JnHw94714P5gDCJiPSa/cf3eaROZYpF42dZnHMAyKzCCoqERkREFzTg2a+HiFRARFLKAKCF8xxzidXfu2maYRgAoKpwqUjXdc65winnWEtwlW8XY1SFpmnq577vh2Fg1mqnV0tGlfw+TdM0TYhYw0bjKp6eURRUuZQUY55jCH4apuNxGOcpZlZVEc05s2vQGEDRVEqROXEqImCQbGGZCo9J5iwZgIGIKAubqrOAWJtPFYJtEFat266b4IxBCN60jXVkokAdIC0ZBU6J53GahvHyaue9JwVgqSHQWiqlzNOUYsxFSuH94XB7u88lN0179fTparVb77abza7p1qvVumlbH1ptnLXW2VCJayJSyzhFM5zmzmgWJjsiliHRWXdOANE4G4yReZ4R1Z/UXKwxU+NFBAvknA2CMxRLLjEKF0AMwWcuIIyqyjyNg3CB1UpzAWEpOXlfSjaIROicI2voZNH3kN18rom98XA+JAK9Dht5LX68H6KiqmhOUIyTBFfdWw0Dle65VL+rbWFZNiBragmamTPzPFfsq0G6F7gxxhTfeO8fPpbe2zM5Z1H+rJh5VEXwzp1BTHW6VssnX3311S/+4I/g119679uSY1FrXGYBUBDRk5/MAgfVqgekWUrjwjjMx+PRWqsKv/nNby4vL7/66qs6dzwcDvM8
W2uJTLVsXK/Xt7e3z549e/78+eeff/7y5cuf//znl7vdixcvVpt1ztmRGccxxlWdqyEu+C9buTFAzIpZENQQIRgAFZHTMEOnMeR8I0/aTg+nIIjwSNaFDw3dvsfy6Civ8naqB/DAl/aNhR4c22sfEM6CZw9+nrK9N9cLPp5Wvet4KpnjXcf57jOr6+9R+9+zt/ehSM73b/92nvdG8fDhlu/O0u6roK+tf3eFs6bR71r/8K+8c/0bH15/RO4nX6qKeO+AtTAmBV3jSoGcswrUoCOSmQuizbnEGHNmOlFHcs7HQ991nUWqU/6u65wzdYZrrbWu6ftUbQeMxWGMdTYdgvO+YeY5jt6H9Xqd03GapmEY5nl2NtSsqBrGqup0ckJfBCGraNbpRODES5vnebNe9dwPw3GaMlhDZCXnGCOLQSzCGmMe5jRFSAICAqHhrFOcp6gJQCEgkuhiDVKvcb1Cy6tkIITQNsFaqkHCOWcRU0YissbUTKsiUSvQrsqy5JydN03TVM7ieByYmQVSypViYYJfr9fr9Xq1Wm+32/VqF7q2adoQWu99WXK7e8j+wsQ9mUNhtZuS1/256ofCyiJSRAFJRZWZjQIAVBgtIo63vbfObbartuvHoe/7WNt+ztaJTmUmpHkB0EJZKnvMDERt24bcVDk0fcgTeIB4fLjm4bt8/nxu8tVY8vBZPUe4N5KJ85JzrAdTo9o5nzs7ONZkFBGX5I+XAGmMMW6q63PO41wNwsgYYypiyFpjzKBYKfw1NbTWVo3Z9XoNAAD3Gj311JumqbhNWMTWY30Gbm9v//erjTHGe9OBy/1kjGPNIrnOXVSW+p8ggGgqHGMUKm3XMkCMSw/i66+/fv78ed/3V1dX9fZPU+0UuL7vK+3yeNz/5Cc/2W63Xdf99re/+uabb5rQqWrbttXJ63K1GcexbRvrLdKiJlPKYm6qqsxaP+PizKCWvmNK/XAIev8Q/SMuH/q38HX8C8BJv+D7//xBx/Ohx2kffvOND+9cvnPvb+ynztfeXv8YpOU06r7zeBZb1xOdsoaxOs68e/9vBz96hHjwKI+Qcv0HACCea00AZ5GBU3dw4UfmhV5dkzYVQ9ZY51SsMHMBZmUVVWVGZioTDkNOM9eSYyklztM8z6ExoimzAooxBobBEE4AAIAASURBVEA4qzB7pyJpGqeScmg8kZUEFrylMKd5zjMiOkshCOJcioyjHPv85VfXqvpPfvmJcz5O08XFxTQchSXPcxl6Hvtao+u6gOklIjbBGWP2N7evXr0iosuri7t+f9vvB04UPBMNU5pLRmd0jOQcohE1E1PPwmTZNVPRQUovHAmQCCgXFRGxFFQKiTqDHpgKWIBAcNnRrrGr4K1BY1DJi/iMaAMbY9oQjDHTMB73+1zipm0a4+Z9j6AXFxdd06YppXk2CoWhFJ3mdDwe98MM1nXbi/XuSdjs2vWlW20gNMVYtj6RLax516gqqnhLzqAFVE6aSvAGWZgziiKUs96jmTbMLAhkjaIUZiYVY5ixLKK9Ss4agDxrmmbsrCFCRMOMjaFghmMfY/QKhERimgzOYGCCiQ2PHz3bDcMg02C7ToX30wjT8NFHH+XJhBDIB0BWRGsr40PhgYoPIJJZyPUppYoVMYhwVv4s5UwSQEQDCghLEgdcu2tnSWUFzSWvyszMmcsi7RZzTDEn7qeZuSq+ACuUssjfIIVKOEFUgKyqMU7zPJOBEELjHQlxHDmCMWitbe2Gx76/rWpt3nvP65V2HQ9DCCG0K7QoyIhorXXe3ZZbWDuIQYIl5/rjMB7Hxq+cHr/6/MuffvrZv////dms8OTZR99ev4qiBcCQgikIUhREhZSAyBApYlF3O0S08GKG/+HP/+Kf/fGf8Bd/NvfHn/30ky9++/l6vVqvwjQNzoaS8fmzT/p9OnTDqg2/+qs/+1f/6p//T3j42S9+/md//p9+/stfbC83X3z5m5/85Cc3t6/64U4A2tCUWC4vL2OM
htvhlnc/uXA7EZFx7KeUfAdqJOdIRGg8M6vWSZ4nIlUQEYRqK33uYZ1afadh+I05uuKHRcT7sfV1rst90rVEr3O/zrxze3hz8xPA2PJ3bf8GM+c7irdvnK9Ieedmj8WpirF6txne+y7T9yj0PTzEM/LnseN+a/v3rX8sh3vPn3h743dmh+/cwxu/+v7nXmtltaRT/znnucqaqCrSvatD7WpgAEQ8Wxmc5+b4YIciWvtY+/2+7/umadbrtfe+xLlOw/FklnsW3mTmeY5fffWVqj579kxV+74PztYZ+hzTNI4V+Vk1VpxzIsk5pyr7/X6Mc9M0iDjPaRzHlJIIgK39UUJEUHKNj6kM8zDOGhkQgawD49I8lMIsVeFvKTDJWRQXBdEigrXgCRoL666rx9A6a91S+61onXqVKiSnCoduulWMsbruAUAttS0tNOY45ynOmdV771xYr9dVDyW0jffeWGtOscEYE2OGZbIlCIBkLBJYNw8DqAALCCvwOftZIzGzIpI1glCkiCpaFBEpqZSCINWO3SKpc3BS+TFIGEBZUKH1IcZIiDllVVXv66zr3K9CxMyMzJW72Q9Dt96ckzY9gRpEpKqfvPE8v1GrhNcSuPuwV1lWS91y2cnyGFdqZkopp5G5UvSrnGmKc46FU0pzzFOcU8wMJ9KOEkBMcywlV8Ko99ZaWxEcyrKAMJVVlRlKKUMuIgJUq+4WEc3+LjRNaNumaZpu3bat9c6eOoXQQIUfhxCatg2hnYdZWec5HY9Hv96GNox9jDEiknOsDDVRFBFYZDqkIm4IAauZt0IGmOfU92OHOE2xvlmICIB1DtG265QSwkLWLFyOh+FnP/vZF9/cee///M///Gc/+9lqtRqGwXt/OBwUMcboG1/LMPM8+2BzzoIFThqkXNQYqYqypyx8uX1V0YKZrcN3Dkrfv4/1QcuHplO/7/W/p8V+aNHye2Z7D27Vu3l4j2//7uj4mPHQY/t5zJb2sZWPHcB9nnf/xYfV84ffrUe4DC5ESmQQjNR6WCnMp+KTgsgyspSEqFB7FVVUHlSdc3mOYE4KngpSuJLNK5OBmZumaX3QwnUoSSkZY5yzddZMRDFOwzBcX++Px+PV1dVut5uGARHbtp2miXM8Czs1znfNopJM1jvnh+F4d3sQke12q8y3t7fH41ANHBANKFU3FBFFhSnlftI5g1hQYwtATHlKORUti1CQChALsMJZDoRUCMEb6IJdN2a323TeBue7riEjyiLC1pqKwhMpMU7zPBbOTbtumoZzaZrGEMYYI8+IOEzT3eGQBVJKsWRC27Zt16032231yG2axntPxtbIV980TJmIHBkSllK44i0IsABUhR0BEWSWXGIppaCpQcJaKwhFsoAagyJSShJmAvDWOmMNoDUkIosepjEAQB0657TwPM8xhHEcOeWqQaOqMcZxHI0xaIyccJI1nDfdipmVWVVloeuBiHjzjoLN22/rg1Jn1UcRRJQqb6bMwsiACkQAqqIVejlN0yRzYuaUqyVvnlKMc06lDMMQU5nnOZYMAISWrCGyRkUFU55r2IOuMW1rjAFmUSliagZKiFWjmUsBAGectw5NterVUkoZhpTSnFKMbWi7WhQlotY1iGScb5vVer1tuvaw73NKscDN3f4qdE3oeB+HeQJC6z2UUnV7BBAAawdJgUWQCFUWxwgGmCIfjsPa+kM//sHznzVNU41Ka51zu5YUI6KmlKQU6/Tm5uaf/JM//Ob6P+x2u88///zZs2dPnj795ptvnn50FUIYpmmeZx/cNI6r9TrGaB3FGK2tT5irImtI6pypbmJEVMeHh/fu4fj8cKQ6kxfhRwp+76hP/r2u/70u9kMv2Xdu/1YU0cd+9c6Vj0Fa3h/G4JHo9VjMe/tXj2WN+t5dAbwjij9kSlW7US7VVrtyT03VZ6itkZLunY/qV+pMNo7TQ1RCbWvVLh0RXVxcrFarOhJV472U0nq9rrT0+sV5nu/u7m5v766u
rq6urqQURLzcXXhvj/v9NPQVUt+27aoJZ2cAZ221LI85eesQMeU89NM4TrkIoFFVBi1VkLPoMY5cFAx4Z9nYUrQf58OkGSEpiAKQUUR5IKhPWBXOFACMQR/sqm3W3cqiIKm1RIiZBQCstadGTg2EAgBVbmq9XivAnOI0jqUUZanYnCEXYbAVz9q2TbeqPTxCS0RojLXWWH9CTKgHYwCNguRSOAsAE1kyJTMzlxxrMsTMMSUu5S5HZkZU46wiMLOgVDVUYFFhg1TDnkVCWAxmT8w0JKLGeXDgrF11XesXUStjTC3XLAgRImYmZlVlqTqrBY2x1c6OFirDw8cSX7c+fjsbOH1+U+1FdbGjO+uWnVTRc0k5JimFY0wLQ26epzjnxNe3NwBnUWkASDKDqhKnWuRsnDXGODLAhbmgIRVREetcnXYYAFAkZyoQt2maKtyM1lhrWbQ2glNKinTWjN3nQ7BOWVjBBe+bzlo/QXKeDkPfThMYWwQ0ZvCeyFoLzCwAxCqVtq4qCiknMRZVzAkdkosep2gv/DTNvglNuxqGoygDLCqvbSuIOvSTs/npR5f7/fHjT55tt9uc86tXr1JKiFgbzxcXF7XlWJViLi4v62+nadqu0TmnEHImVYhz1Yhfyj9LIxOqxWAdbd6tVvr4NP2DhvP7fcLrEejx9fhh2+MHbv+BJ/BYPHr/+g/O9r7ncTw4sXfPSr5PeINHAtvDu/6dyM+3v3Le4DsTPqjYs/ud3H9Webi3B+7qWEFNWCFYco+BWG4qnp+bApLOviRaMfLuRMdeDk6hDnw1F4wxphwvLy936zUhMDMRCes4DPVSeBuISKTEGIfDcLw7eu+fP39uieZ5vry8bHw4HvdEVBkL1pha26w8NQDQLMdDPwyTMc65MI7z4XCcUsqlqKISFdZUcow5JmaGYwRrDTUejYtZpjz3k/YCRMCKQKRogFAXjb+q0YcEi9IyIXpjQ/DOm6odX1FtiGAsWkeaAYRrZgwAtRBasXzH43EeRlAthY/H43DsU0oja2V/r9dr64Nx/mEYqCwI45wxpgiUUtYAtGhcimZOzIiYDFX32nmeK/Ckegxx5qSVgSfGGCAUEaQF04EKBtQQWTK2+jSKtt7jqexsLTkyteVWSvLer1atKpecqw938NY4MsZoVfsspZRiqgVuzmgMGqunUjYaIsJqKk6I9fN5Ddr7msT5AQRVc7YeB6iuM7g0Y1h4KUuoMrCUnLXwHEtKaTwtdW6RM/f9cCZ7LDODoiLiykRd57vOWouoRkVZuEi7XgEAIREiAdHiQ0nWLkVgEVHOIoLqAMB5z8wpZ845peS9d6EhouvbuzYEZ2wqQtY736glVuk225vb/WEYYxZFUCSR+vYuJXVdrOSqKNqCgmUVBTBkBHgusB9GvNrkwsM4t+1qGI6lFBAE1WHod7udIzv2fduhc26ap36YPv7441evXm02m7u7uydPnlxdXs7z3HVd1zT1nQUAa0zwHpbwaZ0LzoYa0nJOkiXGXPV0ai74kKf4/gTox0r4/lGlevB3kO3d90jfC2n5PmEP3hXGPjRMvv9X73iMlkrnozES3qL2L6NStUdX4aUxhFVA8aEvbx0QHzrq1c6B5LIkbbrQofJp8njWDKswTmttitN+vw/BVzWQWis7HA6VAfb06XPvfY6x67rL3cXt7e3+9na329XGYYUjsuWz99sU4zAMKeamaQDgeOz3+0PKDECAVhRjLDGXOeZUQATAOSFbgGIsxynuR54EAICBgEiRAA1XjSkyiIoiBFINhiyqMei9bXwAAGOQCBQY0XjvrSMAWK1WOecK2DNEq9XiJTQO4zAMJaaa6R6Px+OxBwFoO2t96FZNt3KhQbLGeiBrfSDras63XNtKr54iEQmCCBTmxKUIq8Cc4jDFpaMJWp35SimZMuciwkQEtJDqvHWAQoCOjKWqmrNoj+SYai/KWeO9V7u4SRRhK2K822w2uUTU5UmOOakqn3Sii4pl
Ns4WEcOMIjUpPz+ND4tdZ0cOAKhkOHh9Qv3wrcST7fjyU4RVq6xa5cbklOI8D/08T6kfDn3fV8LAknoicmFJi8sUApBxlkzXui547xARCNQb45zTBruuAyRdkhlEMIiIhLvLC6nNPURWqYBRAKiw2FRq549KKUHBe39xcRGcQ0QQXa02vmlEYUzZWAvWDnEeYwECQJNSsVXHnNCgAeDCiqKIQMZYNESUM4sqATJAATiMsR+DtfTl1y8udxt3vIsxo6IxpvZcG+fnefahqQJJ19fXV1fPhmHYbrc3NzdPnz799NNPv/72q9vb27ZtAaAG7IqUrndqHFNt5hlySBXVWVQ156rmY/Sh8xwiPGJt/+io9eHh70NSMQDAD9seP3D7R07gsaD495/tPZItPaqx+cj2j66HR8LPY1Oed37lYRh+Y1fvPovl3j3o7VVmzGmo0XvZXISqpUqERACkLKWwCKgCIqlWIwyt89raLdECS4NBtYY3yaUghhAMUsWLSynKDCIgEoJHYVV2ziHINPbTNAHAuttYWsLh3M/DfihZmtCt2jbHaK1dr9fzPA/DULuDRMSlqKo3y0hcS6bDOGUWRCMCKcV+mKY5iQCgVTLCkhKPc04FpNpu+S6zTrn0YxxmHgowVJ1HYiRGqhKQckp2VRIIEKEz6AwG55sQ2jaoSvC+C9ZbY61x3iBqKalru5yES0bQpm3W61WlqA/DUJOw4/F4PA5jP4iAMRTazjpPxlofuq4z1lvfuNDUypuqZi6ciYiYlZnnKYoqA7JI4hJTnlOcCwvSuIS9XJhLkTHOMcZsc47LuF8Xb533XqRUcqE31jsXnK9/cSt58fOzJhTOlXWGYJ2txVIfbGiMiBgka+2YcozRgFprlRaMu3PuIUpFVVmFBPREtHjslbzHsZz+uTAxqoaJ1Pp5rlY5FgmkCOeSM6c8TP08jP3dNI2xhr0YY63aLSVWVRSxLITQOB+6tgvNujOtb8mRCBBRaLqmaawLaI0xBsmoalFQXXRcLi4u9GQdlVnrJIOczUVUNQjnnGvFW7koU9tsvLGIqK2s1pvNZtOtV2E/vLq5Q7KFdUwZjWWAiaUTpSLOGW9sRiycRQEQDFLtmiuSqChSBbrkojd3d5eXF59/8dtPP/tvVsfNcBxrr304DGmaJTT1aG+u7y6ervtxDu18ubvIMd3d3L548eL58+dd097d3S3i7/PkGtP3fdu2Oceu63IupTBRqalyJeYTWkSuhlaVuXu+Zeat3t7b49j7B/rvM27/o0r14EdHcr4rvL2bh/f49u+uN74f0vKeyPfY9h+0H3g9vn7nJVr0i1lFQFgRjbVVn0WZhXmRNk4pablvXDMzp1z5W/bkUl2zvTNv7Kwu1rZtfzze3NyUUtbdqmmahUucS63OqWrt/zHzbrezZF68eAEs6/X6xTffEmopxQAaY5oQas1wv99XS506Oa0ze2ZlAXIEYIpAyjlmYAG0iGgT2ZTzmHksMlV3IQARQEsKpECqUgkbiz4OAwAQqEHjDXlrgrNtCCjJeRMaZ4msoxAC83KJKlDTOdd1Xdu2UqT6wlfgz83N3atX0VrYrm0IwTYdomEBBXK+8U1nfTBk0VgFKsIlKhax1opAKaXkxKyFOTJnLv0cD0M/zLEAjnMcxzmmwqwxxuPQj+NYnNRDwlMzpnE+hCCFEdGRcdY2PnRd13VNCEG9cWSc58aHrNAIu8JIcNXu0jxmBkS0jlSVgS2ayvYT0KZpBCmmRNZU3bLa65JqnVOxLSK+cWdU6sMprL61vL5eatdaWVQEWIClknuqAkCOKY7T0PdxGPOU0tjzNErJRsEQEuIU59rFNM5Xa9aKG7IklXvOgAAQfBvaxlgPhmrHE4CyMABYF5xzAFo1MEMIiqaUogjGmEpMKsLzPPfjVAEyqvri629CCI0PoGqtXW02V5dPY9bPv/wKyIloKYLeCi8nWwSMoq2Vk9PrSQZzyUSWmQGI0BqLWgqSvz2M
6y28uh5C0/qmI2s4F2stM1RA2dJi3O8vP9ow8+3t7U9/+lMA+Pzzz29ubm5ubi4vd8fjsZQSQqh47Mq1jTGuVisEo4sLUmWqUR0h3WlBROEK9Vyqv+8cGN+IEz9WkfMfyPrf9/JokfOx9Y/NMh4LYOeW7Pfe/rVtHitmfud+Hlv/2H7e+OJ5cm3ogTfTiYQOAJVhAwvepIoqkTFGjbXWisg8zaWIIWcMMevYD0RWGGJMORZmlqLVXzqlBCzWWpBqsOfa0ADAMAzK3DRNIao60QBwsdtaa4XzPMk0DSIlON+2bYwxWAeK0xSHYZKiIYTGtznli4sLVLi7uyulaOGYJlVNeSm+IWJtX5WY0jTv744hBEQchmEchpxYFEvhi+3m2I/HfkwZqhO5InDRDLSP6fYQcwEhYAO1eVdAtXYmEQjpYcHNYnXQUkNQS5lpnrYr771v25YASo5jf6wCZ85gVA7ObDab9Wqbcz4cD8MwoOI4ji9fXr+8iSrQtuSbVgHa9cbZ0K2a0K4UjSKJCGsJZlESyTkVkeqWrqpoTBHep/lufzxMw3GaX9zcXt/dFcAp5mGYhnFOacEmqOpEZ5UrsIjWWksJcaj3ruJWnLGVgh1CKBeN936zalctO2+doXXXrFftYRgJ1RMys7HovUcEUeWcQ1h0QZVM23VkF4tdEQFmQ1hlBMCQ8U6qdRmCgIownOX36ORKCognVJSI+OCYWcriHrFwYREBpDBrycKsUoSzilgyVhNLtJoaYuvAAAoCqa7Cur6btWTXNE0I3lqk0JC1aF2z0M+99c6QM97R0te03pAhV/udDImIKlRKka0jQAMA5AwAWLX1Yi74r1JiP0suo/YlppJyCGF7eXEYxsvLy+v9kZzdXe2+ebGfAVaNr56sYyxjLFXGLwQSgaILc79agCUuiAaty6wUQNDOEV7d3P3yl//kV3/118HbFNN2uzrs9wjwT//on97cvNhcrm5v99vtWnL59quvn33y8b/4F//iT//0T7/67Zdd1zjnSsqWzGazWSqcKW9Xay3cNt08z9auu64tJdbccRiGj549WVxqF+kWQARrDdJro+55BvOjYzK+9/j/Yft520D4h/3dx5bHjuex/dTr9sG9vffYw76/2Pih699Zz/yg/fzgPw3vysffdcXfXXZfZJwUCS0iVgVF5mriCAQGoFRxRWPMPGURoZO4YgjBkvHeV/1MAFiQLCmJSC12VTGLE4xT0CIiVr+9qroSp7nCQRFxtVrVMFxtbEWEc5HCKaUuNM65qidZX7nKiyCyOceUSsmiitZ6IB2nOMaUCwiQIYdkc+GUePaQRDNCocX2uAACklYlLXyX/JECAZCCQfKWgnMhhM1mvWq8JRItxqC1vtaViahtW1W11s5xHPqpVttQ8Xg8Vhhk00BteQLAdnfpfROCs9YX0TJH55yxvj+OIbTGWSJCxZoQT9OE5MBQTOUuztfH4WZ/9+Lm5np/6OdYREuubAVC4yuENDTWGeucq7gaZ+oOqd4gYGHmXMoYy/W0B4DDl/266z56cvWTTz55crnpbDOLwBwlqENUS0osUdmyMWSRyCyVLliCnSFrqgOREi7aMY8s52cVT8brb5Q3K+fvxGSQxY++as0wo6iKKIuyoCgqGEAq2UBpDfnGIyLQojPSNE21Yqv1STKm2oroyQRcWQRQQYTEGPZsFIxBZGYuQiRVpUWpIGLtctVCKKAsnb8avZcfWAPVH1xcTNO03+8zz/WNeP78o/V6++wnn/6//4d/9x/+l7+NAG1jO+enXKY5+lNv/kTdA1Wk2nRARjib/CkLqMrtBJcpdyvzq19/vu6an/70p7/94nPNIoKl8Gq1evntC79yKaU1htpHr12DEMJHH32UUrq5uUHEzWZTH8j6Xp9jVS1dMCsXrX6RomWe5xSLAi9Tlge9/4fOGPB4tvdjLR+afv1Yadzfcdr3wb29x5KkRdjpkSLnw5WPlRbfX5/8YbndB2358J/3
XVa9fxBPW9ZyK8Ay1zgfqAElAWBmyZWWvtiL1BadIa1+EapazZoNGMnRGgMIIlIDXoV25xirJFSMceyHeZyMMcF5Y9A5g6gxTjkmSyYEZy1551NK/WE43B1z5vW6bZrOGLNarQAgjlOa5moVVHLMOVdEeNu2wQcRzvMSWdt2FWOe5xRjzlzxisYgXu+PKUsWMNYiWVGTSplSGiHPWZIsSb0QVhVFAFCgE4JQTu1PcAAWwSBYU9t71nvfBHex21gS1aJciMh5Q5ZUVXK0hESGWfq+3+/30xRVVVj3+/04gjXQrrz1Ho1DxH6cL0O3vbhardYikjIzsyJkLpoiclkQkIiKIKDf3twpwjjH6/3tt9c31/u7u34YYpoT13tuTPX7c9UaNKlaJOO888GRgZObjbHBACiCQ0QwRRfPGsNmyCne3PSFnw67jy4vrnbb3WqFlsUaQmCRqCyFrCM2xpEFRVUxgoRL0wuNEb1/I4hO2JBTofUNgfVzR+AhWOAUDvn8X20nK2dh1pKX0MisnFUKgSBI11hn1NvlVgoQM2cWZlZABkUwyApFKQGicb4hVZJMxMYYY8SrGCNzYWuzcw50EUirEOIa9oiKGGPEkgtoDZJoYTUEpGTQoa2NTxF5eXNdSrGGLrabcRxfvnz18uXL43HYXF79V//V/25zefH1y5sXL2++vD4qwJPt5vpwNIQGSRBEAVgWpSUCElRYBP9rEqWqEYCFLq6effnbbz59/vTnP//lN19+NfMIghXm+uLFi5//01+kOKXMMRerWEoZj33XdR9/9Ozbb7897g8XFxeVaKhaPRgXCqa1dp4nY0wpJaXkgwmhETXHI1SGg/MIAAosyvS6hNZ5DHxYsv4RF/zAThv+SB27D93+R1keDXvfiZx5O5w8Uq78jm+9/RV4K+a9d/+Prn/nMT+2/kM/PIQYvDHpZuaiUns/rJoLl1yFFkFUalCsNnv1Zahxjk9s9pqZVcuFqrc79P3CT2/bEExVlx7HUVW7ruu6zpGp5s43Nzc55zqg1ITvvHGVgCk5c06llFXbrlarrmmJKE6LtmE9mxjjMEy1OGPIxsJzzMdekICcIRsEMJWSMrPQEOOUCwsoAiCIoiJhxe7AEvMAAEAQARUsgTPoLHljvXXB2zYsXgq4APfE2Cq+VXtvuV7waZoOh77v+1IEEXMqMQoodCvbtm3lloQQDn3vXLPaTKFZeV/rvoBojsdjSmmcU+UFO+eqLcBhjsM4vry5/ebVy9vjfogpqyiSab0wiICCYQGVAgKqmrBoLiCqqUyipdS6oq1DOVnjm65bh03TGOeI7KWL47Ef+sORS7k7zoXnwpHzc3OFiN47NQZUWBi4Tg0SEZGwMaYevFpySnrm5JmaCNI5wj0MdXAqcp4Bn/D6yIIs5wdVRFCUmbWwMGsNZoU5F2auOZ8PDhFVYylSVApXvEtmAECjZAgRDaESKoHCOM31r1tc6hneZ2s9qnjfeO8JsDZ6FzO9JeyRtVZqSgSeiFgKEaE1ZA2CISJLqKqfPvt4GIb6uMZ5ZC6o4r3967/4y7nw8XiUXC4vLwvry7t+mqYCiGrIEqjUBBeRFm1MEFVFQIMoUKVv0CjMqewuVizw1Tcvnl1dbbfb413vrW2akHNGhDplHMeRDOy6tfc+pWSt3Ww24zj2U39Gb9aEL5V87k8fpkMIpIIxZtHSdV0tEZ+7hudMvWbP5/t7HnbqdXvoYPP6AP1DwsBjYMvHxv/vD87ED/67H3oCH8hEAIQf3Nt7OxjQI4ijBy5Rv1Nv70PD2IcWXd+Zcb71FXrw3WrqXcWLa5sfARbCUB0EUat/bC65eOsAYDESSinnLKVKEFVyQ/XfAhFR5jTP0zQZYziXaRirEFdTlwA553EYckre+9W6bV31Rj++evHycOjX6/VuvSUiFPDGzcNY/WljjFzroqUg4sXFRVVgqkXUeZ45FxAdh6k/DvMUAcA6p4rTOO6PJWcIDXjfCNo5l2nO
Masqzikn0bPuE9eZANbHsXIb6yUSACAAFABUi+StCcF3XbdarTar1oAKLMmNtdYYKpJLScZAKXka4zAM4zgzcx0tD32fGVyApmutDyJivdteXLTtJYG5ud33Q2yaZrVadet1CG693ZYixk3TNGUuMad6WW57ubm9/frFtzf7wxBjQUBCNcZWafySuRRWMFDNwcE3YKRgzqDAzDlGAPCuGaexiCqCcb45duvtptusfWhHR2azubrYGkQomfN8PY5zTtbaUjpTkfSGwNQExNSojCrGGKl9UbbGGDS2hgdjDNT0jggQpR7wQtcDxIW9p4SqJylHRBFVQlFwVOuJNfjpogKwAIZZSiop55zzHGvzrPbUpjnmzIW1qBbWxJJFBRQJERlI0SigQUTW+VScrU4LGgSNEUtYRIsseuvGGEVgFWcVEdFUX/V6aEpE1e0J1VlVsloJf4DQ+IAgwVtmBs3VTsvEFBrHs85jf31z06x2q3V7GPopFwXHaBAIUUAzCGh9ccsyj1EABGsA0YAqaoGbu70zutnsXr68/vO/+ssn2+0L9wIUVk176Mftbj1N09OnF1MacpFpGLumBYCc83a7vbq6klsRkbEfDJL3q9qP4Fy4FKxoL8S6fS6l69pt6NbrdUrxjdFMgVnyyavktfnNj54V/aNK9eA9SM73x+nHIgq8I7w9GmbeH5Yexjx4PEx+n/D5fb4FbwVReD8/BvBsKPzwUEspXGMhGhWouvWqaqxX1pRKian25JillCKCOWeD1YHMiEhO6WzpUkcdAKhhz1oLUGrehoht23rvhSXGuL+9m+fZGNN1XTXGVNWU0u3tbaVb1cGtTjmrnmdwnplr8TOd/mjfT9UozlqriuOchqmMM/gGrPeEJrPOcxzmzIJElLmIgACQAUVU1moicnqal4SPFBAVEAqAFSBU71zXNKs2rNpmtVph1ToxBlCtrbw6EBF38tEexzEvhkcS59T3gAhNMBWVY4zputVms+k+/lntetYa4/F47McREbfbi3ri3vspzvv9/ng83tzcfLPnu8Px2E9F1HgnokVFitT/aRZSsGQ9GmsMASpFRAgWG29KUVMQ0YTGSjKJOGVO85jznHhOnLquC93TNvh2u10FrznGocccDeLtsedcUAXWsgqusYYsmDMEoJbeDNsHnkE11SMiIHojCXj72X7Y2zs38157XOv6xRyWSymaSkxzjqnEFKe5ZlRTmlNKcypFhFUVbBHIgqm2/1hZmanq8CgiKmdrrbfOWusEGYoCWpWskDPPKVkiY4wzdimzN4aIjHNQzThUqx+6b4KePEBQ6XwR9rfXBGidDc7DdlvdzOd5RpWu6z755JMp5pe3d9OcUWEbzItsFIlrtxkNGqxW9aoqspi+oQqae/3bMeur2/3TP/zp/tX+13/75fN/+V9/8vzjFy9e6Ekpdx5G8/yJ9Q4A9vv9er0ObVPbz5vNZpiH6pQ5z7ML7iyDV8Hb9bWqd0ZUYoxcWu8bOB1VTYVrRVdVS+FzqDtDdnHxXn7n0PzBMeCdqdt7qn3fP9V7/+E8sp+/k2zvsV+/v/j5dm70NgAEF+TkD8zSHkv13tj/d2ZsHxQm4fW7/uBXr+V5Dy4HASDSPapzQTlXMAvclzGttXUiXb1aCEz12fDWiQhSxfJhxVnUqFbRMVW7qG3b4DyI1sxMpHRdt2paYBmGeZqm4/EYQvC+aZrGmAUUczgcDodDZdqeha27rru4uKgNhhjjWBPBXCor/HgcVbWqeg7zdDzEGMFa2G66AiayTHOeY85Ji6hCNdIDIDjn9UvvAeuAe77sCqioYAGcgWCdc4uzWkXeL85quMA/RYpIUeU4xjjPaR7zHIsCIuac5zkzQwhQmfsL5cNaIrK+aVduAdPOcyVZ55z3+2NFSFa55/1+f319fXd392KgeU45M5FFFZHCrIggwqpoyQRjG+O9sR4NAYw8WjSdxZU3hbBPERQCoTS2Q5dFp5IiFygxxzEb6HNK
CDqMgNg40262AXVlTOzvkug4RwuKHDA0IFYMKKioYG0s1iRMREQMPlgexLyH752eSOhv1zbPL5TWsRW4mi2cpERzSklinuMcpzlNcw17JeeBUyllTiUXKcKpxDGVlBmsZ4GsxKIFENEoGiKa88E51wQXQgjWWUfO2NrERQWH2PrQdiGE4EoFfFlrrVc1tU5LBFwYQdUDgEpBVC1w7l8+ubyQXKYUp6Gf4+y9327XuZS+7//qb3/97YvbDABoQvBONKVMpltCHBEZNYDKRatIIAGcetIglX1bjDGoMkZNsbTt6sVXh+vr659/+tnhcCilNN5P06SowzCtdm0uSVUrC7a+yES0bjtSmHPKOQ/DUBsN9bnt+95frFiqIJkhJGbJOVtH1traXK/h84HywAJBqk/COex90Fj//uUfW6oHP4C399DQ5+GHOr5/UF0RHg9C36e8+cP2/571b2/z9s14+4F7Y5uapaE5hUYlRDHoiAikglQUdennEVHTNJWTVFGa1dyu6krDyeZtMRYnYuZU5to2qKneNE1VMiql9PTp065b8rxavaygxzr3dM5R5Zk1zeVuN00D55KmOcYohasUVt34ZMUg0xT7HhShXUHXdUMseYoxxlJUBApDYVB3qm8CAQHwW1dMl8rmUiQ0EIJzzgXr6hy2kvSNVVym9gKcAUw99+PhUONxKaUq/TNzzmAtOHfWJeEa/2qSZ+19x7qmv2cE3RTnu7u7/X5/NpQfx7mUwgoKUIt8guC9LUUMoDXGGeusdWhQVEUdgCVsrGmCy1BmhCKMXAyicdYiSpRY4jxz0UPO8Zo5OP/kakfmWRMu2tYFRAdidOOkFJWU0kzokADEiSUj52dPyfAp8p0fvzdeDX3QYIYH9YlaOXhjJg6V9P1gOSuOllKklFrerFOrOM8551E45zzHHHMZ5/kwzofj1M/pbn/MSkmEhRipKrIRkQvFe981Tdu23plgnbHo0VxcbpHVW7Nqu0vYICJYBQBrlif8NSVbvXeNX1oAtNgiXh8PbdsG721HaOh4PL548eIv/+pvfvHLn//1rz8fRiAPMfHE7A0YA0AWWUSZFMhYAmZFLWoMqQgiGKjFThZFVVUg79wY083NzdPLLmf4zW++/Gd/+Eer1ep4d2ia7vZwa5zZ7/e7J5t+OF50XX0+m6apnNrqHajHQ+2pA0CdidY+ffd0l6dcSvHeIXkAKKUoYAi2jqL16a0vu6pa684ydec7VZmOHzp0v2f50MDz2PY/1n5+34s19t2X77EJhUKGOtDVh/L0wbhH+XDvWf/2ft4mSMB7t4fX/3mvHA2PJHlg3rm+lGIeOo4uhUo6mxUvXgJV0g/AWssgVaQKLQpISiXGuHJmSQ1ZORWj5KizFMosd9f94e5orW19EBEt6o1PeTCE3jXE+bDv+5evUkyN89X6p5/7EEK3MoCxWGrbNt+Ytl0RWgSXoqQI45hub46bzYUIljRbawFzP9ze7W+G8aby8Jyzotkbs7voNp2ZpztHlPI0T8ecSimSE+YZSqTd88/6vn/VjzHGcYQo0KxxdfnseixTop7tsdBQylyAFRiwSFWZlswAIoSIJAC8IBeAEKFi0lRRVVdYdk4v1rTtcNuaTWuMyXGO4CxCsQattxYpxdQPx3EcyzwwM3B2Ro0IF4YkVsFYCI6crxRxZ1dN9HiTRz5eX5iLsG66ZuO3qzCmm+P+cJjGot63petEWwxX2vc333z79cuvpWmnYcglo2DOKRcgAItyue4AIOcMUBj4bo7GmCdPnnxl/uWqCbBuRi7QjLtP/gA53rx88c2Xv/n42bObu/1nP/vFT4L/X//sL6xtnG2bgUMQd5BiU3ZG/S4D99NotYM8dUZd532go8amxNZ4w8E5RwYVoOQYq36bIcmJiYyUrBZEAB06a4zzpiAiUJWCM4hGCEWJjBMUkIQKAEKiKkIiFiFYW5TjHCHOJmVIOcWkcxrmKQ9TP85DmodUhiXUxVzkLqZX
++NXd4dXx+GYJSn5bte03W53uW67cTjevnw1TaMDOGZvMsIwGZ0cmdb6xpJD+mwyVtEaWDd52/PFjjer1jn3NM2I2LXtZiPrAnXQN8YgJUTUKmxrMxCJIUS0jqaS2FEWANdIWO9nkLD7X3711VH8TDBNQBaYcFIlNi0MLKwsxhhvPJEB0YIwFwEgRlAEqZwLVACYicViyXCXdYWr5mJ80cf/8T/+p3/6y1+q/eb65lvwNmo+DOMXX7+6uroK5nA4HLabz3IZWULJ5snzZ/v9vr9Ohflic1Eyd7ttmufWhnmYy+G4DqEUnva9X/kZgGVY7zZSwLYdiptT4jiuVitDME0zmgYJrXcu+IfF6inOD5L/++yf9Qcmgm/Y6p2Lhw/GvfMY/OD3p/HzXl7xze0f+3Pvpturvpvn9x6QzffZ/4MIkuEHZHvwe065fk9/4rH9v/NX32cCUu0LakMOEeW01JinilxKFZIgtSBwvNtXlx88YTWrwJUNxhlTy5vjOJYK4bN2GPrEpRb0KyyTmadpyjk757xbKqJ9P1R5QO9913Wr1otISnPNAquUSXUas86c9TxzzkNKJeYiWkqZpxRjTimr6jTWgDdPExgDl098aFeqWGHrOXEuNfmoiPBHL9S5q7Rc0/p6KgRfyAApVKGZxZJUtZTiLNT8L+VUJY/neXYP75QSANdBvlkZFxrftOhs0642F7vV7iKE0CcYxzGVQmSN9aFtL3cXlxdPXt7cplwki7VW8vHm5ubm5jrGqEZRGIVrQ8UbWK3bal2LiIfD4e7uriorfvzxx7/85S9//uRPpuHYOLNpPeTJaFk1xv3zP37xzde/+tWvrLVPry6e/eTTy6unf/YXf5Fi6boG0YxTX14UtKZpfBtcKcUQNE3rscwxEwt1DsimwoEq2L0mOkylMHPiYpmr5DOq1mK1eS33Q8SFSnHyqIDlMzAsRYclR+STVEsRYeFSSsxpivM4zcehPwzjENOUS8wlFR5ZD+N0fXu47sfMst5eXHQr47v1xSWrEpGy8GyCtRS8tXa13gKAcJbCFtAA5hiPw3i4uW6c3a673WYzp2mch7ZtrTGw9dbanBnQCC9S4z4ApoIGKjljScNg0Wk7IQSXvLaUMs/zzc3NOI7MgACIBlFBQGFBjS2h1BAsJXR5rCUkoCKyTOlKSSWPM9xc333bfWsIV6vVNI0iEmOs9KFpmkJor6+vP/m0e/Xq5ieffTb2Q9u2XdfFcZqmqXYxV6uVM1RfPWstkgKhXbgnVMeEOi+EU/pOhqqJ/Buj1jnUndc8pIS70L7nfXzH+X4gnfx7Ln9fadz3OTAAsPiIG+9jceExHt7DNfA7hKvv2Xt7+6+/8eGHITkf/vN0297d4+QiRFTBFyJQkfeIeE71JDORdTYoa4z5eDzmXCpFV6qrSynGmGCctbZkqQr3ImLQFJUxztVz4GywWbjEGCu3r3Wh2gMdj8ccUxV6bJqmaXytUN3d3Y1DX3I2VfCJoG3bRdBSshSeppmq/DFixZeyCpKdx5hinkdIEXaXcHX1VNHcHodcypzLOMc5V5lpo6Ly+pzuVEerV0xUARdCFFoiY8ggtU2pc4VV167WbWi8MWAB0jxY4xCRmeeprz25UooP+JCnpKpE4BzUK3m+0c5VPbDu48ufWOsVcZ7TNKd5nse7wxyzb7txHPs55szj1E/9EZQvdtub44AgCEKIztnGh6uri6urqz/6oz+6ubn567/+6xjDRx/97Je//OVnn3327Nmzn/7r/9Ov/+avN53/b/71v3z+5OKbL3/967/+i+tvv/3/9He//Nmnf/wnf3KzP3z07JP/+//1//an//4/fvvy5a/+6q/GYbq5u53GPo6TN/bq6goudprz1aYxko43L0oeAEmQYpqN98osiCha3UtiyRijDy2UYkVIFbTS9tAioQIhQu2hVuzDyQPyBGCh+8kHkQqAaGWqC2gRnkuJKQ/zPMQ0xDLEMmSeU4mppJJ7arND
dxmeXMBTG9QYRssCiVlSylxQWdJkgJHUGYjDYIyBwsrsQli3bSLDc5rmIefYz/3L2+tu1Wy364uLi67rYKBV2z65VLBOFAPLmoz1LmU2gtYv4byOSERYSkGkUgozIFEppR+H67vbb799eTwMRcCYOngtD80yMztRPrTieEAIFyWUN/ISVkiFBUAUmFUFGeA4zF988cXPf/bZZrVW5swp5Xkee84xkF2tVv00TtNknC+lEMB2u92u1vvCNzc3Xdsej8f26dOcWRUr7owskrPn7qyqllyQKqVeGKSUYrBi3N4ctSv7okqNv7HAUp94x/Io4eHHXv4eO3ZvHMY71xO+V6UFH4978F2R48dqsH2fL37n9m+GScDv3Bhf5z88tpynaaXIAtc09V0yCgLAiGiQYo593zOrMcaSq6YztRVakxsRKTlXLAwRceHargOAarIDix2PAkAl+qBiSqkmf0TknK+twUr4Ox6Px+MxxQkAShYAaJp2s9msVx0zpzwXLiLCilKZyqLMUrKoVme1BABdB6tuQ0T9FI/9GFnnWMakiUEMqIGimkEemxbcX1IAVCawVaLK2hKs6drQdV3jHSIiijV2FgFlZkwpDcMwDEO1VTqrSNa9VdCGNSe6iKoK5MxnmNwKtGl8s+pAKeYy9NPN4XAcxpgLgEpJfT/M4+C8+fjZ07Zt+fMvPUEqptKn2rZdr9eN9/M8157oZrP54z/+43/zb/7Np59+2nWd3TR/3l9/+/mrhtLPP/1kPNwebl4N/d0/++M/AoD/+v/wL/7Df/xfh2nuGv/s+dNSSvrsJ19++fX1zat5nm5vXn351RfW0ZPLi26z2WxXHtloiaMBC4ygFCqJXhlAqw8T25zJmlgyFMfMhgWtQb2n350m/2axaMN79VolRDWECqpsFksbRZBKLiEUwgKaRA8xTilNRTNQBsrgMmoCtbuPLEALqAgpleMw9fvjOI7Xr25ySUTQNR5AOE0ppZyiVTIu1EZxKR0455Ccc8aucykxTeM03cXpZuzX49B1XXLmcrsratS4yNrGXFQVwDnng7XglNAgoSE0VFW+VKWqVqKxqjpN0/X19eFwSEmhnpcuZBjEewcDVsET+pHg3ouKABUBVEFBEYqACIhCYc2sxofGjohwc5N/8kluQuODNQopSZzGu5vr9RMPSrvd5d3NzU9/+Qd3NzcXV09ijOtNx6XcXF8ry3G/3202Utham2J0zlkw1pAIIIsYUmbRqseioMqsKSWD7o1Y9fa4d655npdUCnzI8mOJnJ2PB16PfD/Kzn/UReE9dPXHw96bG/zoJcq/r2LpGzHvXZHv/lqdcMlYmXlnkJUWRanWbs6gLYXHce77vkrtIWIVaBARQ2AIpJRYSpxT7VSDyDRNd3d3iBBCaNum8n6MMTZQG0IXGmYexuFwOMRpBlHnfQhh3XallOM0Ho/HsR+qSwAA5Cyr1Wp3sdlut44w58xFuagIpJjHcUy1wpnzNMWUOCUoGUKA3fYytM1xmG6PQ8o8Fp6yJIEMAHVoBmBQAoF7K8wlzwMA1VqFE5SqtqsO0VsilRC61aprvSNQ5UIGLBnCivcpaZqnaarodu+9lPGMvD9P4U/2ddaFoMZWtXsAFNFhPKpq4myMRaLVOoT1s6dFXt3ebi82/pUdhl6h7LZd13XOhnHs5zlk1qZpNpuN916RhOFwd+ut+eSTT6r/3JdffnHojyGEr2//3X/80z/94je//n/+P3Dd+t169ezpxXq9JoXffv3VX/zVXz55+uzZJz9h5p/97Gcff/zxby433tg0jbeHY8rpi89/rVzgF7+A3c4a3bahWW+IUPKUQYwPgNVuoF5FQaHCSqmifkot2T14IO/pzESV0k5QKaQnNS+sM1xVVRIANCfYEaGiYaQsOjHfDfMU02Gcx8QFQK0jFxogePKspDz1/f7m9vb69nC3zzFK4TiNJSUioJyRNC7wY/h0d+GdyYiHFMvU35bMAv04qCEwhMaY
tilaBi77mxu4uSnWX17Os9Asuu3GxoftMPababvp2jYwcAvBWkuCRi0pENWZJRKRdQ6NSynt9/tUVLG6PIIIiwIAElkAOSMhKzUCEa21vMwbXoegKyAZYRaAxNLPs0dyjQGEIjyOc/BkrfVkDUJM06uX3+7claHQOC8MlfJhvUfE1Wa9WrWbzYYA52GcpsG5ENomxyM80IqjhUWo1lqDhKhgjCqgoAgQvTYCnfkn8Lo28jnMqKrDd2d1fzeJ1z+QVO87lx+S7b0nzMCPnfN9z6Lo9z+kh4ZUb8+e3lP2fG3R5VETuQfa1bDHzJXiVK2z5mkahqHE4r1X1lQlLiraEwkqzy+XeV6wyynGfhj6vl9tVta7pmnI2Wpb4Jyr8K15nvf7w/F4zKlY66twewghxlhlKlNKFQFYqQjb7baqJZWYqjRMzoxgco7TFOc5CasIJOZ5hlQAAdpm1a7WLHLsp3GKQjYmKQxaIQAEgFBQ39MW0Gq2REQqhGBRDIFDRdKuDV0bDIGIkCECEikIkjNLzlVBrXY0nXPzXEpmETHkAMpim0NYr7Czwbah7dar1Wq1WnnvnfdkoBoUsgooskIqsmrapoGS8t12zZK9977pSOHjp1c5Z0GsfEdjfUplnGcXGjKuaZoi2jTNzc3N51/8JqVyc9i/+ObbYX9TcjoaKk+vCMrNzavLy0sR+fLLL60Lz37y2RTjertLmT/7ySfzOJUcX766ub7b98MwjkN/PHTeBk/aBtesEHHuNeZIKiuPqBXBi4BoVLMwcDlBOkFE6kgJJwAnIp16e4bInt9QBajdLABAFQQiACBiVUVg1awyFx5T3s/zfprGKe3nlIuC88F3oWmttbcqqfBhnq73+5tXr8bDUQpbAGQpcS7MOc1IVLSgNT6EMh2xREDjSYEhpzlmrjXJggAEYI3xBqtLluqrPmXowd5kwXV7XAV/OW3neZ5jt1mtKnbRe2ssWVdr+U4JQZgFLICqzvN8e3sLALUaqnqWDKy2TQAAAoDV8uCkhy11borAAKQq52HaWC5CqrlIP06tMQ3YVGJo4dj3TWO7zgKIsejBpWk+HPrN7mIcx6urq7ub26fPPkrznJsmThOIrru2aukdj8eLC2eMCSF454AURevZkSAoWuMRGRGstQYq3f81FspZ27Pe68d4e48VMx+FePxIwQkf5/P9w1oUAMDW0tm7TuOx0/sO/yd4KxT9sPW/e2/vg/bzxq8enM5b2bAubeRzC5rIQq1tKgJQirMwe9cxS/Uprf08LhJjlFwAxLulNVVVoatI5jzHw+HQT2Mt4ZO15KwxxlgK3jtjDVKcF3B5yRkAKy7UOx9jHIahPxynaaoVV1QAhd3l5urJxWq1OnmFiwjEmI0xuUhMJaYsAlyEBVjBErTdquvWOZXDOI1zKqwFdC5QAMCCQaNos2oBFZRT+v/wKi05HwFaRDJIwkbFqBhQZ0zbhMZb4AICjffeYE4zAHBKcxzPHc2arHARZobT2744tpilFV8zwrZtV916tVqFECKIMaZtGgTDoIUl5sSqKgUU285/9tnHV1fbUgQIUWHTBQBAQ9Z6FzyhzSIp52GOhdVQ07Sr3dVlyezvfM7ZeNNYOu7W09gTyNXFZbdq53kexvny6UeKBqyzTXvx9Kn1TRKYp8PVxWZ8/hEzs2RrCZS//fZrb6hrgwCwALpmfWFLmodhEJgVlFUBBEUyohZRYBZ4SD+HE9kc4AGjb7G2RQAUBBQFBUQBJNDaxkIhBK6t5xJzGefpdhhuD8eJpRjjurUnT6El54VMUTz0hzjFvj/mFLG2bQuLFGWRXAABwZkQnHerbbfabLpXfzvFnHJBQh8sCII1fr1mkSGmcZ6FGRGdccYaRIxyPExRb+7GWDZd2LZtlVGt2W5ovWssYu0OgxIIuvuwZowxprAOY3VlAjlhEYmIFYowUeWUYu3sAcCio7fMDJbXnE6fRFHRoFFGmCNbj9YSCKxWzXGcL3I21B77
a0uwXjcRobCkmFebdQjNvj9yLs6ZNM/eWhVYt908jMaYvu9Xq02M0TWB0FZFnBijBYsGmNmIqBQisM7YBVOuIIqWTqPNO2gq8AZ27HEI5WNB6EfMyf5BpXrf0dv7HXf6O9Yhf9/rv/MqvD+9w7cBPyd8sMiCta0R6rRSEKgU4cyGmAukOWtR70M1G9LCqurdYo9Qp2wiUuUQK+s85+KCP0O8jDHek/feGKrdu3NfEMmcU727u7vD4VCN6KrCCAAYYzabzXq9ds71fR9jBgBVzImj5GmaY0zznLhoKZwSsMIqtNvtznp/ezzuj33KwmhS1lTFV8gqogIKiyIAPTqhUwUgITCmUvpVrIhRbqwJ1hCocEGg4LxBmVLEk4l8FSesXc+qtFJ1OupDqwr1qp/9ybxrqnBb27ZN01ztLr33RLYSnRCNIhSW/XHImXeb1ZMnT2KMt7d38zwRmdWzp4v7kqoKAiGQUTTTHI0PwhBzGcbx9vpVSmm3220vtus/+ZOL3Waapq9/++UwHisp8NCPL6/3rm12Tz9++uy5b7qb27uuW6+f7NarNjgvIsbgnPIwDHe3h9988eu28atVW6lsV0+egRS4vub+G1BhBRUlAFCpAE6tKVotVuq9HvFp8mcQjS4TjprtacXlGzCKAiBKqFLrziAiRXjKaYjpOM1308hItm26btOsNmDDnGTf9+M4S5rG493dq5eHVzep7yEVFEHBpm2cc+h8t9nQqqOmaTfr7cXuD3+5+/qrb7968WJOnBk0KgI573MpvhLlCZ03yjr1/RzBI5S5xHIcp9g3YVqvicgQ7HYbXp4wc65pW2tD04BWkylqu1VtdRsDMS8xT6HWbxfLBWVQQmvsMpM/NcLOL7tBhIr6BACAqoxLzhoAkaxkyBgpaJwbxlkA0dA4jt7iR08vg7OS8vE4PPv4k+Px6Mi8fPny008/2d/ePL26LKKtbe7u7pqGDodDhXG61tQHuwgrLfKkJWdjqWghAoMIxuIp1JE1D96pc37/DufhN87rjeX3hNh8x4D59x3wvvMI4T1Fzu9sdf6esq4Pql6+Z/s3+H/3X9d3b18fowpLqXlGFav0bYeIhIvCUIVEi5wnWfXrci6Txik1PqhrUizzONcdPmQcnxsMIFxSjPPMuZRSqmfQPM9AFKxRBBd8CGF5W5zzYIhIChNgPch1u7rYXQHA8Xi8vb7p+77GjJzmcRguLrafffbZuu0M6DyMeY4iooJTzMM8xznPc5qTZNaUOSVAhNDCbncZQrg5HH/71fVcoN02zPrqECmAkGHAwho5JRUFEoSlWIBSM4E68iKIMVT7FxaBFEjAgnTWtg0ZLUa165pV66WkOY7DMMzjUaUAiDPGewcA4xyHYYjHuW2XWYIqO2fB0kJCNwYAvPer1Wq9Xq+6TQihW69PecAKjSGylTp4dTUPUxzHMeZi27BqPhEtXBShingIEZFxNnjnPJKdU86sMeUp5baxm3WXUhIB23Yi0g8TEf3BH/6RcybG2A/TNy9f3lzfZWU1/vPffn3Tz9ZaujlsyjWhJeSu9Y0jznqx3VxdXH7zzVc3N6+I4MnlRdu2aCh0m+e+OX6x995bS1xSHCcVDs42TcO6NI8BQEQ8kTXWkEFRtIvHrJwzAEIERFUAJAWkaseHQmiIcs5FxXinCLfHw91wFKSw6rLgenex2l0OU3YkjcD17fHuxVcvvv5m6o9GgDhyyZ0PjW/v9v1/+9/9H7+5ubl6/um/+rf/rVtvhMy3L6/XN//Obdv2and7d+zH5IYIGJ5+9Ml+vz8eh/3+EONc+ilnIYAVQK7+Ss4KQsxlTimzINlDPzpL3SqE4Ax5Yxeczt3+uNlskAiBxnFEY+d5vrrafPH1kU9oT0UUVQA06IQKKJRSqGqGnjwRl0FAF/NePNcoEJxzAJK5NNYo0BQzZD1O82bbzqmI6HZ7cdy/evHiBXP+g5//AREd94cQXNFkAIfjsW3D9ctvn338k5x5
t93+6m8/3263wzQpYi68u7pcrTeZU9FS3Qpzztbb4F0lpofghaEwO2MrZg0WDPMion2Ga77RncGzKe3j4/kb2eEbA+kbSeTby/dc/4MrnB/6xQ8N5zVX+Yee7f2OF+v7rz+VK0909VO2dH6wzi7n9QXx3lee3hv3u5SCaFRQijCrFmVWZZ3nGUWVBarmVC71Ec+n5VzqtMZUERYiYhUCrFwmEclzPOtnLqasRPM8z/O83+/rwQ/DgCDPnj3b7Tb1FEopMeacWRhKKSnlkiXGnBaiuuQMKuAaaJsWjTn04zDO1pGzEJMMpQACKzIAAwpiLe7KW5f2gbsegQKpACiqEoAFcAgOofEmeOcsGdCS4lTSPBzG/uAdMdybBpyzGe/xjB5aJK4XVII3NnjfVFJjHffrfOWcJZO1RITGAsDOb0OIbQjMLEDnGxqnY61RqyqrkrXBBxtCJ1BEY+aUUky5VPNB1b5eRk4qSMYU1mlO/TRmLk3XNmRKKb/+9a+z/G0V7PgIBudcUZmGnkCb4JAsIv7hP/mDeZ7vbq7H/phztJaeffRRaBvZXVTlNgRJXcxxKjmd6+QiIsqqVrWCXgwAUHU/WCp1KLSM4AqLHDgCIJICAXBNo2OMwzgP84SGdhcXF8ZHBgVD3h2G/vZ2PA7DMMVpmtLhpiW5eLIjgR5kRGkdOYv/9t/+m//z/+W///yrb7+5O/rQfvzpT3dPnl18/c2L/++fXqwa1zbNdvPV1y8Pc+y67uNPnn322We3t/sX37y4u7nuD/t5GlDZGXuDrjoCkTHWGjQmcTmOw3bTxRjHYRqDd5aMybXUsb78CNHEGJWM8wEA5nm+uzsSgSwFPjyliVSLu+fAtlyvN4YCBYPI97NzZc4qalCFIQNba5q2Y8lzzJbE+QappNwpqvfNfn+01jprm6ZZdeuY5jRPxqCq3l6/3F0+Wa26n/700+vrWxCIMbaN08JS2JCtdIVSClmXUzHGAKAKKIsCVG0aBn4jsMFbUlk/YPnQtOwffhr3Qct7eHuPneQPUWP5oPXfmeG9dag/wpHoSekV6sMoUodOBEJY5DGrFLIxaK211jOzSFZVlXswlQiAqnCJMcUYU0w5szKM42jRmDqmsxQszFlySdPMzHleCNpF2BkyznbrVaUuWGvbNjTOT1N/PBzG4xBjRDTr3a4Km+3v7g77Y50VCheD1HbtxcXFdtUhIpLGOc1zKqXEUqZpHvoppTLHXLkKuWhhIALrQmi7Irrvh+M0k/cG6NiPfQQwUABUjQCKKp+kahARdcEFvv2UqCKqIoFVcASeMBhqg2sb3ziLIFI4Kcc0pRS9a0kFAC0uNuhVkKkLwRiDaJjLQ+gQnrwaTmqcJ3dWQSJTDTydc4acEgKA98E51/i21tlgEbzkebLWOCDMmWOMrFiVQq21RTUXyexLET6ZZRwyiABzrsoEzIzWKGFoW2EAQgGa5/k4jFIiJzpMtyEEMsYSrLsmFakFxsuLbc5tvenffvNVihOC/PKXvwyXV0RIRAjivZ8nmnqNzEWh6H1Fq15hAnn3zFVp4buD4ELRrlokpIKgJIp1skVEq9UqtOvbYx+6dWYzHvo5jt98882rlzci4AGvnl5d7bZljr9No9XUdW3wq5//4iertR+nw5/++//5//U//jv0Xei2r27v/vXPrtt25XxbxDICI7gmXFxdXe2uLrbHddO+bNtXBvdScoqkgBaYFYQZSAlFeZqmW+DGUp4DqniLTXAhOABQxQrqAUREq4rCUFRZa5SDrAuWSnFhMKjA0sl7fTzDZcvlJ9V1qjXssYIlU0AwlWC7pmsPt9ftOgzTNMRkALe7y3nqU0wxJhEd+mm1Wl1erIVLTENJ2RDc3dxut1vvm08++eT6+raydVV9SsWkFNrGkGGRUooXy6Uou0rYWboYSMY4Fj7PvM+j1jsheLDgyL57tPygDtw/qI7dOw/vw7b/YcZD8AgS8p3f/V3WP1a9fOw4v/fOHy3G
nkAT91r1RHSWVz6vrAqZ90qGItWKdqmtgynMcYrTFOMYY8xSVERKzHRKXKpEtRau6V6VmZiHcYrzuZRRbbW9921oQnCgkGKsbDYAaNt2u90aY/d3x8PhME1zCGGaJkP49OnTpl20/jabTUxTSrmUJKIxxuNh6PuRuTbzSoyKCM4v/RMW6YdxyoUBCGwqZc5QGMAjCwkAY9W+IKimCg8u6BuiSBVTgwBGwBAEA42zq+DbxgVrCBWkiBYC9cZIcJziwwpzBeCklK7WF6p6NmlCMMZY50Ipxbg6NbGE9ozjv+c22OCq7Cct9xQBahuv3nEmZuImXCCiApZSarmpiHBJwVtCNMZ6IRFl5lK1gFsPsOglojUIZrtdDxdba+2hHysZGY2p2qo5MfXFWmtO5Mtzcu9Iu1XzZLeOMb66vemP+6++/MJZ+uUnV5V4JgWYVRTRWh9CvTLndwHxfpZ2uuK04NCWmFfHKQBFJKPEAKi0uHXX/LgK9alqEe66zjq/P+5vXr66u93vb272d3dd13328U+unlxebDc3r168/OZrDn67W23Wu7/+1X96ub/+i7/5/MWL26nA7WFqt08uLi7/6i//tuvcdvt0vblUXcQWnHPNqkM0zpjGO0dFc7y7TSUXJIcCCJX/mhJIspgM9H3vjaaSE5dUcinFibXWDsPQdl3XtNZ5AazE7YuL7avrQ33eqkgp0kLhqKWJpRUBSgpLZf5dAzgiEmhRMNUaiSWB5FwESBR8tzreTd++um4dfPL8aYxTP8/bdutdUwrf3d01wThnWCyXYqmJnPrDcbMzxrjNZjPMU83tpmkCAy5446xoQakV1vriI9QCbRUMfcCEexh+3ujgvA0nfGz5ULDl/zbAmR+yLGHvB9AV/w5wKH/3Cd8btTU4QcVSWVhoAGRMBZg0zrl5nkspXFQViOAcL9OcK2d8mmKJVbGSCO/hdgRYTUaqyVmtc1bjhaol5pwjaxnUGNM0TRMaAK6VTGauDkTr9bZpmnmM/fE4zzMAEGITQtO4zWYTnMk548mltnJ7S+Fpmvu+n6YMisxasrCA9xS61hjDnPtpvOvBOOusG+bST3NWAAusyAo1z1NAVakC0whv1gQEgZRqsxMADIIhcBaCc6umWXXtqjXWkYhwzqDFGapchfE4EZGxiKQlc1UmY4aaVbMuBA/CmmpbUa3Dd71i1lrnQv3PWuuMt2SJDGJlAAOXgtUqA09zFEBAMt7VPVer2yorHGPmHNEYZwy5Wvemqg5mwIhAQWAGRLAWHfnGQAjBk/bEiBiCU7XTBPOcbPfRORjXiF7/XA3qqrpZtxcX22oT8e1XX37yZFOzVyVRQiC01pKxhHBfzoWlQfWelxcRBbSC+hWX8p8iAIMBQ2Du83VAVb68enJ713/129/8zd/+dp5SHNO2a54/f/b02ZMnl7v1uosxh65NZbatsx22rtvffTMOL9cNtmpjH2282UDz8ZPtarVpVjuyLZfcOr9ery8uLna7XWlK23hjNKV+6Pe5DHGeHREggDBB4axJCgdrELqu2263u92u6zoAmFO23llXPSC1lGKss86ioSWLqumc0usBjRQBAeG7OlKISHVkB7UARGAtsUIWiDlNMa23O1Y8DnC3793V6jhOBWC93Y0xed8QwtBPh/bw7NmTLjSHw52uQuND3/dNt8o8PHv27Ktvvzn2Y44pE5KzOWey/sF0rU52ERANEouKii4a6a+13PAty73vn4f9Z5bq/eDFfngA/zEZ5e8Pbw9/+32y0u/3V969/UPHAwB4WPBU1So5VtOI+sy99sIQncNe3/cplXmec2YQILR1nHXOGSQQLcKAAgBcSu3SLYL3MQJA0zShbUMIUEnZ1iJijaPVeIEqF0617/u7m/3Nzc08z01oc86Xl5dN40opBLJerwFkGIY6tubEx+N0PB5rZ1AYcxFmJQLvmxCCiMRYYsyJzappBHDMw5QALRoyKVVsKAHhIwhpAaBzngGnyIeo
xlCw1DZ+vWp3q65p0JKRkkpKhIxkQbUCXM93IaU0z7MIeH/fVa1FRjJUQ50ghdBWQbKK4Qwh1HKiIVulp14D7BlDRED23Kmtd1lUzm4vtYdqrTVmHqrIVg05lowhNdXm2zNzSpJiFhQDgFjQqEV+sgkXneXay1W9XG2JKEVYCJRIClyL55ZM5iRFWUt1ZueiDOzIJS7GO98Ei23xNpHJZhRmb0wI4ezTVE9qOUGlc5FZkQAWp/t70WBEQaq6nPW7pZQcUw32xnobguRyuNtPw4giFmndhBDalQ/GrLI4VhdWm92Tp2rEN44s+IaajE8ummlMhtxl9zGiXXWhNf6iXYd2NRdA0S50V7uLJ08+CiEwq6gWFbTGr5tN3jap2Q9ZC2sRYGEuCghcVNU6qt1r7xtDrr563vv1dlsrw0im8w0ADMPw8tVk3MmuDwiRVFAJF64bKFbM1QOJu+XiPBgnlkxaSzWlBVGDygACMo7jk08/Ho93M0DK3K42N7e3TWuef/Tsm7/5AkV3u42zOAwD523TeGbu+3632+XK62X96PLpbX/oh6lKT5RS0pyNIxtsNVtwEJahFUzFbFcvMDFyHm3eGHbOR/49s73/kurBdxY53/fF+n+/n8j3ncjPRw/rd/u7Z7xlLTOeA+3JE5JrknFW1HwDMSwiOZeUli7a0hq0RgWFpRrCgSzOOAYVERciXUwnRl2xPlTaddM0zllErOW+aR6naVLV1oeSVUSGYz/P893dYRxHg2SImvU6hEAgUoqQRURmmecZVbnw4XB49equH6OKUYF5nlNWATDOkvUqmBPnIizgmkaAppRyEUBAsgikCAJcB4ulYwcACoRY3oyCJAgEgKd0xBjjrW2asGrDumst3YPQagArOdUs1hqsStkppZTUetjtdqeYx+dua12c8+1pqSotSxkQCIBqE4gEARAIifBhkVBVBSreXWcUQQDRKmpDRMZQ2zZcateWiwgporHGWUsGDaihjCYTCIIhJ4qJ0DjjTFiAJyXVHrAxps8h5ywliUjOMcdERMH5ItQ1K+tNNe4gsk0X2tBdH/fOUnDWknGOnLFxghKTI7IPYp6qSmFUsMa/Sx6umnIDAyCqqf8gVL5/sIkohOCbYF0g515d38Y47bbrENr+OM1zIrQqJRaTDpEFjPW7J8/IYxugCYagOLdatebu+i5NatGVLBbTpl1v25VzbePcqr1iCKvdbhzH2/1dfxyP/X4aD1GS79qtuULQdDfHeU5zRGYS9oRt07gaGFKqhlPqLIIBIJHF2OvhdDDFogDV61gftPFUVVTfBrGo6r1C4YOHt1ZCUcEaAMBSCqpapMU8j8U6t27gOAqgGefUduspprZdHY+Htg2rbjMMh/1+33UfrbpmjvM8B9d249i3q12Mcb1e58RlOhLZiioyAV3j6om4YGv/4t54CKCUIviaV/DDyPehy3+Wqd4P7O39jn/s95rt/ZCz+qGQzodJADyAdMKDSdbSE2KOMZ6RdecvnnKUmkZYQiKAUlWnU+q67gzbtASIWNGbpZSq2yIirlY1m8Z7b52p2wDLPM+cCxn03hNqKWWu4P4YjTFtaNq2vbq6Oh6PcxyrW944jinNAGCMiTENw3B3d0gF2qYDgJR0SuAceFM1QkulAxJB13XjFOd5VgVyCESicKoKqurDPOM7bkQdYGvp0hvrvffBqiYDKKDOUgiuDX4GNsY4a4jIWFuviQh47zebTTwO58IgLpGUzhoute53zrZVFYH0gVAhnKRTz0tdL6+7lgtLvR1VXtxau9vtSilpnkopyqqkhGos5pSJyBoyhEjGWitis80Lj0pVcDEFTHme+uh2P0NENaiqBsEgOUtN0wzDAChc1BDsdru2bXPOfX94+vQp55LLVCGpIQTUXJCqxdVDpPHy7Jnz2H2+H0v18jQnw3vxuFOCaK0NIQjhkggCbjarrml4C21SzmLQ1vxsBjsMvWpZb3zTtcZerlZm3Rop0RFebLZG
KE4ZMgrDbrObp1vrmyiErmt2V1PCr27ufvvb3/bjMM0pl4lQ0Zr1dqXQEACtdOyHcRyQxaE4pIagsbYi9VOKMUZviIgUeJ7nMZfVarPbrZxv+3Hs+x4Rn1w213eznks5hCALQ49MDYT68PQfRr43FwEkrILQBBDIIqFw3u/3nz5/2nj4m9+8GqYREcn6b7755k8+/YNfHe6Y2Vl7dzft9/tPf/Jss9nEmzhNU1h1wzC0q93hcFhf7qzx33zR1wZ/RWxVcTLOuTY7VPGMSa5DDdgHVPQHU+2HZc8PGh7/MQM4z4vFRzTcfnC8eQOB8qGan9+JZPme6x/b/32l4x6PQQAAxoICVH9LMCoLEk7LZImMs8agaJaCQORDEBFAi6goaMijMKaceyZ2qIoipZSUYilFRQg1TUdEnMcDM68uLqqtubX2y30ZhjTG1DSrJx89vXxy5YIpHBGwXbcG7d3ttSS9XD+xaIZjf3W1ubm5GY77ecxIbr0J3Wa9Wq1SLoAY2tW6aQ3o1A9liii6p+mL3/z25avRWRtcczjmcc7GtNaJD64JDaLGqY8zWwvdCr+yzWFKAxr2oII5pzkrCwQDAlCwMFQymJYq7Y8WqrZFjSIqSGxFvAOPGhA6y7tVuLrUVTOByM5f3d3dFY7b7XrVNdM0TNMEhjKnYAPn0h+OeYqdh5ULGnOOiRBVMc4SHDTrxvuGC65c04SVdy1CUDUqTtjkBG1gg2Cts3bhL8gp/omILk87AtkK3wswozVgF9bmklnmArVd2nRwpjrMPI25CStUrLNFVQARSxR8l7lUKCBUyCWidStjOx0O/mRzYVUdgLLyODfOq2qFG0rheZzIus1m58boABq0aBAAihSGhpGCRwETizpnvSWyBg2ggQxewBCTRWPAWEVWBWHITCfVMlUCVVBHapPvolAyKZkM5GsGKSJj0U8+/XQ3juM4bi+bZULG7D0ZcwmGcs53+/aQmHMj3bZ7sgshRCRmjr70fc+q0/YSrn5hjCkxHfb76esbh9S2/pNNN08mj/M0glHYbje79YZL6vueNq7v+1cvXt68ejVNU+fs1cXl5Wa77lbrpt2s1pu2a5pm3Xab1bptW2mLc8jDXT7edutdPNyRUsoQ1TJ5rmcMTJAbyKAwVQtIRECsU1JEsYhoEGoZBsma+iVhFgKDiowkKFllFEYCMngXe380l1u/DfAXf/7iv//v/uSbLz6HMd4dvrj8qBEY9kfYXW5SKX/zq1/99Kc/vbi4fPHixRU8lTzH8Y6ZOR0//fTTvmv6vscJr66uGmzmPtGsvnGdW+1fXf/zf/HPv/zqc9uazUV3O996b626nJhLbekZQgOKIkp0DkjL+YmKqvyOfLvH1v8DrnC+m7f36Hnh75DtPbb8WFfnBydtP3j/58v0sHFSWz73WYIggIqKClbGnggAs5TIuVTB/pwX4ZVzyfRUmNBhGHLOIYScc2Wme+9DCKUktLW6uaoZHqf8/Kc/4cLTPBNRuwne+RKTqh6Px2EYco7GGO9dCE3lOSyZSu3wpxhTYmYUvdvvY8xEgGiYpZTCLIQagluv18bqOA5zZufAeOSiBQqzVsVqAKhofyRYDMp0cYEBoHc+WBVCUq8XIhCBMXCuTBLR2R3QOfda2wCRmXNMFaHjva0tEACY5zTPCQC8O1UR5X6Wg4uQjW/a1lrrvL9n7D2YCS1K3yfyw/0p4GsqGNV1T1WrL/Z5OYvmzNNc+0zGmKpdsFS8cUnCEFFPOSkAhJVDNCxS+7gikrkIoCnGetc0zrmgiKWUeR5LkRawum2ceRr1CRTODwrv99neG7PK74Swn1whFzDL2TBys9nUSsNqtYo51bIEMzfNFTMzaCnFN2HVrwqq840hCM6LwTjDVKYck6Jwzk+fPCkpjyUHa8A5b912s75Yr1btp3EeYz8Kl3XXrNpORKZpkgDjOG7Xm4vt9rg/aOHGeQA4NzK1asqUEnMioikPTdM0zhtjU0o3Nzcv
X7687WfA7wvNw9OFwncUn5YiGOnCzz0LXxLRNM+bjlYrk0c+HA65lCdPLofhbr1eu0WMt3jrjDExRh/sdrsdx9F7PwxDfbWZebvd1spQfXIsoSVjrS0phxAOh0PbtiklxFW9NVBOr9XrwM4PGu7+y/LG8mOGvQ/txn3ofn5wpfT77P+BzP/y6NfBxVkLJ/GRU4mBluFdFx6CMqQ510Yds6gqiuLrUaF2uR2ZEEKKsZrJwbnF1djFfgxAC9dRL8apxNjY4K2rU2/r7eF4M8+zqjpnfOutsQZAcmYuTQiWMMc4j2OaY6VG3N4MKQKCVTFxzvPMokBGupVvWptLiTmJgm8CWYzTHLkS51UYanGoFiqrdO9bywlIDYK4OO3VmhIiGlBnwFvzkFE+jiMRNU1wzsmJDoeI1rmK7Y6RncPNZuNciDECUIwxJWgCha5Fa85gtnuugnMhhKYJ3nutSsM1IV2wecu9riXS8whS5yQG7otFWCfOVQK4gTPO9qHIDrMqszKrtYu2M1T1U8ilVDVUsgZOisDiiRDJQGicccTcVZp/KQUURTWlmUFVkACdoTRPZ7hNLbcaJOdc5PzG4/0Q6XB/Cg/YOOeVD7/1UN3RABEscNDq9+uc67quyHLipRQutop6N853XXd5ycwsgJnLqgkFVVNprNGubdvw7Nlz33YTTCTSGltSiyqbbff0Yts1Ledu7gbO2RgMzgNA4124aHNMV9vt4fJyv99X8xAUDY13xlKdohBo/Q+1Co6vmtZaFxlSSsJ5ZfFY4CRn8PrLvtgPfq9hgYiQtd5WPAmWnYmA0zSV4tfrdYT9fn9k1t3Vky9/c1snXlxYxNQ5yjRNhM3Fdnd32K9Wq/2xb9vWWptS2q43JeU9c0oJp6kh9I2p7b3Vtr272z95cjHmoXa7EVGUz8dWB5z62P9nWXv8O1t+AG8P3r/9d1Ypv+f+f1/B73HqApwM8EBfGx3uxxcARAMIiJaI4hQlFymKSsDCKccYARwpnAtrCLWUIiUlEfHNoqsCAN5XS1gRkWB95QKKiPO47rbH/cEgNU1DotM0SWGL1jmXUlJl68hZT6CiRZlJrHVkLYHonOIY55JSnOZpHKdRRByozjH3fU4JXIAQbAg+pTjOEwC0bWubtpQSOY1xnKdUMjy4ighAiAAKiJW8L6oI9JY3E0qVgDKgKIAGjDkxt8xClck5t11Y3AFjzCUBgDHorKvdNVVo23az2dVsQFWFgRDatqtSbSrom5onhxBC0/gKaTkhbEFETqA+PWdp3ntEFKAHyABUVWNeAwuccblt29b5OBGdK35aPTcyTJIByFoKoTXeGHJkgJlESimCJQGQACvDbb6pGaQxjiwacmDqCsvMUphZGBYUK4HBE2jlvsbwuuzi+aHF1wkMb4+DjwW/h50hOOV/Z0IIInqEeu6llHliRGTOaOyiDw4AQALqrB/miU00TbNr2+12+/z5s+M0W3EXjW+ahnMap94Zs2nbVduI+HXjpGQFqZBfFE1ayDmzXXc+bNereYxpHjmL5GSMJYtoDdZCpAExaBFzzpMCUToO8dW3L/q+N4agLPMS1HNC9/p7/zqApX42p/krVKw2Yi1ryEIbuIctl1KUgYtCMKFtYk4EWrK0rQegmNkSWOsRkVnrBO7q6sp7TwAGaZqmzfZimqauWVW0cM5ZpskYk4Ilmz0BoY3TICLBuWGYmqab5/ENXMl3Qlr+8SWCP+R8f19Fzv/NJXwPqwc15NVJ92maf19SQ0IE8tapas4ZlZxZkJMlFQtWRZYi1EleuaQcY6yssmmK4xStCwbpeBhizIt5eikyZGNM1643m83hcNetVhbs3A85ZwIsXMZxzDkCqHPGGBRgyWycNc4RLND/GGNKpaQyDNPd3R5gq8wpxWHI8wiI0AZsWg9Y9sfbKfFmvdlsNkVkHuaUYBzmnGu2AUQkDFqNuJHwrWwDlACrA9yS9yDWeTkgKCFYouCctzXmCSoCij3B8CpV
sSZgFR/LzNZC03T1gpQsKRVFCMFUeTZm9sGEECqGs+u6pqll3sWw3ja+3iJVNQpmgZSeqE4sKkuYWYRdQLQKFIuemJuiAJkFACwZ27Rd055RMLd3NyylqHApXLSkDASI1LZNLCXnxCwLEh5VWZ2zACDAKgBJABKfIq611gUfwqItkFPKubTOVzTpA/RNeSvmmaqaYIzl1391/nSP3XhgxqaqFonhPuyJCCLUHAIqtb1KuyERoEFqvBWGXGKq9k+mll5NCIHQWgBTuLadqoDeReuyBWds13XMebCgyp0l5GQRvbfkrGipHgsENE8TqQnWNGu/WTdatJQkRY/7W0JLBgxaIjDGKGnRYsWKCCkAlGkaxqlPCRArX08Y8J3j4DsHhfNVqEn/yb6JFWnZi94z/nIC64EVximuHaEhInzx6tXHHz2J8zjP03Zdp2WScw7BTdO0oDdz7rqu73tUiPMcu1j5uCJSeLFHRgM2mJQSIk5TvHyyvb179dHqo7GoPdGoThJ9PxzJ+V+W8/LjZ3vwelD5Xfb/ewl+j/L87gtED/sfqgJazUsqRRirsJ+IVmtyS0AKUlgKA4viot6yQCRyKSmnlFJKq9VKRA6HQ0VgFi5nkx1jTCkJEZw3leF3sds5MmnOIuKcMWj7w+Hli2syFW1hjZHaP7KWvDU55xSnOMW48OWnYz9MU1bjCus0lmkEVeg6aNvgjM55TJkBoOla3zTj/ngcyxylFBABRKgqHgJFq9CTIYAzHYwAFJRU77VZKoHdqBIqoRoES+AsWWucIUNUbeScc9YaAEl5LiXVuqOqxBSnaVKFtu289ymWcZhzzjEmRHLOI5pSRBjsyrdt26y6pmtd0zrngEgRKv7bqDtnQnWpuLfKX6xlWKD7Gimeqqxv0DFrr/Tc+jo9DHqx2Za2dDmXUhKXGGMd4IbBExGYqgptznyJzXZX1cjO06DEpVpeiNDZoPgM4QOBilOt3PZSCtf06q3lnf3p7yxyPrw4Zwu388IP9lDRPed6ss0lg1ggR2iM4RRdgHWwjVk3oTNEhdnS/5+9P2mSHMmyhbE76ADAzHyIITMru6q7P77N25CfCH8/F/wB3FC4ekIh+fVYVZmRPpkZAJ3u5eICcHP3iKzMmqu7ICEuHuYwM0Chqnc69xwUgkIoreh0Fq1eGzMFglxmVTXmG4BG0hooEV/1OwBgpJVkbn0K336zYW5ba9qkqYBoFz0ABGYArAL7YddHGBuZJqU1q9OXhSBRrbsGdJNfFzOZyxwAFEK1yX35xgrATFVkHsfhZu99dOR/+P7+H//hdpqmKqrISKxVq1odPT89Pb3/+KG11neDZThNHcz6EWutkrOISG01FyltHGcXaWEZFFtbZHFha82mhI3PBiD//fbV/1rHX0G0B3+zAd9zTmkTPl52Fl7dQEvvLPnPeZxzziCKaO3VqaaMANqqtgbSlswgIxEwaHQ+Om9kK4hLtxYQA1cXF8rrPsbb65tdP0jNIYRWamslhKBN5jGN41hbjh5rra0056jz3bIMWpmnKaVU5lJKmVM+Hs/nOYnCOJZS8jiV1qAf4OpqCB3XWqZpDD3GbghdnEo9zeU052myNK/oWtVY0dIkFx1RAGDanABrXQuEEQnUfjoATxAJo+fgOTrPvIx2CN57byONiM47AMk5Z1Omdv7q6irGeDqO5/NZBETUOcd+oWUJXdzv9/v93tr1NlFNRGTnTEIPFQzEaA/LsOxFGiIS+7W9D1FUtaEsElxbnGB2gsPS8K5Vmzyjk7oYGImRGrug4olJwVq7uq7ruz6EsHFhM3NuRUSKtOUrmEhJuR2GnbHnqDQA9S74GJg5n5OlhRGx1tyamn2ylACzI3KGNJZmt/sc5332l8sc/vNYAXpiAXTESkiAhhwRU3q6YOPTmg0zpdJIlRBUWi615hwIPaAjGjwF5wWCI57zGJiqSsuJoA2evWfv8NAdaq0CzTln5GtIFLxvoltHpluyAiAifd8D
LEJLqiqlFmlaW5pms8k551YKETJCq83ErQBp3Qdl6cP7CfviMskV2CrCLxk5lkEDQOBaZZ5BFBXQha40eDydADD4ThVrrc4Wi4hz7nyeuu5ERNDk3fXNeD4i4vHp4fb21vtoXeqllNACNZznmaEFcPvrXUpl6Pdpzt4FZsw5b4wKG1buS3Kyfz9+/LCqwB/f7Nnxx3I6fq5T84eXJPVVU9dzZtMaEwzUIPN5bKUSICnlnNM4LfRXRNBEiRgQmBySugZeQmDnnEWBIcR5ng3lpanYFzmi/WF4f3MdopvHaTydtTbvfej8+Tg+PN7N0zwMA3G1OhMA2A6bUkopjeMpJ9GqIppzHaeUsjjnx/tUas4ZyEHX+WEYgOtpmkqFq5thf31bhaenecw1N5gyuL1TLcsmv2iZ41ZCx7UF6nJvXUTeVAgRVRiEEByBY3SEnp1jcosZMrYUQDLya/be1zpvqFcf/G638y4+tnMpzUyU40DoENk5Nwz7/X7f9YN3kZl1KcMoMLngzae2lCkR2fUv5MVLHEZbsmgB6NNSV3uVmTDmGot4LukL5nm0xBSAidM6oxd4OD4Nw7A/HLYa7fJdnlpr3BZggqqueNrmPAc/mBevC1CmDcNg9lJE2oreJCJHz2hYWD2S1pq1Y/+Uwt7ixKyz3W7KOaeG+SfESwHINfDtus4xL1IjqqSQpzlN8zAMDrG1lqdZS6k+eGMQcEi+a1CNHi8EZ8hYRJykahVp1ZweRmRGx/6ZYZzYyMYAwJ4jugU0C91Sg9wPu5LzeDyO4yhSYwi7oStQz6UiCC6CE585SF8wlCEAw8IUaHIHi+OLggpEIEsUCA3A4FyCUKqUBoKURXfsKbqH+6erqyvfdapQSvNdZKLWWhf9PM9PT0+3t7c55w8fPvzLv/9b3/fjOO73e3YBRFspACClNscplUCIqIHjPKbr26vTdOr7yChwAbhbqzB/N3t/0PGzk5w/0a68XYdfQri8ev2yaP9Tzv+R7708c1v59IXpEmNMKVnSCVcaC++9Z56mCRkdc5mz5dlyLt775qXmeRxHi/xaNQqTMo1n9i7Gbs5zSqnkllLquuF8Pn/6dOe9B4CcM5NLc57n+f379+9ubr13u30fQkDU1tp4eupCJAwppdPpVEoh50LX3d3/5nA4DF2nqiXNzNxKmU4jA4LI/dPx+HRuDV3oIevdQx5HVW3Ow/4q7odetKbpnFLd73039AJ0PJ2+uzs+nlNRoA6qNmY2wTJzwHOTnHO321UFFa11bR4nJiKHUGsFECQmUBVRAGZwCrvod7HbxbDfD51DkBq9d45yzgpLTWqaJoDqvX/44Y7Qffz4cbfb3f3w0Frr+/54PImiIgEtxNzDMHR9bzh764KIMaIPIpLSVCs78pseoe3jlircxR0iSpMqZfVk2DkX3LPfc2nL53m2sy4aHgztGU3hT1VljUUAoN/vAEBWLTRdJDsKcCREi97WqQiw1ow3DVVEFFngT0RLW4J591XVoocLF40RFyaa2HkiIseX1wlb9uJiITCxqpJwAfDO0TBUS2OKRueBicAwPtxdZD5OT0ervppWi0pzjNzHaTzVeQrO9zFGx96Rd6StlDIxe2bsmBtoySmnGQBaa+xcHz2uj2Z1JrgsQCC0gWIfELGpIKIgLBV0RGL2IYjUsPOB2TH/+j9/AyDvrq9++8N3nSep2ESVl+FFBM9snECL/t5zLU8B0TlHgDXnJtbUJ9JaH4J1mgCR955Bk/lkAE2gG4b7x/nT/ePuF9+k3HyIP9yf/+f//Pbf/+Nfn06n9++vW2vSmjYIno34aRzHw+GQ5vGr9x8+ffrU7fb39/ffdB0RdF3nQnh4eLgNH3ZxsMD9/v7+5v1VSTk6X+YCAUxoxfalzaV7uwcuieufsjv/AcfPjSu+VIn8Y8VFXyp0funz6U/Rt/el449lXP+QYO7HS8GXqgvykhbBJpwI6NKUbX1gUquU0sqcypxKztKEARXEfHMR
SdM8z7PJW5pk2oInFJG2pFK/+eYbIpjnqetu+uBBVWrrQ6Rh55zLeX56OFpcaLv2JSmJIfCMxprI1SLSAMBJq7noecxPR1Cp3mM/xK4LRNBKlaoAMAwDIeeczynNZrgMjr+UfBTAyOAJQJDUlFrgEuGGYmQXiAgCBGJlTwfACI4heGbCllMtBVywK2dmJAPNhgKq+qzZu9/vY4wlt5TKNM1W0mDXETpmdhxC6LwLznkih0zARESCwM+kMIhbY8nqHS/9T+s02OInoE2WDjZH5xIJsp2/TQZEJO+3cFdAVABQVQCYQFFA1vKnvYqZAV9Imy5OGDObXpXtVgwsKELPcRisG9lmwreLeXVVm0v36lJ/ZJ7rhbokADjnGrwIJmBNe8QYiSxYR1xue8k6GnNeSck5in7B04bopBVpJuywpCUQcX99tXHMVq2taa0ZgJhVVzpAVV3iaAAfAxHBWnMVkVpKydmDa3UpEnddt+vjbhgO+1DOeSWYQDWKAruRVUUFV30sS4OjKIhajEsrvEVV2XrCLWdKirqNOehSmIRc4WkciQbX7c4/nJuCd9Ff8Xffffqnf/h2nk6H3T7NWbQi+lLKNE1d1zkfLfCNMR6PxxBCB3g8n7uuKylLbeQQkUlJRI1hEIAAFjDLAn5B5AtS1r8fv9/x5zB7f6wq3c+q3n32nB954yUtGcKCj1DVKlJFaDEHC86FiGqprVQp1epSNTdScM5JzY7AM7ZWzCJyiEw0jXPXda21nLPZGw4ewQ/7Pk2zZbf2+32rteXiGJkJROdxenp6EpHd7tBEjudHJofIoKSirbbW2jxO8zS1iqVqrdKqpAzjOZ/OMGXoHfSDv7rexciqyfhCA3Pf7VLT8zSdximVJgpCCExLeR8UBMAaJNRu2Qyf8WgL0tLQsG3EBMiwJIg8QGQcgg9MrRYp2e9674gQiAHxWS0aLhqor66uHIfj8WjE3DlXx459JO+9i94HH2JYGKejEXNegjssU7fskrUZfQ4ze/ZE1FQAwFKZsLyLEFFMLGKNjZAWnRp8OWF07SNg03PVZt3Mq3wpqRAAKRm6VRB4ZT9ektjPsxdAxfLGSMvYEgAhCoEo1SUoBEVSdkjoAQDEzNKl/NBFL+nnXDqLaV6V+p6z9wu4Fi2JTarIKiKGPBIRQUBEBTIJKetdAVDAhoj7/b7WmtOUc84pz/OMJzWiO+v/syPGGPrOe59TcU7VAnETPFyI09jaE58nAxjUlUAWxQxAqLrwtlNPUpu25r0/7IbDbn/YDTdX+/vxzlx/1YVnWhUu6affDs42FMbvbnaXQK0tDgRU1BBwAuCIAIjZM0IVeDqePbv90DeB77/74fbm3dPD3fl0Ph6PTOq9n8ezaA1MUuoEU4zxcOChj7VpYHp8evr6m2/I+buHh2G/SzmVUjyt6lS5MRcHxtDWLM1rOLi4SlD9oZvyf9fDvJ8/VZLz7fl/EeO3nfY7Ub8bfMO8w83hvTwAFlUsh9C0gvnOVVpVFGVi732RumH2RKt56/be1to0TfOUrDW473eqejweg/P7q8PN1b6Lccyp1awVp2nU2tI0EQgyl5LGKZ1Op9ubG9WlN6iWUuaSxlSmchwTKNeGKbXTcT6ecirABLs97A9+2HlEnadSa3bBx74D4DlPx/M0zrUKCKMSiaIzpS8EZFBRw60sBTkTHdIGKLhkCwTRrTpvAABM4BCYwDF2MXSBsQERhRC8Q2lt3d201lJK2qp6McZhGGqtj4/HlBIiMnHf9xx755wJynrvY+y7bghd5yx8dGz4SSSCNSICc/r1RTFn+5PFeUuItSUbL44fny2CREqCBgpVNL5rJUQWIARFYkRFZEUhYI/bJKQljFRQQktgoiziHgBAgLbPX14A0RKKSDW50ddh32em6ArMebtAtgj4knIWnvONqIbStA5FUFWdzvNy4yKKCKIKggrAFCg4T530C6VLybW1D19/hcgAYgx9c6m5nRB5vx8ACJGtb2S7
/tKa9zEEx+wt9AdSRM55BrDuEqu3EQCgLuhWrc1FjjEOQzfs+j5G78g1JeNLRxR8gb+6DPVebA6qBKhEfFExZQSHuKBpVgtqCfMmQA4E4Dy3OE7MvNvBv/zbf/7yl7/87jf/ud/v//PXv/2Hb79qVWHBloJVked53u/3XdfNqaSUVFVaYxeGrmulikJKBZlCUxGouRVulgcopXRdZ3uIAWsvsxE//fhjJRX/+o6feV9m9v5Ml/YXCvhenf8j29nm+iGuibvl9ZX8Yd1tSKEBGHNjM/2eZetkUmLE0louRUQdcegjkpumyaJFw0fEoSdyIYRa6zRPX3/86uuvv940/FQ1l9JyGcexlBpCKE1Pp9OcSowx+A6Baq0plTTPaZzO5/N8LnnKQKFUHM/58TGfRyAHwy5e3ULfe+el1ipSETHGOPT7acrnUxrPqVZAD0zOCvukgKimICgIUIEW+Z0F+qgLOlwAQUUZg1nJRfsAgAmYwBMGps478tjF6L0nFGnNewNrLGgRXUXtu9tb59zT6Xg+n62UxYQhBBd6IiLnyYUQOhNm6vvdJfOLouLSp4CoSrbrrdUts6zOOXuuAMDW2LcGiMbBYcKkW2H51VSx3j4jrhIgQAAkUDDlOwDSpogMKACMDBbtITDyRv3DBKKKIGoADrCAQhdaQbXSsEXZKqoLMdZik/hCYxaeITavzB6+Ce+eTwMFAIOhChIACG7O3FL6sgqibbL2FZ6irmLuK9pLUGGcTkDkORLRYCl3rao6n+fgfdd3XRxCdN5F54nJj9OJyW8Vyu3zGxIgC6A2FRVQM4nE6FRRpYmoooKgI8/eW7pCWiulIOoQuyF2RBSdn0ojUUFFIkRGbT+yS1z+afMDUBSkIjki0lZEANZhV4TaWpGmiEhQi57HCRH/4Xr49N04jmM37PvOPfzwg/HRO+cMxKuqUlqZU02588E5d3d3NxyuUkoR+erq6tPdnetjzpkchhpqrVSIGZ0nbZxb2u12ryTPtkzJ34/f7/jzRXvwlwv4fvrFry7zc2RgZGMAACs/mahIbTnXec7zPJecVRUVtLYMGVVqKbU1WPilXG1tnudUmirmVPq+77rBMvWtta+++urbb7+9ub1Kp/F8enBWOGuNkbSJ+Xem7cAu3NzcMIbWWk5lPJ3P5/N8HudpKrkBcM51nOrxlM4jZIC9d4f9zbA7MmtrubXKjjx67yORSynNU8oVkICDR2IRFVFSxKoAwIwASgxAEIIVd9SsHejKgIjGcYUASoom1+0QPIF3xEiOObKLMTKRSlsGpNZtBzdQPjPvut5qRVawdA5NUtxsm1k8U9fruiGEwMGzSYQbAmQNYrQpAOAKmoCLOH5LZsLKXbm9/mpiXGbAXk0eIrdV7wQZFAnV2rztFMS1y9N0zVE2zjZURCAjigEAUmy6ab/RGg6+jlAXCSd+RjEQPP++Fp7lOYDD13W+Ly0EvKh5rwyfhM+UzUhEwXsAsJZ5WAG+qtrvOhVpWjetLouib24/WoYDREuraRrrsaLgfj9YlmK9JUPVOoPQwsp+oKrSULGt4AMWkVaXrwiOc8oG3Gi5oOqw64ehcwTMyA6hNkQG6zv6Aknx0rqHSBbCijAtTR0NzaIor+19ljTfyJsMRgSEiJobnM6Tuz3sd/Afv/7N//jnXz3ef397ezNNqbUWXJzHqZTivbc0z/F4VNUw7IwcIKVE7HeHvX11SZmZc84xRw6sitbfYk3ABq/bAvHfA8b592hvOVThzxbtwV/C5j2XfC5IK37n5cHLyK/WYiHaBm+RplZmsCZ0FWFAQZQmeU4AcynJCoWiAFDnlE6nkwCV0lJKwzBYwqSLw37XffvtN0blME1TKYWdl9ZyTjUXACCAlFLOhYhi533g+VxSmufzOJ7P5/MxT3OrVYVF9Xw+PzzAeQYEGDwc9te7q2uk+9oKiBJw1wWmoErTlKoheACYiZyrSMTiFDEXJAAFtrZiAmQOITTYYgsryiAu
xSMLsWwQlygIEZxj0doasYsOqbUGKq9EDYlItW3914+Pj6WUEMI8L4q7RM5CkL7fHQ5X+6ubfnewFPGG5kciNdNAqqRq1L1NkEhRiRY59c883yaIKJ5hi/ye/7rEhS/euKQETZfcNj/ZfmfTcSUFJSVLXSoAqRYkxjX7uCRedcsaAD4bRVCgeoHEWeA2aslNAlhE4ZZL0mejDhfWfbuLzyY5bfy3tlSb2FbP3rqhqy4PiJlbdbACnaxQZmHosOu1ttyMtq3oyt73dDxbbc8RI5PzMcSekRVEAA0Uxahu8XeChk0xChxsQC2RJszsCEszbFNzzqmjBhq9k0w1Jc+0H3b7/X4tDTYCqNBg49tR/ZLCzKVLtOZ6kYgQxBMDApVGqkQMhE2htNYFx8410dKUEZCwAZ7P036/N5OGyF035HSepmn/4R0pWGwKAKUUS2MciPuuqykDLwnbw+EwpbkukJ1caw0a7CKlLclVANjkigDAWPp+7634v/OBf84k5/O3/oUgnT/9wjas5pKEeSmk0JpoXUhY1sI/MDMLFEvbtdxaM3b/UltrmlKa59zvdqW05yo6883Nzddffx12rtaap6mWYpHNnKd5nudxsoluTevm683zPE1tmqbpdJrGKaXUaiVg5/3xNJ1O8HSEAhA97A5X++ub3XDIpdim1EcjlPJzatOUyTsgR1TEMTELKiohAhY0whoksAoJMTCzSLP073q8CEp0dZ/RUp0IBlqT2iACgNRaEZr3Pud0QYKzIEUszjsez1KbmT0EDiEQuSJg0d4a6i2KE5fgxoWO6wW4UQAATO3vguUSXjzl53gOX7z3RZz3Ok8uS6gHS1/z8vtC1goEuEB91ln0dtvFbfRQSWFDjfJ2SWsAhwu4Y/Eo4LO+7StIy6vfL6NbANhyy6q6EXNb0Lx1NIK0zexJW753y6oZlGaapk3x3JiJ7Pjw9RUAkD4jlRiZiK4PVyJiNWmtDRFVUElLqXaRG+B203C2F7cWFBC1FgvnXCMqIujYCOqcW65NVLXJsrfptpQB4LUW78sU8YviLhGRgrkAzKyEzmlpzcrJqlAKIAEFYqLTqez7jpnv7u4Ou/7p7rQbhtPp9A/ffkNETXTbRqyzxXf9zc3Nw+PR3N88p8PhcDyfBJ7H8DJNzY71gpbF0j9G3fLH3QD/Wx1/KpaWt2bp7f7yI2//6ed/qU9l2+OWPXGd4hEXuBQAXFBrAQJoFWnNOcfkqlaj5yClwF2ZS5vmrutiCMfxabw/lixtlI4H5zlN59SKas2SfeugYiqFiIPvSilpqiCa53k6n3dDHPoQnO6H/dXB7wbK+cmMpA8oFY/TnMaShE5J9/uOWDVLB67rulrz8fFhJ3l8Oh7vjnMB8TsI75JCSvDrYy4gcwRtsLuFmw/C7rd3T/+fMAIQfPh63++vn+bcQOvQP5Z6nqfiCIYOREsVAIiIiPhwysPQOe9TKnPJxC50O/W+Jom7fQCdn2ZRAAERcAyH6QTmQ0lTgA7xqttd7boAFHnYH7rdrgOPrifPqJpRAkoDaFjVKTMwQoPa8piC6mlOtYqVMFMTT9iHfojDMAyWHA6hoxDJex8G5xyTJyUBgAbQAFEbIxGxM8QjI5LZN+cDIiLwUrO0yaMqrb3KCuiKMrUi2tLNDdCWXKQ8T86L+VlK2pKEDAgrgXS4AJgQERgZCipIQzTEywZLqaoaKVhMBqKoyKsixNoJwABQl9CqqepAg10pgxIgrvZJFlklRCt7AoCKSAPw7BjFjI167wEqgPhAAM00CB27JQBT6LytFNgosWFpwLA4mhAQlB2SEAuIZ9kWrAqtv9aUTciJ2CE4Xoe0kh+W1aeaWkNRETCt+VKKkbbv+r6FME3T09Nj7EM9TdjaYehTmgj141e3pc6iOaXa7+KUZSy581EIA7sFnoPQVEHJytCMWGqJ7B2Dce9qm7Nqq+B7klb2+z0yfLo/9/G8PxweH8eA0E5H2bt9549T8d0wifZx
+HT+QY7tw+3ucdL9dXz/8avvfv2v+8ifPn338av3Dw93nz7d7W+uReA05q+/voVWx+NT7xlqklG7YdCxfbXrGsK//Nu/HuI/4dCmu1MHQSt8/PixdXg8n99/eNdaq2l2zo3TCfDK8gqECGp0QmgSoeTb293ypV/0eqf8wv75+bO3kvNPPH5ufPI77cKb83/e9xL9Cfr2/tTB3B8lyNuW7vbI8SKjteVCNyeLmQBAK9Zaq6h5o/M4Gm0jyJLzdITOuTrXbY+zTzAZZWQKIXTD8P79++vbG2Mg3HIgAKBNn5XWa2utnI6PBvbrus57FiFE/HT3YB0HVVBqq2U+Z5lTk9ryDKpwdQXXhx4RW84EEDrwHpHYUkVjyXNL85xKbQ3Z7k4BN9Ij73mpZFgUSwuyvLWGwJa3tYdACEQEWx0I1SF1Icbo2Rksoi1UIxocknOoKi1pqUlbCyGAUi0FFXyglIpxSSuyd84FH0LX973HGGMMIVhzuvPehu7SPcKLKt3lKsC3icovzKjLc/Siae9HptBl8vztJ8BGpHLRDwpbJm2tn739zMvQbfscCxcs1fmqZrmcf5mtWxPOl/e4feZ2ndv1fin5j28Qg9vlbX+6vOv11i7SAOtE+pHCxNuvtse5MSVZLXEjMLO7NMlFi/yGYbi+vna/vSeq8jLSfTvIz9+yBoSILy7jsq7JKwOOc465orksC4S76io5+fg47jryBLWKYy2lQO/Gcby9PpxOJxfD8Xh+9+7d4939OI4DdTafZVGizn3YdV13f3y6vr4+Ho/9boh+OJ/P73dDrTWntGwgRqmjavBOWH0FvBi3V3H/ZyfA3w/48+jt/fmrel8650de3JIqiIurJdw3tQAAgABJREFUvnImsZXfq0ittaS8kBrDQrABspxp/x3LvMUKpidpRrG0ent7+/Hrr29vb31cmtbneXZsaxtbLnme53kuJYu0fojTeUwpdT4QQU7T8enp6fHx6f6pNK3qFFytMpZ0PM9zarlCKRAj3NzsDvuulanW7AhCpGHYI1GuTQFTKk9jUvaqSkzEjOxIYVOVs8Yg2z6cc4436qxmctRb5csgDKpiHQRMFJ0fou8iR8cxOO+YUBGXng8iNIleXT+ckEBVawHkWmtKqdYa+66Lgwshxrjb7RwsYgtd14UYvfem2/Jqz7W2EyRaYStMxOtGzNar8PbJwyo9IxfYXXq53W/7Bb1Mgb795fL87cxNVRHW4tmXpuIaklkJGbYufkS2jd6CxQ39b8augSooXzLGXZg9XMuP21cgEagi05L2RDS5VVF4kcRGVLmwjaq4Ni8+/4KLfuF2Algroo3WKry17My0XDM+52CWXO6lk7ENS1tBYYvevQgihhBKy2gyzrpwdPV9f319HWMkmtv6pc+p3TcHrUNhI3B500RQK3ivqg1RnVtOCyH40lSt5KFoQG5QkRYjnZ6krPX+m11PRN7FnPOUk4tdzpnIWTvj+XweSGKMsDIBtdZcDCa0eXNz85+/+fU4HsKuH8fxPUDNJWttrdWUQwjsnE2DnHMI4XKW4lpf/+x4bgXCvx82Gf/4SM4/s/H78at55ZK/9Ya2aSGiW5RzWcyLMa58hAqiRh9lK0FqQwWpTVUJcOHLXykcVa2qNxuxUAjx3bt3Hz9+JKJxmpa15L0jFZFSaprnaZqKsRApBOczoEMKnkHb8XS6u7s7Ho8pA5ATYAEuVeappNRyhlSgD3B15boYoYnURgpMFPuu6/u5lJIqkKvSSml9Nyirsif2QAiy7CCqGnxQ1VYrwEJOpohSW63VAdu1GZsLiDKRAERHMXoCZFR26gOHwCFSCC4EH4OL3jESqNRaVU1i1Dx4YeZSsunOlyqK3HVdvxuI3NLvzF3XLQ7yRtJvSJatvGeJi+W/hIYX3SAbRmYp8MXkie3tX5pmb/NFb20evNxnL3++XnUXYdnl5LwM9T6/WV/c7+WZF418z3cCZlcIZQVE4trGtoISjY8GEBXJ
vcxcESKCmnruM3PN27t7FerZVRk9lipejOclk9bl689Lkt4MvjGPG4mLrLw2RJTHtAS3L3ttvfcLyGqB8aJBLz/v6epqKOztuBhLQGhrsx0RheDs9xBCKA2Rc82qwACtNcWNckEA4Hg+PZ6O33y8CbEvIo5gHMevv/76f/2v//UPv/zHH3744fr2/ZzTOI5d15nRWqSyzsE5F70n55j5fDztrg7keDqdTWmWyJVSF25So9QxEiKA9TFdGr/nmXnZH/mnTjb+pY7f73r+THp7f9mA71WM/1nLt63treQPa35jmlJJ2Wr7245jCYqaiyFcGKm1No+jQkNCQCmlTGmc84SoXRd++Y+/fPfuhh3mnFSqddoysxZoLeeU0jTnlKQ2RGWiNJ2lFe+ImXJO5+PTdD5pq0q9AFbRlNuYW861taXf7uqqu73eg9RxPjlS0z3wsSfvpbYitdQ6pVIEBnLBu2aUGyKtKSo4YnILiMDulJiJqKpamoXILygBRAKkpXUBvOe+j1CLtIokzOADEjTnfde7Yei6LnhPosYUTF3f1VbOT0dtNXhfq9zfPwIAs/fRWWAHQLbIfRd9t9i8S8Loyw7rrXuBiIDdxnKJRl/GTETS9BkXcuEs2adZNW6J+cwC2Z9FYYFM/liZGS8S469+bj1wcMFyvvUbvD1UbTe27NxzaMjkLxixn83eYg6ZYKUaAbLi5XIjAHApCEzkVFGbIhlDi1qv4Rok0ssltkTPr+z05Xp5NQjrPguX1w8AaJzZq2W6XIyXn3PpRlhofOHc0CVwf8kht2bCjRYiK+kCSVFVBSuLwgUP9atdQ1VpxcgCgEOo9thFiKjrQ85ZpHrfO1e8j9PToyqEQDmLoohI0Uoemsr41D6FH/7pF1+72J3H467j2jT4rouDrqrowfkGZS65z9ny9iLScjmfz4frqymlq6ur4/l8Pp/ff/zw+Ph4dXUVhuCJLeJlZhHwLpZSQAkQnvmEdHnKX/Du/n68OP4kfXt/TuP3pau5jPEv19JnPfHLk62ZeltpunIKqxAq1NpaqdCk5lJSrrWiKAE651DbuLrAlt40OgZj6P/2229bayvT2O76+trEE6C2Nuc8pzInaW2FiIvU5hyb2urx8fHu7m4cCwAIdaWUac7TnKeiuQIAEMFhD4f9EIObx6nlGns/dME554IvtZamTeQ0p9nwKCI+OhEptZViCNPFtb/YX8hYg7W21hYK0aW2p897HAEEj96BADJQ8BgDeU8oJXgaOtf1PgZyTKKo3hujjZSa5yStqMg8z9OUYoyh6/u+j93A7Jk8B29sTLZBWKi3WT57PEq4pTUXws+N+RNxgTD86HEZbMHlnn5RP9u2klfh3aVn/Sq3uU0nYnplM7b+gbc1PLgwHq+qhs7x5cmXk/ZFqEfPgB28qLNtJ68mwRCkSy+KffDzcgDzZ15zll5+1JdqSLAGjojblmyvXz6Iz3RZWPyGWwx38ZmXaRhmthIbr2GuWb5SSq0AAZgZ10S69V189rkbKNQ6LhEARQGBluZUqLWGEEyFo7aGqOwwRt9aI4LY+ZzTxvjS99BaA4IppcenUz/syjydp/nq6uqH+4evf/HNp0+fdrvd6XTq+97gQpYT6vveLs8ITnPOQ9+P4zifR7ltqeTxfN5fD86FeR5Xkodql7GO//p0UEDBCOS2Z335jP7Wo72fez0/Xsv88+nt/UUCvlfv+vHsE678zuaaLYRApWpbPPSayzzmlBIIWFV5AW4sm+NSsBDVJpItg+Hczbvbd+/emQFjwBjiEDsTcJmnCUtLU87TLK05ArOsqVaR5plV5Hw83v1wfzqW1sA5qMBzrqepjrNKAyVgRmR89/7WMdScEKSLLoYQYxy6WAmP41hFlWicSxPo9p6DBya82Kn1AhJCzB6RGZSedyvLLNHaVm18viAaPSAqSAOtMfJ+P+z64Fg7F3e7uN8NXWBAba0oiHV3n0/HcRylNhE4Hcd5nsl5IB52h91u
R+QAIIQQh9770HVd13U+Bh8DB0/ekWNihi01iQiG8ljJqYkWgff1vi7jque9GJRAFRVAdYmHFHBDbF62jsuas/tybe9VfvLZzKy740UVCQFwEbO5GNztFUS8TBIiMAJuLFlrchKtqdF0l+xRCqil717M7DXiUXuYvEixwnbDpploUfNSGLtcOF9gA8EvoFFMWVAX3TsLGY3s1K4ftpWoBG9yy7bQ7IyNIBAuAmXvfavZIC2LRor31v1dFXBjtRY1CRFtYiSzzxe5FKdRRWCzEuv1MyuiNimALsbeKvOw6rFY02oIATEpAKCwdwRwnvJh54Hku/sf/sc//XI4XD18Oh/PU2n6z//4y/v7xxC64/HoiDhiSmn03sXQrygnRDTMamut67qUs4V6D3f3/tYNQ6cNQNC8YR66V8Muzy6O0Mvn9aXQ/L/tsRSa/0Sf/scKIv/oQedbD+jVX1+I7RFZix4igmhJaTqPVqszPCcDgqrp7RmB8hYybqv0cDh8+PAh5+y9f/fu3YcPH0II0zRN0yQieU7WfgeroG1rLaU0np7GcTyfz8en0ziOiND3tN/3RVxumIvWAg2AGZ1zwbubqwO0mqbRE+/6ITr27Pq+r4BzLlUAHQsAeRgO+27oyW0936utuMgmbaHV9irAM7Eh2OaogAo+AEFrNaO0GNxuiF3vHOn+0F3t+64Lnklqqzm3XFprDuH0dDw+PgEAEU3TlHPpugHJdd0QYw/I0oCZ+36wKsjGa2yXtMV8r57dqwLGFlcZJuJLk+SVJ/RZx+htye3V+ZfnXFq+tx73lozVNwd8Dhf6HMZdTE69aHTb3n5ZkLYT5GWI9WY5EALh0n1/edhe/FqA8EcG8Mc96y+ttcsxuXAq9PKOtsVoa8r8SFir79bQaUcpRS6+aIv2Pn8xuvwkfX7LSgu+fBcsVLSr2jsIO1QAIqNwgw0Cmms9HiGEIEiffrhvCq7rhPzj0xGJU2nv37+3dvtpmkwvZRxH2y4W9LJqmubgXJ5TF6I2OT489iEej8eU0sJ+qARAqvClBvwXQf/nJvnfj+34YrT3O0fqR+YxfNks/d4P4Isz+Hd90avVLm1BQiMiIm+rawv1dG3UxaXJtNbaai6tgve+7yGlx7vHO8ceFaRUAPDe15RLTYzUtJaFTrO8+3D7D//wD/v9PqVJpLamJYNKRUSVWktKKdVjRtTofGut1VJKAZHgSGM/juPT03EcZ+S466KITKk+PJ3neZ5mqQ08QQhhtxti9PcP36MKaQOlfhhurw6EOk3pYTxRiKW20zi5EHznkZwiImDooiJN06StGoCzlOJjtLKlKpK3KnqpuaQZeo/Rh5ISNEFARiQAR9B3waQn2EEXXBeDYOuid4zBkWdqtcwpqbbow+nxwTMedj2AzrkCgAveeb87XBH52pS8d86hc7av+Rhc8FsntW4IW2al1YQQbjEEAsMqh+R9tLwfIsqGHUQkZFjjD9ECsDadwyrLjlDXAici6sVW8iNciHjRpbDtPnSZZtg8qpcTdSUmEREx0ZzlQ4CZNuFAa4SwvxIAbZg9u3wFq02uH4tASwV2VU5HAKaWKgASOQABEAXL5zvja90Y+Jacp77wKi5Xk5ro1KvYUoHQwfMd0sVtLoGjxeKbma/2NLe0Na4SKCVvZg8vGLQ3gWUGnefJ+9h1nQV8kWQqAtxUtbbGDNM08SqZBIjWiaiqTQXRgMxAgCBNFYjBEVdo01Rvbvvz+fz+qw9EWJucTqfQdwDADISutYYIh8MhhFDz1MUgLY9z8g7TrP+/f/m3/+1X3x6ubv71X/7z66/17u7+arcLvmulNoWmkuZ8OBweHx/xoF9//fXDw0NrzT6/67rj+aStEYTvv/++67r7H+53/S76kOfUD1etle9/+92HDx9Ky4ioZGBa2vau4PmVe3SZJf7px5f31T+tot/PT8b+zM9ZCWb/mBf3cy/6j/U5v9+XvnLPLTJ45b/XWudxEpFd3yPiw8PD
eDxFH1Ch5bJEaYAiUlI2lVrjon3//v233347DENK6Xg8wkoNZcIJW8OA9eJYcaK1BmjAzlZKmec0jnNOAuiZgqibx2Y6f9Yq3ve+6yIRSM0ESqDec9cFozJpKimlhlibpNpSrqnkJBURfexEwWqWhj7dhsKYQs0z2ORgXoYsL2jsgwNSQG3BuS5458g56LqwaqWKtIraHLFpEEprwfPQx7UJiWPo+36HwMwO3XMQagwg/PLYEBy6Iji241KE6NXxKlC7/O8FTgS2Z7QNyLbzvoo8fmReXdo8xM/04cGXnb/tRj779ld39BPDrFdz/uK/Rv5Cb8cBkY0+7fcoKOAXjt95/uXIbPv1q+eLiJfMdpdP00RCLPkNK3jp1fBcXsbrCFeNXxy8d4iwTQDvPTM0qSaSfnV1QNRS8rCL8zyWUqycrAjF/CSCOZVPPzzGYbi6Hu4eHolZEAy66b23CO/x8dGSRibDaRINJWUQIUAQ1da0CSo8PZ7Op6nWllIZz7P3cb8/fPbpb9mFt7Pi79Heq+P3qe19aTH83EXyx/qc3+/i7bicQGb2tnMsqXI+npi9Ya7GcUwptdYIGiiWUqQ1772aPGbOROR7H6S76rpf/OIX19fXOWeste/7rSzUWtMmrVapTZvgWpM3zkORlktJKZ2naZxSbUgcXNgButM53d2XOWOtlQi6noehj9GD5Fwys/rAQxd3w9B1AZjyXOecJPhc85jTVGpqEBwCWQKz1ipbRxQRESAjpVqtTkbsmNn0QkXEajW0SDQsXjMx9iHancU+DLsuBh89Rxd2fRi6jpFqyVbAL6XVlKbj0zDs0TuD/ABgCJ0PHTrHLrrgiYzDOnS+G+KwNC14h45p+0ckcJEwNIfeknYWyV1utRayfCb+QABwF9FYg4vdBAHgBbYTLqAuX5pXl997abo+e/KrdOh6vtO1h3q9TpDnQuDWPagIjAj0Qvfp+fNX7jUABEF4hrESrVQNCEvvmr39WYkX1AqRCEBLcuLiY7fv+Ow4yFtPegsZjSHTGGYQVHHrq1gHZFmScEEdvtzEepRawMwevGj5qLVaeRQvihSIanXZFxNhjbdJAUTJKoyLAiCyQ0CotYhoztky6q21/X7/3XefQohnmHLO+/3heH5EHPcH9t4TQWlqVGRTzr/5/tPtzT9fv3v32//4968/fiWiwXsiYsAE4L0/n8/Wh3A+n6MPc6kppWchLYBaaysFRMpc0jQNXQdN53HaDZ3vY87ZkDoWxFlGoYGoVW2/sNf9/YCV1cv93KF5a5P+5ozf22hvdeVXtxEdABgz7DRNv/jFu8DuP/7jN4/3D0PXeXJP90/exSUCqC2nNM+zqhrs8Pr6+urqahgG29mD9/v9fhpHC/Usutp+aWmpPInUUqslGHOt59MEFPpdh+AV/NM53z+Mn+6hucwMMbq+77xn1FKltpocuz52+/2u74Ii5JxzLU0VvSspz6k0BRfYxYBMrUqMvchkjrNzLvpg3nQpuqQNFz9gSZhsEDtL42001MzGtEJ97A7DLkYfY9h14Wo/9MGDVim1FSGFmvLpNEqeWwgikqZZag39Pvadcz50g/eevQMA1KVl0JJXny9A4vbCYvZwAbP82ITRF4zkb1oOvoz+1pdQz985ky+v4dlkvrF2l+cs/1199suvVv1M+/zlG199hVhL2edWECKu9SETFxfLVlx4CZsdvey3+6nrEb/w2mVK9vLWlrTwq1BYX2glvir4GUKX1oGy+VlKrQ0EABZPjohQ5bmmuxlRwMV1MykJVb0M+DefAxHGcVzYG0RqLTnPWoslnEspIbh5rm0Ado49S2ulqUNogHPKD0/Hm/0eAB8envouCNEwDNPpbBCtx/sHRDTi6a8/fsXMU5prytJ1jogAS8rzPIcQ4lUsuUmDELqmtRYJ3aITqYTW77KZ+Vez63Ju/7HycJ9/wl8+/joDzd8TyflfwObZsS2bjRWCmUGprMcwDM65kotp4sTgpC3e
pX3aRi/Sx3B9uGqerm4Ow9DnnKZxJCJiV2piRlUAaW1RbjDIdc1TWbJ2gjmXeZ5rbVUEnY/djjBOY324Hx8fT8djUwAUiNHvhhhDAK215ibJEURPfR92u95IHErOpQo573ys7ViaInO/P4S4I3K5lm4XZQaLw4Lz3nsAUUFHHgDMMKuqNFhAPTZ6AASISCKCqgzYahUR58IwDLvdboi+CzzELvpAAKUs1l0a1CqttMNuYMZSi6oawXQMPTAP+x2ztywWKjD7hUKaCfh1HlJVickIM5UWzbx1+39OML5EIb74fdVBwE0FCbY6nJGxmb6usWvST5qErwwbrkCJbeqtW8CLTBRalKNgyeNNgUHX/rb103ip3yHqKicIa61RcInmFhDq0optBCxrc/brJYAbrlV1bV6Tt3f6eeP35eMCPfSiaeF5zC1KeWu/YSH1RgBQIz9aa3uXByJsb94SM8zkndQVKYqIACoC/BLDiYBWk3zlChit2kLP78B7LyLzPFrXqSFF+77/dH/sYs/cTqdxdzgcp+OU6w1ijLHm2fTYgZwL/P0P91f7w7v3H56Ox68+3haAq6ur+Txu0vMAYO2GH96977pumiZbdD4GUyCax8mz29/s5jFN03QdD4A8TRMH8t7nmre1YOZ+oZx5Y+T+Og3PX/b4/aM9+PMavz/Ww3vlHT+7kIiwEozVIkulDeDdu3fjOE6ns9EgpTGlKYUQ5ikbR0nJyWDHh/1ut9tpdKawZS/GGKXWh4eHXb9o7FlyfyvmlVJ8YJSlOzDnXEURuet3TH6a69398fvvHs6jIkI3OFHt+hBjYIJSS63ZO7WG8X6IsfMWR+ZSGigTWbtdU+hiGIbBh74tDPtqdt16yIjIII/kAqw8/a21VnUVg33ezbdNmYjmOYfghm7Y90Pf9330XUchBNU2p5LTLKVB1VqrNvXe99HlVHJKIuJCXLFFzrTml7LiSrOiKx79VQVIABAv+EdenPFjiKrlocOFfXoZS731ivALXGU/8vmXH3KRn3wR0Fz2k+kFBkHWu3m+2jffaKbuRVXyxyOwi3aLi09eQj1VM/yfLwj9lPX+4l0vGjNeXMXFx74Yk89+xSt2ze0nM0srTdrCybMUkCXGGEsRpaqMrZoPIC8/dnuUhkPWi3ukdQgNHGt40bnMMUarkdda9/v9/dPRBxbx05SIIDKUAoJgyXmQ3EoGIPY4zeM8z+/evfvtf5znKfu+60Psuu5pvsdVWaLlgoin0+n6+tpYn2rK3vu4lgDHccTzGELwT08hujjEnOs0pv79DboZ1lI0LJIp/OqpXQZ8f4/2Lo/fv2/vbzfge3Wsteugq56kmR8R8d4TQMtlnmfbv6wO14q01mgFUHnvh2HYD71zDkMwe8HMnh2s7IKbtdvw9GwNZw6bSJOSckq1VBVFtBhjGqcf7sZPn47jBIBEHEDpcOW894QgUlQlOIqdC8HtD0OMXsmCFELH0LSKTLnkVpHBEoaI6JgxumeO7BUsoGvP4jYmlsQF0aWrS0EuKKkQgBDnCfqed7udqQL1XegieuekpZRSTskpisg85daadwHalFJKKRGyd8G50HVdNxycc7lKlYaC5Gm72gVVawgZQiBUA1u+fIJvZstlmvEizlsIV55XI5v9u5gJ1sumBu59lmxdAiH8IrvK55f3Np6vzimlvLJtS5vaEtrShSl6jo0Q2Pa4Ld+r9LuvYRsOXVN4CotqoqoubJmA661vVwuwRsBwES9u4eOPLqoXcR4AqJpn+ZmusucrvEj0yqU5v6ibMnGrudVqursbzmVRXVcQRV052+y7L0064gLqtbtYW21tRhkHLwKodUq0Jmb2LD2DSPv9vlYjAuWU0u3t9Xx6tCb6LkRtfCo55UzCXYjH03jou93u8Pj4uO/iOI67frg/fzqfz/thJyLamvf+/v7eOjGsvaHbDSGE6P04zzWXp6enDx8+lJyPj0/e3wKjbU1mno3iT0E3vr7L8fzrNDl/
Dccf2rf3pdn/x3Iu/nRJzldoqI3X33ryFnEs70+nk+Ul0jTP82wlblOVhLU/L4RwOByMT3YBZZUCAPM8393dTdO02+02wCSsqVT7Rkt3WBRosY4d05ge7p/u7o7TBMTQdQOhy7mauph5eY4gBBdjjDF2XTAXFRHZkwseEWtrKaVWbVWwcYuSc33fbyOwKdLZOllyki9jkVfb0+V41grGpRJXnmhv1XvmzYiKQErJZOjNyVVtIQQTS7u+vv7qq6+An0HYyM+XdPnzbdgHX+i0++nT5hX+Uy+QnG9hnD+yj+gXDstctTdHfXPYi6++63KWXkZsdnwJufo7J//bX16U1v4Ako4vjcNnv+Xt769u7fL37ee27rb/quo4lnmWnGsppcmyyuhzV/5q/rw9oVZJaaF92cRd53k+n899H42JfrfbzXO+ubnB1TEKIfguAlBKaZrnruvO5/M4jsMwPD09qerT05ORDZVSbKuxZXI8Ho/Ho3POdh5ocrkXpZS6rmPmcZxrrQ7J8J/bgtWXrcaffXB/t3+vDvelEfmR/qSfdbQ3vBWvTtjWwXJB9EV/8PM3sJ7/arkqPGfwrWfJFpCKba+rY4/EjMwwjWkYBiQcz0/TlBA5xs65MJU5BJIqjiD2w+Onu9Pdp4PnGHE6n1oqfQyHq32MrrSSctrBDgu0lussvotX3T7n/HT3lGtZxX1UwPSapTU5HX8QsStkqTHNAuxjOMzp9Jvvnu4eYDc4F/xpPDvvP/7yHcG9gooWRmWE4GIfuO+7Xb8HoNKkVUgFSgah4PtYniYfDh1z3/eMkYCcgBPFWvP5DABd1yFTlVYVchOniIilgKSmAAzOoTqspTVo0nmfW641d44YSdPY7YE4cczDHocIJT1hwdh3dR7b+XQ1dF2I/3b3b0/Hu12/b43vz5XdPkbyvnfd4erwvhsOIsTkmSE6cM51ITruyHU+9sSe2FtXtYrxRzlEIvQIRBgIScFU3wiUZOHPvJx1DS6UEIjosgFjoR6+xALYRKI15lrToUvn3GR4yCXXC7CU71ba4qaqTVprTUw/rzQAEEKLf6pKBZMdYDBYrAIiO0UAJMSGYKwrCCDLVVhOThBBtx5CVCBUBKe8XY8goEmtqqJnAG1QRVHF4icAAMcVwLhGFVBUlRRVrQGMECClNM+jVV5DCDKPiMjk3ZL9VmJH3tdm9SRCRLECmpUxNWwbMV2aMV3SmxuQxAbcyfKYtkCPlpql2zrHW2sCTRQVSGpC4N1wNZ6PzP44V+ChYn9MIMyIXhUcSK1VqhBAIEAEE3YvWgFImRgp1YwgDFCkOkBySASg9XRq+30/5TSX8vHj+++/v8ul7Yab77//4Z//+Z+//+GHyL6U5qLbdeywIkKaxuuvPrSUdn04gvTe7Xo/T2PvKee8i/tvf/mru6fzro9j1a+vvjniMT3lEILzvtUW4mGa5WksFHbs3VRAJ/FuFzyO4xgLP/36/h//x6/mPD98f//NL7+WVqbzqT90DECEImgCvJ6YkQuLvnGSLvv2Xu3DX9pgv7zx/jy78FIK7HcfP9/f4p/5OQB/OKTlD3z9T328dX8+6xC99Uy3xiDvveE5SynAuDlW8zwj4jAMFt9skVMDNcC9TTVz5HUBWK8RQK055zmltlA8OxHNuSHysN+lpE9PT7/5zXettRCgScVGu10f+845l8aEiI7Re9dFPwzDbtd3XYeIYgXCNVa7TFpaWOlCsBHYAppLz3cRLgCvalKfpQmscUnbBmoZRgVkZSXV5oPrQvTsENUTq5aUppoSALRSjylP02T1DABAx4rI7Nn7YRj63RBjVCAjtdpaET4b2L0N8pZfFjLlpfUbLta2ftnrehFwvPnMWturiGoBRGgDIFDRFTKKqoqgKQNAg7UDchFpWo7SpKkoQCOCRQtp+VhSIkDSlbgMXwfWeok+xeftbL33z68yfMnI9flDCWDJQNRaN5isKbqZeIgHmz9CAAJkLpqz7UNJsSmw
tqZLjhBobUrHV2w1F/p5Px43Xz4dvWjlt3dtU+Lyc15u4mtGFJbuiOVPqAjbRS0lBoTtksDGMoQltd6aTNM0DIPlY2zJW8nZ8kFEdDweYwRmLil7ZittmMfQUtquUHRhCyqlBLdw65RSwBQEEQy/HcJSVjeVok3ncp7n89OxgQC5nLNiba1pW/wMXBtRlnQ6/SQzo/+96cr+HH17f7Yq3Y9c50USAODCFdqyBLDBolpDxE3mw5pspilJrkqaczYDkVLqQhyGvfceFtVQZHZNBZDIsaqWpTG92FS2OEOb1FzSNKecWqmGtqsCqTTLex6P5++///77T3m/g77nnJtq6/td7GNrVQGI1DnX9/1+1+92u76PIYTSams1pZSLiIoANNWtI957H2N0IW5ZO8vVMD5D3SxXCeJFRBWZuUitYi3tYHy9oG3BToIwsGPyDvZDf7UfomcmJAJtUHIuKSFqLvPp6ZjmOYbgA5eSpKlzRN71u2G33/e7nQs+V8C18W6BbjLh2vH+pawUAKA2QieigKYngUu0txSpnufAyp8CW9+CoVkIXlA8b/uxwQ3IRAUtvWaZA2jWSQfPFSMFwtaKqhrHis0jXXoBfWstSTWhNvSBPCIjewebcDsAAq2MWc9UztsGps/2TdcK2aIHpC9pLb+UU4FnTOlGML2sBPtFRJeqIbL30fLSteYeqypUZghKjhfGzdo4eAUEQlABFFwpsIk8mDSVrm7Es9S8ktXTNtE++/N6j68uW9cJvFUHzCZv0+FldnR9pus4WL9CA11VJJY5YZycy3zQrZINS30gsKp6YoF6Po8xxqHfl1KYcZ7Hrt+N4ziOY63SdcN4Ol/te2aepqm/uWk1ee9bmp1zkrOq5lpTKZ4BVgN2uNqFEKzSD4jOOSQspUzTZEUK2yiY2RE5IkTMOT89PYU+BnA5Z3KL20pKyGQAWFUVqaoN/O+IxvSyCfW/q+X72QoMb0/4cSOHv+v8n3j83sXCS5sHLx3Di/xVM6KyDW+yunXCzFIbiorIOKd5nEjBe89IWx1r3eoAEWuTpk1KhjXYMrNnKTGrvZdSWi4mideUa5FaBYBa06fH093d/cNDIgAiYO8BAJmItdaSc77qAyJG77uuW8RXgyciKVJqzbmW2oTQJDGNReKS0NLonayKYBjOS2k6733NBpdQdAzVQhZBBJPcBABSZQBC8I6id++v07ubq/2uZwSSxqgKWkpBacw4nfPpdCKi/X5v3nHJcjhcd32/O+zj0JPjBti0ATFitV4Ocvy2b+HNBNgivKbPlFrw41HE24BDVrjfK7O3vWg+kaEeCVFVAMQShIL2U6BBbXl1oeqaElAAVGwi0KCJIjh2VtEhT7zEEJa2XHq4kam9+P7PXj++ifAuB4cBF3ZKBcLXH4FL/4PAwmxpuP3G7GEhKsNAEUxWp2lKZ2mNiKC0EIKlU6UJSANCJTZTBIpECEitVFIQ0O1nQyAFsZ9vGFAvwSavinyvyhZL7G91atTLAVnGZNXhW23eZgkVLyinEZFARAWgIYDFr4I+sHOMClJrNUTVPDcDrJVSYoyllN3BxRgtr9P3/TRNzAyKc0rM3AqGEM/TrIIhBKg1pZS8c33H3jWVaZpk31tVeyvoOl6otIdhsOlWV7VLIgoxtlbmeY4xMJGURs61potKzCaguPV3vuGTezlzXhi8/7Y1vz9T395fJOBblsnLNMirF7dMYPBLv9pm9ow0CAqoonOuljKOc0pliF2M0Q9he++C2SZUUEGqOc8ls6kRkWOCVpUXsEyrqbbcQNChQ4Ix5XnKqsjs5lR/+HR//5ARYb8HVWiSur5zzqnUUidV7boOAByRcXd573GBoixInCYKyIrYVKuAZ9ryKiCVVoyctIqgBqxhy7ctLnQzOMuWZlFCcmpfBKpIygqOYRfdMHS/+Gb34ebQLxROjUCt2kCoBJhz1tp2w7Dr4jjOZU4NvO/i1c314fo29r0giIhtz0iEq5ro1rT3
NtVJuiZytIEYx0ZDQBBBIrRa1+bJbiu8CVzurWvrgsJrs7csDLZ9QUBAtKmC2jVuVlNUQEFUoKECaNWF+bqaFjGggNIZvVHAuhAoeBc8B4/EiGRfbKBDUps/RPpswm0LW7UXtjjv89CMNbx5fRCAXKZCTYzJzOxy3wpAhChaRcAQgd5HEUFwKT1IqaU18E2lMTsCVCqtIgIjEzpGcugYmQhdFRETulsqrotL8sLEXfQsLiwqut3PKodr0EpUQF0kY+2t23kX2U4RYQZsz0b++aMUDb6La+ejcazqEnE+l3WZ2YcwzU+tVe+d987nBk1EKzt0zqWUWil9jK0BMxBBjH7bBKwFlr1XglzL1dDl8TznNM4Uog8+qEIudZrnvu/7YcilGOuTAbhSLaUUXzwioluEHpnZITC7mouIqGAphSLN89zvOhEiQAAkwmcA9htagFfG7++hHvwh0R78NCOHP+H8n3L8IQ/p0s7hm74TC/IoLkhoi/w2Kuo85ZaLYwbRPM2IOPS9cy74sBFaGuGvNFHVqpxbba0pERN6dqrKjcXa8qY5z+mZBrPJ6TinlIg4eBzP8+NjLgWGARR5HBs09Z6853meAVrXRTNFhng2UhiRmmuZU0q1NJEGgICiaG2+zi9aCrqqm6rCJabUYGOqWsVySqgry8XSw4u4dNG1plJxtXnXV7t3Nze//Kbb9YMDxVZBWpWGrWotIK21VlMmoq7rLGlGRPFwOFzfHm7f7fZ7IK+mjgrUtCgh2H+ZkQmf5R/e1vnWTRQFgFGfbcOXHFi5qPldnvmiRggAooqAChw8iIqqxURbEUzQQgRB0aoCi+qUAICKtFaMfG772MrMjOzDwjoTIjIDmaGmLUUpqzSg0zV7+SITuMQ32zhcAlBfGT8FYEC9qO3RCwS/qau/CAOXrxIrXS9FX+8jkcMxVJNZzhVq85551X8FQmZGduiYfCAiIVQIC35mTQvbhUttZoNeFR2357I9X7xowwd4Yd62d11Q9shq9piUQF6GMqpbi+fLwqcyLqqslrFuIkq4eIcAqsrMMTpLP3ZdR1QAYBxPNzfvLK8zTVMIQXVRaJnGtBsCIcTYlVKY94jYms5znuMcnWdmcjpNkwGe+75PKeWckanve0Q0/Rbza2utNjdSSt77lktOKacEKC76PKWrqyuAitbF8VK2V1/28+jL3lP9HKnCf7fjz9e392cO+H5nkvPyr2YPtt8v68nj8ZRTqkVyrlLbELvdcFARY1gARUK27i7j8RpbgSZItEGQ7dsNFzOdzzUXQzzknGsp01wACJTPY346nUoFKyc1Be8BGQikSQKsweHQe0PHhei6PsQ+sKM8tWmaUkq1LWBFc58VAVaCSkagzTC0WvNspzGhd+y8r7VqSSXNCNHStRuoBwCIUaS1WrGBYwgE+y68u959/eH6422vrUItwAQqklNNM2iT2lopucxMBADH43Gapt3ucPXx66uba9/1yqwWSiITktbyXEpjGwRABkAGZDWu4IuDVEkVZenvesZ7yLPDf/nL5fa6vL6VeS8sH6mlJkHFhBObNlHQJkbfj00LmSkUsbqT6tL/LyKwZjgvLDURMSHbL+trTi1hvE5CIVoCHqmIa0/3qrGuqkhotGG4KO0983MBgBIqbKCYdeYvg/JiMCyOUpDl83WDgSxXBiCbwA0z7fphHqdWj7U2LTl6dsTGJwCIyg6IyDGkhEyIBH7ArdvyMketSzUU18DtMgoHw2VcxKxLm79pAiqo6HMWz6YuPxdllzUrK65mPURhQ35sA2PlfefJwKFSLc8JItAAg/NCrE0QKQRvcJKbw9VEU3A+pXR1tY8egqPz8fThw/vWQKQ6506n02H/gUB3u53kqUgz/qA55+Pp5In3+yHEfp6eeBqRyQXvgp/SrPNs7Ux5mq0bChGtlQVUT8fj9fV1qSWlNM8zMkjTaUwlN0VAEhXLFawI5It4F17iof4e6m3HHyoz+3ODxb/UcL/63m1j
2so8m+986VSKSJrnPKfWtOWCon3XdTGaEp733rvgnKuydFy11sZxdM51PhCzqqZWpVRpLeVcTFqvVFJorc3TNE2TQoyhV9XzfJzGhghEUKsoYojEzimUkpuCRh/YAQMTUQjBtOgMEjZNU261CQgikhMgARAgpaW96dKbtprf5gRsPXaqWmsNvkMEJAUhWEVwSKGJaAMVYIJAEANd9+H2sNtFmqYitSF7Uikl5zShgraWc665eB9b09NpLKV+eL8/3Fz7flDAUgWYWAmYQRVXG3AJ5vzxmbM+qWUzJFoqbVZJYsClnkRICk0aAwohAyohKahV5mxj1EZKaO34KqCktaigagUx4AOhCiJDTU1RpGqDJkUaiFYj2QF55l4hi1mtSLyqA6qRJTMBAFthcgGyWO/AcpsvA9Y1VLNw8CIkwgWY3r40PgCwGUO82A1fb4Xr62jCPwBwIX4UYvTOoWhOGUvByoLkiN0QERhaVWKpBMRiPOCFFoY/fkarCoJZKfNc1idttdVnn+PyFjbipO06F1V690La6aXb+np62EcLgAC4i4ElkMBO0bWSBUAMsdyWSt4mosLe5awAk5E3xQ7np3Q4HGIM3vu7u8zMAIIKhDKeTohMhH1PY8mlFMtXlFKmaT6H0O/7GOP5WFNK1uRqGBYjvHbO5Zwb6LYSLWs6j9P14UqbaBXJIh0wsGWLoIhlymmdKPYkL1fHZ/e9P8H++jd2PJu9V8HQtoA/GyS9Pb6UXPq5Onk/95lsFstylVuMv+XHXuWycsm6EuDaQmJm48SzttDW2tXVMI7j4XA4Ho8l1avd1ffffz+dx6+++qaP3f39fSnlandIKY3jSEQuRBGwhlY37EMIhJRzbaWWUtI0pWkGgDwlEWD2ZU7H46mkzMzD7mCyk9M8G3IEGIhpHOXD1S4E9/T0AAK3N/vY+ZRSVry6urq+PvR9rDUbs8OSh1QEkdxaFREgEalNQnTffPXheDyXnPdXXWutERNgms77oRv2V1aiT/PUark67EvhUhKy++HuzgU/5RRjX1MeZzlE8ASQAQCud/37m/2hcypl13WttZpnENkNHWh7vH/YD8M4jiH2XTecp5RKfffu/dff/oM/XDvngUzMzytQFVGF3CozMy24mwWDQ2TOL74pxyKICqpsLcwkqgAkQEsOb63GKYIosGOD0crFX1EBqIEKgIiKghi7ISmKglYRadoUyeIikaaqkyqqNq1qGoqtFZBnzCGqIjzzaKecLXW2uBfMrgoBuujMjAlolYX+5lmuDxgQkGHtYQdAISIExpWMY7EEqJcOwubGoWzkOwZjWV8nsVVmg6arPfE+qiqu8MvtSoDczfW78Xg6Pz40aZWRBaIPkibnAzkWIBeijz2qlpIoOiUy+WNwa8CHINoQUd+02F8iMi6T1JeKVyoCIqhqdDWllC70Mcanp6dhtx/H8f7+fprmImy4aFU1rCYjtLY8Guo6x1RrVWi08Hg1YvSopcCclDl5H2/2/unpqbbqPDnidzd7IPzuu9/+8pe/+vd///ehj//yf/x///f/8//l//Z//3/cXtP333/3f/rnf/73f//3XR+XDnSkVosPodS6i7E1vr29+e63DyHMj0+n9+8/xqE/nk7o+ObmhrzbX19JqU9PT+/fv7dhqSm3WEIIITgi+vrj+zyPTcQR39/d3Xy4meesivf3Dx+//ui9f3i42+37YRhOp+N+v6+6gNIvE5uX++324qtl9ePb8k/Zh3/WPv+nRtP8+I243+/Nfz3B3Ntv/P2uYStNm2e3BYIO6fR0zHM6DLvg/PHpaRzHq6sr42ExtOc8z4bPNOeOAZXIbF7NeZqmeZwIsLWmIqXUaZ5qrVbGGud5HMfzecoZQIEIgFBV+x5rzYhtGIaqudSkc2Hm26vbvu+tU7DWRZoZCFGwlppqVXTIrICAYH0Odi9mTkopInXFJ8Jlc+HaX6itVS3aWpGKIlBrLdK6CAAAArsBvn6//+bjzbur
/dUutjKJaJOirRJSa0KABnsLvqso45xbk6vr29t3H1zsDGAoBsFfgfiL76JARJ7sARBfZFl/5KmtS7cBIBr2xP4EABe8yLiC/wzMsf3UpYdajbzZGhAEgCzMUiUEUGAAEUDVkusGt9uOrS3EqJOZPACgY1IqpdjdiYg2gdIUiqqWNjL7RTgQYFELBQSy2t7ShwEAiEzUFlq4rZtiM0s/Ojhg1U9Ym991xY5cRgOIiAsmAtXaVNYRUwXvXPNW5ys5tVSxNXUBug6auBgAqdpEQoYmwEWRra9RtSE5IQUldAjISAICgKrIBvmBn7b7XQbBl0ma1ppVyL70RstkbBAYsnYKqYQe0WqrZvfB0FVGX8C86E0iYmk1pQQAMcYpp3meAaD3ywC21g6HA0gNIT49nn7x1buxFeecNHXO8bD3jpBgLtmn+XQ+vzsc5nkex9GyNaWU8zSZA22BpvU2bNSgDvk4zezdPM/X72+OT+f+IJ5DnvI8zuxda1pyq2HDD//YZPjSJvnfLf/5Y2bvr7x6d3m8wjL8HheAKzWXZWNUdZqm8/lMWUtpiMxECyFnFa2yMvdTrXVKcy5FkIgoTxmKWtRYa5Ui0mw7BWZfq85znqakCkQsgMfxPKd5ztAaeA8uMCKLyCUNUhN3Op0E9Oqwe//+1jnnGEXqwme9UAlDkValkfNqEHNC77v9fi8iteYQgiNIC1NMtTA3embU1NozC5eZ/7osaURoIqrgPLQZHMFhH775cHt7c9UHYC1NaspZpDJSA62l1JyJKJXGPmgrc569i+8+fLx+/xWQ04XRn+GCTVJVGZgYmK1jiR0xETl8vbmrKqEa7BMAQJuKbO14vGDWP7f46flBX74srcKaENOV7B8VmoA2gzooIrY1ydbSLCKW1jborMkW5vqc3vQe0DE2JYJSs4IQQmVamqyaKBYOXkVJmJgU0cwsAjR9ztaZlTPMfqmy1b1UFuul+hnSjM2r/8wAqD4r8SGgAQEVAbC1YuYQl9ZAXBFghOzIuaXgNE1aq7okOWnfg+6InTbNDcB5ABCciRwIKjcUJHTIAEBITkFWcK0AisGHtr5JvMi4bte/NjUKgCIIIrTVV7MsaK3VCttfXNfkSGUlUl/YYmCpBhAAQRMxChmBXJs2ZQRkx0jEQMxtainnWssw9KWU6TyK1t2eShFCyLleXd3kaSx5fnx8/KdffpMSE2qrmZlDFwgkBKgV5nm+f3r8sH/vgj8+Pp1Op48fP4YQijRBeHx8/PDhAyrM45S73uZ/F6JnLqUI6HQev/3lP9w9PSLT4d3VNM/jeQ5DhLXDHYzr4OVjfwVggQunYTsBLryKNzn2/5rHF83eWxv2+xm/nzuEf0gc+QzX/PlmbyPf28jxpml6enralS6wE+fPx1Mh6kP05MZxtG+pAimlOaVSihIzc0kVqxQmw/ghYhcjE0ltqppSSjk3Ee+cAkw5mdRq7EAFmMn7gIgiYJQNzAscAxGHYXj37p0RsoA+8zqqqgCWls09pOBzlVwqso99CCHM82zlBFVduqpVAcD6+XAFzlmlQVWQVGtDJhV1jpsAornz0nXw/ub6/bvbIXCrKUsGbLVWbdU5X6Wdz+eSjDCQc2lzzoC8O1wdrm99jLUIq+qCKScAUgED+AAAKjGgM7OHxCvI4dUzfU7gKCiuYasColYAwfjZ53vJMno5nZa05qZuI6vZaw2aPHcyixpEs85JRHKrpZS8toyIyJzrokfvmBwrO0EoKgCiTQokUtDaNFR0nohJtFEFZmTjg0NlInLgl/t9Tlcu925xqG6x2hIKfyEe/kxSCwAQDQKxaq2aYUMiEEFAoWekDAIIImkFEEAkIKeKtVbIRVTHnMwyse+UG3KjICbaJViVWVzjyuoaLX3VAIgiCNQQEaBt2/DPWqf2HK2cwcxFZKPA/eK2gLBRigd2iqCtmnUnXXsVUZtKKpkohhCMG7W1Zj1CpZTz+bzb7UIIT09PtprmeYoRx3Hc
7XZXV1efvp9EpORGRK2oiFTVyKSi/W6XplFAj8fj3R1aPmkcx1KKQTqJ6Onp6fb2lgCNsN6w06UUz7Dr+vM8zfNsMLqcC5MnqqU0iynXLeJFNWebBq+qPDYOVg2FF/mSz7Aa/Vc9fmqS86854Hv1XPElB93P+hxTC7LfLXXZ+WAsTaoqTdERM7RSAbm1lmqptTZRADKoCDQSaFKrrGwv9nOuNad0Pp/nktmxD2EueZpTrjAMLjhfq9TS1sIkmAFm1PF0qjV1Xffu5ubmcNNqtrtrrYk20SUeKaWQC8F3Qn4qs8kd+NhJq/M05jnpTqxOzojaGkhjBOte11Ycge312pqVTxCxtUqOa6oAQIDdAB/fHb76+P76augYsaXWtLZkvBIikHM2E0voQuhOp3Mu7XB9e/PhqzDsBZxgdUqoRKuUHQCAoSThuSLrjIpDAbW1NaH36kkhiFr34ZqWBBQCFHT4OY+tlfri9c3sUTUAZ7MckakNq2pdICpG2K/G8V9bTbm1lmtJKaXSSilNpakAOSIKsY9DH7roXEAiBQiepam2mmrT4qBk8IHJZ5jW9sS1W8M5Zq+7yMzMfiUK1ZWY03x2AHjuwcG37v3FSlRVXLrinss5DRoorO0QYKMMqAiKi3AhLF8NoqIiSsoKz5XyVmtW0VpM/SF2qNyEqhfA0KlkRARhUVZhUmZmQCxrc4pu/NELd4p8/vovH7rln+2xaEPV1sToh0yi8kfMXhElVZFFQMM5B4oNVFUZ1EgqbChUsZS2+n9pmiYA6EKMna/NnY+nXT90XWSmPM2eWAScc+fzeb/f33791W9/8599359OJx8w5wyqKaXoCaTtdjtHrNpSLr/5zW9+9atfHa6vnp6ejufT1dXVbrdj5uPDY0opOA8AJefqvXNuniZkvr65Gn87a2u//fV3H775GgShqVFa11qdd6qltVakzSXjCt6+tGdf2u6+dM5/bZsHPyXagz/M+P2po71LL+ayWvtzzZ7Ropt/V+viCoUQIsbf/vo3KaWb62toejqdQJSIxmkupRQjayJGxFZbKSVSB6KqsmGxzEQZ+PP0dAQA770SVmm5gSowYwhOJDepKtW5YLUW68kbx5kITMSViFKtlwW51lo1HLaIj845N+ZaSkH2se9CCDlPZrNV1SRLiNxCwEgLm/t2QimlNRXRlBK4UEpxIbYm1iS83+/fvbvd74fgCLVqqyqSa7Yxz62mlKoldVHnOY1TIhcO1zf7qxt2QQSc75fnsmQ4TeHarBuyKe0t6U1g3OpQLx63qeUttTSDK1jAh9oAlBu8RPrhRQPD5Z/sv7JGUaq69NtZhc/Yj7fcb13YdjTnWuuczOpJFbFGkf1+73yMQ98NOx+CCUqoKoWgtdUsKrW0qrVJKo4YVQkXGn5gcj74LoKrE1TvfQgLv6uqIuAFvmNVjMOl0te+YDbWWPF1CkREEW0IcRmApZhnNTd70zMNJlUV1WaUlYgEWForrWqrKJOqNkH0ASioMAChNbxy48bNsRMnbg0sFi5SUoO2Lo1zLwXQL67/MmDdUhS1VkfYpFmW3lbWj9T2RAQBmyrBIhsJC0i5oXPbx5o9ttKaKUtbykdV2TmTbk4phRiGYai1hhAsBZJzOp/PAMDsY3TjOHYaVNXKH1VAm/RdVFUAmcfpdKomXj3P8zzPVqfPOdsrEBQRU0qm1TBNk4txt9v1IWZtd3d377766MhPU/JDOE3Hec4DR5FWK1Sp0zR13fDZnfnt/vlK7/5VzPdf2/L97gaGv6GA7w/5XnMYzR7UmjeIyng8j+OISIScyjTPMwiq6ng+i6ogXeJfrPrDBGhtuU1SyiYvl0o+nU45S997clxanVMqDUIAZrYES87KtCxyZi6ltJKY8XDYX13deBdLbswLBrVZni3nJqooljnJrR7PY65td7g5HA4+xnF8EhF2hLTqpKOWkqyijiqt5M08l1JU2T6fTYttdbpvbq4+3Fy9u7nuggdpgEKg
Jc8cgvcetI3jOM+5SgPLQYF0Xbc73B4OV8y+NVUgxwxaXz45MpShIW4M0uIXHjWDQHz+cS+FSOPUlgoAC/uMZrhoZza8iCUpl+kBC3PVMk+awCYKYBZUlRRqKUa2osbBUWrOueWieaq15mpEpRxjDF3Pzu2vr4g9d8GHjrxDWsIIQQAgBG5ZtLSaZhHNVqBcvxAI2TkXg/d+vtp3Xdf3OYSwsFyiY2YbWNHnnpOlWsd2U2+GqDZ4qX/CSAqg3nrnrHoHCkZtoq1VRJQF21U3/SlqjqTN81ybIjCzJyJttbU2l7lIEyXXqbKKogL64BBRKwozi1ticSaxrKyQOkarKQigJR7WK9TLxXvx3yXOXR+RIslKcptzPp/P49i+BAFHYMsPyzLWbPPTuIlkbXMgx6IIqsfHqY/RULiqYsSFOWfHYZqm0MX9fi+iXdd1HlAUBVoup9Npv9+nNArCOI5XQ1drJkLnXCrJ9gfnGJkU4Yf7e0Tsus42Gc9ORN6/f39/f48KMUZDlR8Oh3EcpTrLo9bTUUSOx+O7oRvH8WYX5znP89z1voFFtJJzDqG73Aa38XybMnmLEoLfK2D4Wzx+at/eXz+k8w+0fObb2lI3z86SJ58+PQHA0PUppdPpZD723d3dEjCFuFWMl22uVPFGrE4V0T5tmqamRrwJRl2dUkqpNYEhMjPWWnOW1sAxEIHV9s7nc6u63/fv3r07HA5Wehw6vwoYLWDCJgoEzjtEzCmP4wjsuq7rdgOT34g3LWlpg1Nr3R12IYSt59eyna01Im9pNDM8zjkiRNBvvvnm483+9jBEJ45bJGqSC0jf9yGEWtLxeEwpiQgqtdac9/v9zfX7D323K2ZuvcNll16e0uWDY0SCpexhTet8Cej/XNPeZqr0IpKrWi+L9s9R3RvSpstvf44nFoCg5pyhiVgwWVvOuaZcStE8maUlRBfCbrcbrq5j15EPyoTsiBnYEZFaD1xroIDOUdNSRaSU1KS1mqplxXPOpSkicvDee/erby2e6LrOuNkch7dmzzknAsxMn9ddWaLbyz5CSyOLKNHaJk/PKdIFybl4VGVjIMLKIG2esmG4rMOMAGpOrbaqwi4qO1ACLEiMBIjGJ7YAVAiBlRfEEAIJWSPC8pgu+vO+FGdcPqPt7uwwpYhUAMLnx8HQwJdqGCoCTdA0UuzbSYkIgVT1eGq3t6UPwXI/sCpLd4fBYsoYYynVOefcghEFgPP5fNjvT6fH2MVxGj/eXotU59h7P49qxWDvIzMDw93dOTj34cOHy1rM1dXV999/nwFjjDlnw9bZ9Z6Pp9sPH57OJ6ssvvv6o/Us2fKHtaxjXbqvjNaroBk+x9sCL5Ev/7XjPDtc0yUXbP6vwpb3X3Igz7BvtALK77Bzr7wMhJ/Xt/dluZTPv07LHie4gbFNmE0J0UgBTXBEDNqOUggRmK2tzUiivfegNJ7Ph92VzPX97ub/9f/+f0ppcky1tpubWwB4eHiYUrbZlkFyEwa05EWesoFcuhiVjujalOvj02nKQOgLxAx6d/cwTfDtL969e/f+/v5uPOXr/RDehamEnPLpdEoTOAfOLR3ookVUQ0/DoXeRqubWUFXnueScx3Ga5znVJuDRE5Afrq9+/evvH05H50Ichs6TF5E65Uw50+3tbZPqnOv77tPDd67D2MP1zVBrfXxqBtcuuXkXNe6PDw9MDkvbOU13x9sOvn4//OqGhpD3gTvPNU0/HM/O4/7d7ZXU8fFpPI4wJaq1FRWGGHaH23c31+/3t7eEIdeCQA49KaljdV6ZF44vFSRhBHQCTpChOc2kwBpsh+UG0BRwhYDaMwVTa2mlAgAas3ZrRFSXoJGcc2DS8IQAS7s0bFCRdR+cpBn+E6toSVCbSmUB1owKoq3UUvKUU8otC8h9ciEMoe+6rot9xzG2LmbnyDEigluaD4HISnKOQoMqWDiyc6HgPJenOs5tSnmc5qdjPk8t5TynZGXRr7/68OHDL//x
V7uvvx4Oe0QsKqoagmNmcAwAjYCZnffOuTIvwh1EBEqbnSulwdodsuQWrW6qNmKWagZnGVTEdjotkVlrVKqk1OYEteq+a63FCNcfblIK49k1T2lOKdB8OpfzeMz5Og/X+0PA4NqR9JaI0DGDYwYHSOgIYZpGZCL24hx5p1bUROSKImKBJAfPzNXaH9m04wkR1wx0U9WBJaWZHCuFjDFDd2rUH/hhbKqNAJxCQxCFxg6RJY3eeyZtBeaSg/PETl2XBVrD3W4/16fr/eHu7t5HPlwNT1AeFUTB7Q+axynNXRe++ngrpRyPx338BtH/x69/HUJ3GIb7+yMH7LrwL//6n//X//1/OsaSxy66WtJuCPd3d7fX+xjj/d0DAOy/evd0+k/ub87H0//x/bHQ8NW7a0KtNV8N8enpu5urWGud50dkfTrd4/d4e3srjo7Hc5fnd+9u//Xf/61zBNOEJben868+fD2N+e63x4/ffC1C40Q+hnFeuHueCbtRAcAt+7AAANJCb4SIjraQenMuGWDr7cE3++26T79BVHxh3/5SD9KXatJfTNrDH+Owz3Gv/v8TTf3ftEewmke8HHrn3DAMj3dPnfP39/fMPJ5G8yVtcBatEIDFwZzLMAwG+GxtaZmfpqnrIKU0TiYrC7XVccrjOItIjISI4zimNHnPVpSe53o+T6W0vifvvWqrtRGhqh6GYX+9v7267vd9cC7XWnN+Op1aa6XUZvQrgITOIC1iTv0q3Q4A1tJkNiCXJW2FwMYKSEQbYlzXXrT+EKLzFTMiRO/jVb06DB8/3O66bui9I8zzCDX1Qxcco+p5muZUirQGisjI6rwPXby+vt5fHbqukwbN0AOoRAsL5dv5gxc8k5dUnLCuye06X9RyL0pRny3u6trHZ34xXHz1MuFV1Ko6KtYWrSLQtJYKIKIt55xSySXnWlXVdzsfQt/33dDHGNk7YxBdiLOfs6uL2QMF9EwVmARJNAZXehGppahRw3WBiMg7ij7k7jfn03kaf/3puw8fPnz77bdfffP17nBwwVuoUUWaSlUhx8bI2jrw7HwMnh0g25wm68MTFTC8a2N0jZTRKbVlHAy+I2rglZwzKdRa52kax7HloqqMxM1vdGum5iExsgKCOMAMWMZ5HEdQVd3t+yHnzMxkz7uiIrAiOrZI2mRJsLa2VBkRhbf9ZOmiAYAtOl+Z554l96CBsdqg1FqP4/l8Pm96kG8Pi6gUdElpt0bMIYRWrZw27Xa7WqtzvM4fmOf5+nDw3nsHLJDzTCGoqvf+/v7+m2++VV3wk4hwdXU1TVMXIOe82+1ODz/YbalijNFYL/I89X3fWtvv9/M4qmrOcj6f066LQ8fkCdVVL7yQPSksgICccxh684M33YbT6dQNfa2V7ORaU0rYeMO4GibOCpnbLrf6Q0ZWoOvaWvpEL5fhK3v2X/JwG+3fFuS+tMMXMe8CQ/hM5//f1rFtpttGaQGfd+679H3puvv7eyssp2mutc7zrKsOnyUYTZokxoiI8zwD0DAMiHg+n0VgHOdxytKoNT2fpuNpylmY2M4fx7G11neBCOc5q7rWWnBwfdg5547HpyYao6+17Ibu9vowdNExMgKjVmnn87ghMJCYiYkZmKdpsiI/O2e62Kp6Pp9zzvv9npnb3DbN68BhiAMjLkBwJBBttUpr0OwGixL0Mex3+4/vbz68v+m7MHReSprHsyftdr1nnMfpNE7WPlhFhJCdi/0w7A7Xtzf9bhdCTFmyVGkqqA2aI//K7OHaQXHBL/xsAhfJvWf6TDVsBT4X48TgPbjshi8Mm8iiB7toP73EswAAtgpKIlWbgIi2YtJ4Bv+pUnLOc8mtNQFApGG/izEOwxD7zntvCA4TIlj67Ew1iQgJAaAhQxNEBhAUYOQASMyiSCFyF9qcSRZFdq3t9Ol+nufjaTyd/+3+4em3P9x9/Phxd9j7EBBREJamCsSRJ2bGg4vOuxg6H9CxQ7KfpZZNGkkJAdtS
1UJlQN34GazdvracM4rmnMfzeRxHVY0xDrHbRpyYnboYI6kYyBiCT8RPTabjUy2FAJk91ck556GriFZwagLQiJgVwCqsZuIWUnWKZPGmgpj1IlTEtiQ/F6mI9VEqwnO6smp7enp6eHio9YudUgvxHigzA2gpxXnq+v50TF3X3d//8NVXXz0+TjaZSinew/HYrg+zeu8YiGie5xgCIoYQPn369O23vzS5Me89EVzt9v9293jYdymlvu+nI6c811qZ/TAMKaX379/3fT8Mw+n8FLtYs3POTbk9PT0NnYuBd31ErTHG1hrk0qxQLZLLPE7UH4KV+UVkt9sdj8enp6d+N+SUgAkYoUmaZvAsCNqkQtuWjypslDgNLtN2igjGVCNba+z2D34UAvq5LsC/9I7+sw+nLzGsbzO/r6LAn4IR+ms+LmOIzb+ziSK1OaLpPGptNRcQnec5cLTTnHPMi4idc243HKyhx+DmlkNzzpU8p7nWIoicUzkez+cRiKCL3nlSbYQQO8+MrRWR6nw8XHWBXdfFWiuieo9d9G7XH3ZD5wJo0wrgiRS2ItZy4boSdjCPT0dbbBtdZ6ktpaTaiEC01pqZFxY3E3BQVa0NTWxGRLWhaEqz1AoKjnDowrubw/ub65v9nrG13CRPDjQ4dqCtlpKm85xyzq1ZG7ELXb+/ur463Pgueu/ZO26FaKkYiYhblZJePZQN0vJs8HDBu8Pay7z4oWJRWns+cm6t8cJv8sLsqSoQvqp5PH+UKkoTKdBEazMMi5mfXIs96znn1ho5FwJ77/1h7733XedCQHehjkQEhLj+15JBBhIBRyAKyGTxMCF6N3jf5jmnQVJRqShqrNa/GHaImGt5fHx8fHz8l3/717uH+6vr6+vr6xCC72KMccEftVlENC0awikuqoomuPGMXCUlImUhICCBilVV6nO7fSkF2qK/aLVGBQgh9MPQ932hZcSQgNA0AsUxY6vsAgHWlEua6pzHeULEvkdVRXZE1FRJVRTJBqoJgho/HBDaZBb37Aatho2YuYkKrqjdF7G9tNYkqwgp4Ol8fnp6KhfBnq4BotGubkOBiKBaaxXxbsVwqqoV6VOS0EHO+eZmOJ3GcRwlhKGLwaOqdl1X5rnWmlIex9Nut0up9H10jksphvyqrdVab969//V//Pt5nENwPniTejaUpnXmee/7PtaS5hnu7+/74B2BZxq6LtRquSWbhFYp73Zx02ewRtuScyt1nmchDENH1rykAp6l1gYNL3i6AWDhdZBlywAAy7tYtP1KAWN7Fl+CSrw1cn9b+78dbrkNy+Kupe6tX3ZFkL/Ot/6N2jwAMAnvjcZTN/iy6jzP3vvz8RRj/OH7T0TUSlEIVtkmIoBlA7W04TRNlnpqrT09PdkEyrPUAqqsgim18Qyi0PfkAxMRoHZdZOfnaUxpZEc1513fO+dSmudpItSu7/oYb29vd3303qsKIUKtZZrm08l7X2utzbqnlUCRHIgYvD740HVdjBGJi+TcqndEqDUXqa2LgRClNkby7CzCQwACFFHU5hhLSd7R9S72Mby7OXy4vbm92g2BtZQ2J4J6teu6yFBSmqaa5lRbrs2gcSGGYb+7urraX10hERAgKpKanIKqmMXaEpKXU+hVbnNbfssEu0y8rDyNm4pvzdkUE42zDV45oU0BgJfV/wwDtWvwWqGtyaUmJhxRa01zKdJyzlUaIHoXumHX9z0Og4V0itYBB0y0FqIQLItLuLknze6CEBGrAqBTQGJHzBi9pthyaVakbKIiV643GtKr02n/6Yfj8QgAOZe7u3vvfdd19ny3loa5ZedcjbVGvxxhgQWto4rATKpG+a1Va60LaCuljVltcyGY2aIT30VHDASyDA0uonnqmwI7h6I+ht3+oKrpPEoqT8czc2/Il8ZI6sTASozUFAgUKwKBOtUFqKnQAFBIYJP83YL6VQ99sWS2VAFaa9KqoFfy5/P0+HRqutCcLbHhcggpmgjJOiVg
LewKM8/zPAzDNE3DMDw8jM65WmsXh+DH1sQ5F2N0hH3fO+fUuZxzjOHu7u79x6+Px++GYQjBPT7cxYC1FgA4nU7/2z/96re//s9xHA/7rjnq+36e54/v393d3UETKVWXT/ZTLueT3seHGN31biAi9sH7mHM1B9FQRY+PT/v9gYhTygO7vh/mnKdpdqqAFLrOkZMqQMSKNVchIDLOWQV3uTkrrL2SsND1IACa0gUibGToy9hZxyd83vj9rR9GzLEGfJdcfC9u+AWB6duB+BsKdfElGe62S7YlNlKtjUOYp4kUAcAyeJbZFwFLG8Ja4bOmcqv8Gfgqz7UUaQKtlWmcWwP20HUdERnsgBi8d/MktULXQcrZeVIoOaXW2m4IV4dd58N+iKYH6pBaa+Pp/PBwdz6f9x8/AoDxYzUVRhKqDpmIgIS9H/YHH2Kt1bzF6AMAlJIAwHtvXqSlPTcTDgqtZm2VEaacovdd6PZ9uDnsr3Zd5xBbJm0CtXN06DomOU9THsdWSqvQKqhS8PGwvzpc3eyGQ+c7my5Sm7YKqgiCQICw5YovDdtba3d56AVn1VIiujB7FrWIiGN+/uubSvtlTXDDAaoqS2q11VKkgaqKmYTSUk7NGGCQyTuOg+/3vu+1C1YREURrhmBCYAI0LSBSwzHiypoNlspTkwElRGBSQuc6bYG63EqtuZitxSaxsV3bcHXohyGlNE1Ttp85G5w4xjh0fdd13jktoioNShHQ2qRUKczM3jMAIAGiilFgeyJ0uYjNjTmnUkrVLShcdBxjjP1+F7sOEauIc8ESlYiL2WtCzEzeQW3UqBt6JprYjU/H6TxutWRERFanSkSkIK2BWKpBQdCE4gCxml6dykWIbPpRzzI6honVzRlXqVXVQ6n1/vHp4Xgy0jKxKiDCJRLDZhohbQ6uiJhUnuX/n56eYoy7nSdCo6T3Hkqp3oUQujpP3vvz+dyHQESHw+Hx8fHdxw+I6hz1ff/w+HA4dFYHyTmTC92wOx+P05wRse/71rL3vgsBtbessqWOQiy1wHlK4zjv+r4pMrOPgefZGo2sAHE6nVJK1u3QWnPMnjlNsws+pdTV2okWrcGxQ6q1NkYRXYM9Vm4iCGAp5QXPsrK7IeLCUnTper5Ydy93eLzoc7jcUQH+xmqBbt1SjM3BWP62mA/WdnPaXtnM4d9wwAcrQcUacyCiSEPR6Tx678fzWUqdc3HEudacs23HpbRpmqywN02T5eVs9zTjV0qZp1ZrK1VSyvPcENfOvFaurvagNaUJySMqIohI8FDSLI26Ll5d+z52Xdf1IRJqy1kQlXiapof7+3mevXOlSpPFa6OFsxEBoOu6ovPCdcQ8TdM0JhUkB4Z7d56Z+Xw+5zn17z8E50spbI64tJZLLcUTE8DQh5v9bujiYYhDYI+CraCWfReiY6lzyaVMUy0pT7PWDhp45/f9/t31u93h2nsPoo4ZmlbJ8zjmuSAiOt5qbG+hK1+aRW8L7FuYbuZhI4MW7wFAoBlwl5D4QovcUA+0sfyv3NOSUyut5GzhX2uaUppzrdKIPUfvQnAhdF3nQyR24ngTkzPQikGG2TkBwLWkt7BIAzpxAAALGlEWYkgkVUHP3jsXhEJpuVCtKsIVaq1kUt1IXa1xnPI0xxjzNE/TJLVVLJVdc44VQgxM7JBIERqI1qai1Bi9GjWaWsBtBTTOFWzQpDVQYRtPQnLsPCNzCMFHb/o+ZCoGuMBomzIRMHnxFFXrnLQJMIZIIlJzbU3mNJpYhEVU6j0zoxORDIgqjoSVjT0dFUBEa0MsyMzmGIqYIvoztaHtUVabaDUjIjMV1WmaP3369PBY0G0Unq83X3NMEUSaEIKt0/P5fHN9mOfZuaVU8f79+99895v3799N46nr+qfHyZyq8/l8OPSn47EPwXvf9/3xeDydTtZkst8PMTx4x+cKJWdrMBiGYR7PpZTi6HQ6Xe37UsrNzdXDQ53HyblgadWuiwlSzvB0PFm1
mMh5F52PqWQRIREVSQnu7+9vr66NXHDhTEAkopxLSbnEXECcc1YfNaqNreqNFRARSZlXiL6oEigbExLIsoHjKl7xegxx4cL7vFbt3+Tm/1OQnPqjnRx/i7f99pqNoYmIxnF8f33zw+kHbTKdR0+c4bnfa9kvRJj//9T9WZMlWXImiOlyFjO7m7uHx5qVS2VlAYVGN8DmcCizCMnhL+EP4K+i8I18GwopZMu8jMwMu9nNafYKFArIyj0W93D3u5rZ2VT5cOxe94jMrEahAVS1SUqkR/h2r9k5R1U//fT7eBiGxWJRv6UUrcDUOI45aUoyhHEYcvWMrTh7ztl7J0W3h1EhGyJjQAQWi26361XlbLVcrRZYZTydI4WssaQsVMI4xhhb58/Pz1/uD7VEIzRKTMxoDBq20Jok1nrnnCjEOFV7gIoEp+gYhrGywqrCBRExoIiWnDQLGJo3bjVrzs/mnXOd43ljLapgKiF3rjGE++2uP+wBQLMOQwBosLC1fuYX8241a2YiUpIwI4jmmMJ+jDECGe89MRdzqrXeKfi+/5geBrz3dIofRr57YsvDr//eD6zOGHLKW4+Fo6RcYiy5lFJAMRUJKcWckQyybbrO+tZ6Z5wla5VtISAmdIYemJtXMwvGySZQ7+XDgI5+dnKU1yIAQBhjMMRsCAiQCSwjqAqOYWycc26GCjFGVGm6tuqGRO+ttTkmQrTEUCRJamazOr1HRJOCl4iCaKHK9lEt1ZOv3u04KU5DHeOB4yeA2RjD1hpnjTEna/v3ezxVaUWVncMiJQZBAARia31TFHZ3t8wcOdYWMgC4nLQ4UQEmVBSYkhGZAlt1rQczVYjV3LewNXJUr6vTivXKOZNxSJwF+hDX210EcLVCBBRAUIUH2lCnai/nAojMLJpjzCdc11rb9/3z58+//Pq1tTZbO5vNbm+Gmk6llGazJ2EcAaAqpxDhbrd7+vTper2u4QoAVCGltFicrdfrtvXG+QIoirv9/uJs0ff9+Wqxvr3p+/7x44WIgBJ6LyL9Ie374Hf7R48eecs1OT2FlrrEt9ttY91sNqsdPgE1xjASiJSYYoxJpW1bRsqToBwqT33VI9YNCAyTQ++0vUC1VjqgBPQwsP3wtN/v+tj+27z4//h/+K9PNc/037HfXx8n4jToetwtxw0A73zwzt54CC7B/Vf+YP/mvevHPv1jb+A99YH3kpFT4/r0cX2oRepZqSKiRXPOTLy725Di2A/Dvo8hQtGxH7abQ214IOJ+f9jv903TPH78+Orqup16ckEVKhLy5s31YZcAkJgJ2bV2NmvYcIyxbRuozWQEBXHWNI1DhKEfvbNnZ4tZ20pJMSZE9M6XlIkIAdfr9Wa9btt2tpgPYYxQCybTNA0ShxBE1Vh3t9443zx/8UHKOYSIbG7ubhWo86wqtZVjyMQYS5EPX/xktVht19sUo2Eexz6NIxJAkSfPn8wab1AJUme5cwYkxn7/9NFFCmMYDqggRfa7/TCObKylOSl2TXt58Wi5WBIwClo2oLjfHm5v7w77PsUESs66rukCaO2V1ltaSiHCk9STm9pTxhhTgSkBJSJT/e6rMss0XpZDCDGElFLJGQCctd77jMhMR7c+RAJAJUItmeo0eYU1RHLJUIqGoVJQYkr7vu/HAIZs03aLhZ91rmlc2xrvm3bWdF3bzsQRV8+E2jer/zEpKBIiT9XeaQUa8Hx0Rqd6IKuoCCEyotau2SSuhsxsyABhUVWEamJeQQVGqlVP4/18Pm+8L6WM/VBAENQ7a5mr9AsBMGJOUarXXE4phhRjDEOKIYSkUojQGEZDxrCxbK1hy8YwGSJCxYlOoqioCFVrBhEImag+qSIqRaUUhUnRUwERWUpOKaUcrTXeOwICVQBlYhEplUsjqqCVbgiKhMSGT0P01SpPTjRcmArDopJzlpSWy+Wbm5vV+cXb9eb/+U/+u+ubtRAXJEUGRJgs7IEQAbRpunEcUy6W2VmruQDqvGuH
oe+6TlWqreaLFy/6YcPMMcSm8SUfDvvx0aOzedfe3Lw9O1uVnC8uLlJKV1c31pmLi0ellFxSibnxbrMdENPq7GKz2X7wwQfbzTrF4J19fHmhWUTyxflysZi/fvWKEA+H/dOnT+/u7lrvrdEUJIyhSL549Gi1WpaSVQoiqhQAGIZwcX6hCn0/nC3PLs4vADDnQsTO+VzEOHd+fhFC7A/Dk8snh5T2+94Y0/i2PwyllKZpK8SioqLlSN2YQHktWbUKJdXhnRpq71lE0yle//f3UeT9dmDpQ/L2X+dV1S82+gOYrJ7+eK/P9zAFeC/A/D5cD1/JjyUpD8NkRau0KCqkcep+5Son1g8l5UoO9N7X8FZKqWyC7XZ7dnamqn3fV4yt2jWMI6wWKxEZxnEcQyHw3llvbIWzLFWYBYCZWUoqRZnx0fmKiG5v3xpjnjx5Ytnd3NycLZa1nVMVT5AolxJi7utAgnFjiimL8d5YX13xWmsnhWYycRiGITjnmPGEEdX369hMxqeIhFhyKTGBaOcb25nVrHXONM5YAs8IEg2IbXy/31lGZHMYD30/FgU2jaqGw2iN73xn0JQktVpV4L7vi6hBH7QoKKFFMDkpuPc1NicVjMbBb38dUxnEo4BIPo7O4RGdQUQ6Vg2l5Ppx1ZgupaBIyhJCCDEVUPaWjANj0TkylrxFa9A6ZQIyeiRD0cMcjum0/B5uv+loUURRAARRrYPCiKAAxoBoUdAiiljlQFWVvYGiIkUFpAZJEBBBQ845FAEBw6RZiJCZ9/tdjKGU3M2apmmIoJQUYzaWq694KSXlVN2mAMD4RaWbTu+lThzSNHdYZxBPn0IiEhKRmoKoqoBUiiYzFyZkAznrJA5ByMR2KudDihzYWyWiGAIAVBormKPluoiqWtM8fJq1mwoweSYUqPopAAAC1V4YanO9H8PVm7eb7V6qSxSiANWCum7u2sqqOSuB1I4yTx2Ne0Z0jPH8/Pzq6mq5XNYuGpFhsklTjGm2mNU5hFl1PgG4vDy/2276/jAM/WKxHFwopTQNAMAwDCnnvu99O4txHGISAWQ8HPabu/Ws84vFrBS5vLyMMT59+nS3XoeYgQEQ7jbbzXbXNA1bB2SYixrT970xZhzHUspsNitlarI03k8K6appDGEYS8kFdLvZsHNERgQqGqoKMeQiZRqhQVCt5TXX3cMTbwVPcvDHiPCbJKp/P8//v/5lqlvZ8b+jCwciiL7H7QRAVTjlZKcb8bt95w/7cw+Tke/3hKYXzCQiUiY2BzOLiuQyDAMjsnMJhziGvu9r9igi1Z2nklYqPNj3fdfOD4dDCKFKgtSt2HXAZFMchyH0PYAF78F775zBycQyS47GMKJhstbI08vHKcQU4qztmqYB0QKl67pY8t12E0JybNg6RQopjyGmIr7p2Nn9TZ9S8V1HRPv9QVXbtoVpINcehv5wOFhriUEFDLMURkRm9n5if5EgKUgukouzvFzO592MSVizQe6ccQgk2Tvbzdp+vyXAkMp+34eY0fissj8cWpktZsvV4sxaLwKgpMglS0iSRQ5jvNsdUpbZTMF4tF0psZ47VTXt+0jab/v0T73VY2FURwQQEeHoGTShW7nUTLZ+qqqaYtFpMk/VWMu+4aYBNsZ5NtZ4Z61n45gnQUUyDMcc8yF0McXX00ghAAIS0kQwxKmXWPE3QizV6ggUUKsCqqgoaD+OWkC1oKBBICUEAYQCGRnZMSuhCgA7bwCaNGYpaej3hMUyWWullBQjoQMQhZJzLllKmUBgaUAQFEEIavyteT9ZQ8xsDdQofpq8BwIAUTk9psKGFJUA2QIlINbaJGDDBmzjmCmMY4yBAC0b1ZKDEBGxMLkKAwNIKaII1vjpSYkg1fCPACCTLR6oSsWOVQRUkXQceyI6HA5ffv3VzfpOAbIoGFYlUBUoD/Gf2om3jJKLSDHIdSfCNMFNKaWnT598/vnnH378kxACAlcZthhhvV6fzbu2bd9ev5m3
bUqJGB5dnr+6ujscDjnnpvHzWbter2dN04fxcDio6nZ3mM/nfb8fxrGosGvC3eHujtvmyaPz1dvrzWq+uLp5++GHH/Z9D4feWIMg+12+vr6ZzzprjLUWtYAUleLbWdUqa5omjqGkXFTqsDzVAchcckrWWlIJ/aDWI1DOohprKZ6zlCKICbGanKDSUToABY2e5rnqKV7L9poYIMp9nYByjI6/R+f/3+wypzHeKmaDR9eeOuePD8DJh0ye997z75DG815V9/18BL5X86lqmczw+JR45pz5iLzVISooUI4NpNNJDQAT6HSE4JnZOV97+ACwX+/HcRwGLQV8C97brmvatglVwjhGLYWZrWmcNU3TWcZURgCYz+dt26Y09RvW6/V6s8tZF4uZIYwpqWoCOJHXmS0qisiYhs1u59tuPl9K0VIKEh8OQ4ypztVmycY4IVAtlpicJ6ISk5SiRQi18da52cXZata2IY+oBUtkJSbQlIQKebOYzesk2RgKsgOwQxz2Q3l0vlislk3XFkXIggYk5X4IBfn67u711du7zWaMoenmHyl9vLjIua/HaT1cTqNmv+1zPwWbUgrWZidNLIx7fLsGVwUAYCRCrSMHdVorhBDHkMa9FC2qyoasJe+Nb9Aatt44Z50zxtXM5sFc7wNM/kGH7+HH95+lCSpEOmK2SqSQSwYoVTSPQBTrjgNAEVCRBAUE6j8LFYxxpIIApaigCgCSpc40zWKScCSiIolkkr7s+161iB6leY7VsDIqYzkqvDGhMDITG4NMUB0S8P7tHLPgd5wf5DgFRIYpGiEVFCRCtq7xYkwpZUg9l1x/VLU1VgBk0VKIEAkraWXSAp3SlBoSpy1YswdlqqJF9XkSQ0rJWrfbD99+++1+nxGhFKkiiIJVVUNOM1ellLZtneVQxiKiRIgkRawxIQTvrR4FaKrnSQq5ZHSuIYqbzZifymLZAUAVaVqdLUop1kJKoWk6Eem67ubmhpmhQHVvGIZhtVrNZov17SAiVaRi6A+Sc9d13o81ZNbxickyM2fj9W69nV2/ffb4wjknMSCB995YW1uM1ZO96zoBPWx389XSGAOWK33XNI1Bk0LUVBCxZJFS+0tUSlHFnASpvlM8zgFVikA5baVaV08nv0xwiYhM9fbU+vrh8/8/rch3H/bqXahR4P5dFagarXXnVJTj4Tv8vXq3p2Gm+tcffBhHHedaC546CFrHabWIlIIAhjinFIaxHtBwPNfqJEPbtjHkOkZaoQREjDHe3QUJOYQCAE0DTct1kuxofYDWerbYzRrvW0PMJDdvvrl89Mh53/d9n3U2m40SX758PYaw69UYKIrVzJSZ2di2bWOMksgY45lzzv0wllKWy2XbtvtDX0oRjX3fA6FrOtVQyQWIAEVqpEHElOqMOXjvreWucYvFjJEa35QUDJM1aEgklxJLD3tn/X7f90MAsoL20Ichabs4n5+d27ZLRFLEsJLqIeab7T6U/PmXX337+k0qElNR3CTjZ5fPF77Ux1TvKhGhs9+Xh//rXPcwywN1q0kGGYoqTELStbwTrRKgWiTnHMfQHw5xHMexZ+utd2QdWFZiZWJnjXeGLdnJWBxq9+p7jQdVrUl3bcQS3JO5CZGqUUV17QEAhQlSUqBKJxAFBIXqniiqOu/anCUlLLFISZJzyQkLiOQioCVBUpFMwMRAwF3XOm9Om1dVqrzXbrdT1ToBRkRN0xjvvPPRMTKxJTaEhtgwW4NMaAAZiQC5TnWBVEKmiqCACoAqkD5EUAiBCA1DzgikxEBSRzrUkKiGHGMMjKSqRgQBco5IggTVwVwJc4yINSugOtWuRxQa6tS5KNTB6gd+v8aZvu/f3t4lAbWkRQFI6hTgg0dEk4/8O3O6dcEw8ziG+qn9fn95ebnb7bquy1GYufHzFIbKx3RutVqtYoyA8mL27NWrV8vlTFW9t5vN3ePlI0QUKVUjr+tm+/5QjYQ2d1hJ4N77HIbDYdd4e3a2+vbb
b33XvXr16vLx09WSb25uci7n5+eb9e2bN29m3nftJOkwn8/7cdI36vfDvunn86UhuluvXdMBRgOeFIfD6GyDhod+NAupZNFSCjNJ0VASYj0BlBiq5OZRA0Co+iwSPtDDAUSt2stHeTOkKSmb6v739uB/cpeZ2ul0tAej+0aFHn148UECe+qmfP8A+p1cpyQU3s0+fjDmPYxSIlJzmfrXpmnSMMYx1mk8BsxQ88qufhcAGGMqm4uZd7u7OistIqpYxTkPB5g5MqaQgaaxrvUAMgz9MACgMLMztvWNs14EhhBSKrOuM8Z45wh4jGGz2e36w/7Q74eUMjgPyhhDCbl0zpvGO+dCCFnBOscAY0iq2nXzSiuNOalipXE2TWetBQiqlZIDKQVE65xlwChCRG3jHBvA3PrGORNjREmGyRluDNsqslUKALx69Wq7P2Qla9wupLvd0LSzyycvuvmCnROkTAKEfSnrvr/ebb745tuvvrvaBXAORCEmMW/vmm+/+6PnzXw+997DyebJGiKiH7MS+JHrXnoDoCpFVSbgw2pv4ouKokLOGVW1SEopjmEcxxRCzhmt823nutZYXwySMaZrXNMKgjJB5R4iw5Gk+WOS6PAutH6/XwhVARWVilbGiaiCOHZw9DoodfYAVEVqN7eUUmetyCigVZYKSRSRlGPOefKjBwjp4H3bNI6IVUtKuaLxU2PneHO8b5umaZquNGKM4fofM1tTTXZquw75AeZ83FYP3t39ZsejRCcW0SxalEQKQZ7QWlDCnPPusC9FvHU5Z0AEEEZFwDLJpXFKdWKPmLnSl6rMTR1dn1osUm99nRosqCAim81mv98jgHxvpwsQw731RCnlJMhaIRtLE1RTFZfGcXz69Onuu+3xC5SInHP9Pm2327Pz5Ww226WERLXHv1qthmGonTZdaOttzIksIpH3fnfYr7ebRxdn9YtTSk3T7MZ+u92a89V8fl5f1Xq9Ozt/ZL1j5kHAWue97/vx5ubGP3/MzCDadM1h2NelnlLa7/c559lsxkQMWFK2jWekElMYR+tcjLGEMJvNThsk5xxTYGZrhYhYsXayAEi1YKV8nRp+026qY+z3N7BG9Knkg9+v8/9vdpmSp8HiU+Q7itZMRS4AgOCpz3fK9X5P3uoJ5Hwv8p0++xDqPCGWdTi3lOmvhpgcyxhjjPv9vu/7qjpNROxcjTS1vbxcLolos9lst9u+7wFgPp8DQAghZ20aQFBAIALr2FpGxCxZVXOJ1lrLDpFVsWQVAQR6cvk0hJBC9k2TSr67u97se2M5BEAE46wAZUBiy74h59lZAwpFSylDjDEmNtx085N9lzEujSHn3M2XcBz+rWEvqjIfR4kRm6ZxlixxysNkYCNljId5N+saZwgkF6qiYknu7u5ClIImZh4zsHHkmqKUCUpJgqQCh2E4DOtX129fXV198c3dvkB1TwCERHC134fPP780T2AqMe3D8buahP71r9OxXqXhvD+qdh3JLDVTOY09VCxUchnHceyHklK9A+g8O2+dY2fZsnGeGsfOSs5KU2+JVYlICXDihh6Rf5jEkqfj4wT4HDvhBFBIAKCe2ZUdeRyC1epoTsCMSoQghKKH/V6rKzohAVVrPC2QVRREtTrdaJFcsU3YZ2u9c8ZaX/tfNeYBEDExW2OMtdY6T8YqIJnJzL3+B0RYDThwYnjBUb/xKBB3v9EIJzSFVAWFmcE6FChFtIgWAQAlVRQist6F1B8OBy1KMxQ4MDOBLwjGkGQF1JRzowYAVQVEUFGItAa5esgiKBAWOU5YC2JR5TCONzc3wzAQQiiKZEvt5CoBvCNLXbVXQDIBEpGkIgJsXSmlbZtxHL331jpVXSwWtVVfFVKIiBm2293d3d3F+aImZsNwcM60nU85DMMwn89TivP5fD+GpmlSlqrdc3d39+jizLczKSWE0Hm/R4xjcGxijBcXF9vDwVra7XZVsV1Vt9vtanm2KdfXt3ePLpatc+NApNC2
bd/3lZx1OBxub2+dc7PZTERSTrZrfNPEnEIIbAwDbjY75xoiUsWaCZUszJwne2qsylzMXA9CnnRbCECO7joV1a6dnXq61ucO8ADk/D05//9mlznmpw+5/vcg4XsczodV1I9Fl9/h9V6v8fsx70EbZnKbrBJJzMRoEDHnPAzDMAySErOp9m9t29aj2Vqez+fjON7d3RFRCKFihswcQkgJvIdwyDkDMpwoM5UUQBkeGtIaNl3TWWvTuJ53XSz5+upq1w/MzIZubgoYMAagTpsDGWeMs2Qm84QQc427uejMz51zQJRzyTk719SQUDVZ0J6ifJULoWpSQ0SN823nSEEOQUQYAQAM8bzrus7nMQz9IY8hxjEOEQByzocQBLPr5vPFStHcrjedRQAA4iK678fXb++++vabV2uIAATADElACcjxmNPu9dXbR9Q0zWq1qlgrHmVzf9tnfWpZVU2Qqr6NiFJn0R7UW3BKjGoJmEvOGVS9903TBGRVHVM0qIbbieqQUiUvFhEmyqBW8WGC9V57773X9rDVPfEEoACiylEJWEGlTAGSyaJVYyyAqlpjqihoKUVyyTHFnCWmnLOWUkCBsFrByaRIFw6Hw6l7Z60lqlXv5EaEiCrVWgEiJZi3iJir1wSirQ0zIj6uiuN9Ot7AB2GvRmytbAgmAgWLUMRaqzFX3rMxJhdhZ51IojGkMVLUTsdxNM46NqhGVUVFUhLVxk1CUVM+qiqVeMykqidp04mPI1IpdSEM2+02hADHqb4fWydElFICUWcsIpZjTyvFOJ/PN5t1znm5XIzjuFqtvv32266Zl1JUi7W2abikMo4j81l9Gfv9vm3b6nBy2A8vXrzo73Zt21rLyJRLqFvvcMiK0LYtxL4UMY1lZpXUNM2bt9uLi4urm5vZbFb9Y1erFSp88eW3L54/C8Pu5uoQY1x2ba1TF4tFSqm+vc1m8/bt2+VyuVwut7tdyMl657yPMQKT815V1+v1YrGoirunzWWNG8OBmaf1+8DGUs30TFXpIQ3wuNrh4UGqOqmkwu/l+f/Xv0wNeABUOw1KE85ZMVzi2rhQmuSWUPN9o5v4HlOiB75Nxzmf+tdT4Lz/8zdc8iMg0umMewhhqSobhqkbeeIuV3mMOic0kTZP32vI5pwLlErhiSlJKYUazSEmSIOMu+KxFXLjYVg0KzV+3A0l5lW3JOsOuz6GvOjOXl9d54TL5dLwbLffF2Hv/W4XivVZozNku0XbNpLyGA5SkqTYtn7e2ZlXywExWhwdObNihTQO+ywpp3Jzc9iP4B0kBUbrTbNaLOfz3A9bSXvj2lFc0zTb9XZI6fp2ePJkbht/drbUknbbveZc4uihtKRxe3v+9JEEs2q8Jio5O+6MMY5pHNfW0bx1zqmmUOI49PsqK/XJ+ZIpaZ9zH2I/bHf9ECUpbweOsiit/dP/7L+cXz7ZD/nPf/lXuei/2VtVDWO8226ub+9ut5tdtplVSFSVQVugGcIs01yVBX/57VvpLtqLsjRkDRE1iaxmBIOslErhnAkURNFU3pCBIqlkQmQEYkKqigGQsyYoBUG8h7YdnRcRysdFolgk5zJ1/gQ0xND3/RhHpWK9xdaCN7Fa7zKBA7BFdJQAiGiahsSgUQbHzIadsZ6ZI8ZjPVkVLkCZiSgJEJHC/YlQyBDRokz7ZRK5fsA6Pv3L/aoGDXZnLFQcD6pGqCTMst1tQFRzSTEOw0GyJpJiIOcsIEWLFBlL4MyVx0TWcOGYrUVrvfPsvauqnW3V9qzp2inUOeNUJxmZUxhHxBFEABVEQREigipGQHEWBEQ0gwG2Sq1llBLBxQ6ESg4M3rdzEEql3K23s3lrkQ0ojKPEUD2btApV1/SQWZFKKaXOBRYGALbOWgeGT10GhaBEY6L/+Zd/eZdK8iamDFRYswFgEs1Qta6BENGYNLKCMcZZDhkCZFAoSEG0KeWQ4Ilhldhvto9Wz20emZeqggCr1fnt7a1B2PeHMJZHT568
/u5l49rVahWH/sny0Z9/9+dWYD/ePXp0uVz5lFQhvXn57Wy2MAD93X7WNeIpRtTV4uKZ327uvrkdfnp5cTgcPnn+rO/Hddoe7u5YYda2zx8vv/nyi8bbp5fNd1+/6tfbZ88vLfFmPzCaKny4XK2GYXhz8/bR0ycmjxI15GCHg3EcDtvG4OPHj69eXcHQg5ammW12fbtYoNqDaLbzGAMVnRmyQJQLYLbEGmtuLwyMWVnBEDIzlErjVVWULEAT6K7MRzUcrGT/esJL5YjQw1hYAIDA/p0GsPfYlA8j8W8KeycsqDJ5HgxCTSp8RATHEfWpu3H8BQ+HB067vRLJ4EdSgL9xXvCD7+c3/6iHpd79NUH89109nNzWRFUrXWoYBhKsVBQ09gTEaSkAMKnUq3Zd55ybtE6YAbK1ANXNznK1a4hxGIZBJYNAthV9AuectbbSzUPst9vt9dXtGAqTc43hlPsAq7PWGJNiub29I4tN0zQzZy3HouMQdrvdMAyI0HXzqhORUqnyK7PZLMZojEHicRwtGmvIWM4xhDgS+OZ8cXlx7iyVFMZhFw6HFEdLaHxbh7mrw1w/hGFMWUCUkoIoDSmv14e//Pzzs+1g29l+v1+vt7/+7rp2fVLJQ8qhZDmKKz546nTSPEopj+NYRRHBW3EeTh6wP/JMJ9The+2E01qtx3f9xhMpdOo8VZBXFQ2bYmpFSFj9NBgA9vt9LRaVjeRcF4QAkSmASEBKRQRFcxIsRNzSCdvUafhHGYGBKmETAKQ28VTpKPCPD9jOeORGwQOI/sTGYmAoCCQMLJAJkIWUyyN7Wc3qSowprsIw1h7eYfM2Z4lxjLmoFkQ2hoDNfN4xW2uZjbOOvWt9Y63xMp/XovCEddeXd+rlvLdljKIAFigwOTgBAQkqISoDqwURddZp9ffB4mwpxakKF9VSUsp9Djm24kspmHPtqauqVcfMmjMRwWQta4jIWsPM+Sj7nkup+29aGAy5TH5Dm+0+ZmFmdi6H8YTlHIeN6WEKfTqs6naumpyLloZhmM18nYebzWZDjM6b/XZYzGZd59Zv42JJ19fXv/ijz5bL5WazOTs7E5G7u7vz8/OU0uXlZSVw3txsuq4bh2ytRYTdbrdczFIKTTMXETq2Eq21QExUVqsVsrm+vpb1+oMPni8WCyi55JhEmTGVPAyD6WaVCHpc8FO7rormVxqz934Ifc55s9nM5/MnT55cX18vzs7317cffPzJq+ubx0+eANMYg6rmNI5DVCVHiISlKGFl0hqAQkoApeLLVXUBCyJV5jAct9X3zen+0wM8TUqTLWElcVWy8DFsIxEoKSkoqRICVHx/gkHqPG3t+Z0gihpZTufS9wvh/5jI994P+Q0h/QGeee9HU4UJdWp+wBH6AihSzTaHYdjv9yGMDi0RppIp59osKaUg0smcRUTm8zkRjeOITNbaAQIRxr5XBWGbUio59vvtOCohLGZYsZGjTr5F1JjT1dubGJIoFsD9fhhGQICuYxFRhFhyDrGd+1W7dI3LOSNQjHEI42HITYNnZ2fdYq6CmksIwZMJIdzc3AxDODs/A4DGWYOgUlDFG17Mm/Pl7HwxQ5B9HNIwxHFglNZZay0SgOg4xs12fwgpCYWCQ4E+6Zvb3XbQtxuI+OX5+nB+8fjld998/d3Nvt7HSQgYBaAKMU+KwAoCKhMwpgoQIxwOh91uZ4wh7EpTjg9L30toTv3a0/rRd6PdCRmr1Hwkqq7cExKA90Iw1dOnyi5Xj4KKfYUw5hAtsSXmaaJcARlBjCIhoqKpU/44ecK1QqhTXFdBAGCYxuERp4b/sSOSiShn0SOKO63e4wkodSi9vlk5vq8aPrWWHDSRq1Gml6eqxhbfdO2s4px6eZaTxDSOMYtmBGaDSMbaI55tLDOe+n/RuYdZwgkHexgVoGI2CgDI9cBWqvJqAqeeyPHBVG0aRrZWkTBN8JrmwgiScxxDDLHaz0I1jIXpqbGx+cGoCYhaaysS
56zLOQvUp6wigkyEkIvGUm5v1rvd7tBLAQD3o6l93fdHwanTG4Scc9v6Ybc9O1/2u13d1NUS9rDeM/vKVjs7O7u5viKit2/XInJxcXF99aZGmtvb29Vy2R8OLz68/PXnX64uLiSXtus63zjnnKfNZv/hTwgAJj/nkp1zMY6b3f6YkHEdtF2vt7NZe7ZaQsmb9W1KmRxVO1nvvfeL09Op6tExxu12O1/O6ioCgJIy6HR20Xm3XC7fXF198ukfXL2+evTo8uXLl81svlguLZccYwhRFMg7YyjX+UWFSU2WhJWRAaAoINWpTkEkEFFTRauPVDKik3pn3aT37ijHbfs3oWf//VwmJzmFPVJUUqSJ6nMfV46HDx75VLXoq+1ffCe6vGNR+/AUg/+4yPfbVnv4LrflBOYQTrmtiCASISpM419Vmr2KvWbAxjnJRY4xT497/Ui0A+fcOI59P84W82PypeMIiMCcUhxFJEZFBedhNm+7rqsFIrIppQzDoe/7za5XVTLOKKWSQwZrAInW2zCfl3betu2i6ZqqPBtj9L4BAGMcUVosFrPZzPuuisUgYtu2VSZUBLqua9t2Neskl5SDd7SYnT86X3VNk4Y+hsM49FhyY9hbZ5kUpMQcSu77ft8PQwJhOxZe9+FuP+4TbEc4APRjKre3ojQe9gLgugaqVYGigBKoKpTqMHR8Xnr0OUHElGEcx8Ph0HVd1/oK9BUsxO8oDJye8vG2v9tzVj2dBUy1qOGT4OBpwpKqjaJIHVdHREEwSMcRjlQtxQ2SQYIiUgqhQRJE1iQCBQ1KIbSooqWqfuSEx4JCAFFB2TJR9SiAqZjQ05Lrj1P5p2VfhVFq2MsnKVGcVEvc8YA+vYWKuzCg1pJLlWr8Z2MIfNc+lCQ9ElFQRPDB4B0RsbOGWK2tsmgTiwGZCJktgACQSFZF1aJaMxkyqqKIwAIqdXb+6FhSJvcrKAICqkRAYp3T++pMXNP4MZSYKluEj+LppzJ3gthqs7aa8eWcSp45j0wExMxSB8yraEzGkNL13d2RqAoikoexztU/KPfePwpOqwKO0i2nbLiWa+v1+vmTp845yaVt2xCGs9XibFlHnuDqzdvPPv1kPp9vbu9evHh2fnZWDVvqQqqiFiGE2bzNUs4Wy1ev1jlnbzmEsFycbde72WymWq6urj7++GM2brfbOecfPXo0vnr95tWbmfer5bLkuLm5U1VRGMZ4OBysndfudd3giFgjonFcSlHQzWZjDDnnjMput3PNwlr/0U8+fv36dbtYbDY779urN2+d9wDIbFKMacwRuOZqBRVAUYVNHUUvhGbatQYEGFEQ3vHnO+2yh03tScnzuEF/z+s/k6vTLiiBcqk0rQm6qfMcMnmOTAoUMvFfpzF+omOOXFO8ybcJTyvwlGr9R5Z6/zHXA/TjXjuqlIIqRAYAQDUOYw5pv9/X+DfG0RlTVKCU0255WIJU456K1zVdWyGjlMA6QATD9c0WIvANzOdz50w1S5uMimLeHvrNZuObbr3eIghZ6xoDLCnJ7TqpQiz5rG0ePToHxmE4FMjOuZhLFvXexxi7bl6KxhgPh8MQorV2NV+IiDHGOZp1bdc2XcugBGAcm1nXtM7m2Pf9ASSBaMNMhggViuQUU0oxhn6IQ5KoXMRsg77ZDld3SQg2ARBgefE45xhCUCgtwKsYYCI/1xXDlfyByKpZFMox8kFlBCNUNbI6y1iFApIiG6N0b4P38Pp+2Hv4TGupR0RVYaj6/hzP+vsFYB2fpGG4Wt2OYxbBLJhFY86qAmgNUeXfjlklq45Ehgwjcn216RhYtM6eQw0/XFSICIFPAZvZENFo7jHMqVQirBSS6h5Vvz5X91SRkPHUt1ZVnt4Teu9VBe6RUq2xsM7dngo4Aa3b0Rh/QlylCIoSlDzNguuRcCrMgIZQZHpDUM2U6KhlxQAZAQGQkJRZlQqgoIQcKmdSQEsN2wiASIYpMzBpKchkvWu6VkSGw86kVKxj
ZkLUInEMqKAtemYyDISoVSpaU84xRgEltsDETHAcPLKuDbvw6s3VkPLRPY50kpoEOP4PEUVV3tWkBgDC6YCuohDDMHjvQwjPLh9/9/KrR2fnjbP9fn+2nF9fX5+tFk+ePHl7c9W29Pbt21/8wWfPnj3783/37589e/L8+dM///M/vzg7TyGulksUPVvOv3t1/eTx09u73WzWOrcOIawW7Xa3fv7sMsZ4tlq07azf3VUuFju7Wq2YebNd73bjzc318pNPVqtVv12Xot6wiOz2vfW9tRaZUI9JgMgwDH5wdTTl7u3N42ePvffM/PbtW1olZzwBpjGmvGXfrB5dXl4+2e0O3nsma40rCWIURrTWlpIm3pVM0stl0msHKZpRCIkFFFANCiAcB97qnX9Q8wEAqFQHrns85u/5qP9rXiZmZQBgtQSgoFzPLAFgJJn04pHhmOUhHXUOj7okdQceBwNqFlC/a0KBv1/qwW8s1H7wQnynGngYwH7wegjavPdl9ZmVUkDAEJKCIIYQQj9ut9uqm3DYH5qmccacwND32DTGmIlBDlDpzsYYxOAcgFTGpU8zAACAAElEQVRTo4IqiHXaz0xnE5osRUtOKeUkOUmSMSSdzZx1zW4XY5YKg/kWaph0zoUcU0rsaD6fv327DyFUX7SmacZxpJz7vgcgY8yh363vttu7PSIw83I2dxxms3nrHarGOI79RnIsMbXOAgkAapGcY0ophVhKGQ6HIaQQdQQeoNwe8tt9ejtOS/uzTz/8L/7r/yqF8bBfg+bD7k0UBYQ6/SOAqlIQgKvxAKmWkzaXIDKotSAi1cWwTOPkIiilFCE+eTN8P+zBg+H00wqseNHDqdNTMcHHRjQRmSKi2ThLKdaGT0WxYoyW2dZVm7XaEgBqKQmN5CylFBWs02y1MB1SnhaDgiKcwt5RCfS+YabGMrM2jEePeD3ihqebRSoAIAhGAQSgqAGjigL5KLmtCrX8ipNg8CmBQ0Ugg0Jo2AihURQRKCWJANkpdIlA0YzAxMBktX8ne6Oj2rVzDh5gnlwtHVBBBSalGwZEQGFD9VYgskBGUAItXEi0sJakyETMCRPWUdemAdGx34tIzKmaGNRHXym1iOi5QUQmZmcRNIGEFBWBkUjk2J9VEQHf3W73v/r8q81uiHXCmskYlhQVAGnSelGtii06sROOa4b5aM87jp33/bBv5nOR3LZtStD3PfsGsZprSoxxtly8fHk1P29ijLvd4dGjR4cDbDd3l+c/9c6cLechhGfPntytt23bIiobyiU22JytmhhHxC6E0I+DIB76sWu70rW3my0i1unVvu/rDOt6Pez321nXzGazcTggMiKN47jb7ZbLpbW2nrTGmFhy1RhqmqYWmuM4LhaLunNKSgx42O5+9tNPf/XrL3xrX7168/jJ08OYRKBx1nETRXJKQYHQQb1hMPkwANTD/zjoX/Q4viZYGAEFHs7z6cOa7z2AbTqffy8Dn0nVPVpBCRmQgalqsqGiqpG6HaukoE6Nhno23Pszwb17gygSqgDWKd2pZ3PCfx4Gqt8a+cV3NWR/M+v9/tB8h2uDuUI1TKpackEmIjZIqFBSGg69Hn229NGlb9sQy6mo16OMWUrJ+iaMkZnb1lTagve+aYY+SLW7JFAlYAZmJKKua0+ODWMYc84xFwE89CFn8E3HxoV0OwRYLt2ChZhn8zkZsx/6EEYRaawnImYrEoxxXcfL5TLnzIjOGET03pUUh37fNtQ0Tett13qn0ZEalJySpIAlGSjGoJYARXLOJeWqmjaOY4x5HOOYtE+6F92lctenXYARoGuctX51+ezx8w8sFs0hDvuX377BavYGLBNmJ7VFfOLkqkIBleptjmTtdOichLbhiJaXUkRoCoNHYyI4PuX3envwoNSbUGtAIqpVFxOjHk1tmAxzSlKPeAAAwpTyGEM/Do+aJQFqgRRjyZpCzrkMfRDQyhLCo4LatJBCngIDTJApGraViXh/2SpN
YIwp2eppMKCSoQ0jUU4FABShipMAACoykCMHiIJmgn+PmUGOBUAeBKzaUFBCUKxGCEUmMo0AYAwjAlX3FEVFLMawMJYkJ9Wxeg8zFyKSfB9Q6XghosckIlJfOVe2KoCiMz6BFEXVXEQAUVEVNeeMCGRqsAckJFOLOarLzFrrjaPqk8DUh9E4S84gIhl2zITA1YIAtBbEeAQ5gfB2f/j61dVf/tUXh74AAAKLIFsS+A9fNfUEABQNIZwv5mPoQwhd50IIxsAwDA3h2WohJXWtn2BUgjoP+t133/3pn/zxkyftbrfrh/3jiwtQ6Q+HD56/2G73/aFfLWaSsiFMcVwu56WUEEbn3O3tbdc2r169/uyzz4z1KWdEHEPY7jYKcna2PBwO69vx+vqanlzOlgvRXIoYJs206w/dYt44i8KaFA2TiogMfe+d2+73L1682KzvLi8u9vv9arFIRTbr9Qcff3Kz3v7Bz/7gV7/+wrfdd9+8PHt8nkLSonY2s6bNscRQjClKhaeuHCqjAlXFVFXU075jJCWsBK4aHaZEtJ6udOR2HqFOfejP+vt4mVyQAEkRGCdXnsmQkJCOvTpVqG6lqoxWFfAdic6Hde59eYtKqoDTGNV/LLb5Y9Xej3ezH5oN3f9qeWCiVA/Y6lRSsf4KoKtqSkqGZ7NZyvtTuD2Bcjln31LlgxGZ3WGfUvKubZqmD71BsATOGWIjIo3z3vvlcjmbzUi1MkVrqZFSbrv25mbY7g6zGbnG+RbbdnZ3twEU5w0R9f0hhNF7i4jjOLZtO/aDMcZbt1wux35AJi1yGPZny5X39my++PjDD1TVOQeSO29yGof9FiQ7y21jSU2Jcb3eSiklV805TEWGMfd9n5RTlj7DPuR1zJtBhwICMArd7nbx3/97JF117qcfPB1D2AfIpmQUZkXBXKoYBj1kXWodOsYqfDT5fFaj8Br2avQSjfoj1/ervXpNY5FHIqJilSA4xtEqwSNCRMpE1jjvdWJbTQ+6fn3OuWju90MYUymy3/eb9W6z29Zqj5m7bkKnAcDzNN9WMf8j956Z2RIfFbGttbaGPbTWEqOZXicy1Y8nDRRCMgatwWqGojXLBIVpqZWcU8ki4r1F5JN14mlJS4o6eRc9EOJiJK2Wt5OdAvOxAwr3Ye94rFPFtmpt+d6GUquqelTgRCUSAMUaq5UUUBSOtJOaMlpj0DAzlzwxlWpMylKw5EpOsWzYGmfs7rCvT6qoWuSiUmehrHeplIrG6hFYY+arm7uXV29fb4siOGcKmpTSpEk2DZX8QDb88M0CAKikMnF9h0NYLrvD4eC9qxZ75+cvrq6uZrNZnZzz3qggEL9+/fp/+Y//5JOPP76+ennYbs/Pzq6vrw+HAxFJTrv9djE/O/Sj83YcwnxxNgxDCGE+azeb3fnq7O3b25/9TJVo3nZEeDgcYoxPnzzpuubVt98xwt1dnM/2jx49iqPv+14R0HDFk4ypvhb5lOsPwzCbzdbr9T/8h//w2+++FpH9fv/8+fNvbg+PHz++vX6bFV9++91HH31yt9sNfWhdOw6x5CStOusGpVJyTgoGAbDAfQsBCbBgRelkylwFDAjcNw5+iKvxt3PO//1cZozFIBkLApAFMpKxRFS7vkimBvBcSk1SKQowsyqQ4tTsZkLGoxmLHgUroJRJuvQYour9Ot2y91HK48c/nLfJPYOIHn7jj93l9zji0yNVLaqt98MQuq57u3+7bBdxDIRkjXn16pX33ncujoEZHj9+fHN3W/JEl3LOCVIIoToQjeNIRIfDAZG7WVehhq7rnDfX19v9AZjGxaJjZ+fz+cXFRdc2iLA7HDb7nSj6dpYFhjAwJjIQS146++LFi81mMwz9fN61s6bv93d3N8zUzmfOOS0SS9jt9iBq2V+cnUtOqJJjar1bLB4T6HZzd/P2rupHPH36tDGm8RCHogTE3DBrzn0/DH3PzI33KrDd7t9cXW/2PQI559aHsNlpX0CdKWDH
ErMSW3/x6Mnw6pu7Q/92vXly8eGf/fIvvvziVxEADIFCEUElNGbyVRUBJCAAFshaRKOIZ+OsI5oYfTXs1eeSc25aV8cnTtVeBdiJiBBUoZRSpBCRM1jJRPVUqgVZ/SHMzM4yMzEzoFZWverEibSGpJSYUgpjDFkKGVaEMYZhjPvtYbs97LaH9Xq/3ezW6/XhcEipLJfzZ8+fXl5eti0QkThTlU0EoJRUUpIwtZBqlYeIBOi9r9ri5TbZer5aS0RAU1OwEtPJGO89EJ4oqeqPDTmRUgqoeiaypkBh4qPMQqp2x6o6cw0gKqHgMdUTFS2MVG3RJxnpUpJIngh40wapAS9PUgx2+o3valOMcWuMAeRSiiB475uuNdZBRpGiWUuSklNOqeRYMZiazRAROJuHUCq7m1AnP14ka6x1SlhUnj5/HkIYQmiaxjibVSBL1WRIw+Ccs97HnFSVDKeSQ9F/+i/+pWUQZ9dDAkIytqToDVlihFxEVJHZKGDOIjqVawAg1WoKkREZYb1e+8bO59j3PRbx3msuBPL2zZtH5+cicrfdisjjx0++/frl6qwdhvz111//9JNPfvUXv1x03ePLi9bZVNzLb7/99JNPPv/8y6dPnsUYX768bRq3W991XffkyeXVmzcxxtls8eTJk/V62xpzGMIHP3n++ee//vnPP1uv1x9//OGv/+pXqxWOo97ebhFxuVy6ptvtNlny8mw1hDHf3lxcXJw/uhhj2O128/m8cw0ivnj+/Ksvv3x0fvHdN9967/fb3X67uzy/cK3zptnsrr/75utPPvuDIvDlF1+fn58vFouUShyDMQ2xSyGggoqCgkUCJRIsWUsuztZEgRmQCVVAqFI+TuYVULHQKnLGbBGrVByUMvXZYWob3B/U3z+3v39K/61cP/bz67+bnEAZBcGSIqKyQEEDlXcHUmoKqURa6uwinmDGCkHRqas8tTS0PIQfK0PvIez7MCP4e8gOvt/YEwBiHvoBEeuMNijMZjOsksBQUkoXjx8dDofFYhHGSkPPIpKk1NGFSt9iMjHGGJUMd10nBYZhWCwbESgZrKXZvCPkOqXHzLWhVV9GLTIA4HDIVUJlGAZjUu3Z9MPu+mZHBM5DayyBEIi13rFxdoaCaJBAShhR1SIwaeccIjI0dLESWcxms/m8cwyhHywb2zU5xP6wLzEZxqo1c3319m6zTUWNbWcLe7vZvn41qoN9hKEAEkTEIigASPT1q28W3eKzn3709HJ1dXN79frV/pCXHbw6Qv0P/ry/2aAkWOqosyDke+z5vm4upQjdY9fwAKA+lnpTF+phn/hhqXdC/iqxZbrDoAr3FiIAOpEGTd3B0zzJbLbYbHbr3X6/7wl5sTpD8sQuFRFAjrHp2tl8cX5x8ejyvG1bMbZ+IyPWMktVUaHv+wq/TnxwhQiS+sPjZkmAZHiSPylSzc22t2sAsNbGxlekgZmLMTnKw6hDRBMSikhYqHojABliZUYFVgXAAsqndgNX2WwBAUHB6kiA1WwGTyMTzAzHiYJSSh7jiRF6qgVVtcEyDiFL5aCyZilZnEtSoT8GA5gFNJUYQs6ZVaSqzIjKEaTNUshO7W20hpjB8MmzcKqVvWNnJ3tThCyCTAKT1lotd4Zh+OVfffHy6vpQgFNF2hSOSIxCAdWKxcG9q8P9OfMQHKqZPYBl5pyTqjKbnIvkgl5yCnWOvn792cVyHPZdZ9++ffuTZ48vL8+vr15/9MFTBLFsvDWM1DU2hvH8fLU77Pe7HtG2bZtSms/ndcrQu3az3s2ezG7Xt0/y42cfvPjuu+/+5B/98TAcPv30083d7du3bzfrfrPZ8CS214pMpNPadgGQtm3zYtH3/Zy77Xa7WCw2m83HH39Yrd6vr68/+Mkn337z9SeffWYIlvMuo/viiy/OH10u5vOU0nq99s51XeMth2EYhtBwi0BCKKJSoBCiKiIJAJ28hqc/9cRNe+cAP+52fdeZ/QfP9vf+8XfFeTEx
Z1ZlJSEmVlPPH5mYHqTC0yQDgCgwZBAiUJ2MiiriWIs/RFBR4nfe28m64WGcf9ie+TuNfKeFfv/bmeou6sNYBcZWszkJp5QrnyWFjIgfffRRjPGzTz7+5otv6zeeGi21mTSEqKqlaK1Yuq4jjCklQm08IMBs3nlvRcA607R+u90eDof+MIgIM8eUUy5IcD7rTgCa8W61WsU4OxwOwze3sw7m866KIREqlIxMpNh4b5yBSZGyuvaII6wZcbNcVNYoIuacckwpj0USalXMpxDift/vdrsx5JDKmGQ8bELSMUAAiIkPsQwC1mImUmJmZGsXyE8uH33yyUfjYXv99ma7P9wMcN6+c7cnB1cgVQA++Q6ATnw/rXAWTN5PpUKdOWetEiFaI8K7lwjhCda7bzfU49IYc6+KORE9EI5q/Tqpy0/cDDLMYibKFREZw9Ym0giiTN1yOZ+fGXZ3b9dJ6Yl17W6XQuy6ZnG26hbzbj5fLBYwnxljfNUjmAxsBQEOh0NJOecIRUQkp3To+zCOfegZEDKd+n9EhITecilFS0pRM+JEMWWuH5yWKxGBEhqTUio8taZUFaRUQo0QTdQhxcq9RAZELpqPw3akpEfbVZIc6+3Kx6Gjo4bndAbVu1pzCgDY7baHwyGMiZnJGAUga7xvbdsYY0xjEDFJLilhKlREYOKmllLKcfin7riCaKxlZ8kasoaq9p+zdQiSnTWmMnu0TiwwMxLFnAS0cTbkdLfd/MXnX97tcgFgY0kZja1D+ippIvvexzy+Hwg9WqfCsUk8aXUCWGtTGlTVGCMplxRBupQSKFUJzQJ6eXn59Vfb2eJsvV6XUv7ws8/+p//xX2jJj87P7r78UhcLAFkul2/fXn348SfL5XwYBtGMpDXsHfbDer1dzhZv3rwZY/Lev379+qMPP/z8V79er9c5xyeXF41/olL6Qz8McBh63zau8SmXLAVytoj7vhfJZ2dnAPDNN990swYR+91eUg790PlGc4klaknOgOS0320QzflqmXS73++7ruv7PqdkjYGjth+TLbXaK4qERCoFMhBjUaGCQGXyagDk2u87ClVPpzsRVfbm8Zy/Z+9PUeN4tH8/HJ5i3t9/8DMxCDMaA2KUBKsyoxplQWRIBFDbP1wzgKM1xZRxw4OIMuVuUpD4HVzxN4T93wkWnEpWgBCCJ1s3JBY+xLi9W3dN+/r1Vde0XddV2+Wvf/0NHqe+WNQYow4BYIwppZQzGAM1HSOi+Xxeyn659IjqvT2O2miVddjv9zmVruvqkB8RNY2t53491r330kUBIJDLx3bedu2880eZmHpEHTY7d37edL7ebe9M1Zm1BnMuVSKIteSxr+1DLLHkLCKWyRlKKe222+1uPwyjbWe+W/S7w802bPZAFpoWxmhHKEUBlStUwYyWsI/j2+s3jH/0i1/84j//z/50s775v//f/tvbtwewAEqo1RAVAIAUToAbIAKyYhFAASzvItWVHPTwzH033smknlPpIDhZ1h1bRXAUnNSaEcMPLqdqFE6kovVAL6VAmtaqtVYMuVm7JHbsu/mZJWf8DG1ThwtTSt6abtagtRm1ELAzaC15x8YwEiJW525yVnPJJWqRGtHBGTO4tB4TAComKiRkylShWlvlMEVLmXqTNepgdYGttAEgIFJCUYaCgiqp/nwRqTPsxjX1e09dgPqWle4xJX2Q9knOKiiaVVCh1FH3nOTQ7+q/EBo2yGSRVAW7FodDn4o45wSwFDXWwpzGcaxODsC14FIiMERZtM7CFlU9Rr2cs1RpZGPYmBr22FlrrbGOmQtM1kLV0p0BclG2FpAkCRl2zvVhvL29fX11kwEYiU1DkgCplFJh8FOHCR9cp2rv9Im6upgpFjglGXrkBpcYUUtJka1z1p+EKdi4yu08HA4/+/jDxexf9P3+g88++8vPf51CBJHnT5+8+ld/3u933tn5vOv7/nA4XJxfWmsRoCoozefzN9dXf/jzn//qV7/89NNP
L59e/MVf/PlHP3kRx9B4N5/Pz86Wd3fblFJKyRpXs7p6Rqlqba9UtLxKWl+9ev3kyZM3b9589tlnm81mtVod9uvLR+dvr97Mzy78vAlxePHs6ZffvvTOFe9VtWkaKDKMiRAWi8U+jgqowiJwFGqlQswy9fZwUvYuVclFHtzdk5Lz9875h+f/fYz4fsCDHwh+f7chsP4WM4TELFbZFmZGZSQLlY8CigCEjEwASoVpeo88mYfVCycnyUnbDBFV7mempGScRM7wxPfEChE/IIHipO3yt0b/ee/4Oz2qLCWlBIKlFGRHzIYYClYO/5PLiy+++Ori4mK33yNRPww1Fz7R2+CUKDGPQxCBtmVEvLu7Q+Dz83MpwVmbc5SSgLFrFyB6/eZ1HVMzxjRN41yTs1ScaTYzpRRnebFYNE2jWrIUbLwx6Kz1lhs3tYUmGk6GWdc03tZ+kmFkUAC1VfsDSk45j/VkzKoKgt57Y0wMw83NXb8/FM1EhlwzjHEfxzEJOGgWMGS4G0DJJgipkvMFVRVyAY7zxmz6w+tX3714/vjy8gNmYusGOUwx71jqVZyTAIoQ4P3clIAW1QLTyGN9RxXnPCrmwBHUPAY8nRjSlaL53g6ZiJ6IRUrt6glVtq3SiTNZMQkmQhKB2pObfhQCGTZqZ+erZi5hTCkUYQPGnz+brx49vrm56feHEEZAUS0JJEkRnIxfS82E6o8CAkS0ytawGi2SSySDxrEsZjqvlu6Tz18pJRcRKUOYJkaMMdUHh5FYBPv98VCBmk3bwMw865qJx4+aQYoWUGAAzJHq3RA5QSgFgK253wVHJBgASs6SNZeYkxRJMeQa9uqfuUQpIJrrnyWrsQpFqlFbSjnE7JpuuTwzjp1zrrHGEDEis3VsrRUD1hhVlZRTSvUtp5IVtCKZpvXoDDpD3hrvDTsSqaJlIlJVQhEx5Sk3QSayBg33w/D6zZvrm00GYOQx5ZASEEHJgsXhj50DkwpMFVk8hb16q2rz4kRns9aWAUpJQGQcTrMxWUSk67rtdu8NXl9ff/rh86ePn1y9efPzn/70008+3h32IYRnz54tl58jIjF6Z1Oy+/3+g+c/qUktM8dcZovl66svQkpN09zc3Pyjf/SP/un/8N+nlNiQSJ7Pu8vLi5zj7jBut9vZfGGdRaDYp5oTpxx2/aGxbj6fbzZ3i8WiGo/sNjL2Q47JsnFdG4b+sNueP37Seff6dgPGPn5yOcZSh3Rqr7zeWiJGpImNIViEUAhRaQJdUKrQ8STPUoFPuY97yHqUaJdSJ3fr9Fo9zP/DEl2/q4LPhJCYRUSKUcsohlTFMhrWyjpHRaVq/EGVzHL/oo/FbGX4iMipDfNwmL9+wYN5vt+lJ29NjWOYiBVN04iIZL2+vq4ki7r6Qwg/+clPanr1sM8vIpXV0s7mwUTvy2w2M8b0fc9km6axBjabTeV3LZfz+aLbbQ9XV1ftbG6MYTKIXHta9ZVUc/FZ11xcXHhvh7EfhkEkf/DiWR3TabyvwsH1t58tL7yxgpBDTCmVkAWRVPb9XkrSo1AOg3pnnXMKrpQShnF/6Ic+5Jyr303KcghpP0JQEOOQDYRUJCWhICQqRhkFJOVcCkI+uziPY7+9u/3Lv/jzr7/89Zs3b7787o4Appg3KeNPDteTBtQErIFUk1fQopOjb50Vq2fNuyPq3w9+k3DV6cR6MCd635qq0VFEThoux84R1QLoVGIm5mnLMgNABnWztl0sU5QUEZQNOmjweTfbbreH3XYY+83mZn/YjnHMqIvWCwAzk3Dd2AxIAEAIgAZRWakAM/siAEArewo51e1PUpZcdpsNDWNKiZHq5smqApIOmykbrIIsABUY1TI3VS5NjlJ5ohkRmOp9PqmrCBRQAgaYyOg0zUyCAFCKUnlA90OTIiDiarNPjYCoWiUVISF5e3sFACGku7v1zdu7/TBa433bLBaLruvmq/l80XVdM5t18+WsbVttmZxX1ZJySfk0cWSM
Md41s66ddbZxxGysNd6RVMFxjDnpyRYAQBCKCNHUkU0p3dzcfPXVV3e7nQIoccq1RGTFzMyo+QSvPVxK00l1j3NOfy2lMEGtq5xzmvI4jhers8SYY6rAOIg654rivh8fP3781W7bNO1ut9tutx988Pxf/6ur29vbn//85//6X//r7Xpzdnb25PIxGYpZ+n7ftl1Ku5xjKdp1XT03nHNnZ2evX189ff7s9evXH3/04bNnT9+8eXN+tlot5tba5XLZ930/hqEPxjrnHOjEzqvgRAiBFBaLxetXb25ubp5ePh6G4fLy8vXr123b7na7RwsfU3r27Mnt26sxp0fnT9+u786eftA2nHMOY8g5N23jnEtx7PveeP+gMq5rSQFUBZVABYFQRRWhTqnI0RLgdLCfbIEREUnfm9t+ML32A6Xe6XE8+OCH49/fVqSYqr0xRGYWMaUUYRRhFQRDKjXsCQgKCRHVmSpj7hmSNFnuVpCqnks11Mtpq9Mx4X8v2j18838Xwe/7t/j4bJGZQ9g75zRK0/kYYwny5s2bnPOrV68q6O9d+/HHH9/d3Z0O39qV4QcsAGZ2biI4dF1njffeI4Rx7GPU5dJVLZUqy5RSmigVIopgrF+uPCK+eP7UGGNtFbA2iI9iHIfx0LbtlOwTn7QlEbTzvuSsRQwToS0xgAIzlhxLiohoHeMkRJFK0pDlcDgcdruUA4Ma62PJIYb9EAXJNSYXHKKMJReybtbuN5SB61AH1c4SAIi+vLo96+z52bLSpl++fs0M3pt9AlQ4Wg8c7zQA4PvLdyr4ikANG0T19Mn5/nB8N/jd/60KjMFxnAuPo9anmdnTmhRQQDCVx/8A6MNjGWGMQcPAU6stEoBh17R+ZqSYFCX2eRzDanGmTO2s6ftDKuN6e3e33xYSXKxSKvV1uTqQPlVTU2vSGNP4DgAk5ZxzYmusbZx3zhmiUkpJWVJeHM7DMEpIjMjMNZYBQOq39bZIyjFGKQkVGDCOfUGqEmUl5ZSSFlFVZSQ0xIDAFZYsUO5niKqQpmRVFBQVVOWTZ5O1tvFNfRbVN/Ve5OyYIM7mLmd5e3179fpqtz2MAUbs1+t+fXvnnGtmvm39YjG7fPzo+fOnfHEG6C3xKcFtnKuULXbcNM18uZgtFmxZQNEaNAwRmJkMY6JSWeI4Sc+UUrQOjKnu9vtvX738/MsvxpgAkNhCzkBkrY2amBFLrtVHOS2k+kfFP4/ki9OxkJJ2jnKWEMJsthxjGobRPX6SjQ0hVAZAjYjI9nA4PPvogzevX1ZmzW63++jF81//6pd3t28/+4M/UtWcQs6xaZp+HGo2RkRN01xdXc3ny4uLy6urK0TcbvePHj169d3L1ThT1evr68vLy/Xt29evXzvzARskNOfn5wK42e6yyhgzSjDGFJUxhrZx9ba0bbtazapwTUrp0fnZ9fW1cy6ldPP26vLx08bbr757mYmfvvjEj3G/WXfLM+99nVeuu0DFxBgMOwBA0pOFTt3CSiQgPDXST011FJnEfx7UzQ9C1zsnPJ5aq7+Z5PLeB38Pl6lz/giqanDqzzOBqCKoIGqt9pj5+AYmp7r7cHIExx8QOB/Mw9L9u3ov5n3/FiDi3/Vbr2l+hchTGI0xfT/kWOpiev3yjfdtCKFr54vF4uXLl3DUeKzhzTnnvS+lbDYbQq5JnPWuhj0iGocQQmCG+bxDxM1mU3uEt+uNNc5aS8RsXNM0bdtaaw1j1ah9+/atYTo7OyPWUkrf72vMK8eTfTqt1NV6dD6ft85GQimJmc9XyxCC5FiZLNvdtj8ccs63G2nbtm0soQnjPueMhp1tvNeonKuWomFCTIJFRdAqGkZh6wgKM3MRRlhY+MmL5967fr9/c319c7PJAqnPaMzpmdWu3ntGn6cegEzKk/efOpV0p4KvUlreJ7aAiuip2iO6xxgeJjencvx05j5cXzVlUbifxa48xvOLC1VMOUsphtk1ntApmZAiGzebtauLM0Tt
x8N+v0OmcRwBwFp2Rzc7nsjxIrkogXOuspDSGMZxLMYa733beu8JsKQcMADA/GzVti2Idr5pfUNEXN/LuCul5BCHYRj6PgxjKQlV4ziY+otySTiCliJFVEIUZkEwbIiQlBQnxjWpalEsUkBUSq6FMFOLSMDknOu6bjabVRUFqE7cMVZk8oRPHiKnVIhvSymIMGuJjE0pXVxc5JyHcTjstrvtuuTUtbZrvWst1N4bs7e2bhlmRoNd17WzWdu2YDDnXA9araJuTABAAoiYVEopCii1YU4kItVh9epqlxLDVAseGyJFM6ifGE1HhOFU4f1QRq2qBcAYE2JMKVfgIUaYEtMhVjxGgWZNw5Y3m019IzGOvnV938/n89Vqtd1ut9stIrZNW3dBCGGxOl+tVv0QnHNvXt217ezsfPnq1Wvn3PX1zdnZkoi+/PLLf/Dzn9/e3j67PP/000+/+fqrzfbOGLOYr1arc+MbYrPe7odhYEiLxaK6Lsy6xhhTYgKAZ8+eDcOw3W5Xq9U4jvP5XFW990M4IOIXX/x6MV+dn5+9fv2yXV4MScZxnM2Xc7cchqHaUDNT13XCFgAA8wlYB6UHU2TTx98LTg936IP56WOH773a5j8I7P19g5yb5FnIARsAC+gALBUDaoWskkW0aiw1Fq1jy8haCtdhDmAUrUeJCCMJ12EpIFE9tlGoNiAAVIEEFI+z7YSVaKdEJNNNRwCgH3HZPnXs9ShD/NesER9mr0QEITXWrZouh3gYRkPUOL/ebG5ubmpo2W63jx49+uijj169eqWqY0mC2g/9itk6MFDOFp40GGpEQICIrEiWYoqD3TBqQjbdYtmdnT+OMYcoIWpI+7abEZFxfjabedfW/pb3HvOoObFSZ2cx5t1d33b2bHFWpM85xtTnJFqQqWVyhrykjYSgwGnIBy0AlNUOm9D42Tjwbld2u12/X6c8GpO9tz0RGrMbRiJ6dPmcct5s95ZsNIhscz8ctgchOJ+fRS13t7tLOwypjANgn+35IzSLdb8BkPOZf/Hhs//yD3/Bo/zz200gukbZZQAARSh1rOXoJI4ArvrMT3vEZIMCOAI+YVLirLkh6jxrOYyHbTlbZaWsbAsXYgSxIAzZShziHBiMMUBVWZqECIiMo5QSkCVbUompmNYxY2HosNQZ5+rkRkhVo9KqqCoa49pmJgV67WOMsitEZJGZGbVo6hnQNRrHFGPMatq2/cnTJw7h6s3bw+Eg5497gH0CuRsuHy//+Bd/PJvNXr18mXM8bLYGyk/On3aPH0PJ4+1NLPuRE3XePr10XTvGpLnoGPZ3a4iiSI1tdLmI3iWVbMk1HjetNUZSKfuDSeKMKQp9DISZnFMjZQx5v9UxwDhKiHqbBxBubXu2NAuXRYoko1jGwQtaIcoSY5Zc2DjnXH9BmrLm4gQtMjqHbWua5jAOxtnDdnP39oaUWf24O4x9vkW36df/9vWbV4fd6smjRdMx0dliefnobAijYE4qt5u3b/oD9G+NefRcY4gHms3aZRMC9n0vgLbz8/l8Npu13RyYCaltOkQWEds1WUuQlLEkTghiJHtLuunPmlWOSH6+J/tvv/jyv//l1SszHykCccmBGVBFY/K1BM6gbAVNgqRFDIoFQc0FLBtGkFIKpWyMMSTFgCYYU1QCMbDeb5cXq/3VNS/mu5fp/Mnj/X5fSmocpf3d2fkqef3i3//LP/nFL3755/9WCl5d3xyivvjZP/jn//yff/fmu08+++TP/uzPPvn0427WpNyill989vN/9e/+bePd5ZPlN999e3Z5blpaXM6bvf3m81cvXrx4/eqbb7+5+ejDF6qz69tX5+cvbu/2ZysHwCjaWXq8nDmIfd9vh7Lf77uu6bpuokqB7PrdTz/6eL1e361vvrt6+Ys/+MPL5fzP/uzPTOtnZvb6y5uf/PST292h4dnVdru4cI0zQ4z7YaSYjHFN1yBOnCmpEvBSwxUgIGshBcxIpo6rFVA96gxS
YUNIhKxIRVUK2Afay0f5loxIU69D7Almq6q0U5L6zqyTPNSZ/7uIc+8FCzOO0RhStVrFhWpizgqGVBlgIkVDxS4LsKkE5TpKDABQ+Wz4ANisWcOppXfiT9HU9qtyCfzgfeoPvrjvv+53OFp/jYD3ENOvL6LCFDWFrHWbqp6S3No6rj4JehQTAYCUUh/GDl39UfUrRUoWMdY4ZuOdc84aG9P0i2ruXH9XJWJVdL6O8VUgK4TQOHJkFG3jYUyxxJQklP2+yMAMhh2T5qwlSsoJSjYoMafcp5ByLFkUs9D+EHO6jUH2+z4MI1LpGnbeAOg49tZaIqwanqWUmAoRDSlB9eNuTBHNKhXBFjmBQlPKxkiplH435pzHPvxX//h/9b/93/3v/0//1//L//n/8f/S3x6clmPHRd8b3ZvUkPVYAmLJWljA3C+Ad+axHgilnjQQTo/mZOBXv/Rh4onHWbG6EnLOlZF4/zOZEdGyA4BxDKUUg8Y5t1wujTFvQnj06NFyucxZjLOI2M1nLz744PbmWnMBTabx7J0B25UFWTMcbo0x1jvfdmiTFjHG5FgyDDmWyZ7JcLU3R8PWuxRSydm6BowWBSQ8m3d+2RlnFSWmUIahhDEdhjD0cEFg2MycX8yKoywJST0ZiFEOcdgfyphR1LJzjbfWyQedplyGEA9DGYKkDIhJymy5EAQbBtt4LJKH0KcQYxCAw+EwjqP3/vHjx5er87ZpLlZnzpKA2ob9rBtTf7NZCyYAOJn5wYkwYl3btm3bNk1TFZOPzYJKwiSqgq2AE4NCUVWt9dVnr0jZbodXr69ev359d7cHcD+4rrg+4iPj9wSDHzGCSeeFjlbAhoEIDEKdpM05E03SJxVNiTHOWl8Xatu2CBJCqK8/5/zFF198+umnE4na2rZtq2aTtbYIlJLOV2f7fpjNZtvt9s2bN5988snd3d3jx4+/fbtNKTRNk3Pu+z6e5fl8vrm7LTkPw7BeQ57PFjO/XC6JARETsB6nfdQYRDSGnXPffvvtxx9/vN7czmazSjH92c9+9vr169lFK5pPUu/VbKQuckUiMnXSB46CHtMk8WSVVUGRd6S9H5yn79PKjqSVyWP1vrbW35nrwF/zMvs+OqZSVAqbDMWgWDEMYE0pqkXVAjCIgCoag6SZmRWMgLIAMydSQcCqIAS1GgMAQFJUhXLyukOAabAPYNLgOCrK31fE+iMh8OFhd/oA7vUCfuB6r4l96sHGEBCxAvfDMGgq+/2+7/txHEWkbf1sNvPeVwp7CMEYk1IahsEQkDkpiUDOORVFmmJYSkkFjSFrue6H6kbrvWuahoittRVTUuQYY4oRAMahGLLEnpGLSsklSZISu5lFA8goRVNJ4xDGWDTj+dKnDIKieRxiEOWcdLMdSuaSYbc7HA6ha2E2WylCGPvqDmOtq+eOiMQ0UR8VqTJLxxBTSopgvYtjhuOSl1wsG88uhtR09puvvvqwXQzD8NV3/+5f/pt/OwKA+a35xmVCO0kAS9aM5YSnETIQHxFdKKBF8RT2Hq4KfMA4Z2adussqIknSacr4SGPBUzZ61FmeVMRUNagQnnqNCABMbIxBZAEtRXLOwDRfLq21bme+3a4lZyIypkqcJCV0s3aWFlklp0EtZ0tkyODMe2vinqxFa8k3zjkQNdaXgoNSgV4VhBidN4YKIxk2TTPGlLRY4wiQBFzbzFZnPHPAoKo+R5jNJYbRb/u9M6133ttFw60NVFIpzpC3jksJmx0RBQ4GjfcNO0topHHoQY1XQUmCQOisafz5k8dDGHchmG7EIkWAfNOwzTGmlFT14uLiJz/5ybNHj7u2PV+ujMWUUoFkGs/NxZMXz2MZYs6OjXGuKjzUnLJaH7fd3DeN956IgMykHo7IQqpCk1dh1TKHAujbBjKTcYnoZr356ttv3t7chAR1PTw0Pj0hchVuO8kanNLuKvEDAFRVUqUAlKq1bVF2PQBACMEYu9sdLmazu7u72Wy23+/r
uqn2s4YxhDCfz0spaRw///zzP/mTP1mtVsMwMPN8Pk8ptW07jmM/hJzzYrFg6zbb3Wq12q5vW//zwwGXy+Vs1g7D4Lw/7Pf7/X4YhuVy+ebVy/PV0rDGGLfbYmh+kp92zh29Sk6BhFSxgquXl5c3Nzc3NzePHj06Pz9/9epVLhGQx3GMMaQcqmcvpmScV2JAPsY8M5nw5ipMVu+YEpESIJlpXA8QgVUz1g6FIgGpHrn6DwawT6jmsS2ByPfWK8fN+/cU1f6Dl+mHmI0tgCpo3SQNZUhAxQmBqmrRaTYaVYhNOnZijDDAsXtHdWbxwTsjrRYN79g1ENFpwr1KD9YVe2J+KrxD+PkNLdAf/OsPfvF7EPM4jsw251yd6iTmzWZzqvbatp3P5zUi1qG6KoIVYyyNc84hgzFGUqmteOc9AMQYsxRnoZ3ZpmmITCkl51glqtu2ZTbVvGYcYiy57/sqzTXsD7XXzmSdc845NsIkN3dXxEKE3nrrOt/NZosGwdxcfRdjFFZV6UMUwSGUu7s9QMNs+1D2PSiBICFTFLWWAaBK7J84HdXluSiyaGXZxFTIGu+bHAVgGkgosVBrLBvMgFkvzh49e/oipvL/+1f/5i++fYkMaAnib7fgpBZ8NXQhiEhKKYQwE8GjlAYAFZ1krb//E07pzomxWdnV9WibnBEfqNMdVbimbXmKeTXspYJH199qCwCIysylaNM0hkwIgckaYyxzznkpZewPr74bgMzZxbmqphSGMbpZ6/IoYwmkg2ZFK41RBjdrwdgMKKAFgdmQI9fllBLlknNWQ2CZvBMQqCImzpeYYoiWuG1nfrnkxQxQMxRRBWJyWA1MlcbCWCyxISVSBmQURjWkgOQb181SQSmaibKoas5jsWRRShINIM77djWfLZbt08e43dF+4+KcQE3TmtajkLx5U4/1JxePnj17djZbVLMJ7x0ZE7KWUgzy8mxFdiUieuhPTCUAcG5qY3vvpybfpCBAD+kzKKpFoIhIFZiZpKPIupjyN69effntd7tenIXx3e1OD7hTpz4TPiDuTvygI5+FCBQmniEAEBmFXIv+pmmGYbDdMsa4Wq1O50+Mcda01tqc83K5PBwOw34vIjc3Nx9++OGbl7+uZgjjONb9O4ZUtZ+ePXl8dXV1fvEo5/z27dvzR4/Hcby4uHj16pX3M2QcQ9jtNuerF9ZadtYbrJZeux1aa1XEWuserNsaxUMIKaXZbHZ1dfXhRx+s1+ur12+Wy+Xt7e3jx4+H7c5YF8IhlzKOIzczQvXek7UFDABW52AE5io1fuK9AhDWDQXEApoRGCBP0ktaz21SzffjeoLIWP23jp0sfEBJ+1uwWf07uswYQKWyTSUrlSIiVBiIQFUE7msyRRAQglz9GRWBax5dVKHKeFc9HzAC+MCQEO69FxCAVLTyXCsyCogw3bU6wfcfDngP+6U/Vu29J935EO0spRBVyWDOOY99X2lslT9WSSs0uW9PAwyV8B1j9N5ba13jRaMxzvrWujakeBgSKllrmYphBtAcUw1shKgiIYecJeYsAiGEcYx1Ke8Ph1IEBIkq9aFtWnYGHj1+Mo77YRj2/SCHKGkbxhJCBs2IaBsLALtxLBkOQ3p7J2Hs2wZSgkMGzlDQkPdWso7pJEasAMTsnAshWGtRIJV6FuacwVnDzGycMQkdikKM0TjfsGOA/T7/8R/+cQjhn/2zf/Z2vWGEsfxNRktL9dQRAEAiqpr94ziWUowp9wi2Uu3OnVR+TmD46bkfUQQqUk68GED73mqB6bzD08851Xwi0ljnnDONBUWIAKBAqAiVIOBdw8aUasVsTNN1H58tb27uNtutYCqLxXEcV4z3tmsyFrUcSIEVAAWpWS5VMCmMucjkt8DkvGlaHlIBEMPgLBirWARJjPGLxZjzentLRM3yzM46JUTvCYqWBEpMgNa4nMeYMTA6D86hN9aTY0YCEmCHbL0gjVn6fiwA3jtjXHPmLdk8DkMolJNp
uu7ifLY8A++o6/xy2SA3hvOY2ZgUckmZAC9WZy+ePlsulwZZRcYUS5+7rjtfnAshOnSNR6NFBQHGcYxjKirGeWtt23XGWtt4thbJAkD1UlAFAZWSRYrkAqWASJ2CBMBQgNAUxNd3d7/8q8+//ua7rGAJMd/v94ftoWr1pcrMjEf64eTIUYdpJg9RpAd9pXp6lKw5l+Xc1tnKChvUTgQS1U4HgjJzhUBzzs+ePfvLv/zLP/3TP/3uq1/uthtj7DiOi7arzJcQQtPNU0rL5dIwzefzYRgeAQyHw8X58tXVKwXx3oOU3W5Xo+x+v6POE03+JH0/zrv24vxy3fc1zqlqiqWUgqTMPG+bu7u7Z8+ePb58uttsb25uZrPZixcvvrq5FoIx9IpmGA6OrIjMu3lSZqwHM4NWtBMRsTH4DqdsUp+ehvkAufprqSpohfWmEHhfUCCr1Lndqpg06RSqKCAoPfQPeOfo/h1eJiZQ0eqTbLNkS6UUaxBASsFSvWSMURXRXASAsyUtik7EGFZVBVtNKyrawEZPsi3wAJM8nj8IoCCoNHkSEZ2gzjoU8Y449Y8lCH+dG/feN56+pSJai8VCYqnZnIj0fT+OoeIldeXtdrvNZlPh75iTIR5jtONYofwYshKzNWS4JE0pAVWOaJzQtjQlfSGEFHMqOcYcUimlxJhrEWmtzUDABhAFOGQoh2GMQFxeXr9EKMzU+cbZThWzSEqpHwbvPTpXVIZRQpR+KPsBUgEJE0+SDZBr2HsjhTGWUqY6ktkY07Z4GAdK2RmTVFU1awU/ak/FWmsNcogaxhHbzjnHiE/O58bYw3Z3e33zxVdfDQrtstkNo/ktDaRigZhLLLmoJSIESSn1h11KoZZfD2p9BKVTIxaPs6EPo9dEvdMMR8pSHVp40CM8xcf7dmDtttZfZ1zjnKu9KBRFLIgEIMbQKXMqklQQEWezmWlbq9g4G1I2pYz7XTlbeutIofbw0BhlUjaKUlD9bB5jzKAJQES5zvMaQ85h4wwh+waMVSapctLZWGvtGMRsKgserSugbFCVCwgoEAKyqmXTuJDFWAZnwDnwTJYAVHIBYGCDgxtUNyE0DTvv/XxBDVm2qkrWovembe1shl0TUxJn3GrZEjfWxsN4u7673azX67WInJ2dnZ+fG2NUwbvGMLvGdF3nO6sIGTMikiHLrs4mVlDBWn8s9dqmaZh5suGlCX6EI00aRECQJvFxIDLWtGDcEOHr169/9esv7vYJqjvAD10TkF0zkLpwVEXvOb08Ze9HSK5qtCKmVBoDKeWcp7VRTU5ijNXNzjamHg6LeVffVNd1KeXVavXy5ctaxW6328vHT0SkWoy1rd/v92eP3Ndfffv8+YtXr14R0Ww2227X9S3P53NUsdZqwVTyerudLxfb3V0uzhEymyyl73tjqGva+XxujKnIU8o552wsWWv7vmfmb7755mc/+1kY+88///zi4iKEwIylpGHobTtLYUQzpBCX575EQSBGAnLVzA8REbjA/YD1FNtAtABUgnId41MDWLt9qMKKOPmy1vG+qkmmqCBa/eoUHiap3z/GT02H39VlYoKSJRdMsRgLjeOc0VssotGgL1QKZluSgBVxWdWkYtQAipAR8AoARQVFVBStsioLAwNaRZVKOgCtjE5QAKlGMSQGKoFZj67bk43Yj842/FZv7GET6KQDCVOpR6o6m80247pingCQUhIBZ0xVyKyOCn3fs3FSCRdsRCSktACYz+cxS0rlFN5CisqUpDRQSKVoKaWAKIDGGFOcLEsri7yIpFwQRRQSTOo1UnLJGUDZqGFFKq0n6xyQ6UPoD7vdNsQBumXDvjPNrKSYpA9Fk4ICOA8KrFKMhXaxsG1HhsjYWdOEENr5rFvM6whgOULyzlpX02NGQ46ZxxStbYyzaGzIIYCYnB23hvnZsxfDMHz2wUf/zf/mv/nv/t//41f/5J/I+flufPXbrt6YIMQ8hpRbB0womlKsMmDWTqPr
olVNEsrxWHwwGztdDwOYaD6p2JyiZoU9iQjz9B2nOb+TEQ8iqq/GD4iI4CwK13zXWFuKgKoxRr1PKSGAtZYnou5sGON+HML2cLjb2LbJ3paSVQW52uhI0ZJyceSKRBAtxzkhKgIqxlrn28zWNh6tUWJBQSZqUBVM1ywvzhGRGgdMSByLFJQEopKzCCtkgEzU59Fway1nRim5ggtYBAAsmkyUiQqzWAOtx65NOoJCQRRitJabRq0V4lgi+wa9S4eBAcDYKHK734dhdMacr85roWOIZ8vFrGnbRZNzzhKAECc4gZxzcYhsnG8AESuBy3cz37VsDJGpDNsauItiFmA9za4Ao0EiNghk2DWCbr1Zf/71t198910C8FN5bACAHvq0oIACIquIqky1XDlRpKoy/nQYSykASkSMyszDUM5X7W43lAI5S9M0KQ2r1WoCDA+7WdvVzPXJ40fDMFS5lhCgzp7v9/vz8/O+7501lo0ltsS2tSVlg1TnQAAAVOpERyllu9s8urzYbDaSMgCzwfV6vfjg+Ww20yI5F8PMTCmW7XYfY24XS2s9s3UuHmDb970KSgEhQIQ6QWGtn80WKZWbm7vZvN3t+77fL5su5wwxxmGsPQshJLbMRtHgRJinpKSggioTL0wQTeUnao1NtSwHIyqgSEiV/IiT32p9dveslloI6jRCcw/d6VE67vfhMjGDQShSMoERVBWo/s4IUiYSSqnqD8rCQqBFxZFMY1qIigWAFRVyFQFWVgYopFzj3H3EeqDeCVAlAACgehW9f/1QgvAD12/+d32gvH4iWAJAndurH9dqzznHQF0zOz8/X61WVTHQGMPGxgqLGa5lBxE55y7I7HaHMaRyxAem0/l4hiIqktZvCSHEnABIkYmMaskZclbEFAgRoAjmICmDJWg7aBvOCYzRjowiVFyFmXwHbD37xth2TBKShCipUBZhgZxLUpgx2LZrupY9Ug6L+byCM7PZDEoN/zGmVADIsFVL1U/AuIKQhtD4GXPVIJ2iTpVJPIyH7WH/6y+/+MM//MX/+r/8L/7b/+l/+PW3r2DmoP/rGHzeX0kg5BRzKVK5YaUyWkvKJ90QBRUVQVBhfeeC98JeJW2yMBwrudO08n3Ym2CWd/z5auELAOVBvGSDJCRSBZhUtBBg23UtNP1hSDEiYoNo2JA1M+MNAqim7S71I3c+19dbxBosgAlySikj5pw1k0xc1VIQSsneOLYGCNlZMjaDEpIQsrMlJjJ2dXGOyMbZgsTeIhZCYIECAJxRgJx1xWc4oDWua4vBIY2iQgiGqShZZtfNZqszMbZr581ySd45R411mXiYBzRs2waNzahoHVtTkPowKluHTGwFwRDN2q4yLPbDHh1672fLhfO8O+yLStN0TeeUMJWxDgGb40QjEZF1dVCnGm9OO0WldlJLKZPADCIDKykzo3HENgoESVebzVevXr2+HQWArcvDYN6db6oiQfWDozYQkEJ515vzyG2pvRUlBTYVJ4Cmae7Wgxw5wCWXrutev35trd3EWJfKOIa2bff7fc6567qUYLfbnZ+f393dLZfL3W5njLHOMDOiWucql/LibPnm1cvZYrnf7/eH3c8++/l333232Ww+/fTTw+EwpMCs3jeH/bqUwtYcxj1IXsy6tvE551LSOI7Kpmmaruu6rmOEankWQlhdXtzc3JydLb/88su2aZ49e1alNi4/erw/DCGEGvIllxgGyQWkdpOQ0MBUdiPANGM77SZQREZURCMlAoAAcM0kJvZ+FZ5mqLAn4NQPUETEqSem932lhz2m35OuXr34yfOPi2oqmkWKCgCJQhFR0SKqoiKggJOLpRJB0kl/saK5dMQV8NSfqzxzmtTeKrtqsgojZGJiNlMVPE3y3wu44btMzncx0vf//TfcytPheDofa5s9pWSIAZCRUPGwO8QxfPf1dzc3N87YMMY/+qN/oKqLxfKLL77o+x6R9/t9KeVnn36KACIyjOOz589zLrvd
vh9Ga71xFpGtc918vrASxqGUxMhFpzlrmAZjIBcpokrIlpFEBMTYGEoWtdYxaxFlgqYx84VdLReGKYSUYgpjUYHFYjmm8sknP025fPvyJZuGjN9tRwWICdqmzTmTwsef/ARBLh+fjWGAlBbLZdc2XdcVlWEckTiE0HYdIvXDsO9HZG67uSKEEAnx5z/7g1cv33y77R1QJNXGdKvF3d3NLz77+X/+v/jHgPj/+Zf/87/+5S93OZuzOfTp/cdR1Q5LVtH7NKfeBBGrYh113liDUJIhcNYionHG26ZtO0MEooRgiBCInD05w53YmPUJw31y88Axh/3pNx57eDXO0YnmMAlb1y+oPAvCSVW0rj6ikjIiIWEpKqUAAhEDoh5GA8iE3tiZbz1bSTmHIFn+6pe/evnNN6vF4tmTxwwYxoEEht3BGQsKOWdjLCNJLt5YAzT2AyE6awsAGkbDKSXPWBWztIi1znctO6cIQKCIBUrOWUoiBVJFBMPWzRpljZJjiVrN7USdcSVnQ8YYK6rsfTOfkfMIMvZjLmLQMJumadHawzhm1X4MQz+qqAHOMV29evP1F1+1Mc2alpBU9Wx19vz589XZyjdNypGNca0z1hKjImBlwormUoqCca6dzdquc96zcYIgqlXPRxFiSjGlIlL3pEhhZsuW2SIZReK2++bq7RcvX//T/++/+vZ67SyNMXXtrORYjUQAAEEAlWoSw+ScVREUNYZUpWQxBnMWIgUVKYqo1hoGzDkjgRZpHIcQLIG3REQI9PRiUfsaKSVnTcn5/OL8sNvvd9sXL1589903z549y2mfc64du5//9CfXV1fL1appmlLyOIZ93xMRIM3n8+u3N/NZh8Q5Z5Hy+PElob69uV4uFs65cejHYVTJOaXlatH4hgljiClF51w3mzdtdzj0XdtZZ1NKCLRYLK01IQQpUrlZbdtJlhiS+/8z96c/tmXZfSC21tp7n+HOEfHiTflyqpHFIiVKIpsaKLYaUqMbQs8yYMCNbsNDu/+J9hcD/Q/4gwF/MNyAYbcMGGjZkmxZVEtUSSSLKrKKNSczK/NlvnxTvBdx487nnD2s5Q/rnBMn4r3MKtIiqYOXFzdv3Lhxzrl7r/G3fr/cxRjL0lrjau8vLzfzoxNAt9sd7t5/0xgXAi+OT+om+BCOj06SCIsYS1rdNMaSsSyAAERIhggIUAQFCYFQB9V0GyEAKu8RESEpZwl0nFx92WXYyevLnq0XpKHp/iN3rIavv/b4rC6Yvm6TQodRECgx+CgAHCOLmJREhESMgIAAkvJCccYdKKVTFBUlqIYWVtXC39tIAAA6gck2MFMEbevthAmoJZUYYjg//5p/avjw2ivX2I26v6WdIenm89CBzt+EEM7PzxXcdbnaVlWV5/l6sxmPCuUYe/78ubWZdQ5Ns6sOxtpyPAbCuq5x2veWYkohJpZE3icfEhprMxJGn1IMKoYpIMSAzBKJgUEYEgCD+CZmx5mxWFVNjNFaQpsBwPHRLULrvU9JLF2p9VrbQpMnk5FzriiNSHKZocB96K0zVUi2KArPrMqfiMgi3FEWsaQ61C63Ews14qZpimY/Hy1+7W/++v/uv/1vz95/+A//h//nv/itfwkAxaSol2sjnRROf5MBRMS0Uc5VqVnXdgRIDCxXm4ElGnFKBZ5SYDYEIgwpClC0faJ3HZd7BQIcLHRmpld2wvWOTgut6t2eXIM+oTBq/YyZUNFqElDaBBERgQKCQErAYMkgocU8gTQCE5uxb5rlZvX8pcszhGStzWwWfcyKvMgKROSYKEnwtY+JfcjzPLOOMkd5EalN6xHRGaOdL0gMKTIYRmFiSSwcqV/MSMZROybLqY/wQCRxMEDGmKzI81FJxhmXJebMEiJKTL3UERFZJHVBlqiwWb3bL89erC+WxFBmeZZlRTEaj8fHi+PFYpHneRJGInV1RCTUzm9BavOGFnnYTlWa/g4zt+JTfdbOwgkFuEUKIhoBTGKa2tui
fPjp47OLJSFEYUvucNjrRwIAgXKzCqriAyq0v5uAGujqte8n0am9FrUIN5eH4jlVI1dLQXmZq1JmWZZKeDsej7fb7fHx8XvvvX///n0tFHVJJIpI5ux6vT49PV1eXjK7xdHMex9TnEymKo+nPbliVFJNxWhkCXxdNTE0dQBJ+iMU1lE8ALDW7na7uqmKoiiKQrvvo9GoPhzm8zmiLJfLFKIKXOu4Idksxkjk1uvt/Hjkk6BI5rKQArA4S5gopQTA1trAcRizYsvMqSkJY9uC0V2hQl4Ge3knNfYCAgzSj6ALiEob3zTR/+bkfFbBVwCYQFhQQopRDDIzBAshmZgwT8QMLOgSInMyHCXlFhK383xt1KWAZDbAACIe2SJRYp2xIQIiMKZNCNBdVXu1kUMGRQCHRdFXFCuGz38Wz3fjf7tcvs0bWp4URB2+ybLszp072vJ5/PjxdruNMdbBZ2Xx1oM3iWhU5t7748WRiDQh5nnu6maz27gsO5oeAeF6vbVUWMKIlDjpHmNhZg4BHImzNkXhEEJiRLDWcRQGEYEUFcHfBgdFUWRZljiE0BhjymIUm1Qf9g/e/hJL3O92EhNaJSwGFHDGKoL/6HhuDBZFFqO3RNaStZasQUMgrHIEpGIZ1zthxpgsz4VjYm8LYwpsmlQDGCfFyfQAaXnYfvP3f/fR00+993Vdc0ZUZqJMLddWt37dLTz3xpEAQko+RZYigSiHdJJU13VT1U3TOGPJkiBGYY5AA+pbhbR0kwlXk3naNbnh9nrFjJby25nBqsA+84tauUBQUmdAElXnE2RmCczMIGCMIYPGGFfmnEQEIwuAWEBnjBjKQe7Pj6Z5noNJm4MpEhoQipvtdr3dzE+O77s3ITEBWDKh9ovJ9HJfp6oK1lFKmBLk1jKn6A0YS5RIDJG6DhGOKTKDJEYWArAohpAJnSWDIikCsjWoyowtP45SZrAYY0yeWed8isANAqcUUgzWWgSGGEASANf7XdM0VtCGmHZ7PlQFwHg0yvN8Mp7N5/Ojk5N2OLqq0BrSjrw1aAQAWl1W48gIIBtrTZaTc2CoVaYCYUiplaMQDbakpZqzhMaajKxLYAPIrq7Xh/qH739wdrGxGQraUTlLl2vB2MmBCnTqbgPjkAAApP2uDUgiUNk00Z2FLMLQF8k1YNA5h8iNeGZ3OBxOTk4uLy/bQJC5KIrV5V5n7C4vLx/cu/ed77yvTeL6cBgVRWjqMs8EURl9T49PXr58aUtzNJs/ffo8iYxvFRcXFymlBCkzVrnHycB4NLaE1eGw2+0QuMxdWZYoOukvADiZTJTanqeSWUdERTFyLn9eVYo2F0HnXFVVTUO3bt0SjCJYluPz5XrsQ7Xfu2Ja7Q+Lozs+CHB0zgFyTF5z3+CpTUGIsGvLqZQjgOZ1rLmd1vU0gelNqDb/Oms8eNRdBDfBLMMAFK4Hr3+a8E4bJZEgIrbDhwDEkkBEQrQYk6TEKRlmiAxZQuKYLDCgsEkKR2npEbgVociS8rFoXcuoXhNKr97QVj71r3Y3RNlb+uALB2M3w5tyw+H9LHeqL3VeJb9dod8YowAzxZ4BgI6jEtGLFy/2+72IrFbb2Wy8WCystWfPny6Xy/l0dnp6er68FIuubowxPsUQgsszFXfFdjbfGGMcQ0JiBm4iAAgjIwCaVleGOSUQFYhGtkTWYZYZa81isWDmw3YjiUejaUbFut41dSyKfLVaH3Y7kGSJAkQSCCyOoPbeABzNppYgczaGrSUu3EiRinrtKSVBDJyufIDpmluGiqJgrhDFOGpYDgncDKZ3jqd3j6PD//Pf/b988NvfmrnRm28+eLxch4rHo6zCBNdXMw2HRq5/F4gYAUKSkDgKd3dAwQVVVe+rqspd4YwhsgQ6yNX2gYbbo6d0GXq+FhPYva1niL3WG3zlp3BVSGcE07KKIqrAIrJ0bYwuOMgzZI4xYRQRFoCWF8vZ24vjEziKwiAggWMTmhjPHn/y4uXFvTcf
nMwXNnPO5mWZAUvcHS4eP4vCD96iMpv7Q+IAZJzEwBxD46NPZZ5jXrTOAhkJUBKhGBFDZFI3hCWiptyQOHQCiZmdRRCOoWmqmHzI8tIaFDFp7y1iSCwxERkJ0XsGhBSDYTCB96vN8vmLy6dnzvOdyWJkYDweTyfz2WzWDvakxCDOWQYgQ9iSiouSOkmnmaCrrm+mtrOVwszcaz+ICGjUwGSQnMvIZokhxlTH9N0f/ujR02eBAUAKpwziFAdZ+1WXpc/Wh3PD7StXFqYlLo8q+wjq9oZaMSklgDEzK9+6XpX3XluDq9Xqi1946+LiIsuyk5Niv9/fvn37/Pz87t276+1W/3pKqSgK5sjM40lZL5sYPQMpFsZae3R0pOQYVVO3qluEZVnu97vkg/6tIsu1LK+JpkIQmqbZbDZlWeqE1Xw+18mfk5OT+XTy0UcfXV5enpycjEaT4NPx8cnjpy+zLK8rP5q65XL59he+akwTY8xGuSCE4I2zZK4sLbVur5VQIGXjVDVixDbEQQQJbajRbnTpKEZ0poGG0BXpdGZfdX6vWu4/XiL4x3OWNnIgMFqfRSQQrV+Aj0mn8YFb5RcWiQmMiE2ShCFnEDSABgUFA4gway0TrGArvKRhp+J+2uyCGRAxRUEEauGdbdUCELnL9l6tZcEfMdXrPejQ2CkwQgsaxmS6ORXSooWF8/PzPM+Vo4GIJpNyPB6Ts7dPb3/88EMRScJHJ8e7QxUDj0ajycSv97vNZjOdz6bzucRLLeHpSrIWEUkEsywCQIwxta/bkKIKs4i0tEDO2dzZojBZhonD4RBCCEWZ5db6upaUFrOyqfbb9crXB6vyCAKGEJLYjABkWhaT0dhSdIScmFBUzLq3PoioY3zCLYSuT4zQGEMUm0oguMImAhzB/S++NX3rnp2PosBvfft3pyk9/fg9SFA4GBM22x2Ru3mTAXX/t6i961VHAYgMTSd8Iy19gc4y1vWhavJRkelQkRHBXvWpN3adnWoxRDQ4huXQQV+hFSqCq77gVUY49M7qifVTSLd75rCDk2pv2FgjgGJQAEUQGYFBUgwplWWJ1vgQfPTAyInBx4mxW6RMxIYIzBFjiCnsDx8/fvzBjz8oiuL24mh+fISSfODc5ZHQ+1Dvtk3laTyejMZgiIMHi908FFoBw4CcIAaJAazRYBIFkRMCAycJ3jChGALVrQVgSdFDDMZaiIF9Q0Zp5YNxNh4OBqDZbH7yox99+oc/MT49uHXn3ultkDidTifTuc6xee9Z1UisIWoVSZGYmSmRiCQjKAbRmCy3zhERoGFBQUnCkZVCO7EwIzGKYEJDxEaLEAAUvN8eqmcvXvyP/+w3l6u1y82hShnBbrURYGqh9wMSRB1ekHadkPKHdCkIdQamlzHRHQesnCSEKLra1CZoBNwOF8WoILjcurIsq6rS4aWqqt59991PP/307bffXp09/drXvrpcrVSSJcaoQFAiGo1Gj588y7Ks9nG3250cLXJnp5Nb+/2+rg+IoMQRKcVRXsSYH3yoqooEeNxqycYkTRNms8V4PF6v13Xd6GZNKZ0cn+52O++9tXYymU0ms+Xycrlcnt7+QpHbxsfZdNFUfnY8jz5sLjfATIAckyWTDDfB28xEiW1RRMi07FkqmyyACAgkFpCRUFWkEXsSSumLmSpo1fYrAK9kx1i/pps78d+EUqcVSYwgwoAWgU3n+YA5MiGyeEwCzJxE8ghOonVJtAqUddrybJg5b3VoTcvwJiSWwKR+gSplW6eygKj0pAB9MawNurtl3RNtfJbng585Rhhme0ioDTwLV1pC9+/fj01AxN1uV9e1iGiG9ODBAyVwKcaj8XRSluV0Om1diKGyLGcz3jf1YV9lo+Ikz1OTUgrMjKJLhAxBIiqKLAZoUgIxSGiMiUlSiurzkFpfmGVZZpCALy+W49IWeZ4ZG5pw2O+sye/eube6vNht1iiQOQsiIOwICcUSIsDJ8aLMM2OZMGXOaLeg
9wqtvqvN8jw/NE2MKQkTkXEWrTHGgFDDTQg+z105hpgVt9++b27NPCdI3GD8ha98Yfn42WG/d8bMR6PzzVbo2neBigPpY45X4joiSAKRU+TEICwJwBJBShxCqOvaV3UoMna5saQOZ+DqrtyeDvb2gw2a7fVn0j8naslf+1f6aLRbOdcY7KAriiIaxJbrSiICgM7Eb0NERANKXY2GLIogYwiJY0IRCRGFDdk8K5Jx89GDWTHKJiMT2FeN934vUh8aqHyqq0NTnz95ZozB3NlJiVmWGp+axh/2h+3BCM+nM2cJWCQEFERCUhnMGJJvQt3E4E3mKHMEWtGLCGCEY12RKckYApTEEoP3vqqqMYv4uF9tqsMht84aAyEZNMTy/MnTH33/B5+8/xMT+N3b9984Pp3lIxrZ8XicFyMRCSkJIhqyNjPKMKxyTqSaCaaPQowCJY3BgR6bhg5DeSNEDMAGDRBKggDCyW922+Vq85OHD//wg4/XNbjSGkRnssbXBbgah9y/12wodfoa1tqUEofU1dygb80ys/YRemp7DX+0Th5C2G63ZVkeDoeyLPfbtYxEd40iNuu6XiwWmud98MEH2ojVoVjvfVmWBFIU+ZOnz8qy1Cudz+f+4nK/3x8dHQHAdDq11kKKeZ5DK8bbJObRaIQshz0fmlogjcuR1mb2+/39+/en06mIbDcbRKzr2nt//87d4+NjgbTf71duNZ/PT06OV6vVbre/e/f+br+6e/f+4yfP3/7iV56/XNm8CiGQAQYxFg0gIKPFEBrEDFvJWdL701lnudrQyN2wvyAYRERQXU/Bjn5EuMPjy82E70ae91nW+/Mt+edDVP7obo8EhUXUVREAE6heNolIiiLIiVmSEQkxJiPRRZeiaJlaUlsdYIdgjAghS3vlQiIIEkVEhyKMKpwiAqSWEbU1QG2E3Y7XdNdzI1372VM9eF2xuA/w1c+1bo/bDzTGZCOnTiKEoN3jGOOhrnTJXl5eZlkGGWgB5FBVWVbkeV4UGpp1geFVYqMddAJDlNg5ByJBIifoTa9SnhOpedV0iwAkpRBCNJPcOSMpiSTnnLN5Zs1uvUqhyZ1BQ943HMEa44hV5/BkMQdJhJKitw5SSC1sR0vyr9yfXncNjUUiTpDnzvvaFJkri7ws3CjzlhLAfHH0xvHi/vgUf+Uvvvz0+fd+8N5utUVRAfarHIuopfrL85yZpatnXf1daySklIQTMHMyCAAqa4Us2jWsq2LkAoIBTmiuXF3vTbV40md7HYWuAYDA/Mq+wv4ThiOAvRvW81IHKH3lEhihzVyVskjlaffgCVuKNQfIglaUvRcBgGMCAEfGGGsMOWMdUX5yK6CkumJOqWmiD5jgztERfvHL3vvY1E8ffeLGpZmU/FQOYUeC1f5Q76pquytMvhARQ0KJwZJFYhZOHKOv6uawr1MosDSOdB9DFAQU7QJC5MSH3W6z3SWRERmOiUD2u92Tx582+8NsNBplGaYkIaSmOT87u3z5cjEaf+HOG+/cfWOWFRCSmRRZliGZkCIqT7chIWzFeo328xARRBAMCQCSxY5KQJSCFUQnGdXzKVOYoKAhBu2HtJMqIfDhUC9X6+OT07/0y3/pt7797cMhTsdTYLBgpuWkSX64u6/8HwsikkFjKDM2YQiQAIAImYUIrTUGjSRmSiqX2n+CMaQEmIi428l8nu12u+Pj49XyXHveRGTzXEtBb7755rKule/icDgcHx+vVqv5dLzdbnXMoAlB3Ztye45Go/PlKqW0Wq2I6I49KYosBA3rISuyGH0IweTZeDwmlM1mU1fekFNDFGNcLpfMbIybTCaaj4rAfr+fTqfT6VTpgmez2e3bd2Pk589flMW0LMuq2SIaXzWOTGi8rytrW4VkC2IMWkt17W2WIxGCVj6ISJgTYMsyqXVN3S6tIIEirV4xt134iNKCRa75pNc4v+vFzz/l/M8iIgOTUBJWejVGMerUGSMmEiYGDwmQmW0DgZXB
U4t3wIgOmAiMEUE0BoXQIKKhCACEV9LkalyQANF2hhKGRhM+I4f7Izm8G8fwzmKH3kRE1pqVtGp2n3zyydFsMZ8u1B+Px2Ml5NRhnfl8nlLSx7t3767X68PhYG2WZbbX4dNAbK70H8ggaIwBbG0EARgDlHRMRDoZCkRGIgQWLWIRqM52HI3KIsuZQ2waa7L5fMQe1+u1bxqQZF2OSFUKkJAIrIGUEgOURRGjt5a9T5mDJvnelapXDiEgg7YQ9MIFgZDIGKVNn06ny9VOOBBRRK6CF3ZuXDx5+vTn7z749OmT3FqNWGOdMgeHQedsWFjWonEKQStF/eEsMSYR0N5OH3AbQwCgDHAKHycU4GTdcHTv2kaSTmAWhxXs9GoPT91eGkZC/ZOu7El9N777adevEAGthlmlRgRgAUZOEHUAvcVCt3zH1hg0ACxNbEQEKTrnOLUq3q40KUuFcdWuevv+/awot4f9+Xbd1PVmtfz02VMv1agoJYmv6nE5McYwQjYqaJw5IiEQAVRRWt/4pgkpKsM1EAonAWEgErFkmNnXzfL84mK7jUBuXBIZCbzf7s6ePqur6q03HhzN5pxSFNlttoXLvvalr5yMpvNiZJvkhCbTcZ0BEcXUIj/JGDbIKTlyiKhVCsZr7XMYFFeuvqpusFW/x6t3EhlnLVhkJiaRqKSAf+1v/Dvzuw8+enr2k48eZ1m2u9xRV44e/gnpxEz5yoeZbuKl7+t3hD7t7jciEv1Vt5g6EUZEjFG0x5HnuY4hQlu/Y+fccrn84he/qIDJk5MT7/3xg9vL5fLo6Oji4gIAyrJcb1+WeVGW+dnZ2entu2VZ6qleXFwURaFTvymlw/5gLB5NZlmW1YeqaZrpaFyOR1VVNaEadqMvLy9DCEdHR0XusOPn3Gw2KaXpbHx8fOzrBgAmk8mdO3cePvxwVL78hT/3iz/54OHR0dHZ2dnte29uKx9jzLKSgI0hkpbeKKVku5vQB4iICGAQk7KJac2jy1V0xswARGzfo0ara/C/LsnrLcOfdWnz6jDH09sUgBIYBiNgAEgYQdEpDICCRtAkoZBMHSAaCWIiZUmySJbBRXBRiMklMQrqjK2EESZkIgvoWhwn6ZSkCCWBKCiArIwLDIKqNskWMSPKAZwIAVhAUjepbABICMgt5gjEkMHPOIabpN91Kcl8Nn/27Pl8MgPBUMf1av386dnhUC1mJ2VRfvELX3758mK9XjuXx8irevWlr3zxjXfubQ6re2/dP4TqL//6XwFnvv/e98GZyXwqhhrv94fDF7/4hdXyMiMISYCMIEYAYx0YqusgIN4HQmuN9V5QrDWOIzaeNytOUY6Pyum0DH7f1AcUKIrC1ynPyhRxtdxOp9PF0ez5i6cbj+gyJusV12E4UBAD2youZvDmFxab7XmWg7UOwI7HJw4os25Wjh2aFEJT1VV1yKzJ88Ii7Xe7al+VRTYqCgAmhDUHsK7ah/lkvnm58svNn3/n3erR0+M63TeZO9/AvtpXhw+fP71IwDMDjYrnqFiXoAJ0ONV1FVMAZds3pCCPyDyLbAWcwMiV49FkXM5zmyNSZm3mTEZAmIzDvChskYF1wKnV51KkH1kBSUkpE6Hj3CdDltAoAE27bYbAWZs5C4TMLEyq62NtbmxOJhOwgDaKgBCI6SiudKESGgJCQRJEIX2LiAFicmQztIQIwswhSkoSwQo5IUtgtAsijsgZAsxFDFHuKLfgHGYZ5QYzMA7IsqA1bpyPSnRTyu9PT25lxYkbzUw+NRnFdLi4XD5+fPHJI1d5s63MvubVLq0rv96vzi/Onjz75MNHY1eeTI9ysSZhgZkRk3wSlt1m++zZ05dPn6xenvnLC3PYT1Kwz+r4cndEozfnd+ZU5h5HDZpdtNtwYsan2WyMeQYuz0f5eEpFWeUYCIOBSBgcJovsAHODGYgFJASEGEPyIdYx+eRFbJZlZWmyjBFTi3kANMggLAwg1pKxWheQaTjcmk4vlxeB
aAd0KfSt93/yk+cX/+5/9Hd+91vf/a3f+ebJ0fHZ2fPptEDiQ9wjmOADCJd5kVkXQ+QUraXMEqdIBNE31lFeKPEskLFFUfq62W2bO3du7fc7a0xTB+sgy40j4OQFPBkRCIljQZBZR4iTsgze545GuckdFgXuLs+PFuNUH955cPf5k0dfeffd9fI8y91uf3jw5luHqrZZnhdFCKFuagT0dT2ejJ8+fXL79ERS8PWBo791fGzJNFXNKY3zXERSiIYopYgAWg1TdCGSsS7Lc8oyC8BNUwlIURaj8cg664p8Xx1q7yez2WQ+PzTNartNAOV0drnZuKKsg698c3LrJBu5qt4vTo7yUaFA0DrEopytN4eTk3v+UEMSg5hZl1lLIKCMBwr+RovoEBxChlKgFM4ERNK+rhZDTIffU+BLV88zCIRgEJWVgtrKAFktKjNzO0ALys+lnC/wr4u07LOKnwaQAO3nvGkYHfeBVUrJIxpvCBDJGQUDivHGo0HqiKVN2700sQMot1yxLd+0ctu0qE4ddWxBBy3as43hALpMuwNl/fGihmGOqIR7iCggWZapHMHp6el4NNZi/fn5uYhsNpvD4eBjUBn0qqpWq5W1Ns/zJ0+eKHflZrM5ObkTA8coy+USOsJPPf8EEmMrFHCoDjGyyzI0Wc4UAocQqqoJCayFsgTjbIwxhISIeZ43jbcAIWTM7JzRAHO9ZrHtVKR0k3ZO2EMajeDuvflhu5vNZiJRWfOVb1o6erY+JUopZcZBiz2+egMRzcpJVdUVe2FGUaCNWywW2xcvd+vN6smT+nIXgSxAYYETwytKysMFo/dTb75yYELQwBAiJwWJMBkNyvvugve+bg5kjTGusFf8ZMoVrq3111YFtEvRZ2wd58tVztF/CGpr4mps9jWI6uHr/aGfL73IAyJyT7EAAkCILYMRCLAAEiJyC7IXhc6DiHLftMpwhBkhGnLMZSYi4mPw3tcq+gMsAB+894ej0cjltq8Y76vdZrN58eJ8v1w9e/R4PJ1o+9llmYgs1ytIHLw3ScbkbIL6cnPRxMLnzOzyLMsysqYJnlUIAhGNsd3S1ak7vqYag72aijEGkfVC+p/qj4pOUV1JcPoSt/KGUCeA19/q0XTy+MmT2fGtg5fJePLhex/9k3/yT55frH/1r/767/zO77x8/iIIOARjTNPUzIzmagrweg2NhiEvERkDxlBMKYTALFnWDTUCKK75xtetADTxV/3d0WhkDHS01O1K1sE+FcwbjUaHw2E8HiuT52azWSwWIYTRaFSUpU7dTKdTAHDO3bp1q2ma8/Nz/ZCmaUhYqaCKPG8at12tU/Tj8bhc5Lvdrrtj2h8VldALISieNs/zk5OTlJK+czQaGWMuLy8Xi4Xu8dlsttrsyrLcbrfL5eXLly+Pb9+Tri+gvBYqQHaFqu3wropmv7Gzrh1qwRGRBCEhWoCktapB0fImLKO33n+8hty/xsP25uNnORURCSExAzJyUt50BknoHILWRQkEQZDAArA4dEZSO4etzNXq1REccBQAMAaZozEGgYSu9PaIWj17RaBoixUYkVpStNeVmK+d6uBGXxkvY0zTNOPxGBhijIXJ1+u18mOt/Mo3zZsP6sPhcPfu3ZSEiJo6iMh+v1dZSOdcCOHhw4d60y4uLk5O7iwWi+328PHHHx8tTtqJUSJrrU8xhEbRMVeVwO5MYko+QhDISphMxtba0NQhhMyhtXa335TO6RodjUZ5nh/2+80GJkfc1ycByTkXhQnSeATz+fzy4nw0GtVVXZaltZZEjDU4aOAbY0ig8j7Ly345th0XEGNsbqyPFSUxgIXLGEkvp8jzsixTOYKdbwKPbZYFv92K7TisXy0+D/eSloKJCFOlLyuCIMbIFkWo33vq9qqqIuPyXJis7sOhSXv1Lw6LloPMPiEiWoOI1M1v9MP70Nl3gHbqnweaITfc3vDzU0paMm0hGzFxTCmBABCxABLZFrFlMLVNwysH2kpr
qc6D0twTGYPkjIhMUq7nycyRk/e+DnWMMUeTZZky1ak00iQrZtn4lpsDIa+r/SHUdru253rmeVlYMqW102IO+UyDXPIozhRZmee5AphDjAlZLELqSW1aO5teA0ciY5RlyVzp/QzaBwBg81xpvqnTGtO7p9zrWmBUYIv+YlU3WVmOp7P12fLi+fNvfvOb3vvTk1v/2//mv1nvqqLIMwZXuPVuyyzH89luV/WBVBtvDYaU+vMxxhgDzrkYfPSBGSaTUS8+rCxiACCQBJL2mSwZcFmoG1DEdUqz2Sw0B51eb7xXT1BVlfd+Mpk0TTObzZ49f3T79u2macqyfHb2QuXxFosFoDKUHsZl2cRkjJnNZi9fvlyv1xqPKjI2hMBIztrRaNQcqr2vm6YxeVEUhQovgATVthWGpvYpcpN5Y4xZ2DwryJrgIwIVeWmNq6vmcrkcjUbKAP7ifLndbpsUy7J88uTJV7/+5/QScpft6qAn0MJ/+IoZte8EwfWwADuYIWg3FhWsCYQDb3fFt4VdaXQwh92hNzqniP+6crs/6mFvhOr9/6r5uKqed5edUhJGZJ+SSSmhCApAYhQHDgEZ26qjahaJJ7GdUqhymAKCiDLlc0wIgmQQgRLoTrhi2R9wm14nthnMRX6Wt8bPgLQYQ76qR6Nxtd03TWMzc35+rsFatW2U2pxaMTA4HA63795WZPPR0ZEClHe73W63U+5BALi4uDg+uqVbZTKZZBnVdd3nLiIJkVxGWZYn9lEYvPc+quylMWABVHsahGKMIiBAMXKMQIWx1hpAZ0EL8RplkXACZI5obG+gZ7OJ3rftbi0pcgocgsvz3opJJ++OiBziwCGpP4hCZK2NVRPrxoI4Y8tRsRN5cXHOnCbWjsvRrXfesfdkdbmtAqz8WeNZ8KZjePUYfk3GGOEInRBaTClB69hSSmzYGMOsSk8BIO9xNz3HprGIV3hOGn5+n85i5+R02Sr9WO/j+9/tP6EfzgOADssGr16X3kNFu/VtJJbWrCcWSMkgEkpL50omDjd3Fwn3/9vrwGGnPGKRQcQZo0MtwuxjE0I4OTkhwiSsCbR1rTQ2VqmqKp+izRwQJWGdmTPOarJuyWTWOmN1QOjgXB/RpxBEiAwRUWy8XhIQyVWu3O190LmXHqpiWBIo/yK3KaBY03LBdXFJD90cuqU+vtHTqOt4++69n3z0yGTj3aH66KOPOKYPPnrvP/xP/ydnF8vf/OffiJyyLIspAcre19dSxoGcTfdNXbX9+nZdSowI4/F42ITuK0x9/KQ7peFG93vTNPP5rW2omaP+uVaXuGkOh8NsNtuud7du3frwo4MuIfX0OsbgnKvq5vj4+Pz8HAAmk4naBGPMer1Whu4sy5CTcmwG7xeL+fHxsbN0dnZ22O6Oj49Vmb1pagAoikJ3bn8DFVZadofegdlstt6sWu3oulYe0dFknOX5+fl5SqkYZ4iY5fm+iVnmdB3qdenC1i2jHvGG8YRhcUU6oIsilrDtOXfj2oJ0lSDqB3zWnvpTPvRP21dPYvi/NwIrfYURojAzSAKUgNA1SrWGC7phgYyAoKGQxAoaIRWhJSAQYEwtO2JCgCTYlo+UUbavYGCPNeeWe6ld7oioFuPzL+/VLEStoXOu6Uytjit47/f7/Z3bt5fL5Xg89t4b40IIRDSZTN5+++3NZqMFCgU3a6X01q1bL1+ej8oJM9+6devk5MSFtTWYhFmibhUwtqMQM0iGxSSJIcYokATKUa4VV5X0I0Jm3u/3KUKHMkMi8d6HFEYTaNFxXTiQWMU84datW7vd7uTkZHl5vphNe3YJiMlimzcbIteF4dLRs1lroStrWEvJJxIwRMrugZDWm42g2KI8e/nilLO3Fnec5Ivi2VE5QWjOu0HyV+95v3iGBU8gg5SUHvqKSJPIKq7AABEZQIkphhBDw5QbY1rsE7MwAwuwcExEBCTAojP/fW25d37MwswkJJ+BhOrNXweMgN6MDschhkFh
O+fU0xFSK5qaZZZTikHdUmKJAoUYEUv9Rmr1yK6NT0A719Ov84QskgQI0ZJBY60ldJmIIAkJG+f6jYBkp0XejBsRsXkmAE0Mzrl8PE7Bc0zMbAB1EQIRCKiUdEwhcQIS49palhtl3dnolarMj9F5akRjAMgSasfHEKR2Jkt/q3dF2BUS4TpRgGYPvSPUX3HOhZBRVh58nJX2/Z98+OTJ02Jy9G//9V//z/7j/+Thp49/859/Iyuz3X7fxJDn2Xbvx7YViruyA52T7kOf4XftnEWMzFwUefAeREQSM2cmQ+mAMDpdDKoc1FLEVVXVX5oKKWRZFqMHaHmoAcBa6wyGphqPx4g4n06a6lCWZQjBN/X9+/eXy2XTNIvFwo9Hy/XGOVfXtapLhhAKZ621kliYt9vtraNj5WTZrtabzWY8HmdZJhakG5lXpjS9nwq66a9XnSIifu1rX9vtdgS422zfevDg6fMXRVG8vFg1ddhvtvPjW4OVTz4lYB5yNMYY1SsrNRq8xkVR/91qR7//wMF/yiKglrxfD9I28FjfBH9WqR70Rc5Xr3Do7fp1hogiBgCjoGEUYsRoADkZIkoORTSzswgCKMkKIbMkIFQXC8BCBkAoCQChzvYb0yV5hloF2n4jqeKodAYIBlHe5xG1DJf+0AqHELFrNWld2zlXFIUKrBPRxcWFZnUiWJbl2dnZF7/4xXffffcHP/jBarWaTqfn5+fqC5l5Mpk8fvx8s9kYY46Pj40x4jVu0noaFKMS0Oyrpm4aBlvkuaCt6lhJjIFTgpFz1too3DRNZLHGCqfdgZ2FGDmEYIoMyBwOh7quRyNIQDHGKGzIAGCMkYydz+fOOQ6+KBfpRSRs5/GJKHKQThkEBl8lSDIAzpkiyzxLjIwCxpgyz5s8JI6VrwAgH5VkDWaGMnf28uXqsuKThiPs1jvxjPyart5rvwLonFwyDoAYWEDplNuvsigKQjDGWEeGDHNSQaiskwoaZA+knltdO/OVBnuf6vWj659TD4Dr+QFc2e5B+khXFC0wYIQhpcVSl0lIRDbLUoiAMXpIKUmMkWuLZMqcsFc47TxzL2c2uEv6JBhiRIYEIEEiISnntM1yESFI2LnwxBEtAeV57pRJqm7qkGIElhqJCAwCESAxAEoCH0TEkwUWQXDOoSHp8L15nvfOQ0SwpzdJQERtruyuISSvbixhS6apwOSOOqANETr/lwbTLH0qZvLR2fnlrTt3X5yvvvX73zl7sTrB7L/4n/+XlW80hS0n40+ePBcEIA9XIQT2/QJUBa/BEF7fw5NOlFyTsP1up+/QnKbP/LClbdIpdV2ooBJ3IpLneVVVeWFcUSwvd5lzSluo/Y7FYlFV1YzZWqvw79FotNlsmqYBAGPMbrcriiLP8xgjM5+enk6nU+/94XAwIGVZmqJAgN1ue3l5OSrz09NTR+by8lLlZ5XwumkaXfytLIwxMUbtGp6fnys0Qc9zOptobxUAptOpf/jI3DLWWufg5cuXb37xSzFG8d4iJeYQQmZsX3weLsUbM9NXC1UZO1v6ke6fMKJR9EU73oegfJ6ICYSwZV2GwcQ6tGJ+f7rO7yrbe63zGPY5rr3e6S8oCh+FKkkpCWGMjjiBCCZIghjBZFYMSmrpW6KgFVRaPETUm4X9vUNlQjT9wIOIpG4MmaSD+ogAti0R+fzLe629izFaY7SXi4hKc0BEp6encESjslxerKqqFpH9fjkajZhZPWJVVS3zwuPHdV2/+eabq9UWHB0dHXnvF4vjpmn2+/04eSI0SCKSZVlejmKS7b5CREPGWhsZA6eQIiAZB0mi1i289ywxK3JhiZGdBR1iK8vcWltVe+99nmPVcAiJEYrCChCHUJTu+Pi4OexV8bIoipYzczKJofG+NgatyTQ9Z26JMNuv31rnXIoJIoPSRQJba63DWNWY5bOjBc9GLncZEky8OcB6t12+uHx29nyPWItgkb26qj5rUSFiZAZOSJBAYjfN1Vau
oK3EmpY9JDZN3TjXG6+uMKsGri3LMLMMoO19FgsAIlfd37arN+BwaQf44Aoi0dnKwVANX+EvusthY1DIKBsqMwOITl+QNQ7BGIyRU4hJUhODqRMBoiGDRNYYJDREgMCpN9Ot0xUAgNpaRINoQTtP7eA8RiNXg97CIpKEQaTar/q+KVukSYmGkjFNjASIhBE71VUjoMoxhEpBDYgpMYEYBEmx6yfIMHnS26KlS9E+Ligsor1X0HGDmRaRdC107pERvWo5dLAv/VIwK54/O4tJvvO9H+72FSO8/+jF//X/9nf/y//l/+qHP/rRrjpsDnsgmE5HIXqyDHyzBCXQDqTr1Dl2/Nr6BkKyzgUABKiqPRE6chwikggzpKT5t0iChABYFLreqG0wA5Rlud2sy9E0d857PzuarC+XdV0XeVlV1e3bt/f7fX3Yzad3tlsQkSLPlr5xzq3X61FZ7HfbR48efe1rXzuaTV+8eDE7Oh6Px/v9frvdavnXGmONKcsy1E1dy2QyOT4+Zuaqquq6LnIngkQWkVIS5qD/S2RTElVtCiFtt/sQUkrpyaePdbZdWTX025xOp1kpy+Uys64KHBrvsjJ4LzG5ouzbB/3wokbMMMjXr/YIGERB6dwbAQoocawWSrTCCdCCjBTGCV18qaRd160Cge5T/NPzf1Y+NxYexnRdfNSOMmqDAyABIDMYE5i1qxeBkBAEJUnKMhEQshgSAgFF1PtpDGISRADDmvwaJFTSym5ZE13NKb9azvysytVPv2ZrW6Ev5rqqpBPhmxQzjaq890dHRyGkBw8eXPr15eXl2dkZM9+9e3c+n3//+9+PMR4fH69W25TSvXv3zl8ub926dXZ2ttnvcoy6sXXLjUajqq4jJ5tngA4ImyZUdR2C5BmVZeZjUPsVOEECERQygBEJQhIfIpF1eY47TCyZswCQIiCBMSiAIYBSSDx9+ng+nz9/9uT09OSw33IIzlILlbyal+rMmag9JUNkLZmUWgorxMo3SRJaEktulI3m0zDO0JKvmnt3bi9myJfV40/PtgAM2GMEXv1q+vysXdp9o8VHATCpnVrpiFyEmYWTQQMAxqJwO1+vAUoPRQGAlDDGaAxe9eE7IsFh/D7s80GHE8bOmuufBAALQ4rqq3Wu3yDgzazxqkeFDMq/iSCIPgSLhNY4Y4yDaEITfBJJjWe1/sYAMxpDTAIYOwdPZITQdIX9WBhnDDlLgEmiMEOLx0maMOkW7G+syZ0hw4ScWvxX4FQHn2UZIyILd601cjYzNoO2GplCVOBGAkHEoEWtAcRJvazN2uoZWaOBPg8CSkGQjhVTCA0Sx9Avg6Hv7G9gzxmkl3O5343nRw8//vg73//+4xdnyxqOj8v3P3n4v/8//h/+6W9+IxsVTQjY+Mo3sUnTxZjX9bCI2v8tRFTyI1TutEHC1xdglYbCOaepGPC1ohF3Uppq9FOK3vsyt3meL0NQkFqMMcsyBZtMRtPtdnt662S/3x8OBy0MjkYjjcaOj0/2+/1sNjscDk+ePhuPx3mer9fry8tLAFA4KAkyM1mnuGtvrG+qw+EwLsq7d+9eXl5eXFwYAuyIYPQSYoya8GmMu1gsAGC/39d1PZ/PEYSIttvtZDIZj8e3b99Ggbqu58enSq7WpBATl1lWh4CI1lqtbSoQSX1ejFErqDfqJYhIcAVpQRIAJrIA2vEiTey02Qdt7d5cs9LSsqR3xGZ/NoeZzI5fiWevjNeN5yKSQG0mUMd+p6i8xMKcmFtidNCyh5BwLQyAKAIszJIAtdyLCu7WVolyWgEYlKQzrEp1qjsOWhartvLSmjBtA8GgvjEo67/2FSJCpMNuPx5PfN1YsrnLv/W7/2pSTuq6Xi3Xvmmczbbbrdoj51wV68VicXS0WC6XuuYePXoEgLdv3/Y+7vcHY9x+f0A0zjljbI5109Quc6PRGJCQyHt/vlydX2yn02mWj7a7qqoa47K8HFmbRQ66Y2MIvpHMmqIoOTXz
2XS/90Vub5/eEkn73TalCMgp2cMhWQfjyTiEGIKfzqbz+Uz3Uoyhqg6jsiiyzBCOx+PovXMuz11KMfg6yxwgbDbb2odbp7dr73f7Q0wcYnJZjgBbvx1PZvvG7xs/vX1aHC8agpjir/3lv/zv/Y2/9eiDD5dn5x/85CNJsAWphQNcs2V9dD80eb1ZYWYjZA0JiyM4nk+m43HmMLfGGMqszTOnn4AEzhpDaKBVV9LBF4X8C0sv5SHMIEyIhsgQobWDv9gK8TG3MiBdBa7FZRhjIigm0/Zjzpo86cmnxC1fMPREMO0cGiKiQWvIGEuqIKWTqVrf0SzDOYyREFrcalP7ug5V3VSVpJgaLzFCjMAsIYS69lXlEbjxvqm5aVKMFFlSxJSQBVlIGFPCyBgZY8LEpCTwiiwT0N4nCkhMwKJJmCFDSMycQiQfQt0k7yGxIbJEAMIxhqZJIaQQoveJkzAjgiGiPFNmlm6bEhpEIkHVmgFCbfZRpzTUYkG7GKUVuSSiw+GgJZC+pBxjlGzyhx98+N3v/+AHP37/+x+8GE3ATcbbuo4Izy7Om+Abn0QgL/Msd/vtIUdjrW2aGhEX83mM8VAHS1DkGSGmFLljPLCGYowIIMJ3797ZbjfVrpqOx8aiCEuKRZGLSKjZWckzRwads7HxRVFYS7ttmM2KIncIStIY59OZDw2IEMJ4PJ6Mp9baMsenTx7fOjkt8oI5gnAMkZPSUnCIsa5rZ+3hsP+5r361riogc35+rjliDD6lZI2JIRhjijwnwrquQWQ6nc5ms9FoBMKHw0Fxdj0IoGkaLTJ575VPP89zpfEsikzD+s1mk+fF2YuX293u7bfeMTa7WK6++rWfB7IhsbFZEwIwGGtxQOLRl+GH+5cG/VoDSbdhO3XX1r9bIKNpYWKIrdgqEmm1k7CLQ9rhWhRokRzSVWVagNLQaH9OVvYzvuHGoa72WpGzz+0Gnbybc3sAwLq1e6k3QAOUWEIik7gJDCaiMWCTgHFGCIUiGKOeCk1S7StGxEBCpMTNgihg2tdbxv3u2gGMshl1RZVrbBrDisfPcuU9pqO3ziGE2Wy2X59Vh8N4NNXWcZ5nDx48uKgvT05OZrOZAj5TSlVVxcCbzSaEoEVCGqiuW27R2wDMkpCjxs7eA4P4FHeH/aHhydRa5+q6AQAlg2AQRoisUG/rfVC+gJgSoURRFjG72fgsgyyn5INwnEwmeZ4ryiZFH6NHgcyYPMsAoKoqO+ivqO1VkCQh1NWeOTlDIQRLmOcOACh3dWqSRDAYo89EMmuij9/4xjfsthmNRl/+6le/+wfvbfzB5Fmeu8N2r1+Huo3e5PUt8RuHFXKETILGIFll8dJWhEhiNEQwoHNsRw7UenafwcYYY6/KkojUTyxQ96IxBoBSSowtedWr5TtdADSQ3JPBCOOwTfhZa4mxLU5CH8G20ExjBFGgHAMz62ycelBJLCL1dq+LVS/WIjECstSHetCiaucsWo401XwRAAILihzHQE1q5c3UmosSgBOhIIEhVscGyJKEgSxBggQJwRiLAsQSEwNwJDRCgmSAhNACMoLh3g5eWYrORBCgoLpdIn16RfZ9I8/TFaIhRZuzioQQXlwsN+v9H3z/R0/OXgSA0cniV3/t1//hb/zGv/WLP7+D+P4ffgAOUdBmbrvcjka5jVcOVeXuEMAYYmZLhlliCAA2z3NC0lKHSqw0TYMIKaXG+9B4awkASJSc4KqDq10VkeQcOOf2+z1CPFrMY6p7JRNrbbvjUqqq7WQy+fTTT9966y3dy7PZrKoqsjalpEAzBc09e/bs3r17dXzinNPpiP1GmLmu61FZNk0TpLEGdZLh8ePH0+n0+Pg4RY+IWvBUBdCiKABguVzO53NE1PHi09PTsiybpkmJzs7OiqJU+EyR56PRaLvd/uJf+OU6ivd+XE6a6FOIu/UGjSvHoyH0Yfikt/zXnkgrrYPUMlSAKJITdAQWsWfsVNPdbS5hQG0BQidX
pO+hzx9F+5M47Gt7eJ/lQkSk1VECI+0ZkwgIQkwCoP/FqMJ5ZDiRIQZhskxEjIBByCQCNICgOHIBsK0PFWEkRcEm6DgS9cYRgTBJJ04LV+npNcf8s1yzJvIyAByGEObz+XQ63TBr/UHj/aqq1pv1aDSq61rt49HR0fHxcfDp/PzcmCzLCh1m15+Ox2OMJlpDg6kXabEYoHu1rn1Tw3iMSaSqfV5S0zTMSt3bYhuttb4JhsDarAdJ6YyqJF84NMaGEABhNh2PirI6VKe3T/Y7jDEapCzLxuOxMSgpkHHASdGPHVoBjDEsst/vU+Isyw51oxInOtFRHbwiAKv9wewPYEdc++35xT/4+3/fPzr/0r23Dr6ZjLODy1+utu3w/EDxR4/Pqn8iGyAE5aFmZhA0RDZDCf0d63QZWxwEDgYVdNzWGIN0NQ3Zby0RsR2Joi5YRFRdhdQJGPUo837+b1jJUfII6My38gsO58+66+hpmUAHb1pK7lZ5s5N0Ryy7BmSMMYUYQkghMnMKPoYYY4TUgmzVrlDq3AwitXytHToDARENEhpAMmQNAdYQUcnQhAgYGFESMKIBBMNGEAwjJwGRJIIby8DIkDTTQ+tUKbooMkQhg4YMGEA0oHoI1qBCFAZGADqKiVZErANLSX/fuqOnGpCOXaEPOhUt+eLs8NHDTx9+8uTJRTi+W56+/db/4x/9g//g7/ydk7v332W/rKvzDz8tphNmBkW3DhJxdXumE1u31ookn8BaybKMUJqmKfO8LEsClhSK0hmL1cHr6iABMmAstBkzCFrQvczMeW7yPL/YXCLEB2/cX17udUIp1E2e5+rJEDExn5ycPHr0naLMDEFofJ7nRZFVTUgpjYrcGVIH+fz585//+Z9Xp5hSmkwmzWEfQtjtdggwm01j470POn+i+dZut8uyoizHeV5dXFx4H5wT50hEynJc115ErM0A4PJyHUKaTCajPLt48TIzDo2tD1We55Pp9HK7L7KcBF6evZhMjwhA/8RkMrlRHoNXBACGZhNaZrKhbxNdfurYWjCL4jZIurZv/+s6xgefg8D/0zl6w/Ea5os+4RtWCzXYI0kJDUJqlR4F2wY8JhFVCAcwlhNmNgEg+oiIrFDoiEQqyd4GsxAZhBTSYqzOjkjfzxtiZJm1acR9RELXx2aHZ/5Z16w4KOwC7SzL6qa+vLycTqcg8uLsPKU0nU5Xq82jR4+O37y1WCzG47E2oq21TdPsd1WM8ejoVp7n3dwMAkBZln6rsq6k+kp9AmQcAAASuTxH0+yrQx2jT3Fsi6qqETlzhWQADAzgXB5CKMuiKAprrcsoz/OUojGmKLrxUk4my7W40TRN+2JiIDbG5LlzBvs8SePr9iu3tiyKQ+ObpkHTTvIZQ0WW1XVtkISjsyaL6eVq7Q2V6aQ+HA7LtQGbav/sydPLOgEmPJ1MjmfNZqdLKCVOiYliXyp57c1PDCDAITSClQ9N42NOzGysQRA9216nrTegPfSOmXW2ncxQ9uFKNgxiO/mhzUSRTulZ07iUeiAMda0muF6S7RMtgFYVud8anxNX0VD2CIEBkFAAIwgSIhlnDTnrUqYJX5ZlKYS6rkPjY4wptijHcWq1uFgkKdxbPx8wAhvEAEAEgSyiIJo9RQMGUQiMITAt4kCnYAlRGEmQWVAkJcAdeOgHz511LhpniUgfyVpjjLZulDK9HTzQbXWddwOVtwVAa1Uqv0Z4TZ5QrlFxShcCJgDw3u92uxTN//gbv7k/+GIEX/ulX/qn3/ydv/rv/a3i1uJgePHm3b+Q/+pvPPo0pAg+TiZFta+L0aTvHGsBwNrWRjnniCA0TTufCqxojjzPQ2x0nsdaKwLOOYktdM6Ya7hflY4Qidy1k/spw/V6fXxy9OLZc03XdNJpXJbW2ul0qm2/EBsFc569/DTP8/1+T0RvvPHGarXy3r948UIVPVXDFgCyLNuuN5vNBhGmozEC1XXtyMzn86ZpXrx4cevkxBij
Ans6/Kdp3/3795WrU7uJWogSkdnoRBlbqqoim/mYirIsiuLx48eHw+Hy8vIN770PY6Isy6bTaRN8b+GHFQ7maxnYVS6ouCZgVPwGaXWDNE7r+NU0HlLImHYXuHOB6vmMtM19assXf7p4Ttvzd7zW+Q28XV9LZATDCC0cFZJmflrpSmrVIpGPZIIw5i4AEBmlMSWDYFuOQ0qAMTJiIouITImU5RARRQx3xGRC7dA6MytjCwBTK0sLLFdjyz/LoTs5hJiRdc6xTXmeV5tqvV5nR4aAbiUAAIAASURBVIUu8bIsJ5NJXXvlFppOp9o6VgBxXdf7/V4LoTr8MBqNmiZqjhhVk8ZaVU+11qKNiGgthBCcKWaz2aG52GzZhFr1XFICwZS5Fr/KSoHOnLnC2gzR5HmW5/l+v2lio8zujY95brM8FxFALkd5T/4kwk3TNE2DmXXOKeO2bvvkfdK1RkREapPIoLU2SQsoJgEHVNqcyafNalP5uvKR0+7s4itf/prcLS7PzjOAjUCzWU9u34n76gpqIZCSKL39Z4YdgpYhCZggddM0IQ/JppRskRMnVZ+IAQ0ButwYgwlJwcOJtaCiMyzJA7JYJEEVgmYgBkAdserW6s1aTW92e0Pc69H0gAtrzdUke6f/0A+5i+Z23fRS65JAtWwEWIfZGUSAhQCRkpYoCZCUp50JWMpxkWIsfBkar72o2PgYI+5TB/BhZk4cNB20ZACZRHeiwjsFgGqTQIjU7aEYtIhiwHA7OyyAhpEFSZCFMThpBxKssZlzRe7y3FrryoJIlTdJ1GMwd1Gm9CN6qqECAGB09rJ1e7q1egMy9HDDiToA0FcAoKqqi4uL7377aXWol1v/pa+/+8nzZ//Zf/E/m9w7rSR6Y2ZHp7fu3vv4w48++K1vG2tyl0WsFWvTOuOkulpGP1OzSU0o9e+LSJZbpJYXRhJLaqGkLAlJiNEodICvuDI05Grq1DSNtdYYWq1WxydHu8v1fD5//uQpgIxGo+VyOZvNMiO+ad68/8bmcqVrabve3L59u9pvbx0vdrudMWY+nWxWl9baF8+fvfnFrxwdHW2326D9vKKox+PDfv/pp5++9caDk+OFiIS6UY1r1fyrqgoR8zzXdkbTNIi4Wq20yKk4c618np2dEfvRaAQATdPMFmZ7uaqb5u79N1+8eNHCtlPSifuUUvLhRt7VZzjDMG6Ik2jFe1tyso5Ss024+WpuD6EDs7SkLZ2qg/7sVSf3p4rn7JUQrrX0hk6iv+Crbh8mYQICEpNaeLRoRkuAyJKShMgUvIjUdWxhxiiABgnIEFK7aVrVREyIGAGlU2Pv04W23IOGGRClo424Yu6gV7j1Pv+Cdci4rhugNqVQgNbx8TEhee+zLMuyXAPJ+/fvny+XRVFMpiPoBmy12KiH+o+yLGM8aBRGRC4zxjnEaCK204EIxoCPbJjzsiiL8Xa31dkp7w8pAVALCxJmChzYq/eMKdXez+0oLwoAiDGOy/Li4jJGGI9tbp1eVFEUGldaR6Hmpj7s94WkTEQystxJpGpRiBFiDARiDFYhGoy5syFBSsEYQyzjvDApC0ZGxiy3h21MxtnTYkJ1uHj+8vnLfYEgWbZpfPP0aWFce1fpmnrGZyVGKkWJ3epiZuGu503GEGu6kFIytjXQwwonAKQkAADhCgrfNsBaBQbbo0ZhMBc47Dn1+Qdc9aKumDZhEPNprZJoMOnVKjMMw6yrCbyELfyYu+EGstTSL2h9A1BUxpoMWcoy68qiYE4+hBBSjJJ3YYSicGOWUkBWeghJ3SbrRoJJh3uSgHBKLNiprVprsS0+JjQEBoUsIk4mpZo/p0yveWbzzDknAEDY0Yq3OofSZ+2dzxsYQNHMrt1vLP13OnR4fcKnVTWFsWiP+eXLlx9++OG3f++99Xr75XcenC0vT776zrtf++qPnnx8fP8OuSwrR7APf/5X/tIHv/1ta+1uvTkaT/dVnVLKMktEIUZteLeQ1JS0
UcfMTdNY0w5xq3KsIiERMc+t0QhhUMRKSfR39aOsNSKw2+1Q2Bi3XC7ffueNcKj1xlb73Xg8jimWZen9qqqq6XS6Xq9nR4vxeLzd7N544w11afuq0dpSVVXz+TyEcDgcjo6ORqPRbrfTRG02mynuZrPZgKTFYlHOcpV/GY/HnNLhcNCV33O1WGtVF3Aymcznc23vqXV6/vz5nTt3ynKknQ5+/mLbbO+IhBBcPlJhWyKqq6r9suD1ZYxh5ax/5aoQ2tENIQghgqota7rXYsI7MqKr0aCuFgoIyMKqUKYN8X8DentDD//qLyidUiumi0LdTWMWJBKBpD+JiE3kBDUFESHDiIJiSZjQkIhBi8IGDWl7TxglAkCiK7fX3vAWoUDMQ+LgXuft89z2qwcRWbwKZHRb6hweJnO5XMYYAbCua1WKMb5dtTpwWtf1drtNUUajkQqI6MC7MUYHfUZtSTNDRBtTr6eT5846l0B0E5alSwLee0uanbScQ4SGmZs6zCelMc77WNe1Nt6yLFPmFyKwFszg0Etr8QIUByW7lmoEWUg6z2HIGONDzHMbq5ohZFnGPjFznuehPjiXidDU5ndmR8HLclebEoqy3J1fjoybE6wZgvd5bmA84c1u6CfgcyuBoP1aQkzQd636oMo6m2dkoCM4loTA7QTb9eAMEZNv1wkRYSd8o/j13tS28amelfLCXKcOUQuNiMrQ1f8VTdx1i7aTDG2NnZMoivgqz9PQlQUB0ajSMiF0p5FM1/Brm+FoEEEgAiMKEhpnLSKWWdsCnJTSsVamlDgm5ggs3nsafA4iIokBwxJByWiSKFpaXVKLLVKQpbVkDTiDiLYsrbUmc1rMJCJjrTEmcGIQaUf8hI0Kk+MQbdDnAV3zEftatqqsDZt5N2qbvT3RAaHNZvPxxx//wR/8wfLlgcBcnF/+1b/9N7/96fsPn356dOf00/Oz45NTJqxWq4ePH0GGIiIJLBkZTMXEJM61rGDq7boCXUopFXkxn8/bXVmUo6JsmgqAc+sgMV1nk2EGQjBEjFLX9dHRbDwuvPfAYTYtm7o5HA7z+byqquPj42fVYb/fj8sJMwfv8zz3vtHO/GKxWK1WMfnFYrHZbLIs016Dyhg557bb7Ww2G4/HMUZw9tmzZ7nLyrKcTMa79UbbKMfzxWg00jEJTmDIcQrVoRFG51yRZwCQZ+V6tQ0+nZycuOP85cuXIYRiNKr84eWLF0fHJ7PZrB1BsbaqqvFkvq9Dtd8fdvvJ8cm+Ci7L8jwPTdXfBLk+sX5jL3deTzEp3EdfV87i6jm0oM3W20Hb0uuTvz/W4Nm/xuPKpnxOb6+/7N4udGguUPgWI2BXLFWHFIUpAQCEEIggeONM9EackRDForgMKUFKlCgl7dVrxkMJETW4bu8h62ZDAKNdve40rhSehm3Iz79go3CIgdvTfbjf74+mJ8aY7XbrXHb79m0lrQYARWCPRiPnXJ7nRVFYkyk/bIrcnSooWmyWWWa2zrWty86MlmVp87JJ0jR14JTnee3DYV9Nx2AMGNtRhpJBlhTB5pkxJsQmRu4xyokp1MEY41T7Cjm3rm1VdsIeqpmZZVmeuyzLJFyhkLUqS84y825/ICKOiSEWZWkStM2PhjLnoo8Z2ZP5Yrdv1lXtBJv94dmZv1/gbDJudnVNaAwd1msH18xHZ/hAkXKvHu0au/6KHmodLFIMoV9peP1t/R5jabuAKSVj+gUD7nXblYikU+aD656PU+rtZt+O6sto1F1a3/a4PkA6HEiC/txMx7fLzElS6mHTRKg5BZKkBCgsgoCGDDmyetrWiAgxG2ZRjkROIjLWP5VYa8lw5UfbSTpiUaSY1lTbSUdDWsw01mJmiYgBrbXkruQRVGiZTUdPA5C6oStEhFcAucOSVwswaxv8V3HkDbc3/EaU7nKz2Tx58uSDD54DTxOkr3z5S//sn/2z/+i/+s/zu7feO/tUcvv05dmsHKft4fd/4x/TeOQv90eT
Uol72oCu83769dmOtMwYo4D4LMsmk8nZk8e73c4Zi4hKqg6Jm8ZPpuX1ddt+y2RRu3R5Zvf7rfctg/aLFy++/M4XDofD6a1blxfnq9VqPl0cDgcHMJ/PHz16dHJyIiKTyQQAmqaZTqcff/zxvXv3dCZP53oVHKecOHmec8CLi4v5dFYWRUrp+Pg4+Hqz2VS7/dHRkSpWMrOC5lRUvdf11MJSXdfn5+fHx8fvvvvuarV6+vTprcX0o48+FcA8z5fLZV3Xk/lst9s9eOeL20dPdrvd4XA4uXd/udrlA9jdNdvebcZXvSB29cnWtSHrWHonv5DaYXZMPbBz+OsAgGgAFJL9ZwDg7A+LlG681BmMAQtDV1VBRIgZaLmDEDVT1PwMCSRJEkAUISGIAinJzrqAWbJOsGTKDGVoDCQgQHFkBB0YBgaMgAKSaiArlCEJGTEkYIBJQHWg0IJBMNSPTQExJtTGIiOpTLAGlTpe0aHLRcNFgJCkyDK0tN1vR3mBJKenJy+enCHCrdPF8uIlIBuH86P5tt6v9qvRSTmeFav1ejKdhsifPHqaIt46Pj0+useJOIXJeL6sLiXx0ew4ND6/fVrXtYeYMLMoTR2Xu533dDS7/eEHTz1DUUy4jqo4ur5ovCuY2aBNBMzJZJDnmc2Ccb6OS2MBjM1Hzm/rW7dPHz58aNJhNjPLVTpaZAfvjRVJsalait7CFRgZ0SQfPIklY+KB0HEaGWOy3MaQIkpRTiIsyRblaFJVdZYVWQb7fVUas5YyisNpjkUIh8P0KHvDjQ97v6uCncJzEcQg0wwaHtexSLjNIXGSCABgjCND1hi1bgAgGLUCrnBfEMgN+BAzAleYhKEJTYDcS/BswLogEFMio2m0cbZM0PSmkxANGSDTJiUxpFqisEihLOcxMu4adA5KgyJk2/AImZ1BZgmcUkzMAZGtRbRoxEEUllbG3QBFxc5o+Z0Momo3CUcrIhRT7x2vd8GvMUNq7wwtgvdX2167+MARuPMfJERxAAJSX0KdAeqVO7jjCO08TbuwXbrWkuDOluhMSD9MiR1IJ5mU+vOhdpSYu+8LAASE+mEPEa8gWWpLW4hodFITrLAAqFawBdVMBmjEcvANH0JoIAWCkNuEiGQ9i1tXTRC3jO4f/e6P/8k3flTR6DIe3rj/xvknT/6Dv/hX/oI7/uu/8jf/+3/yD55engOhWW9/5Su/+Ik9fvbs4sG92z959mJ2PNnt6tHURjT+EAUMJxcDnswXodmNC9lsVs6BKwjkMBmfxFA1dZhO5mggeI8G0IBxZltB5evpdJoazoq8rnzjeT6bNw0ae/AMh2o9Ho+rupqNS7KmXjfr1XY8nj579uzu6V3FkR2q3XZX3H9wvxKqEtYRwn4/n88d0SjPQ12XzoAkZ7L9YUtEZJ2PaTGmnA9NSndPpt/81r/6+a9/7dMnTz789JO33npn23hEU06OQtNcnG/G4/Hx0dEmPhcJWWZiTFmehVjtq8Mbb7yx3e83m43LC8zNk+VZWZWz2ez0rXubyxf5LD/E5tGTT09OTh7cvxN9oJSq5cu0ufRF8TTGNx/cLSgWlqv9MjOjFhnkyOgkEqCxmffetI5KSQraAW1QZ0ltgwrRAKau0+sQIiJCKz+rG0GFxhgAQCJDArCAEcEqvAUggljVdgUA4OFM3WDW6FrP78pf0meUlj6/5vR5SJBXI7VXfzrsiLz2YObIKcbYxOB9DCFEVVpLElNKiX2MKUlKEjildBUnXtV5htUSvoY4H0YocL0U9llnrrUItTKazGVZ1gSvNcxeD0W5X1VVCwAWi4UaCA24VIRILYt2p7XmruGkJoV6uCIfjUbT6VQnb7S7JiIxRmEsCoBBla/v/w9HvxGv6Eh68H1RtG0t6ZR6dKgoyzLVlOm7WZrhackLBgmZCj333coeYt4jPwEgMzYv3KQcTadT0xIFdOEhXaU+0mn+6remo4H9DUed8lFr3c7kgTEwHBunVs2nPYfh
2HtfyB107GB4IZqL6zn3y6Zv9vTrREe4ek+gr3jvXy3jaLb06vLuk4n+fPqzeu3RlwRfi2vtU6L+hPWw3eEGR5Zl/dDbjSfZ4MgHh35C/7b+VOHVSayfzVjc6HrgK0e/40VEZxOl656ioczl++ogDI337/34/Q8/fFgHX/tw++4d4+xf/xv/9v/6v/7ffOmrP7etDn/xl//SR598/OTp01/65b/0e9/+/b/2139tMSmXy+W9Oye7za5rNUpvXvQe6pVOJrn3kGXZbDbTrEhTa+j6Jh0dAaSk8OBeIBP6BWM7iUfnKKXk60ZzLB0z3+022pPTvawD45PJZLfbzWaz/X5flqWynCgUTqEDujhDCIrnnM1m3vuvf/3ry+V5URTz+Xy5PM+y7L33Pnrw4EFZllG4rusnT548ePDgwYMHVVUtFgtmfvr06XQ6ffjwoSLp9CjLkogUqHJ8fDyZTKy1dV0vl8uLiwtmWBwf6Zrf7/fM/OzJEzUmQ1WKG8tguIyHT4Zf/WetEKXTu/Gjz1pyfyaHGU1mP/VN184VzeB1xs5vdr3wvrbbKe90vPPKpEkG1AIbQiRGBDJAOheEAu20bzv4D9IzeQMQ6F1X3jfoZCU7d983caCrz+ir165DQ5QYgg7ZxpSKrDjs9w8//DhzWeayw746HKpyNJrN5tXhkGfZ6rBKKVnrmDmEOB5PhNmQZZbj41v7/T6lJABkzKGqQwynxxNEdLkzxiTmxND4uN/5y/VueVEJwmg0EaAYAxm0DkMQAHDW6CiXMZBljlCsIUMgkmbT6enpCTMjcAih2h1GoxEZzPOyCdHZDMns94fJZBJj5BQJwVhjDVlLxhiL7JyzLgOAxAKCZI01LqSEaLz3RMblRdM03oeiKJrgqQNcdekMGWsb75MwKyCFDDOEGJNIdNSX9AkIlBtFGE3XsgaVegPQZkAEYUEBi5I7HGfZuMwKZzNjRkXhnLHGOKfwB5NlmR0omNBgv0mX/UhHTd59yR1FBHZztQiAqFJPOFBp0F83zgGA0qt05JUgSi8BOp6DXVuesJ2lo355v+rkbv5oMB46XIs8OH8Z9CNffbwRCd2wRIaMCkEYozGEziCYnvS6N1BtVX848DPY4MIM3fAsdi8itBnh1SRyJ/PbXjIMrk4AAFLiFEMMDccGILblF6QEeHG5YTRnF5e/+Y3f/oPv/WhX1Yc6nW+3x4vF3/7b//5/+J/8x8+XL/+H/+8/vP+1Lz69ePnv/Lt/69NPPvkbf+Wvv3P/ze//wfd2u93FejefTzx7QxYEUmARMQDGkEEpC2sMF0V2fl4fH49ms3lKvNlshdkYY62KqaYsd85SjI0kcMZo2CdJvA/WECJkhlESETrnnDXCLJJGZZlStETvvvvO5fJiMh6vVquU4mg0MtYuFgsQOX/58gtfeGd1eZk5p+Gj9944Z8j6FJ3LEosxJjTV4VDNFosY0+md2w8ffoxkJpPparXa77ZvvfXgww9+MirGt2/f4cir1Xq7Xt6//8YXv/zl733/B4j4cz/3c48ePZnPFtv9HpGms1me53XTeO+dc+PxGDhqu1GH9yUxCM7ni7wsX54vgUwxGh+aZjKZuywry8JHuIpOrve5bqzqbhMq5aZiA4fjQwCgllzBna0NJ2yJl/WdcIV2kXZivcWzILxmlwz39etf/+N5UVOOpz/jWztPMnB7r/x5bFspVzerHXSgtpdpiFxbdBFEMARIoM/bOFtDM6R+N3U3lwCFlNi7BbLpn+TB6b3S28NXX8Gmrp3NSCCGeDQ/kpQ+/PBhaPyoKENML1+8RKLZbA4AWZatqzWCqes6z0tEfPfdL9RV09TeGDubz1aXa2NslrmUeLfb5lkxHRkiyvIcDdbB+yZst9Xlavvi5Xq9SUg4mUyZpaoOiZO1pvGMiNaYlGLwbEjyPDMEIEwoLOnk+Pj01jERGQJjzHZ1OR5PjDWcpGoCCOZFWdeNgpuFkzUEICjiLCEicVC3JyKJ
GZFsnjmbNcHHyE3TGGPR2KqqUuI8z7FVntYqvqQYE6e2ZwTIzIYcICbmEFMS4MyiABIYNNRKLSq7WB/f6bdAoHJUqaWndQRlbqZlORnluTO5taOiyJzJnMuyXD/HOWcGZEU6gQfXKw2K9e+Ncmtlde11dpoQRZsN6u0G6Axo+69Xi5gFoB0pbZ0otPX9Vlzz1Sznsx5B6YU+NzS+2spaiB8UOYaPr47/t7eCbzYy4XqFYNhpSylxF8y8Wiy58cn6OTK8A+1JX4lJ0PAKBEQkhRRDk0IjHAmU44UYcbevfITL3eH7P/zgd3//e58+Pa+CeID79+5crlc//P73isn4P/3P/6c/evTw2Wb5408+vH3/7q/92q//47//j0rjjsaz3/5Xv3///p3zi0tTWATkxBwFtSJHQignx/MQKmfN+Xl9+/Ykz4vVah1CBNauNomkxNE5p0z0wkwk1hpDJsUYQkRkZ7M8a+ksijw/ns9Zom/q0ahsqsN0Mrl7+6SqqulkslqtQBX4QO7evZtiWF5c3Lt3Z7vZ2O6bSin5EDKXhxTH44kAMrOv93me77a7o5MTACAy2/326Og4hLherb70hS9983cezmf21q1bxtnJaHyo1o+fPN1s1n/uz/1Slrlnz87u3L1rjNlXB+k0VVhEUW+TycTXh7ppijx3zgmLb5qYUlmOTm/fefHyYnF8y7iMEcezaTEeEZKg4+v84DcWZ7+iOrfXBz29ZZaujN4KdQxDLlJqIZ1sAO66ZS0tFw4cZ/ceubakbjiYV17/47m9zyxyyusO6FrWIqmTX7563p3Q9V2ktoKRmUNKPnITk48hJAmRY4KYwCf0CYJAYNEmTWKISYt+wIIMnd60cOwiYG63/PDAntr4qijK174/7ZpoHanN9KeTyWTiYwicrLW7fX15uVLyoaIoMpvfOjkZleV0PEEBZ+xqtdpsNqPRSEFi+lHKvDBfTJOgKC+hMbp/mqapqupwaHSVap229n63a1S6fRDaX9H/a71F9U2kA8IVRTGdTrUsqVXTfr6HmZVKRkuvw/qe/q5avf7F/vI7jQtQ1dwsy4gIJYFCDRF1LqIcFWWZl2We5y7LbOZsnpnCITBjRwEDLSjAkG13vlIV90uNhJxzzlnnWnxNnudanh3OeA2zsRsOBrq6Vr9KW7bLptHiKofIoS179nxg6jYMEgkgtzXZ9jMTK07kasH2YS+LMmEqx82rTuuznNnwR0L42n9ozfAfGOr/9a8MH3XlR+HAqX+MwsDxtf9QEkrq/1dS6P6xJOaYOKbhk1ZzjkUpPVGAYKgV+lOOq2WcGogNJw+pRS5EhhC5CWJd+dHHn/7W737r2cvzhOAFCOCTZ2euKNZ1/T/+i3++rvb76L/34R/efuctGBVf/vrP/y/+6/8qn4z+T//df3d7sTh7djYuisy0YBzqEgRJKaUwGhfWmhCaPIfM2NA02+0+c4VIYo4iiYgMEgoDMhlQgSGD1OJgCVJi60gLw8IgkkbjQmfgNHXL8/zFixdlWSLiZDxWqRMtGwJAURTL5VIJqRFRWSx0xk46OmztmJRleTjU+plvv/nWfDKVlOaT6f3797///e//4i/eDiG8//77281+Oj+6dXrn+OT0/Q8evfeHH7zx4K3F0cmT52cJ8PT0jnP56nKzutw4cqN8hGhSkul0VhQlAMbQthuGc8a3b9/yvjEI9aGySPZ68eNG8PR6h9H+ys2VMcgITTuQhIbIdh3hm5Hin+3xUxQYYDAV0MaG7cAAE/XdRG07tXH0jQ/UoSpBSIKJOcboAxJZZwkJXWRjNNBAiAKAzIIoKemEPxC1XAlIkqIQAWJr8/TWD6v8HQa9PZ/X3t/h3Wfm2jcppXI0Or9czueL4/nJ0dFM14oW5bfb7Ze//OWyGGdZttsdHj9+8umnnwqjtXa9Xh+qnTEUY6yqPREtFguMO513TpxEJAmHGL33NqPpFMmW7XogE6PGCthb/P5uppSsUbQ36Gmo
V9MpnxCSzlR4L029uy2S5zkippSKLC+KInGE1EqHKMhN70wIgVDYYIqQZVkIwiJEJog45xCvolRtlgAZZymBzcj4JubOcpn7iCEyM7BkWQbbqmFhSCCQGBnIalms/160WyfSzg6wMIKwMCP2hb6UEve0xUjOaZbDKSUYtBP60O5aOtJNielAkqGIWlUgUuSLPlqNZD9jx/VLwhjDnaobXAV/7Z8drqgrx3ad3kE6DGf7I7z25v7O0HWChcHtuoamhusB6I03wyvJZf+GG5+Ag8v5rDswfMPVHrke7MPrbOIwqUyx4RggRV3bkTklaRJHwfPl5Xe+98Pvfu9HWw8JbYJoXZFCnRCcs9/54Q//7t/7ey+2q4YkQfrW977z4O693/y///3f+O//Xl6Ux8fHz1er3GViUgrMzKSE9YIAjMIEspjNl5cv5nNgjjHGpglHi6w+tJdPBm1qAxoSIGu9jy0zJ6m6rI4AqeZa23x1xmbO6sADEVxeXk5GYwQ4Pj5++fKFsiNtt9vMmvF4vFwu7929e9huNQwVkcPhcOukpTvQRuzu0jd1GI1GFy9entw+5SIeHR09efJE4/WTkyNmiJF3601V++1+9+d/4cuPHz/5lV/5ix9++OE//ae/+cu//MtojdLiU6vTG1arVUppMp/dunVrt36JiK5D6jaIh3394uL85fm5DgJWT56jc3SxXBydzCaz4dd6I8N7dakP3twWz7tuXzuroCa5a230FUCj8ZRq8klHpwdI0DYh+ifwuUCRf22H/awfDPfYNSsGCgTTWkmrGQQAqoXXpY/968hIdhC8R05NJEIJziCKIzQWKAISQDQA4Kwk4UwUFGsAmSi1pSVodYgigFFTqlDSrpfYbVocnLAAdCw5ihxi7keajLMxxpDSdD5LKR0Oh/t33ji9e2e/2W42G45pOp1KAgIzGY+fP38+nsyePHlS1/V0Mm+aZrVaH6oqcqrrQ+2b6XRajgtoWv284FPwV+lLWZa5cyFRCCElNsagTWAI2uxVQ0LQoe+YojVWKwnq0qDjl5lMJhcXlyEE74UZ6gpSSllWqGNTHqNDtU9dUUu64ESdCiFIJEOU5VmWCSDGmFKMii8JIWTOojAKEwESWbaZIBiT5ZYB0RB5SIcKJYEkQpNZigEAOAqwdKKJeB131SpmAABEjkok7pOooHxKyXtWesC+7AbXqyt6aVe8otLO23W/cgUP0YCaiCK0GZuIsLOiAam5ilXVWwpGACBMgiS6zkgJTrS3N0C3Y+vDh06ipW7pwvnhlmlRnT0SrQUctyQnN9xe/7vpOikUq6ZJOwXVf/jA8xFDTxuGV/xh7QfitSEf1UV5rTmDjuq2e1RjdGXmrgzfwD7gIBBpizAxSIrCERBAnICEmJoQzpfb3/pX3/6973xvvYdAAEYAjWcRhIZjOZlMFvPf/f1v7XKZPzjBcfFg/vY3fvu3vvX7v7fZ707z6R9+9NHt4+PtdktHIxGvhRMLRIqOBdlsV++8/eBydTafTuq6ZiaJwDEaAiWfsdZKimTAgLTZadIKBzmboomhScCJ0BKicxRj3Gw2BklHlcbj8W63G5f5drselaenp7cuXj53Bq1zy+Xyi+++s7683O1WWovO83y322k+p2FrVVXMMhqNVmiZeTqdLJdLIfTeHy1m2/VmuVz5unnw4MEPf/jjyWQ2Pz66vFx/+uQZgv+FX/hzn3zy8OT0trX2d7/1e2+/+847737h8ePHRZEba5umUZo3AEg+bNa74FM+yq3FsiTnchYUhA8++MCVZUuyuN0lpo8fPjw5PjbTa1TUw+X9Wi94w+11v9i93s+99C2wzplhO9umhhxfcW/YCtjCzcmCP4njj4PkvBF79i+++s5267ZenxOIyo77kAKnyBJYYoKQJCZIDDEhMzBDSpKiaAqUGFSeQEQfb8atwz/92nMbHq22AwARZVkGhsiayXx25+5dIPIxqMbC+fkqxnjr1q27d+8qOcKzp2fj
8biqqiIfTSaTw+FwOOwURrjbbXW8Rgt3iCbGVAfvY0hREI2gEUYwNkRWda4QQkrtyfRFzj564itmZ8iyTLF81lpFTit8kRkQVbWGhyA9faLFvSHKsU8o+6JEX+FUCQgRaZrGGaOtfkvGkSEUBCaRMnOjIi+LPHeIICE0IfgQqtLleWatAQNACNSVRl9dGCLQ8ZQOY5W2BqK14h4h2R/DEsoQ1jG8kOE97C9cj/byQ4ze9/zj/U3u3zMsisYYJaW+vnfjQl6t7bx6bkPsyc3NPcCp3igQ9cDa/uhRnZ8DGdUasrTzEaID9cNHka4XMJgsvvFHP6vudMPefWaedxWKsLBXUgXd+CwYRUKEp2fn3/zd33v4ycoVaJ1LguiyJiUgMNYWk/Gv/rW/+sbbb03ni8Wt0wdvvbk4OqpD/OCDD2NM2+12khe+o4Pvh+g1UNGzWq1Wk8nIIDjntOPADEpiiS2AE60j22PwWJjBGLSWrCVjibv+gib9EtNus22aylprLI7LYrdZFZnbbrdEdDSba0XEOactD+gaQIqzzbJM7Ybq863X6/1+H2OcTufT6VxfPzs7izHOJtN33n0LUe7dv/uNb3z7nXfeIaLEMF8cF6PyR++d//CHP773xgNmOF9e3r1/73A4PH/+/Otf//rJyYkGeaenp7du3arr+sMPP9QvRf/ibrez1p6cnNy/f//Js6cKGX3jjTcQUTiePX1S7favLT++ujyG+264HrrfaqmIrjtOGhIYgSC0M744cD3U6tD+6R6fB2l59Y6I4txE4TcyCPU6mgzqAwE0Fo0lbEMt45yaMaWcZmsMgugabtcuIQI4ElBhNRVYBAEQhpY1qVVdU5IbAkCUlPozHMgQ6bgJdk+gG3YSFMmzjFlC4wlJGKqmJrQfvP9+UZTzyYzQBO8/+WRjyN+7d+/2nbu7zc6STTECwNHiyLpsv99vdjvrbHWoHzx406cYYnjnC+/OFvN6txfgwJE5IVGKvD80IUJdp7qJdR1aZTRCZf8gsjFG3wgzFAUWeUaEhmA+n+5326Pj+VtvPlivllmWOWvquvZVVRSlD369qg81jEpAorIcKR0DgqQYVQ6LCCaTybhwk8kksdR1rXJoDGLI+hhHo4kA1HUNHUHXaDQqcuesIYImNCF4QBKQkOJ4Ot3udta5qq5ZoMjz3bZyFvdN6EhoITK0dVsEEf3OEHqEkw7eIRICgViEIsNZWU4nxaQsCucMojE6dN+KF4YQsnaUnIwxhq4IpbTB2Vo0IiLSOqezTiFPzBxSFBFjrXNO+Q5EQFR5lbkryLcYGGhlb69gAiBAZNqdqlcyMA29O+/96NAZDHe7QEvgLt1QO3TA5XawA7HVJ+u7/IrO7P6RMaANY2uNtWQMqYIEESESGkEiJCEiNGisTjdePTfGkNGfAhpq75mhThQNAYW1TEKm+5Fqqkn7RwbJNwgAKNeXUlm2JXQf6ro20pyfnxvnppP5oW6CCIt5/uLi7/39/8/Dx892dRPRJIHAkAAQiEZ2t9rWdfWTD39y7+0HnNtsPn7y4qyuGmri8w8+CZtdrFJM8e7t2+vNDnOjfQyIElMCkTKjybhIKdy7exp8/fzpklAym+/3fjadAgRrLUefObffbSeTkQBLSlWVjAEEdsYhUggxMwIiMR5msylw9MGPR0UIoanrWyfHmXNNXTlrvfez2fTkeLFcLrfrDTm33+/ffuvNGMLhsJtMJpxSCOH27dvvvffe3fv3N+utK3IEAqTtdns0P/IhhhCKIi+yfLO+bKrq+PhkNp08ffzk6GTygx989NWvfTnGtN5si6I8PR396IdnL84//dV/61dNln366NHR0XHTeGaJMR0dHWVZvtvtmFld2tnz5zGmk6OTpvbGGR98npe7/f7o1q3Hnz4+Oj7KyxEaWl6uJ6MJIZjRTON1HkiX9DLrQ5/XTkwBd+uuXUlds08AEBV7j735RcTUkflouYKxxfZzJ+yhtH3cbY5hteOn
Q1r+eFXR/7/c7OckVVeBITKStPhM6KE7KEMqB1ZSQUyArboAa7YnkSEl4QRJIMUe5yY35/kGielnpaE38lFGUMg2WZOVxXQxDymWk3HklEBu37bT6fTNN9/UAsVqtVouL1++PH/27Nnl5eXhcNBA8u7du7PZRCS1SBBETc76FCQJhMQp8Wq7BUERvFju17vDaDSeTubawtQzUr4xPU+lOksJlCkGEfvQUlehc64owFpICZQdDboy5hD+fiNkuVFC7Ee7bgT1+obcuiLPc2vyzI6KPAVvkBEYQSyxcybPAEQy6whFYmIWrSYhYhLm64lBX/R8tTjWLsRXkqQrjMzgGPKxvTZHGU589mlfn87CK2iUG++/saJe7avdqPy89hxeTZJejwV/3SGf8agDgzcerbVoSCz97I/GWWstWUMqittHry0I7+Zjf8LDS9YF2d+r4QBiXdeL46PpdHpoah9iYjo7v/zBex+8/9HHL843VYTGp4OPPkUYBM4hBGb5nd/65nw8/eSjh812X6233/v97754+MiKOV0sEODicq3+NYUoUYjIEhgCRDEE41EGHIOvU2o3BQBozV+3FaJYq9hgtexgHRApIaTyj7ZDoppq63U7ayyBr5vjk4XWRUZlEX3tvT89uVUURV3Xs9lsuVzqWlVZ88PhkOf5YrFQRJvOBytVU490QzRFkQHA4XCo97s8d++8805d12++efrjH//YZPn86JgRmOHnfv7+fi//r3/4/wagk9M7P3rv/el0enFxgYiLxUIZPokohLDZbO7evTsejxWaoBoLRBRjXK/XIcV9VVljpuPJyWJuUHLnFAsm12eFX+0y3Fj2ry7ya09Ua0Cf3Nwa5upFod6bfVa94U/o+Clu70ZsC9rbu3LIojTnAEqT29XQSAA7RsQu17Wk4mFCRGT6OVPRfx3CGmKS9h+zer6QOCWJsRtzTylyiiwxcUzcoTdRpANtMw6m2rHnI75qQ3anri5COVvzojjUtcsz/ZDJZCIimXPT8XQymuR5MZlMCpfFyIioOiBVVW0P20dPHr+8OJ/OJ8bRvtqZzIFpMfFDO6vpSF3XdVKuA2MzZ53FDqKjzMB6GzpQJSmxng7I9yPn6lxHo9JaCAF0xpk6jSG97J6TfvgN3qhHqfs0HUdR+w2EWB8qX1f6hbJEkWQdSQqZVdZ/JoO5w3FpCSB3xgAqBtUA9PKwQ6PW1TEI5WZW1B80GF2H6xHVjThmuA9ffWz9nPfB+9j42HhfN01V+7rx3g+n2nWdta8kXWRJYsvTL4O4CjoJgtfWz/sw7rXbR1kxew3y4ZMe7zb8XyDVYibBa8+Ns2Rt/2ics5kjq6XoP8I/46xCbYd/VxnSbpxh+687bjjsG26vB836mMqyZIDdvkpIMclPPvzkX/zL33nydHUI0GvjGU3oCVLTmNw1dYCYnnzyyG+reTbmfXj/uz/64Xe+Cw1gYmdM6bLdYUfWcOS2O45oyRgAYynP3Ww2YU7ee+h6qAYhc67MnTOoZscZaw3pGi4yk3UBH4qQKJ6FJEXg2O8IdYGqJaTwaX2squr09GQ0Kph5NpupvkEv+Ky34vj4mDvG41YJiIhZrLUEKInzLDNE+912dXk5yos333rj1q1b9+7dOz+vd7tdOR4d6iYmcXn+pS+/vd7A73/7D8pyfPv23ecvzpPghw8ffve7393tdvP5YjyeTCeTxXxeVU2elzbPjm6dhCRVE3yKo+lEAJj54cOHFxcvyyK7d/du9KGpDlVVtWq9nx3i3Hg+tCqdEyEQQjD9CFvXq/ts1zisfwIAUF+f+1M4fkrs+aqReq0leq0tgLZowgaTJVZWMWvQIhgk4CjMkgaurHV+kJIwAydgFk7ADIETJ5UsEWa4EWbeIAB8Nf+D6zZreCGMYKwtRiW2Olsdx3SIdV0/evRov9+rLoxzjsg2hyp43zTNYrGYTCaLxQKArbX3798/Pj7WEr8wMkMSlo4e2mZuOp1WTb3dpwyh
LJ02okejEXcKtJojquvS4H42m52cnCghp0avvdW21hZFYQx4Dwon6yTdr25+f5nD73F43xTk2TuqG4whkhhZOCZJCYWtwXKUW0KEiMJksCgyIiBo8fZ0NRPNiHg1t3CdXWiY7Q2/lNemempeX5uN3TDH/WNPE+O7o65rtVO+bqJK/Aw6f8NMZbiKUhuR3VxdP3VHvbqJXtvzGz6/8Z6OBOzm8/7RGKOPZA06C9aCNcNHdG74fPgehk48SEeLDalnffVRur87PM9X42AYUKUwMxiqmrDe7gIzGXd2cfkHP/jhH/zgJ56BCFyRk3UIkDtnCVNKIFCWJTCk2s/y0e/9y985Hc3OHj769m99M724GI3LVMfLi2XhCkQSQzHGHvQgwoDgnBmNi9GoSCnE6J2DjmO1/Tp0nbe+JqW+/dZ1eBNz1GieiJoGVLuOI3CKWhEJoal2+/l0EoNXoZ/og6aS0+lURLbbrXOuV/wpimK1Wo3HYyLSSQZr7W63q6pqv9/neVGWY71jqvy+XF5sd5tQN1/50pcZ5O792XqzYYS8LGPis7OzO3fu/tIvffn8vPrk0eP5YrHbHfI8dy7f7Q6r1aqqKl2fOlulX42yemZZVpblbDbTYfbNdnVxcXE4HDJrg68JUY3bUB+KOoLT4Zd+Y5e96iZelw7eWDOEA1d3/XVz43f/pA/7x/gdabFkytiBorTIeurIGuJ25RkxKP1IEiFYAjJgiEBSt2EwJYgRkzVEJLbN24iIAZKwERTGlBISJGYUJNFfbMdyBzdRwUIiIiBGRBsr0AURDAAWEBJfmzkGtNaOx+P50aLxfjSdjMfjZ/5pWZbPnz+/ffeNFFIldWiiCDZNg9YAwOPHj+/cufPgrbdEpPYNETCyyQzUFjFwayKptwsdtKSDOhndcxBjFAEybXUxxhaXmBKM5rPxeIzCzjnNVPTRuZZdwlpV1wuHw0FXturOAIDqhGnvql9n0Po8YWbR2UJE55xzERFV0CezlDsjYMhgy6tyNenoGh9dZiMHjsE6YwyExAapcE4g1lGYk1gy1iaWQYlDywNEAi2N8tXJDPVdr+nTDmOs1gteAQvxWvolAyscBISjQit1As8jK8E+QJu+iMb/iIjIIomZOKXU1+8QkXVQBxOJqFIBsmhI87Psz6vN384IDQdssV+3AxqY9nlPkdLdlBacmbp39p8iCtE0Sn+UQAiQQQhJQAgHzwG5f0+KUVplT2px4yDY38IOjqfT/SLtiQjIZ1wj9j6vrc/brPYhCRbl9Pxy87vf+s73vv+jOgARCFCK7GNUdDVLFI6QudDEnCg1Hmr//X/1+/PTox//wbfh8QUILOaTy8sqgWhhFo2NTa2NEp2hsRZy65xzzKmu65QgU2EJARFo6oCFR8TMmsw6EfG+5hSsQT1hFIHU8eAAIjCRDgWKtVdl9sB8dnZ25/RWXR2Wy+Wd0xMAPhwO0/E4m5jLy0ud1VP1zbOzs9u3b2+32zt37hARIKFB30QR8d43vlHZ6uWyqveHzNjCZfvN9vnTZ+PJ7s69N0No3nrrnUefPl1dbh689fbFs2fWZj9+7/27d+9+/Re+8MnHHzdNc/fu7ffee+/tt98eFUcpyWp5aa2dz472+/1sOnfWbbbr4+M0P1qUZala8IHTeDbNsqyu62q3L7OSBBzhoXN4vRvGgYQ9vJLwKcJZxfOu07J/Bu+dCnmglgPVyvUG+aqlgq/Bdv4JHn/kbO+n5nnDXYE9hAHBoFgig2SJnEFDQCCAjCwqsNK2ppLmedAlfKzpXZcIJq2FDot1n5XwwSuVPemGMdTo6HecQJLwZDbVqXP1uMYYHUjXRaCT6fq/0+l0PBoBwHQ61UxCJ8S11HODADNF8TE0dTh7sRbBLIMKoPG+LMbOZZvNJqVkDDjXmkIAYAbNV4qi0KtohZu7oExPSceA8hxEYLvd9nSO+gZVbOgnu/VihwjGPgrulXL7LkirERhT9EEJ/htfiyRHaA1O
ylHubEyeCFwGyGKJnCVnLVE3uDkoZnI73nK1YPrFpTSe0tEgwKBgeKM+PEyJhoyUQ27MjlzxarRABiN9Q3jnjSnJfvJBQ5Mh9jUNjMKNEvGN9Tbc7TgoCg2f3Hjxj7RRP7PEShaN/dkfAVrZvL6Bpxlk/3rf6oNhmfqVM7n6fge5MgAIYRLJ8sKH9N3v/+Ab/+JfPnpyXpaWjEkivp1VhT4ds841h8O4LInp/Nl5LvSb/+gfb5+/hMxCDYfVJgMo0CSQyMAAKbHCWhMzIpRl7jLj67qpDk19wJYgrYWSVlWl3W4iss6oS9MNklJiSSI6twKqZi8i47HrcJg5AHCIIokA9vutjg9JisAppbTfbufz+Xg8bpqmh0MXRXF5edlTQCBiT93Zy6AfDofeqhhD40kZYqPneTgcFouFc248mz5/ccbMt05uv/3uF9fr9Ycffphl2RsPHszn86pqnMsfP378ox+9r592eXmZUlLBB93I+/1eV/Lx8fF8Ph+NRjHG3W73/PlzIjg6mhdl/vjx48+y5ze23ucneTf8xaurffAidUkRtqXR1735T/r4I/f2XsPD0pW2rnp7eAUsJmzH0a6AaebKCiOirjbdPJCGBqdTHNOeQUta3fX2rqySfDYK4Vqds20vdVaViEzbWuMQQlmW28OeEUKMRKTVRWOMAljW6/Vqtdputyr5sdvt3nnnnXfffbdpmsjp1p3bs6PFfD5XjePuprTfqF5aVemOhRzBuVxEmhAODTCDc6jDecwteY/6MO0vAkA/O9835PQoy3IyIWPgcDhUVSUdl7SIaGmlN1I4wM33Vr43VT1dtXSaOypmFKNXnRytGWp1aDqdjkYjjgkFLKk4NUBXG7G2bbD1a6W/891SGS6bKxv6ao26X343/MrQBd7weUPPBx14Vy2dFjyb7gghpBCvauwhphBjCMH75EOrx/tKOAW9WM+gzP56b/TTNvBr48WfpYg6/BOIiMYCGiAzfESyw+fD95C1aAwao3tSELnbuq/9B6+k3cNt1YcLPWgoMYTE2/3hvQ/e/5e//c0PP3qSGJIAA7EIAFhrrQGQZEAyA8kHEAAWTGwAZnkZ18EyOkZgaPaNA9JqBxAmYUXaMrMAFHl2NF9Yay8vL7vdAYiQEhBRZggSE4ikyJK6AonOcUfRuqYGfy3CHBWbJp0GFgDostcw6+LioiiKyWR8OBx2u11V7SeTkY70Scd1rgzv3vsWV+ycIiRjjLPZTPeIlhl1yxPiqChbsXjm3W537969YlSqj/zxe+8RWWuzr/zczxuXf/LJp/P5/O7du/v9/pd+6ZfKsjw/B+Vmuri4OD8/3+12++0WRcqyVF3PEMJoMnF5fnR0pG2U5XKZUsrzfDGb77ab3gLoNfb24UbQdvWcpKWhf20Apx0+uenPXrMpXveGP7Xjj13kVJn1n17qaZ2fAAATWMRWEkxLNkaHZFlEBDkxK2UJkoGUEICRiBlTEkBiBmZSIAxROwQmhDe24k2s6+tUA0VU9Yy0upWi5GVRVdVkMtnHXVmWt2/ffv78uRKZa9tsAPGiuq7X6/VkMilGZZZlNs90HTMzdxITQ2dDRGUJTQ1J4OioLMpxVVW7akcEItBBNDUKBiJglvF4PJ1OEZEImzroztEN6ZyDhJrtjUYjPtQhBBXCBYCUkhWTZZlrieTTcO1iJ/UgMcYYWbUgpMWgp5TUqzJK0zQIUBSFSdIEXzdNUURELPOy8g0AaC/EGMMpcUwoZB0xUwgxxQTGfv7CeHVRDY1+/7yPmkUU6Cy9t+sdDxJxx9FDRKwAGwAAUACD1peYWRlbaNAjBNuO0zJzi7jB8P/j7T+fJEmyPEHsvafEzJx7kOSkqrq7umZ7Zm529yCHnQVZOcGXE/yz+AQRQO4AESydmx0+NdNdvCozKzO4U2Oq+h4+PDMLj8ismp49CFxCQjw9PZyq6mM/4gDImg/2NvhA8WT4POFDe7u/2wcG4YePAAeDzMPlevhf
cld15U4yKoQIIEZAr2M3Qbl/vbuPnvuHn+29j/39Hs/hFXnPs/B9v4u6rs+vVn//+T/87ne/ixGKwl3ugyUKAAaAiDhA0wTDUQAkxslkEquGOeSIm5uVcxCqCAkMgRdg4aatGa0rsiDcv3IhgCzLJpNR22zXG14uNHvDoN+qtVmWqXHs0CPRsNc2oG4cSpsmIiAhgykK9BJFBr0G1ygxA+Ocy3N3cX7+8ccvJpPJfr/fbrdF5rz3zOVyudxt1vph6ghNB2aDL4puzDzPN5uNloPjUfHg+Ki0FgCstc7Y7XbbhjhbmNNHT04irjb7J8+e/uf/9NWzk+uiKF6+fImIr77/9vXrH53Fhw8f/vDDD48fP5UUELGqqpOTk4cPHyqnULMQRHz88Im1dj6fv3792jl3eno6mUw+//vf3dzcXF5ezmazoSodwt5QbQxg6Q+Gt38yYiEiHDb2EUU+YLCHSnkA6Fr0//+6mHw0eb9LA+8lnsP7N4lJADVL7GtVBFL1CSR0hMaAJ3SGPcnIe29cZqw31llwBp1BlUc0hqwhYywRkXFoLJB1NqLpKUpkCR2AAfAIBsQBGgGDYgEMokF0nchNH0wBEYiQjMryd7+x0xkUFIgiAoIKViYQDrGNod5sbiRyud8ToPXeWl829Xqzt1RYm5W7uqlCnk9GxfT66jo0cXm0XK1XbdsW07Egnzw4bTgIAbarNuxvVteT8SwEefvj9b4Mr1+dXa9a52A+H+dZVjUlgGQuRxEZczHJ/cgIhBAbAMgsZBZOlhNLkFszyvOT5UlsU7lvYhCfYd1Udd2IJAEWiQgBha2FUZ5PJmMB3JUtGO+LRQTveB9jJKDcZ2BMiK0IGkIBWS6mPvchNgBiDSKBMZBPxnVoyRKDlPt95v1sNibgFENoW0NCBCKRU1htN1Ut2SgPsYkSvTfGiLRBomQonpLjRBKJE4kgdGojImTQCSdhsQZGhcszi5KcwczbLPfOOUNatJGI5MYQokpEGk0+rLX9nuzqVxHWyZghMcikVBEh1EUhICwhSIwUEsXkBTygB7QsKSUjYhCsysaLMDCnqJqv6q6kqqMJOUnUhBhRYBj/af8j9VKfOuAWBQxK6nJfEgRR8r+gGiAJkqiUAZKgYTAMvVdF/4NoDn86ZdMeugxAEbonho7GzQycJLECwmTASidFgwk5VlVx0Sm5kvcISUEyB90qfRtQA0bozM+SSOoeNobxqGiaarNeYYoptG29z7x5c0VgF3/z+df/9//5P766WCef7wHB5ZsQhKwYm4AMWkLDSYhRrIkptgjBUIMUAIGBBCUKITJCixANgjeASVIgFE5CCLOimE1nwNhWpcRmsZgU3u32O0JwDuqqLkbZar2bTghBMu8IAQF2212KkmVFTFFA2sCAYL0NKTLwZDZRcHg2yvLMt02d2jjKcFLksW2R5WhxBIIhcJ6Ps8lsujgmajfra4SAkGKsnzx68N13X+a5HRX5erVaLGbO2JTi+dlZCDGGQNYwcNtWAjCejkiwadvJaHL547tZMbII1Wo9z+3D2Xj19s363Xa9qqajWWwb7yUvCLGZLbKmLeeLCRKOp5M2hu9evUHHy9PFrtpQtT579/oXv/oVkL1a7x49e3m92hqy6+urUe7P3709PTmmzOfzeYl0vt2dPHx5cnJaFCNOAoBGYb7GkkprdrRO6kxNgKwhYJAkKJ3pR5elkSh4k0HrlJiEk7AxveqYKsn3TgvKmAVQOXEUIAADYBEEwahcPOmERCICkwSVmUVJxEAiSosTlf1976cTyr77Q0gxJGH5Z1d7XRL6oTjfbRjUqk4655cPYdsQkYwZiqFDKavD8Yn0k39mZhRtXyBSAiHTVX4MnFK609pibaJ2slX6oHdeIWgnJA2AxizLppP5crmUKLGK+21Z7ar9fl/X9cizyj0rTEv9nW3mY4x1Xfs8F5E8y2OMqpFhUmqaRlHO221VVdV6vW7bdrnMUkQB2O/327L1nqaT
+Wg0oikiSwwRRZwzFnnk3cibZ8+fjvKCgJumQQFtzQ2qyohGRLTPmVKKoSnL0ppMZwmafd/rTTHefn1AalhzK+xyiAJ1ziGBlrbYi2xlWRaTCKH06WHuHXOomva2S4lknHGSkgCrTmfvG4W9L5TaERERgYrLdI1+xs6e3iC5zHnvAUhnkHB3sd1rgd5vwtwZPtxfosOoD3tYORS5Do2ZWSKjIWQma5BMRETTRkQCI0YMgJaVt099B599KxZD/Xj/oM1wMJmTTkbu8IXdK+/ev/1nGkH3SrTfp1P6z+umalPm7tMR0Xa/Uyl2RIAQEsN+X1k7+9u//dt//+//vc6NiCi2TdnWSGb48+HJ8acB60o7gBS1XB6qBxXPLAo/n88B4OrqCrmaFE4/XmMQWFK61YFTj/J75FQ4qMXhwGFHv5cU4gDoD9xJKbVta4n0/Y5Go+vr6+XJ8Xq9ns0z7RluNpuHj07VhrNpmiIfI2JZlsIUQhiPx01IIYSiGLdtG5pqu90KhMKq8jscHR3t93tGnIztZrOZz5cvXrx4+/bt+bVsNpv19vzjT55+9NGLz//hb3/44ceHD09XN+v5fHl0dDKfL3e7jSpyO+d00nBzc3O93mfj6bt374wxoamfPHnSNpUCRz0zEVX7ChHzPB+Wor7l9wfVH+5n/uxCwg9Sejpc1eHX3jmzqz7Ee/ekA62y39eN/ed3il75Z4e9joZ8R4Va25XQf0a3JykRGAQCIQPUTRM6yz3b3Qc6ehIKCgMnlsgCait9eMQkYABBIkXaUQL1BEaj4TB1+2gAk3ZfVRf2hjOFOktVHkANhix5AABrLRKmukPAazhEgLqqsjwviqJuWxXZ07BXlmU+GgHAeDyOIZAxKSWJXJa1Ma4qK3WgVcTzfs/W2GJUEPmWkwio66xHhyjOUGacgFiiyWT04GiZ5zmihLZBREsoksgiWZtSWxSFCKaUnENrnYjUVayapjQlWqO9mgE4AJGEMQpTHyr0Qx/oeohorQFCCjQEA+ypuymltmmYOcsyDClK50SgwGgi2u5VzzPFGBOSMdZ7akKMg30bGl0vCmEdvhtjbnXFQEgE2hTrtnHGipchtBPhvaNfetIh3C45PNycQzuut2gEgM4/T+e4Q2vOGOOsAiTSAOhH44iNIBgQ/W3FEhtgB8bEQ1E0TCIdVq07O7R1iirp0hkVgbJaFVjclU2Cen+5XZwgjIgfxEzCexqet5tce63QHeHYz95/avPTQdg5PJhQbs8U0tK8A0GbYUDblZX9H1dlxQyGXFnt6rZlxNi055fnf/Znf/Y3f/M3TSvZuEh4+7LvNW+xh8sC3LHO1tet/beUQLSxY4wqGzYBRrlTFna521dVlTu2NpOeZpdCTKkDZBoDamAy0HuG8HbYptOeuQ7nvLVt3cQYqSistQ20uthSSs6Z3W53cnKCSFdXV8oBPzp+jIg+y26urr1/ut/v1Zs6xkhEu91uPJo1TTObzS6ubgZfa+AYQqhryEbWecsxnZycfPXVV4GZ0GJTz+fLZ8+eXV9fv/tP34a2BAzn797FuDk5Wo6L/IcfzmIEQ+8ePXr40UcfffTiYzIwG8/P9uVkNFksT7PpTMjPjh4Q2tGoMOPR44enVbkLIbx+8xZdNplMVvsGEcfjsYpdDNnMId/3tvQ/2H2H1w8+zMMp0u3X/U8GlH6LEOL9IMqKKD5YvB9a0O9F4vfW9p3/RYCfCXs/tW0+EML1gzCAKARMZDXC6bnWBzxScIQ1aIyxiMqbISIERrwt+IZSb4hL2EFjUPM+VlZcFKOc9w5BDUoFxVtfbP1WhFTd+QAZKHJL30EVi0ua8ZmUovbliSjPR+LYSOdhrVLr1rvxZMYgkQMy+zzzxk5G4+1+R8Yws8TYNE0xmgyxs6qqEHi9hvEoFiPIc5+HPISYUmiaVG/SuLDj+ajwtqlTio1zbj6fVlVljSCL99Z5I5C64xLAWjtw8/O8YObdtlYriRijug/o
h2atxXCLCkFFEmkjmCgyUIwMZJw1xsTYfeTWWpYkiQEghCCQEMAiJUqSMAk7Mpn3hc8gsTGAiIHTvmVGznIkazEBApBgklsP2O41QD+TQwKgmFKIHDglZlGminBgtYDobK/vbUL98+EUG06ue2mpaPfk4P4iwokPcZsa9m4N6A0xM4kw22HTiggkJmdVYpJBhuL4sMfQvcEuUetmpYenxnCkHwyhYZiT3Z4pB0X5z+St72/Ae//8yW0Pd2KeUhcOo869o0WnGPceVkSisIgY65u6XG92KICG1tv9f/yPf/H555+3rSwW01agLBvp1eEPlsE/cchAP2PT65qBaVwWAXUtX13eVPtyNhqNc6yq/WyeaVXHsfNMZ1HDljsiqIfBT1fToJMw5HzDcMs5p/4w6m6vQC3v/X5fFkWx2+2Wy6UiroGjMSbUzaChWtd1UYybps2yLK1uZrPF2cWVSj4VRZF7W+1L5ltNpVExyvN8vdshmOl8FmPMAJ4+ffryZbve3EwmWdPu//Ivz/7tv/31n/zJn7x69X+bTt35WXj16mw2W8zn06LIQgjv3p6/ul7/6tOPLzdbm41vbm7akMjA6fHRy6ePT05OYoy//d2XTerw4Sml8XisdIX3U6XDgHf49R381/3p2PC/9wbe710I0QDK+/f/0LL9uTrvp/ocP3X9g4v8n75obabKDqpVdfgKbnuYRETkDBGIQRi46r0TNJrBa8wSEXS95A8wDm4Npu9dUWxLf+lGHaqABh9CBg4nMDOnyCBoyFrjFOZ3cnIynU6NMQpG6XFNpJQ+BQQrbmU6nSouQAmqeZ5bq+8M4eA1D6P+EEJVwXgMxkBd15vNpixLAJhOp6cP5qPCa0KaUrJERZZr8qUSVERgLCVJdVuX1b6qSzVgE8Q2xhACAPgsG03GPs+Fer8bBAW7yt21S3dVnlUhnjv7aW+9s94hgfPW9GatwpFjcs6lFIalaY3JnffWEeCoyFWYjRlCgCaExCCklGrlqRhBYsAkkEQBO1GlCZSJ2LZtCEnUIsFZBKNwgBjbnuSDqkjZyfxo/B8kLvGOguXw7gYF52FBDlFzeOq6rve7TVXu2qZKKTBHEeGYOLaxDaFpY9uEtglNG5s2hSaFJsY2xvaQbthvraRW1kn60RowA+OdS5eHDaXboHl0235BsAgGZPjRW5DTB3/gJ36wEyt6/6fz1UMBBJYDwXsCoAGPLd1PEhx+9AESgwimJAmwDrGsGgFyWb5ab//m7/7+P/yH/3B2dmkzJGurtimbdoheh+aLt4eG3JZ6d66zaGtVpWygM1bkycQRUVmW+33J0On51XV3hfoudL+JaACX6cMOpJc+C7wl9hx2C4b8z3szwFJ0u2kNd3Jyst1uvff7/X4ymaSUZrPZer3WhxqNRlrYEZGiu6lHXSnfQA8NNTjTurBt2+VyCQDr9VpPm3fv3nnv/9W//u+Kwidul8u59/D23Zu6Kf/oj3795PGzoyMnDJcX1xcXVzHybLZ48ODRfJ7PZrO2ibPZPM/zo6OjplF2hFjbqWOXZfnDDz+cn5/nee67mfzd3PRuv/0Q/HGYzN0bN3zwSv+t/0Sv4r37C9CHaTMHi0c6OKneMtC1QX/35/ztdf3pMf8g8tNhD3/icner929bW+EAygS2CIbA6Yxcqz0DegQ5QqV/dYYgBp0hpwKBBPcaOfeCn9YIrMN6hhQ/QJ/q7w0HbL87/ztsg6Hs00tRjLMs08PROVcUY12aIuKcizHuqtJ6N5pMrO8mXsV4rPS4zkwyMXBnDKswybqut9utMIYA06nLsg4eBgDWGtVQUNXNm6vVer221p6cnMzn874SFUBJKai+FnPby090NIyhlzKZTAbLPY24WrPKHUMc1fXw1jiyhqxn5iZoTn3Lh9MjwznnnClyr0ORIss5JWRBYAIwCM6SM2gQs6xrEzoLANC23LZtYsUlUK+5fruiNK3WlCVGrkNsQoyJGUSIjOk+24E8N+yr
Q62Qwwc8/N/3DRyGvzq8/ZDIUdd1R2lo29gGjq2kwDHFtu4iX9W0dRWrpq2btm4GkZcDckM8XGmHS1dv0R2pLj5q7GmRRBJ10rvd/w73xMNRaDdEBFaVgfd+i/RAaH1m9fv92d93jjANb3xLtDyMPQBw6Ns8vMfUr7H1er2vG5eNqjZ9/o9f/L//P//x8vJS/1DljbSjc1gWM0jqsUA/U+118kamm9L1BEo4PT2tqur6+sYYKrKsruvttiQCpQrgoLrgvQYbDbr3GJ/Ygwa0OBuqjSEcQg9zVfCn1pHaLG36nn9PTKomo1EKcTKZbLdbg6R3AwClAehTh6ZFEaUhDSYhyuHTSbM2QlXYTCcj5+fnbdtaBx99/BRR2lA9fzHdbHZ//dd/PZlMTk9Pj45ORODs7OLmel0Uxenpg9ls9stf/pKI1NraOTedTququri4GMr0k5MT59w333y32WyePn162DwfPufDs/d9MvS9FAHgA73Qezfq+XPot4DYqZcNKp2395dB24wAgP+plsdPlUnvv4thn/7zCQyEunYPnpXvSIHA7UI3CBbRIhoAzVs7uRYCADZIBjsbUXUZ1KaEHgaA3Cm/AEFnGISICh5Qjx4AAIuCIESqFKMvQzV0oe8pYYfzMwAAMTCisRZlALULauzRZap7wxnnnIs2NbsWAHb7PQAcHx+Px+MQQmJuQhiNRrptdutNnuehbTnGsqyt8fuq0i2x2WzG4/FksnHOESJZa61PSIi43+/2+31CyHLnM194Mx6Pl8vldOwgtTG1VhGyJIwcJYIFlzuEzt3NWtuE2IaQG6PRN8Y6MZPpas1uAxsrAIKqvkjGAAsIgs8yIkqh7XDnCMIAgm3bjsdja8l7j8Bt23JoiTq7bT1ryRpLxhnrDBkU4GgR8pwicNVCG6KxmJA67KwAoFExLADwzjUcWYQFAkMIqY0cGYAsC8aUEim0l5hZEkt+2y0carXD7TRcv6ecJCKq5nO4CRFQh0bDRwQpcoCIUEmy1hrrnXNkOTEQM3BkZykatsly1GaYGEYBAgFW/2gASCIKmtCXSsPrJIgdeq2XWTEoCCQsAInAoABAQkBCQAERQlVWERC8/Q0i927pbkfR/r9ORghAiR4gSAAscO/30C+STq9WK06AvtWpiTINBBKNzQDMfQBkFhEN/3XVpJSaOn759Vd/9r/+5Vdfv2aBYpy1KVZtS0Qk0Ah7X7QxDjkfdFP22/B6p8vaw0xExJCSTRgRc+e96YQjYoLMkTEmxYgA1oL3vg6liKQEOtIDaLpTqJMi63Kd4XAfGpLD8U29aSUihhA4Jf0rPRw0YdrtduPxWLXHbm5uHj6YalyUnrrHzKFpizxPISZhBcE1TSMizpimCU3TOIMdsYGhbVsCtEiIOJ1O97vq5ubG+9y6rKqqOjQPT5fr1aiqd8vlFFLcrTfv4MePXv7q+ZPnmOjq6iq2bVu1q6vVN19+/X/50//+zY9vHz9+DMY8fPCYAUfFBEDevXu3mH+qDaAYeb/fP3j6/OHDxyxRwKhhgEBiFgBA8sOIqf+NQ4v+XsMT+ubk3V7lrfojdAf8sLqGh0SR4cpdtV68M+Jm7DzNDzW3NFimvtH6fmB+/3YtZeH/Z0ZHPecVQZGsrB1LROxuRyEU6mjsoi1NREFKhEKYtNRDvJ87DHtBpVtS4hQ7K75OuvMnLocfwUGEVmoaaQ9E21zM7FwWYwRAaz31RuRNHRR1UlXVriqNs4ujI59nddNwL8iir3O/3wtzipFTKsuSiGKMIhhjats4m82Oj2dakAFAjG0IjSZ6ux0AwsPTB3/w6199/PHH8/lUAaj6yo2hLMuKorCOWGLbtk1bDUe8Mgj1nSrzb8iphyChmWx/4htEg2TQEJH13qOxIpIEVAxVT2oVJISBww6oqs1ERCgoAJyAxQA6S9YYSJFjQJDM29yhACSGkORQuQNR7YTIkjHGWCTFc4qaKQowCKBhhhgT92KhBKhUxcMI
9359cNiH+PkWBXY2BnerQBRgiW1oq7qt6tDWKbaSWGKS0KbYchO4Ddw2salTEyRECZH79ccpqMAQSJL7bU/hXnVW0r2yrE9hVbFJbo9+1LIssYhgx2f9ud/U1WeiGHDg7paf+n0YXUhuJ3zDf9GBjOrhqdHdWbqX17YtdOWUnJ1f/uVf//0//O6LyBCCQqK6/L2JHBOgMQP0oAudh7jiD10GoHVi0FA0Go3m8/n5+SUzWwNaFeV5Php5RB14y4AIGGZ196r8oVPCfF+6CHrcUCfe1Au7M7P2CfX2sizVAGEymdzc3CCifq0pBIMUY0SBqqryPNcBR1PVJKAfl0ZTvT4ajWazmd6tLEstBEejUVEU6/W6rus8z9u2HU+ypt3nhXvw4CQvsulsslgsdtvyL//irwHg008/ffHiI0JTVZVWjdvt9vr6+smTjrHXNI22br788ks91i4vL6+vr40xp6cPhxSwiy6DsOqH9K3uBZUPx4Hfl3iOQ4X38/cT7IdW9+7Yh8l7se2wtoP3Yt5wy63f3nBwHD7c4e16UeM7QKM8DO1BIIi1xhlyzmSWnCXvjPfGWeMtO2e9s84Za423xhpjDDpPlsgQWkNGqSLGGKLODUTb+n2LTGO0ki9Axbqpt1nDqPk1KgOp0z+l/lf/KXdCmNYhxRibEBApy/Isy5m5Lmsiurq43qw3j04fX19d79bb/a60xjdVbYwxziZOz54/L6syxBatmS1mz54/z/P8+vq6yLPZeBLa9vzsrMiy6XT2zTff7XZllhXPn30UWQxldRvKfb3ebKsyAEGW+cVi/uTJsTHQtOXbN2884cuXTx1JaKunTx7V1Q4JDWFKsaqrEKMAhBi99XlRING+3FdNa51z3gtQPiqaJjCz95kgsrCxzjrnUmWMcS7z3quFoQAgqmwmN21LxuZFAYJROMvzPLNXV5eLxWy/323Wq/ls2rZtnmUgXLdt5jLvs7ZprfNFXoSm2ZYxphhiDCEmIRFJDEnYWBdjhxsCSW1oDMJkVLRlJSIg7L3JMsOJraMHJ0eTyUhiFEhFkc/G49xnRGIAnPfGWue9saan4Akg+CxLzAJinTPWsAgSWuew02tlIvL21lZJ3di7xXGg8OIsEgKnGGJgTiCAAixsiUTF8FJMnIQTCkvihjtXJTVfR1F+kOHEiKB8DA15oAZjfBtDOoIcEAJySprTCicRViIjp0QGoPPnArw16RO4y0saSEkCbLvuhjAnAgEASyjCOilHYTUlYU4oDGAQFLUKoNBSjp2mgQjoeEBlTllAJAAaQ4QIzCAJmWNo66rcbtb73R6Q3r599z//L/+vv/irv6rqJqZUjHMAjMyBpY6MhMaZJCg9KA973oJOwhVxpiEHRTglALHWhhC9tYCSWArnTo6OEfH6+rpq2VkCEGDIvDeIKdYI8PjxcVuXTVs1tRCBcz5G4STzWaEITxUtUhuExWJxdXUznU5ijGrTUxTFfr8HgNC2IKCyv0WeZc45SyAyHo/WN5vT05M8z3/88e3Lly+//f67Fy9eNOX26dOniLDbbsejom3b6XQS6ub46Pibb7+bzmYAtC/LzGeItF6tivH48vLy+Gi5224B+GixPD8/m01n65uVc85nGQgmTswSYmqaZjQZO2fbUF1dXTjvjo6OyrJ+++OVIbNZ76uqyfP85OR4NMoTN6NRsRgXl1dX08UxkB1P519+9dWzZ8/buhFO0/Hk7dsfzy8udvu2qtvPfvOHDx89dvlU9cyGmnXgsN9LK/ujuAuKuub0xpTSHYrOYS8QWbSL0TXxWUAEBbWVpUtNnVJ1qyDrNhdUlDJAv1al1wcWoGHAD3jX9uHu9Q+N90DkPSSn/DT0+fCDOMxrsUeviiQAAjCaRBMIohgLZISMOENWEZ5WDCoZUsf7qhos8CHotvQYWQERJiF9J9j5rd8aDH0oL+l6x93jaP1ugUIIbYpqYjDgC87PL4loPp+rzEEMHCO3sZxOp+/evVuc
HP/q15+uVqs6ttkoQ0PL5VxEqqoCFki8XW/2+716N+t0+vp6U5bx888/35YhBqgji+BiMRuNxuh8ua9Xq9X5+dX8uIhN++L544enxxcXZ/NxvlzM3r37cTYdNW0VQ1JaxYC26tWVuuRUs1dObGwnIlpVFVmn04K6rqe+W6+qVUyEjjJBJOtCSJEFUkyRyd36mFu6dVdHRNMPPCwCIKOgNeQIwZg8y3JXtQ3UwAigOEYCMNSz4gC0DHKGvLUog2axSqkQCKSUmpAADRhEMtChk8AYQz0EKYRg+8Emfggndjc9ugNe7eOcERE4aHt29AlIIgkRjCAkTrENKCmlDgVKKGRRQNQYBAMgJgYmw0jKPxdAkiiAmFAgApq+ldI/xL2i83Z1c8fx6PJQVWTl9xPQD/Yt+jcuwF1DU6s9JAANxgKE3XVg6ZqoHWNEIS1wq9rX14Jy//GxG8OmQAIgiVNIoXWE27b+7vtXf/U3f//11183datgbmbWd6yvui8P1bqpf3wEFOB+ojmcOf2XpYuQYowC7B2pUYkO1RA7nV5vyHtPzG3ThdBeD1oDtx7frerZ3oO6AICugkGHVkslRMyLoi6rGOM4y4hIOBHRZDzZbNZqlF3XtYjoVH673eYuXV5eemvLspxNxk1dp8xrv8QSSeIY43w+L8tacS6ZdYvFQvtD1npEnM/n6/V6Np4M2ZiI6JBmv9+vVjfWUZZlKnhWVdVisZjPd99+s3twCkdHJ5vNxliYzQtr7cOHD6/PXtd1u1wumySLxUL9ZE5PT/eb6/Pz87Ozs48//rhsvrne7Far1WQyCQcivXKgjntI4Lu3yw5O5jtL5Z+6oKoF6ZMA6PmsmiK3/DxElE5UqMvqDtuVCYDuGMndURq6V5IOt/AdcUGAQwKDyB1KzU+9dm2lCIqSsA7+PAESAhOK1Tm2AYPiCC2hN2QMGouW0BKSAWOJQIjQEhziwKPcnhFdzOOuxOvfxvBmoIe39BPLBERDnatoDhUa0IY+MLMB0LGXSvEBQNtGHSxb45M0N9ercl/r6VzXtUP78OHD0WzqnNvstg8ePaTM7Kvq4ZPHAJDaoHDkpqpTjNPROEq8uLj68cd3r354K2C+/vpmeZJNJ8upy66vNjfr1Xq9IZ/p5zOe+BSq2Xx6fLIwJFVonB+PJ0UKVZZlZbWr67IoCusdAwIZQ+S9R8TAzAg6nBCEJOyMXywWZV2Vq01uHRHVoc1CK54ADSOklATQWuudV/okJRGRFBMzixp4Ixkkbd4OdHXNwkH5mSwMrFBAg5A7P8sziSGEEBO3AKRzXhFJUTmFKTEKe2ecBeCYON0uGyEGYMEQYoxsUJihjaFpmsyQs5oTYRSOkaOw5qSkTSrRF4Qq5CDUdUIs3l8/XW8TSUQk3mJksCPnquMAJtXbDKGNUahBYEJL1ljLkAiIYrcCUfVIiXXkxkYcEDF2jURtUyYRldwlSdBTWYWN9FaUBCiiWmkCjNLjzxgNESFRn0reOga/H/MAVLMFQBDUD14nt6pYJF2AkQPAi7ZV9TsEYB2qDPFIOmfBhMOziEhMMQQOrUUg4NQorHh3c3H+13/5V//5v/yX1bbOirExZIxLKdEtM70rVJkZTNcoOmy0DtN77HUNsHPoRCJqYzQAo1HuvW/bttqXKSTnQBIwg8ud9z5UVUxgDCgYMi+s97jfi0amPVTqyaW4ZWZWeXdFx4Bq7/VStDp61PxS/0q6KSAURXF9faWQt6qqsEdmrVarB8fj1Wr18vlTpfnGEEQkz/MQgvLWjc2W06P1enu8OIoxVqvV6elp27bO2JTSbrebTCb77U7vnBmjQVqTwxBCVZbe24cPT/Msk8Qc+eHjB9tVVe3bqqyvL6/ywgPjqx++m85G/+bf/A+7sx9ns5mIIJjvv381KsbOuV/94pM//8//6d278/Oziz/8oz/5h999y8xV2Rhjgyjrig/SLGAGY27BYgezvfvUvd//cmfy94G4QkPZM5RV
QiiCKIBIiVnPjaRiT9zB/hPc5rj3mpkfDHvCBqBzRYF7Ae9nAjh+iIR4+JY0WVMwiwpPd0hOYoNERshgR2lAGACc2JtjcdIY2AEQpFdaEQERElZlHNVQSIiYKA1Pqn/OBIhd4qyT0sNur5LZ8yzPsowI+xlbeXJysr7cvLt5d/b6bLfbUUIiUl3aJ0+f7uvq4uJisVigNfuqUjQUM4NhS8Z7JzFF6qSrVbQ6y7IsH7940aIt6qrZ3KzLsg5BrMVDwOryaPby+YvQVBfX68ePTqbjcVXtT06P6rI8lP+PgVPiLHODkTpoPdRpaXJKaTZb7sr9drvvsGo9HLRvRDBLRGMJ0ZBlMsYIoWEDYO4M9o0xkpTVYBFAR5Wgg5yYGBiFUwwpJYM0HXlOWRsCSwyS6tjlbTFFTxaQOQUCMGRIJIaAgGRUUY7QGosG0STmqqnJmZhQB66ZIUKvGCD1T4C+Ehoy90MQ2sECpmFB6qWb4elaMrdpLPaqMYgE1iKmlDo2JMTYVrUxxiZLnIkx3G/yyBUmtoBWB/EswEo3IZEAAtL5PYnS1fggDB/+1uqzz9FuB7o6oz+McEPP9nBjDldIgAlQOrAPAKBgQiEFAQjoAS8AqjQwTFtJALA7/jWPGR73zgmi6WSKCuLlFJt6X223Zz/++Ob1m+++/frqqs7HtiiKqmmdcyEEtZHVfpZmOHxXmaMr+N4/+jocUDdkIgBnScu4zkZDgDR2YsdY0JWZ52CtTambCscYmDkvMs0RlZSpoBJFYNZ17Zzy2Q301HhFaZpOiICMMaFtPGEIoSzL0WgEPekzz3NVM2dm5qLa7Zl5Pp0Js2pATyaT3W5XFMXVzc10ulDZF01S3707K4piMsqVzFBFXizni8UipbTZbMYAo2KSZdl6vSXj9ImMQURcLBZluVcGgnPuwemj6+vV9c3lA/fAGHt5cZYXT0KsjDGPHz+uqjqK+eKb7//Fb/6Qmefz+X6/L/c7RTaonrAxpixLO14OUX9opcgBcIwPpCH0W3p/ECYiAB+Ogv3G7MOE3tjJj3USLQdSLId/pb2Jjr0i/XOIDhCgW9h88BoO1+1BeXfHJkWLKDv8ze9T6oGy3BG7Zu0tPj2J3EY+Q92JbA0qb49ASFUfkfQ6GSAAItB509Cb6pVy6HZrdPuxg4ERAoukqF+SaNgzxvSgbmbuUJsHb1g9/JKIWKCuSBKp61CWtQ6rh/2w2WxijLEKeT46OjpxSKvVipw1zo0m49999eXy9OijTz5++/bNeDTSYbXtmMq83+7qulU7ZmY4O7vIfL6tmnfvNvsGiMBnOJlMTeb1wzTGfPTRsTG4WW3J8GRaWAd13dQ1Xd9ctm1rjCOyKUqMLETkvPFumEKjIUEQ5pTSdr+bL44nk8l4PE6CMSVEY61lbnQhqW8qWUfKWDC2k+uNoaPx6cNKJAPM0Ri0znBMxpjYtNZasq5JgZOQdm8kGWMMRE9SeBOEAnKTmjqBJEigc5nUNdCYmVNolUqoIUM78wSIgWVfNQ7zcab8vxBjZLbQCR10MPTD3O2DYQ8OKrzD/yIijgkRjbWHu0JErCpBiCFAg5xS1yJLMUjqWG7G3Vr4pgajcIOCkhSx4nMG55CtMYYHZXTFOyMCGOloW0KkE0YF0PcngE7URHRqj5SAWeA2Y4NuJsLQb3sYDhIAABThbuitnwziUCzeuWcXVrVbw52sUZdSdtlAR6sYoCsACRKykFayHGNTV9vN6uby8vzs4uxdudsUGUynYzGYUvLWrVmcsi8IulfcF5E/dSzeptrcRWeNeaPx2Bhsmia2QYkEMbYpgSHUSKCKENbAdDq21g6KBfryVUXMOdGmxZC6abAcjYoBfjkQ11arRlK5nM8Vcl3td5PpJDT7zWbz6NHD3XqjEgca1ZqmmUwmTVlZpKurq+l0evbux9lstlnvxqPRfrsbT2fffv/9y5f25ubGIKU2jPMiy7LVajXKHzB2JLLNZvPw9EG129d1
bbNsNl0URXFxcZUYFovF9XrlnLm+vp5Op9vtZjabVVUTY/z66x8+/vhFjIk5NW0YjYuPP3l+fX1VluXy+GQ8mZxfrReLhVZ+Z2cXRVGsbq4Wi0WMMSXJ85EyKKaT2zLm8PAfMssBGdR/X3Bw5zvB78PxQlGTCD1D9KDUw171p5O1hf7kZx3G6VOlXsPhIKTdYjKh73keMPYAdHbeea/q7pH+nwJa7X2w1Pt9Jnz6HgSZAQ61r4jI9iotHYBKryOpOBkRqfKv6Wg9t+eUJT2YEAD6BtbwnlGEOSEiIHbuGBr2ellOPV+gz1OIk2pm38PadvBrtThHxPF4fHF+qUwX59xkYjbN1lpbFEVbVuvN5hef/qpJ8ccffyyr6g+fPlWe6WQ8BsJQN2UMxNLWjXKVMl9Y62+ur778snn4YE++AIDZDFJCAWjbxhKOx2P1wQK8ef3Dt0VRvHj2XGGu48no/N3ZZn2jtARCmzgSGbLW0O2UAg5KARHZlx2bfjabbXZl27bWmpRS4KA5r4hwB6wHkZhluXPO5xnXd0Q9eJBiImOtbWPyxrZcE6KCzYTRONe0kVPKsoxjLRwIJDOUWcmdyTOWCCmISDKIxoBVuFu6ld1jFgAFbUICjEnquubco1oN91tPc6l7iLthH76PwUspSQ9/BwA6cBPuttRdQz7u9TNBcCAUa9iDEAA6lqfern+bMLYalUJKgSUmZJGMrWUxhhxgIkTsSGmIUVW4ukdm6nVn+qLzMHG+0645hBXcOUfuN3PgMEk/PLneLw01jAH0Zr9yqybIKQFAlxseZMcpBUOgSpehrfa79frmenV9Fdq62pcGYVQ4Zo6h6b8uAFR/XxQQwoOTUr9UvIMUvXOeIIJ0mATnfZZlovb2MWnFZi0FZu/NUOqJiHVa6iW9kZmN6TDbeZ5nGQ8gz0OKy2g0KsvyMOwhYl1DkUFRFAJBxTy997EtRaQoinK7GxzBrq6u1FdIAZ83V9cvX74MIaQ2pBi1P+GM5RhFZL/fe58rxvvhw4dXV1fb7XYyGueFtw7Laq+uYQq6CSHoxLEsS6XxpSSbzebZ8yfz+Xy5XN5cb51zNzdwdLSeTCZPnz7abK/LKlhrQmg0P8jz/Obmu1/9wR8i4tOnT8/P3j179uy7b79+/vyTlNJyuUREhaEO1f9hkYcHVBNdqEPNhx9iKfx8mOjuD0Ygqvwm3Bkrp4N/Duc8CqAw90ou98q4g+B3sMLvFXlwN1Hub1eExz+n1Bvexj1YAaJaGyAAI0mnOoaIKFY1vclYMtaSMcYRdmLvJKbDZPZdF4BInd6KdFieXsCbb2ebLKpPjSklNrfMdBEF9GvDp/fwA0A6jOWUkjDfGopyAkY+Pz+H2DUPHz58OB8tNBtqqmqxWBBRva9TSr/5zW9OT0/Pz8+VW5pSaqSs6jo1bbUvOcT1ej2bLh4/fmxNVjevvSuy8Wy7qYKI974YjWazhSsKPUarqqrba2Po9PTo+HheVVVd1yypLHcqLOuzgkWiMFlrjIvxNoQjKvBNG6HIzGVZThfz6WLehNS2rU47Kmx8kdsDRBb1Bj26QzQJUMEzY1zqdb+UwJBCBEeEklIi2/keGGMQIrJ4Yw0BARMnYYbEIEwiBFB4G5iJ0PuRQ4ltExkMACAZVVjo1OM6oHlV1Wk67bN1YWYlESnW4FBcY1h1h2HvMEvtBxJ3jtd7WJghamJqAYB0wQAxAAOLgHGemQMrSvX2BTAiJG4TpzbEyJIYATxz8kzGOGa0BtFA/xRREgAQWrzlFHZEad1Pty++M165E8Duhe3DSCZ9f1Krvds9j0N6e+dE6D4fc+efiGgIYeg+HXjq6hQwxVaIjHDb1PVuu7q+Wd9cbVbXsW04hSLzjqrdvjTZKM99WTcahkGtHEAQkfWg+4mLvmbSaaQxAOAMOufUVzk0NbBoVBuGHTqrk8AI6Jwjim3bqvqJvvgsAwCo67rIx3neaUQg3mbhyg3QOKQLTKvDlCDP
PRG1IQ3gfkScTqfKOtBLlmWac4Mq+TGrYIXCQXWwp0OQPM/VuQwRq6qqqgqcm06n+90mcz7LTQhxMpmUZemNPT4+3lXVZrPJR8XR0VH55u1ms8l81hFgGR+dPsxsRrIfZaOPXxY/vlp/+uloMZsJV96l1fXlaJTPZrPxeHx2dqYxm0XG4/FoNLKQqqp6/vzl9c366Oho34SqqoUxpdhvDR5GJ8YYLS+YpQMk38qg0Psx4meanP1JdScEfvBe9zTwpLfPkds51xD0bsNehDt0i+ERIt/vD3VpXCQ4MP/8PdE4v+9lkFI8lM+4c73vXx3efsCtuW1V3dvzB33Ln7scfhaHm3wYmOmxDgBVVV1fX6sH6W632+/3y+Xy448/XiwW6/V6NBotl8sffvghhPDrX//6s88+u7m5AYDZbKaTP4VX3dzcqFX6eDze7Xa73e7HH3/UqcB0Op3P59pwmM1mi8ViuVyqwplG1j/4gz948vQRABSjbLPZfPXVFzHGLMu0jy8inER1mXU+cfg5DzomOgMjoslkovoyesswGDvc9ofXFSc5CC8NMaOT+ELSTavGDkrIhb5aAoDceWcsYqcZE4KEADGCQt2stSN9I8YggjEwaIYNX/qgYThYlXIv7aapyWGxBXfFWX5iB94v8uCgzht6pPcEzO5tVC0v9BNWrpV+SvriQtuGtm2rOtRN27ZqtKqvmENMSuljVozcIFN3z7T93tTh8AUfMlA/GCfu/fP9BS8HnYDDJxpu/5n/PbyupMMYQ6irUsfg5b6p6rouRZJqA7Uhee9VyeH976X72N87IgTvvJ2uS2Stdly892VZVm3oA1Kqm5AS61RvyHuGLVzX9VCyONdxzHWiBn0y1E1erFVwCvTaZkN4gz6kKVyTe+Hy6XSqgoLU89l16eq30y3UplUfBp0RKiGvKIq2qtW8RU+Dm5ubyWSidWrTNJvNZjqd6hPpvG2z2RDR0dFRlmVq+MDMCEbdQHWbAMDDhw/LGqy179692+93v/71r50zq1VnIfvdd989evRIyX+bzUZEdAGfnp7e3Nzol6UhPx0IPA5L9N6xKR9qG/wzw8F9HvZP3fO9gxxFuuH3B+/DP325t9cObzT5aHL4yobXd+/K7etmr4IbBIAkCIBCxGQoI/FkKM+cy9H5ZIvaFcGTJ0PWsjFsnBjL1kbrxFghw8aKsULYD/lIiMmAmA7hyYQMlJASUgKMERJDEgImYENJcYZkGYnBMJKgIUJA430OaASYRYBV8JEQKREmBAY01hnrSahpYls2283+9Xc/bNe7ly+ez2ezstmtNzc2IzOdUeHz2ej44cnTJ4/rcg8cnzx4QDE6xMu3b9qq3G5W293mwZMTP/JHjx5/+fU3ZVtP5osf363KhkdTszieVdV2PLbL43FRYAzrFLcCZYrbP/7VL72h5XwuzF989cW23I+ms/V+f/r48Xxx1ITUVnE6mWUmr7cVRFksx03TMEvbtrnPnXXrzZrIGmtSkuVi4YzLi9Hbt29T4izLfDFuxcyOjvdtDDEcnZwggXXG544MuMyxSB2a0XiUT8ZlXWK5S21rEPMs8863Me72FRq72myK8cg6V9b7LHNk5Pr63Dnc7DY+901I602bBJzBGCFEaNqQIiODiKnasG9iYGCyJYeohgtJXExTZwtnpKldZgu1XHLGOpdnGVmDSM576k31uuHcbXkqJGCUcR4TJiEBTEyABgnv5XoAKoGC1oDpRWVBGIHJ6A8gCXU/QKjQsaEN2B2pEii1hpPhSBgRgoSmbfbGgHALwgyROQoHICYS0zQUA4aGUms5WonIEVMb68pIskYcdTo8yJF0tJYSpAQpQkqk1bMIhxZSQmZkJmFkvUM0Rt9HAkmKo8YO9RWHK8Jx+F8TA6VoOFGK/U+CGAwKxMAxSkosLNylHSk2BCyxLjfb7fVFuV63ZUVJYghN2dxs1mVTicWIXEvrCh9upCnD4wdPL86vVDMTBdoQM28JxQI6
ACPkGJxYx0agJWULchRk64gyR85cXl0BCgsU3rdtmxIfTaccU5FRasPY+WpfjvOiqfZZTqHhJ8+W5+9uHj1ctE3tkIABQzqazVKzNyCA4J0t28pYms7nVzfXdQpNCovF0eX19fHxSV6MLs6vZjOHZCILs6QE8+NljCLGJJYoHEOoQ/XJRy+vb86Y27bc5g7nk+n52fl4Opsvjler0rq8yEYQU4ZsONxcnANzMZpEcVUym4rf/fCuyPwnH//yd7/7KrPZeDLfbsvZ/Kiqm5v19uToVAS2m3WRZ9w2EsPb691sMVsen+TFTMTPZsdf/Pbr3a40CN6Vs0n28MHi66++evf29R9+9tnx4vhNhWY8P7u5+Rd/9Bskaery5ctn6/XNq1dvjC2ms+UXX37z/PlHf/c3f/s//Kt//fLxo3y6iG1o60YSZ87lPiNjhHkwhoNOM0Vnxt1AslNdUd42AiCyIqZ61lynSIGI0AIwgIAwQFKmKgiIJCWtQhfYNJUW4Qid+isJY4oobDkZFmKxKWFiip1ULKRO0xZZQH/0RhZIkUWQhRKDJEwqVcuo7p+/L29Pfo/+p4gIH+riD0n9LcMM+77/nQvc5ubdGffTWQAcHGfQKwuoz0hKKqCutmiMSqW/m3jiARdQxav00dq29T73NrPGda155xBNIhtCmM0m0+l0v98z83Q6DSEAp+vr6+12u9/v9/sdAHjvHzx4sC7bJ0+efPXN16ub9bNnyzwfA9mU+NmzZ6PRaL3dXF1dFUVurc2z4uOPPx47c3Jy8u787IsvvtiXVVVVV9fXx8eno9EI0fT5OKCwangOWc+9rl2IAQH2+/1icWQtzufzum6H/LRDavW5rd4CAILdg2hSrxnxINOsWAAdcugYY5Cu6DshrDWliDgHIQA4l7uwajpBPWMtESW+nz92G4RuGxoHFZ5JxgROGRu11zgsyESE+m4hERHS7TcrOqM6hGLfecZ7GWu/5G7Hfod2WqDNXmd10vl+ccadVwPHxBiM87llD2gIxBCw6fgSpp8vDi13st5aqyAjiFE1UaEvx+Uu8Fjkdr4Idzsf3SX+nDg93kW0Qt+rgrtgVwCQXlhLRPhABpOZA3Nbl2VZqliJdLg6aVNUeT9rLfosEYXYaQIwM1ojMQkzvOcs2OFDBxMzASQwhqxzyjSIMRLd+tCor4KItClNjGdOhyNeRNTtD6BgKO66CZacc21TduczoTMWAJqmKcuybOr5fK6VJQAMtIROfak35WhTAkxt245H+a6uj08W+/1+NBrdXF9D3wkYIDPe+4ajrtgQgrNdhyaEAMYCgDGmqmC/379+/frTTz89O3vrvffeaU+ImVU8U+Wclsvl9fW1CCiSZT5f6tiibdvr6+tHD04nk4nqSjNDSmm92rLEX3/2v1utVsfHp9ZaSWyzjmh4dnY2nhT6RCE2i8Xi9PRYO7H39shwuvYMwjsqX7/nOOz9xwQAEBqkzw8rSDnoZMJ7Xbr+Guq6Y2Y9LrrXefdPuu4ID3dAkW5ALF2Tk+H34e39s97qUJDq0WSMM2qEc6fDCV3l1QfAnmulmB8DAEb5QtqRF+G7Fa4ueoVIJGQESiSIYhEkJTaYUq8DC+qJeih1mpBUKZLu9ZHm87kzdpyPiMgaXxTjGNiOJrpWiqLY3KwUilKW5Wa7+fHHH7eb1X6/t9bM5/OmDk0dLi8v67rerjfCUBTF+fnFbLr4n/6n/+s//u6Lb775po3BOVfX9fPnz1+8fFbX9cTn33//6quvvw4hWOd3u5XL8KOPPhkVeWpT23SS1ihgyRbFKIQqhNDPLznGFGP0TpiZUMqyPD4+jZyWy+XFxRX3Jr3qfGupc+MMIdgQRARtp6+mzQ2DZK0dJNn0WNGwp1txOAq1iarBfrdfAacspyqyMViMMrOrBQAMGYOE0ktzdqKUXZjpSijWV659nqZpWoNB2y/GGGM1/zAHR+fQrToMeyJC+g0bPgStwN12xWHM63/3w2ld
34wmdXmAmrmLFaO9yhCZmZhFIGmcZgZOFBMassa71vvIPsvEJzCU0A7NUiIKKamMjnPRe2+M5ZRSuh3gGWOU/3f4OhVpch/ffbDPf2psNmyug34QChGlA8ISDkNxUdMlPjhr9LsOEiWmcr/bb7ZNVQ19Wp2N7ff7fR3QkxGMgeumBQPGUOBkrZWYOAkYMXT7soe5DUBSagUCWCLjnFHsSeisRYaBEJGK9XaztJRkoFcbYwCYCIwxOtLT4Z8kJGv1Q7DWMgAZo1ARxaOt1rsXL16EJuih1DSNdrzLcq9Gkt57IgwhWNfx+aqqOjr65Ors/NGjk7quDYKIaB8SAPb7vfdZU5dt244zW+03bjpxzkXuOuptW7dtSwZW6+2+Kl+8eHF+/u7y+vrZkyfb7c5b572/2V8XRYECTVXNZrPJZHJdrVer3fFxnWUZCtV1HWN7drafTcYKVSvLMssIEZoYiGg0mnz11TfPHj+xxpM3WZaFpr25un7z5s2f/MmfXF1dnZycbHbbxWJxdHKMBoaxxZBGKyIeDrp9cmD+Ze4mMffW5IeGDn1uo4EN77VMhyyoC2GdXhActt9ZA1mvtNJpj/Th8DBK3uaLMTIKKEKrX+QoIikC/JO8vZ+6/YPvGfqRcqdco3HujiC+od6/VGfefTV4G/bwYLbJhynGQeQHuNXa6N9V99SczEBj7xU+P5DyI6JW2cxMAgDkXW4dIWJgARBrfbTcNvV4PEbEEMJutzPG5HmuA5v1en1zc5Ni2zTNZHL04MGD1Wr13XffjZdHH3384ubm5quvvlksl7/5zW8MuYuLizc/vvbeX69u5vP5v/k3//vJZHJxeeace/v27T98/lvjaLk8/vHsR+/dp7/+1enJw836Zr8ry7LGxMxskEzeiUQ0TYNoUkqcQEdOnDERWWP1vGDm8Xh8fb1S0HaMsayq6WRivRnSt05IQsnR0oW9IaIMBgjDcs+yTETUhEVxszpi0QklkcmdSbEWiZkbF7YuIyhOgQ90sTXskTCKKIi3WzCkKzW2bRucCc7GlGJKjsyhccywGGhQJYCDGTB0qRa8V+cNAeD9mdOAzB4enBG1RIMDwoA26xRYL6K6XSIimDhJhIT77dY2Po+ReeJTMtYmCjqRVX2ZLoqEoMeu95lmV7dgGUqIKJYMoBAaQEZAlgSiv0lAbzm8Xex9cWr9bZw1SECIAkkYBRhBEqd0ayKa4FY6Fc3wkcIQ9lJKLYfUNmVZNu1tzAsptjHElEKKKYGEkKhtErdtQOOM6SCFWDcqhkakqbq+ChRIijfSL8Mass4REjPHvp3QBW/9XhITgDM264nwMUaDpPtCJBEBAU4mhXPOG/Tet3WSAwHekBJB5yQeBYwxdQ1Zll28uxgEgKy1bdvGmIiIe+HsEEKWF8Mi0Ym13hlS0hQ2y3I143xwOga1qPSmaRqcTUejUVlH7z0bU5ZlXdfTqdntUpbTt99/V4xHm82mCaFt28vLS0WjWGur/VbngicnJ+fb/WoVQwjeW45SlrX3vqogxpjlhTGmDe1oNNrtdyGEk5OT7779vqnb46MTREOE3rm6rv/qr/5qtboGgPOLd3/4mz8+uzjLCu+c0Tc47ILDWgoOk0XNwJjlwOdSOmWDO0XR4e+D2AYDJkW1FIbFoOJbvTMIwy0Cs9NTUl0K6BQX+grvoPWS5BZqcBD2SNsD0qmaRA2WIhLag2rvf3OpRx0arpsbGmZFLx9WemSRbM9YUF91i50xAPYU9UO6kevUJgwiJtXlUNGHPtMdftSwQUlT2uVl5q6yYKtsf1HbCEk9HMKw9ELJxoxGo3pXrlfb2KbpeDyfLqz1xnJdl86Z7XZLRM8ePwGAV69eVVV1eXHBzM5lbbtumtY5n+dFWZafffbZ559/vlwu//RP/3S3K7//7pUAMcPx8fG7d+/+8Df/4le/+pX2zYui+Pzzz9fnN5PJdDqfXl/fVFXzyce/ePrk+fn5xW672W92KaXcOASx
ljrdvJqH/oMIa7KmtDA9Duq6jklUhDCEwNBp6S4XC+dsSClzt/hvMm4gxqmHEXWE3zhEgpRS0zRDzad9rUGQXkRx4TYBpAQhpcLDbJrVN41Bpb9JjOm2u62amr3CQLeIQRCt5hNDDzalFC2bHoJ0ry/ebQ96LxvTykIrCsUDEwGRHMDJ1Oyj17Xs26Sd2JHoR+uoBzukhCoTaxDJILcAYoQJKYGIqrIQNOU+hQjMwIJFAS4jawRC6nzg7IC/iE1omyY2gYhUK7WTEQcjIpQZQUJDAAiEwsLCam/LLAyi11FAfysxiuH+b+Vk6HmmOTHre2cZcJssXWeZmTn0H2P/kWqQa9qKY0ohqu0wHLSjRcRabyimvg7LkKqK0VDV1C6zZg+JOzxt94UMJ4mwKuyqirQuyD7q6GEdqP8mYwgG0Vo7n0xrrkUkxZQ5G2N0CDHG3IKyj3QvO+eqfWsAUxvU4JBDEBFP3hijM+/pCAzg1VWnveKt29XtarUdj7PRaLRdN8zMEuu6ns5G+jKKoqjrejwexxiLogh1ned5tdvrMt7v9+ZR56A29GBVIA16Eeosy3bWGZ8Y6NUPF5/++sXDh49Xq9W4KJqmWa/XL188CyGUIt779fqmKIrpZFLuV946g8SQ2raeTsfjMRiL1pLPfeDGObfbQlXV1rpvv/3+k08+WS6Pt9u1cAKAqqp+9w//6Jzbrm5Wq2skadt2eTRtYuCeBnbYFeCehXzb3uzD3mEww7uQ/rsh46Ckg1uiTicGJOr02NdqClPtW62DmXBX2HVF/xD5sK9qeqJeP+rqI5w+rgJzlC2s6xx6/BHDz/D2frLUux/7DmMkqP9kX34NibYgChIgikY4QtRyz/RIk9tBy620dneFMWEymKIest0zHfSgEbHnMCBzV+0JI5D687H+7yH8HRFJOsE3Pehn02ldtlXVNFVrjJtNaTQejUezq83VsJ3yPN9ut69evWrbdnV9NR6Po0hVVWdnYbE4Ojk5WiyOXr9+PZ1Of/Wr6TfffPftt9/mWfHk2XNhmI6mv/zlL4d21ldfffX1N19mWeZ9VhTFq1ev66r9l//yXz158uzHd2/fvXtXl1VoqjzPC+sRMc9zdbvt56NEpFWTKNceQhtDSind3NwU46kS+KqqAm6BMKRorO0wZr6DL+q5RoDOmMgiMbEFLeD0LXPP59WMWLPdLMsUNTeQfL33SUyUlFmILUCK01Gx3TVM2IqkEEWEjMGeHYV9/WS0gR2TOERE9U9MggDELCGxjdFRp4V/zypv6N8S3UeF3dZ/2Jk0HV45vByu8z5qIoggCxqEfkzIiJj6EjNE6al4PY4aUYATB6k168LEkkXjLIJh7xHRWui6nQISmWNqQ0CBFDiFmLy3ZLrSUCihDgIsdVhrFkHQBJmjCDLHntVLwh3x/N5vx5J6xsXtJERXTt+k1TwjcpdYpIPcgplDihxT3dSEgpKGjrRe9lVVh7bVUYlyWsgAyR4bIinr/XS+sBZTEjIASYATiqamhALUu3BYR0QELDFFDokF+mDdKXkMOqKOjPG4K3cKq3DOxbYVQk5AHjebzWzs1XiaEGOM1PsHESCKJOZulp/YWntycszMTcPjwkhi1ZcpS3j0aDoej7frGw17bXtbgkyn0+12e7JYxlgTURcvfSc9ob6yxhhDPoRGEZiK3myaRpKp63oymZyf8WiU7/e1EKy3+198/PLzvzsbj8fj6WS32eqYXM2rLy7O9PrRURhPRiLCHJummUxHR8e03a6JYDIZM7O6X+n0fb+rTo4fWOvrsilynzv/dr3J87wYZTerK+fMrtwmicvTI0Ega1Ls+I7SIwDuIWyHfXRYvX2omflztw+xo6Pw3VYs2r3U6o01q+qfXYMZCAzzO2ZFWyXlp0UAGoimBzQ2Ufeb/vC/Y8vaBkH5Wd7eh2/HD/c5EYwuD1bR3tQpxpKwAdNRmW7loBD7WxDREGCv96KcPP2wrrY9DgAAgABJREFU
AQCAHAADByQWZm3og1FVbhKQKCLCCAmFgBlTMt13h52aDSEjMkICEEBKhER4y2Tr0BkieZ4vl8swit7nAEhkinzcSptSmk6nzrmrq6uLi4vdbte2rbbElRsEANdXq+VySWifP3/8D//w25ub9enR8Z/+6Z+myOPxdDSaVHW73W5t5pum+eKL356fn1tL1T4sJ6dXVzflvv7lLz/9gz/4zWazu7y4buqw25XIqcgNAGkdZgy1bXN4yg9lkHPOZf7metM0zcXFxfPRBADG43HbtmCdjVEkKQ+haZpxkSGiN53olzHOGBM5MjNFANMh+4fm5HBm6vS+8yN02ssKxlCe51UbbDLzmY+bNsXWjfLppGgTxKbzZCcC1Sk2BCwA1Dv9srYMux516omJChYwSLmzGvbwPZsY6eUboJ8aYl/HaFowNPpRlcd62sZhy1QfaIiC+uF2GuZoDBhkw8wSu2Zg1os6Do6+6vhlyUROqanrGCAGCbm1lsiCM9ZaYEZJCQwCGEKTeWbmmEJTc9twk2nBBwCpb38QGv2NBCCIBJyEJXES6dS+BAQZ4wf3IwFLosFSdTjFtOBLcgvvDn0bWc87TQ66sBcip6DTB+kB7tx3umLHywbrjbU+CHAKQMDAddPOLTpnEiYAZO4KUOo58kRgtHnubUophpBSQgSvfdkYHQERJhYkMF2QTylGSYnIEqUsy0LT6IMrFCXLptRrU3CIaHNrbV0n3eDCbIzRvq5Bms1mTdN4C6PRSFsXBnCUdYv/sLVurUVOxph8NCqrLSyWOgtcTKeaDjZNa4PXiTgze2t22yojPCTklPv99fX18amfL4+aqm5W9XhEm83m6vJmMpmllCjzRJ3ai7M2pTQuRnVdZ9aeHh0vpjNJkWNo63I2m87ns1evVj6vjEWlLc0Wbjpb3Kw2pw8f5flIv6k8nxlj/v7v//7keDmfT5sQnzx5oiz1x48fG++i3CGrdBFOpW4PY95PxDO812i5WwX2W+xAS6VjYg/6d311CUMUlP639jl7o0lVRILeiyrdhjFM98NeFAahGJN0ZiwQU69AJRIioLzX5PzAe/vA7QzA8P4wXeWUbotQ1PPkTnQU1rbSEPP6SzemAeC+gmQkod5VFQ7IWLfdqr705tuqTjgBUxeG+0RD0xnq9XQIAAgxCRlAAjTk9uXGGHN0dARAoYlNG6AsEcxsNru8vMzzHBG/+ebbi4sL60gDobW2SuXjx48VT9XUoW2ugjRHR4vj4+OUpAmp3Dd13ZZlySIhhB/evF6tVvv9NoQAYE5OTt5898Z7/2//7f/xl7/85dvzs9evfjTG6VdJ2KElYwyJY4xxv98bd9vdZu5gfkQ0mU3Xq11Kqa63ug60V5kXNjjLgdGQoOhWxD7rBxa0SESm84GRoZ14uBOo9yDU5qfpcW51XTtnvXNNE4ylyWRUtnFXRhAe5xkFrtqgzUODJACBGQlIFb9ADN4ZauvhzIBJMDKExK7XJDvsc8KB4sZhYUc9KvXw9nvbckAUD0loZ9Z6sLJl8DQ3RERWgJkDhigMDL7IIXEUtrFrCwMnEUEQUORLTEHACIBzRCHsjfc5cOKYgAXR5M56n7ONLUtqmjYx24Qpt5Y6QA6iKMmRCA1rc5iEtIMKByAsRIzpw2GPBdEY7WxKf0YAQOQkiSN3DC1m1vGBc467U4+AUBJziilEEiEDLAAcU+hIXWhoNBm7bI1ExqQsy3yWcYi9Eh6ECJbIOuOEI4h+49i9ZkEEozrdgN7YOsQQEgA4Z0ilhUJyzmg71wCiMQQgiRWUSISGKHOuhC73tcbkzoxGI06tJWVbgkYs/e29NylZayHGPu5SWzfe+3ExUk0i59xiMR7gyrparIUsyyAGY4wjitsYY8xzV5b1g+PjzWbj8qJpGk1nFekK1ldVZXOnkE6dTdTb/cXFBsi9/OjFt99+az20gQHxH3737b/+k39xfXWx2+2J6Ga9Go/HCaAsy+PlQkRSEjXhk875
r0aceO8RIc/9eDzWZTwajaz133//6o//7X+vA3hrrfeuLqs/+y//9eXLhw8ePDg/P3/y5MlX33wrhMujIzImxKG06k4VIuqHTf80Re+D0UE+JFp2Wyx2vW0S1iGfVnjQB7xBYAxFuJ/nqcaIpuBa7SmeTEQY0/DiQaNbEhBJMYqIpK4MA9XSAIY2CQkYn48++JbutYBuu7pd70GdkACREJCAmMWSsYacBeehyExRZN7ZwqBzLs985r1z2gshi2AsAgp2PulKjSZjCBEQhoZndyACoCGDAJ39GXcankQUqhoBrTHWWGNN51Im7L1jSZBEnaeNsZasIcvcEJoUuW0iIjpjy311cXH17u276XQ2LsbqwzcejY0xDGwsGENN0242G44py7I8L+azxenJ6WQynU1nZVldXV3HmLzPlssjtNw2bYwpxiRJYkir9eb8/OLzzz//8ssvtpv18dEy81Y4scjZ2dnx4vSjjz767LM/SCl9/vlvz87OjDXGuBTTeFSIcGiq0bhYzKeGMMbgPNR1zSzOud12v9vt86IgtMaaqqzbtn3z5vKjj1+GELIsR8Qs95vNZjIaTSaTGFtDZA1p3MrzXHQyZSiEWDWNdw5CzQcUaZ0LVlWlYDZFu6iim/deQQ9VVYcYRKhpmroOg3t73YQmBEtordP46bPMWhBJBqFw3iJKbBEhyx0gZJlz3hGAN2ac55mzKSaSpH3XW94xdJ6ieECRISI40M+V3o6rm+Rp9toX91paafmiH4f+IBlCg0hIxlhn0ICgWnwIICq3HjpXAUBAQ0DqIq/VEHOIkpL+QIwcwj60HCJyQhZUflpoY9s6YwiQUJAhtm1T121Tt20jmJgTIhAhISCoekbUKyIMivTiJJw4xRCaFAOnyCkKJxAmBEKwhoRTiiGGNrTN7U8MHEKIoW2bpqlD27Aq7HRNE0EQliQpsiQCYUlt26AkEWmbBpABqW7b1+9+3Oz2+7JhpGwyyfJJG+JuV4YQU4pt4Jjahw8f1nWz2YXlvAghWt3eAoZMkWXOWgKpyn0bEwH4zBBCCDElsQYy70Ibi8J5azm046IYjQoQXpWNJSiKAgV3+91kNGrq1ns5PppNJkW533nrUuK6bHKfz6az8ShDwu1uCwDW+6IojHXXV1cKYHn9evX82UPnHAqMx5PP/+7t6cNJnufnZ2ePHj4kA21VN22dez+bTTlGllRkWVFkKYbtej2dTjfrtYicPDi9uLg8PjrJ8yy2TWiqzJo8y1Lim9XGZDmQO7u8YIF8NMry3HnPHJs6ZJ44xfl8VlcliMzm05vrmzzzRJDnXoT3VfP06VPv3Xp1U9cVImz3m+Vy+e7svMhtnueJ+d3Zhc/yPC9iTJ989t89ffLk+upSUpt7+9VXX/z5n/3DZIyrm5snT58Z5+vQ/uEf/0tyzucFgAH0Qxo9JIKH5VqHWO71Kw7D2OEM71CrUw6KyK7gVWodCAvHFHXtsnDSxgMnVs6eiHBKrBDgpHzbmDjGqKZ9in5hlpi6rA1Y1CQuxhQZEkOISsbBGFmvJ5YUsU2cItdBYpT/rbw9EUExIIS3UHK1wTtATkNSPUgidM5lzlpLSKLpXqcGYr02ZOq6BtPtQVTVJELhju2QVE230+MGZn727Bkzx9iqxqa1lrwzxhjjUJIOLECQe72MxXIpIk3TtE0VmxAj73blfr9/8ODBfD5HxrYtBSIZ763zLnvz43e6JA6R9ESU5/lqtUIB1eK7uryx1ubZKJ9iDGmz2SC63bb87rsfzi4u1+tt28ZhuLhcLrfbbbXdnp6efvaLTx8/flpV+++/f9U0TVEUVdWod5e3FGMLws4Z542xgpT6hmdHuFHJiV78Wn3poCzLyWSCiFmWhVRPplNrqaoqhERECQRT0qM/CUK4ZdJo+t+XmHGINyLStu2hwIoWf03TeN+pfrNE7G3eQI/gFDu8CqV+RiXMyRJ5QpAk3FXwKSWbOQQDADFxSDGyABlvOpe9oVYb
Jnxt20pvO9Lh0FInvjMUc4cziXsjvWFEOswpre5qRDJGldYZARAJbqmBQoKCwoiMmPpnYQH1yrFglY3HgpxCE1EkSqQUW4nQRv0A0Vo0wgggQsKE6tqXmAE4NbtojGHn2Lnu/oiI2NTtUODquL/r9aRwcAyh9J4PbMJQ0R6C3KrYDgmNthQIwBClGHX2zoh68HULzKiLinCMZACTtpO73ReFRQikg0SFEADQWmtMjKENbe2tMQDcNhZAaeNCZHTCL8AsWeahaQNDaDrMjPNYZHld196gI+OdyyeWEKuyrKqqcKA9Z/JEgKoZNp9MUcCSsUh1W0lkQsiyzHsfQ4sCztgsyzLnAUCRtMV4tNvtDAIixqZtmia2cTwGVWtSbxbnMYTgs+5cwg7FY3e7nQKUlEerC9g5F2OkIkNj8jyfjPPQNvv9XkSyLLOUA8DZ2X52XM4m08XCN02z29aIXIe43ZXHJw9CU+33W0Rcra6n02lVVVmWLZdZ0zQKSW2a6vr6CgiLojAGJtNR01YA0Lb8+PHj6+vrTz75JMuys7MzZ8QYc3l5+b/8P/8fyzkcLZZZkT948ODs6tp7P58vWLCNLMiO7iD5byuzAxk/IDyUSYK7PU95D95yGA77+3SlnvYz4ba2Q2aQAy1YOECsACieUwYcpggkHhJLEREO2vaUyAKASSTFjlcaEqQEnJA7erekJG0EEPpv4O3dMgfgA8Vs14PpXZqVttXdjURdF8gYk2ILKEIkQigAnJAMIY6LvJ8icIiQUuSYtERNSZAVxq2bnxBwu90ZY5yzee5JsTMoKaVQN9oO9tYYArRWC8eby2tF/6ck1roi93TkrMk4ymg0YoYkaAYqm6QnT57sdrubm5u6LkloNBoph6+p2vGTaWdnA7CYH4fQ7nb7fVM75zfr9WS8YObValXudk1ZXVzHTz99PB6PrTGTyeTBgwejUf7ZZ58VfgbAX3751atXb2aLIzVwGY/HTo3FAKwzee7z3BtEYyGEZK0lMtpU0chERFVdxxjLsjQG1+v1dDoFgDzPy+3+6OiobarVapVndjabAUDgO0wwRLRkLCYSUKcxffuK4lGqwyD1NMx+9Jgbj3PnnIsJmZwDRySJmybmJghHiWoAJ1q5MLNwct45QomcEntrLGFKEREHDKEWlFDkNsvVFVBPVQXawIHwZueO1Jua3s783mtd0IEMMfbUBWttGaSTj+zvh0SoGrCigUSQkKTD9xsWbakLscitJaxQJDRCwBElpdg2QeXcUhTbSN0E57z33mfgPTrXBoNoBAEZSDhJksQJoG0TEbVGEUh2iHzaytbPH3qVhpQSQzoMe8NbTj3H454WWhXaw/4wGp0cAjIzgkDnh9K1jQAIDfTw6v5YTDElQIzMsYOsQIypqUPbRgL0ufcWmhrqssqdn2b72LLVxhbq0BK7qQMIsDgiRO6a7swioEYHmXUIAImzLA9KJE2cj/P9vuZeX1DzvNlsVu6viY67sVxijUnUkzKdcxoF2xjqut7v98enJ7vdbjRCa20bIhE1TXN6Oo0xKhWvqqpiNFPoyqHCmff+7dszcyCVZ62LMeZ5XpbleJSjiD5aiiHLsunUNE1TxYCIMcLl9Woynk2ns7Isd+tNDKksSw7t40cPCHizvh4Xo/32piiK3S5YazOXAydLLrZxvb6p63IymaTQPnw4XS6XSWS93RuPxrvtvjw6ObXWtHVjiaw1//j3v/vu+80f/Po4z9xsOsu9Pzs7++gXn2bFuInCQoB2gIbhAObU63fGCrcs2EPe3mFo6Jl5HRl3WIqDMUBXzvVcBbW86sEsveeCAPTokIOkraMSajWYEqfEMfUTvpa7vcB9DyRKFBbBmCQl5gRRuglfSkpgoJ/k7f3k7Xo+EMP9iDjAb1BnThqle3yKQhdiSpISkuFhWjfwEbR7m+UGROs/tiw+ev3866bp6BaCrPhPMIhYbm+yLBuPR0VRuK4yYJGEwikFEDTGGnLDYW3I
IyKIRWBCQiRjbJ7nyicMoUaSPMsQsao4xtYap3EupYSMWilut1tJUBRFVVUXF5cxRhUkvLy8DLJ5/vx5XdeTMY/G+Ww2c85NJtVo2gkxXF1dTWbjX/7ylzG2R0dH795cvXnz5vtXr5zNnDd11Rpjlsvl1cW5QUFKPvPFKMvzjFOD1OlTpJT2+31ZlkUxGnwyY4y7XeMcqayf1m0xxvFsKpu03m1Z8sViYQwp3VihX8wMhNZamzqXAOyjPh6gOjX4aSwcTsCBjiYiBtE7NyqyJlbSgHAETqgmIqww9i44WWsdYmxqFjbGGSNtlP4BLRFFhtCzBoep3jBQ1HV2SIzpJ5F34NT3aj7Tiy0Nc2KtGpWXqRc7dCz6ChJ7hfTbzoUwJZSko37kbm4MkaPpzBwxCQ8dImiq1GJTUyQXvItZFvLMGk9WexxOzxXEhOqKwACcYkzchkCk4o+6TA1RNMYo+0c6+q4gq6eiQVTqQlKeDwsyhxhTCG2MKYSoo+/MAXcsDQAgkSSgsz2DqIQQPc6JUESU/4QIfZjlwEnlmzUsGmOIbBtj0zQhgEEhIm+pBA5VOZovZ6Nis6mSglmACa0BRE6SAJOYzA7zUf1awZBF8t47a8vtLqbGGwuJIXFRFLtQg2JY2qAqdHlO1tr9fo8sxhhkYRHnvDOdSm1Hy9GZX4ixbbXxU5dpMsklphjjqCg2q81oNNq3lZZuIQSFT49Go86MhVkAjDG73e705KTa7VRmWrv9WZaVZVlVRWYwpdSEwCkCdnSgppE8z73frVabq+mNL/LZfFlV1c3lRdNGAD67OJ/k2Wg0MmSstbENgLLZbFyeTk5O5vN5We3W67XP3HQ2iTE8eHBKBMb4uq6n0+l6tXn27Fmej6pqP5/Odpvrcrf+x3/8/OXzosh9VVW/+NUvt9ut9/nx8WnbRp9NhA2DhZQG/Fe33XpomO507LfNvVrotojrbrmdNyv2eSiE+gDWB4guuEgHWhHUNqr+D/Agrw+9LEtKiQVur8cYUuKYAjNzJA2ESVgYo0CKElmEIbCEKDoQT4kDc0qpqQUg/Tfy9vp/dqTF/tLRsFIiZkhRtMuldZ4BJAVpEBmkosigl3Ab6H2IGGOr4EpvfWEckQUkRBNCALGCivlUsKYBAP/4kWagujmdN4V3zpumqVNKwhGAu0YRCwqmGDXZNYYIUBKo03qe+7Zt9/ttjJxlLvOZSUZIUugMTkMIHDilpOIUo3x8fX3dVI1GoFev3iDiZDJZb26ePHkyCHpNx5MYY57Lg9NHX375ZRPDeDy2hEeL+fXq5ttvv13f7F6/fm2tPTo6qqqq3Ncuy8uybNu2yEyeZ7PZeDIZGyshNG1ba7u1bWNVdfszyzJCJ6WEEOoajGEVUZPehFP7mU3TWFJbFic96dsSCQiSYWtV12DIeIaDfigytMKTngWstaB+423ThFiBWIM4HeV5YdZlayxZy9RCZOiPWrUUFmFIiYnAkjFG1PuC+2OFiFKS0KYY2FEX5oeSBQ4Sstin8wpMZ2bj+gbGsC47KBkiUedQiAO5HT0Y6DEs3cLmnuWipHXsHkO6sXvQJSOJkRENgVKI2iScEqumZUQUa8mAjTEypygs2HAwXPu2tGiNzzLnMpdn1mU6uDadwQUoZEZijCCRRanrxjsS0OsaGMCQAWQjgPq+CNWDVZhZmtAqdCWFGFKUxEkYWNCCIA3AabV4ERZDrlNMGfY4CCCkxJ1+BLBIUiBb4KREWTRkvSeiWIe2jZy6FndmjQdOLSPLOMuTj00MzCBJSJKCAlDEEjhnYpQYUwwiBFlmfZ4559q2ZY7MEQCYY0oBgL01zT4hgPe+LhsiyrzN867ViZ0gUSsCNsu0DaADD+I0dBFEJHMuhSiiamFVXdez6TTG2NZ1Pp9otqdZo2JGmqbJsmNuGlE2ukhRFGuAtm2HWtxaG9oUQpjk41hjnueb9er6ZpWPZovpAr2bz8uzi92uhbdvz6z1Tx89fPDg
QVPu97udMebNm7PHp4sHp0er6xudWYwno7aqhVxKyTrKsowQQlPH2Cr6mpnX6+vJZLLbVwzymz/64ywfVRIFQtOWf/d3f3Nzdfni+eO2qefz+dHR0RdffvPy5csHpw9v1vuHz0/KJlICC7do565ZAggAqoWrMwU0t52S2+wEOlecPgp+OOyp7UrvE8vMSWu+3hVROjLCIMKiLpbpdjqYehJCH/ZSTKpOlZgZIqTEKUEU4CTa5Aws2gsNMTFDFOYEIaWUpG4Ffoa395O3w8E/EaFr/uCA3lG2xKBBH6M4p3p06ifidJjdFcsdj+P28YcmlkibUkI0qnoNQJy0hu14eEkQAEbOHrRehYj2loxB6wgRVAokJSESNRPYbisi2wXCfqBFRJvNRkv4oijm8ykAVNW+beuj6fF+v7+6unrz5s1+s++1KMHSyhhrkGKMbRtFRHnHRVHoiHG3281m1jq6ubnxPvvtb3/7/fflH/3Jkz/6oz9aLGa73e6bb75Zr9ehYSRZLOZIstmssqzIc395eekzO5mMl0ez5XRUFC7GtqrKqioJs7Is67pVO7HxeJxlWYqoo03tKqiiZlGMmXkwSTHGVG3Ttu14UhBTr/CJnHS+ygCdopv0vDot74a3Nqiz63xR+2/T6YgZyrLc79qmjQzASAkNIhTehRRa5jZ2zQ4A9N4AS2wDC5huTi6G1BENRESbZoFTHdq8abznYb6lIVbN6Yd267Bp8aCBebAD71zpGy+dpYOKXGAP0e4Gfod53B0/PNDcSde80m66hs6hJgV3GkXeWCFjJSUh6rdEgNjUIAh5Udgsz2Lu8mgckXHGIqG1aJiAIicAiYlFJEoCwBCGQnbo7hIRIw95iZ5cetntdsPBMQwy0VDTtGyMFavH2XC03cni7160QjaglplAlBBx0Emw1mlgCCHoyaWlz2QUQgtNVeb5aDIuTEVt28ak3N4o2JkY3Kx2eWbyPB+PjdoVhRT3+70qxFprM++JKDCjQBfeALx1ldTeOWuNMdBUtbJaYxvqGgwCZJ0/gyFxxsRIiMgxDj6x1to8R2+stdZZq+2m3a4cLWfGGElWEocQvPf7cmNBNNpZZ/f7ve6dHkjFIQRvDVGnXjuZTNpyW9eViofpZ9i2bZZlDx8uw+V6s43nFxfHx8f5qMhGRdPWzrntOpRlGcK0reosN23TzKcTtNY5t9vtViuf5/l8Pv3hh0s05y9fvkRDo2Lyw5vXL1/84vLqy48/PpnNZm0T54vJdrtpQ/3tt1+Px8V+u5lNRh+9eHZx9u7q6uqjT3/DSCExggUAQp/ifuj/d81HwOEo6IDffdgbipOuJ3QHzP9TYe9WnEyDVzdTNjDAZRKL6qwCACfpm4V9V5MjM6cURESJpjGphlRgZgk6sYvMkoQjQ0oSk8QEgUUrP2YOSbsV0kb8b+Lt3YWoHh4nfWdY+FYJVGmqXZPqUFN4WMGKaUBEAwYRvSdhSZCYOSksrlMaMyliG0NTt+rx0hE2YvDe53nuvXXOEqkQDGTeOmd9ZpXXTUTWknNuOp6pXE2nANm2LBERUwpZlo3GRZZlztsQgrE0GheqZqmOr3vYa8ipqkoSHx+fjPJis9ntdjvnnDV+ty2PT48uzy+Ms5kfTadz59xqtXr+/MVuV3788eSP//iP5/O5SFqtVu/e/uic8z63dlbXdVWtVKFRnzHLssl0tFzOZ+NMoC33VVXvQ2y89VVVVVVjjClGI7Ulq8qOFes96Ghkt9sdHZ0YY0ZZZweTZdluX1dVtZBZ31zvuuzUe2gxs+kDiSa86tunc5FhcjYoInbsOmPG4zEnxKquGgk1N4lNlnnvC7C7sJcInIRI0EjmfWxDahIyGK/Esp5Z1Q+ixFA33E3ctu1wfOjTiTGD4snAAQUAMgQA6WCucC/avZ/VDVHzUAOmb1wc7uhbri4aZb+AKmYJCRoCEbJGBKwkAOmmFQKQJDOURBAogTBITJxSDCnGGExVVUXm6sJ6
432u7DfivNtFiTvsmggAhHjg6UOdFBEiJohElPqaY/CO0RwI+hGmBkljTIhMvURDhybzblAeSIdhT0etiQfe3u0LAGhiaHsyZQJp286QzhoASXmWGcDr6+1+Vzvjx6MRMEpijoFZ9c/Zkct8dl2V1lodZuvj7Pf7bRULBynBdJQXRdFUdQgBAdq2nY6zsmx0fRZFEZoqBE4es7yb+3oPzpDisGKMNsMu/hmjHmzee518awNjOp0aIv24iGC/389mMyJyzjdNU3i/2aYiz8qytCJ5XqyvrhXJojlliI0SGJjDqJg0TaMN0ljXpyfHu32JxiFiWe6Z+eTkZBMhhJuyLFer1XI+7ro1hHkOTdNeXV3l3otEEamqajqdgkhs29VqNR6Px+OxtZd13RKR834ymWiSPZlMtFdkyI0nxeXF2Zs3bwDZO0sIJycn1tpvvvthMpl67/e7ajE/CpEFiAWlF+Qcphuq9yS9fiEzQ7r1KdMaulsGeIvzGOjEIjLoQWrHBHrVFZVNkW6gxyJJo50GNmalYMehtaM1Yu8eFG/DXgwa/Jg5tpy0qxElCnCCkDixxMghSWQJWhOmTnWoDkDw38jb+7nb713u/Zc26LR01g10CDRA7PoVmvJrKxKNgJjUDUwOaLMMIpJ7DwB1XbcteW/G43ExHhdFEdrSWkeEzNzUVYxRy01L2eHuzfM8y12WZW1bI2K53223W23ZTyaj5XK5vtjrVtQ/KYpiOp0z826z2+/3sU3j8VhELi8vU5QXL16cnPjXr34EwtkUrCVrbVU1eZ7/u3/3f3r48CEY+q//9b9OJqPj42OtMl++eLJeb7/48suqak8ePKqqal/eLBYLRNEt4Zyrm1rtskS6RFvxHerGV5blerWvqiqE4D3ECClJVVW62623KSYGyLJsvUmKTGHuKFAhpBCCpVse2wA3HbqaA7BiSFmGQWyM8fXrS+9z7/3R0dE4xM2uWq/rWLZIZJxrIToiAWYAFCY01tq2bkICeyCXrLWLBjYlgDN0hUubWn0lA30eRAars0HKjoh0g2EvO/5+5BtW4+H873Ab492J4OFSP6y0WHuDRNpdECIgUZ/eThAhoUhsY0wpZUhAyTIRQQBG3asxNiGQtya0LmvIuyxrfJ451zq8X18OGYl+XMMXoRsqchzOrNRbAyr/+nDfDe9ISV2m/06N6URz+wbXwZ3pdsRLg4ME3aY7A61TRwAxAUBnt+CMdSN7c7MtA0xDyJxLLkXTthgU4KYQbufcsycPiCimtN1umxj01ToEa20MkYgMUtM0gSE3JCLz2ayqLkTEW5dlWbXdIDKA10PNWjuZFJnLEF2MkbnKVPvUWmcsgxhEsLYoiuvVqqoSpDLLstC2Nzc3OjLYbrej0Yh6fo4feeec2sz6LMvz/LxpsszofhyNRkPhG2McFVCWpU49jqZjNcxzGU0mk10j22arIaqqqti22+3WmE5rglOb5zY18eam/PUnz/fl2lq73W5ns1lV15PJJISwXq+JaLkcRw7MMQRs23a5XL5+9ePLjz6ZTqcioggDZv7qqy8mk8n++uqj58/m89m7H98y82ef/noymWwrfvjw4cVq1yYSqDNze1D3/TzQdXKL3iQcugIDu0lEkORgT90ml4czdYE7IQCkE+TsRaI17On8Iol03bwh7DFzSpGZYzoMe50nFjO3LaeUYpKUJApzojbFlCBwShGbFFOCxJyShMgppbJOAGBcXvQv9/b33YPjzu1sWYhBQZld88QgGu70tdla9B6K3PkcrMXCYDEajUcjBE4xGEy5Q2vEIBMyEllrVLBQDAka5TT0J6yofRISNLECFDLgOhpg7jPvXGYdjiaj+WI+m89G43GW5z7LjLPGO19440yCgE7yqc9GCDaNJj5KHbiKUrdhv6/WkSsyzNJOZ4XPzcXVux++/6Zpy+VyslxOJ+Pxf/4v//7ho6Nvvv0dUPj0Dz7ZrM+N5ZD2r15/f3l11cbaWDx98GC5
mK1WKyRbVu0337z59NNfP336/PPPPx+N8l/+8hcvP3r+5Ze//d2XvyUDIilxnM6n+3JvoHj747sY4nKxAAm7zc044wfHk2ePl48fHi1mRdtUq+vL7WbHia3x1zfrENk6h+iYDVHO4qoqvn13nRins3nd1LsSjk/GDx8fzReTuNvm1pSbLTC3ZVtVzfHRA+8LBnJZsa/3gaNgMA58BlfX71KyxmZEJgYuiqzel7PJuC7Ltq45RhBIgduUrM0Y3XZfO2AEs9m1Fzf7fUOj2fHy5MHx0dF6de64ySjlJkmAlMAALKYkVXCAJCwMxnExzsiRxOAsOACuS8N8cjS/vjxfHs9cbg1iNhr5LFfhVZdlPsvRWAZEY53zaGwSCImFCIxh5akexIyhgQN9jqWBXLflyBckmGICFjCGjAVroXfdIzKI1NkzMQCgYYQEkgAigAAyCKMIGeuQHJMT44RcNC6RS8aJgWCoRagkNco2MGS9c94bMijCIXLTxqrmuk1VHWMbmjKFKoVaUiOpldRIalACQUSJwEFSk0KdQhXbygigGhg1TVtXoa5DaIVTCA1zZBJAQYNkyXhjvWUEY431zmfeOouEUTimqANKnSf0hEEBkNrlNs9DrGOqiwxDU253azLm2x/eBcnercrF6fPXb69v1ntvHCE+FHnxMPeZu1jvNm2KaCIhozxYTtryGlopDABJYC6Wi8cvPwq78vztWV3u66oVTrPJhCGp1cdo7JeLZYjherM3AEUxbtporPXWZZlZzkbvXr8pfJLEp8t8XOTL6awp94v5Yr1ZscRi4o2TpS92m/XR6SKf5Nt6t2v36+3NLz55efb2x7asZ9NssViyNW+vby7rfYVoE8fYcgqL+ezFi2dvXr+aTIqmKutq//D05GZ1aRCz3Hnnyv3WGHq3amdHJyEwSpKmmuXWcHSIALjZtfn4mM2oDPbyurJ2+ub1edXGk6Pj1dWNMeHkaJZibSwCI4Bb3TTWZuPJkshvy5KsI+eMCd5TTCnP/fX1Ks8L43KfFVk+2ZfVqzdvTx+cFuPJk2dPJrNJkpA3F7/76/91e3NBnEyWzx88mp48+btv3zx8+WscH2Wz5XR5VJZlinVmUm459iRRYwgMQmdO1VU1qHxWJNR5FoOhnhjbiWvqBAqjAAtqBcloGCiJeqC2iUNMIXJgpZpyDCki2cQQtZknhplSopSIY4iRY+QUJUaJUdrAIUoTIEQMkWIybTJNoLKSspJtaqsodUp1gipBFWAfoGyw4qxs7b6xZe12jSmbbN+YqslqLpqU/7fz9oZW54cKvkMlTCsibYzQlZaGmZFvle/7CwPct7Q47DpZa4WNjjpTUn5i5ATOiI61tI5BRGeNtZTlznvjLAkEIgEgxXNq+dKLArfav16v1y9fvGjb1jn7m9/85tmTp2/fvj0/P2dmB8Vnn3321VdfMPOTJ09SSr/5zW9++OGHr7/+OiV4/Pj0l7/8FNFcXly/efPm8vJ6vbtKKS2Xs8vLy6qq/vRP//SHV69CCH/+539elqUaOGy326ZpRqPRuBi9efOGmefz+Wg0atoqpTAdF7PZbLlc6mS7LEvN4EJ/YWZnM+ecMDZNU9bNxcUVESBS27Z5nkeuq6pi7qqiIf1XIem6roloNpsVRZG4DSGgav8LOOekjxpDC7RDe3bkPNCWV4xR0BhjJEmKMYTY1CEhAxnnXArx+bMXyFLFljb7XdykPYAD7/3F9d5bIATvIctUz16rKAu9Bq4icQaM6L16ZagwUkoRukblUIbST5R3H1q9tz1P6p1smRkOPPygt2KAYSUejBK152kAWMCgyqaDYN/6UWhM6OLu0EjUi34jh+9oUPy6HcUd9F2HHtS9Vm1TB+rl87t5Ooiiq8gYcgdVnTXGGBUiN3cno/Ch+livWGMPP0ndQbtdS0RV1a3n3W5HgM6bukrjCcQY61Qiymw2a6Kk2IamRZKiKJD3xsA0GwfISuA3r197SIhCxqQUU4SU
kiUyRVGW1SjP9elM/4kpctgQFEWx3W7zHEKA3AMAOKUXEulRUBRFB0ImMQZ1F2hp5b1vmqaua1ViYebQtRjIOpsbcM4xR2OMVn4hNJPR6Opyl+f52XmZkVV0jBpwOufqqkWJ49xJaLUhEUKYzWb1pozcthEyz+fn58vjR0+fPv3bL79omurRo2OW9t27d4vZxDlHTHVqve8c2KeTLMsyTm2M0Ropy9JlhTEmz/12u3eZn81mDHR5eVkUxX6/f/HRJ0dHR2oW5kzzu9/97unLj/7+828++fQXv/71r//Tf/7zFy9ezBaLYtShvpnv7Iv3D/yhl3BYCCoILKXbGZ7g7TphkIO/vV1FKqMyiATx3YtWcsOuZ2ZRZeEevxaTJA4pyqAiq8gxFQsCIEHDnOo2qVF8TJCYImPTRAYDYIEQkwkplPu6aZoIJCL/bN7e4W8ABkThe2DOlBKnpNB2Z4YREZAhMxymH/aVRz40DlUnLRASQWstJ9JPI8ao4z1OsC9La603dmg9G0IRbZYaYSLDAMjMkQARqHfP6bpG1sSmVufGJ0+eEOHr16+3681isSCC6+vrm4vdRx99hIjPnz9//vz5999/PxqNPvnkk81m97Y4kySvXr26urq5ud5Op5PHTx+9+uH7J08e/eG/+M3V1dX333732WefWWPWqxUhphibpsmy7Hix9N7vdruzH9+WFR4dHS2WM2OMwTTOT5ZH86Ojo+VyGULY77c6dbDWtq2eiYZI9ZbyFCW0sSzLzabNCvTeKJo5clDbTGudiKhmkrV2NBqpwpmWOyrUYq1N3AyYz5AGTTy5JYf1Hp6AZK0RkRBatZgBIBHSrleTgnotxNCE0EyKUT7Kj13ux9N5HfdtrKM4s3fO2N6BLcYISeNcNqw3xcdrHyk3SrJM7wnUSUop8C2TAe8aCd2LfMP2vtd713DSaZB2TmO33VccAKGI2kuEg2YOImkfBw0QsDIo4GD4T0RIhgCRBYnJANxyJHAgHR4eQKkHoUivnX0Y6g5f+fBf2oZCAAKDiGgNGipGI2stOWuMAbqNZC7Ph5j6gTh3cAiq2ayxlrmb4bEYAEpJttutburJeFnttuU+5BkBiLf04IGP6JuGDRIINE0TQ+Nye35+9vTkaPHs0Xq7ff32Yl3ucTwqRhkJW0fG2cQxRAihQUPWGG/NZDIBwbZtCQARQmg0ChaTUZ7n7968mY5s2Ub9RJ1zBklHYnUts5nTCAfAzhtEKas9ADtn8txXVbXf1+pKjWhSaGPTgogll1kjXQcbNpvNOMsvLq6X04m2ZNu2HY0zzbrG4/Fut3Mu2262uaej2bQNbQghCddtOxYJMQogAHnv3727EfSf/OJXy+Xyxx/PHj1YstB6dYXzaYxRD8Y8z3fberVaTcYPsiwr91VMLaIvy3LhvaTkvd9u3y39kfe+jVyW5enDR1dXVzr20/HQ73775Xy+/Nu//eb4ZPr48dPdthSgyXh6fHycFxPNGxSS0Z3DRtXCtAOpq+sg5iVmFetDRBQQEj4Ib3i7eBLcuf120faupodptLLs+MBx5SDsxX6GxczKOogpMfdk7kFONgZOSd6trgnU0cCJYIy8r+um5RigCdy0HGISQUGDaMh5bloR+Wfz9voS+CAo9rcPaWlK3CZk9sxirWXBkKJh49EAIeO9iHcHPHYvBxcR1VghJOVY9/WHfl7YqURCl1BnWVYUufc2cTAGhyFIjFEXc+FyPWs0rZ5MJjQeNU2zXq2+/vrrosin0+nx8qhpmu12rXzYv/iLv3j+7MlyuXzy5EnbthcXF8+ePfsf/8d/98UX33zx2y/evT3f7/cAEEK73W7/z/+Hf1tVFbJMipHCLC2ZH374YblcqrB6ludN02w2G9VSsc6MRiNdjkVRaAj03oUQqqqqqkrfoL7xGCOA6YaceouGmwQhyHye73a1cpVEVRaZVTZXv0Fl1FZVNZ/PVWBTRKy1HNoYI3bHfUev0ZU+LFlNtwU6
/kMIwQA559B4EeXbWUgphCSSYpte36zn8+1kObfZ2Fo/mRRSh1g20+nIGCOcVAi7baPzHTuQQMCoAoUbj8eHz6th+/3VyB2OEodRlqFb/MUH19W9NFY/IoVvIsuw+W+DSv/3Gs8kdW5w3HuFaLQIqctH5UDyfag+h0YrHvyTel3sOy3Z2zh7OHOEPk+/hZt3ADxvVUeju1hjNNSpK4I1h8eQflCHSa3O7e58THfesb429t4TxCp0PPGmDlXVeO+NtW/eXhgAb/+/xP1Xs2zZlR6KjTHdsmm2PabqVBVMk0B3s0lR4r03rkL604pQhF74cPXAlxtBqYlmox1QKHfMdumWmW4MPYy11s5zCkA3IVLKqDjY2GZl5so553CfUX7015fbulZjBp0yYvZhPPWjBVhdNrcXLUB8enoYBm+Nqiv2mbrj0/V27bTRxgStlBDgY0wmFUVRFIUfQkrJGC0nYGVMJmqaBpBTghiTYLisVkJCFSwPItR1LUsiRl/WldYY4shGMeSycn4YhgGUgqqqnHPcjTHmnJiIy7IchmG9XiOi1kbSLyK6uLgYx1FCqdbgjBWDlLFTx2MPjTHGRETxw2DIgqS1zuYE1tqug/v7+4vL65///Oddd7y/v1+tq+12KweR5H9FURwP4/E4eO+1mbodOPMKuu4IygCQUmocelS6aRrnjCToy6h1v9+vNuu+f/iiaX7+85//3/7v/4+//Lf/PjI2TaO0XbQeJTvMOeNczUtQUkq83Z6zXn42tUQ+84sGmMIhTAjnZzLDeRk0ERB+FPYkxRQSgghsJkqUhJM3PwgWGEcmSolCCCEtP+ac81df/eJ06p+enp6ejt04UlaAGrSJzIDWWAYDKdLgwzgMIQRrHfwJfnvICDBnC0pA6QQiMD03QqZWCyGDIlSQGVMmqxHV5Mz3+7qjy76TYeaUTU8zlVlLiQUhP3uNIm82G0QUmrPRBhEzU8yJKQsd0qCYychRbjJRyhnn885aa3BCux2Px9PphIgvXrxo1yttVbrPn3/++X/8j//x8mp7eX1VFPYnP/nS++Hu8eHf/9t/9/793eF0eH+3f/3y+s///C8vtlcppcP9vdVm9/jknHu6f/gn4rqu67K6vb75+uuvP7x7//kXbz777LNV23777bfB+/WmRWTfd865i5uLFy9vnDHDMJyGkyB1gViUI8auj6MPNIU9pRRRHIah78dxBOPECRZOp96Vuqoqrawfo0QLOTSttSJFOKtIW+Io/ik8ozeTem5iLCthUQYhhqX4m39NMyeRhrJKZVZ5ErPgGOnxYZfxAK4gV0WwlGf3H4HbKJImpzZCIkSjjLSnKlfE5GUDLdtv6b0sKRHCRyc4nFV7nwBYlvdyHvZwti6a7aOfoT2fhkyckuIEYnc3lcQLC3fpCy9/LgRBQiAEVsIEV7McMxo0yzNOSYZSWmuxHF8e56gfPgMcPYdEpZcGKavnaMoArFBeITEoJT1OvbxxACB8vg7B82Un0rrkHBQ1gqtKhflw4JiZAUNOXdc1mxenkO4fxrZCo3BIcH11EdIHHyAEUspWVdWGrICLwtZNOTydQn8EiZEpA2XnrEIGYAWkEY0CpTEGSGPebivJrgDAGBWj3BeyzlZVNYwnY6Dv4ebK+iEaY4xV3g8pBmAqS2jbGjhTpuiHZlUbq3KOStscYtk0feiAoHDQ1qu6qJiPKVII2VgqyzLGeHFxgQzb7er4tDPGxBhvb66enp6Eup4pWGvfv/1+u936XRhGcCZNn06a9lEIoSgapU32vu97RDgc8zfffv9V3X7xxVd//+v/GpP/4s1LAKjrmiNLAqoNxBH6vneFJEnAmQrrmPl0OtXt+vLyUgzFtLHb9fp0OH755U+qoghjfzweHx4e2vXm8WH32efVv/mrf/dffvWrX/z5Lw+Hw5//1b9TE0eIEFGrKdkiIssGiAlTJtZgmZhFioT07PejQA5jrZifUz1E/LhgObP+Pjveacb2z2GP
owisTHornDMRZ8oQcuIkxd4U6kRmJSUWulqM7D35kFPKE6gl59/957+rqmq1Wb9+8yIx7HeHu4fH/e6ojB18GofgY2JmEPxIWUrt9N/O25tOn8Wr8tOCj5gzAzESolImJgJiVopBZeaUsyEwxhAo/cltmw0yz5s5iAiYl2/K/tfEbBRiJgKtn3tfAEAEMaSc0Q+90uQmRWxE1Moaa23yAQDMLP7EzD6nHCMifvHFF93p+Nvf/vbu7u7LL7+0djojfv7zn0vN9P79eyL6sz/7s7/9278dxu4Xv/hXzpX/5a9/NXTj4bD73e9+93D/ZIi++OILWabff/99VVX/0//yP//8Jz/94Ycf2rbdbDZt0+6fdu8+TJf63XfvQ4iFsdvt9ubmpqoqPwzDMMiblVdIRMK1WIJTVdVVVYUQco6ZABGKUuhcEAI0K7ter8dxzDkXio0xMjYQTVRBpQp/AxVJwqu1RgTn3HAaaF6/nxYiiDRD+GRclHNOIYSQUyKYWsg0a3zlnAGBjTOJ1XAcTmmIzIWWMCyoEmBKwXsK3lqbFDmjgjWIbJtWCFJbt5pY6h8HrUkclZ9ptp8ISXwSDvFsjnW+wOaz4Ll+IoTz2LCs8I+zgZnRLz8HVkoBA2idAZTRyysUPZfzvuInZdzyYmYWx7PB2/kzfvIufnwpVqhm+CUxK+GzqunXpgzPmDGmH/+tXP050Tm/jQxyIrO2c4cAvQ8ps1LK910EKEqbQlzVcLluqlyNcUwpaVds6y0qvXt6/PDhbluAZlq1tdZ6d/ScozV6s25j9MARGTWyVmCVJs2kuWmasR/6vhedaCEwMPNqtULk5Edj4BTBGEM2aq2RYfSj5HbaKuecGM/GNApzhChbU8ToEesQgtZQFKosS2m0xJhynoDEiCgKZwAgebBkin3fby9aIAghYIuChR4HDwCUISXiWQ9daEVl5Y69994/nd4aA5Fgv9//1//6X//8L36xWtX7Q7/b7VZN9eLFC8x4OvaIaIzKioZhYNCS3OecnXOTxxPFFy9eFFV5PHYxRqLsvb++vlytVnI6ffvtt68v6sT05U9+ur26/N//y6/+1//zz373/bvVanXqA/+o6/DJ+gfOCyJ6WoHTenuOcsvfqtmrYYqgcLa5zkQrCfOStk59y5xyYoKcM6cUEk/OCTER5RzTREXPiTLD0swkxBjIe/I+h5hjpBhzzjmy7Z7G797ticCVRVO364vbm5df/u6b7yxHnwEy0GzkggxKtAXgv6XUAwAkBTjZZ4nwPMwZ69KeAUF5ggZlRh8VyoSAiSAlykbNd+T3zPeYs9TFc7l9zmSXTiYahiV/6PoREbVSIuCrmUwGVJAoI2QVMbucjeLMKqPhyV4VAEpr9RxUQKuxH+7u7uqm+te//EV3PL17967vT8aYu3f7L7/8Ejlbawc/fvjw4Ze/+Nfb7fbh6enNmy8/++zVN9988+7du/3+6Fz55svPcQjH/V5Ga+u23W63fhirstyuNz6Gx8fHsetfvHjx8vbF4+Pj4/2DM0qjubjYvv7s5app+q7rh5OxirOJFKNAw31IPjCjMU5hIUpVC1alqrS5yavtOlM0Fhhgs7lo2/XT075tW1WgEO9kpC+ihZL0pZSUFriiNcaIT/qESj+zxKIzq+gQU4yRmJwrUZthGNinGGKKTLNgABMjog8AAM3arS8uPJjjh8fjsc8E2FQAYBSqWmlnUYuZVs45U05AOGqVUmirelHgPV8Ay+aRjacBl7bJHIk/inafxL8ldXvuczLknHESxJuY8ksv8RnQPT8kmBGAmvUFz/IznI8VQq0UKSICZRAIGRU8M0MkVOOslLZEWQZUGpc8cqkyUS0ExU8BO6xwcpAwk0g6KiW8b2X0RDWXKlBr1Irz8wWnZ8dnDYql/lvet7UqRc+LmDuxj/l07IuiFHm8EoBSTIF//tWLwnBVroaEqk9jTCrGlNI40sBQN6U/DN3x5Jy73G7Wl9f3x/5p/7Ra
NdYoZ3UIwAkoZa115VxdNvcfHsZxXDUtUUIEjUCcLzZrUQ+nCAaAYmgbK6Pe4EelwBibMjOllELb1uHEOccQRqbknBHP0mHonANjDIJOkZJPORISKNCiAvH09LRdb8RZUxqbfd9XVaWUQp7cu9q29d6HkMpSgUbvg1FAqELy1piUMyJ670Hj+x/ulQGxo318PL794f3Nza1zj6fTATJtN/H64nK1Wo2nXinlHKUcc4aqsgKLUtoOQ69R5RDrspzUwwD6vn/58qUwDe7u7u7ev1VAH+4f/Bj+r//hf/7P/6+//nf/9t//7d/+3f/0v/5fDoeTtgXMPHRpz08OICKak4EYWDODQtKIyEaUnpFnrT4lkplnki5CY5fVkj8Oe8u+SPwc9vIU9nLOHCnmxCnHlKfOZ8iJUw4x5JxDormNSTHRpNmUyIcYQhLaVQw5pQR1GzP4lLyP1HX3Dx3je2B1/eJ2U29uX5firX3/8PThw4en3X7btvAn8/bOfgqyc8QEFxHMJFBtAK1CG0KnlLJWZ+bMFIndJEn9yZn0jOQ8H73MJ0+WkAcATKzoGS533l+aXy0h8fZixZyNQaUUZcg5irTSpm6lDILZQVEIcEVRaMCuP+12u4vN9i//8i9/+OG7X/3qV/t9H0JYNZUxZrtdPzw8/O6bb6qyfNo9brdbY9QXX3zunHv/7oP38eLi4u0/fvf119/WdSEk64eHh34c/8N/+A+Hw0FywMPhsFqtNptNURT7/b6qG6X09fXV1dVljmm/32eKN1fX+/1+HMe+P/l+6PrTOI5Tg8KYnPMwDETDfr8PIZR1td3WtiiOx5NSqixxtVo55w6Hg7XWoZI3uNQcS5AgIjkFzsuOaSIFiDOqZfkscNbqZIXGGFA6Z3EThhmjNJVBitV65bwPioEZU4zDMIQEWsHuOBCAAdjkXF5etO1qvV4byOM4UI4KSKEGQNGFEZvDJQh99IlPg7dpJ9MZ6/Y87D2na2fV3o+XsXiQqsnOfGIlwpR24TLwg7MmKgEpVJCnET8xLx3P87B0HrZ51oTDWediUX1bYrmePeiXP18AOz+uEZl5IphPnWMFiBLzBG4gZKsF0rKQ/9QccWHm6k1XPous8hqstSl6MWHPk69nPvVdWa7e3u9jzOsGUmCK8Or2inP0HEAbULrvhnHIo08AUCnY7XbXdWWbYjh1p+MeXSpssV21zqpY2qJwwzAkAB2zLtA554wdhiEFMhuTkny4wAxNW58Oe2tUTOA0+JAvL1sQLaoctdbaYMogTbCiKJJSmQT/nER2FJCl2lNKMSxMR0AFWmvnXF3Xb7//9sXNrSit1LXbbDbd6XC93aY8Ik6V0NXV1d3dHWXlbImQQyJjFTOnlJw1OeeYQ8qxLNfee2OAGFGZdWl/+9tv/sP/6c+329Xf//2vui7c39+/vH7ZNM1w7ADAWg2QUU3iVvIQvZgQQkrBx8w5aWNDCD/92VfMPAzd+/fvT6fTF1988V/+8//++edfxMzrzeZpv/vln//l02734tXniZgJaSbdIqI0vSMv3uVzlx4zsD4/hHlGfi1/u/Q8lh4GzbUgnKE6mTlNVPRpgkcEKVPOOcSUiFLKkmXmxKKz6dP0tYBaUqSQKSfoujFFCimnmOP0X8w598NBtk5mzTxRjRPF49ffEgFBdq5crZrN5uIv/uLfFEXx3ddfM7M2RfmHjonf+32t9NzeBUCahisMwKgAEVlrcM5UpStKqzQq7q2xRVVYa6xSzprCWq3E5ERPmm/Cw0VttBJghdzM6dgF1tpMbFgGQJzohzHlnGKS6xhAlTMxgdLKWEM5ArDSUqagdaYonNE25aStKcoStQKSrnHOKWmti7Js6rooCmLuh34Ye1TKKDuOQ+Z0cXWx2qyqovBhAFDB+2EYr2+vrXOnwxGVur29/n/+b/9bbevT6cAsfg5krb25vY0xvnz5MsYYU5LsLMboY4gxXlxsX96+vL6+zDnt97sQvdVC3I7J
+6Hrh2FIKSpABKSc9yf/+vVnT0+7vu+11t0pVnWxXm9OXUfEp9PYts2bN1+8f/9+tdoww3ZVpZScK8ZxTCkXRSFNGJn5jWNflmVR2L7vibK1NgYCAGu0QhzGrqlr5ywAjOMY5Y6nnJmsdSA84m4IMWljEc0wRmYQb2g/xroutDForLbFvusPnpxBjWA1KIYx5O54Cr4nypTCer3abtYphnHoV+uVuLIhqst1Y4zJKQvvYrValWUpithT12JeRnLiWP2R0tKUK8w02+XEf25jCKJysmFiZhZhbhGjUogKcZYXklAx+fwhIjDxs3Q8I4ASv3gxnlXKWIvIjEDMomCpjFZGo1LGWdQqUZ7bOYBaoVY8bSYAhaiV6IiCeOgpBIU0P5nSWlvjylLJrymljUGjlVKMoIyR8g4VTthzrbQxmc7qXZzUMZVSrNA4qyQZYp4GjQBZqbZtYgi+H7rT6Wl3OHbD/ePelu23P+ytwxABM9xc65/95HOFWRsk0L/74cPjkatV+7DrX95uQu8LHS9XtcK8ahof04e7Y7Nu66ZxBjjHru+IyAdQCsqy0FoPo6ecU4p1XZ8O+7qqmHNROKPRWnN398FoLktQAE1j27r2YfTj+OazV9GPF5cX79+/++LN5wh8uHv7+tXL3XG3Wm8S5dOpjyEed8f+RNdX288/+0Ip/e33P9w99GigbuqffvHy17/+9fXVJQJst2sgcs5sVqtxHICorNxw6tab1TiMZeE+fPjgqd3vDm1TUYoX29X9/f2Xb149PT4WhavrdnfoMivrmm+/P8UMtnCorXPmu29/ePnq2jltFAPA3fu71Wq1blfej4fD4BxoA8B5ta4NZ2QOKQkq+Pr6uh/GGKN1Dhg26/Wbz9/s9/vvvv3mizdf/t2vf9207Zsvvjoeu/V6U7fr7cV1SBmVTpny3K6cLEYyTa4mojpEAjQBhai10soAM8nKFWwnkbhTnbU3YNlls3vGBOuS+Zy0B3KmKEKaRDHmIYQQY6IcQx5jDCmFmELMIaaY0hizj3kYwzDGfgz9mMYxjT6dhuBjGsbYD6H3cfQ5JMqEhyHFxGlio4vAIYLSAAqVBq0ywTCOT7vDu/cfvv/h7Xq1QqU+5e390cJuKfsUQD77/pItUCZEooA0eLQDKqWqhiNNIT0xZKZI2RIwmh9n33yGEf0kPRfdxDM6vfhgw6JcJTc9A2NmRJ7f2jMA9XxweF5OLkkuETGCwMqZeRx7gbq8fv0akH7729/+5jf/tF6vS2cBoKyL/dMuRL/bPTLnui4Ph/31zdV3//jdMLAx+f7+EQA+++xV27ai16Ctadu267pxHCPlpmkuLy8vri6dc3DmeADEIYT945Owi2KM4motNc12eylsoZTSfn9yBbRtm3PUBrtuZIbLy0vvvTFuv99vt9upKUfknPNjWNLGJdlXH7faPhmALeWFLGIZ88iSBpqmPhBizpkYUYHoPCKT1WCMAcQcfWalUSkxSiTQGhABGZghhHA6sUcgoqvLC2utgkak4LwnGSYtsnaifwGCPiWxqpkT0vmVLxHxk0V7XrAuC2ypF6cE6xlxgpOVGk9FEgq8SoIYAAkcRiGj1MuwxD9GYDWhJ5e8+BP2BSwo5XmI8jw9PbOAOd8ac9v/OQ2Ve0LLJAAgAytmUDgbNX/0fmnWAjy/7HkN+fEoYcr3c84xgvd+CN7HkHMGQG1cN/gQYN+z0/Dqxv78J2/Wdbn3fUR39/Ru30FVu1M/GoOD96ttyTyi0ZxiCAGIU4LD/qmo86rWF9u1tfbD/c4qVgqMgsK6rj8xs3NW5moxBufcer1+fHxctbUxSinWioEk70Gmyb1CRnFKKZHTk3p6QcwubbaLC2eMHsde6bKsXFVB1qg0iPUmAJdleTqdHu7uvvrqcxmLLgkTMy99BSadEoxjuFhvmJkyCLsDEcUIpR/H43G4uiruHnzO2RW674e2LZ+e9i9fXL8d+qqq3n33/vb2tqmry8vLHD3l
MUYqHFprUxgXKqcr7DiOgKSUOp1OSitjzDj2Dw8PKaW/+Zu/CSF89dVXx1Nf183+2H35s5fdOFRtkxnE4vDHrTu5+LJCli2DWiFo4o8W7bIX5v97trrOjEpo5tgxc5rJdkScxJQxxZiIiEKmkJKIp0TKYtI1pJhzjkHUxSD4LIYZ/eCl1RljDpM2MAMoQkQGAn6mYUyvTU4AnAPWtJu+++47WJqc/5KANx2CrABIyL3SqmSevC1hxpSnCCGkcdRKjW1JNlGKFJOorYkqL/BZHT3rSsnVaIl5s/jNMoOVsHfOxJrsKKYRXabpkBHw58KRQkWTLzWqs6hH0hrCSYpp2vZKaWPkiE8pSZKeKSfKVVGs160Pw8OHu6ZpvvzJF3d3769vruq6/uu//tVf/NVftJv1/3v8Lx8+fBjHsaqq1Wp1cXWlrSHgU9/ZsmjWq0j59PioOF9cXb767HVdl0IhjzECZaMwpBhCEJUjJiEriA4WIvKq3Tw8PFR1kXPuOnj1qqmqYhgmJcDVqri5uTmeemvt09NT26zV5GuRiqKIIQmTt+s64TlY98wwg3Ms1myvtYysZxNR0Foj5RCCMlNdbpRKOS8jcC1ECIXGqMw0ep8yW6crBz5ABlAESoFCQISceRxDIGDOTV1VhQat5YgvimL21NWlmzzPvPeFc0JABAErnkkt0+wRsbyjT8Lej8PJec8wMyg1hT45NDlN0IYF2AMKxV4IzmlzOA9Dl6GgQsXIzKA0atCo1MfsIGUMi03XWXqBiJjTJ/nH8uKfn25OVrTWkQgVzoLuU6EoTU5QKPWcjAQQgXESbzt/+5PfJbOEdETU1sgIPFFOkDHxGEMIIfjkUyZQxrjhcGhr8Adoa7i+unj54joMJ9/v7MWL+4d9TNBcrd69e3DOHU7j5asLGkbnHHGMozdWoYKnJ9/yoSna7Waltd7tDuuWxyAmIXocR6OdK8rggzGm78eysG1TPT7drdraGMMxi/SP1eicoxwE36QNpjAWVtelG4ahLGqtbeEqrXUGQkSKGZCIU12Xq6bqR/JDnyI0rXvz+lXXPa7X67fff3t9eTX6SERVVU3L/nl2y9banCMApEQxwDCGqm5TGsvSDMFbZ5XRXTfWdRMpxnx89dnrw+m3Kebc+3GMCvLT09PFtr64uKrrcv+wv7u709coWvan45gTAIDWmBElVbXaNHV93B+U0UygUV1dXxdFcTgcjvu9Rvzhhw//6l99td5cvH37VjvXri4KVxEna4p+9AY1AAq1VJ1nRep5UcEEZkk48bsyggZE1s/rc7GGZuZzyCbh8xabbQiIWUSiZ83NnFOiMYaYKDHFmZAQc45ZkJnU+5xzTiTFIvkx9D4En0IQbCelRDlnyjJNVKStAiBYaiRkKclmMM4SU+Qhx8gfDHvn2fF55GPRpXn+vsgTo9EaQClxdcBEjJQhZ0wpRXKJZqzqNMT4SAWAZ0DcJ8k4z5x/gNnn6EeJ6vmrlbLAWhF4/KSUPI93PNUKZ8cNEZFSGqbKZhzHcRxTSr/4xS9yzo9PJ2ttUVg5+q9ub77+p9+UZfn48FDXtdJ4Oh2cMz//+U/vf/sokp43NzeuLPbHwxj81e1NYqqtdc4RcMwJEWeS6ShVXc4ZGYR45PvhnDB3Xi7MWsOm6wZrYbPZEJHWmHMyVr1583lRFA+PO60tghbwp1IqhLGeNRqcc/v9fhiGsiy1KZbr4wyJXMLektmdn8I4k/msttbaLAhOBcBMlBQqpdEVNgWvAIkhBJ9TtrZc1WWIo7j5SAhXE+ReuN6UUkoKgx8BqHJFWbpzJREpwSc16o9XzvIiEXFy6ABYjqrzi/y4wfC8h+cZhuJnwQRUSuxSmDlTliyJFShQWVJ+JFDiRTvHnMWrj2a8+Dw1PCs46JOab9lrOH/nk8h33ps9D+fnyfjy+5+gRper8B8Y1UutR7N573K4p5SwsAzEnAk4MaWYpWQ/HruLixXx8epy1dalQ9x3e4uM
ujgOtLooMmIgMAw+gx9jpbRxBXDOIVZlUdfd0z0UMYbxRKk1WlWla139w9v7MHpcMWdixdbafjhVdXs6jcw5Rl/XVVUV0XddYCaoHIhCBSo2RqGavGerqhLvgqIolDLOOUaDnJVSmXNh3dAFYSOcTvch+rqG9bqtm+J+5733T09DeB2qqqpevVJKSYP9TLU5F1Xdn7zWeujH+ejSfowXFxfOYWFNjFH0carKMLPWerVa3T8emYO1ZndIxo7/8A//8G/+4s+1Vi9f3b57+6Hv+7IsjFFaAzKg4nEcUwjeiy6uquv63eHdar3VGqtm9fnrz0IMD49PIte8XpdffPFF3/faOQb16rPXx767uLzpx5AiacPLp39ewxk2Z2fpcqLSVGIKRHme4S2NsXnXPC+hDM+Ldgl7EuqIWNCbibKP2cfoY06URWjexxRSTJF8ijnnPk57POYUQw5BoOIkdFnBv4hzNdOkXplZARKA4rMe5I9TxvNv/jPVHv8IzCmHAlECvfwCAAsYDAAQBA8xNT2FeyEdTvHbQmKc1MvUc/BDRFGmm0uuKX/lCT0kd3lysUDkJdstdLGoWElP0hmlNeYUAMQqXS0niMwzJkLVzL7KDAoxhQAA2loiijGO45iBXVWKPHzdlH3f9/1xfzgg0Ha7bZrqV7/61U9/+tPffP3bVbN++fLlX//1X//VX/1Vu169+fKLcRydc904nIaeFZZ1FXMiYGX0Zrut6lrCHjGfjnsiAs4KOaY4+nHsT+M4xuRzTALShUx6Po/2+6NSpjsNfQ83N1Vdl4+7p7ZZH5+6qqpevXr1+LgPIRAF55z3IcY4ScKnJPtQokIQ7++kz8ujJRsAfkYzwqwLpZQSG7gY40wD04EiABitEyECAZPWaIz1Y59yYAAijjm6qq0bve9GldEYo5Ugyp6LKOlNWYXiaDgMjXNOTJQkhZMYJvhV4Tacx7zlNM85nceMT356vgE+CoSLaPzcxpkkmBcMy5yFGKfP443cLgXIrAXaiUpc74CAGFCBPn8xPFN/eK4gP9mTqKa+K569C2aeDGmXwDmpItLUfNUKAEgk/hQu40BpbM41HwB+ZKUkdR6e7fTJjHAWkco5a1OIbLQo8sQsLq2UEiBqEe+0WlmjKqvLdvvN6CPD7YuXX799AESZBR263tYYUqqtNca4sl6tWvV4Iubg/fG4L4u2Ktz6Yvv+7j6EKc+IPlinEVFMgowxXXcsnZWEnQGMhboGa7XgLOq6lqNHa9W2DTOlFAtlKbECHRNnIsVKKAGUuCrsMPSHw269bteXl130v/nNPzqWWQMwc13XTunT6VQ7i/MYBRFTzkVRHPdZKTWGqK2xRnvvh+50e/va6YicvB/KevV02DPWKaX7+/v1xXZMNAZAlQubhoF2T/Dw8qFta+dc27YSkoqiyHUNGKzFvu91SkwgQVSk5wUcUFVVXdfvvv7t999+f/PituuOr168sFrf7/ZN06zWW4Wm70/rTTbGWPtpujN3uYBSVkrJ1A8UMoNiNbkwILJiYKWyYWJUigFYn4W9M6JCnscNNOmHTQeI8NFTTilSzMmH5GPwMfuYYspjDCHmGKMXM6GUPBnhVo0xiDLnfEExKkcGZsnk9IR5ZJCThM+rusUd+nl5AwBAzAR/vNr7cakHsIQ3AGKl5tp2yoUBQG4ZpYwxJ5N0zjmmFDNTBqHmT/uWn8uv8zPoubn8Edxc8GZqsueDqZsBoKUgWWq4nHOghMjWnNOWn+Or0s9whnkyNFV4aqaiyC6qqqooig8fPrx586ZtW6VUVbm2bfe7x8fHxzdv3sj5O479ZrU2VmdKv/nNbz774s3Nyxc//PDDu3fvmPnq6goUPjw8SN8GlWrb9uLiYtY2PC3edTI8E9NLmX8s20wsH+UN9n3vnOu6TisQX0AAUBpSSqvVhpmfnp7G0QdPl5eXaTYYY+a+76uyFlG0paUpMDZpKwm3YQp7Z9hCnv0WtNbEYtadqqqQARtR
IsqEjGi0Rpk3CNgtpWCcNQZ9ZERwzhgLhMYYAyzqD2JrAkDAjOPgS2usLYiCLP0YY2rt4kkkkFSt9WLyTvhcEE95nMiM5Qk2LW/nvKw/L/eXhXdOe2d+vvNyOMy366N6UWJe1qwBANmAycKrUaBnzz6eCeYfhb05uC7XOS/CnrtPS3Bi5tkdHuYh3/k2EbQNz3BTSQdFmROmau8jSbPz/HfZC/JXE+3vTBxEKHHyOymlcQzjEENI280qpeQ9hBDqsrAaq9IZjffvd2DA2GLwQVtHgNroTJkIh8Gv1pUpnHG2adeFOwmpyY9D06yUnnyP27rUqJw2hzGqcSzLMufYtm1Z6OPxmHPmHAHYKGgrbNvGORejd9pUqyrn5L23hWuaRujM4lBNCmKKPkmqNy2epmmUMsaqAorIcDjs7x/ji42qqmq7bQX2ddjtve/qq0tBPMF8PsiGBQBghUhFUYyDDyFqrYl8jmHpyYdE1urH3e4nP3/livo3X/+w33fXV8XDo1+v4P7+vihe2ULL1mYuNLI2WBR1VekQxlVVMvMwekFy5ZylhXN5fRvC2HWdD6N8dpvNRuDiRVlf3764e3xwRXX38PTy9WcMphuG52YAICIaQfzOC0YOv+W0nNC/skORnrspE3cdYe5tyGUT07J3lrBHRIk45xxiTimNKYcQ+hBjzL3I1oUUYw45xTD1OQcoog9j8MISkZdMs2M2oazzsx4JZFHEXKAb89r+NM2d0sc/3uT8vZHv+SrzG1aANJN8cObtk3j9RQo6xZRtopRSzClmTCTjPRCZmzmwEeK8k2Vygs9NofP9v8Q8wdWBhj4m0QCFZ/w6MWfryuWNZ2Z1NrH/5EyRfrdIeY3BA4DgA4uiQMTSNLZwp9Pp7u6uacrbyWnhMcb4+Zdf/O2v/qZpVjGn777//vPPPvvNb37T/vS2bpu6bRhBPrzRe631F198wQCJKQODVozoU+z7vjJypOYYfRyHHDxPYoyT2I0Sw1Jm0SozpgoheE+bTSntl9VqJSFhvV6/e/dOooVwb611ErGUUl3XNXVrrf3+++9Xq5V8/Oc+NQJfnOPrR6WGbDacsS0LUmAYBqUxE4ScWSlrrSsKY1TwEQBypkLrwro++ZyzGOIlAmZUqLUm5KyVDMFIBA9jdMV8YMnOkcGGZLiI6JwrnJNqjJk1PEs2y4cr72LhXZxjEM7jCs89PQKSQ1CuIQsC5hnAUgJqpbTA0ynKbgOeQw4gIAMyEItE7iRINvXEDBHNeSguvHjhoj1nYM8ZXvqkb/lJY/Y82imxiTjDa+ACSKHM0pIFkOxYnR0BGZ63MM/pgjEGtVoSU1Yo2mY5TeaUsgJ9pJiz1vpwOhQFNE213a5TCkSp78dj56u62Z86QB1S0lqnlLeVA6R+HPTVquQalKrruqrUEImziIQV/Q8fUnogoqvry5i4rutu3HOmtm72h8fLy0vn1OPjDp4FmwAAnBZHISpLe3GxfXp46LrusrRlYWP0iFwUFYJSiEQhZ2KGnMkVTiDBRKAUpjA+Hk/HUyxLkDbMYecRsSiKv//hh5ubC2Y2xnKM8rlIqipSt4hl8KO2DrSqqkqSDzk99se+qqrTQ7+9urzfvTseu6vrF+v16X5/DCEYA1WFcp2mrMZx1AzWakbu+x7A1XVjjBImT98NztgcfGGsRhUpVVW13+9l459Op9vbW2MMUW7X13Vdy92pqurU+67rrCuXFBYAJkeFaZCUAeH32QAwACNrBGDMIL5hiInOyhJ8Xp+Jz+THzsKeaIlJ2BtiCiH0PsSYj30fpPib6jwKKeZEPaecUogxTwxcJcuVzgjcgLRg2RQgMAPSs1jo3L+Rk5yZkaeIiLPP6+/Tg/448v0Lvj8dE+cEo+fIn6cR5XQjMixTq4+6TD96/JHG1Pm2X742xhRFUZal4AD/eCA/D3vyEDhlCEHCwPJ0wle9vr5+/fp127YpJQDabrei6fXVV18VRTGO/XrTWmtvbm7e
vn1rjPmzP/uzX/7yl2VZvnv3bhiGr776ar1et21b17VAT6fFwVPmLqgWKbzU7KG6vEgi8t7v9/u7uyDGkjmD6GqO47her1NKdV03TfPhwwe5J1ISlWW53B/vvcyZjscjzxoiy6hsyfjOh2HLTZDww7Oa1/Ln3nvxmg8BMkVh2ol6i2A1lVLWaQCgPGkGzjk4ai1iwYVzzlpTVZU8y7JmJMgNw0CzmZxE3CUS5+dZMS2H9fLi89ljGVKexxL+fazE5f0u11mgPcYY6aw+hyL1PAkDrWaOw0fOCT/usi7fWX7zk41z/mvLj84Hvcv3z3EWZzMXVEoR/DMP/vgheY8U08vvLHV2CMGP0Xvvx0nK9cOH3dMjrVbV7dX1drvNKTJzyoEIqqraH47OFYlyzpwBlHHSS7C2KMpSa12WpatKAMgZnDZFUZxO/sPdQ4ywbluF2LatVaiUKstyGPJUW+cpPAOA9zAMk2q5MMrrus45j2OUD0uCU+EqY5zwVmGeelprv/zyS0T88OHdfr9nzgoIAIoCBFSy23UyIDwcuqZppLw7R34xswwOlFJjYFmrq9VKXqfwTcUblpnX6zUifP/9933fv379elO745HXa9X3XFfN09OTPFfTNG3bWmuHgfd733XHcRwPh4P3/ngUcg1vNpuLy8315ZW1VsIeIt7d3V1eXi4NqqZZ7ff7m5sXxpj1en3/8JBzLj5+yDkpBPzzhxwCP4aDnbfT0qwWvaSny9dnbnhJ7k+YH6Iz5X2UryWDkubWOIZxHP0o3/ejjylNECtmTkznZMHnts0k+Ddp6QI+x/U/ErAmzpwtqx8PP/7YblFESIAIqAA0glZgFCBwBk7MkUCsl5BAJ9I14qq9XNVbjQYTFsYWrlCsyrJEUKAYlZI2swDAFWiFWmCXShlrnEJLmYFAISpWwBlyRAqKs8akdYuzX0CKcej76D0wKGTKxClTTixDDyLKPAG5mVJM0twrrFnUeDmT6LEaY5ghhAgqgsqMlCiOMY4xiG5RVa+1LYwtjStOve9Hn4mub2+tq0BD4jSEoRtOjFQ1JSOVha2qYtM2pbOUQvQj5FQYnVMIwff9cDqd+tGHmFJmYlVUzdOuJ1Cbi5vB8z/+9t3g4c1Xnz8ddlVdKc3XNzfbi+uUgQj7zv/0Jz/75nffGK2NxtNhpzBtN3UMp83abtarQ7dj4D6ORVUPId4/7bUrQk6gVFEX69VKa9UPJ84ZQ05+PJ16Zi6bjbLlofeHbkysiHXvhzD6wuraGsWDyZ7SKsXISIiAJtoKQdEphHK97rMaSY+RmWhVu8Zl8KH3oJGrQhdWE3mlstUcYioKXm9b5+wYRmNM1TRaFymz06moStQKja7KEpXKTK4sLrYXSqlMOeZEzJlp4sDFgEpZ55TRBCzmxFprINai2gogHl9G68I6gimPBGBk1ghWKaeUItLMGkEr1MJzYiLOhWKjwCAoIKA8OYhJVoeglQKFGTgSJSJGpJSJpYk75Z4TUJwyAANkopRj4JwQSCMYrZmYmJYULTOlnJOMvrUCrcgoNoq0Iq2igQA5IqHTRV26qgCNkZICBiagDJQVkQaY/kNQwDJdzEwMCMZp60xZJoaYE8Fkh5jjmFNY1dl3x8eHnR/pm9/dhVG9e3c/nPrDLv/sC+NU+NlPXm5a97R7rJtNyPgP37+7ffnZN++fHjrKtgW7yilHP77Y1Hw6/B9/+Qb6R6uCU3HonrojKwPtaktaW2vH5BHh5I/MebNtgHNp9dP9/Vevb8bjcTycGme1NZv1ard7SB7aCjYrfX1Z5/F4eVnn0L/74XdNpS7X6xx87VwYeteYkH1Z1WGIvg/hGI6H/dXFFWhu1803b7/94e3D9avbu/c7h6pUzU0Nx4eHi+36i69+9vC0H3zwKTln6tLeXK7ffvO7uii6flhtrh8P4W43/Pp+wAJuXmwuVk0aj+F02rSbsljd3Z9YNftD3F6+7E6npioe3nUq3//rX7zxw4PR
wQ98dekOh269XocQy8qdjruLixWlMfp+25ScuXWtqc1pGDJmMHi/P9TrzRgTK2uLArVhNMYVV7cvM0NRtq5sqptXpqra7YWyBlER57KwlINTSiNjJk4p+jH6MQYfvY+QABkVE+SQUkxRhBcyy4IHBqBJkgiJQfGInIAjcCSKOflEKeeYiVLKPqeYcyQKKQ0hDT6kmEPIQ4xjhCHywdNjF5+6vB/55OHo1Wmg48idhzFgyHpMlEmea/oPWCFoYMWEwMgETIigFCsNGjIBAzIiC9b/46xuIrkyLP8BA/K/lLf3L6mcflyf8Yxb01pXZVmYjKil90RECCACHzRrKy+91/P25vkkn+m8YcXM3B97caFbnlE9u0KDMoIbF5MXYeA+gxWX1B5m1IZku1P7SKB685+XZam11VoXxmptBVWhUSGqvh/6vgeAGOMwREQUQ5/VajWOIwDUdV3X9WTyN5cskj8mpGEYhMzHjOKiRwS/+aevr69vyrJ8fHzcPR2228a5chgGKThubm6mHqNSKaXPPvtsmHv3y8Wl6lrGVOcua/IyjHFLYaTwGSm6FLvn9z+EMDsETSmzFAHjoIjIGGTkSBBj1EaJFG8IgTg6V4orW0rUNAB7EVNPIjloUGmNRkOM0WrDWtOZ25E8y1KTnRegolAlpfN5xaMoL3n9MldLKVmBsp3N2HgWQ1iu+UlTfXn753djucJz11TDWSn2LHso2GFJQnGaSUx6tjA3UQU7kmeBbyKSLgLA+TASMvOyLJccdX7XqFGhVkbp8/4n5N9f781vVlQFUWmtrZVSjJkFsyeLgY0BgP1+NwxRKRXjmFLq+uOUMiPknC+vVnIfZBIm+3feUxoygZ5q5RDCZVsOg3dFwQqHkF6+fPmw/14bpbWuirKzY+WKcewgZz92ifJ6vYG5t79arWIIp9Mpg7iYodZsDEiF6larYRgKa8uyKIpCLE3kbhhjJhF1wW1NdFKq61pkLZ0DrXXTNCHkcQjMk3gpc5bPiM5k0KWTdHl5Ldr3wxCJ4GK77roOL9fH47G9XAunlplTot1uz2jbdg1wYgZjzN3d3e3t7el0ury0ALBa1d9+u7u8MJvNZ03TPD4+rqpivV757tS0JQBID5MIjHbOcUrJzhvq6empG/zNzc3949OrV6+aemWM0WUpB9E4jimkZ3SuRQBYmm3Lelg2149quzMUN57vgonPMyFJCDNkZsyciCAtTc7EUvNREsRmCjGPMY9jFIz8setzhmnWSswzR/afDTp/KNz8Nz3+m8PesgLOb9OPX8FylAw+dKdhXIV1U4FWPgbsGcFy484knz76w3P2tHxTKaWVzTkTy6TKTK47OQPo8xMKFqg3ZZE3lDY1fvosODtFUE4KABhyURSuKOUjX17M8dT1/Sg4IKm1o3ZTT9WVulTWumEYjoeuP3XHQzf0aVlYq9UEOSmKYtOujDFy+ErDUE66t2/fSRe7KIqcOcY4DsH7KM2WnPPDw0PfxaurLSI/PDxur6/EsUgWt1JKpPnu7++X80XGErL9Fu8CRFwcixAxxlgUFgBy4hijVhNQ8JOY91EjbM5grLVKqRiy9ymloLW2hSOgFHzOWWnU2vSHLvgcEihl1us65UCUNpuNeb8PSaIjznZRytqpiYeKNS1BiLVWy6BuyZ9gDntFUThrF2L11KbB5+mXwFs4U4xRwbMI77JciYjPhKeXf89D4Pk6BIAsKi1nrUs6o7Qj/GgKPpkZESIKdBjPPoVJe1NP4E9ERC1QTpaYBgDAyiBqa3Dm2MEZQkcjWq1BK4MKGJAmJE6CSVsVUTozKOw8wokLJNxK6WwaY3yU2e1zH1he5LEb/Ji11l3XhRCenp6YEDkXBSgFr169VGoSQwDE0fvFAIgZgIX1iIg4DENxuRlj2NQlUdY53764eX93/3SI0n6XZJH3HQD0fRr8abu5pJybpmEm2QvEDJxjDEpB4cA5kO5lUVgfhiUszZNgVZalSNUTJRH7BQBZ
vXVd931PRHVdImLbtl037p76nF1ZNWVVSaM7pZCNlaxRcj7vfVU3fd+jVoMHYGjbOvtjjPFwiD/7vDbG9X0vfGrv/cPDQ7Nab7fb9frbEOJvv/7Nz3/+cxmXyCvJeQKVtG37/Xff1O6qaZpuf5zptJgCAYFWqrROXpU4iB2PxzGksq4O33z7RVGUTa2UypLKZ05RfOoIWSkFI8vnq2gq4QAAARVzJEriQKfnmRcRCT8PZoofLzRoga4gMHMGpgwZmRljZiKOTEQg/LyQ4tLt7H0eYxxCPvb+2PthTKeuSwSZpZl5TkP65wMQnE3o/1vjlzz+lGrvxzHvPF/+5MTMOR+7fn88rNvKWssMMeYQJiEnhI9mEs/Hx0eiw9Ow6hxSv5QpxjwTGM7PR2c0KlAzMk0MWolI5IaXBxFlyAAg1szzMGMiimmtUa0XW9qccww5cS/KUEqpwjkE3dSr1eoURnF4R/G3E9jhZrMRoT+njVJKo5IJloA2RaJTNmrOHEI39D7GjIhv3ry5v394eHjKObdtNRVPBDLClB0uKqsS+UTMYmYXqLIsF94CACCqnEOKo7WlDCpijKLkKP13ZzWCFvGd53pl9roTzEtKWT6I8wmQtRaQTOEyU+AIsyjM4ZCtBSIYx3GzWRVFkYmrqqoqn49jJpC8RCErZKuVUspY5bThMzkJqWU/iUZqVq6TT+SsKppmVMs6VJMAKS1w/GkedkahA/2pP98n075PttbM3vvUTmEahwB/vIeZp6HM9IJhgv4tpWvmuRCRoQMBg8JnmsHshzfFe/U8c5Vn0YAalWhwyFv9xLR9uZk4SwYrRKW1uD3h/MvzBzr798aYYxQWh0Qx4Xp2XVeWZUqhLKEs3fX19Tj2AnESYLOIDFBKiAoSsUryefUexpC0sdqY4TRqZ7XCFzfXP7z/Pueni+srBDZWGYNWm5iiVdD3/fFwuLi40MDzwBirumIijQiGi8JZp0Pw2qrCOhnawZyeArAES2NsTLzglheEVD+cZB4WYzRW9gJIwS16CDHTMAxVacvCtnWZw5BzPoTTTdOejl1Z1mUJWkP04bJpTscDZ9hsNiC6tQh+DM6V3TDudrs3b958/vnt119/6Dt+eHh48eLFN998U1XVbre7ubFKYUqpskZkArUqncOUkiqrMIycQBtIKWnjnDbOOaL0+PhY1FXRaEJYr9e2cKAY9dQB0soIFoaIOJ+nbh8t5ikje2boErAiSgAG+Nx74VlsWlSJCJgZMzMRJCAmjDlThsCZMgjZ3McgbLwQQufjMIY+5K73x2EYfB5DAlAEU1kJzz2Yfz7mfbQr/0Xok/8eYe/8uj+u+c6jIBEpY3Oivh9775vWFs5ajSLnmpAUARFyRgAUyDUxLXNjRJ0zKzHgmIpxPD9otNbM4qAXWdIPZjJK/laauvBRh0xJnYegmQlQJGAQABRqhQZAyTmvlLLWTPUBKGCllQajELK0j7z3zBh9kvnw5eV1jnQ4HLJSKSV52c7YwlWyzcI4ppQF2ylDXSHVvXz5OoSw9DllHxpjuq57+/bd6QQvX66aerXb7QDg6mpbFIWMryXjO51OQofAMxCjc06gJUsjjplDSIOPiBNcc2FMxxhF6hdn8zlBN0wcbdETivE5/qmp22aMKQoDNLV0AEhrTYgppb73/QiXpdUqjgOP43hxuUK0Suntug39mAgUskiwykfpCmOV0npyM2HOErDzDKVZIpmEB1dYeXkSehcNNlAfqWksHfUl8qH40y+L9l+wzs/7nJTzUheqiSvEzzbTGoFZnYW9bAhEzyITAjDRpLOHoilDwq/XWhuttDUpRliejnmxeue5u4kz/VyuL5yDyQtFWrZAzJOyA55lldPrB9ZaWWvRWECcR97PilNTHyJOH7rWNsa+Ow37/T6GYBTKJNxqWLe1cyYmJABlzHDqxxitrboQUwJtEICAJ3YuMRxPfVGtlCYfH9ft9tD1N9dXCr6PHuLorbVjjFVVoTIa
Bo2QU9gfQtOE9Xpd1E09jsfjsamrcexTIgNQV2VdlNEHA9rZEogk7Ml7GYMviiKH4FwhWgiSOzabVkKdALJ06Y7DyGRCCGJ0oZRqmibk3PfD2Of6lSvLUoKNMQYgO1f6sK9W26pu6rI7HfZ/9uZnjz/8rq6hruux73Kisqh//f592bq6XZ36/nQ6XV9fPjzcDQd+enp68eLF1fXl48PTOPLVtds9dptNb+rq+vr67u1bprBp237oAKA/DQqgKooYo3VlURTGmMPh8OHDh1/+5V8WZd378c1Xn5d1lXJGrVIS2TzW80NwH3MqoJaxjgzOJtH5nBiQFQo+mSmD0ktQmZSzYKrwprBHkIFz5sTEDCmnnDhyFlp6FI3NmIcYxhi7fhx86H3qxzT4OPrEjATSa5nx/9MWy793G+Ksgfn8nXkm/yc8/gc2OeVgDTGTpmEMp65bt7YuKq2RIcdMhjljVhn0WUKqUZ1fRL5QaGLszqlOME9HlMJFiRinMYOpiiLnOMmxKSONNFG9Pg/YS63JM79qASPJiELCG6IyRhdFaa3TmhQDom7bteihS1fV2bKqKu+jstZ7L/lv6YpFT1L6in3XyU9hria1gr7vu65LkcqiRtTDMOyeDo+PT4hwe9us12vKIG2Z1WqlrL24uDgej33fPzw8HA6Hly9fim3YwjdvmkY8ulJKxjRS0nnvvY9FEcNZxx9AgnRexnVzXJGNgUvA8N6nEIhosWOWzg+gjjFkwZXkzKxCSH2fYGKYaaGfX1xKwRfXbbN3D3lkjWCMVsgaQVut5/JLnP+Y2fshBK3Pqj04E5QRGpMEITXrJi+RexntKKVQXA5Sfq4OFyGVj6siOCuPzn90viCXIhhnQbIf7088I/6z1sBMNEl3KgBiYgXaGEyZSCWxMWJORJqmkwjmqg5n+U06e+/4TCuc2qRCupA3wGfoU/i44Ht+p1oprXkS02YBfyulUGgYMzIWAGTlPD4+DcMQgq+bUmutBiCGq6tLGTYbYzLD6AMxWmtj12UCjXI8ZkRkYqXhOHDItKoLAmW0S2F/sdlcXJZ+DMB5066Pp25VV2NMSgERFEWBALvdTvDPNzc39/f3xqg4ekqgCxA44jgErRTnnFKyzuI88RXTEqOU7GJZ5wBQ13VRFCGKOZ9mVukYFGrvh6JA55zw2bMPKXhA2G63zqjudCi1RsSmaRhxGLxyMaXc1hXFQbLAVW0ZFQFGYltVd3dwCeHFq4uQ0v39/eXl9vPPP3/4u2/7nu/v7z///PP37z5sNmVKKQQYfV8YuL68eNAwDPlqY+RMIyKloCxLA5MDc875w/3daei11cqqNOSLy0ut9e44ssKiWMUYfRiZQMAK1lqlDD4jtNVygCOimi1nc44IBoCQNQABAoJiSogagBaqHFMCUDKMS0yUOQFRhtlOKOfMIrDiY4wxdin4MQ4+9GMYYvYxpkQxT61SaYIsZzIza/P7E1HEj/oc59/8Q3HqD13nTwl78AeanH8o+J2GcVVXoExIeQhxRVVtnFFxaRQpNIhKmHiIkosbxAm9ioha2XlPaq1lJLuIZmBVOgRSMA+HEIuJ1IwKWSnWGo3SWimNSqNmTrMk73QASNYOoIiAM6VIxBP1kCjXdZMzKwXWFlPPLWURtqYM3kc/CqYmhZAUallqiFgUhTPTnCPGGMZRILzPnkfMOefH+11KSStrKyM8vL7v9/sjAFxeXjbNKqU0+t4Vpq7auq7NzFp9fHwcx1HEybz3MCtrCJDaGHM6nfq+L4tXsu1jjBLRUkpdN2w2mznST4SElCY9SFjs1M+GYdMWQiytk46TvB2jZgPJ6d9MhExQGBiGjAqUgnFMfTeWpSWipi6tZmQwCq1RGtkYpbVKKVGOGcjoymgNkj9m7yguRzCdaadN5inGLDWKHD3Zjwuan2fJOmvtkHqe+6L6Y03nT9qAP/7O8nlJwJCCiojylKg9U3cnZrdCBpxgI1oBzTgzIRERMkBmRoXK
aAOWiJABFGZmRi1yGJI5olKoDWoNTKgmUJKe+/kAYACZWJJ0rTRNUXBSl0WlFpLW9FCiKMiQsxSKU8YjrgvImaYxLmcCZj/GlPJwGjRgjr4uSu+9WJO//uzl4/7xur1W2qYQT/0QMymtU0oIYDUCJMVa9E2yhpjgu3cfLjc/LYoqeq8AU/A3lxenUw9A7aqO34aqacZ9EsteocF4z7LaX92+uLy8NoooJ2OhcFhYYzSKF6bWGOIok36cPTcQtcjxDmMXU9Aaxeddaw0JrLUMIL6HRpsYc1XVdV37EETjUBtsSrjcbnIOXTdU27UMF8Yx7I+HhG7wsanXpameHu5uri50HLuuc8bFkAlDuwJGuH982mzW4zgej8f1qnn5stntuqenp9vb25vba0GmWAuyMoWP1B0P3vu2bVMKVdX0fW9NUdf1er3u+7EbTk9PT69eTfv69vbaOVfU1ckPqKG0VgFzTokIQQQUMk/CzQa1QjFzgblhNhfjzIySo0yMtww466EAMKcJo0XExELrTUw50dzkTDlzyJQzx5RCCD6GlPIYow9x8HH0wSeOifLMumSY9JcBgGee6z8bxv7/Ntv7vfHv9/6UmVMkU5RV01hTIGhjXFFYp40xzmgyRoncnMAaAYB55uFKpoNm5imXME/pRKhNdr6xtXB0JBYicwRQSrV1BYKqR5ZjZFad+AgguhwK85/LS1cAIOAO51wMiTj5MROnvhvDOFIGREwp+2EYRy+9IKuV1jYMAQDEwM8oTbMxenc8Cq5JnjXG2HXdMAw5pqKoyrKOMR6Px+40IOjLy8sYkzC4hbjWNM12u22axpTlw8NDzvl0Oq1Wq5/97Gf7/V6oeEvYk+JPyDHOOWaIITOzddbZkoi6rru4uBBk0LyMNMAzbHK5Pwuhp6oqyIQql8Yi0tj14ziOA7d1BiCtkTPwfPwak4qC9x1oBOcgJRiGIaUKAJxB8eXTCqxGjaiNtkblFIiEXc7GKGakmD9BdZ5DOsdxtNba2a+OmYXRtWQ/y66Qnu04l0Hy+2cU9U9j3rI84OMe/hQ1ppX5DPyhOfKdP56bijKaE7zLov7F4GPQqFChtTYxKZ6hWDwLlBGzHE9aKaMpPc//zsbeoPhZNEtGfEKn+EO7ErVihJwzKWRAhVqbWXlMKZxAKLCwJFMiBL3okpQlhDiGAFcXeHt9dXf31pWWWId+OPbd4GOJSJRg4ksQMmmlGDAmTg6+ffvhL37xM2OLYTg5o/xwqqsihZhzKqxL0TuzVQwaQRk4nU6BoTaglPr+++8v1pvb29uuu7MajEHnjJk7NFpjWZbD2C1ZpjFWa1uWJZMXaElKwr0zZVmKu5m1Os62J9Zo5mnbnroBYMKI1nW9atvd/bvCoYBsjbWnkx9DckTKuqLQL66uvv67v/vXX/3V4cPbruvqq+tE3Hf9F19cHbrxn77u6joWVg9DZ4368z//81//+tchpPfv37958+Yf/uEfYnzWfD+eDherFWXfD/3V9fbwtGs3q34YlFJlU7ftenc4dV03DOMvfvELRujFcmHoLy4uBj8CQMrsjIHKwdQWRyJKslxJfBhksqDFaVhryDnPhBlWygCKk0dC1ASIshABCDKwynGa7RFRoulfZk6ZcuZMOSWamXs5xhhj9jGHmEKmRJSm5YqgEfkMDk0MEwLxDzJOPwlyfzz6/EvD3h/a5z/e9nDW7TlvCv3e75RVdTr2nGK8bJk9pXHoilVjtVmhMwgUo9cKhEQpMyPx/zXaoZk0GIlA9OgAlHNQVZxzjiGllPZ9MMasm1bO7gnFDth1R2OVUYqRfIop6baunHWZA2URLlRENI5B8dQWM1YbJSMiCH7yuzkd+xASAFkL0o6yplBO7/d7YKW1cQ5jjMBKKe1cGcfctu1qtVJK9aeu64bTqQ8hpJCLojSl9t4fDgexX2/bdVPV4xAOh4N4xsqijDF5751zVVUys/gnuMIYq7quyznf3d2tVqsvvvhCeO7S
+pCuTtM0Wuunp6e+79frtXOFPJfooIqxQIzxu+++u76+jjFqjdYWk+N8WfnQi/RDURQ+0+PjY9d1zDwMAxIbCwaQOclzOQfa4KZeHYf+adcnhHplATRACJ5XlfEhDR7qEnLmENKLFzf7Y7i5uuj6J+TMFFHhdrM97B4LqxHROBdCKIqiqqphGIzVm3YjfTZBpS+Ai3W7yjnjzKbAGYMqUU2sZxZop9B15Rw/X9hEpEEv63mJ9FIT4xkNfNkFk6ipAZF+ECpeIlrmhQAzuHMOM6hAg4JMYDQSo2IFOPiRRaqWhas6PZfTRio/aXUqmAxjxe4KGCbriSXopixkeQCYaDzyFqyRoV1KWepd6UZ2EAQXjcYiIjFk5hQjzuIA/alDRVrrIaUYY9O0v/7bf6iq6j4/FdZoVDGkpobPXr345tvfusI65x4eD6dTd3l18/Dw21KSzgIO/VgoBRqBQCvjKbCGbow+8vriwh/h6fEOkdt683B3f33z6rTfXV1sFELb1gD7nGE4hG0zqXbFGN9+eP/VV1+t25uxP8bQpxhfvrz97tvftvVkHikiFeM4ErBxVuQAq1J3Xff09IRgGbBt15eXl3/3d7/e3lzQJFCHAg0jgpvbq99985vPP/9c+pxP9/dtU2lgIGJWTVVKekCZH556Uyc09vJiHUN/e70ZuuN2u/XjeDh2meHu/rGs17asPvs839/vt+vi9sXlMHTDMFxcXDw8POx2j7e3ty9evPj221NbQwihuNi+f/d209Q556ury/1+f3mx6UafAU1R3ty8eHh4uL+/J8DPP3vjnNsfDzcvbouiqNcrRPzyyzfDMAwdIKuYQ86MM5h5DLJmcHI2yBmRp5wWlDaWNYsxUM4ZMiBqJe5dzNKZmIfgGdHknEOSpiYQUSQmIp9ySuSz5PZZNClzzow6xr7rujEQaINKAAdkjMmJaR5hACJDRgbKaZnXSPY26yamaUNNW3H6h+APNkV/7/enAdOfECr/eGn5SRBOmTWqxJAT5ykRMDmb4FNltXEOWTNJXpm0FgyLIgWLRZac18vFl+Aqv7Be1zR9Yjx1PjMRwKtXr7quG4eT0tC267J0yBRCYJME4LdEcav0Al1ZLouIMt/OmRFzSkwUck7j6MMwCgIAgIzWbdNM+LGUUkoXFxeiAR9j7E9dnA+UyhXH4/FuvxfIpeg4MPOH9/dE7P04KyclqdJub2+lpEPE9Xq9Xq+l+DPGjuPYdZ3cZIlPiCg8QuecZA/Ouc1ms9lsmGfrRQBh2g2DHwYKwZ9OJ85JPIwog4Cbu66TDg/MruU8cy2QOGfOKsuqrXWtAZm0oANQQWHRGBOCAMNEfw4VMEzAmYSIhcXCqdKBRrAKjVVGgXPi9CTqfywYpWmoaJ/N8xac6iKozTPi9Mdd/qUulNG3lHeCvpEGqcDH4axtIr9/Luny427nMkR//pGop+LZUBqf94g0FKZZPDHPMBPhXxJpoOeZpUKMgErSK5yoBmoeUk6biz5q+CxKCxOMeUZIAwAoVKwYUWmNWoNSjOisQ0RGjbM2Gp6LU88DxeVW9IeOCIYwVFUVQqjrmgi2F7qtK2NUzGCt0VpnppgZtOr7PqVkNRQaAjOlmAGRWQFkUjFB71OlNSI6o5GjB1BAIrhxdXH5w4cHawqlIDMoBQtWCwAeHx9TSv/+3/ykrZtj8s4YjUoBB+9L50R9vyzLw/60Xq9l3wGAc44NGmPqanU8dTc3Nykl51xKAREBydqCORNRXQsiDJg5xiiGzC9ub1NKRqECFj3oEMKp7wjAp1zVK84+x+AsIoNCZKWOx67vx74fExu0pVKKERITIrZt3XWd4AZEU+n29rauoW6gKCwAVVXh/VCWZQjBGAwhJMqbi22zasfgT32fKNfN6vLmGhQ2TWOU7roTDCoRCSD85fVlCKHr4HQ6JSZgjaiNopQIFAqMHVGJzgJxArQfj35Zmhc0qTwi8+TDJ2dIjAFBT6khUwyJso8xy1Q1
Acs5HILoaqaH3otYjBnT/tSdTh0xgDLj0UuDQ6APWgGRAmY846fCPLiRXf8nhKo/9PhTIC1/6Jv4+6AuiYCRU6SYc0jZp+xTDjF2XVc5VdflBJ8AVAwaMBEyICj582luopQSIMbEQoNnJJ7WFgGTfDCZmKaS/OnxUTYMQx5O3dj12qDW2lkDoCbO/5lvmaxFppRSnmEL8ozkvc85G+1QMYJWaBJTXTdKKaO0iC7GGFNIOedVXUt1Mo5jDIkIlDLM3B1PXTeMQwCAqizrqkYBPaoxi2V5ZkRdVVXbrnPOr1692u12Irl5dXWllHp6enp6enr74UH25IsXL9br9TAMC8q8aRpp9AkyRTSvU0ox5hhjTsQKvffe+xShKo18QHL686xAJnmuEv937xewT0pJAxoULS6ti0Jpa5U+Hf0wDN5nY6CsazSO2TOhUkwZmEEkTrxPE6HQ2qau2gaMAqOxsEYjNHU1ep9SLq0zSFZrp43wDgWbijPoVAZ4UrN+MlTGM6DTeb9hanXOU8AF2rD00s+ztOW4P9foOocACP5NsFBCM2Dkpck5YTtRVIJkgIEySwFGQIWTWoQyonMGGumspQwIqJFhknQ5M0xA6TYBE9ICIBV5RQDIzMRIxKCkb68BNSAoxNkqQystru5CHlTAKouDGCMCcqbzUnjKtYkPh1PO3PeDtY4hi1bMqqmrugRKIYwAkIH7vj/1Q0w5jJ6ZXaFrBBVhTAwMoFCDisxDzN0wXtSt1rqwOgavEay13g+uKG5urr759vuqbqwD3wHMLH5J43LO9/f3CF9eXmxy7qvCECcAGsehKIqHxzux2ySiuq7v7x+IWaGW0lspcM4A8sXFZnc65JxjBBlR19UkZLjdbvu+Fxn2vu+Lokgxvn71QlJnRIzeO+dGH/b7o1Iw+nB9+zqc3iU/rNpabGHQuHfv3p/60I8Q8livrTFO656IQhjLVTvGqLXebren0+l0OpSlu7h0IuMnWy/GuF41vuuMsTHGSPDy9Yu6XR/6oR+9Nm61Wr18+VIo/BKbgajvu+54WK1Wx/tjVVVFVTa3V4xKRid93yEqmByQQSHhjFnlkJUcvQAiuy5UT+l10+RSA8yc57WRco4+EUEGpswZCAB2ux0R+Jxy5kQ5JQF+p6IovPf9MPqUAaEoiphySLRarRa4HLDIvBAQDTHIaXw+bsA/DF2BP6nP+SfO9j5JsZdRyicxj5mBMQNn5pw4xcVsXo9jGIahKExZWGuUtVbIW5k/okAs/0o5OEmc8POhlsI8gSdAkBmhQUQ/9lVVVVWBCDn6nKN12jnHho0xWj+3auUh6P/4LIE6hT2lRKBqwnQ4q7KVXMwY44C47/vD4RBjrgpX1+0yd5lGmykNwxBC6I8n59zt7S3OuEoJ4UJrVco4B8aYqqpkePM3f/M3OefVanV9fS2WC/v9fr/fPzzsr642P/nJT169ehVjFGDLarUSPczFyUGGMYJYkRjsvWelx1GEp6EoipxYKzbGSBohMUC6zYxKnG/nDCCHEJqyquu6LSuAnEPM5IFFRDsygyuUtTZOMcMCJxk5aa0AKEbyPqSUCmubsli3ZYijNcpZw0BFUSQShUM0RgGA0uCcUPvHJUqJH6HctyXsfVKZGaPhk4n3WX9egmJOaUF1Gu3gvHT7A7O954V9LgaNs1om8iR6+3GYBHHkIWHxAACAAElEQVQskT9UiISsANWMqCJmmLyaNSArVIDGOCQmnJxTpgcxIrJilMOInl2dJ1lC4fjPQAU0GrSa8mYriC4lmfyEA39+U6iVBoB0Lg0BFCWJnCvslFIKedW0lELlwBlVFjpSjjGKp/ng4+jjQgcxCNYoRgUKoxTPgCkHn+k0BEkajMaYUrEq1m1z9/BwcXFVuUJUIpuySNETAwDFOLmjyBt/eni8ubpAvgLOcRwUIBArpeLohZawWq1EdVEWMyJ5PwqDTfLCvu9j9LqsAEhaL5IQN035ww/va4NKqa7ryqrI
Oa7X62/u37WVtVpLWDoO/tCdjDWR8uZi+8PjbzilqnTaKGt1ZjjsTyEBAnRDKldonSnLQivuus5oVW8ud7vdyxevd7tdWZa/+c0Pb7646k/dOI6rqkwpNE2tFFZVAUA5ZzC2ahvQqh8GpfVqu6lXbVmWqc/OObEPq+saGfzog3Uq8Uhh7A+IWjtbluV206zX7fF4zMApUkgRIGdmpbRWSNLrwkVrlJgz8+RpMJnU8HObjYhJVB8zZ2AiENtX4TuoyahEeueZiGKO3vsQPbIuXFE4PcYEo49+jDGnNNm/OKOctRpN0dTnidfzDPsPVHv830hhmIFgf2rMgz8wVDwPhAAAqAGIGBNDEp/ALBgtE0Pu+xE467o08wmCKFm2IoLFwxYApvjPuHjNSDNGSFDzCEQppYxGpVR7dRVCGLveWF3XZVGsKMeUEk7TGnmWSbFCCIIx5hSJiGUWIs1DhUaGtM+BHIAZ+35k8jmlEEJK5GxR103TtHEcZfgkj0W3vq3rsiydsSGE4/G43+9lEnPaj0IsrcpCpE92/U5AmKvV6vLysq7rruvevXv3+PgYQvhX/+qn6/X65uYm5yzfbNt2vV4LCYlnHKY0x2OMBrX3vuu6oQ+EyppK+Elaa++9Vs89TKXnKganWylFpIxGAKCqqu12u6rqGMf+ePJdF0efcgRg68A5g4gxxJyFMx5lv8xz9UkwqV6ZsrCb1Wq/T4UxpTM5RVcWHeXgB+ccERNF55zWGjEs2Ao1mwBPQtXqmYSgzh7ni5Dn2k3+SoqeqT09dw60sx81JOfx3ifZ2yfrHGe7HwDAM5kJFkzwuVU9TjJgcmzI3WW5w3i2S6RuY4FfgppnGErGK2rW83wm6T6Te2DS1AfFqLVW1og9OhqttDbGqDOJhkSJmQlm0xlUMkFUgp6U9ALmVJJZZu3MvHvaXVxsdo8P7coQ5bJ05EelwVprnEWtQCtlHGrNzCllylkr54yRApkBYkqRcfCBUSOA1kgplkZfXFz85usfmLMP48315b4bm6byMRMr0CrnkRhjkoyZ3/3w3e315fX1dXfaDcMAAEVRcM7W2ratd8fTq1evxnFcprPyRVEUTHxzc3MuNC9LS1JeskgZY/TNRVOWZR/9OI5NVSsGztnqYlaiZkSMIRdF5UFXdcspFtY456zSWutIcQgEqFCD7yDnXGDRNI1WHPp9fzy0V7c5Z+K0Wjd+jIfDRJDvutEY06XUtq0fu01TPz09cI5usyaNYwyEsNpumlXrnIs5CRP/cNgR0c3NjSmcNYpyhMicUsgppaStSXVbNrUxZrOuiZGIxhjGMYzepxRjDMAVM+uZpDetsanHg4qRATLQBPMDGKNHRGu1czoReR/HFL2PZdOkSClxioI6HH0Yg08h47pZ3dzcEMOhH/ohOueKUnTncwijVBo5xRg8ZFJVuZQiUytu4l38AbG9f85N4fc+/sQm5yex7Y80ORGVqLfliS/MiTKRMmYSqIweo43JWCJIKVXNRsLSGZrgTEdjVrGSSzGziCGlDDmnnAMTLqqFMcY4jjzwMHRFUZTOGqtSTACg8fmIlF9eijzJcJlZdFvMrIY1cw+UTH4pg/djTsk5t1lvi6JgomEYTvu9sNFjjCIiJTwhp804jrvHJ1FUkaeTFr/cN7HRWogBP/vZzy4vL6+vr5n57u4u59y2bVEUrmoB4Hg8xhhPp1NZluv1WmiCsp+lz7l0/4go+DSOYwiJUHnvcwKt4eXLl845piR/6H3Uhsdx5JRijMZNXd++7/f7/fHYvX79UoaFTumcg8RUAQIYK7ofOi0mCawlRUNAyqC10jqnlE+n01VbF4VdNfU4nIrSOudOw0kbLIoC8SgKlilFQSjknJPHKV7WtTYWZptANKDOrOyWB53pDU5FITEzC6pWBgnyliWRlImynI9wVvadX/CPF4JTUxSfl9MCtGHmxBkV4pkFEmpYflmfUSamp5tDGz9P6VCCHxKwVjiDNj+BlSml
GACN1tYoCXtKKaOV1udKNBNtGeGjtzCbEOU5ri5Vdc5Zg8qJj/vDzdVlfzpWZUmYlFKoeEECy41KlDUR5xwDBALUkZVCtApVzpyIM2E3+ExARJWx2mDOed02VsM4joDdxcXF7vRD0zTfv92hgqKuqqqsqqrrOlHZPR7j6bD//M2rHNzQjSnE7bpOKRSFFX5R27bvPryPkWQEXpe1oKmrcrVarQ6HwwwCz0SsNRKJjgwcT3trrdik8IhE9Pr1674/WauLolBAxqhuGIRuC8ZG0tJQXq9bq5Tk5X0/iJkRgPKRcqacc1m5VVM++ENKqe97a61M9f7+7/5xvYZhGDartVJQFM4YUxS27zIiphwg09XlRUiJmcu2Wa3X67aVhXQ8HqW1I0OKUlufh8f3dwa4KCpjlEaFOfmxS3EkwLKuXVmWZbEu6rJ04+j6sYsx++E855uXxKzTBwo4fzQ4L4pC4OUxxgys0FirnXMxs9LglLHWNqsaQAnWorp8lXMeQzp1fTWMMREqo6xzzjEBEWVKIYQw9H3fB++fhnFRxoBFPIFo1rD9cdj7/1W19+Or/JEmJ4FCmUgQpMw5Uc6cMscYC22WPRZjFHG/nBkRlKKzLN4AQAwJQU0SL6Ck7ZZSyimTEONCSCkTkRJsXspVVYl2ZYqjtKoLV3l/VMgk8nJMABCJs1okgzUAx+iFNgAAzpVa65RIpmgCgWHCiWyOWuqSlFJ/Ei+NbtJ2ylkCsDBGT/tD3/cCclFKCeqMma1qvPdddxyGQWtd13XT3F5fgSg1930/DMPpdCqKYrNZ1XX99bfvUDizzDIrbprmcDioyWsQRI1zHKfeoMRgmfYZ6wQdaq27vLxKKQY/yFAhpYQKc87FPOoTF9ynp6cQQl2Xt7e3l5ttWRZxGIdhkLGf1pqZrLWCt5+VICDGmDMrLbIkhKiNBmY4nXpkcs61q+ZwtKUrCmuOx2gQ1qvV8XCw2kTKcp2iLHPOMJiUkmhi6boRZFpKScQC8+wXj7MtKqiP4tZSu0xBSPCZM3Apz75LeAYbOe+xL188/2gyU/4oshLRJNMpXVUp5mSmyM9OfsKQkXQqxwTiBoLPiRoAZJpdxATY8iOgGiulABfnZCJaNgqLdIM1qDUoSR0VKDXPP3hqikojFBUoFA2qqS2fKecMzMTTQAEAulNvjJHlRESrVVPXJnNgzholkE+ksEw8+FjoKAxrLcxXzmidMTYxJOaY86kbYoyWuayKtip3Q9c0ze3tVXc4alNWK6uUKsuSGYIHZWNRVFVVSZcSAEoX7u/vX72+NUZZZ4axf3G7lfRXEMuIGEI4HDrnnPeeqHTOHQ77ly9fC7ZT+iKyEsTuipljTE9PT5v1pfAWSuAQwps3bx7v302CRznJtjKm1NaA0ka5MUar8HJ7gUSInDPvdjtmIGDUGoEQMcboCiu7Eikfj8e6rne7HSIqDau1lv1iLWitBTektQ4hlGVJMV3d3PSnThuzWq2KwhnnpA/88PBARG1dWWujD0lpBRi9v7jaEoEf+64fAaBum2a9ckX19HDnyrpZ12VRo1ZWY1UWzpDV0lbRzBxDjjFGJiICnNJ9QJ5oetIM0AgouXFkhcY5V5XWFgSQMwfRSdBKoZFc6tsPj4+7/bsP97v9IWRiUJmACARrba0tClsUhZEuDPFPfvKTRU9HpjN938tp8/sj0J9EYfgfwtv7MUyAZumHCW5JNAxDaSrEAs8IVUop8f6WqaY8tJ6QuPKvYIgXYcn94bR0LSRn12gQ8XQ6ee/rulyv1xcXF4g4DN3d3V21rZe8e5oXMkhuZa212iw9LrmmjPEk7E2q/8rJhlmtVut2TUS73e5wOORIEs9kE8YY4+wyRURhGGWULTCnEIKANfaH+5xzjF5GEVVVCVL/8mprrRWbPYlbIYxPT08i8FmWpai9iODnMumUUkbSXnneKHWnhMOyko5oWVbDMIzjEMO43tQxRqhKuWytLTPHzOM47vf7w+Fg
jLm4uLi4uGjrBlX23kvBiojOOaKglCLiGCKzUmgBcoyRALRIdQEBgFYaIAutXmtdFJNTutSUiGi0aBQIPyGklCxPxWvOWW5+5YqFtSaBfCna5lYqGVd8uhSJJUJP3VFEPJNt8ynCGQvifNz7SQL3e7fZ+S/zJFKG57+oJ2OEZ+iNVqCUil6kgRk+FlgS02Oaf5kQFCDBhIkRXiTAR/klzJptk+efcPvkHFFTM3Oq5FAsAhUqjagFE7q0T3hWgkCeZvCIKMhe770xxodx3bbOgVLOOQcKRCsLZ5lW+fiUUs4BIFACZtYKnHNDiASQEozj6FPURM65qqrefhiA+Obm5rdff7e+nO62Mebyst0fuudCdr7yalUfjvs4eqN0URQhTMmKc+5wOEhIKIoi58PV9fUweDFC8T5fXV0xyOlBVV32YTBWl2UNc//geITXrxqZ6Aul4fJq+7vf/sOL643cZ40qhFAVjTEmEiilJEQ1TdMfd4Ls77qOGJDIFrW1UdsipxBnf1pEDDECgJjEtm379PRUFIXc5JyzKA6WhYsxNE2TQ1yv133f28I1q5ZzJmDRprm6uso5a4Vd1w1dd319fX19rRFjdyyKwq1WVVX5GITpn1LaXlxxjpMCojIoSj1KydnIjDFGhJBzZoFtTxvkGdIi/566Tmu9Xq+LokBhR8QUgi/rNiXfdafj8XTqu2EYhjGklKrtS+fKV69erNeb9w/3T4/HPoxMWFVNSmkYur5nZ7VzrnBGa/2f/tN/EhX+tm0FlLfdbnPOP/zww/83oeqTh9ZlPWlInP3LOJsy/+i/WSp++m/Z+zPXfvrO8oUhZY12RhmNRkNhtXVGaSirCtRkm6K01mCMtYUtd+OARqOxWY4OpRAVEx+PxzT65BOEjJkK1CtXbsp6vWoraw1woVXtbFu60iqnIcXhYtve3ly2TUk5xeC1Um3T0OAdaEMIIRvCxpZNUTllnTIGNOeUfEg+ciKD2irTdR3lTMK9iAGB67rebNeb9VYp5YM/dScfAjHHHH30RSDyIY4h+8iJMDMTgiQ4Pj7uT+/vHu4ed6d+TMoqV8Ycej9k4PXF9vLqsm6bZtVeXF0S8eFwfHx4OhyOu6f97mlPBG2zTiFs1+ucU4phe32pjLrfP+XJS5A2FxuD+O77H9iHy/VGpzxm2u0Pq9W27/vL62ut8Z9+89sXL64O+3ukaAypTE3jLjdrzZRjzAyZuR/6u/sPdx/eMaXPX9/+6z/7KUJOafTdaRh6772QcgjAWek3ajTOFDWoovP5NHhQQJBEJUdhRkCjrQLbrMtmvclMiVhrSDlJxAljp4CS9wZ5XZaQPMbYOpMXORJj7FRhKwSoq0orJTwgPTsMILFxRhYoMxNzyjnmnCgra1jh5FeHQAikEIxGpcSWj4VjpxUDxJxcUYAAmYxGrQQGCQoNKSTkDDkSJUZWWllrnFZWoRYRFCIggkyQCTRnJIJMSKwYlHiDEShURhmjrFFW7MTkrxrtCtTy1gyiFh18hBgjM0XOiXMCysgJmTQa55Sz8p+2ZmKjE6l5iilNK4WiUqQILYBhVpSZpryeEDknD5ytpjh2h+OTRgagh7sPx9GHsd8fH3LqLzZ2tTLO5lVbFmWplB2HdH31uY/47bcfVqvN2x/utFUpBmSoa1eX5dD7EEJbtcPYaWanoCptXeDN5RaRM/Omvtk9PF5eXrz74bur7QooloafHt5fX7b98aiA0uAv1i3HPHQ9ZL4u4822jf74+sXN/ulDdzx98ealUtA2zdu3b19c35au3D8+OW1UBsy0vlh/9913r1/fKKRu3Id4yhSrqnRFMXSxLFo/pjDGb7/+sN6osrRffPXyeNppTS+ut/d3P6zKom0Lyrlqt8c+HkbPbB53O0VEqWss/Oyztu92MYXP3rx5POyO4xhyVFYrxVXJOXQ5+n/9Zz8N3hNBiJyLqq5XRdX8zV//089+9uWmXX/9j29fXa4swsV6ZRS2beOjP3Td
+vKyaNvDabdqKmd1TqEsy7apy7Jar9cpU1U3RdVWzaqo1jGjj2yKmqxjW0bWfaAxESlrXemKKqQQUswpGK3KUpeF0Uicg6GEeeA8KvZFAYVFgtEPndZJQWaVcx4TecDkKtuuq6rSAJHzqIAKDQZA5WgI33/33dtvf9jf3UMmDZgGH3zUgKOxd4/3Hz7cEaXt5eXV1QUDnU4dWMwESStWbiQYEgTlsGxrlTRQppxC2u13T4/7xFy365evPkNl/OhTzEqBQQWUUvBGoeKsOGsZQCIrYDWJbPM8rnzOVSfg23/HEHqeHf++jPgcOYmLxpiYM0Wdbc5ZZaldUkqQibUW/BkQa5Sti0QUfcg+SEdjdXXRVLUxhjOBQhTzzEyff/651UZbI8IonCnmFEYvpvJSCQGwtCiZue9PzMyQl+aqJLxN0/hhHPyoAC8vLzerbdXUzhTH08mP4/5w8IMnYKO0Uqp0RX94WkgRkxJmlGEgvXv37t3dvXPu9sWLqqrun3a/+93vLlZt0zQXFxeXl5cX641UeMfj8fH+QZRWxGFnu91K60OEp4Gy8PeVUpUriOjy9nLoew3YdV2MsbKuqiqD6vHhXlJXaegBwGbTlmUZw2iUdk6L0F9KifJUMZ9Op/1+r7X+xS9+UVUVAgnZKM3Kngu0EgCMNhmQWckYai4+FrDgZKgAMwXndDptNhtjVVEUyHkce/kstIKFb74UXvKE8vXC/JOKX6YaOLPLlwI9zyqdy48+Qax8slyX51rwt/KHdOYEcj7MW7zulrbnUmjC3PM8LxnVGSLzGd55BkCFHw3F4RwRBlPgkjei4cwREEDu/3mdKhgAKZE+uY58bWZiBjOTEJFhQsoRPcvi8EweKIritN+llLSeWK3WTrdadE+Mc0RHefacoVCKmZ2zMedMyAzOOR8GjYqVQuSUkh+jj2FTGFvW3SGFFAXGnXPyQ3JlTZyMMWWpEilg2u2fxBQlRjZGCPtOhHPb1uWctcFpHq+ULI8QJh8SMcKUBRNiWNC8lSt+OH148fI1EXbdB+dAIKDM7Jxr65qZjDGlcTFmozCE8PDwVJTF3Yf7q6ur3/3u+5sXL5qmATgBgMD0vfdFUSD2WmtR2IgxOmdkbC+vUCpLAMgE3fHUNk3bgg9D2zbOuRBGQcAJZ6MoiqhIay169+K9LjdfCABykBrtpGfonDOspLhsmiZGIzBy59zl5WXXdcKXL0orTamcc8KEiIwKEZ2xzaqtN6uba3r7/n1mVKCccyBqUzEfj8eUQlEUFxcXhSmGYdw97rt+ZEZrivUai0yAKuZU1q2rG2vtt4fhcrM1VyZmHv0QM19eXLy8ffU3f/f3p+OBGFfbTdO24xDSOJxSXDmtlMo+JiBbFrZ0Yz/849M/Xl1dtW17e3u73+9Pxx0AWaO11p/YSv6hBuQnj/9uYe/HmLflO+KvKb0oypAzz70p6fMK1CXPdGbHzCmR9AA1KqCJDoVSGoo3ikD7UH14916j0tY4YxUqrZRBQAM+hjSGzIQMqJXVRinFmdCWgtiOFHNOPo3TKCskFr83rRUqxYqIKFM/nKw2m9V2s1pXTc2Z9ofD6fCuqMowBmRcr1bG2jDGu4f73ePTaiKdKO/94XQ8nU7DGHLO3/7w/Waz+eqrr5j5cDy+e/eOCNqqLstyu91eXl7KAEMGEl3X7XY7ZgZiY4y4iMnBenFxsd/vRW8FhcRWFDIvVEoNXX88HnPOtrYwu00aY/aHp8U/6MWLF9JfgkzG6MXACEFEibkois1mM2G7tWZ6hjlIb1Z6hqgRAIxVmIiIkcSAnICyUiAyQ8qgMUaaZpiAUMlwxbraWht9WhwBM7JMBBER9CQqLRFRYHI5RKFqLL3QBYSyMPDmWdcsQn32OCehL/EJZhr7wspf4uUnpPVPwp46o8mfR53zPG/5+pNItrQl4UcwGQAYY/g0SCvhruKE6JyviLOn4PNzIaCeutw0gz3lf5a3IGgaRiAihOemrlKK
IC9S7Bomw2GlVN/3Oee6LCWNEOUgpVSM3jgnHErmLGKjWhlmKMvSH08pBw3Q1NXTcSCY0K3e+/3p2PdNrtuqqHa0zzkao5q2CmGMDO1mrTXmHEWmrl2Vu/2hqVfWKSISCXIAEBmjy8vLnDOTlT6hJM1Le6BpmlM4yLhOnnq5J8aYvj9VdXE6dafTqa6rtqqdK5Cpqoq2bfpTV5S2ruv94alsWhlMuLrsum5zVQFAXddGaVaaFZZ1fey7w+lUNg2rR9BKQiAjVk0zBO+cy8BotPe+Q7VatVUF4zhuN5vtdjMcD0qtBEDedWPKWTwfnHPEMeccY3ZukgUflNgTKlFkVLM59mScwinGiMhVVUjEzTkx83SeAHRdJ1iB1bppmkZbJ4R60VVBFdFoZvjlL3/ZDf546vu+j5mMMduicM6NIWoUUBhlVqs1NhvlbPHw8GRO/aHrUyZdOlNWx+Nxdzit2tU4jqdjh8qUzpXO+Jj6w/5/+Q//h/3u+OH+4fHxcX/3gYiKoiqckWFWSHTqhsQkyMfox/1+z8zrtmnblikOp04AuiK0MW2rH0FM/oeHvU+ejz+C+8sIgXNSsqdE9ZisHJKw8BZzzoxpwiJmYmLKGVAjKmBQDJyJmFNKQKy1RqUpRWUMIGaGqZyZj4MMIBKrlDPNLndqtk1XSiGaJe+u63o6fWaChJyAF00jSyqEcDqdYhTQMzzeP65Wq816fdiffvf1t9Kpf/nyZXd3N47jOPrT6XTsTn3f+5Byzp999tkiFCK5mNbWWnuxXa9Wq6qqhJB7OByenp72+721dr/fG6Wvr6+NMcfjERG3262EKFBYVCUhWGtdUQDA3YcPSJxSQoa2bcu68t53h6OcDrLQZWOs1ttxHJ1zw6mLkSSFZ+ack1KqKMoFNyV8+bapLi4uJIWcQe3iWic+Ekgs8txAREI7VQLkR9AIRiGzijEBg9YCH4jMrJQWhyap1WIQ4gcRkbLKGCNBVGtdF+VSOqcQqSJmtrOLtzzOVcSWkHZeqC3xbCk95e1Y52QUuqxA+Z0l7C0LZr7Oc6g4D3v4MY5m6mkgztO4T9WSPvmr5YsFFvT8C7MO04+D6Cf/Li6AcCY6I6nMsp7lzkhQzJyzWDdn0vrM4ZKZZ+6EmFtphLqu5ZNyZZ0pWmuZe5mGEyU9SaLIi4SiKNSxg0yugLZt754OiVAoEiGlrhsOpz5cVKgrZrZOp5RevLh5f/ehqBrn7GbV7g7d0HPZ0NXVtus660xRWOe4ruvj6SAfWVmVZVkOQ8/Mu92TlERS0BhjrdMAIAWfUirz5LdHoOq6HcfBam2U3u12RNSuSymhlAIJOIe0b1VZluVhD0VR3N3dWWvH3jPzYb+/vr4UVBoAIGhr3cPDU9d1FxfXWmtEDpGtJQEBiN6eFJrxcDrG1Db1zc0VM3OKq9UK88K1VYgogFLZg5eXl8Y4iW1KqRSJgJh5mjXErJQy2skuNsYYO5GXZFPUdS1M1sene2utUiA3IcZorS1chRgFbmOMIUailHyKid7//d+3bVu3K2PWx+50PHZ+t0PEm1efHfaH4/EYfcoMQALiDoD66vbFlvj+4en7D+8ed4dxHFNKx/1RhpEE3J+6kKJSRhn3/e9+1zarTVv5vqAUrS2J4LR7SHVR1+1q1WitRx+HYTC22Gw2jOr+/v502N/c3Gy3F0Q0dEcVo9Db8OOAJ5neHwpS/53D3vl1n7cxsJ4gLZxzjsQ5c0qJaJKxed6cRMzsx1HXtbOlBkw+qMwK2WiMMcWYY/YpxJQSEiulrDFl21DKgw/LxykvQIqYiZ48I2KZOWeW7E8WAcjcQymQoxOQiCkTIlrrjDF2zh8jRSRUjFYZRHSN+f7bH+7u7owxm83m5vJmv9//3X/9u5dXm2N3enradV2XKOMsQ9U0jWjNHA6H3X7vnPvss9vXr1+PQycvpm3bHOLj4+PDw0PXdZvVWsSHpNUpopR1XYcUGcE665yLlIkohuDH8Xg8
Vq6QhV7XtVHqsNufhn42zOPVykk/syiKp6cnBBqGwSQUcJAzljJKn+Tt27f7/V546zDrn80ZxRm5DafbJ+IkiAyZUkqZIiAoBUygFVgjIquTUKXQHpRSVVVFLxgtgdVpZswZiMiqrLXOmUIIdVMajcYoykw5xuQzRaP0M/wlolIAikWW+pMyawl151+chwrJLqWvfl4vLtHuGa6yjK+naHSGYRa7g9nA77zaI0o8h5zzFyaISpgrRUDFRNISXG7y4sEMM/RG8tnzMpbFTl2rRVQvM+U8qczkM7gNAQuwFmByxwaB8ZMooejzWC4fLjPvHu5jjM65snTGWKlILOqiKJTRqHXO2aCyVmegtnZ9SkoBMhhjQgxNVTurtQKfswKNRjFTiMnHlDKC0sqqoi664fTy5e2H+/dNUzmjbm+un/aHGKEGWq+a9bpVCoxi58qyKlKuZFMXRYOI4zgQwX53/Oyzz4qi2O12wzBcXTXGGB+GclXmnGMm0d7z3htX1nX99t27zWYTY7z/cFcWti6rvu9XbaNhsNbE6GXkCUgSsfa7Y9u2oPQwDLWxl9vtZtXs90dAjVpnplPfjSFoY2zhQkhKAyi21tjCdP0QcwCFBFwYMwxDSuHycvtw/6HrutW6WdfV09OjJOXSxmiaRhmjRCEcjVbaaGe0y5qJyPsgGrPjGEIIACMzC0alXdVS7g/DIPVuXVfDMHz+2Rd93/swlGUtVKics/ch5F6cja0QMK1lUETw5suvYk7DGPp+NIW7qkrpOT0cRltWK20Uq0QQBn8a+sH79Xr77Xc/fP/2rXHlqy8+/7Nf/vm7d+9+85uvu/2hPx4PT7vEpJSyrnQOFeXaWQOgnXt5fblt6xDiqevIqtPpwJnW24vtdtv14+54jImapgHKyDQMw36/v9iu1+uNBjydTnLCfzI++KMB6k/V5PxD0e4PNTkzsGLIBIkh55xiToZjzNkoGaZIFzQzaqIYI6WMFgwqRiUVXubUdx1k4gxIs8U2ABHtH59ySqP3MYSUs3CDFeJTeqirqihLrZR8XxvjrGWlFkKSbPqpNMxxaZdpM+VWAiNc+HB13XjvBbd5OvXW2pcvX+acHx92Dw8PEtsOp+PxcDh2p/mwKEUHpO/7p6en47EriuL169fr9Tol+u1vf/uv/uxnIieNiHH0p9NJriPyngrwcDjEGDebjXTzh3Eoy7KoSqUU5yRaKmJHYIwBYjkBx2E4did5d957Y0BM+BBxUh2jlHMuyglSWbgiJ2Tmp6en+/t7mcC1bSunsHR48pmV+dknr5gQQSMwYiZOkDMyWA0JQWtljFqEjRCx64bT6QTwom1bmjIV3/d9MxUTIpiFCDpnPwyhLKwcBLiMdWMqaocz21Li+sKwXKo0mEVspUaRIntZmcvilN9frv/Jej7/fXlo+Iju+XwXfsSUX36yrKvzS/24TDzvzcJElvgoPMOZusoSwlNIMkle8MnT69T6xy9y7nciKunEKKXUZCiTCYhwAYXSRCHt+74sbOGMpESuLKy1qBUoba1NDCGMZVUUhT2cxrqpdo+dqIcU1vkx1nUZwv+Htj9rkjTJrsTAq7t+q21uvoRHZEZuVZWVVQ2gG41mkxj2wiFlOPM2/3FG5o0yQpkXzpBgA0I0uxsAgUKhllxi9/DF9m/XfR7UzMIiMgvSC/k9hGSau9vymaree8899xxFCMEmUrWDB29c0NZbjzyiMpXO+37oLi4uyrKgGDCE2WSUSpGnnRTU6i6RTJsoEWkIIXmeaz14bzGixip0kD2KHzmyheMMnHMuylizgxZrVLajFCuliqJUSnW9m4wTxgjnlFJMEcEodG0rGaeERKyy7/s0TY0xxSizRp9NxtppSlDX1KMs41wOg7ZmP0mSJKLv+yxnGNEkSeKd7/s+hKCUkonwzgTrBON6UD1tp5NiXBYh2ON6ZoLFWCuEGHrNmYyKFkIIQmj0DoxjFRFh3i972KfIMZOLC55zHhWpyrLU
WlunY+KLDw4tVCQx1hpjEDFCCEK5g/Ds+XfOB0RwnpVlmRvjFovFarVqBrRvoCKqten73tlACKuaG5Ekn//4R/2gN5vNcrUpRuUf/ZN/ohG+efX6+auXajBx8BchNGgTpQeZ4BDGb968ef32pQ1+Pp9nqOy6rt5tuUx8QAQgooFt20bn+qZpnNXn5+cxaznu61OQE+B3Djb871/t/WDMO0wMoYigOBusD8YhZRxnWFtvXGDHDl9AGJAxBqDHAaLvhbPWGItdNDkDcqj34/hB17ZWG2V0cJ4wyhnHlGBAo5HAgFzwQ9f3avDWUc4E40k+ssbGIBcPCM4pkSi4YJ0F5DnngqWCcuSD7oe0yDtjql0d5/Yizm6tLbJ8u93ebe/bto1bK89KRvhifeu08QCUc0yp9V7bwVr7/PnL8/OzL774QgihtO77Xsr08vIyYo9a66qqrNKRHIwxHrqeMeaMDSFIKafTKee8ruu6babTaUzWwPkYKb33EYMyVscjvq7rtm2FEARgGIYsy9J0rwJq1X5SrSzLybQcjUaRHRNlqJbLJQBEbcC+74UQhOBhGIZhiNVP1AcIIWAUMMbWG8AkYEcpwSag4BEAxeAwAADFQBGKejsBA8ZUKdO2vVb2iDoCgNauyDFjLGp8Sykxol3XDQNgQN66QDxBOPjQdV2apqmQwTogsPeABgghGGvdAZ49iTr7OHFcmacAQ0wC3vlQHx78oDh7r2uI3j0n/FDr7vi/4WB7dEDU0THQfhD53nsGfBxXDxCiwidE9RY49PP2FNPo2IBPxNhQhJYROrCyD1pEJ+F5b5MNCPYi/LFc3DtoH9V5vFNK9V1HCcrSkXfGGMP5KE1TTAmlVBmLCfHGaq2TRAjJhtWaEWy1K8us7/s0zRHqGGPrzRbAIwyAgnUBWRi0GZTVDkwgeZk1bevB9ao/vzh7e3OXj0ohWJEn81nnMbu7u6U8QRhRhoxVAD7P0/v7XbyHbdMjjI0xlHLnglIGY5plGWMMY5SmycvbGx8QIaRpGqtNmqaU8b5pKSGMUPA24YDBM4pHFxfO6iTBOPi+qS8uLhhjbduWxfj+/j7J0vV6Xdf1dDoty7Jte9X1ghFCOcKwXC4BIy5FQJDm2WK1zVKhlOEJs8Eggns1CCG0NQSBEAwToAxHsfsom1kURdM0iCAuhcwSnkjrvQtQjEbjySSOqyJCUimZ4N5Dr7QLQDmjB++tmDN1Te29jSyYOBQROxfRhCQlqd9L7xqEUJrkwJm1tuuGfetECESY955SbrTWw2CUlkOCESUQijRpjVWDquvW762TgVHBGLu4ujTWa+d8QL7a3d09vHj1kjAm8ikjdDyZbTa7V29u1HfP0jQtyjHGeDwen13Ozs7mT54+ffrZZ5vNxjm3bSrJRdO1zjtCRJ7nxnvv96PrcXFGE5siTcfj8XK5/A8p9fbX//4g5/fjX/xnb7B5GF13h9F155z32Pl35trOOW96DEhQRgh1xg5df352ZrUxZu/RevSrAx8i3dF7HwuXmLbHvOk4NieEKIoiJL7u4iM28kdi5R4l24dhiNN78UGlhq7r3ty+zbJMiiRWQuv1uuuG+N9N02ht92iSc1Hbuu3bIxAfLRTath+G4auvvuScC5FQSpM0lVLmeZmm6XazyrIsy7K+7y3XAKCUapomzusMXR9pnFLKyPFp2/b8/Dx2+OIdi5OFGOMoMRXrG+NclBC02jjn0jTbi55Y2w+aEMIom5Sji4uz0WhECDKDimOhAHsGWsxw5/M5RiGuyNOhwMi9JIQYBR48xuDfmVcAwUAxhAAYAto3+gAOuprW2mEYhl5rrZ2NS3k/c8k5xhhzJmOqpDVQSqMmGUVYOaP7YWg7nWanVRE6oWJGihocRFzDYTrzqFzzQWcugrfxpp2268L3Zk/hEI2+v7V+cL8d3h4+XnBSC7oTS7zjy0VQ4YMXjU9sD+7E+zdx+AiCi2ODMxxvMnqPyRnQ6et6tBfnfFca
7pXPYtgDhAIE64yOqrmizIuq3iilCSFSpBhjIFj3BiHkvHfOCplySq0aEEbeh9iWLgoEyFMMbT1EYSvvvbPeAXS9740fTDAelVnig0UINW11dnb29ddfX7gLBF4wMiqLTplv7+z8vBVJLjhVysSv+KjL2vd9kopoqhDbHJzzPM/JwQm9bduI6UVtv/F4DAhvNhspeQAP4LNMWmsJChdns81mx7Jod67TNEUASilUoqqqOOdd193e3n/11VcIofPzs+12++jyyiNNCNZaY0wjzTK2BjDGzhmM94Ki8XQCgKpu59NxLC6llJKzQfXb9erzzz8N4CgVeZ4neSqE2NUVQkjmubW+bXe7XS0lL4q9O3REbuKJBwBRuyO+SlxLsdCM9J+YvB7GbbGUglJmren7oaujMLcDAJkmQghMeZRtGk9GZVlSxqMsC2U4z7J8ni4fVqvVAjwSQiAPnRq6rtl8s+kHDYzkRZkkQiZ0V2+H3aZbVEWW52WRpPl0BtvttqrrxWaHMUavXv3bv/qr0Wjy+7//+5999pnz8Gd/9mejcVqW49lsah1YH5Rxm221WK/L0WTPailLBH6z2Xhjrq+vY5r+7x/w4kWoTN7fpe/pXv/A9bvN2r/fZoeoQB88jj4+EAC84DRNeCJZCI5iXBZpIrn31hrtnKWcc8bUoJDzo7KEALobUimr7c4o3XdDtds1bRt7eN77LM8IpTJJxpNJlmeAkHU2QDDW9kNvrEmz9OLyYjwZG2tW63XVtUmazOZn5WgkE8E4A4QGpfqh45KXo1FeFIigQQ3aGg8BY1zX9ZubN2/evF4slk3TdF0bpzKdcyECfIQSQrRRVb0L4JI0lUlinXPel6Py6tH14yePCaV5kadJKqWcTiYXFxeCy67tzs/nVVVtt9t4zlVV1XUdpbSpa0JImqQRnAwhRMGCdFRQzgijAWDoe2OMYFxw4Z2nmGRpRjCJo/pcCMpY3bQIISFkkiSY0GEYnA9CiGHop+PJeFwShK3VEIJ31hhTVXXTNFrrqPwppRz6ru/7rusIIfjA9WCMRUt0nmQh4CgtYq0zWiltnQdrIE1RnuUIIa2MNQ4hQB4h6qx1ScLzPEf709s551SvvA/x5JpNZ0mSLZdL78O4YME7gMAIwRj54CLtlzISwBOCCcVxLBsTRPbe4+HIuzm27mI42VM2jsoGh1rsSGI66iTg9+khx8chBB+C3/u2hOjUSShF+OC0gCA6Rx9C1h5Rj+shdgFPI9MhZO2JJ1F1GgjGhGBKYv8vVvyUUsIoZfSY4Ftr42sBikZmGGMc0cBDQNtXfwDgIHgIaZLGT40Aog8oRkgIYYwO3g99iwJgjNbL1cP9PcMkK8rtdhvAo+DHs8lsPmvahnE+DEPbdnlZauO6flBGr9arNJFvFy1CIDknGJ+fn3eD6nRPCFPWAw5RoTQY4MROJ5PpdAa2ts5labper4ssF1zUVX1xfoExef36FRdyt+t7HWbzfHY2Xa23f/Dlx5yzqqrKsozKfG/e3HAu1pv106efRGF0hALnLElk33eYse12a51RShVFbq0tRkVTt0WeW2MBkJTSO3d99bhrm/l8nkry7LtvP/v0k7Isuq5vmkamqdLqzc3bfhgIJn/4h39otHbObjbr8/lcOffs+Qshk6btirJsu+7Tzz/b7jZccMp523fT2bTt+gCwWq+LsuQ0RqwwKsvdbpslMs9zNXRK948ePeKJwIRkeaGURpRcP37sAlqtNsvlcrFYee8pY03TAACh1BiT5Xk/DISQ9WaTF0VZlgSho1RFDHvxCiF0Xd91fZRhN8ZZa7yHgPF0Onv06NF8Pi/yQgrBmRCcT8YjgtDQd03bGq0jxiY5p0k+GY2vrs7PZ2dpIoPzRmtrtAvWeVfXu9V65b3N8yKA36xXSI7Xq/VmV2V5cfnoUZJl/TDsqgYTGgLmMt1sd3/zy1/u6vqjp5/++MuvipS+vXm7Xq+3u+3rmzdCpo8fPwGMlTZH
cIhgRCl1Ieyq6vGjR+v1OjJ0tDHHhMDtTdyjHMQppuIR+l619x8aNn/XdUqiCwAegYfgQvAeOxecDcOgqaQIEaUtJkFylGdpniabXeWF995bQEopN+i6rhUmgnGEUWTze9jrpyilmrY9Kp7EaiZC3lrr2A9zzlV1HSfwnnz0EU0EpyIgb5T2EBIh87KQnI2nE/BBW62GoW6arm171Tnju64xxinVW2uN1V3bt10z9Go8Hrdtv9vtus5gDGVZzGaTshzLgntjB2OiNXOapoyJEMKjR4+01pyKKMLZti0CMhqNNptNTNOqqgrWxXr3FP6KYwPxoBRC5NNx7GTs6YsBACGKCeA90B+FGGLRHB1i43KhlCZZRgipm05rnR1tAp0LYNEh5DjnokpCWZaRXDoMQxwkwhiTg16G9x6jQAjpB+0PakPOOcZoKom1jlFgFAgOwb0bNfPeEwDGSAjIWiu5CCH4YMvSbVbr+B31vVLKJEkmhDCmi6Qeb53FVkqpfYgx4FRq5x1r49RnnJDjCsQHg6EY5o6VzSmBxe9NR/fXESw95cg45/BJYXT8jk4rSDiBCgEgumOe5oKndyPeydM/iWJmcAiNGGN/0NIEgJiHxfd52k08fWl0ylnFJ5qfCI49v9MpQ7+XQwNj9kwxiOL/Ua0DIEtE2xoPJIRgjKNcWO+KUdkMCgAIRcZYAiFNBCBI01RrC95iDEopaxRFoIMnFHy0UwLQAE1vml5rjzKKOOcHDzwbi5W2bhLBsiQ1wWMAygD5YJ1JUyKljFJ8fd+PRqNhGOIRn6YppRSdzDjG7yuWCPQgYhfvHheUEFIUfLerrXVpmjpnnbOU4s3qYTqeRGu91WoVlf8i81kpNTs7i4ywyWh8MT+v68ogiFCTECLLsrdv3242mzzPo+QKpdhaa51mTMSpO4IEhoACUEKKNI2KZcd1KBOJEDqI8wlCiG5V0zR9P8RklDGRZUm0/XPOPXv27PLysm3b6XR6e3urlOqbejQalWV5jHlN01RVNZlMvIcD5iniuBRCyGLYrTfL5dJpgygpy7Isx0KItmn2nEFCYG9SbZ01nCHEEEYcBB/n2cVsarRzzr19WHDOG9W/fvXm9d3bvmuyVP7B7//s3/3q7fx8lqZpIqX3Ls+zs/lcabtYr6zxRKvxeDo7v1itt//Ln/+vjx49enwm/8s//s+X6+2zFy8RpqvF/cPDw3gyG5W5DygeR21TGWPiIKNzbjQaRW2v2C8MIWitEX4vtH0Q1+j3f/CfGPm+/zx7OMUjD8Far4xVxgrDac4p48YY5/qQMM7IQDEXIkkSEIh4IIQ4ABSCUur25q2gDO9NMsF5H49jJgXGmCcSYxwP7hACEDyaToQQiBJCcD4qMcaxv9XqjjDMpBiNCiZFkWY8kRSht/d3Q9ttqk213VZNo/q+132wwVqNEAHwUeKQMFKOi3I06rpuPB09/ug6YuiEMEoxpRwYNFVtO0CMSSmTNE2TXEo5KgoAkCKVUg59H9HXGP9iuq+UIrA/Auq6ttocmahH4F4IwTgHhPxB9Pm4jimljJCmi3M2VkppvWu61nvIsiw6E8U/iYfgdDqNPuwQwHsfmzRxFD0m0WVZRjHGruuGYYj/iw/CYN77AD6EUDUquisopcPeG9aBh4QDZ4wQZI1D3kVhd++9N5CmNOKcRTbBGBmrCBk3VV3X7fl54VyIBB9GeZoEhuMwUxuwiy09Y6xgnBGKYY9m78+4ELz3/FDnHUFFdJCZPrbTTmPP9xgo++v4J8cr4kXvTaljHBDyCACCPyjERwO8vUkxQmA/xD+OxJkjOAmHFiAhxDsHCEGcwTiJrB7eRdaAY1TDOGACe69EiJJlx8+O99pmcDBLwnEnOh+cd/6dnt/xaeOdRAeRhH0v09s0TXe7bXzP1tokS/phkGniIRjviMNKDZTh6bTcbqvJZHZ/fxucx4BU3yrVIwzBOUKQMwEQUAbewK71VaeMJ8YFwrjWVvAk0pW7
pu+6bjKZzMaTh/WGU+ASjDFB4dlkmuXJ29s3XFCle0JIXdfGuEh3tMYjHJxz/JBOWadj6DroszvGGAbECMUocMbqXZXmeZom4G0ihORs0dZnZ+dMiL7tYpjs6rpte4TQ5eXlZDRu25ZTwhixNgTnVpsaYRoAI4QJox5CN3QylV3XJVmKKVGDsc4JiSRNCKUMM2ttAI8QpJmMTuTHNEtKGULoh4FJIdPUONf3qmm6JEkuLx9RSsfjqVL9y5evnz59ijFSSr1+/Xq9XmdZVtVb7/2j8wtKyXa7ffv2bV3XkemmteZcxiy2qTtjNsc8gKdcCiGkFJwzTKzSq4d7Y0x0HEzTNAZIjGk8A6vNklIqmCSEBBcP4UEp9ejirGk6QOwf/f5X/zT7w9Vy81d/+ze/+tWvZuNH6/W6a7ej0Xi9edjtdiLJfvLlF9lN1jTdzdu3q81yPD3bNXWvzdPPPv2rv/x3XCRPP/38q6+++vrZs+HFK+Rx27Z113HOhUyjgEbXdREAqzGOtqPOOUJpVCOx1jJO323wg9L6cQ/ScMrD/k+IfL/7eQCiaWxA3oOz3hhntHNiP/ER9l17MMY0jaUyMdZiIN553Q+66bquU/0wm80EZYSLgEAbo6xhUmRlsdptGcGIUUIpEZw5GV/98uoqyoQnaTKZTLIsAwCt9TibCCYopxiwC05rs37Ydk293m6Grm/7RivlvCecZCIFj3TfRfIXY3HujRMSNQgZRhQh5H2w1ngf1dzQq5s3WZZdX19Pp9MQQlPXRruYzEaLu9VqZY3hnGO0tzKPt45zThEOIbRtu16vsySNRKwYzo/t3L7vkyTBQtRVFc+mVCaplDEbPXaq4jHtvWdMTKfT6exMa73a7rTWQqbT6XQymaRCck7A+uhIEumasaaMubPWOpplR/6Lc84qdRSgiTQqF5A51Igehb7tjAaMQEgQnHoAH5z3Pk75hRDUAOMxitzroxMQxjjP8/W6jWq8Qzt0Xee9L8sSY39UppBSDsPQ9N3RbdmfmHLFCBFpn8cZtWP9d1ylx6hzSic5veLjp0XYsRXnf8h4/TRkfhBN4f1mwbu4dfLTo//DwWb63cjEcVYv/sIxRsIJ4ROHPWc1duyO9JnYzwsnDkXxPWilXWxE7wM3DgEowtoHcB5j7J1rq7ppGgKICqGtY5QqpWK7GiEkk6TtuqZrI8qqrWnbNsuK6WyyWq3y/Gy9IECQlLJuG9U7hIEgIADOAcaAGfPGKAODsn4/u4Jc8FTwoWsno+loZLU2AHgyHq93lRCAMHRdl2D06PpxlJwuiiL2HYdBx08xDENVVVxQ51xRZAf2mWnbriiKY74ipTxmM01T9UN7fn4OgAiGyWTkrUuShHM6KvOb2/vZbLJZ1xFu4Zx//vnnTVU/PDw8ffp06LoXb988urx6eHi+3+mHEuQEUXeM0f1UtXORURknkjEgbyzFpNE94SLSC+J5HQCA4JhMbzabqmrjRmjb9vnz55vNZj6f5Xn+4sULSmmayrdv3wohqqq6vLrEGG8WS+ccQjiad0bpYMbEZrMZj8dpkhPMQghJkuzV8LHfe7rFIWlMKKMME2sMDjC0Xd+0xxVIKWXlDOHAEBeMEU4zQV2eee97NZzPRoTR7a5+9d23Tdf+gy+/+L/8n//F16+3Usrb29s//zf/9tsXzzHlwZtf/+0yLcpxmXg/X2021XaJACPkbm5e//7v//5f/uVfvnj27dNPPzdKGaWtD9oCYrTrOsaHKL4vhIityqqqZrNZ5LnQUzvo3xGPIs/gXUj8P6jU8ydy9d57E53WjfMIlHF9rySDNCGJzKQkAN5BUNYIHGUDTMCICRFn9bz3YK1Squ26wWhMiRBifnURGflRNaccjeORTSlNiizylfeyx4RkuMhy7ozvhratu0H3zlhltB56JighKU8pRojsBbB9cHB5PqeUU4oBcDysleqdC0oprazW2rmAMVDG4vH1h//4HzdN03X9YrGI0MfV
1dXl5eXQdV3XPdwvN5uNFGI6nVLKOOdKu3hsee+7po3C05HmFHHIo9B2BCo9hSgkH70RyqIo8wIDYIxVP0QDaA+h73tlNJfibDovy5IQEv3/CCFFUUwmEyEiByy6xYX407hFIzgeY15UtY/9xRgCAWBP2LHaOZekI2c9pSzLUtW32/XGOcgkUEYoxc6i4HzwgBDg6Ojt7CmRck/AMaYsx5TexzegOrVZb6OJDB52jGJKUBzfZBQHZ9p6NxoVGEdaorfB4oBJCBhhd0DnTjHACB2HQ6l6Gm++fx3XKjoZPDiOAIcfoiufhrojfniINJF/uWdhQoC963oImGBKTw22IABwygDAHV4r9ucChL31RPDHIBf/xDofE5EjqBvVco6DdydkHEAniQLGGCPAAbwP/tANxAQP/bBer/u+l5wnQuqmct5G+PEgDU8opbum4ZwjirteGasIKYs0Cc5GVhTnnFIKwVsHiIDgYnDG+7CHbwl4B8b6QZtCAkXxTpBq0M65siy3q43VJq66ImPVYLrBidRPJhOtlXM24vkIoaZpohJY3JJcUACIIwfU4GHoAgLCmLEeYcwoTZJkGLrIdVytVlEdfr3eMEKLPN0st+NRQTGJoFHM/Pqh55zPz84Z5ZGnNplM3tQ7owdnVK9MQMQFEzDS1hrntLNlme+a3WC0lFIkiQdcVZX3jmBGEUKcYoy1GRAKzhkusiQVk8nEe991XQCIbLthGO4XD+tNv1wuq6Z1zu2qWiTpelsZY97evimK4urqwkEgnKhOPSyXdV1fTmfxzXvv2rZdLBaU8jRNi6LQyiLolVLDoENYx6wFwGdZlmaSE8qkGOVFPipTKYdhwCGe35FrxqWUjLFXdw8Y4y3GGGOKaHycc+6NrlVLKM2y9Oc//dw6NxjtVFtyV+/uxin5v//f/uVqUz1/8co6mM7Pk6JEmCvnvv7mu//tF3/bd4qR8OrF15+eJZPJ5Nnz51Xb/Pin/4Dx5NmLl5NsZCG0bae0rarKmCQe7EKILnIg0jTOdcQNSE4U+D6Mayjsq70fConwHxr8ftfzAIruKuARoID3YwzOMyqMMW3rUYKFlNZaYwNGIVBqveMEonZ1xgUSKXJeKeWt84B8CECwZIlIkiRJaJ4I54WzGFCSpaOiZIJHA01OGeUsjjFY5y0E4nF1u4pZVawY0jwpaeG913rYtzqCPZ6PzrmAoOuabuj1oKx3GAimhGLS9g0jfDQpi6wUifTWtX3Xt91REC/LsjzNomnRarXSw7BcLtumz/M8S1OttVZdCKEf+piW1nW9vH+IA3OXl5fnZ3OMsTP2KKkQp+xFkfR9v1wud7tdkWazyVQI0TWN6odY/FFKB2uilGV0nzDG1E0bU9EkSSLmA3uuY4iiX3H+DyEUR9r3nM++d85xRqWUx5H/eOrFw5cQYjwgQgnet8ecA4KAc8CMUBJFmW0IQAgApt4BiWPshBzpbYnMuq5LZTKZjLXWUqYYkYjPCCG8whjQsQ0ZM5i6riM5k+J3xN1jx/vYKXmHEJ7shL9nAR+rxmNtDd8zZziO7X8QVH7XI/j9KXU4aLigE4onOmGi7tmnh8zgIJoSkiRBBz/3UwjXH8TYjq/ivDtl9H3w3j54JL4iOujvBOf3siyAsiyLQG0UGRGSUUoxIc65JM+2bZ0WuTaG0sgmGAhmQrDd/S6EgGE/N8YwBARMsE47ir0D8AFhDBhAWbfdtWcCWeuZFCSAtXa3qx5dXIaAtNYiSTmhWZZt2q33EE/Ytl3GsbNEZjH5K8tx7ApzLqVIle4YY0r3hAatdcx3438whjnnTVMde59lWTLGCEJScqP0oLpJXlBK1utNmeeL5SravEyn06urq8ViUeQZp0z1bVPVo7x4eHhI01wpo1Sf5XkIqGkambAnT67X63VsQEAIaSrbto3yFzRCOADGGMn3OhXxow3D0PQtpbTIUuvdttq1bbteV1XVRE5DBGmsNbG/M51Ou67L8/zVq5fe
++12SynNGMcYxz5FluXDMFDKsyz79ptnQ6/zPE/TPDYs4ipSqk+lxBjatt3c3r1WL733KIAymiBMGZZSpmmacIEZxQFAiBhvMOEWrDeD7huMcTEaMQTe+XanHAZKqZAyS3nf1F98+SkRfLXajVL26ePL+8Xq2xevXr34pm6HbDR1xmE7WNVQBgT5r7/+ejqdns1mddPd395mxTjLsjdv3xTlOMBezCHaC0vBIgbrnJvNZkqpqm0jC/3DMIRON+Ch2vs/rtT7gd/xKCDsQzDGEIBU4CRJkkRCtPghSEPw1nkRWEC2V8AF0r6qKmvt0PUuQJplWVmIRBLGCKONGlIhs6wQlFHBGecegTOmnIyDdYPRBFDACLwflLZKY6w551RQwgkAIAzGG2ttOSlgnwvbUyejIs2Grg8EOGeE0VSmMk04FV3fB++Ntc74XnVxdhBwoIJjRqPPdRw3ttY2VX1/extCyNJiMplghDabTVM13vvBDJF7UlWVc+78/Pz8/Hw8HhNMjodgnKmIX7mHEC03x+PxKC8i3kgIqXdVbPLF2hchlGfZaDrhnEc5WiFEkhfxFIjfhffeeWu1jryAqJwSGYNhP84fjpXEsc6LZdNxULRpOsYIIbhpmt12CwBpCowTOJkHBwCMEabUQWAI4oeK33iELFarlXPu6upqsVjFNe29L4oRIQxTCgCM0EjkEULkeR7b+8eRuHCQ6DyNfHBomKGDoMsxVH/QzYLvmdMecaqImp7WfLHqOqXSfLDUT4PlYZf9AAvmWJydBkuE0KDUKSh6/IV4/w9dvXdUl9MQGOvC+Dx+P+J38Hw/KMEj58H52IkM6B25Jt4iF0y01ot5W7XdMUy0HWbTMaYkvqVh0NP52cNyEeF3IUTmwna7FYmcn8/Cs1Wa5hBMnB8XAgYP8SDmqhuMd85hwAj5thtu7x+ucxacH09KZD0EtNvtPn78BAVAIUQ/PyklCiA4RC6YMSqqscskbZpmu/WzGQshFMUozh0ZOwDAMAyMkVjVYYyN0UmSUIrjl54kiTFqNBp5G7qmzbKsLMv1epskSbRMjIukbVvKkslkMpufh+hGKYTVJk5PoeCqapfnFwghYzBjTKk+9gjia8XFVle7qK6ZZblzDoKhjILzWg9FlkajsbjMjrMrhJCoyhtBoKurq8lkslwuHx5et237+PH1H/zBH3R9EwFVdBhjKIri/Pz85vmLSGlJ0zSuuPV6e3NzczY7V0p13QCAI7ITU+Qn15fB+aHrhq6nmEzOL6bTaZFlz18998bux5YIdca2basH5SmWUmZZJoXAGBPMYkZebR3nPGCkrXEIkiwNwfRd+NHTq67rbN9fzbILlK82tdUy+fEnX335o24wTGZEiH/4Bz//zdffPH/9pto1rm27rkuSZLurf/vbX3/x45/++MdfzOYXv/7N10AIofg4jB9pfUWa1nX95MmT3W4XEYimaRhjkcD1u+LRhyDnf+L1g8+zj7cn4mkAsN1Wo5ydz/L5fD6epMgp5weCgVJilBZC0ICQ9YQQDz4yD43S1tio3MOlGKzp+356OT8WKAghwlkmZaSKxONA2ehAuxdoRwciYlRp2bvFEhJDBcYIAGIFHeFvq3SSJLP52UHZXbdtV7dV8Pva3xGHDCaEyDQhhKw29WQyKdI8mqNut9vVYr1ZrQHgYj7Ps7yu677rYjCLb6zv+wgrPbq4/NGPfpTnedd1u802Au4x5sVet1Jqt932fZ8W+XwypQhX661zrsgyY0ySJB7CdruN1WE+KsuyRBaklEImeZ4TLo5K7RAcCuCcj4Wv955RGnEMdIDIj+S3+JUxxjClsb8YIyVCSCmb5zNGoNptu64TAqeSI0TUYAggAIcCxO4RwhhIZAzuu3FxBP4oBHN+fr5YrIZhYIhGMQ44UEtibIvLIMuypmniFxoDgIP3OJzH948PDgzHuHhkcp7q1cH7/Tk4ae8dAVJ8onB2jJenbbPwPo3zWMY5o+GHWn2x
GjtltcD7rb53VSCE+NUDADoBRdHhS/kAdN1zNU+al/7kM0Y020FgjCEc5Vh9CMF667131jhtEEJpmkZXgfh5y7L0EOLpbMBnWeYRROLDZDKJjnEBwWw2E6IRgvddZYzigjBG+sH5qCfOjLLa+4BC8AGGQa+2m6ZJg/OT6Sh+WarrowQlAhLCfpARIRB8DzkQhMbjcbQdrut6GPYVQOyCRxDy4B7s8MnwIiHkSFGSUu52u9ls3jVtXddXV1d5nr969ebp06dgG2PM2Xz6+s0dY6ztuqdffJalxf39w3Q6xZSsbu+eXF8453abZXzaPM8pxd67ruuim0rcNYQwhEJVVWmaUsqiR7x3PoTgnYt5W0SDIxl1P6NCCOC9v3TsdAzD8OrVq67rxuMyz/Pb29v1ev306dPFYoFQaJrm6uoqiu7WdR2bglHaN8vys7Oz6fSs7/uuHeJpgxBijCOE433o2y4ed/EGUkqNUg9tW6RZhA0ZY1mSRjqMMSYps9hGvb+/s8pEN4ayLPdhmFHAKCvyPEuV0Zvt5tfrBylSmgglsmIyfXw5H41Gd8vN3cMKBf/tN79uejM9m01GZduPz2fT8/HZp598frda/Po33zTdcLdcv7m5vf74048//ni921V1e4S+grdaa8NY3/efffZZnPKKXaEIDv09cY1wkWBAGBBGJ/9G97IAKED0Zj4+HrPO0yPgmDDG4+lUkwIAPELBew8OI49BYzCSuTRBo5L/w9/76dXjWTfUnWoGq+uhU95TK7EjJBBkQQ/aKqu0sd47HxClJBEsS0SWJ6Mym0yLyTRLGMGIETYqxtPJVPCk7dR2W2dpobVru7brem0H5611xlg9m0+VMcY5kSRZJimjhIFMOeeQZoJLwMRxZrKMpCmi1GqqaQKIh8H1u2bdqg5zUownAQKmHDD2wFwA40ANvu/NZ2ePEsR0069uH26ev3p4c6uajmNyPp3NxhNOSd80bV1brbUe2rZ+uH11++YlCeH3f/7VT7/6EmPYVhujDUtEr5QOIRuV6WTkKem0Ud7Xi7cXs8l8Mg7OWKOSVCCC17uNzBIqWNsP94vFMAyj0Wg2OeOU7/p2MNp644NDOCSc5inNBLWqw96CVXpobT9gCJmUeZqyHFEO2vVNVwdksyLlQhrnXPDaOkylsn69a4FwoMn9cid8mJUFWLe6XyhlR6MxYXK9rYvJ2abulpvOB+CCeB+0VT5oryDhSArx6OJqMGp6NvXBW2/2s4NWS0JM1yKlHs3n0zRtg+NZ1qkhIMylEIxxjvu2SRkThCVCCMw44QkVCU8FEeEwrXFcnPE8jf3I08IrPug8EEIZ45RxEu389hEoIAQEgCCEkcco4OBRcM567xyEgAIQBBgBQQgDEBLikA4CT3D0YfIQ3tWgp+EwUjBO24fv8EaMfAgmeBu89SHaNUaHPkwZphxTBpj5gD3CAREmKKIEUYIZxZQQSjElmJJjAfduVsE575xnPFCCKQ2xHg0QvPXKWN2rtq13W9V0grEsSb2zxujdZpUkCeaUCIlFghiTiQjBTIr09bNvcskYQl03OE+2zZCWc2u369Vr44br6+v71ebhwV1czlRrOKbTrKg39Yj4gsOgYV4ib/rpaCTTkUgy5zHi/Ne//XZ6fka5wDQEsKNxcvP6O479JIOnj8ameRhlE8nE8+fP00TevHmFwJ6dF5xjYI5JIlJqvLHgqrZR3hIhQGkc3NlkvHq4f/r46f3NXS5zHHCRjjaLTSGLJ1fXqus3i+XlfJ5ybkA78M55pfpu6D7+6EmRJ8++/Xo0KmZns6+/+6buOpEVi+2O5ZNaOWQHyRCAWy0eAPzj6+uiGPXdkOfFzc1bbxyE4MxwMZ9xFDLJGEp32yZJUy4SKjlPEpbJdV3Nrq9//d1znpeT+eP7VUPF+Ntnt+uNmV8Vq8160AOXyXg6S7OxMt568uz5zdNPflyMJoP2q81WJkVWFptdjTwjVGoTMBF5Mel6s9lWjEtC
2Wx+PpnNRuPxeDJBGAeA0WSclaO665X1NiBlvcwLlmbbtitncywSnhZEpMpDNWgdEJFp0zaDNlwm84vLi0ePsrL0GCtri9G4V7ptO+8CI4wAxh44pqRXtuv9oIgLtmu7qpI4XM/n15fTSS4ltcG02LZn4+RqVmTMffPt87/8i3/dt9UnH12fnZWfPH705NF5u16eTYr13VuKbEKoHVoW9m6U/dAKztTQ/+xnXz377lnbNFmaUkKcd3sxojjYum9tA8IMECVU/PC4+gegzfFxjN+xxd6r596naL+j2AHinCZJQjEyqnNGJ4KXefbZZx8LRhaLh7vbW2s1JhAL5q7qh7531oAPEDwjJPgw9L3RxkMglCZpmueFTBO61ylAeVZKKeu6ef365v7+HmE8m81ub2/bth1UH+nCUa0nkl/3/XwCNDZmGaGUpmkqpWCM751vEfHBG2MxF1FEGwAxKhhlIWAz6EQmxri2bYe+x4DTJB2Nyul4opsusodvb2/ruo5iY5PJJDLQVqtVtA1CCHVdt91uteqvr69/+uVXjx498iF0Xee8I4Ra7yhlSZpImQQESqlhUFrr8+kIAGIidkg+SHzyvu+bunXOHUgr0lrb9B3GmDEqhchkIqVklCAAY4wzVqlBKRWcZ4wliRRCeOKjEqN3IZKevQtRCAYh7H04OMMbrY1z7mwyjRP9VbUDDLFx6L1nXHTdMAwWASBMvD8MjYaAEGR5Op1OnbVpnmCMtFZD33HGjNbgg7cWI1QWZfDBIE8IUUOPADFGOaUIQ1M3BOMkSWWaIIT3BH1MQghwElo+IKp8QLk8NM9O5TFP5S4BADC8a4PvpcL8uwUf6yTYG9mQY5SNT/D9xtv3t8yxi3yKc6Kjg3Nsju8RiL2YVXymI0szeHskv3z/hb7/8aMCJxxlC4OLfuLWqL7thqF3zgXvIIAxuus6jFHbdkwmaV5oaxjjo1FptCIY9V3jvffOd53qhiEAYZQjQuq66VojU0kw98F4QItFiymU5cgOtXNBKRAErq8vtFZeNVdXVwBgjJJCrpeLyXhydTVfLh7arsnzrKqq0ahIkiQvcqUUxbwoitV6JYT45pv7x49HIpFVUxd5lhU5QtC2rQ+BEBIAhkEF647tvTTJIvRKKaWUWWsTIaPDZSwUAMAjSzDbbrcA+Or60dCrxXJ1eXk5Go1fvHhZ1110PqKU3t0/GGNGRcEYM9ZqrQmlWZZFsbQ0TaKDCsY42ns5Y621za6dTCY+eKXUFz/63Hu/WC4wxk3XrVbrj58+ZUxUdfPNN988e3bTd81oIr0PWZYxJoQQmODVar1crgDAe9t1Lec8SYTWum6qh4eHr3/1JgQdbUSjo8tutxuGIUmSmADVdR0n/Jxzt7e3eZ5Hh84joy061HddF2UXj8OOUdWMsii/hCghiGBKiEyTsiiV0ZxxRPBuu33x6uXbmxttjRQCjM7zklC8q+qm65gQnEtlDBeyGBXT6SwviiRNKWHWWa30/cOmyMurq0uE0fNnz7999p3qdVZkBNP/7J/+50ab27sHmSSY4Gi9lGZJRG6KotjtqljeWesAIwh4r9XwTqdvL3r0Hy9Ohk4EnNCJ79cH2w9xNnR909SSonGRnU/HV+eT6Si9fXu3W2HnFcEuzWTX6r5XHRtGcuSjfSICRggl1FsHAMWopJTKNC3Ho6Ic8yT1GHnvAdk3t2/X67UQydnsPEmSbVX98ld/d0ilAyGEIXSUlu6HOv53CM54JBhljCCEtDGAA0GYYBYYRTig4EPwhLDgvbfaGocQIoSlguGE7raVUQ58kIxTwsGHervTg9q+XUStasbY2dlZFJWu6zrqJEViZOyTR6hwOjn7/LMvrq+vh2F4WC77vgdEAJyyJs/LdN/PsMgHxikmiBCIhIU44dc0jVImYjtRXSzP88lkkqapMS7qiGZZlqUyts3TNIXg9KCcc1qpvuustZzS6HyEMY7YbNcO0cYFIWStiY0H50LXDsMw
AIDW2hibZdlsNl2tVnVdAfg0lZxzTEhZloO2cbgPAQJEgHgMAQEOWCv1rpnUN20xLqWU2xBCCIQQiwxlLPJR275DKTsJJ9h7jzzEpCG265xz3vkjusUpOVIuj6yQ4yo9jQfvhZkTmeaDLPU7idsQIpgR//bUJw+d8kQ+CJnxpU/n/I4PwqF7dGwQwgk6CseIFw6h+PjnBxI2xpHdEmMifBjbDhjvaXdw3870sdvnACAazB7/Kt5PIQSGEPMa771MssV6I0IQSdrsttp5wNSonhFUluPF/V0I1qi+rWoq076uinJUluXtXffw8CDlKMsSzJJHj3jTD8aqNE0pHZaVIQEAQGm72rZJkjirwTvOeZqm6/X6k4+v0zQdNn18JCJXTdP1vZqMSdVU4/Go61qPQKQJYTTGIcl43bVKmUGbsiyH7bZvh4RSxpjRWnDetm1c5JzzEFCaphEZO4qyeu8DQtpp51ya5kmStE0f78lut3vx4kU5niqlFovlz372s1ev3kghQghd1/VaEULSPJNSEooD4COrkDGGgVvjjdJxejgygN41FxhL03RT1fFb22w2qu83yxVjkBdp3/f7cFXVbdvmeVlVVV0PWcY3mw3C4ezs7OxsnOe587rruv/mv/29qqrevHnT3by9uLjI83wwdvHm5vZh8fTp0zgufHV1NQzDw2I5mUy+e/Ey7uh41V2fpitjzOXl5bZudrtdkiSXl5dnZ2f7ZsRQW22Ms0rbph+8dZgSTtloMqaYJHlxNjs3zu4224fl4u3NL5rF3eefffHJZ5+cn18Y72wIy+WyN3Y8nco8k0l2NT9/fPWo1+bl61du0P/yX/yzv/qrv/rF3/xlIrPLq8cX5+f9YNte/fqbv7u9v//k0y/+6I+m3z77brFY+YBGo5FxJobwpmmm0+nd3Z3x3kOI+yKgaBEDEJ0XAKP/OCnq4zzTB6n0qYXjEQJFCFFglBJCRS5EnkuM8Xq9Xi/unn50NS2z6ayYTgqZMGN7azUhJCHUWssIoYCDGWKQiDkI51ymKefSBq+7VhljjN1UC28DIEIJN87quq7rOqoqwDtZqah3vDfpYIzsvcKt5ZwhxENwWhtrbTSvwRgwRMQ3PNyv8jRL0xwlqO9V37SVbsCD0Q4HEJhiTINzTdMtl8vtdoeVp5ReX18XRRETw9hs22w2cY4wJk0x65zNZj/+4rM0TauqWq/XTdfFoYI4M55lCRciFnYYY8Go976r10mS5HkREXxjTGw7xwGP6BCbZVmUM4jzv2VZpok4tr6t8YNWUcEo0lmjxxDC2FqrvY74Wxz8iGIQhERPah9V2xmLqoNoMpl4b/u+1XpIkiQtcsYYIISp2DbLaIlBcEAACBEEPuZcxoHRTmuDEFLKFACxq2+c9QDWO0SJ5MIDaO8k4uHgGnpo1AEhZGi72M0NYa9X5t/XmIb3gYoPir9jGLD2PXvmk0oxwLHM29dJ3x/1e9dUcyeamaf//iB58tCy+gEx6wPh6F3Yi8Qo56JP3v4dEkIwJRhjRt+bqT9eseb+4KOdRsF3YXh/K7AHoJQyQhEG770lOBZATCYQsHEBMK+bYbXZjfOUYEhk5rSLdg3NdiNz76UDmRRFMRqtdrVBSDmPCA1XVxffvXi5Wq04obPZzPv7KB1FKVWDavqeYZyJBADnozJumWJU9kMXrUW22+14PK6aOhK7Xrx48dlnn/3mN7959Cir6zovizizBABd1wWPet1FnicEvJ/XVipy3MuyjO1kpYyUkhEW/zf22JxzxqmuG8bjKSHs/m5RFOV4Or29vV9vdjE+ffvtt8ZYOCzaePRpoyPRI6ZrsQ/NGAPnOedRXj9utDIpl8tlUWZpmq7XW+/tbDar6xYhFC0F+r4F8NePr6bKJEma5n4YBiGSsxnkeVmW4/qj2mj329/+9uzszDrtnFFKpVlCAjfG/Pmf/3mkp45Go6i1NBqN+r6/vLx89uxZPHNWq5X3/vr6On4cAIgJcbwJ
EX747rvv5vP5T37yE2vtw8PDN998gxBKkgQFjTElBMUBdkKYEIxzudltR6PJeFxyLq3VlIvr6+uPPnpa31988803/+bf/UU5Hj/99JOLq+vJ7OxiPllsNr1STDRJko3G43Ge4cePcyHervp/8o/+4OMn1zdvbl+8fr1YbsZn8y9/+g8A43/zb//CWv/lT3/+5Y9/cnGxe/X6dd8Pxu+TcmttmqZxc8XmxTFJBYBY+R23238wyHn682OciysY/9DVqZ4zxjnTQ7vbbIauTYWYjseTSWm1ttYYY6rdrq7bYdBKWcEJBOCMogDOKIqJEJQwUuZ5lqUySTGhg9Z10+6quqpbTFGWZmmeOeeX69V6vXHOJWkey1tMEeNcSEYZtc4qrQj1XHDKqPMuRCUwjH0ASigABBdTe/AOrA3WgUyyEFDbDNW22m13fTs4ZbzxqZQEEFivBzW0neo77Jxk3Bo3mUwmk4nW+ubm5ubmxhgTM9a9Uaq1sX395MmTH//4x+PJuGnbh8WiH4aY2yZplmXZbH4WuVjDMFhnKSUEEwghS3iaphGFiCJknAuM8XK5RAhlWZ5lmfe+aRqtDWNsdnY2KkdJIhFCBGOEkFZD13XWGGcsQBBCJGnKKXPODsMwWG2Ni05dhNBhGLwLnHPGuLWuqmqtNecMISyEmE4nq4fbuqkc+LzM0zwHQDZ4hMhyuWlbozQQDISKEPa2Ns4a5yCVWCYyy1PKiExlksjdduOcc9YZrRHBaZYBgoD36hXWaME55wwFIARZY1U/jEbjLM8RwgEQIQQTihCinB+dpo9UzyMmj05kww5r+F2jGmNE3v1NrKViPyBa0EWey7tNcTJoEI64Kdqrg33oH4veH1c/jjGcFmrHx9+N2h223P718THqkWg2RE+GD39XkDst+NxeUvQwCu+9c9ZbZ63puxZB4Ix5H7z1IQQfoB+UlKnHBAhDhN09LHZVdXl5yRkDb3ebLecMPNzd3XkfJOUKGGMCENJGY0KUdptNZZ1vuq7vjZC8HJWCs/FoNBqVSqlm21NsszQpRyPvLYbAGe6ahnPmnauqHaX07u7u6uoq+DA/O9vVu121vf7o8c3tDaUEIORFAQBlOUII101LCLXaQkB926dJRnEgGMcZ0L4fIk+KUhqtDQNCznvKGGHUeW+9643ClCRpqrXt+z7PC8ro3d291mY0Gg398PzZ4vGjuWC8V70xusizOJeaJDJJE4zxoFXXdRFmRAEYYwRjrTVnLMuyYdDG2rOzWZpljLO+H5z3VVVxKZ48/ogz3rXdq1evldJvXt3cvV0Sot/ePPRtE7xXvbp5ffO3v/jN82c3CHSRZwRHHN5DCAQho/VisU3TbDye5HnBhRRCBkBt16/Xm7wou653Pnz89BPnw2q9ubi4JJRleSGElEkqk1Qbu1gsV6s1oez+/uHlq9dN0+ZFOZ3OnPPL5UprpY3VxvSDUtpEXVof4NH19Wq9ef7i5XqzlUlajsaAUNN2RZJePbr+6Vc/u7q+3my2v/362+VqBQgEY4mUgjPV99vNxgx9nsqri4ssz2fj8SgvJpPxdDJmlCilu6a5uLj85//yX754/qLp2sl0Oih1eXk1DL0ULHjnA+R5DgjXdQ17IOcdDOIBA0KRUkkgoP+Iau+oZH/kuX3w+Onejhs7wH6QaJxn1xfzp4+v5tPxenlXrXd+aYpcppkoyxwLVled7mrO2CjPBKVgrWCMC8EYk4xjRhEmxjqlzNArbZz3fjo/V52q69ZZzyjnTGht67oWgiGEMHkHMeG9O8E79Amh/fQui/biNo4UY4zJ8WyiRFTVdrutnLFSyjLNKCHBAcbgvVducGrQvfZR0pcSKApjzP39fWy2HVSR9rciDppkWRaJy2mabrfr3W7nnMvzPEpNyiQtiiJgFNXCQgixGqOUhoAyDs65ruujDE8IYbPZLhaLOH4QlRdiRBQiiVRmzjkmgA4OapGE6YwNGHEhEikZY/4wy+iQiwQz
QljkOsZ5Va1133fOm3i8JomIv7DdrhHCZVkmSUIYC94apbTSfd9rBVFgC2McPABEaTSQHIx22011dX1FCDLGIkTSNK+qLaaExLFozrTWPJVo7/UNsVT12iCE3uXmxiSSRbDxOKx2Wu0dg0GsmD8A/T7AJ6Og2KEYAnTwkj3GLYyxsf7kacMRUIX3MP93xeUHOeKxpIvV4ZF380F/LoqKxdnu+DmkFAAHUBJONKwPTg4f9PM+eMIPANUPwqFHYKwFTIL31jvrXcAIUQIAwYPIU6OscQEA39wu+r69vry+nJc0kDwvvLWJCKMi29V9cCoCDyiAVRoRJCTb1art6hBcngshxGazYxhNp1PrwnK5ZgHe3t1fXl4aY1AwEd15+eJbKVmWptutHY/HkaodbcrfvHlzfn4e4Zz1ev3xxx/H6WxCSPDAGI/iZMYYSpmUqe638YYfR3fwwagIY7zHeBiLnpcA4JGfjGdt2w6Dms3O+r5/9eZGSqm07bru5cvX3sOTJ0+OPcI47Rqhkb1WzsEXRUqput4YQxB2zgVKjTEA6Orqyhi93W4/+vjJarXa3u/m8/lmt3v6cRpCKMtyNpuMx9PHjx9LkU7PxM3NDaU8z3NK2Wa9QyhEjighSGstEz4Mfd93T55c/+hHP/ryJ78fjfE2m433vigKjPFqtVqtVoyx+Xzuvf/lL385Go3SNP2Lv/iLCD5F3nvkkC8WC6XC48fnUUR0vV4vl8uzs7PPP//8D//wD7t2F0KIJN54BMWP/Dd/84vJZPLo0TVCaLlcLRbLWGiaTkWBQxt8MZpMZvNeDW9eveacjybj6C1DOfdaN9ttX9dElrmk00+e/OTzTzZV/eT60b/7q7/59TfPbm5us99888/+2X/5Z3/+b//qL/7Nj7/82W9/++unT59qaxaLxXa79d63TcU5B2ebpuWHcu5kof+QJud/6HXatEAIRR1FdGCNH/dhUZZSiCQRZZpMilwQstnsXj1/HpxSbZMXCaeMUhc8CYH0vQHugvdG8Cg3Fo8qa+2irgkhCFNjfa+NsS4QSilfrbeMMCYkYNP3vTEOIUQ5i6s5nndtqzHGjBDGMGckeBs8jupiAIADooRZa4OLSTWlwKy1zloH7s2b11KIyegsz3NBiVG6rqq+6RPBnTF2UF5b5B2y1mhtrU3Ks7quB6UY55PpNJJN4iRDjEx5URRFgRBq2naxXFqnQwhFOS6Kd0N4GOPYCFTWRGHoKI2GMdZdFQF6ANBar9frxWLVNM2TJ08i6TzCfVHuaDQaMULgRFtEDyoOZsX7Izhjh5aegxAwgoAJwceY5x0gBM65YVBR2JpxghASQiCENpuVDa4s80RmIQQXHey0W693WoNx4AIApoTyAA6sCz4gQrIs0XrY7XZpmobg4qR8mudt23J+4O4TYpxLGAsHd8D9yB2y8QiLRjNKqUSmR+EufNBSOQU8j0EF3m9IH356UkTh01GEEEJAfo9yHoPKKVga3p/Jg3cUlQ+H00+RzFOU9YPe3vHVw2kfAe9H1OEw/B5dLKMg+98Drp4+eTgO44eAgo8BNJpJHD8CY8wGb43RWgMChIkLwKRQSgHmMikWu+rt/bqq+1/86hv3o6cXkzzPxouHt+DdfDytd69N35HCKaURQt6HEHQ+LqdTRJnc7GpKudZ6u23SlBdtPwyD0sAZGO0xplXdjMrMeQh+35OmnDBGQnBnZ2fRn/Lly5fKmCTLuqFngpfjEWF0aHvGmDUO48AJXe+amPxJmVrjcYBgXZbnkvE8TSnGmNI4exOP7xACwthoPSjFGONp4gGMc9EMven62Jjfrjdd1w2Dn0733rPGGORD37dCCMYZZfiYGwGAUipJEmNsCKHIciEkJVQNejweu2C31a6qqunZzAdIZFoWIymS2Jvsuo4ztrh/UEoxJhibWe0YgeA8l+z60eVsOs6yIgRnrd1sNgDQ951SKhFpluS/+LtvQwjFaFKMJuv1GlM+m80urq6b
ppnP52/fvn316tVkMjEuNF2dFaO43YZhEELM5/NIEdjL3FDqnKvruq7ru4fl27sH7/10nAshIj3iMBNp4kjDw2IVs5YkScbjMcIUEHkyv5zEwZLgIx7bNg2nfLNZrxbLarsZj8fz+VxS0m37tm0vn37SDxrSPM2KTLBPrq8e7u6//u1vN+sHwPj/89//93/4j/+z28XyV7/8m5999fO/+ptfPH7yJBGcTieX52e/+fpbCIjhHxCmCOhAEPuPo7TEGfhjeDu6tMRTmx2u4+CRDgYBKNXfvr1/o58LQgQBHHxwWhCcpTljYuht1ynGOEZsNMoxxjxJhOAUgBBsjGnqOgK4CHsfCCDME5GkeZJkQ+gOMloq1kaUUnA+NvAopdZq1bUoQJbLjGaEkGh0wDDDlAbrMcaMcE6Fw84j4IR774fOtF3nrJ1OzvIkpZR2TXu32ehB5Uk6Lke6H1Svq22t9cAwIYQkUjrn7h8eYrVkrV0sFrGZhxCaz+cxJcQHp+84w9sPToo0z/Pj1GrcjZGgEX9HCOG9t9YRwuJAjzE2LvrlcimEfPLkSVyLEHDf9/ENxPLR7pWoIIrwxgrpKEeyV0s54VkgZ4+phnf7VRKVo7uuI4RRSgH5GGm22y3nPMsywZO27a1xAMhaX9W93RvdAyGMEBIJkwABU5Kk2aBUrzTnwjnbNq0xVsoUUyo4PVofuOCjnnK8YtiL/u4RwIyfJYaWo8r2acD7gFRyCngew+EHYS9e3vsYmGLYQ9FjDwEAxJ7cYRYCHVVdED7FP97VkfhEI+20y4iPzkEHbDN+F/swtodYCSEEkb2+aKS3YIxjtYejix5+j0r2Adxy7MR/UN0eL3+w4kOUUMZi+9YGzwmNRkVJkqzvH0QpZZo1t8tt3dc7+Obbl9NxWSRpmWRt21MEZZETD6oZrKwo5fOzad+rXdNLxgaqMcaMsa7r29ZoCznGndLGukePLm9f3gkenIdt1Vycz4IZgg+j8bjXKsakqFu0Wq1ms/l2uy3PxsZZ5DDG+LPPPnvz5i0ASCljKEKIRJFGjDGlvKqqTBLvfWQGpClQSinnsUA8GrbEzlAM/EmSVlUlpQwB3d09YIwnk8nNm9vNZoMxPj8fCyFevnz5k5/8BACGYeCCJknCpDjq3kUi9F7nHQAfRPAJoKqqAODFixdJkkT7iOimGYut5XJJOJtMRj//+c+fP3++2+1i7RWXxzAMWhtCyG63Q+hOSnF5eXVxcSGEYIxyzouiaNv2j//4jx8eHiKwFJk+8U8+/fRTKeUXX3wRl98wDNvtNmoCR5bAQX2iWa1WUsrJZAIHSs5oNAoH47PvvvtuMpnM5/PIIQCA6FAR+Z+R4LPZbN68eRPRrG/LidVmPB199smnZVl6D+Vo9Omnnw5Dv9vtHh7ulg+LzXp5NpnOzueTIl++fQOAJSWWYK305dn0v/4X/+V8Pv/u1duqG2wg9w8LTuCjRxdf//ZXv/fVl3/9y19eXl4+urr4+Mnjv/u7v3MBMcYkYwF8AIyCB0QAPAABFI4F3+/s7aHfcb3PDHjXKYm90Fj8xmHJ1Wq1XC43u81qudxs1rofMAoUY0YIJcgaPZuMAXzfd4xRmQjOxWx2BsgILqTkjBAAYJTE3+m7DmPiA2htA0JJmnGRWhcGPWhllNKDMt4FgmkqkyRJOeeMEmtM33UIhTIvpOTeeckpjnLpCHPOEplQzIINWmkUsDG22tZt03HKR6PxZDxhTFhtm7rVw0AJ5YQqpXfbXd/1RluCCGMMA1ZK17t6t6sGH5TWTds2XauNYYJPptOz+bwcjWSSEEoDgAs+IBBSZnme5cXZfJ5mmTamV4ML3hoTbRmyLJtOJlmaIgCtVNe0dbVz1sSR+bZtAWAymZydzfM8j4EzeIhp2ng8FkIOw+Cdo4RyxrxzMWR6Z4/djiRNvN1TQF1wzrsQkBASY2K0jUHXOb/d7g52EIGQyNKG9Wa12a5nk0kIwXpPKPWAdlW7
3lR9bwABJaANnM0mjIv1ekMFiwKJg9bWOx+AcUY59d5OZxOKUNc2HoW8LHrVdUOXZrkPwQ3ae58mMpEJpQQDss5ghKw2GBMhZZ7lPFrLIhwPtWNsO23jxYTjFHs8RL5T3G9fvgFARBoojj1CjDFG+z95F0QjmBkPjoP/AT7dIzGMxUi59/FBe/OUGKcj8L4fUj6hg4U9fxOjwzADITSSd6y10UvsoNKCjjv3qNhyfKp3Qp0H1o8yihCMAawxMQEK3lqju65jhGJMooMgANLGeg8h2M22ojJFPHnx+u3zV3fagvfGWl1kaZ4I71zXNMF5mbCmbt9W3Y9+9OO/+/Wv8mLkUXhYLBEhCJPVZrutvPcgOCRJarXdbneEskwa6zzG4eLiXArunFqvF2fTcdtWZZF555umSUSSSPny1WtK2PzRlTXWGDsajY2x1a621k3Gk6Iouq67v384arIzRvUwEOSLPBdcBO/jVxWL8fVmmyRJlhda6+Vm7SFkRU45o1wKmXRdb6wvytF2u3v23fM4Ws6YKPL8fH5+cXHeNHXT1N67osxC8AEQpTRAUGqw3ksp4+ied24ymZTFaLVaW23LcvSLv/vbJE0pF03bTc/O6rZdr9Z9PxRFaa1bLRaM0KaqHl1dRoH92WR6Np0hBNFWLc75zM9mieRaqaaud9tt33VqGHabXbWrMCHBO2v0qCwQhODdxfmcEkwwGvqubWqtBoxACl7k2agsijybTsZXlxfn87M8S8ej8tHV5eXFeZ6laSI5owiCswaCz9JkNp18/vln19fXSZJst9sXL14sFos0TT/++ONIpovYacR+67rebDZFMe7abtfUm82m7dpEJt67ly9fPjzcT6fTn3/104vzs/Vqcfv2xhpNCU64wBiCc9booe921Y4z/tVXX1FC/vRP/9X/7//7Z4yF+fxsGBSAIzgkWXF/dyuF+PyLz599983i4X4+n7dtSzCJFmHR4ZJgggCcswQAwr93tfcBO+6YSB53bzyFj6fMO6accRACQIg80ripo5hy27ZJwrI0GY1Go3LCRRQIJQhI3ykNNpXS4uCMinmxdS6RCZU8ADLadUNlrav6dawnEiGRRHsipg+xq8cIpXlKCMoSGQcVaPAYE0ooDsh0ygQDgFHAIQRvhxCClMl0Os1k1nXder1WJgp5eLoXtkcEOcDOg3cuaKsjbKj6fhg6rS3IJJ4vEZyM3bWosXI8+OIxF2viPC/7vu/76liXRBeCiMNEKZPYqwPkZcI3qzpmwbEKlFIyJjDGSqnFYlFXbSRrFUUB8M5YLloyRW5n8DYmyHuZMb/XK4lfUKTewIn8Xd8PEUyLStZZlnDO46xPVHWBgLlIMKLbanl7e9+0Jkl4VWsf64XgMAEhOKYkBK+dj97NmIAy2vmglKnrej6dCiEcuGMkiC9KEIqM86jWcSBvRIcE508uiPYL75c7p+Dhu8rsUHUdC6BDLAx/3/qH02AZ8UN/XOrOu9Pn+QBojdEOTpgmHwy/HjeXP3F+wId/0PvjFtEDCxGMMfZOvWuif+86xvhjhRrDv/fu9A5Y7wghQDCKFTpGIfYVKeEEcSkQIS74wXrtQTsYHDS9u33YFKmQIk2yot2t1aCThBGPq+0uFfLu7m5+flGWLs1HdTMIwQE0oRA7UhhjD6CUJt6PihIw3W6q2SjDjMY3Wdf1w8MDpyz29na72ppo0MMoZX3TbtZbQkiUbmnbNk4KxfiEEEIBrFFt21K0dzxnjLnw7uDK8zxWMM65o55y3EdSpkmSVVX18uXLZ8+e6SFcX8/jbootdmttnJ/ZC6ViHEWz7AGq8d6XZRmrzLpqb2/u5vN5nqT/6l/92ZOn17vdDmM6Go2Wy2XXdQkXSqnz8/O+79fr5WazIYRoo5bLpff+bDYvyzJJZVxOwzAYo4Rgi8UibpBodam1Jph675OiDM4PQ7ddbzw4ZzxjxCgrU8EIlZIzwhEBo7Qyg9VOJJwgzDmVPMuyBAWM
CFDMXLDBgbaqa/q6rfRgAAeK2c3tm+Vy2TQNpfTzzz/P8zwS6Mbj8XK53G63UsrHjx9/8cUXVVWtViuj/LauCEGc85dvXt/c3Dx+/Oijx09kIuq6/h//5E9SyX/yk5+Qn/70xYsXr169whg9fvwEF77abRAV07M5Qe7Vs2//6B/+3k9/+tP/4X/6n/8f/8//17/9X/70y5/+7PHF9NXLN+nkXBDglLx69u0Xn306nU4320r1LaaGUI4ohQDeOhdMCCF4FDh9B3KehqsPYJDwvrLZaV/keI4jhOIxjd6NLr3bb4cn3FMqsMfgyNXVFVhDCIniRg+LOwB/eXnJuc9karTxwQshCIGuUSEEyll8ciaEsaGp+3bQ3oENDnPGOI+qPPggfy0Eh+AYwZwnlGGCEKGIUmr6CjOKAg4WW+fCXsCeKaVGeZkkiTHu4fbBmYh+cDMY55wxykULiJhWQ1gulwggChkaY5x1gCkTtLc2tr6KohiNRrEtjBB6eHiIwlpHUS5KaZ7nm23VdR1CIc9zSkjXdYMeGGNR4sjo4UhCCSFohCIsFjfqwaIhxNyqaZoQYDQaxel4az1jLDjvnTdG9X0/dL01JkKUEZSI4VA7672PEogEYaUGjLEQ0ns/DCryReM0a1RKc861bW2tzrIkAM6LkhK+Xm/u71Z1bQIAAmIN2LAnThFOZC6dc9q4RgdrrVEgJHTaRC2TGMgppcE4cF5QFpgz3nlvKUJRk5oxFoIL+yIIRxzy/bkFfwRvP1D8etczOwmKR+z0ZK3CMbC9v/7fbYTj8yCEQnDvnvNQ7WGMI8PzKJB9ZNPgg7Dn0TDhdK/hExGZY3w7hj1/dNQ7eRxjbI3/IMLtI9lBzeA0HAIAI7H9aUNwB18/H+dVAMDE7QwEkEeYEsoJVpTiEELbq7YblAEbQDlolXnx9iEV9EdPH40n867rANd5UfJhWC6XUsq3f7eenwNjJElk2w1pmgBoxgAz2qmBYRo86noliOcyQYjUbdOpocgYpgRR3HXdwtvzs/lkMgku9H1PCcnSlBJJKa2q5m6xTJJkOp0yH7puPw7LGfHea6UJIQVLJ6NiXCbj0ShL0xCC6ZVSygUPAHk51lorrREluZRSSkxJCGG5XM+mzGr79u3d69ev6zqUJRVJ2tYNpTQaPfa699ZJLmaTKRbe2eARhOCOqy6CupeXl8+/fT4Mw2effXZ/t/j1L3+dpimXYlgohPFoPP7bv/lllKJFhD08LNu2fri7T9N0fn4WnOWUlOVkuVgxQrM8JRgnqZhNxgEcIeSLzz/FiEZpQ6WM99674JwzzjBCg7dtvcMMW2U3ayeoIDSkIk0kT0Uav14HCQ6YCkoRBQJOu071utfKKuRRMS444YLTZDIajTPkEaKIIvr5F0/rut5utxGxiGBp19ZGD5PJ5OryvK7rN69fdl1XluVsNhtP5k/ax03TLB8WD4u7TCZFkd1RFsAThD/++GPJ+ddff4sxvri4KMrx0G4Xd3f1dndxdc2kXC0eCJPj2cWf/cn/eP3xJ//NP/8//dEf/sFf//KXf/2LX97fLy6mZZDiH/7ezyez2Z/8yZ8ImXKZejNcX86rtgNMAFHjowgt4oxxLvfWbB8EuQ+2/WnAO+yod0n0aVp91AZE7/EFAI5wTfD7lk1AIfi2reeTcZZLa3Xb1RhDkqRC8OXiNkkSjBEOWAgRzDAMA4KQ53lEZkMI2llldAhBCJknyb7LiDAKGAMiFBGEMSDvTABgiFLA1hgUEEZEsJQijBzyPiAP4BEChEIYZ6XRZll3yAdCCAqh7/uhW5sAnFLCCaJ0GIa6rttqPxdICMEYQgjWBQ8IEUYwFt4dY15s18VcbDKZKKXquk7T9OzsDGMcp0qV9pzzWEK5Q5kVqZtN00Svnygx07Zt27ZSCkIIY/yYnPa9iqVzURST8Sxyt7TWkd0TK3KlVBzqxBhTtveO8d5rvX/82Kw96kQfVSuP0FwswmLVGO2n0zRF
jmVp0ffD61e3Dw9VIhkmfFu1NkAIwDnE5m+SsKrqtVadghDAAnAEXa8CIkIIKdLYl41lfRQJDEq5ABE/j/3jvQ/q96Sc45sPgD+oePyJ8cJpADgdaCMEn6zb9zgg8e++n/nFXzj28PbV27tZCXyc5zvGOfy+pOcpFBkOo+UnwQ75k4H3WOPu1WEOTUEAQAcl0tNAfgz+338wPsIYjlXREV2IjxO6J5cGjDx6h5FaowN463TfVlXbaAcGgw6oM0E/LDnx4zK9OhtnRdkPewBg6Jrp2XkI0HWt1rbrOia4tAGhHSKIEKaU8dQxzJzziOCqavI0yVMxKJ0IFMlKMWmIgEQcm4nWx0PAxviu1U09BE+K3PGDd9Whg9i2bSMEK/NreTblBDPG/MFO0jmHCI4avINWHlC0WfXe9+1grVWDqet6u16/ePGibWE+T4uiqOvaGRsJ2DHNjViOECIpWdd1TddZ6613ACAl51yWZfn27dvRaPTxxx8vH1a3t7dR+nkYhpitAkC0GXHOTafTvu/ruu37njIS/RC6vpEJj6WC0sMwDGkqx5MyQixVtU3TPEI+SZJxzgWXGGPtdCazbb3VveYJt8oGHEb5aFtvccDB2abaOXDgAAjggNuhlUwmeZKKtMwzMZ5SQRlm7dASIIgicGC8cdp55JH3xrqiKIoiHwbVto0x9qOPnkiZZFm62Wx3u+10Ov3yy59Y6+7v7+7u7n/xi78WIsnzdHo2mZ5N8iwLwb1687oo81FR/m9/89cY0OdffIoxvrm7LcsyE7LISm3M/cMtJqwcT0VC2mo9GWVdvX5rBpFlX/3o0/lk9Ntvvnv9+vWrh7ZI+Prh7vbV6/mjy48//rhX5vb+QQ0dJoxyJJkIlCprrVHOWEROqr2/D9v5wD/vey6dp50SOKFix9/0JGbLPip7YowJiRWhv76+Yhy/ef1SGvb06VNM4OWr58aYs14Lhr13TdcGMyirE8EoxSHshSe8AwSEEsxkQpFDgMCBCx6hgAhBgVGMvTHeGIuRIQCeeGdxwIHaPMmd8XHbAwAOAN4bb/UwgEexDBJCOGODdQYDY9xb1zdtFFWx1mJMs6xIssx7b50ehkFZ570nlDJMzs6mcURhnz4bY5z1ELgUItlbXLZ9F+OQh5CVBeecAGrbHqGQF0UiOULo4fbOWhu8wxhHBwHwniAUSYycC4RQbJ4rtVcVOj8/n07OYnvVORcCKKWIB6t1/Lx7cJXvBSmUUr3qnXMIIxzHnwmxNkiZIhRilRmjYOQLSCkpJV3XDsPAGI0C2RznWrub28XDau08CJkZ5/sOEIXoH00oQhgoI8r27QC9B0YgBEAU75paGS2iNqZzgvHgLArAKI3tTHSy0o6VXBQJi0Ibxx4eIQQQOdJhTg/68EPjcccVTin6fub3w+sffmC1wzt88t3ij4FqL5h7EtgQ+tCc73gdUZNDRIxvck+QQQhRymPYO9ll++D3wSeCvVgaPb6f03+ts0oPWtvjdo5l6F5C1AMARoh4hD3CiDJkA6XgCdK90dZ4DAiQBdwb71oHblGmglGUyoSnWbNYcsoGGDDG8zk0da0dKOPOr66NC5zvtWkcAHaBUoQQIyzcLtrxuHx0NdfaKoUppV3XAA5lmedFGglrlNKyLNMkX2/qrus2250P4H2o6zZLRFFksT89DH0ITnKapTJJBOcsKKPt4Pb3kDLGIji8WG0QwSJJGWPWu6ZpqqZWSslkcnd3//DwMGjgEpiQwUPfDVeXF9PpVDC+2+2GtkuSpEgz5INzNkImzgXAiBBKOY+q0OPxOBXpzc3N2ze3CCGM6GrZiFKOp1Ot9cPDQzkeZVkBgDmXwdu2bZVSuEX1rtJm0HrAEKQoKSFKDU1Vt3VVV1sAjxBKEpmmGefS2RAlFzAmCKHRpCjzkXG6bwcumbehV50eDTIVzlqjdXCAKaKYYYww4CfXjwiigAMK2Gqlu8EFCx5pqzCQgDwK2AWLAg7IYyAiE71S
zhmESJEl0XYbAC8e7sbj6UePr+K4ujHu808/ZkywLPvFL3755s0rDBA57QChnIxGo9HQdR89/YRT9ubmTZZljx8/ttaavkHIAQDywVg9dJ0LPiDsAY1mZwknWvWUi6uzcd+cNZuH0dn1Rx999Obm9r/653/8l3/z19/85leXjx5Ny4wxtq3qpt56QJhyQjmnlDE+aP+u2jvdNuGEUf3Bzod9xvkeehn/jfMr8D4XBgAwhehchyDqFkb9e/P7/+DnEPxut8myJC+SzXax3W67rrm+/ji+etSbYcgnSZKnUg+KMQEYh0Mq6gGMMVW7ZYQyxjhjUTcS+RDAM0wYF4RgwShlmHJOaKCUtlXvo28cwpQzRigA+OAcgJCccx6sWz4surpBCCVJ0nT9MAxN0wzD4CEIIaK3b6+UcVZrazyiMpEiyYosSdI8oUdKXsTuYmK4V20AiJVZ1GHJ87wb9v5qAJCmaZ7nRg+bzSbWhTJJtNbVdtt1HRwaM/H0PObsEQOMg4DRbjhGwRB83/cM8FHJJZZNCO+V+IdhUFrFLmNUh4ldNMaYcybOHR5pn0mSMMa8dxGyi8bNGOOEZd9+8+z585fBoyJPlDJtN1AKmAFCKE3TELwxKgSrdbAWPIZAUICAGa2bRimFKFZKpZLneUkxxFT6eJQfY54L4L0nGAfA3rqI4h5Pc4wxwoRS6tA7HOI41RDe5zqerlv4IWD/ewnfu33xwe+dVoonGeE7LZWjyQMcxFyOaOf338bxR9+/Ii/GH2g13vuw54v+QKiOSyWcCH4ef2StPtrMHqmk+GBVH0JAB07pvkLlOEkFRhwpHQATigEx470JGFPoNby5ux+P0k8fXyRpTkVlGh1bUEWRLtedDeCxGU2VECLLiAs4SpwAgHeQpjL4wQDEoMUw9t4H72LVOJ1OuRBN1VJKOU8QQsMw7LbtarXa7hohGCW8bTvkw2w2s1ZrNThnsjzJ57M0lUki6roWiHnvA0aMMUyYc04b470njCZJIpIUIdR2XV3XVV0Nw9D14eXLl1qHySSPqAnD5OzsbDabcc5VP0SmdCw9QwiL7d0wDMY6QghlPILP3nvG6WQyefPyzS9+8ctxOcIY3942WbZ3DXzYPFjrLy8vpUyr7U5rzSlumsZaH2MDwiGKrcQuGsYoTSWhyHvb94Nzbrfb5nmeZTljghIuZSqEJITsIts8z63VplYYYz2ohlRNsyc9SSkTkcSWEAB0Tfv9pHCP6SFHotk6SyI/H2OMGLHaaGu8dR6CUcNu6FU/cCn00Nc1zhLHBM/TZK/lwvmPfvT5l1/+OEkSo1Tbtn3fRXXiPM0Wi8W22p1dXqm+//W3X8/GszFD6/VaJsnl5YVH6GGxavpmNJ5yIfu68t5mRenM0LbtuONLJecAAIAASURBVJD/7I//yTe37eL+fvVw93/9b//r/+K/+Ke9Mpgnq83m+au3vdIB4TQrCON1229226HXZtPAaW/v9Gj4XaXeB7v9mHieRrsPM2JCgvcIISnEtCymZZ5Lxgm6urp69fzbutldzOdZJt6+fWOs+vSzTyQfRbaHQ845KwUVnCMUjDGMiRCCGoa2VUoHC0QrH6x34DH2DnmM4kQTYIyFYIJnjCKCHMZACQ7BWG3aaqCEUEoZYQRTFDnoHk0n49XD4ubVa/CBchb5jfe3d72xMW6VZ3PCqDK6rtuH5TLJMoQQYXxUJFmWZFkhU0EJ97qOuMTxgIuXEOLt27cPDw9JklxfX5dlWVXVzc0NoWmWZeWokFISQFXVVLtNXe/KLKcMR+w+Nt5lwtNUMi5i+XW0EeFcxL3knFORmxeCUkopo7UOgJ1zOE7sUooQUkZHgDtiPjEFJoREY13BkyhLGwIKASllCEF7IUGtrTVHl+14qt7c3r9+e7vdqDznnPO6bq11WSYiUMYYG4zCTlOKnQOMwcZ5AADKhekNYOqc6bqOU5xIwRhzxlKEHSAcgKK9vdah6IE4u2l9PKl9zC0i
B4di+v0ocsAJ966ncDI/cAyo6B3PJXxvL3y4/k9BVIROZKzfm897r6Tbo4gHrubpS5yyWt4DYPcDDFE55j1ZGXQyLxgOFpof7E104pp0Wgu+e7MEojOKtTY4dwoXE0I8pZgQHFV9nSEERR9hIJgJjgI3xvTKppQD0nVj3t7epZIWicjykmw3cWyOIEQIWAeYwGL58PjJZ2VZ1q3qug4D+ADGWSZEu92lBJquf/nqzU9+9BhR0uy6hLP5fF4URcwRsyTnPNKSYVvVq822b4MUjFKuOx1HHQBCJPFPx6UQjHFCCULgI8IZm9bG2F4NymiEUDmeCCGsD5vNZr3ddF03aDUMQ9u3uyqUBc6ywjkDANG7jnNe13XXtIyxcVFGRoxzLir8OR+O2kCIMELYbDb70z/9U4rofD5f3D+EgMoSxYpTa00Fd2DSNG2aznvwHkblhDE2Ho8DWO89wShJku124xRrqxohwARQoMYoZwwhpBiNOOcUYau0cqqrO4Sw98C479uaU6yHrm3bNE2NMV1wcacTQoau2a7frcOLi4vTtQEHSJxz7kOw2g8nGaT33hOUykSmCSM0Sl+OypxPJ0xwDAgRzAhBKHhntBmC80PbSCkpI01bG2Mms8mj9Lqrm6+++qqu6x9/+RNn7GK19NZTTpuqaW5fffzxxwihXVUBIY8urwat7h6Ws/kZ5Vh3XXDu/PLyo+vLu4f7r7/++nL++W696Zrq//3f/Xe39/c+oC++/NmPv/zq6UePn798/e2LV9td7QIwKorxaDKZXF0/hX9/kPMHA95pX+GDqHnyI+9toIyOx+OnHz15+vjRfFJKRtIEz6fFw+Ltw/3b+/v7EMJ4XCaJwECMMUISxgQcRBebtonBwzlXV21V9z5QoCJ4dDYZAQD4AADBukAoQZgTjhBwyihD3jpvnfFOqbZt20LMhBCJkAghbYa+782gnHO//e1vMcaSCyml1SbqUqZpej076/u+btuqqiI8QhgriiLJMsYYT7MsyyKBvlPDMDQpiRN3Li7xSBuJNEtK6fn5eQhhtVo9PDwcca2oBCaE2CxXt7e3EFxZjlNJ67rebbZaa07paDTK8oRzbqwfhkHrPr7Do7PUwUaV5Xne9/12u+17hRBy8dzH+xM2Njn6vo/9DL83BnunLoE4ikVeTPmttYyJOLjTdZ1zdjweR3nDKFTxzTfPdtsaYwghCg9SKTHnwnpwzgUMxpgQTJIkgIAysBqwBxwg1p5SStA+cvpzKY8ndbwzhBAcDrZzIUTrThS8Q/bYCIxXrF1CCHvS//dC1JFIeczMDh/Qf7Cq//71/34geU945WT9f4hkvgM/DmTdYz/1GKWOJ04MbMd6MeqP7QVID+PqH7T0vv/+I9Jw2uqLjx+fFgBstEg5KTHRoXd4TNda1RvjPPIHEjILiFkfqromGQgGnYK7h0qyN1eX8ywRo9HIWguUbau6LGWrDJXJ3aL55FOWJEnbm5j9BI+sd5TSQfvr82nTrF+/Xv/4i2vG2DAMo2w0KcaCs+VyWdd1kZUR0seI9t0w9E5r8B4OsIofhgFjEJwlggkhlG77wUvOpRTBvbsJg7ZGa8JIhC4QQk1Tv337drFaxlJda1AGRiM6jWM51s5ms7Iot9vtbDqJJel0Oj2bnSml1uu1Uorn3FprnIlpLiGES8m5/M1vftN1gxkUQmQ6PVssFvf34fd+79I51/f9aDQyxllrb29vy3zEGNvtdlVVlXnGRSKlFJKNx6Oq2o1nM2PMdrtqu9p7Owy985BlPPa/KWH7JizmCKEQUDkSxpgs2xPR486N4/8RyIlnRZxNklK+fPnyuNdOLYuTJIGTtnSEDZxzIk2apqnr+hSMicsmSZLI4Dul9hjGX716xTmPC+Ph4SEqoI6LMrZaCCGPHz8BAKXUdDrjF9OHh4fNZkMJ8Rjvqo0PaDYZ3dzczGazyfycM7ZeLtq6Gk0n/+Kf/7P/4V//5tHVxePHj//Vv/ozwujd
/fLf/Zv/9Re//NUf/tEfEUJ+9PlnmIqm7x4eluttVVUVZisAICLNP4hqHzQhPrhI8HvJs4AcAkDUExYwpUIq6welEGMi4cpo43RWZNh5hsI0k3/wk0//0c+/mMmQhO6T8wz3qzS0VDfMNBej5KefPPloPuNWV9UWeY2cMUqNR1Mu8vW6TbKZEKOAhFJeKUMRzhJRJjyXRDe1IGiUJuM8HeVynMpJmYzzJKWYo2iFqVTdmk6nJJkVszIfY8B912832/Vys13vdru6rlvOJecSEzYo0/UKECnL8XR6tqjulVMBOcIx5ZhyzBniHBeZKDIxKZJMEKfaZrvQzZY4ZdtNyjDH3vYdeJMKjkJodpXRigCu6vb1qzeLxQYAc5ZAwBdXF2eTSfD67tWrzfI2E2ySCwq2q6uh7Zy3WVZM52f5+Mxg2ih39/a1sZZxLhMphRBCJEJIwRlCBMAb1ey21XKp2gZ7y1DwBIzTTJAsT60zy/Wi77skkX3fSilGZUkJ0UpppQhBSSKHRgsuwMN6ta7qpsjLs7PzJMlev3mb5WWS5to4ACKTrGqHb7978eoBAU1ZWlAmrHPOaYwtY6ju2slZAZQNPgxEbgzstNtpLxC2OnACHPtxmRKwk3EmOJmMi6GrGSOc0bbpCCapLPp2IMhiFAjBlBGMkAveuhAQRoTumqYcjfKyYJQwRo1RCLwUjGKgGAgKGDzBwChmBFEMjGJKEMEQXfFQcBg8JQGDQ8GiYAmK+KuF4ASnBAMhKLaiMUYII4QRJZ6QQEjA2CPkQ7DeG+c0BB+8QxDQ3ibBB++8dxjidoJIqUWAMEIQwMXONwIPIcT6N5aMGEeuJ8YEHWwWAIBzBhCCdxB8NPPzVjujED5l8fgQPA4BA3hrUQgY9naA4D14H5yzAQARhGkIKABCODqjwf7wpMRDUNZoaxAlTPDByWYgnqRVo5Xy331707Z9UYxZlirENUkXg9kFatKzpRMVGT0Rod6swdtRKhlGueDtpqIBpkWGA9y9WeAABLAOoUwmfWckHZIEbbbmJ59OvOmnWYGN+eLjp/PZ/OWL1w+LRVaOWZpUgxrAgxDrxWpotOAwLQinLmVQSAJmkBQuz2YBwtCr8ew8ScarqgOaSkm1852znXYak2J6Icppq8nDpnn++uHl6+WuNkqhfoAQRJqOPEJSCOedN0OZivkoSzjKOBDft9sVp/Do/Fwmcret6r7nImWCts1wdvFoOr3a7jSlBSLZ3/3ds2cvHxarZjSep9n42YvXzvsvfnSprDkbX0qWGaWX9/dCMDU0QkI5SlbbB0SCMlqmKZNJ1QyLZTOaXHjvkiwjIiknZ7tKcVkCy7LiolMY0ynQ0kJmvDRBtIZVfcDIW4cpT1+8els3fdMq73HbKueDD9gZ37XD0GvvAAPxDjy2AQUPPkoWK62MNdZbY00/9G3Xdn03qMGDxxRzwbmhNBAKlBOeJmmeF0RQHzwVJMJnhGHGMRdUSCYkwygVTCDrVNs7ZbAHr7Ru+/X9fV/Xpu/b3W5zf1uvV2A0R2AoySaT+aNH4/k8LQomJZWUcBrANW2lVVcUiWSkrbdgdZnKxx893q7umnrx1VdfzGaz+4eHdb1lMlvXTT45+5/+53+9aQYqi7OrJ+dXHy03dT801hnCk+z7IAl8r8N3TCcxBAAcECBAkTsHgACBUooQJJNEcIoxTiQnhHR1hbxjBP/jf/h7/9W/+OePL88ICmD69eqBoBCcscZ47xjnhJC6ru/u7rRHETqP3A3JhXOOItz3fWRq0QM3LyYgHz1+IhhHCDFK0zTJs4xR4r2nBLdNu9tslFJS8CRJEKBhGO7uF7vdrmma2Iw8dsWEEJHxSCkdjUZFUYQQ6rp2aC+9U1VVdGCPauVRlSaiHHGuLubIeSoIZoRizgQg0vf9drtrmlobs15vHhYPw6C44HlexBnz0XjU1LvVcqGVShOZSBldg4dhQAhzIaRMMcFK667tmq6TbN9+i5PUlFLO
GGNs6Hqt9dD3XddppeDACHV7A+59My+8Ew1HMe2KCCchJOruY9ibInnvhZRR9toY0/VdLM7yvBSC90qvVqvdtmoH5qyx1qLgIHjvrNZuGKyQmDLWDmrXdjaAC9APSptAMA4uyASPyzxPZZmnWSowhDSR4B0hOPigVNT14N57CPboC0NOB90AhmHI8zxJEhp1y0LgnMd5zd+FT3w/vXPunUv7Me2NfJAPutR7WBK9xxOBg3zM6cY5NtW891Hi/JgSH9tm1tljVXi61zAmRxj2B+frT1sJpxDou7wb3lHP0PtgLxzUPo/F6ClDLRxGEmMBHbP1ALDabLUJb++Wgw3G+15bpZ0NXkg5nU0ABaN6Y7UaOggBtg9Miqwsd1U1GDuaTpQxN7em6zaz+flms1UmYEIpZoPqCeBPPpluNxul4dF5XiTi4mwyHZfjMn9787ptW0xQKlOEiXEuWj8Jmgy6Ct5zgRnFhGBGGYIwDH1WFkmaYkoDYGMdIjTLst164QJQxpM8z/ORtna13lVV3fVD2/Tb7Xa73TlrI5EthAAYJVIkUkjBUylSKRACZ81uu8nz4vzigjC+Wq1X6y2htCxHXBIgpOsHo22S5m03/Pbrb54/e6WtwRinewBDT8bjIs+01olMq6oyVhOCfXCcs6LM47ihlNK7EOmRAEgrU9f11dW8H4aiyFer9aPH19rayWTcDcPZfC6kCAFscIzzYlROp9PJZBzAEMbmFxcekEgkY5xy7gEY5xCQMlob40KgjAWEjHMPq4e67uumruu+V4MxPiAIgSitnANMMCEcMLIWejW07fDy2cuqaeqmub2/f3nz5m6xaPvOOZ/kmdJmV1XL1Wqz3jRtp4311gPinPOyyMfjcRR2SdMkTdPZdEL3IqXAuOCMD0O/3VabarsnUngfdc6yPM3zPLrM39/fV1U1Ho2ur6+ttd98840ox5PJRKbpdlspbWZnc5Fkq/VWWQdAitFouVzfPSxu7+4Wi4e266N0+3sDDL+rwnv/+n7L3aMAieDDMGhntPdZIh49vsYEnj17JhDilDy6uvjo+sq0W00wTkS1al3nOCMIQpqmUYK5qTutbB86Rigj1Dm3Wa1tYYZhqIbtxcVFEdW2rO373um9RpoeFCEky5MyL/I8pwhr1Wut728frDbgXZZlUqaMsaFr27YN4J3fk/IJJoD2M8+ccy4oJuCcq5td1GW21tbDLmp9Tc7G8crznHMeI1Pf97HN5q2LJ07b9M51mGEhEudctWt6NcBBQAtjfHY2OTs7K8syzTIpU290Xddd2+ZJKqX01qm+jyGZMU44A4S11oO22jjwPstShE4YjGivqRWh1L7ruq7zxsYOHOfceRuFpOu6jkAHQsgYk+f5kfoRny3Kf4DRTVX1ahBCZFmBMe6GQSklREIpF0kqRdL17e39w9u7+6puvd+f+N57Z7VRNqJYRTGygOquripPEh8wty54BEVRDKQVYj+TEOFHj8E5h/cI27v5tjzP22p4F3yisPihK3YcbDiyJU9xwv0pf5CMwidSsej96fVTmPcYAiOoeJx/OMYzdNSAPlF3i19uOCEzH6FFxN9jb/oT878QQtSIOQY59P4kw+mWPAraHePTXgrZ2+Oz4fBhthph0Pc7Eei0k4dPjCBOUdPjuw2AbPCY0eVmPXg6OZsv6qHvjDaDxyj3uYegnWv7zjosJK+xaY0jSUZ4winXDhEhz85749B0OpXy5q7qJDF5NnKmSSTdbpZqgE8fJxfnU46BEHR+fu6dqus6ETxNU23MMAyI0CRJAHBylrdt3TVVkWep4IwCwchaSwlxNlDKA8LdoK3zAZPdbkcIS5LM+eBcCOAe7hevb+4Dwi4gpXTbdn0PeYoEl4zgpml4JjhlqeAYeUKQtdZ78NaUowkhpOk6takAsXIyCYB3dSMRnYxndw8Pm82OcvPtd/9/yv6rV7YtOxPExph22bDbH3/vzZuWSVukWlIB1dVVDQiSgBYkQPoF
/Wf0IP0NAWoJEFBqNPSgQonFKhaLJpNMZibz+mO2Dbfs9FMPM3acOCdJoRXYODjbRMSKiLXmmOMbn/nmy692sxnmeV7XdSa4s7au67IoxnFs23Y5O+37ngsym02GsTs7O2Ecb25u6rpOJDVjDOfi4JgfMAKNb65fA+C2Xc0W9Xa77VRj1xaRJn0YkKgtWm9CCNPpHBFPLp/Nz66EYBBjss+9u7sFH6zTh4S8GKO1dn55xqhgnBBkITpnQ4guBrROMyq4oIwKJDF4sE57F6sFzmazuq6BoAuWcJYVkku53W4RGctqlgEGDwCD8lr13vWpNxjH0TvLOU9+UkKIg0QKEQXjUnLOZepzlDJJdgwAzhlrbSblqxefXJ5f3d/fv3nzbrPZTaf1fD4fuqYsSxqDsyYXfPHylHDRD+qrNzf3t+9+5/f/cLtp2lETglrrIs+tN5Bme8e8lf9xlQ8gYvyo/GGgBKqqphid0YzAfFqPp8uglOQ0Wr1b37FoKYlMitPlCSdOihTKkQhvEQmjlL5dbYuimE6nVunVdsMJ5UXZ+SCFSJx78IECMik554LxLMuKPM9zCT70fb/a7YahC9YVecbyklKEEJqmMUo7Z2KMRZW88gJjQkpOKQ/BeR+dM0II7+046nHsQwhZJoSYlCHPhCzrqi4rkUmM0Pf9brPtx0EN46hV8n7DCEDQasMpjRGCBYg2faJJZ7PebmfzyVl2Npst6rre51o5td7sKCEn8wVnzJhx6HoIjjFaFAUAsT5aY7S3MaAUgnHO6X7BSg1aKhtOmzSKSPwXCpjK256hEEJir6QakBbWg5VzWkDTRMd7P3Rd3w8+hjzPCXmMZYhQVWWWF5SwTdO+ffv2u9evdzvjHETiJBeC0+hsb6xWIDMoa1FUZTsqa4K2QIlHZhExy9h0OgUIBEM8IqMSwp1zmeCccw94GA8IIQznHwvPcf/tnkHw4Xp93LcdV5rjAgYf6HD2FTEdz4HieKijh2nH/ofwj+TTvhdXHB3nRxvKEEI8KmOUp7F6/P9xl48q7rHTWDxS+8H7FvODQTt8ON386JgPBe94sPfRnwFANw6E8noy7fp+M/p6fgmIMs+CNaM2D6vVOPTRAWNoXOiGsRcxBFfa2CkXMZJg8qK+elr3gzLOzeZTejdIRiGYImPeqmDcyRyfXJ3EYBfzmeA0ePtwcz2pas5p2w+r1dq4sDg5lXkWIOrBCiHkfD6d1IKSGJz31lo7mc2sD33fA1LvA2PCBr9ery9PT4TM7u4ebu83hPNN06422yKvBq0TS7koMM/zuDdAsRUWFBFCDNG7EFQMnJMUmCeyglGm9G7XtdL4ajLN66oo6fX19d/87Bce4NPPflBVVVXtGONFXpZF5a2x1grBuq5rtmvnoOtbykianafSuNmsk8vJZrPp+76qquSuGaKfTRdEUkGlWulk57Q8PW1VV03KXdMld6rosTejWdtxHI0xs+VldF4BEZTVk5IiqSZ1keUoMsFYTvZSesF4gOitIzQeSG3HZ87h2wP3Kl1WtSxDCNZaY60N1nrX9k17v07dapFlgnNn9Nj3fd/rUQlGk1yCMwoxJM1VetiyLFM+jLV2t22UHp31l08vUzqbEGIyqeq6FkIwJgTfO+DPZjMASOk0VVXdPtw5PZ1Mp9979fwfvvr217/4OYjiv/oX/1z8+V9++fUbMwyCI7dYT6qmG6Lf89rYRzXvn5rtv18vErcN3zPIEAIAdE1fl/knz59MquLtm+/M2JNJ9vTitLm/W86nxKtm/fDyyRlKors4v7wQAjjBdLa1w5jS68uy+qSeFEXx5PJ87AeZ8dPlCedcD2NyLYkxUrKPjuScUyTWWvDBqKFr2rZtvbGcU5lVAMAIDcFppYZhIIBVNZmUVaDeueC9DQHS7spa7Vxo251zIQTHuSzLPM9LSjFGPK2W4IOLQQ+q74dgnbImWOdisEr7GARlSCj4YJ23wSvvpZQBYbvdJagwQEzF5urq
anl6QgjxzmirE5ZoHZSzWSHFOAxd10UfyjxLKlRjjDLaWhsgMCooY5wxEv3B5iPt18yoEqvT6L17b0onT38wqr3cMBWJNHhPD560fQmm9j4YY40xY9dBCFIKRqi11hgHAHmecyF5lg/j+PU333399TdNB4QApcApDcFp7aJPyCrkOZF55gIkkAMQkAAQmvEsK0qX+D4UCKOEkUjQxxgRXAh7fQWidGEYhqSIJ5wF56J/VCkghr2eIcI+SSqFYkMEiPgBzerQ7e3xzw/DgPYVMcZIaCQhxgghWW1EArivfxEihkN3GGMMETHRZlK1iODtvs9DQgAPTFGMj5uS49WEHPlix5gsA98buxNCwj+x7/xHq2yMkaQHjxHix+r1A7YZUyz7fodLYowIJL1bBCE+Bjv4GCAiRCRIKWHeewQSgRAmgGA3jrtGiWoJnGKgWca1spGgj8E7GFSMAQgOr308P58RWX33m2+AxPOzy0lerO7uPMC33357efmsbTptwu2qn9d5045/8gfPxn5Q/c718fd/+CkFv364adrdyWKutX54eLi5uSOMVZOpdM4Yq5VNToRZngdvvQ8+hggYAa3zoAxQCoiSMYIsk8Xd/Wbu4O5h++13r2VeROQxoHPee6+VpYhFURKCbdtiiFmWlZlkhGEECHt1SBojeG+7YQRCUYiSZSHEXiti3dffXbdtV9Sl4MXd7f1218+W05PlaSLjGDXG4GIUu+1m6GCxILv1arlc9r1xxkwmFYmwXa8xIIkgGS+yLJeSAkbngzXR225ou25YnCzv71YnZ6evr9/meR4BXnzyIkZ0zo2D3rWNHW2AQDlttembFr/5Vg8jYwQj1NNJkeXPXzyrijLPc0qRRz+VRVnXuZRSkNR1pU0AfZQwHSRYCcY4ABsP290wDF3X+Riqqqpn00VZ1vMTIRghBCNE7wkKBMllCT541VBkhCFjIkFKyXPqm2+/u727B4DZbHZxcXFydi4ECwH+w3/6D5zzuq6XyyUhLAQYutFaW1WFcy5da1VehxCCc/e3D/PTpbXKabGYzD55cqn6oRkt9faf//EfXV48ubnfXJyfrH/95W67ChEJF0j4vux91Of9jyl+H91IjNO6NGp8uLvh5Gw5rfNMXF6cTapyU+d1mXMC3XYVTieFoCzndSExpt2GM8ZE51M+VllWoigZY4JxA5gJyQmNzgvOUwgDISSXGQH01vVtBz6MQ6eUSi2OlLKsasoIhLjZbBgSypBTNp/OCCEU0BjTm+4QO5esmVMBOAjpEsXRWg3AKaV6GGPEGD0iJQQ4l4yxmKFzRlNhjAoBnDapR/Q+ItKyDBGxaXdd13nvfXTOuU8//bSu66LI+77vuuZxcBOLIsfgu67ruyb6WJZlXeYJlkwxDIjICUNCwDvjvCzkwbnYOWeVTi/BGpN0DikDOrkFWmtTAnvaWqZ2Iakpkvl6qo4JVRiGQSmDMUopRJ5Ths57HyNhnHFhQzRt//Dw8N2bdw9rYALKkgouY+Bd046j5RSyjAnBKKUuwDDqbhiVAUKACk4oByqyLLu7vlPjOJnmeZ7Xpcjk3qY5bTAjQRJJYtmleF4hROpjDrXh2PEreXWmon6MbcIjhnFgq6bfHqvi0gnvj4QNh13tcV2ED6eD6SQ8NF6H0vKRzcrht96Hw+O/n9cRYtPG8zEL81CS4z/RNR7DsIeKHkJgjH7Y8IWP/uaA3O7fkOM5/aNl2jFKfCD1pbuIogy74Xa1bjtlAxjvmOBa+VFpSnlZTQjBdrtxHhAAhTCjvt309cmoIpGM3642zaBWq1Vd10M3fvLJZyeny7dv3iHAtBSLSQlBc+K5YBcni4vzk3fffds2u+VyOfRd1w1d2xPALCsY4c55Z0Oe50iiZAwRjbPpimNCameLvAoYzWiAoPeYHG22Te8C9oMOQLWNAE4p0/emqArvATCGEIzz42jKTMxmszKTBGPEQFAyRqTkWSa4EMbq+9VaOz9dLGeLxaDM
7d3DdrtdzOvm+o4xMV9MmLE+kF07/M3ffPHy5Zn30TlPCTjnjAFEyPMcrM+yjFAgBKTkfd8nU/jdbpcSJNImNV3jMfp2VCEE7UM1n3bDuN5uzcM9AJks50CYKHJZVsW0ng5KW4WRmJjv8nIyna4P1kuCB4rbXWOdH4xyzkUf6ro+PV1Op/Ock8SiKMR73+BRmeSGeCBwHgybLk9PsqIspnXfjdqq7W6Xchm7+5YhIYAUEGIkgJQiI7QuK621VlqFEek+ilwI8Xu/93vJtLPvxzdv3q22u7qusyz7n/zJ/3y9Xl/fvP3lL39NCFnO5hcXZ4vZ/Pbm4ez8tCrq7Xad1q60uAVwMhl1AV6cLpbL5W++ffsPX35H8/rF5RkAXD172rbtd2+vgTDOpPLxH+n2fhtg+egWD9QWQMAAiCRGAMhlprrd62++Vu3mydXZ5dmzy9Pl5eWlWdZD3wZrOIkP97fLaV3l1GolBYPgKYGqLBGpyDPKeFFU3dAi4ti349BxzpzV4zgWWZ7neQiUUppLAQB7V2UfMAZGMBM8eb9oNSqlzKjOzk/Io+hKa93tmhS0YaJOAyF4tMZIojRCSJKgpkFrQtiyLAsu7YYjAPHeam2HoTPGGaOGQfV9m/5vrU/A6dXzFwAQvGdUCJ554vNC1nV5eXkppXBWB28RAgJwxqTIQ2RjcmcgJB2J9dEF3/XjYVOPhDyuQpQCkggYorV2HIah71O1Sw1fKmlZWRDGHj1cdLLZTSUhvXbnHAChNGG8QSnTtYPS2rtYZFIIQRlL80/GWAAchgEoW292b69v297IDERGGc8IY7rTAIFz4BQTVwYZZVyaEJSxSgNyYFz6QIzWQKjWOumVH6EVcrzEhxCi90j3piGEUYEihGBiREYJZ4xQ5xxSEk2klFLOmRCImBpZcuQHi0dGZQmWCYdwpaPb3vfnEQg9/H36P3xIJCGEuEe1dTzy2DwmlRwXoQNY9FHtPC5FH5XJ4wM7prQc17z0EvYVHRFSsU9ZDYj+faYSeIgR4qNB4F51n5rUGGJEAIIAqVOG9C0iwUgoZ0k+hJQFxIf1VgdwEXZ9x0TOIYzepiARLmU5rSGYGGxR1mUxefv2vlztIs/r5fL23fVgutFG6TxScnt7G4LjFD59WlOKi/lsfffF+en8888+OVnO+3YzjK0QgnO6GlSyADw/P+d5RZAGD0JkjAOxxENE771L/Sk6H4duAOTD0I2jLqqKEDUMAyItq0nbDWp0RT4xzqetWIyAow4BKAHvIiWkKPJpXVVVRcCQCEipkFwIxhiJgNr5+/VaZHlZlB7w69dvml2HlExm87uH26qezudLyuTN7cPb653zQPk+dptzHoP13gsBRECM8eRkjiQul/O0am82m3TiKaWEyCjdW38QQoqimM2mNhOB+W3Tvnz58vXr17P56c393WJxcr/a+QCc85TwPOGZtjr6eL8dhGBllY9KJh+PPM+cc9vdJkTv7J6UsF4/rNa3RV4Nu3Uyz5zP50m0cJiGHM7YQ+ULIfzmqy/qelIUOaEALlpnwAEA6KEHLvJknosEH8fMSinG2Hw+Z4xFJMaYYVTemLdv39Z1/ezlqxhj0zRtN+yaruvHzbbL8/z84unF+dNx7Ieu+/bb11999c33Pv3s4X5FEAFilVdSyu12a0bz85/99fc++/7LTyaCYN9sbMB5mf/os5d/8bNfkKxi6KtM/v5Pf2StvV+tjOqQ1fCRXP3/j9neRwgMwO3t9aTIeZHF4DabTSHE+dmirj7l2entO1Mtp3WeDc3GqR5keXP7rpAiee4xJox3etARtbeBMCQRQvScsuV8med527bL+bzv++gDIjpjtdZj12tnOaFVLrO8jNFrbZVS0flcyFlVe2tdSgFuu7ZtEwmzKAoOLDkOpBdLKU2cmqqqErgfYyyKgnOeqI8EeJqpplvClJ1zq9UqNYuJG1mW5Ww2m0wmsqw3m03Xtcmm3TlX1cXZ2UVVlVrrYTQE
IiEQvDUmSCGcjSmhdzqdCsa7rut7lSgzhzWKICRSUyakHsYYgvVeq8TR6Q8KrXT9pAYIAJIFaCKGCCG6rnPOpS5wHHWWZWm+OPRqt2vGcYSUYC45ISyEYHxIPNFhNNu2y4vq+vb+5q6lBKpJjkCtDy64vu+zTEzrCiCM49irseLTsq5t17sAOgCPgMCtt20/9IPJpXR+hEf40ZMYI9svygSdc9F5QvaZdoyxiOxAG6GU4qO6KEA8cFnh0XvzUCo+0obv7TYePW7IkUPm4Y6HwpAu8oPLyeECOZS3Yyzx0BgdLxPHjVeevxcjHvg1AED2XdoHNOkQAhJ6XPMOle/wjIffHvwQ/qkKmqrbcRuHiHCUxn5cUD8AbxIQESMiGu0gEuM8oWAM6KZZnE9KwVmRNbtuNBqDz/Jc8EqPXQDiuZRVEXmxud/OT8jy/CoGt/nNtw/rflaJ77775uLsZD6rPnn58i//8q9IMM/OqydXZ5cXJ3oYv/z6dZUX9bS8vr61zvoQs6zI8jIi09ZRDFUl2m4zDAMhmEzFMIDxVvVDO/R9p28fts7BZ58JKfOm6YwxP718dXuzaro+K8oAwRqf56Isy7dvN0IA55wyzIQsimxSVowx13eIKCkXlHMuAniljfVuMl16iO0wbnfNMFpEyjnT1mplOSd39yvnojJBCsIBq3LSdgMlJHrjrUXis0xyQrz302ndtu3JycI5d319c33dPXlaG2OWy2XaxxRFNo7jbtcQArPZy+82uzdv3gkhvvjNN8uz09vb+6ur53VdD6N6eHjYbNfbXZcE9em6OFnOIJKrJxcZI4clvRlbABAUJnVOSJlWMMAYzPjDzz9Pp4fqe9X3+JgPkzwH0o0Tgo9cASFTG5rCK2gg+0tmMalTqTNDP1oXvcc0QEFNgLZ9l7ZUgsu8LObz+cXFhbV2NNZam+Ul43LfUAJnIkPKrdL9YIbRAlAh+JdffwPBz+fzJ1cXyFhSHv/whz+8vpUPd3dd1/3gBz9cnp1vdr0Ze8nE//RP/ujLb16HTWvG3Y+//2nXdb/4e7trewOeRELzahKP2NiHK817nyr3ofKnJeYRLKIA4KMPIaQ9ImOEUxqjD95mmbw8P1+ezJ21OYa6qopMUgxlJjHYZrseuw4gpgQpY11EkDIv8lxySTnJOGecCcactVop71yz3XLGovdGq77r1DgSgCRXsGoYh2EcuuA9SZEv3nljdtvt6v7h7ua2bRoAEJwjgHNO5OIRQNgvW8n32VqbolmLoogxJosgpZSzQAiNEXa75ubm9uFh1XW9UhoA87xYLk8uL6+urp6cnZ2XZUUp6wc9jmpUo1KaEDKfz548uTo/P4cYjNZaKxKjzETGBWAM1g6DIYhSSIKYKqv3Ps0e0nlAKa2rejqZMkqMUpwy8KHt2u1qrcaRUUqRWGO6oaeUppwjAGjbNiUnyDyLMSKSlDSUAujTBNvZ0LX9ZrPb7RrvQ54XdT2xThtnKWNlXceIq822aVqk9Pr6ZrXeag1agzZuMq8nk8lu155M6hh80/Rta4CGejrJisI4P2j37rrTEUKEyWzSD8pF6JUlGIKPQpDpbDKfTSRnhETOKOOUIhCCSEjwQWsdI3DOATzjnAsBj84jgEgQm6aZzqZ5lkHKH4qRMkYeLYFSyUwv83jRT0ZuCUFFRCGEtzaGkMZl6b7s0SM0Sbnft4zee+8pAEZIs5/DV0yib0CCSABjCNEHjIAA1u3x2wOWuK9S+xyrDypQKtmpiTxMVtKRHGskDuoIRIzOpQMIzhtnrbWHNKb4SNwEgkgee0EgSCgSGgCcDy4EQEI59zH6ECnjgGRQyoWAlGpj190wanv7sP7qmxsboJ5NV5uGiyxApJwDEiSIaYQZQBnFRdH2Q1XXm23Ttt2TZ0/vbm6RBAI+OF8X8uryYj6pfvF3fxsD/PjHn/708xfTqlqt7rumqetSqeHd2xtANNZleSlEZkNESmVeIJKm7ZtmVddVXddd
11nrvnvzhguptKaMhxh9cEIQREwagCzLR+1DQFkUUuY+hEFp4ywAZRzns+lyPs+LDKK3zhBEwRgHV+TZdDbr+z7LsxhBlsWoNcsy44LS9vZuNZ/PN7vm7uEeCXPW1vVM5lXbDat1E2IUWUEJu7vbWDPmmZBCaK2Wi/nQ98+ePikzfnF+fn93983XX0OMmcR2pyD4+XwmOOeMeue883kmpWRjP6x7B8CUslrZi8snVTH95uvXi/npOOrpZC5kURRVWdY3N3fX17d1PX24eZ1xRjFISoIzwWpGoM7l8ydXGH3XbrrNutmsrBqmVf7s8vJsMS0yXuaikFxywilQCBBsLphgiMGZsY/eZIJmnJLox7ElGBgBzpCSSDBAsBhdsM4q1TW7sW0pQCElQzBKvb29tsZRzrKsyLKcUKK17rphtd22XZ/YD0VeFnkxKnV/f7/eDuv11lqblyUTvO37vutDgK7vEfHbb7778osvKWFZkVnvATHPOABQQu4fHuqyLMtiNp0N46CUkln2zTffnp1ffPX1V7PZ9A/+4Pd+/rO/7sZQFSVlIsNHk9Y0OElzr9Q0HKZBaTpirU2rRvJsixAppYIxLkSM0VrNKV0s5mWZh+AZoXU9yVgMPjijrVHRG8FIxnmeSc4eo9gpp4QBYPDeOd9sH/SojRqMtiE4QRinyCjTanDGGa2sMsYoq623xhobjXbJsyEEq3XXdJvVervZpLpBCUkBPcXjzQYrRCalkDI//Mu5RATnwjD0bdsPQ2+tT8ngXduvVqu3b9+u1+sUZTefz6uqOj8/PwQoxyPznof1ZrVatV1TluWLFy+ePXta1aVzznsbnKMkCiES5dpqba1lVJJ9jtz+HU6c6XEciqKYzWZlWRJCgnXBuxijHbUxxmpzSGBP8GY9nTym88Rk6aS1DiGITBJCKGUJPHHOEUKzLLPGN0233e4ONtMxxr4frDcyy2WeW+t3Tdf2g7HeWH99e7/bAWVwfj6Zz6chRmMc5zyYQRvtPBAO1aSs6ilSZpzvBr3rNWHABc2rSdsNo7EuQsYwxJjnfLmYTetSciY4yQSXUnDBCCZ6BQnehxAZY5y978DwcQLnQ/DWFUWR5RkCHHvi4GO/le4S3icPcPhQKrAfuf0T3pgHQcJxX0gpRYjHzeLxNvFw9+MBW3wUDBxOkgNCEhMx9PGh9od0hJR+1MMdY7bvq/Hja/TeO/8++2b/ROmOx85nhB4f5zG8fGhYk5oqtbC9Aq3tart7c3MNlGRlNbog8wIJiRGShWbwPrjggg8uZmXd9UOW5/0wUMa8czEERnH9oD95efrsyaUzQ7NbU+KeXM6fP7kSaAmis8Y5CyH03ai1FkIGIELmXOYxwqBMPyrvE5fVa62bprm5u00qwPOLUyRs17Qhgg+REMqFhLiHtd9cPxjrQ4jGeh9BCEEIM8bkeY4YfXAEY5aJMpOSc0Bvh6GqqqqufYyjGkejCaeUi3YYlLGbbbNeb7V19/cdoUAJe/XJp6NSq4et83E6nZdFrYxdb3YX5yd9NwyjDU5DBE7he599hghPrs5+85sv7u/vsyzDfUJZGULAvU0/iTESCmlka4y2pMpEkRe5Vq5vh0xmjIu7u3sEREIgQC7zIivUqIy1k2qymAhKoun7sW8huEywjCOBEL1mGGd1dXlx9urls09evXj59OnZyUJ1rdVKjb3VKngbvXNGW636rtlt1ve3N7c371YPd33bWKO8M6kY990uhpgJFmK8u7v97quvrdHBe0lpJkQmBackpdc//fRTKoQ2DgjJioIRtmma69ubyWRykOMiAYAoBZ/UlQ6SMuFCNNqFmKxlcRwHxvnNzU1V1oDws5//PMT49Nnz2+tbH810No8Afd83TTObzfM8p4y2bXN2euqD//KLL374wx/8xV/8xbt3b/+rf/kv31yvu7ZhKXs+nSJpMpSk4lVVJazskFDadV3f92oYabJ3ZAQCCSGoYIkxhALlsp5NZieLjBGC
0SM1zu8aLQXLGGEQ0QcSPAUXrc4kJ4AANGIIIRjrjHfeR8mBYsS0RgA6pMBJDNh3DYkUSSSAjCIhhBIgEDAGZ3Ry33HOJa8pwSgAZEI+ctyTZUdKGuCUpvw2mSZ2yVgrJLcMIIRgjOh9TI3RdtNqrbU2lLIsLyjjSpuu67759ru01B5MmRMd//r2XaqOV1dX5+dneS5DdN77sR8IIZwzwXkIzhnrbYgRmEDvIYTgjbXBG+Os9wjAZSbzIs9LxOi0sc54b6MPutN7epUPwfmUH0YplY9CnKQjTDBd2soQQmIM3u99h9OpNgxN4sIgEsZ4imIZB33ydClkoYx9eFh1XYeEAYCyWus4mWCWF5PJhDGxWq+bXSuEiNp4B5SCzHkSI/Zaj9qutzvjIKso4xkgdfuJElgfgt/PDx7RhY+jHAmhSGkkwQZfUJGGCiEEpBRwT9Ys6ipZTMUYIYa9iSUhjKQ8uXCgyaR3IG3p0ql+eCLnnDwKbYDHCno8h8MPFREfjcCPaZPhw+yRY1B0/wdHI7294OGxsr4vP0eigmMdxYERevwUMUbqH+toiJi+Hr3aI8G9mP2I4Er+MeX78TsAR7lIAOBccM6Po/YeAPfIqtaaZ3LfdLqAMWIIMQJG9DEwIZSxXEpGcL3bXS0XmagyRuq67trdZnU7KfPvffri7HRZl3kcNtEDBowu9uM4jsoDRKSEYYjoAYiQ1AetdHBWUmKtHoYBCGWMjcoySYGybbO2PnIpcpFxSossD8FBJJRS47qiFIC0GwdEzIuK+NBtjJAMIgEIFiMlUTCBxCPS2XIuMrFtmhjjoDUX4n611j5sdtsQcbPZjToiNQDw6sUrSvh212oTnI/tMECvKOV9P7QdZHzrPSyXZZ1nXbu9vx/+6A8XTdNA8G2zK4u8ruuvvnorJV5eXu6222YbCSAjOSNUioIxkrS2IUbnLJM8E9nNuxsC5Pzy4s23r998+zqvSiHEcrlklGZSLmbz+Wy2fvfLtBDFGKsiF/M5ZSUhpBQsNQB1UZRlzhgzqr/bPFR5Yazq2zbtdZJN4DiOaRMphJjVRep/GAaGQQ99349937oIeS6Lsj6Zz85PTsdx5JQRAGestz7GmGw9BuvLalJPZtbathucsYTyq8uno+q99xSilJJCDHZ/mYiijFp3zbbt+ul0Mp/NjPW393dFkc1Pz27fXTurn7588fb65qv/63/3vU8/u7ich0idcxcXV03TbFbrtm1PTs9/98c//LP/9J+fX1189eU3u/Xd/+6/+V/9H/9P/+dpmb16/mw3nbDDhZ1W8Pl8fnJyUtf140m/V1AmLJUxpkd1NH4nnHPKBKW0KPLFbDKpyuQIVZdlXmTbdpguc0AKKTwn+ACEI6MCjdaJtRBj9BGcc86HEEAyCtGHEIPzzkanDQBE52WRs8Qbp+gjiT7EEL037W5rRjUajSECJYIzKfI8z8dxTKzLEIK1Pi12aZRIkAEQiAQJxIDee2ddlgvvIgDEgD6A0apt+64djPe5kHlVgg/Gu3G16tVoRlVOagpIOBNCACXW2F3Xjl2fF9lycfLk6nI+n1OK4zhap5N+ExEpsrSU7CmanMcYU05HiOBi8D4CUsrocrlME6kQAkaHEbx1ehzNaLXW1rnDjoQJnoRHMUbnXUJK4ZEzAgdJ+COhI8aoRtP3IyKWZZXANKNdmgLKorbONcPQa+ORECRKqd225RwvL69CjG3bMsYSltypURBgDGRGi7rMcxkQtNb9OBgPSIGLzEcyGq20iQiMJJ3c++KRMgvTa4mZIEdqoeOigkcU/9TRVFWVlUWi3aYCk5CJAywfwnvNAx6llicXko9+flzk0n+O0dFD+wgAJH6QZxSPFOjHxQ8/JLkcQ454zMw8ouAcHuG41Xs/pfswJvcjus3hLUp9JCKG9MiUIGLCao8P7PhdPS7bh28Pz+VdcC60bWsVBOGNMRCCtRYZdS45WQeKkSBlSADAGJNkZ7PJZOzbLMum
0ykjoRBss74bmmZSkovLk5PlTHJi7SgptcYbk8jIxgVgVAChQmTGOjNqwrgLcVDKGEcpneSEMZYVZVnVo71mVISIu7YbhuBClFJ6xhFVcN5qJYQ4O70oy9IGPxptrE/1QAgwxlRFnuUCvVdqIOCySV2WOQM03u22TVbko1KBsXXTWhcoEySi9ZExjBGcA8Gztm3bfphO5kUN6207jraqSCbzIutns9n124d37/qzxXhxflI9y1IheXh4yDKxWMy8j95DopspFbXWjPUAgTGWU55lBaVEayUcHcfRaj2fz8e+223W56cn58tlVRbp9CMh6LHzxpDorFIUYTKbFkXBGMskn0wm0+k0z/NpVVJKvXPjOI5DF2Psum673RJAp42yhiHhmcy4EIJxUhjvKKCUfD6pgZK+ae+bLYY4P73glM3nSxu81rrrOgBSTmSW5d57pfTY9VY7iiTP81xKTUk7DtZaRmiWibzOMfpEVR3HEbzLsqwqCwBomqZvu5EwZUyvnTFWaJdr4yIgF1xmp8tFUZQ//+u//tU//OazT15lRfEf/vw//vN/8T9rR50JnuclEtYNPVGEUmqDf3p12fTqX//Lf/4Xf/N36/vrf/0v/vlf/tXfXH3+Jy+eXrCmacqyPDs7S/4jUspEau/7/timSEqZTMdX4+id09YFiFzmk8lkvlxUVXVyuhCMxxj02EfvbIBm0N4PNTUZF4xEEqygYVbki7rIZIYxRB+CcyHZDgEwQoGA5CKJtzDEgEABY4zO2CIvIETvfXDO7GMXlbV22G4oITLLirIglHnvgzNjHzjnFCOJABEYEkqRCi6EUNEjkhjA+0Rlg7QiBQ9am3EcjLHeO2udVjaESCj3EY0yahiNs5yyLC/rapqXRQrgUMaREAlglpecycXppCzrsioiBmO8NqMZB2PM5dUFhIgYnQ1Ou2ADIYQhU1pZbUdtQ4RIkBAmBE+6PQDYN0beB+eTainYaJXWzu4ZEACSi6ootTWEEAr7nNgYo3UuxChptu+CQ0gTWaWU0S4ZSSMS773RDpHmuczz/H69SVWZycwH1bR90zTjGGezAgmx46iGUQgRvMcIlACnkBe0rCYizwFgHMemG7p+zHIRWSCMq9GaQdkQCQIhNMtYcJZzThJmIDijFKPX1sRYkMde7bAW+xiAIOUMKWGEpn7Ih8Afb4d6FjFlLr7nnhzKQxLsH8g+qewlhkt0/gjJTL6bBBEhAu4dTT4oP7if68VkvQkI6YsQkqTnBDACICBFQpHAkTzguMLtkyUQfqu2fVD/Dv9+1IAefhiDh31S8se0lHTs72t5gjUIe1TyRSSYyl0EiEBCxBThF1MMZQzOR4aMALXKJYmndzHFe+2fBWiMgBCRAALBCOPYzy4uNttVli3bzfpsNqWUajW2q1vw5vJq+erpxaTKfLDaGMEYJbwbmn4f0wpcZpRLD1RQFl0YtBq3u+12u2t7xmhd188uLzKVuRCNdX1vyor6iExkrhtiQIgk7uWISW8nA8kiQUrZZDbt+lFrjRjn85mzWmZcCBqtj0CFYFwwQvHt2+uqqgLEgDAorUIIEafzxZs3bwKQYQAhYpmxjBuMceg6WdR3q4f7h13fQyYBkGhjjIGbmwchYDIV52fLq6vzaV1+8cUX5yeno+oRcbPZhACnp9VyueRMzmdV23WJBEdpkmJJROCC5pzkedl1XZbh1cXi+vp66DfnZ7PrW2WtjRAZcRCNEBGREmJfvHhZ1/WkqjnnjFEpZZFljLHtdk0I0WpYr9dpbffWjeNYVCWEGJFEQgOgB2SMM5ER74auV03rXBCZDD5QLigSF2KAwBkv8lxmpbY2EGZs2G633vvggSClmUBAA8RrN1JM7q/aOdUaBMAYKEaI3hjjtCIIDKOUkhEscznKaSC0iFjiJJcyUjZZLE5OTxnF7frBh/Dp59+7uXn3i1/+Skr5ySefffH12xcvn8ms+Ou//dtJVT57cnX15GK73SIhi7Nz
F/y6Wf/B7/34z//ir549OSvKP/m//D/+fDabsel0Op1OT09PZ7NZmuQniGwcx0OsRrpyUpjOyclJoshXZZ1VdVEUeVkIIbTWm81G6UEwhgDbpiUUqqq6eVgtprPFtMqkYBh88G3XqS7O69J7s6fzBUDENDzZU5IipJUaABKBc+h6eCzDCcpLZS/PskRDiDE6a6xxPgZCbKItxKABYG9pkpK7pfA+Ome9B8YIISxGCAFubm5DgBSBGUJImhvngscwujFlfycrzsSV//rrr1OLnGVZQQuRZ2m7UJQ8MSf7vm92G+dcIcV0Oi2y3BhjjEppf84FgOB91ONgjPPBIxUMKRMiy7I8z5VSRVFIkVPAthlUPzg7hhCsdcc79+SrCQBd13HOgZJDJ5GicNz+6Zz3HsDFiOM49t3ofbTWeh8QqBAZ5ywReda6T53N2Kvtdq2HmGXk5HRS5uUwDFqpvJAQSd92wUGWQZljWeaykEiZS0nQWhsTgXlnIzKrrGFcEoCiKBljEBQA2HAIDJL7+pRCUGPAGAlJpJJ46AgTRAlhXz+O3eIPXc6hrzr85HjmmlDfg1zhUOp+u6KkWyKRHyfPPWKJ+4T3j71jjgrbMWb7QR06gj0fjxk+avLgw4D4Q1sWf4tifQA/8bHshfi+pzy6d1IpfPDqjl81Pjq0HbDNA/smnTlpo0ARAKixjggWAQgyRnwkEElECBggBB9jtGittRQwRBfBE0KMVaof6rquC/H8YjGf5hBsznmWEWutavRu1yqjOZPIGJc5oWzUxrRdRKqNe1iv3t2oEODqSp5dXCT/DqvHBN4i0r4ftTZ5TkSei+RkIQVY7ykwxlplrLUik3meG+ubpgnBVXUhBIsxaDMyiHkuizJ3ztzfN0hIPwweIBCkgpsQXYjjoK9vlZTgHDAKdT3lXAKAYBwIubvb3e2gliAEb5peaxASjINXr56dniwEiW3bdtvNb7548+Lps4f7znnjbEh8gru7uzwrnzx5slrfI2KMjlBAROcs4yTLMq/ifD7PGHFqyKWYTys9tFLQ7frWe18URSbr+aTMsizLhRS5pAQAhlHjOFLOWD/ce2+1SthPjB4ioYQjBJ7LPCuzqsAIPobogwveWK+dxwh5WVAmvLPWBRZRZkXOKEWy7ZSUkmcMkBlrjY2Ci6ycYqeMUcYYQqhkyAglSAIh7agmVZVVdd829/f3euzLIp/XxTj0DAEoaXdb3TfT6XSxWNTTyTdbtWs7pZRgdBw1BLdYzJ5cXty8ebNZby/Pz4pMuuATTf3f/ekvnn+6pFxwJo2N2ro3766llJwRRunD/W1Zz7788muZZf/1v/ov/99/+mdVJv/rf/1fNtuWffLJJ4nKGEJIhPujze+eOZ0A/VSHjDMJI55Op1RmIYTVamO945y3Xeu9hwxC8DoETjkSFiJSIavJtMoEDwatRqeCN4Mao/XBmRACAmUpBI8x1XcJKhnH0RoDj964FB6n7o9hY9776D2j3HtnrDEjcTHEGDkTXDI9Kmut85EQImUuhAgqmKgwlNZ45w0CFZIxKiJ472LXDcmDDoHG6I1246jVaBwNAEDY3kmIc973/Xa7LapSCJHED6nt2BN/M+CChuhG1fd9zzkvy3KxWFhjlNIJs0WgBKjWenTK2jFGJIxKKYWUSS5Eudibgtqg+iG9FcnxWWuLiJLxEAIyeqh5fd/nec6kAADyyKoPIRir0udordXaeB+11mo0KfckhMiZpJRba8dRG2N0zhIAsrnfGQuTip0slmVZWm2MMYgoKBvHkSLMJkwIURaEc0kICTGkPNOIJCBo7Y0HECFG5FxkeS6yLEY0qgkuEqqTnalzkhGMMSadk3OOIRG5pJy5EJPQPv0qUVTSopy6YcYYUBL8e9ua1Jx9VBvSmZya4IPT5kGnL450dXikjfsIbHwPQh49+DEeCB+qIH4bkDxcUIer6dDtHd8L8T1SejwjPFTZj550fy+A+FgI90fx6AITU2NK
yN7IG8lvF+z0/h8khgf1AgA4FxAoRSoljUJobSkhHt6n6iIiAoUQYwwYgEuy3a6rqvDGLmZzozWr8rLMz5YTtCaNnwUNMpOUhs1qNzyMm82OUConJRBKCI0RlTLWe6RsUHpQBilMJjBbzLMiV939fFYqZzjj83nNeb7b7e4fnBAAsdeMcEaV4kFbpUdGCFaLEIL1jjGm9WittgHoODKCJM8kp4ISxmgITo39dt1cXV5uNjsfovEurydd243arLfNdE7LokKAoVd5nnsX727ujdIE2WSaZ7nLsgKQMqEXVNTV9Ox0OXTNr371K6f0dCILwS8upjHGpmmklCcnJzHiu3fvmgZOljCbzebzuTEmBMs4YSxhHiAEsy4EN0wq0XWGEHeyrNu2RdB/9Ae/k+RJeVVyTgnx3o29VZrme6NBo10MFNB7b83eFpgxyghJawshRDA+ek8RgRDBGGGcMbTeR+9dgKwoBUBwblAmfSLBOZHPGCcReSB8UOO727tOKSHEk2fPB+cHpaMDQgiJJAEq+aS6X23atsXocynqurZGX19fl0U2n9SMVl2zc0YZPW7WD2vn/+wXq65pnXNSsBB8cGZWleenJxADp/jmzXcvnz/9gz/4Az2qX//6+sWraVHP71ZbQHJ1fkpIuL+/d0Z/8urFr3/9yx//5Kdq6P/kn/3Rv/+LvzDG/fD73/tPf/6f8+KZlZylpTwtAYfEzrRKwofz/LSyPL/6hHPOZEaQWqet8SaEEEJadvfTLDNSJiglTT/EMFC6clpJAgLtLBPzST7J87FtMASAQBEZY1LKZLM59mMIIVmoeOcOe+3dduecC+59OHXaxW/7bq9py7JIqPc+eO2933czAfI83+e6ATjn1ts2cSAppZnLON9rMyf1LLFYh6FNK3JaZBN59WDQnroZ59zZ2dlhgxxjFELM5/O6rhlzTdPc39+3bZvn+XK5rPLMGNO1rVIqOiel5IwFxh7XfUMI41JIKWWWIZWEM0Qsy3Lo+s32XncDwcAJTc9urU/JyDHGNFnUxqRWj3NOGXPOxcfUbOdcID5p1b33wzA88tvjMAycCyklJTyZeWptnXNDyBhj1jhkcD6fPLm6QsTNw6pvuxhC6mL71pyeTs/OzpRSCAqAWG8DsHSOMMYIAZmLaIMQwgVH962kNsYUNBlp7Vd2SiljhAQQjOzhR8rSAStlktY1ObAcZnLpdE27jX0bFCH5blNKg7GHknOYoh3OlkepKEvETmstFfKjRupQBj5yY9l32DEcmshj9BKO5oKHxzkibsLxH6TXkmZ7h7Po+O7xQweyjzDPj2rtXowYo4sBANK8EJJn2/4nJCEThJBHtf37Pi/9/6DqgccB8GMRJUKItOcLVBgYqZQWSJrDYYgIkNJwASlAYEI0m+35p5/aUZ2dnqxub8qyzHndrO+sajN+cnYywTDuNpthbB7ub8ko27arUvoakhjQR6+1Zllure3HwYVQVZDS2t68efPZ1byqqnbsASAR09ZNay1QCs45JBQtUIpW664DjIFjnzb0iOjSpy9RCNE0Q13ls9mMY3DOWm20HkOA+/t7pVw9r22IqNT9w0ZkQggxNh2WWOaVVlaKPORxo1RZlm9Wq8lkVhSxbUYmyNXlU85Y1w5///d/b/XoHSwmrKqqZr36yasf9n1vLQgRyyr3DoUQFxd8Us+ur68vLs+ccwCB0n1eSgTgnFZlsdlsTs/P5otZ8tAgNBZF8ezZ09vb213TDG0zqt45FxGdc4xMZ7NZVRfW2r7vvfeSUSFEJmXa5iJnkjAEgogyy2zwxrkQrGM+WeGO4zgMQ5Zlk8kkyzLnPADkZbkoS0qpduz169c///tfEkrzqgoxjs51w1jP+hCgKKdSSvCQjDK0Upu2tdYarbyxG7CCoOBMkHh9fY3OlVWW3v2hb2/evv3mm/Xb/MwYQxGyLEPwzpiu3a3uH548vWjXWx/0uzdv/v2//9Pf+clP/tv/9v8wjuPb+3a3
Xd89rBgl0Q7zSamUWq/X55dnv/jFL374kx+H6J9cXL65uX3y4uWLl8/+489vZ7MZw7xUzjltnXMEELjc4xvoY4zKWuticD5xe05OTkaIgaCLwTuvnA8BYgDvPSC1xmuvnRq9s4wSKghDwt2gnWn7bkAyjI0bx7oqT+eT89NlITkj4INHCplg1qr1+j4B37vdbhxHilgUhR7V27dvx1FPJpNcSO89RZoI98MwnCyWPoRWxcF6pDGEoO2otW667vT09Pzyoqxr433b7kLwiMhEWRX5gUpHKCUA6P2gVaq1qeCFGHwM3vvlYumcCyYYbzz1hJBM5NV57a3jWVbWuZSS0kgpIlUR6bjuVNvapqHWTieTeZFbax8eHsZxrKqqmEystQ+r1W632y/BZSmzrCjLsigYY9EHP/babjWA1UYYywlBIN55r4E76mur/UApz+SEUWkM9MoqKwpZEJACOWN2sFttBkTPOSlx2u/GtmuHwQQfvcPRemWDKBYOqUbiPdoYAjDDs8hi7LUlsSjmFyfPspwHbXfNQ7drhKCZlNaNYM2kgkK4qFcseH5y6ZxTShOkSOkwbHut8roIyGyvd20fkA7bxjpw0VDKN85IDgDQW+spAiUBIqeUQvBaSck5wjg0ZVlO6lyNLXGBIVXKoPVlUVBKY0BKKaE8RIxAkEnkGBB1oBAgI8Ja65wnhKSN/F5+ThlGQB9JUjykqBfOjXfHQzdESKWiLMpUe/y+iX1sy2IEBKQkUcw/LnUAyRXl8dsIkPaR1HvrvY0xhogxxgCpXhKk4MkjnxjRgd/TuhnFEJIVibU+TSUPae+phYwxepHt66h33kdCiBBSSmmc89GHGCijQooksdDGAqFAGFJMZdwH73z0PkTrI9IEkyLlECCil3nphrcR6Hr3sOk7Ui4mF0/HEGZ5eX39mhGI6CFgCA6pRF4wyq0bJgvhrP3pDz813frsxaLbvvaM3L57+/zZ08XpglK+Xt9mMbz5zXfnJ/WvGjUqeP6Dl7u2vbu7/t73vvebX/3q8vLyu2/fvHj+8s36XSEqFEz1cbtuPvnkk+1uO52eXpw/+c1vvp5OTrfb1lvGGRgN06vz6+vb2SwHhySr/bCraxEEx0CU9n3njEZCckKgBT8qKKTNc11IdEZ5rTGyacWNGjOWOG/wcHNfcyK4aNvucjY5WS7ub+/O6kKEzrjuyUIioqDlZJJXk2mMsW36zeZ+c79To0cNv/P9l998+y0AdL0R5ZwXZw+rlXGQhwxR7ta3wRhG4sO7t3XN1tdfTybTejIJQLu+H5ShXChlJpN1XRNvt8mykZH49HyeZVkww8XJbF4X2+12GGhqFTprp5UcmhWY8eTkpKB8vV5TZKfLCx+Dd5EQenNzU8+mASln8s3tKp8IRAKEWSAqUIjMSUZ43fkQIAfDxtH14xjvB4irEEIpQQiBZb7ZbldDO5/P5/M5Y/zliydvXt9st00HLHjCmJRiESPedtdNo5RSUkrOc6ONaU2MEaAYdN3cNy9fvmxt+/r+9bfXZKc4iKkHD86RgXCBlFOIrnH+tH76qy8eJnXtMH/+8uqG1M3rbVVVl+fT+aLMrunmYRXUQD0+e3rRrPtu++1PfvKT119ef/JZcTJZUGROqc+ePv3Vr95ef/m3zD/ejunX70GYdOFZm0xDvPcx0v1y4ENwPsAeJHHWIo0IkVIqBa8yWRZCUOY3I0FEyoTgnM/JZDqtitPlHL25vr71djxZzpd8ulqtmu1mHEek7Msvv9RaX5ydEyEe1lsMcbE8pZQppWKIWZF5H9tuiATrsnbBJ8LCoMaUJ0c5F0L8sz/5Y0R0MbRDn6Tos+Wiruvtpj80i2lt2kf2JGfLRxXzoY27ubkpyzLZhiWioMwzIUSWiYRlA4TJpCrLUkgWY7y7uxuGgRByeXmZ5/lut0sD5LquQwibzSZ5uwBAnudCCJA8DfOEEBhBOaOVsimXAFLIKMb0KTwqlxFJjKi1VsEaE4y2
IQTGSAhOaeucNdYED4RiDNgl9bwyo1ImeTMgEYIRQoFQJJREjCE4Fxh4FzAvs1k9qaYThjgOndIDRTKfzyC4rusIwMXVSZHJxBhlyIZh0Na74LNCCC4AQBnro7UBjQNCOCDxPvpgH9MZIUawFsZxHAZljOGEQuLZH43H4IjKmDqVQ6OG8N5C4n2vQwgCBQA8yos//CrGmCA++igGP4z3ftulBT/UruFHUj9/VHiORAvHpP/fLoe//avU0aZniEdgI2HscHiHho88eop+jLjGaF08cFb3rnuI1trwOARNjX56QMaYCx8cxuEBEznocQacNgYmOR4Yv0eYrdYwjoP3IUCe5zRCCC64BMaSNKjMsoyTUGbZ3d0dC6pb33FKZtN6Pl9keZ5lxfXNdSnEZn1flvn9qkWUl5dniFjXtX1cZ9brrTE2xphlBWU8kr29QIxRZpIxRpnIsowKCdAiYp6DUmCM8R601mUxCSEUBWWMDc7FQJxLl0+MkcQICF5KSPtmpz1GRx/9bhK/gSL6GDBCAooOXAdKaQgOEauqit41TaO0CyF0Qx8jpv2WEIKSuFxmiUUihJhO67KsOaevX7/OEIDgu3c3CGEyqRmBosjGoWeMhhC01lSIPM8pl9774Ly1Pg3dCSGcy4RaIWKIpq7rhNUlG8+USPDd2wcAWO+2N/d35+fnJ2dnq8363/3p/+fk7Oz09Lwoylefffrd69cARGn7/OWLTXNvtLNBUcq5REqY99EYM5nO+3FgIlsul5U296sHzuTFxUWzvk4y3+RLdX5+TgjZbrfvrm9d8CenZ5TKh/vNw/3KmGCtvek33viAwbtIhY0uDnoMNthgtbKbZmONJ5zstq1xLoak3nbOGfCBGsxzWZZ5VouiqC4vL9tuZ4zZ7trgwVU5Y+KmW1dFeXp2fr48Gdvm9ddftbvtn/yzP3z79u2vfvMPT5+9uL6+np0sq7LcNLvNavXTn/zoxbMn7KB69t4ntdyeSxLeY4mJSHKgA3jvI6Bz3jmfImdDDGVZBuvGodd9xxkVBCd1npWF16UxBhmfLc8W08pbM7bbu4c1CTbjbDo7EYxeX19v1itGcDab/fqLb96+W+c5C0g9EKUdY6wo66qapKDXBFpJxkUmp/UEo48xGmOo1lWMs9ns6unl2dnZ3d3darXatU1RFBdX53Vdj+N4fX9bislHpO3kXZleY6LzHGYeIYSyLJM+PYHAnPNMcCF4cs/DCILxqizzPB/Hvuu63W6X/PSklI+4ok243DiOqYtFxKIoyrKUUqayV2QZIcQonQqws9Z7LxjPuAAA61xS4znnMAAlFCIYpcZReR8ZFZmQk2lhrdVaOWdiREIoAg0hbne9Uqrt+25Q1ruIlAqJjIOzHpyPaF0wIcSIQJAQ8uTiCcTojbHeJjhXKWWNLjKR53mZZ1WRaa37ruec19PqoRmUUhGBy4JQ9C7ECITR9sEjByaQEBITuRQiQEjSO28hsXGttVHuIwvw0W8akSYf1IPBNDIKbo8EMs6zIn+0OiGUUoKMEJLAPWLsb/t1EUIQIyEEyD9ijPKPlhP/6OT70c/JP2Hg95GW4PBvhCPWJe4pp8d0kqNySFNlgg+FEIlWk8rw8ZHvy/MjcTcht4jognfO4aP16GELu8/n8+9zAQ8V/aMqCI+iRmst5cKp0dngPWilHOvHEJwLhIQICBAeuWiCCEkJK3Icu7Zpmp1qv//yqo9gtWm37fL0tCiq9aZ5WO2UJGMz5FxsNiOv8erizHmTpuBa6xix73sbwEPMijwGdDGEEBihKdXSGMMACTIAHAedvP1khtoYSkEpK6VsmiaVK+9j8CHZ3TgXKSUhRIhOSLDWjmPw1HMKe/6X91WRHxRsMUKavzjnuMwAIG2PCYTZbJYa6LLKgwdnbEL4iUxWVrEblFLm+YtnRrvtdmuMkZJbo8pKCp5t1g9lnlHJrPeXJ8sNIkJ0zjVNwxgXeUaZQAIuBKddCMFDRMRE8nLO
9X2fZ6XkAh8ntcl+qK7ry1ffW6/X19fXza5t9cjGppxOfvJ7v3t7f9cO/XboB2dWuyZt7BygD4oxjpQpbTbbnTFplksZl33XjXrdbLeL+fLs5LRt2y9+8+t+tzk7Ozu/vJjlc2PM1998BwCMidOLc71pNk3bdw+v31zf36+s8SHAzg2cCiboMBgumWAyAPgI6+1uC50L9t3tXTUpbx9W2lrCRTt2QoiyrgTjlKJgJMuEFHzU9uLqye7XHSDrB43IXIgIfLasjA3eOXB2vlgu2u7r3/zDarOTRf7VV19NplNtzfxkKQRjFE0MRSYlX+5tl/fmDkddBSMfBGwemwr6JKtLqaTgIxII8eH2LpM8E6JcLDijgtG2G3a73Y+eX3nv80zMJhXE0Gw293ersd1++vKZ4EQr04xbiL6eTLUa37x5s952y9OTxWJBmByU5nklhOiUXe1uQgiJuyGlXC6X3obdzV2zvUuG3C9evPj888/zPH/97vW/+w//Xghxenp6/uSyKIpRq9Wb13unZuL23PEQtNbJeDPFzqW2L1lc4iPj7vPPPy+KAhFTD8cYAQClh+ahLcp8Oq1ns5kQQim1Wq0eHh5KzlPw+mq1ijFmWZaicXe73cFbXUpZlmVSW/Oq4JQCgFF6GAbVD/ukjwjp2nPOjalBTF7+kRGgLsRR9eOoGeNZVVRVKTMSIxoTEZEg894bp621g/OD9t1oO6V9AKQRAgBa7UMEEpCkYQ4XosiqLMsKSYdhGLougb3eGmu01noxm0wmE0LIdrPu+77IMpnnNsRRK6U0ACilbEBlHRVkMlnere4Yvk8CSghiCIFQYJTEGAASTePgeIIYPyBhpiHTgcYCj6ZclKUNWRJxUkopQZroIYcW8CPeByJ675LF12F9/8jo+cMK9EFtO2jDY4ycfEz7/EfL3uHnHuL7g/mQKYqHLPWj3vS4xz2WHv52mTzm0RzazcNhHOz34oc99EeHdyh7h3UgPubu7ilClBqXkiPBBe+N8REMGEYwYiQYU5MdwVPvIeLQaRKDGvsMouSiKuvddhVjrOtpVky+/vrLnLOvX1/PSkEplyXwLGOMpV1dUvEaY5iQlBgAwhgzep+vK8TeuEApJQAjQYhx0Eopk7z3YoxFIfreEMLatptN8nFUNC9ieD8WDSEgxhRWlHGfaMKJ/ZOk9+99UJOIJsTU+NbT2WNaCyQps7dmGIaz+dRaD8AyWYhMhgBKKWs8k2L1sOn7NkZknJyenrx797br9NlyFgmlTIzajKrPODcuzGYza41Wqh+GYei1VrIosiwrcp7IOIIQoIQxQQhJ69WkDgBQeP/4ovZogbGqmk5+craklG92zWq18STMFosvX3/77u6ecvHNm7dPnz5VoyFCfvXdtyeLiTFtjL4s69PT8yzLlDVDr25vb/M8xwhfffHlz/u/XSwWT58+PT85lVdXMUZKeQhktdpc392eLM9+8KMXTdv96tdffPXNm64brQucy0wWAWJZ7wUC4zgOxuR5ZIz5EIBwyvnJ/IIQQjlvhiFJqs6evcqkTI7BACE4H6Kzzr+7vnn+7JmPKIoaKbGBxNEBjmNNnLWS0Zu3b8nTJ9/74Y+apvnFr3/9X/zJH3/z7bfffPPNsxfPd7tNlpe5FJmQX3x3Rwhjh1F/CAGPCN8pV++Af+5FzcYgj4CEUjhyWgoxxvOLU6N03zZ67Is8uzg9uTi/qKqq395SSqNx29fXtzfvHm7eUvDTMts2neaEYXDGMfTgoeu61WZbT+anp6d1Xa/X6+2uq6pKZtw6u+0651zXdVrrkpDc2sHaruvKsvjx97//5MkT7/1Xr18ni/eiKGaLWT2fUc53bfuwuldKVVU1n88h7pfFZIKQSlEqgQeQM3FYEtRJCElNj3OOc0ooKJ1OTT2f1fPpjHPaNbu+78d+YIQmL7GkGT+k4imlktV6YmQkceSjNxuNPiitx3EcxzFYlxAnoDHdcRzHcRi895xQzrk2Fgho
a/SogneyyKoyq8qs63bWOmWtD9G5oFVKjh5B1oMLnXHjY3KANcZ6QAJMCikzKSUVItEWpJTb9YoQIhj1Fo0yCHGxWJRlmdyxm6ZzPsyXJ1VVjeN4c/dgYoiEEmTOox21VjYGZIwVBQAyj+j9XiYPEUMIBJFwBtTgEVPfe49MPo7BMCZ9d0BKeMCY0ogIIUlbtl+vGUVESjhBEgkeElzjEafRQ8DHNHYgeNAG+BBIjPSxFP2Tfdvj16EKIu7TlX+74Xv8Pm2Y4qE67UsmAsSIkSIhQBH2YCmmGCCClBCapOXOhseSA4AhxBB8BIiUcNhnaSNECI9L+XH27H7DRAnnnHKegByAxJOm8JiadMBLD4XzANgeG2o/vh4yKG29p0wQh4A0wcWccwIBog8uOO+DCVEbQIK2f3pxloWizshmtXJaBedOnzyfzeajUl2vsCqbXnvvs/NysjwnhCg1pMoXEZgUTe8XGaZQWSDEeJdo0nmeO28pFemwOeeUyb1VKUl8qJBlGVXGe9/3UJfeOcgZiyEQkiCc9GJ90iwhJreECLAnyjFB4PHNOWwjjDFtOzDGmqZhjC0Wk0lVQIjbtu06yNvGOU8p5ZwTFJTSKDljLJL4/MWTtm0BkBBW1dkvf7U5v6g9wMNqLQTvhpEBVJVcr7eL2YQxTgrCOd0z3bRGISRjlMkkvAHEfdmjeziUMUb5XuMbHx2aLMZx1NP55MWLV/ViFiA2/eARXnzy6t27GySUMfHk2dP7u9WoFZXy7375q+fPXl5cnjW77t/96Z/d3+u6hvPzi6qqxl4VRfHqxctu6Pu+D94tF3NAstlsbu8fGGOEyfnidNePf/Yf/+LFy0+23bhte+MC5QK4HH0YhmHcblLkmXOOc14QJmQOAKdnF0qpLMuUUvPFyU9/9/eFEHVd36vgjBnHcVC99z4FbcbgrLWnFy5SThkHRB+oD4Am/uIfvqrK4ic//qHc7G7uNy9fvnz68tUvfvY3m922KKpvv/22qqoQwtMXzy8uzketXz2/DB7ez/Zi3FuCpdM9FYAErKUKp7Xu+x55kWCoD/jTMb59+7Yui8VyVhdP8kySGDa77bt374jXSqngDKeEYMzrGccgJNs0ncnE6XxyOp+P7fb25p3TanlympVnMca27btB9aP2EZFyJExmhW4amWUXl5eT2TRd7RdXl9///FXf97d3d/f3913XCSFOTi9ms9np6XK73nz77bfjOMpMJKuCEALj79MJdrtd6vMAIE3gUs0ryzKFP6Ust/TmcE4ZJ8boruv6vn3y5MliNuGCDm3zsFlH5/M8n0+mdhzbtgWA5PHWtm3TNPsG+vF2sMsSQlhjjTHDMFilk4kopZQRmvhsQ9uN45gYrRETK8EYRGOUt05IVlaScbCu7/omRnQenAU12n5UfWe0DoPphq5vR2NtckiJIYL3cH61zPOyKEvGmA0hKUbGcczBpC0/o2S5mKduVQhxc3MzDCOlZH5yzjnf7nab9Vprg3khOUWkIYKyVlvTjwG3O5lnEZl14LwL4BEJAsQIzsUoIkTwPliTZJjMBg9SxoAABOBx9rbXku37GM45IH2cYO2Du9KsLhI8LNz7bma/lEOMe2QxzR33rvDH8bOMwT82ivstjPRx8gf7qVs82iPCP33bPx3s+118HGGm2V48enw4AlePYfb0+Idx5rHEIsbIHnP4Dn+ZrLTxMZgQHz31HxMb3ntyHtfvRG09zPhTEbXWWuf6cTDeUVEiREIpEhqB7NULEAHC8Yuti5ITOlgbONu2zSQTUuQJfb2+vqU8u71beWR3K3d5kQWKs0pYa/NcZllmXDDGaQNKmSzLAgDnMsYeEQkBKeU4jiEwH4OLARGp4JxzIAmQRGst5xQpGGtDAKScMfNRZ7yf1KAXYu976b2P4BmlUkrBmFUjSRMChoIDY4RAcC4JlyHLWJnne2MNKedzsNYqtec9DEMHQAilnEvr9NnZGWNEKaOM7ocmBLi6Onvz
5t36QU1n1jmoJqKsJtuHO+99VWeFlIl2nrLPnNXjEItpefhoEmOcZzLLMu9ijLHruuSflYpfURRv+s0Pf/yDtm3/+//+30RC/+SP/4vzq8tf/P0/zGYL9/rt1198+fn3f7hab+vpBFp6dnZxOp87F8ZBz2aLf/Wv/hWlvOu6BKql1JrZbPa9730PEftx2G63o4mEEK38fbfN81wW+cPq9mc//+X//ulLKspquTQ6aGuV8xGJQyYKybPsoPI6sNBZVuY8m81mlNLFYiGK+mc/+9nNzc9Onr1MPcM+0wvAOedsKMuyaUfGc+0c4p41HqkwAVfbvuk0Lye7+5u71W62PH352ffeXt8u59O7h9u7u7t+6M4vzgiAGYZAuXOOHWcpkcdTBBHD0ZwvXZZpzMOdJD6QGODRYAKj99Ev5zNGaHS+bduh70gEY5RSqsjYpu2tGhfz2cXZaZlxHi0GyyDovllvdwSgzOuz8yd93wZvqyLfbDZ933trikxkmcDojTZD3yKEelLNF7OyqpJ9aFVVX3zxVVospMylzIuimM8mk8nkqy++poCMkCLLYgzeWJ6XVV567Ydh2G636/U6UUvwUZuYgJREYEmfU1oOEJERoJQqNbRt44Mtc7mcTwWneuj7rvPGCs4ZpRBjUoCkj3YYht1u13VdjDF5nKZEx9TqpcHV0HYp0CiEwDmnjxpzpdTQdiktiDNGKXUxGOOC9THG4IJgZDKpJlXpo9tutyHGAMwH7Ee12w67drQmxEBXUXvrTUQbI0QQDIs8F1wuT87SJto557Q2SqVV8nRZ9n1vnRUin09nUspuHO/u7vthFCKrqoIwfr9aX19fe++rquJ5gUCUUu3YWReMC0bD1vWyzJkQhKLzEK2JMQBSxJi8mUME5+DRdztEgh8t6MFDKgoxGWYQJHSvjKRcpFaPIANKkFJKWIwxxEhiJP7RADMlp8eY+rwU7JfObZIqzWPu62+Dh8eY4QH3eyx+e3J/Inz+9u34VQAA7l8aIALSCDEwoDEhzQApHCFNHAlQAKAcMc0gYR+AlwoVUnI4V49vAO9xl3Su0kcMP0ELHzV28Ei9OWwR0reJNPTbFV073w3KWCCCIMYIBDF1G4YRoCRgfCQWUYaEFYJ2TeuUHryqM5Hn+Xw+o5Tttu3dwyorqna00cW8zj0REaAui7bt+75/9uzZru0fVpusBOMc9T4AyjyjfP9yOMHBWaUJF56w4FzgkVDKAUCpWM9kImPHCFprzoExxoQ0xhjtjbHeAzzyVg5vQwiBMCI5yxhjjBFE7y2ieHSiEGVZUi6KbCul5ByKTCg1qMHPZrO6KssiN8FITmOMjAkfg7VGCMHzfD4/a3ebXbNzzr148cK7eHFe3ly/UdozAUb7XDImMmUsk9I6b41XUUMUjNG6KtLSr9XABKWAlFLwwUcXRaCEAaEi59ZanWY01vgYuBRlkX//6uSLL/6h67rvffpZP6o/+9M/zWT+2ec/evLkGafCW393e/v3f/fLuq6HQTHGXr16BREpI6PSPgaEhFS5+WKZ57l3brVa39/fLxaL6XSaCVnNZ0KI7XbbKb1t+hrpycXVH1ezZ68+i6QoFktrQmr48rKklHW75rCXSmZmwzB4783rN8MwJOb8+fn5Z599JrP8k08/++tf/0OqjrnMKBcRMACJBJnMHlabPK92ux2jIgRCKY+Rl9OlGseb9W5W5CKfvLm7v1wuzq+uroNTxnEu7+7urvjl+uFeMJrn+bTMAYA9jut8siw6Bv0PY5WEBqQV6vAa9hthgBABI1RFGZwZx1EphRA55xQiAGybwTqwkby7e3j99k3G6LPzk2dXZw+rh7PFrJDsYbt923dFJmfzaS6F6c2eqoCknNSz2UxpfXt7gySenZ3N53NEhGjLoi7LAjFyLvu2G9SYSzGfz4uiCM7cvrv22iilE+C5WMzKvPDeb+8eMlknXd1ut4sxpt1HeqUJSKmqKrV6iJiUfGlRCcG1bdN2
u7ouLy/OELzWynsfnSulFEJaY5vVRmRiOp3GGNfrdWr7UnRf6h0P+7L09lprUzMXYuSMSSkJoNbaWJtg/jSuz6RkjKWoP0AWY0BEIbKqqvKyaNtm1zZFVYcYjbG7tr9dbZpdsqHhthBECCkEsTaEkOdysVwuprNk0WLtmJSIhJCU2kXCMJ1OE08khLDabruuG0Z9fnlR5JWy5vr6+v5hHYHxLNcO5nlljOkGs22aiMRHAALGAzjHMuRcEOtgBO89MiCEJR+viBAceJ/cxOiBBbkvGAH3s8332Qv7zR+lfM+ZjO8tWvbKvH2RI0eP84Eby6EjOUj3jq1eDgdwGJUdcL/j/k8KcahqhwFh/FBadzBXO/5hjPERZIsJRyGEERLTcPJAOk2rPDwObH7bJubQFz42uPGwN03KxYgQQvBhD5YCgH8UEFJKw1Ef+1H5PG4ZD++t9l5bpz1kSEO0EAIixBAEZSTN1yCGEIP3IQJC2PQKnLk6naHphBBWm+fPnmZ58e7mDiLRxlf19Obm7oc/+ZxQyihmmVBKXd/ePHvxwgW/3u1m08mgtLYujcY55xg/GF/t39hE8eWcEOJ9KIqi6zqjDKVgvacMkDJE1FpbE60F54CSg28AOhed84n3XZaZICTtNdNADxEZoUKIIs+5yKqqHIaBU6yqyhmrnWWUOkSlVDEpUvJJlmXWx5RnMplMm67t+/b0dNm2rZT8u9s3J6ez29t7ADg5mfdtk5WF8/HuYf3s8gyil4IGZ7UxlMqiyMuyVH2XCsMhCSfEaK213hljgDApZbpUrXdpeBZCWNT5559+ut3urq9vSQi/9+PfCYCb1cP1m7c/+clP/7f/zf/mf/gf/l8EMJPF7/7O864bun6oqqqqqrQLJ4QsFouiKJL6TWtdTyYhhJvbu3/4zRcxxsXVi9RvXD153vf9ertzYMt6vmuGm/v7b9+87Qc1KBMAQozGmFwWatA+OsmziMFq56MTTHZDO61n02Ux9mrXD6tN++3r15yKuq7TJamti8YyKrIsy4ssRNhs2+ezBWGaCgEBPYKPdLUbLs7O86qqJzUiXr/91hn96vmTJ09f/OLnf22D75vdk6dXb1+/6dvuj/7oj6weKaX09OmLwyl+PN/fc+gJAYChH5LwoqoqIgWlLPpojA3OIAKlyDnjjA5dp9QoBS+KghBivXchRMB+HJkQFFk/9MvF4vnz54TQrutevHiR50UIoagnIcRBqXIykwCTuiYIZZFfnp8ZrTbbzcly8eL5s6oqEDyjWOYZp2S9uX/93Tc37x7a7S6EWOWF5NKMut+1aujRBwqYZ3JSlIKy6DwGnzP27vbh5uYmDUX6vk+WH33fr9frly9fnpycIGLq9oZh6LrOjKrIch/c9fW7zfqhLLLT03me57PZRI+DVqNgglE6jsPQDwhwen7Wtu1ms0mmt0lhOp/PhRApvT0lN3nv+77f7XbeuTIvsjyjSIIP1trtdnt3fbNerxOd+iDWTqBo1/RaqaIsnjy5krlYb9bamvlyqbRfbdq3Nw+rTdMOXmlQBrQNhsEwjtrYoqzOLy5OTk6yLHcuBAiDGvuu897nWVZVlRQsBl8WeYjgfNDWNl3X9SNSOp0uqBDG+u9ev12vt1lROR+VtYuTU1nU765vN9udzIvJdH5922cFbceIGISQKQqeEKqdsy7ITKRYeYpxWuVqbE9ms1xyDIERIhjnjBMksFfORERSlTkmBQzjhDHKqBCCEk44Q0KQMMCDcgARkT7qyQGAMkIoxeQnIAWSBLTGEAOhhAshpGScMc4oo4QSQPTBG2vTl/POh5D6U0IpoZRQYo2LcR+cEAFCiElXgoQ8jhERkSAhlDJCaeo1H4OLGKUMCEFCnAupE0VklDBKGDKKlMD+iSihDJAgIYAEkFDGnQ/O+whIKCOURgAfAnms4o/CviQW3FMT9wy14A90FeffN6nxSHd/TOBMdTTN8reD+9WvvzARtAMPlGel8z74IAWj
BAlGQggSCkgCgAuRB18VeS7Yq+dPSPAE4vnFOUBsBvXt23cPm0Zm+fLk5PzyyX/4j//xd3//D6dMOesY59PZ/PrmZtd0xrm2U/WkOjk9J5RtN+txGM7OThgl/dDNF9Wg1JNnz5wLq/W2KOubm/tehcvLk6FXq41ZLoqHVX96UudF1vU9YTSE6H36YCgAAYiEgJTgbRQifPrJC61GybnW2mhNwRNKAYkQUmS5c67regSUkmeZDCF8+83DZCKl4M45pw0wTOBNjFFwVle1kNIY8+b166ZpdrvNbDohCLvN9t3NQ5axrrPWWil4iF6NozXBmJES2g99VVXz6YQStEYDwLSuT04X1gbvvHeOUZYs4Bnjk7rebLYhYQiw1zklcE4rvb5/KLP8+599fnpyGnyUTFydX3LKx27ACH/w09/9/LPvCcK8sWeLBSsKxhnjfDqbTaYzQDKMY9O2TdsiIRHAWGesG8bRh1hW9Zffvm3bzvlw97AyPhImtfUvX326aVug3EeinZd5Pp3Os7LkUpZFVRSVyCSlnDDKuaScIdLJbApAgKDWth8HADKdz9q2H43y3qc9vZQyhGi8CwAueCmkcUHwLCB6HzxEiJBLNg6jNkZr9fLlK4So1Lhrts+ePlXDUBT5uzc3AG4+myOCYCwvCp4CCv5RiOajvdXht95H7yN5v28iKelUD4N7HE3Bo+7HWa+MLspqHIe+6+fzZTVbvLm5b9b3i0n1i1/9Zre+D86+eHLBCG6327t18+nZHACEEE8uzsqy9BAvBX3y5AkXdL1eA4TpdKq1fv369Xa7kVKen5wmZLIoCkrQaWMJiaFgJAjGCQEIgRLCKbNO902bLum7uztEXC6XaWAQY/zRj340m80SH8w5t91uEyfFRdP1zTj2Xdd6bynDLBOTKlNDp8cBABhBY8zQ9ZSy5Wx+f3+fePmpz0u8GGvtdDpNjKbU1z/GJLmMC+ccen8ohInMWZblAblilKZAEGOMc2Y2m52cnYgsC+AZz7UaN9tu2wybXbvaDV0LykLwQDgyJkz0Ms/TtFJK6SNYpbz3Ro9pwpTneVWm+UEIISittNZa25DGIyIjnDnA3bZ59+7dqExRVMq5gGx+uqxmi+/e3ezaQRkoJ8KFSDlEIACeCxkArXHWu0iQcxofwTRKabLYBg8H/63fOgk/IFgej9mSXOHQaZEPjafJgY2ybwkACEL4iGl5OJkPeMaewpCKR9pqHGCP4yfiGf+IwJnKhhAiPiZdfDD2hvddWuJtRkREKmXyTWXv/UV/S/zwkVzv2DPsn7rFx6XwYLkCACG8Pyof8fj4Dy/c+/elEY5mfoRRE73W2qHwkZMQAQhj6L0HiBQhYIhAwn6iii74iDCO42q1qgW9uLqYlMXrd9fr9QOltCwll1nCyW2EblB0TkP0k0kFBLdNY6w1zjPJqZDGO+OdDwAkhhCs1dbqNIwYx9HFEBF88EyKGG3bpGRwcDEwBgFi4qZymTkbEcMB2o3Rx+DLKsOg0kNZayPn6fMVXCBicEaFAITGhKGzrB+HEEKZF0UBnFHnXLAuxpgQnQP3zVobgHjvX7x4EULoxzGZFRMKZ2fT+XzO+WCU7rrOjr7M2GxWUwL9OFSFbLph6HeC0qrMy1JEglrZ05OTNNUDROfCqFUin5+cnCT4J4SQFXkaB2qtrffRxevxHSP86dPnbvQ3d3cQwrQsbIjtbrvbbE9OTn//p79z93D/xT/8ptHGGNM0TUpWOTk5O704n8xnwzgi0ru7u+12e3Z29vKz719fX3/55ZffvLn/9NPno3W//vVvIpAf/c7vTGeLbhhdgO2m/c2XX27Wu6IqlTXamNPTUxNcen+4EDkr6NEoOnHFixKc913fIyHLk5Pd0KQVeL1eJ4VuVhRSSmcfad6UkBgDiQQxIg7a5FkGyKp63mklivL25m3zcM0gFoIuz04XpzMf4ma3jXH67u0NlaIoKvZRwNhHEM0xvJN+tfenP9xCjD5E
CKPqMQLngjEWAZ0PabdsfZxw2bYtIs4XJ4Dk7v7B6fH58+eCgKwcOO2IcMGTrKonUxt13/dPnjxRzr/9zZfL5fLVp5/c398jJYvlKSXJPt9eXV09ffpUUGahzPOcU6q1Hro2LeUEMXqd5zmBMPZDCA4BTD+u7u5Xo8VHX8Q0z0DEV69ePXnyJGnJ67pO9K0Er2VZttmuVqt75/ViXp+cLCdVwRjp2l3KfR0671yggBRQj+rh4eHg95juXhRFIikldDtBl6ndFEJIyrXWznvnXLdr1vcPIYSEOaTrllLqEUel2rbVWs9ms+XpcracGWMGrV0gxsbVpl1t282u23agNfgIBIEwjlIwossqn0yqLJMEwXsbgwOIp6enjLEsuTM/usFprTmn3nvrAiEkk5Jzoaxpm/7Nu+u7O1VPSFVz7exsPl8sl23bXt/cJR83xnOlFOfMOccYJLNQrbX1jgkphIhokESaUhTCB2XvuMLFGEPEg6NWACRI9lMuZKnPI5wB0n0YOaXhcTobYkhFKRBMYQb7/ovuC0ZEhH3S+COrE2KESPejPqSUAN3n/R7OfiAkIsYUUB4/iKY7VNNjRnQ8khYgvsdQYjoAIDFGmWcQSdLpI2ERk5voxzr945pHP7QP3VfH36KiHnDa9ygOvB8BHl/Z8R+7fYigBpmXATBFnFPK088lZ8G5CC4mpzmIESAQjBACkE3TnFTy5PzsbJIHPW42m4TnF0U+OhjGMQJZb97mRUUYL3O8D+Hs/MRG3O1a7+OoXVVP8qJAID44zjmjkVIKwXFGjXGZLPp+9B4QqTUuy7IQ+l3bCCEYA2utEHt7xfQ5WmudgxiBM8aYjNEHbyilQkjGoO97q7UT4hFSfuyVE5GG0NRMpYs3E1wIknZFlGFeyEk+T0okpVTbdwnjQUSl1KtXr15OJm3bbpvddrvd7Xa31+/Ozj+xSlPcm3NOJhPnTdvuiIKqyEWWUwLWu67va6yklBkXKJBSGgk6Gzjn1rsAoJQqy3JaFM65tu+cc1zKsq71oDfbrcj42+/e3t/cv3r16Q8+//533313e3NTlPViNqOU73abh9vb04vz//X/8n/xV199/d1337noi7pK1+x2t+uHoagnk7p68qLI6812t7v54gsp89/9oz+uTr621hpns6q8vX349T988aOf/M6r7/3gv/u//d+fPXvx/NnL0xMNBN+9e6eUooSs7ldpE5bGCgfacJKjHDxyt9stpfTZs2f3v1pJKaXkyYmJUmqt2603Mi8BIADGRHmDgIARI+EiAmn7cdBa346vnlw8ffGqrcvl2eJsMeXgirIeh67ZtYKJX6+/yGf11EUWjhzr4fFcT/y3o5nEe1tCRggQikgiOO+9hRADIRGiD4zz9MKsS67BBDFWVdUNAyKeLE+Bku12S7h49eyZSznIeek0e9gNY98yxrJy9m6z44zdbRrcts6ZMsT7bXO7Wi+XS0Zc27Z3N+/GcZxO68V8LoSIVAxKd12zXW+sHgXjuRQMoZAiOE8xRh8Iht7otm3VMA6jQcTUh6VPYrFYpJrXNE1d14jYdV3Kl09EnqZpuq6bz+rLy8vz0yUhodltoneMwDCMzaCFyKpiqrV+9/pNEJg4S33fp6ZtNptNJpOktkn8qNRVpEUt/VwppZRqmiYFBaR6nHqR5L+QHk1KOT+dZpUIIfSj2naddbHpxm2jHzZdP4JzEBlQAkAYMIacLaZ5meU8EwwBMeZSMFYkDvohhccakyImGGODsZxJUTBKKaFUWfuwae7v769v7XQK9XzugBRltTy/0Fr/+osvtfE+0TMoG7QBJIN2IQIQGhHTphvInoHJGLPaEEjnWBr+kiO2/74ahZhawD2X4bDEH/RkdD/m38+9APEwgYMjkV+MPgkD4TH0IN0YwXQwJC1yH+7t0gmfqEnHtS0dSYgfkFYOFWjPzXm8+6GD9PF9XX9UUAQAQKBI8ANR/W/dDo/2/go9CiH6oGv7rVtiYqfb
wbMmhECRHvqetPk7eLh8sPN4NKlAnkcggMiliER4T2PcR1ciRIweca8QSeklTHBB2PxkT5weVG8IlmUBEKSUgx2GQWVlsW2bk5OF0rqqpoSQ6XT6sG2V0jzLjQPOBSHUxxBi5JzHgBgjEiyKQmudyUJrTZhMp4eUMuWIEUIRQGtf1zkhe3ThqK/d2/3EiAGD1jqvBWM4DEP0RimVLlujukQ6kzKPlHiHyhprXZZlfduFEGT6NWVcUESkMjt04eLo1nR7S/2kNDg5OZnP5wCweegZxbrKvXfe+81m44M1LgyDRox5NueU+GC1dUwZgsyhPTxmAFIUhQkuxuiRJGpeMramlO7adr1eM8yevXh5d3d3fX179fTp3cPDru2eP3/+wx/95N/+23/781/8/LPPPv/kk0+GYbi9vb69vb743g8Qn/GMr1dba21EHLXdtF3lQtsrnsliMs3q2e3Dfdv062783T/8g/v7+81mJ/Pi9Pzs6skL48K/+Tf/z9lsYYzZ7Npts5vNZp988slJs7i5uZnNZsfXxYEqlXxnkpGCc04r1TSNtfb58+eJBoiIMitSQSnLUlsPAECsf0RHCAVCSC4LNQwxhNV663SXC3q+nC+mNfhR5iVDvzg7X9/BOIwhohrN7f06ImMfsM6OJvAEyfGuFh831C545j1FiHudX/QxRgiIyAklJCXT2mSNipQWmbjerJISruu6EOD07CKvqofbm7bZdc2WImSSp6X/9bt75rqnVxdbY55cXD779OXNu+tffPn1Z598erPavfnu281qRQgQwNvV9uLMPH/+fFRDs9vd39+OXZ9JPp1UYIEEzwQdu1FSUpclAjRt2/RNJIExtlqt8jxPabqJvbJerzebTZ7nk8nEGHN7extCWC6XjLFvvvraWDWp6rOzs7quAMKoejUMeZYZpb33gnNGqBmVGscYQllOUquXPq26roUQ3vu2bVOfB4/GVGkf6gflvU+Rj8n2WnBuUmmklNC92573fjKZLBaLel5Yb3RnlHbGhm0zrLZN06lhRGWi8RApMMa5yESeZVl2siw5ZUAQQkycHZqUgkpZpQZrlVJG63TRSim9KJOc31q3bYbNZnN7e79uIWdwdn6JjGplF5OpdeHrb767XbvJhPvoGKfBR21sAOI87DfdWZYaFOdsKqKMsb4dSEqqAyAEHgmTSWVAU/xv2If7eEKICw4RU1oK4QwZJYQRwkjSq1EaD65mQBEQSMDHcd+h8ULEEANApKn74ZQwRjijjB3K4WHFPy4tH1U4AOBMpMONj0nl+2rhfbpiDpDXPhTXqkO0LkEaY4yPGQuICLjPrttDhIgRSUQChB7sPSMAEhIAIiIQhMeU9ogUCJL4wawu4aofNdCHmpdkMB/1hcfYKTnieaa54HZs+lHZEBiljIiAQIJHoCH6gzwxEdCQYgQiOX1yeVqx+Pe//GVB/WcvnkwndUQQkkfCoQ/WacZYjD4Ed3N/w393wRhBAlqPQIBzrtVovOvVCIRxii6GYK1BEIIKIUbdBIgB9qaIieWEFBLIHBG834sxGGOEciGEs1EbY0PamijvbfCWcfAFReRaa04heScJxsKjZ0KSzoTH7T9nNIRIKU2KXkaAEGK16ZX9qMu3xmlmPvv0U0Tcbrdd1+31uwhSyquL0lqry3wymYzKKKVizCaTyXr9MGhzd/8wqatpnVPOjfVa7/LFHCiDsD+vhBAYaQhh2/XT6fS8qowx22anteZSXl1dGY3ffPd6eXry4tNPb25u3t3cnJ6ff/Xdty9evPj+j74/PZnd36/+7u//lhByd3//+vXr+Jd/9Yd/+Ifn56dKqXfXO5kXXAo/jP/5r/6qqCptIyD9/Ac//OzzzxnjXde9u3l9enr+k5/+rjbhr/7yr7/57p0QYypdZVl/9tlnb9++/bu/+7vypnrx4tnl+cWuGQ5bq2NoPVWExKcLITDOnXP39/cvXrwau5FEkslMZllyaOOcV0UWAkREwShh
NCEahELyB5jPauO81vZnf/vzz14+nVbyV7/4+Q8+efnTH//gxctPMZJvv/7SR5jM5re3W0JydthXJsDycFgfYTiHPaAxZs/tjBEBKBLBEOM+rtN7b4xTSoUQI+xV4ens1EYZ407mM5ln33z7Oga3a9q+GzlnEQkgc5Hsmq4ScNN0P3n+Msjiz3/29wTgdHn2+ubh+u3rZruxxkzK8uz0dLGYnS6WeTnRVtsmOgK8KooyFzKL4L2NHsH6gCR6BGfttm2atgGAvveU0svLy/Pz83EcU6O92+2ePHmSvFR2u13yJDPG3N3dPTw8XFyevXj2dLGcMuLHcYzBSSG8t+PYM8Jn05kaze3tPUV2eXnZQQJVXJZlSeitlLq/v0fEFJWeuswUeqCU4gEOUEyqPcnGbL+NjTGJ6DnfByUHUMMwGhsDMOPCrukf1t0wOANofbQRKBBgnOWymtRlXRMcECmAj4CEAMagRz0MiiE5mJF65w6ciKyaKKW6ZmjbdrVZbzbdoAEAzp8sqczGcSSMRiDffPPNd9895BxigBBiWVTWBxcgBEcIZEWmH8sePrZi9JEzmfoe8jjmOj4D8UhUlwR8BBwiCg6HVi8tzXtu2wczqvRADnGPFr5PxkH8SIh93EIdl73jDukj6O/4vse9UbodGCXHf5Ma6KP+jSJi/LBBOxzJvuz9Vg0+rr4f3chRu/q+VOP7lnePVT7qEVKneVwOD2EL4cM0osMBXF9fb3Y7bQO1NjKHhCcyrTUewQNGoJCQg8gIIGWU7ZpmCMr0Q7XMA4Zduy2K6urqajDufrdr27jZrHzcT1WTX27yPcmyDCk1BowxXGSICI+yE0KAMQYxotl3eFo7Yy1jPALkOaTIT0r3eyllTJ5njDGllLUuBPAeILp0fqXPzXtvLRgF2YSmHC7JEx2PxeC11hExYtq+0H4YnAP+/yXsT55kza77QPDc+Zt9jOnNOQCZQAIECJASq40yiVatVa9715te1R8ms65tdZmsTVRVl9SqpjiIBAkCSGQipzfGi8nHb7zj6cV19/D3gLJ2C3sZGeHh4e7x3Xvu+Z3fwDli8FYb7yHgarVCLqOYKkruvPfWu4NR5bjXOgAAgABJREFUQJqmp6ens9kMAAajKaW2MdGnt6p4ADpoG+H0siyNGYLREHzTDVprJXkqZeyBbPBSa6ESLiXnLFD6g08+3dTbddMg4nQ6pZR2w9B13eX19uLxk27ov3nx/OTkJB9Vq3ojhNDf2qdPnz58+qQz+ubmRiUyqZJ8kr94c/v5b3/9gfloNpvyRL25vF6tVy7QRx88EzJZb9rr28Uvf/Ob7968nZ+ezGenjMA///pXv/3idz/5yc8fPH70+vLm7u4OkKk0WS7Xj7w/Pz83xiwWi5vr667rzi4eHwRyx4TheM44yOdiuNhyuby4eJgkycnJSVEUVPC2bbu2R8RoERczRgK64AHBe+8JYd57SrgLNs2zZbsZjBuzPAB+/e130/FoXlVFWZWjibaIznRBp3nL36FfH209780MyGG2B3viOKGMkN0wHgF4oAwA4Z4YhoBIYlwqo8Q5V5alEGq92q7X6xCCD6jyArzbtgNjjFBOuRydT0/m82J29t3LV7/65ec//uwHZ2n5z7/6DcWQZpVKgDJCeJoWE+3J189fN8Te3dzU9TZRwqHdtk0iWZlkliBLFRNEe9f1XaP7Zuid8wHV48ePz8/Pyd6fjDEWySzr9TpC0mdnZ0VR3N3dLRaL+Xx+fn4+n8+5IM60+z3CG6Mj87zv264bIKBIBGNMd0283KNcPaZ4RMg0btaxtY81zzmnCLfGgg+pVCRix87F7SBuVNHdYDweZ1lmrdWu7bXudTAaV9thtW6axmoLgXEgNEmlSBOZCJFIKgVhYPuBSCAEvUdAj8EZY9q6jTnOgu/8aJRSSighxJvW3NzcLRaLYRj63gwaUgWTcTmezKy1EXa4vVu+en1tA+QFb7ULCOPprGsHRNJ1jnJI
03S9bSJSfjT98s7ZJEkUZ96Ew1wq1rP7socQAniPiBYRKQZGMRAAtuufYrg5YxGoiPgaBcTYqBBPSPRS2VtfRpxzN0snEGCn0fT7oL4DIPlee3SYbx2XnGOvzvdq0vGDHHBRymP/RKNIDwhhhMcNfV9e2Z6cwwkhntwfio8r3/Fh+biyUkKP74m4z384Siw69KAAEPw7WovD7T2o83DI2GwbawMGsNZ7r4Xa5TgyxgADgRAwYAjofaAIBAjSm5u3VQLPTmcXF6O2rZe3Nx9++GGWFUyR8Xh8dV1bp/NMMU5G4zLaJ8VfXZblYINDCIjxHEZIbIsxsiWHvk2SDBGTJNluF103jKoMEauqREK6YYhjoRBC03STUcUY2zSdd7sDECJSSoSQnIFKANF0nXYa1InSrovvQJ7nIYALu7eDchpJWIPWSUKikqFtWwBQggMEgjSR6XQ2m8/nSZKEEAIgYwwgOOdW683bt1cAv4tDfULIo9MLDAQJcKmYUIyTvnfbtpGSc849BN2Zrt6gh6pSYjar69p7b7yTUiaZzwhRnDHGtNZVVY1mM2tt3TYRpJ3NZo2XvfdEyG6z/vK77549ezY7O3358mXnjX9DqqqqZmOaCmOGDIv5+cn3f8i/+urrL7/88tmzD5OiDCH0RnOR1k3jQ4dMPv7ggyTLV+vt5c3tttWVGj763vcQyWK9GHo7m81ms5PtptXWnJ2Np5PJeDzOU/XihbRWP3jw42awx5DJId5OKWWMie9tHP1EkDm4UBWVtbbe1IMZlFKjcSWEaLvOORvLHkUSAgYIgQRFVGwtKAkqUw+fPE7zJE3zz37443/+h7978+ZNu1mPi/z8/KLZbDfbpvNYtx3//XPlbt368N4Ci/fhnHMmGYt70d6rMKCIExeknMTZO4vf8CGkaWqNjjz+9Xp9e3tbVuN6u0YmCATriQcihKQYfOiAy8/++OdvXr766rsXP/rZzzKV/NXf/h1DWhVZWzeKs9PTB+PZtO1M8JZz8fXXXyyXK2dhUiXOmVTIajS/ePiwa9ZpmilBbDcYZ4FSwqjV/vzi7OHDh8aYzWYzmUzyPEfEoii+/vrreGCP/8YTWVEUT588rUaF937QAwUrFbXGNfWWcxgXI63128u36GE2PYvSyI76qqqigrBpmtVqRSkty3K9XuNeBG2MaZomMkWNN03TxCsAAPq+DwQOenYfQvzxiLi2bWvo4H0Yhn6xaBervm7Be0Ag3gemkqys8qpkCUVEj64b+pJSxggihGCdA8aYoEwpwVgppUyTPCoUrbV60HVdX9f+zdur5bLnHDiHJCPzyfTi4sJ7SwiZTqcxWX7QUGRgrYtRZ2Ux6jsNAH7f+hxqxqGd8histUVWCUq0NzFPdQ/vUTzMwPa7/J6acc+wP+5C9oez3Zq67738O2TI48boUD+iZD6WqJ271buWmIdpxKHs3Vc+fIdBerhDzO08rqPxR7TT9+sobohkZ6gN+/kl2T0aJYQE8k6c+nvcmfc+J4TQY+O0I5DT71XnsRCyvTN1AH84ceNB4XAUwHJ4N/aTMBSCRy9C6y0TAYCGgFJKDABhZ+HkgnUBgDAbLOU8TUVMy2N7xvLq5qaczC4uLox1wHiE7uMEsSgKj8A5L4rCbtvY2MVnGLzTWjN0hBAfbN/31bgEoFLKiFXE00mWZYOxWrsyz7IsM7rtut1FIgQwSggFAsSa2PYSYLSqqr5ZaA1OA+fc0t3YSQiBSGjwwRNPgFIOnFNKo2I4Xv+6b6WUs9k0TVPCc6VUVuSMsWjJFDO3v/3266qqHIbI84pNP2Ps6y9/d3FxMT09Y0x4AgRZIND3/dXVqmuM1pBwSBOQHLpOX7nrZ9NJBKiNMUioEIIwCpQUFUFERmlZlkVVdl232mzatn32wQdXV1fb7XY6O+lt/8VXv2OMPH36dH46XywW17c3VVU9eHShlLq8fP3y5cvp+OnHH3/UDRqAJknygx/8IH395vPfffP06dNi
PGta/d2rN1e3d8aHQdvbu+U891laUEqLYtw0/eeff+49ZmnZDb21/vPPP0+S5F/+yz/N8/yf/unLy8tL4Co6t+3iMPfIVhw2xS45ppNmWSaECJ599NFHVVXVdR3nf9HBXyVJCAEBo4icc4LRL8XAeDxeL5cqFUjIaDS6vXq9WdycjIvVptbaNtDlSk6qUbzCb15cbuuWnZ6dobfBmmBN8BaCp4CUoNG9dyYEF4Lz3iJ6SoExkgihOFWcM0qd89YHwiRPsqQcDw423dAaGwgNhHjEAJh5pyidz0+qvFyt6751IJJuCMBSbSAAL/MxZaLZdkDIo4eP/2//1//L86++XF2//fDJwyrLN6uV4uL09NQ7R4DmRc4o11anSUqAvHjx8ldvujSbnk5Gk0SdSvZslJ2nJDO9DDYBQgPpO7vadNtae5QqLU8vJtYb4yyXsiiKqhpHcqnWgzFmGLo8zx6cz9NESIknJ2Ol73LuUuEFGO+M0xoc4UQYS5vWbFvrCacq9Yx2zmr0heLz6bjMUmcG03cMkKDv29rpQTBC0A9t0zc1OkshkODX7WvglgmCNLiAASgJIngqeOodSiHn8+lonAIYbbYI/dv65HaNNwu73NhVY9sBgEKeq9lslCdcsKBYSDnPBU+ZUEDBa90PRltOuVKZ4BK5oFzlk4lIiwHIutOrflj3+u168+r27rub5WKruYLJrEpSGZxNUjmfT7777nlVjU3Ab15cXt3VgTFQBfJcwsApZJXabNa36w4BHj56sFhtAmUqzQhlzgfJGKeMIjIg6B06lyUykZITfPTgnBFQnGdpwhlLlKQBunYLzilOwFvBUQoqJZNCciW4EBZQO8vzlEjOlSCcEeqRBs5RKkYDQ6AQCIboYcQZUEAiuKCEM6QUGCdMcZWJNBWJBuBCUSZ8AOtDQOICGueBRHIuI5QjofHrziNKgZyB4IEzR4kBNN5r7ykXwDkRAjhnjBPGKOeE8UA4YZJySbmgTBDKQ6R0UhpleZQx4BQoCRACBEHFzrUFgUbEjlAKNLgQeagUKKOMEcYIo4R6QI/xIw7/aMA4CSKExHGqYJQRoBgAAwLnUQgIhASP3se+BgXl1ljBhZLKaWsG44w1g/5//+atp7I1vtcDoGPEC044B+cNoZRwSahAJBQop1JSMRibSGmNSRI5m53VzdC6QFX1u+9ezU4f3623ZTnN8uR0OilSOUplmfqimsisuL3d3txu8rS8ulqkiuQZp9QN3VolglHCVVIU8+cvr8ZzRTmViq9W62GondP15lawwGl4/GA+HmXbzd3dXTeqgElqneEaWADugSNIQEUxoVRR5rreaY8OlQAXfJIXKks8IU43WZEVeeGDM8NAASUFhtjUmzRRzgcmOFHqerEoTk5vNhvBsTedccOmXjXd1rjhRz/6QT+0aapUIgmELE1m04kU3BrtndVZOQB5cX357YvnDsyTJw/OzqaJ5InkVVmKhGvrF7W/W8NtC8uODXll0pGTRRdgCB4YcGrBaUF1GOpge+INIyCkLLJ8NBr1q9txKiZVzgHB2TzNqqIkhAmenszORtno+vIGLKQkUSSZ5bNVW1vrsiLnXMokHZVjztXPfvrz5d3q5Xevb26uP372LASfCq44F5T84JOfvn590w7+6mbx0cefTmbzTdtYHyhnQCnhQjvXDnY0nVez0007GK2VSoSQxljnPOOCygQJ9UCFSoHyXtvBE6QCmOQqQ4LAKFcyyTMkpG5bbYxUKmZoAAKnjBIKASEgRRIE6fUQfAO+ezRNP30y//ThbKZQ+v7nP/y4rlcqkb2Ht+u6PH/YAL8g2+3tK3Zy8fD4IHl8oD58/QDOAoBUaTzPHgwsDmbtcQp1oIbGx2HBBSRAiPWu6wfrXEDwwTPOogmVNdo7U5b59z768JNPPrm9fjH0RiYyS3JrbV3X27pu6ubq+ooQqMqiKsssS4P3b6+uvnv+3KmCU2RoJQmjREzKJFWCIGhrgTAXsOt1
03behyRJyrIEcFJKzgUhILhkjNd1fXNzE4M/lJLjSSU5b9pt33eU0nEiGGPOO2OMjcw3F7Q1QGjTNIM20Uonnq8ZY48enEfxXyRxxeFZZLJEz5dDbns8+DjfU8opYd6jtcG7wBiTQjIWTaslF4xRCgSd89roqzVE07yu63odvAfOQUrhormaUkIIBDDGtLtgP6uSrCgKwsQwDL02jLM0z/tea+Pavts2TV03q/X67u7u7k5vel9VMk/VMPQY/MnJSVnkbdMQQgnndd3cLVbG+iRVUiWIYNrh2dMLruT11Y1HTBIeEKL5VuTyaK3Jvm/bNUNRYU0JRf/4wUWqRKqkYFQwmqaKU2atCT5QGh22gFImhVRJqlQipCSMU0qVVIRRSgkiBu9CCBGiF8Dj8BCicfNergA7/uSu69qjqQE5PcwL32vjDq3VO83fe4giQCTUCC7u+8WjtowLeWCcHjeUbD/zo5TCPmOdMfbe6O/wYL//CLsnQN9fuceYzR9ofI/y4mM0QbwD22sEYx92uHT/7psrrbXbQ6whBAASexdEjD7ju34dSAjBhcApYRBGeZInqutq9Hh+fv7Ndy+effjRar1ZLJddW+dpMhmP8ix9NE+zrDDWXV5eN+1QFOVidS2lKMpMSkEAKWVRo82oWCwW42lCCM3Somlaay3ncrPZeB+yLI8q5LZthyFkGanKUgrhtWWMM7o3wqU8duNaD5Qi48AYEBooRSm5lJIFE0Lwzse/aYTfY1QfZezk9OTL330Vgv/+J59AbESWi8ViwzldrVYPHjwoy/I//+e/attaCHF+fn5xcVEURdx5Li4uPvroo+rkPHjvjAaE9Wr13bffdU0zn88+/PAjPdjgcTqbjccjY1tjvFT8+m7dtatEirLKBIWhazhl0/nUOs8E51w6RGMdEpqkWZEXnPOTkxMh+HK53NSN4CIAbLbb15evl8vV97/3vT/6oz+yzlWj6ubqOs+yi6dPbm5uF8tlkZd/83f/LU2yu+Xq7dX1YrGcn55IlfzlX/7nJ88eSym5kH/0R3/05tWboihuF3dv377t++HJkyeD1oxyrTWlzFintWaM53me5llRFJLz6PsRafPWWudD1KEfqozzIYTAOZdSzmfT9Xrd9/1kMhmPxzH0kVKaZdnB3JHvMykRkQouhZCMMhIyQas8yZWQjAjOGCXr9SpN86KqpFTjydh5n7nWWcfxXRoL7jW8R3P4+yp4ECcdQmjJQf/k/R/cKYAwY61vG6BsGAwQBowi+qHtAzpExzlNRFoVGWOk3qzRt0VRcU7rTXNzc7u6W7Rta43J87TIK8qEtp5wNrTdYrP1SMajilhNYJCM5Xma57kU4AeTJAmhTHuMa9h7H98vRO2cy/M0yzJORd1sVquNtZYAs9ZmSS4oW61W23odFWzVeOqc09o6F+KA3XqntQ5gjTGE8eg0ZowRUo1Go/l83rZt27bRxTv+2eJ8LiIVEQ2LZm/GGESGgQQCzgZjDAbGOWeSAqBMhJQ8IBrnCcGuN9ttu1rZeBn1PRoHsQVnjAmlkiSJLBLrwwFdDAStRx5AqESlmfPBeLfe1HXdauubrm3aTmutXRgGGBykKVRVRTG0TUclRk+ZxWo1mczatr29W9SdJwBRhm/MQAg8fPjw8va67oJKYTwe3y3XSZp5CIeJ0c4qGkh0bNkpyAIRlEoplZJCiMgvZ4wJRhljFgIEQnZZcSbKKGHPwtgVDAQSkOzXT5RbKyYopSEgpdQj7pR6hLj4ttNo5okAEAA9hgMX8RgLjZO/w2V8jIiayKg8qjRM7Cg2EI1bADwgAWAIJIZpHE3RYE8ce68sHUraMbXkGL08iPbee7TDHY7L3oGtc1zwdtPKIz7OAclkjJGwSzc8eNNHK+qY6hDnLnFAFbSOGpsQgg87VQAhJASMQazxHeecxzEYBZByF5XQNM12u6XEc8DZ6HtZmhrTxQizGPjlfaiqqqpKgJ2iKwSIGHIbWu89Ixx9
sMY4YyEgo5QzFjyURRGvt0SpMreSC2+N1poLSgknhAVPjIlBlt55ax1wQZTiAN650LZGSCallCLBAD6EuJwQse+1c0N8A5MkOT8/XW/rL7/8UmTJZ599ZurWGPP1118XRQlAu264uDiNe/TNzV3TNAAQqW2IhFI9Hs3n01kif8oIberNzdurxd3Nd999d3e3/OOf/tw6+C9/9Veb7fbHP/5R2w+/+MXneQFt4757/qJvq9PpuMykA1Z3zup2FCMZCCOEgXdts6032ydPnrx9exUg/PSnP71bb/75V7/Z1pvxZDpY8823X/3X//qLP/3ZZ9/73ief//q3f/EXf/Ef/8N/ePLp9xBxPp//6le/+ulPf6oHl+fp5uoOwf/jP/5jNZn+D//D//2fP/+8rusf/uiPvv32277vP/zww8EaSunz58+n0+lyuXzy+BljTMrEeazrWmt7fX2dlUWsUjF59ICoA4NjUP09XN05F6UvNzc30+n07OwsTdNINYgT5UNDFa/wrtcsI4JS8LTTpmn7eZVN5zPUmR66qsy5IKZrVnVLMBhnT6uxNZ7Nzy+OBwbvrM9w7+q7t2+nh27vcKg8SKkOmoz4CuNNEDDWuoAegw8hjjCcs2miIPg8Tc7PTh4/OBtXFaLtmvrs/JRx3jTdmzdv3lxetm1HKFFSzecn85M5Z1wb473f1M1ms6Wcs6yg4DOK4zyZj7IykQS9tTYvihCg7XXTaW0MAInTsiTliFgU5enpKSBcX1/XdVMURSTNp4lyzi6XS8bobDZNkmSSSe93R4EA6GzoYxjQoKWUZVXFIwwijseTs7Mzir6u67Zt4984Vtx4Zgz79Or4xsY7MEZDAD04rbVzntB4xqSEQJomXEnnvbHOWL/eNDe3y7va9X3f92gMAAEpIUmVUipJU0KIj3IrHxhjKknSNEVGuZRCJkolwMSgzd1ifXV7u942m+12s63rzjmPnDGViiKTSVkF6/TQi+i0JHhAHwP8NpvtemsRQEmihPLe9YM+nYiTs/M3b96um0FwUo3G682Wc4GERFeaEILg7ECgZ4xBCABI0HMKz548LlKVCC557PYSRrBvm2EYokjAo0dEQqlKMpUkjIvofBK7pdj8xGqzS1ogAgAieRF3NC0ExBB8HPFQShkl8WgppQTO4iZ+PJmDPfnowHY5GIAxKRlj0Rx8J8LdSwAPvdheHchoNO56r3WLM7Y9zSRKO+4XFLxTvf6PPrkvZuyeH3tc+Q6Hg/eAHNx7jYYQDp6llFKyN2qJIqpIPei67u+/uzHGICGH453z4aghvv+lsVwTxggGEtx8XNqh79raOjudzTqtp7PTV2/eEAAKZHm3HFf5qColNonKGE/Wm2azae+Wy37oz8/PQ3BCsqjXIEAYYc4Fa91kmjnnGOOr1dp7zPNcD1YIIYXUwxAwJFKlmUpV4rzdbnrBgHEi5I4g4yNsY2N0EUjJKYvOU0Coo5RknKRpGusWo0wIwRgPIcxOThglX33zzXQ2m85mX339OngdELttTYAyzn78oz9arpbO+vOL8/VqwzizxvngGeVd116+edt1bVWOnl9e1ZvN3c3NzfU1Ondxfv7BB8+ePHlSltX1zZU29tPvfzKbT1++fFFvt59+8nG9vc1TrgRjjFVVMT85A0Kvbm65TBwSHzAgEYJLKZ01TV17Z9MkLcqCUsI4H4/HVVUCIdPJJMuLNJGjyfibr7/54WefNW1TFCMqufdhvd4+efL0g2cfbuv29u7uk09/+Or167PzB23bffX1N6enZz//k5//b//pP+d5/uEHH4YQvvr667Ozs1E1rqrqq6+/7to+rh4h4mnYWmulUpPJZHl3V5ZlURQRPxBCUMaHYQC4p3Ea6+ImGeUr0bU/horHYADGWDz4HnNB4/IZrOWCM0KIN+Cs4jRPeCp5niilpOI0SZKm3t7dXEPwktPTqgiIbHZ28V61O9S8w8HzgGdSSmWSvldyd/w6xENE0TFXlTNq
nCOUxiFLBGadsxTQe6MEHVV5nitCMJF0XJU8SW4Xi1dv3izuFlobRnle5pPplDBWFiUTMvqzrVfru8VSG+sAiHe54tNCjVLBIaBzIXguRDPoTdNpbYEQKVUiBaM0Ubwqy/FoQglZrzfr9Rq954zGKauSwhhtjJmfzE7ncwLgTW+d8yE4j4MxTdt2Xae1JZTmeS5VEnXNVVWNRyPG6Ha9bppWaxPdGo2xiCCEdM5H6//Ieov7cQhIidCD67rOWscYk1JQwYGgTGSSZYxzY73WtuvNatksFrqxznuwAYCAEJCmPElTLoVxbtC6G3pjLVDGd1l+DDlXSUa4XG+b568uX7x6s1hvrPNNOwzG+wCMcyEE54oKKbho+2G7bULw4/F4Mh5zyoGAVOr65qZpjA0gGahEYQBjLQb/0QdPmqZ5c3nNOCCAShJtLAIQxg7Wd0IpIDQ4t9t5ARED+MAIfPD0aZ4mSopEck5JJiVgqLfboesi0ZwzFhAZ52lWJGnKhYgjMbJrv2KBiQlEhFIqCCeE7FL3dts9UNjHLUDcrJFSuqt2UkmphJD7YAYWrTUZi4o6Ev+NhY8xzqRijBPKCGWMMEoZpYwyToHGnIh9iRSMC8r5YVHAMRMHwPn7xHMarbRDiC5If7DOuX2K+jGeucNwj6raMSj6Hlgab+4oMhr3CxwRCe6ibaJ9Qfwklj1rrUc8UBJ2CWg7p2x+oLxCdI4DwODB27P5xA49+OCDL4sREDabz9ebTVPXGHzXuPk0L4v8ZKy4lITwvjddb169ftP1ZjabCMFiBKH3XjChpPQhKCETid65RCVDP1CgeZq1TYserDab1do6V6RZXuRZIhGCNz36wClKwSWnjMbAwsBoiBoXRj2nlFGMqk8K4G1fjUZFXlhrrbFJkiRJyjnvhn4+n9dt57wnFGRCEcirl7ch2JevbsaTqu2HbV1/75NP31y+Wa23WZ5lRZlmqUxSlSguFKHUI/TaeOuUSvIstUZfX10tl4tgXVUW89lcqaTrO6mSp8+eFHl2e3s1rorpeKyUiN5JhAmZZkkxAso9gLHOOhcwcEazLBuPKq0HlSRAoGlaF9x4VI3G4zzL3r699j785Mc/+fqrr6YnJ23TCSEfP3769vbm5ORkOptfXr796quvp/PZpz/49Hax6Po+y4usyAOS5Wr9N3/9N//iT/9F17Y3N7cPHz5Ms2y1Wq3Xm+l0KqS8vb3LsizmejnnIkYupMyyzFurlIqKhag8AUqHYWB71pL3/gByCiGk4LGLiPKttm0RcTKZ8KPDJRzzk1WqYpuAHoIlwVLwaHQiGCMhS5LZZJymSjAo8nRUZKlkPjg2P7/AI0fawwp5R+p7VPa4VLshxLsQ6HEy3+EIyRhDEjwGypncUdcoIeCNbZttkSUns1GWSDP0VndK8tGoutt0V2+v7xYrRFBJKgRjXAghi6oQUiKA0aZu2vW2HoxFQp2zSrBZlc/HRZUwQYFTIqWiXBjr+sFaZ+O2FbuHLFMXFxdJot6+vbq5uSaEAsJqtbLWSk4ZIyF4lYiqqhilXddRtN6hC8F5F6cdzgXGWBF16AG995FpSQhZLpd920ZoKGKYcdgJANGcBY9sG3cUOIt9NwyDBYAkUXmeCyUII2VVcqkCYt+bbavX23a1aeom6MiEp8A5KMXSLONS+GgdQoBzLpMsyzKlFBJw1hlgq/X21eXbl6/f3C5qbRwwzoTSLhBKhVQyyQhlgzFN17bN0GsnBC3LvKqqNEmQQN/2q9V6s+kjppqmglAw1gAhWZY/upi/evVqW7u8kIRQhwSBhABcikPG2w6L926njAZCgQTvGIQPnj4usyRVu7KXSEkB2nrbtm2cBSaJgoCM86KoyrLgUlLGGed4lKXAeezMCAAoKu8FCXsVOGWMMxpHfhhCnNBFsTlPkoPDPXlXmQBH6ovDt/DoDhTuy89+PHcvLoy3uIbf6/YAAOEoxPxILMEZ//0OjxzZ0LxT836v7NFj
ceS7E/r4FXesakC4X/gBY2W1kZ4LECfrf/fNlY+m4IcDLpBYFCmljPH7OWiE7r2niATc2clc912aSACSF/mm6c4fPKKUffP1cz24soCLs9M8z84nSfDQ97ZuO2BitVxxwcbjMefUe+us1X0vOBNMDL1J0wxDj0jKcmS0AyBCqMVi0XdDnud922EIaSIZI+gDhkAJ8c4wjrsrhFHGmFQySSUhLiB6D4wh55Qx5AKEEOhcWeZJmsawqCzNyrJK82y5WAgpz88v6rpp2/bJkyeb7VpIenfb/+yPf1wUxTdff/f8+erVy2988D/+0U/qulYy7bpuuVinWfr40dM0S5u6a7qu3myff/f8818/f/7tZd+vwfu+71erldZmMEPf9ZRinudpqtJEnc5G1gxDN1DOCOVNp5vBeKRAGBCepGmSpgRRGy0ozfM0BByGPnif5bkQvO06bU0s3icnJ7/+9a/PLx4QxrKi6OqOUnrx6MFqtYo43MXDB3/5H/9j1w0PHzwsy/HNzfWDR4/TJN02Tdu233zz7enp6c9+9vPPP//8iy+/HI1G52cXzjkuhHfBGIMIjMdEPe6ca/tus9k8e/IkEtq993sOBDLGOBeHnsr5EIF0pRQGvzuvI0aoPHp9JEkSobLDkS6ufYuAIVAIjKKgIClITiWn3uqhaQnBMk8no6oqck4QvE2kQAz8sP+GI+9dcmQKdVj/x5jnYS0dIM3f1xXtpw4UCItW9AECCUAoEuqn4+KDZ0/ms3Hf1F1rsyQvstwZs1xtu8F6BEY454IRlFJyLsejqdbaOdv0w91qPRiblmNCSDBdrmSeqjxVUhKGhlMupXQBo38VIcRZ572nSkgmyqoIIURZuhm0EMIY2/d9madxc0mUSlNl9XDXbRljmVTW2l7raEhvreecqyTJsoIxFpOs8zwHgLZt67rmgLgPzIt/pAO9hez57nF72jFcWrTOBAQhOBNKJEqlklKaZKm2ruvspmmXq2azaevWGg+BAWNUcAoAUkomBCIYa9I8I0wIIShnQEivjbbGGbvUfr1er7eWUhiNsjzPAxJjDBPK++ACOOOttW2rtQMKUBQ0pi9xzrU2XdetVqumMZRCAGD7zsO5MBrl52cX2+12GAYpAADKslzVHQIhnHMhEKh1wfvAmA8hBEK5kNbaiOh5h875iHzGkxxHL4SIUTzonUfw9J1I2MPOLhg31hEKBIAeoE70uAvSDRQpIu4SjTAeCQhjDAkJhJDgIyBMvGcYAuy8+AghhNEYjr7TOcCONXMoG9FsLIKl8WeiYDwGfNO98TTuY48OAon7pQQAe6ee3TDP38829qvmXU3Cu6AlOZI3HHvkvvfd98ree48cJ6CHFU3xvswflna85hljEZKKe1Oes5jS7L0nzh1s9sL+fCw4A8IIIVrrtCqZCC7Azd2y6brT83PKCVpM0zR4mwhurE9TFVxomsZ6JoQqR5VSinG03RDHBKlKKKUhuFSKXgPAPgVXG8tMcJ4gKCFTpQgnkvGh6+2gqWSpFFIoAAjeW6cBKaVcKcF5mufper1sWxsQEAOlJJLIsoxqrbt2KPM8ri+PIVHJj370o+cvX97d3aSZEgmPhItU69n4jHP5N3/9D87B9z4+r+uWUfGXf/lfTk6Kp0+z8WhKQOjBXF/fFkX2+PHTqR7MMFxeXnr7xlsIHl6/vV6tIMtgPi/m8znnHAGy7O10Oj2ZTe6u3ggC43GVFWNg/Ha1eXt9+/V3r3/42adodV/ljx+cKiVN3y6XSxL8ZrN59PRZWZbbujboi3JkfLi9vbl4+OR3X33zb//tv/3yd1/nCMvl+hf/9I/m7/7bX/z3//rP//zP//4Xvzg/P7+6uf3e9773+W9/q9Lksx//8Wgy/p/+/f9rVM1Go9HDh4+++PKrX/7yV4SwR48enZyfffvtt3//93//x3/8x5988snlm6thGLS21oXRaBStrzZN3XVdbPKiImVn+Mc45xzeReAP16pzLssy
Qkhd14yxPM+994vFIm47sA+VPKwF5wISpAhC0ECocdb44APt2qH1vbNDcGYyrhIli1RSdIlkFCSbnV0cM1AOy++dpe599B1ARCGTQ7cH9/62eDhiHxbS7jEp+BAoo4DgrEX0FEPwdj6bnJ+eQrBvL1/rvjs5OclStVmvX92sjbbaOOscAciLYjadTKZT71zTNCEE68N6s7UBuFTGhUrSNBHjPKlSoWgAbyNvsG5aY72xXhtrrWOEFkU+qkZn5yer1XJxtwBAHsmN/SAom82nSilGSFHkZVk0Td117Wg0EpQ1bb/ZrNu2jWPVeCpJklQpNZ6MY0BwnOcBQHAOEXfuJ/upntY6+lDEUV9sL2LYXtfY4JFyIqVMsyTN0yxLkzRlUvbabrbtatMt1+1qrQcNQCBwYJxHII0KQZlwiIM2XCrCOFKijVlt6+V6s9nWddt9+6ZGCKNxOTud52WFhGpjBusZF8bYftDdoPvBWQ+cQZqqokijIlsp5axbLBZta5QinNPYtxAaXyJMp6NHjx+8ff3SeWSSWxem85P1ZuuclyqRMkHEGEZxuLyj/6zgghEIzhISPvrw2ajMCqUEI4KSPEs5habeNHUTMFAEoZS1llBalKMszyNpi3EeSSuMx9aKEAQfnPeeAYv0FojeJLiD8gIGemhZOBX7mHvcPcL9DO+4YaK/dzPxvHl05/if3Xlx3/8djpIRG3wHGgEAAJUkhwkZknsPGgj3Bez4dniSO6B4f6NH8bnHIOdxwTsmvPhjzifeP0+y//wQehxPPH/zu8voQBmxHCEE44IQEsnJiHAgxQQE731ATBNBIUxH5c3NVZYkzjmhkpdvLsfj8cOHD99eXgoWKEFG8Onjx4XC09NzxuXLl5erVX17e1eUeZKo0agMzqI3VuuqqIo0t8aOxxNtmhCAM7lcroa+l1LpwSqlvLXGmCxN8jx31vRDJwUryzLLOaXEea+1NsZiBNMkn0zH3jtj+vgqEIBzopQaV1nT9s670Wic5bnZp1FKKWfTadt348nk6vpqvV6dPziXUilZfPnllx9++JRzcM5nWSaE5By0Nk3TIML5+fl0OlutVldX11rrPM+cc6enpz/+8Y9n0+r29rau/WRC00zGaAXv7aD79WbVNp0xelzm4/FESHm3XF8vl4TJ+enZydn5MOiXr1/d3NwKRssyl4wJRqSQeZ61dd1rneU5Y2y1XgLiw0eP1uvNaDR6/fr12fk5IC2q6sMPPvy7v/tvy9UiTVPG+Xq18j48fPjgJz/9adt2ddv+5je/ffbsw/n8VKnk89/+DoA+fPhwNBr97ne/W282P/rRjzab7XK55EIIIZ89e8a5aLu+bdu4NhHAObdeLieTyWQyiZnblFKhlPeeEBpne9baONuLc4csTbquM8bE8Wo8XU0mk0OVOWSxxeZP5SVnNGZ8W63N0FP0gtJECYJIAfTQe2+zRBVFcTqbG91TAqyczMIfMqI9buZwb18Ur+A4FImL6sAoPVbv3hu1AHS6V1IKwSkQIXiSqETxTIo/+vGP+r75p3/8hTH6pz/5CQB88eVvnXPX6wGQYPDeWSXEaFSliTLGWGMIIcb5ruusx4DgAbhMRhI4wUfnc+oN8eZ0Oo5CJK1dALKu234wRVEoqZJEXZyfIvqu7aw1gGCNi67q0+kkOlUKwZWS6BwBzNNUCWGdu7tbrjcbICTL8zTLs7zIi5JQmuXZznl9sx36IfgAiASJNc4aRwilhBlju7Yfem2tGwaNAbI0F0IOg95stl0blFTG+NGkPD8/k4nM8qwcjTzAetNe3y5vbtd1q+tObxsMAGlWsJRJlQqpYpBh2/bGeyblaDo1xi/Xm+VqvambpjFd77suzB/Oq9FIynTQ5m6xvL1drjd9N+ht3TvvAqK24BEQIC/ScjRyuvPOQyDBhabrmqbzHhijIaB1oBIgFLyDi/PJZDx69fJFUzdpmmZ5zhi/ub3tekcoUM45F9YFBBJDOTgXlBJr
LaEcIThrRkXJKfno2ZPzkxkHkIIWWYrOUAJNvb25uSbBM0q4kF3Xj6eT0WiSZlk1Gke+pJBRGBAndhHNBE4o5zIWhwD7qFpEAGCMsvvGCAnbR/6kWUTRuZCMC4jzPMp8QMo4Y5xSFgexcWhnPe5mhZTFOxBCcedJtM8jwd0sUAgJeC/YP9StCLgdui7C6KGSESSHmcchDOh4ZP4eaMkEe69AHqrXcX98KIR0zyyF/ewT95G8kWzcdV1VVTEC+/Ly8pevVxEIimROiLHShMRuNc759hNKTin13lIC6C0nyChNEkUoBcrrtjfGPnjwcDQevfzuWwjuwfnpbDpR3FXV+Orm7u3bm2o0+e0Xr6oqyfMMwHLOGIWqrLwLbd2UZUGB1H0DhKRJent7O53NBq3bupVSBu+zLJuOx6vVgjHqrC2Lwnurh5YASVRa5kWR54Jz58zQ9dPJRHBW5FmeJQAOgy/ybD6bD7obj0acc+9cmqZSKc5Ynud938/nc8pZN/SPnzwBgNVy9eEHz375q6+yvBi0efL02Wg0fv3msu+Hqhp98+0qSrq++ebbruvPzi8QSN201ujF3cJ517Ztkmaf/ejHT589mk6nSGnwyBh33td1w5mQgi0Wd2VW+IBpVkznZzLJ67bfbLbamB/88Ada68XdbbPdCEryPGUE6roRSnIh45UzmVRN0+Z59t23352czBOpNtvGOXtycuqdZ4T963/1r1arddd28/k8SRMu+Nurq0EPCCiUGo3GDx8//l/+1//VufDn/6c/J4TqwVSj6quvvtrUWyHEjz778TAMXd+vluvLy8uqGl1cPOCcN02rtZaJGo1GzhpjzAcffHB5eamUquu6KKsQgrUuMjO9910/CCGKogAADD5mcUfsLV5ah1bvAJDE/+WcEyqE4BTQGsMoJEp2Tffi5fMnjx977521GIKztm26yMYa5SrPUjY/f/DeSOP4kHiMcMa3koldkYMj8mdcZu9NB+O3ZCL1MBijBWeUIAl+PCqePXuyWa/eXl5SgPOzU0JZU9faOmtdowmnPOJFaSLLokiUZIR670II1rleW+OsCyEgQQDS3JWZenAyKRI+ypNMya5tQgjah2EwvTZAqJQyTZIyT7MsW29WMe7Oe2+sAY8xc44CkZKnWSIEA/AhuLilXl6+9c5TzgkSZx0Axr9KJOZGilHfdYcZHiDWdV3Xu2C/OOGLU5MIVcc+r23brvPWQlWklEJe5nmecSXTPCOMN93w+vJ6s+3azgw6WEc8AJNSqNQxaLp+uW6bTgNlKs1UmnEur+4W6+227gfrA1AmlJCJVKloLFjje6P73gx6cA6BAiUkBAgBPMZDrsjzVAgJALpvIlUBEQetjfGIQAj2GiajhAva9346yebzWd93t7fLk/mMCmFd6LXR1lNGVZanWeHDPSsqNkWxz/EBGeUUkFEIzjx99OhkMuEEBSdKcIKBk9DU29XdghJgjAFl1tjxaFRVkzzP0zwPCEhi2k1kItA9sSVexkdEfwIUSJTvMUaBEroPHopIKWOMSHVgaR4apgPv9FCNDle1C/efx7HcrggxyvazPb7nNpO9avCdOUJcHft2jhAC9J0Z4XtjguNnddzw7SeF7yOZx8/2va4xQpH3dz4ecOxzJ7TWnLG+740xl5eXv7rc0Ciav38oQvbCjEhG2NPh9hmHGAiEJBF26POiJJRpa1utKRPz07nVw931WynYdFSWeZ4nkKZFQPr27S2hbLW6HY2qsiwZw+A0YzRVggQkSAmSvh888ZwJj9jWjUrz4D0XMk2Sb79bSeHnJ5M8y4qi6Pu2abfX131RMELiH4Uzxhmj8T1GDN4HqXiWpZzzuMOFEDbrtVSqHJVlUUghyD44Kr7z0+lMSnlzeyuVms7Gb68uv//9n/zVX/1tnqeU0q7rPv744+g2wrlLkoRz3jTd7W1zdfXae/fhhx86Z1++fNl2rVJKKAWEAKMnZ2ej0Xg0mlSjMRLYbraIoaxG
VTW6vr4qyyrN8m4whNCT05Oyqpzzb968nk6qk9m0b9vl8jZ6DzZtp5SSKsnybBgGxIDBp1mCiHoYrLPT2XQ8ni6Xd5zy07PT5XL55NkzzrkPgQChnFECvelvbm+bpm3bLng8O3tweXn14sUrQlmaZo8eP2yahlD64sWL1Wr97NmzJ0+fXl1dJ0myXK4Wi6VSaj4/SZLEOmeMGVeVc+76+no6nXZdlyRJ3bZJkvh9r+WcG3TMgZKIGDW1hyVzDGMc8BiyT5RkjCGjjFAMGLxnhHLGAvpoi7zd1H0/IAJhMV4YrA9O911v2Pz8AT3KNDle5McL6VD2uFCRcnN85DyQ8g9fOTyUDx4AlJJ5kiRKJkrmiUqUevXyxc3NzfnZ6YMHD+/uFuvNlgvVtN1gGSUEADmlWaryLJOMQfDeG+u8NlYba7zziAEgICS2fnB2cjYbJZyNsoSg3242nEvrcVN3znvOheSiLMvxqBSMLZY3IXgSwBnrnWeMJkmaqoRSyLIsVQq9t9ZEPYP3/s3bG+eccd45RyhVSmV5rlSSpWlkKvZ9r4fOORsHOla79Xpd13VkAxpjtTYhBO8D5zzK4Pq+b1vjHDAGZZkmiSrKPEkSLqWUST/om5vVy9dXbW/bwfUm2EAYk1woIKzzBihL0mQ0HpXjqUxS433b99ojEkoYY0IwmVIuCeNA2arRejDtMPRa+4BACeOMcu7RBwBCIEmSPC+kVNEpNAwuUTwO4frBeI9IAAF8gOms8s6G4M/PToRgq8Wds35+emqM2dZN0w4+BKmUSlIupHX+SPQW918CQHwAxikF9Np6o58+enBxOpcUpKBKcBo8Z6Rp6tXyLobTBaAh4HQ2rUaTLM/TvAgYY444PZAvGeU0MlSAkp3fHtl7d1FKGKXxEBMbvl2PyBjljIpkR7xknFIWf+LA3oyf7FsjAkAcASC7wHQW70kIwR2phe+InbEcEiCEUXLQ8xwv40NOAo3hDAd7B/JOqN4fLHvv8FaOyt5h6cFRstV7FdQdrXE4aivpPmIles3H8NWXL19+cdMRQvCIvBMfKeI93u+wJkSkLB4dwDvLKEmkbJvtbDqViVqutgAMkVRVFbzdrhZK8vlklCWyKpUPiEC//fYFAcq54Jxxzgjxm/USABklZjAx5rDd1iJPhRDbbT1oTRnve1NkOSW0LMV4NEpTNQyDs9Z7l2VpnlOCSAnljCcqy7MsS7NEJWmS6EEzRrM0KfJcCrE7RgXsjaaEpknKGffOcy4SJbXWo9Goruuiyucnc2N34JMehpevrx89unh7dUkp4YKtVssHDy+0GabTiQ9Om0FrTRkEhLYbEM2PfvyZsWa53vzuq7vr28s0SynjXTfkRZlm+WQ2q6oxY5wSEgIOgy6rsuv07WK53mz7rnPWSiGn07ESVHctOjeqSsnY3WLRtm2Wl947qZLxdFJvNoPulVKCc6mkMYPgMkkUo2SxWBljxuMxZ+zXv/2iKArG2Xa7vVncnJ2dnp6flVXZtN35xaP/8r//1Ww+T9OCID0/P//qy69EIqWUo/F4vV4TQqOdyM/++OfPnz9HBC4kpbTr+u1264LPsoxTioibzWY6ncZOwDovhAj7i/BQ9mJC/SEL83h8driSj6GLWPl8CAAYgg3eEwDKKAbwAe/uFutN3Xa9cSF49EgCoQEZBW8sssnJGR7djhfJcfU6nAqFTKJj5OHgGe9waAHfO11SRsdllSjpnJWMjUcFCeHtm8vtdiOlfPTgkVLJ25ubwVjGVNP21lMMAYMTnJV5mqcSvB90BwDeWeOs8c4HDEA9YEC4yMKzJ4+qVII3iSBmGLq2TbLMurDZNggkSVLB+XhUjqvSGNMPLSHEO2+MIYRmWZanOedcSiGlDN50XWdsH2t513X9YFbrerMxjIfJdDqdTIsiT5IkTZMQwjAMfd9jdOYFcM5t1tuIR0dH9jjSA4CYoiCljDiSMcg5ZBlXSpRVUZQFFRwAjMHbxebN5W3d
GuuJdjBo9AGBcaTU+tBjyMtqMjspqpENsKnbxXqzbVoTwLigrTfWt9rUbb+ph02tPYB1zrm4Fd5r1AghEdGNJ5hd/q12AqAoEi7kMAz94AgBxiEEGI9zAILBVVWhpNiu18YMs9kYCG/btq5750ElMk0Lwpj3wTm/Q9V21WZP1gBCCSWIfddAgCcPTp8+epAIpgRLBGeAktGhbder5c5MmnFC6PzkpChHSZrkWR6ZkFKpQ/PDjuZZjIp7GBBiOC1QRoEgiwUm+iXvMkUpcHnPqHyXPPn7rRIhJBy1W+ywIMm+yh7p2HYvn7xj8kKiuAKRcUb2mld6NLc77vb+j8re4VcQQvZI7jtMTthTZt6DbQDAHjtrI0RAIoTACI2fxMs1TqO/+eabb9YWjhT6ABBjV3cA7D59kFLKeLy0iLOGUVCCt8329OyMC/nm6orLJGDI8pwg1pu1YuTB+XxUlkXGm7onhP3ud19bi6NqYp313krJFne3AIjeGz0omVAkzjo1KjgTN7e31getXdu20aJdKsk4td72fTvoDtHFKIau1dbEszfZSVBYTH+kQvDDKDfuoVLKssoDYvDeWKv1kGd5VRbxZKCUQkDO2GQ8RoC2a5Mk+ea7y8ePHz979uybb765vLwVgq7X69FoFPn38V0+OZnO55Ptdnt11Vq7VVn6+MkjF7q7he50vanr5XLTaT0MVkg5m85PTk+qasI4J0CkTHbGvHmZ5RnBELwhwWVKVkVGKem7BgidTWcqTZfrTVQ6jkbjru+NtWmitO6zNEEIeZ4tF6uAYTwe3d3cLhZ3s8nkg+99+vz589dv3nz88cdpmizXq6u3b0fjsZRJ3baPHj0etH395mo2O9HGKJlYb//hH/7h6QfPPvnkk816u1wunfez6ZwQkqbZoM1qtbLWZVmGBFarlR56pdTZ2dlmsxmPx1dXV+PJtO/7GKkWN0Zj3aHsRQ0eHLnyHgRykQxIjpQFhBDvAyD64DBgzPYKQADIpm60dcbYXpteO+sCFalMMkqIRcIPcZrHawP+EAfsvY7wPbYYOQJPDs+VUprnqfe+a2szDAktwAetddM0jIonjx5xmVzd3PlAuEg6bR0Svs8eVEKkUnLKjB1010XHL9iT6BghgYAnkKdJleeUEhZiokK/e9fihksE57FMC0Q0Q88pNcYMunPOp2kWOzwAiLP3pu+7rqPEU0mM1fV2bZxvewCA0WhycnKSpqmSMstywGC03aW/Ch49Kbquu76+RsQsy6Ide9QwHAA0vxutBwAQgiiluAhlmaZ5Mhg3aNu07dX14m7RUilDINZZYzFQlMwTAiEACtEMuunvnHO9HnScYRLoBkt3VovgEbyHECAgUO9DAEKA8/tpcAhBUKaUklJEeVbfm0jUpJGXj2it9QgMgQADGmSS6L4fj6o8z5Z3t7rvqipPlFjXTQx8kYlM81LIdHBWa4NAPRDCOIGoJyeExFetrBkCBOtBcjDOUkql5IwFzikjTHIW/2DWBgjIgBDO2V7ZtiNPURpxyD27GCBO/BD8npZ5uGh3x0OI3ixAIkC6b5ICIAX0gA4Dg0NDgzGQPYR4/oyVGhGR090bSAhBSpDumZy7IhQPq8dAiMMjnxdEDIghBEnVoZIBfSdFgfwen+Wo03qnpYuvLuKxh1d6XP9+f22+t8aPi+tBDhwxeQDouu6+rXxXdxh5T9GsJ0amxU4xkZRgiAnyQKJNARjtmEQEYowRikcKTZ7no9GoH1ba2sk0FVxd3dxwmQRExcV+KEBxR20XQzsoKYRU1pjBGkDa9x06MM46a4tRQQH6trHWGjOkmWKCSyk5SeP0UXDFmDjK+E6dcz7YgC5goAxUIgghxbisN1uC4Jzr23qdpJNRWRTFmzdvPvzwQ8bY5eXlZDb58NnT0aj6+uuvf/jjHw7DMJ1Mn3zw5K//+gvVbs+KsxevX1xcXMhUnhanKlPxTRvPyvEMTs5P7u6WjPNnH34wOWmXq/Xbqztr
YLlt8yS9W6wePnhwcTYbjWcRUP3Vr341nZ/ZYbi+vgYSzk9PpJTrzdIgTqbT+biUlGzbBhmjlCa5GOxws9xUV7dKMgCqte66RknBGENr0OtMjseT8V21uHx7/fXvvhgge3B2/sEHHzDBrm4usyyRiRiG/vz8jMpNlo0fgtjW+quvvvqz/+5f1Ulzt7mbzWa/+c1vsiz77Ic//v73v/9f/+ZvootKkmQnJydCCK13qQuMsa7rot/0arUajUYRyfTecyFh74hyuAJxb9Z/PJY+lLrfrzsA0X4XaJwUUOYJo5QRSVEWQIXxxmnT674zIfDE83QTDCGEVdP5exXuDy6P+xkA3Z2L8ciQ7PDvAX49UF0Qw2qxtGY4nU8nVdG37Xa9RsQyz8/Pz/theHt9TSgnTLZd7zwwyjmliZJlkeepYhDs0O+s1r0PEHyAABAAgVLK+Jl0Z/Op4jQRRFCwumeU+oDaRsodE0LmWZalCQbfd52xOiYBcc6KvMjzQu19N4ahb9vGOccoIGLXN03TLNaaczg/Hz9+8rgqS0SkjOVZNuh+6Id+6OMJBRG32+3d3d3NlZYSiyKPbJe+771HAIynb2NM1w3WRtWd4JyXpayqAghtu6Hrze1yc3u36QdgQvUmDMZZD4FAIBgQEbDxsFrXN+t+22oAL5KUce6BAvExNxUpBwJAKOGUMxr7vOhqH4vHLuCKxc6Pee+HYXABGYEkEQyCEMwH7HrjERgDmSZCCMTAhZjNZt7bu9tFlsrxqFqtVnUzeIeEUyUzJoVD0MYN2gLjZB8jt+9f4jPhfddSIOBMWarz+ezDJ4+yRHAaEsEFJYkSQ9cuFgtvrXMuRrjNZrM0yYWUWVFEc+awuyZjkl9kmFDGmEeCuGuB9jjn/t8opt57csZzWWDiwEU8LhsHGOO4zYotwz2SESHHCE7So44N7rtG7+yhXB3XMyHlPXH0ne6Qvrcej3/8D1TEvW7vD+CfR7vD4THxCCMCvD/FMkqjtNQY4/eq01//+teXAweA4wEL7iT/wDmPjqOxZFrnjTFKcsAAgErw4N1kOuu1vVuuCKVAWJqmqeTr5R2g++jZ49lkcnvzWgj58MGTm+u7b7+74RzyPBuPqn5ogrOjqgD0qUol44u7peCCj4qmrrfbhjLWdi5JmBSCUJIXGQAMQweEOG+qshSCK6UyNUqSTKkk+q0wzjhnnPP43TRNkiRRSsWTFufcBc84T9OUAGmbum0aLvh4NKKUNk3DBZdS9kNfVdV4Mtlut83go3vWkydPqkq8eHFNiL+6qstSWWuzLCvLMsuyuHk+ePCAMfry9cvVZkO5ePz06cef/GA0mkilXr1+a6NZYN00bbfd1M75Iq+yskjTTBu93Sz7tqEQEsFyJfXQD23DGZufnGRF2ffaeSgnU+d6DGidy7IkoEcIQ99RSqbTqdaD4FzKZL1eZVk6G0+ff/vNh5/+0Wq13jabUVXN5jNO2eu3l1/+7qtBD7Pp7Ktvvv31r34zOzm7vVm8vbp5+/btJz/45M2bN/0wEEJubm7zPD89O5vPT375y196H0bjyXw+R4TYBU4mk+h13HVdURTRcuXl69dJkgAh8XQVXVoiYQoRpeDkCM88VvUc0JHDcZZzzihhhFFGOeeECUIZUAZMNF0PhAKlAahxzlhvPGoTtk27bjSbnp4fYybw/4/SQuhOLX8PYR35GbLdZnpwqvX1ZuOMHZfFxdkZo+Tu9rZtm1TJ0WiEiJtt3feaUuWRDNp6HyiQRKmqLMosYcTrYXBmQPQheIQQgGAIJngfAChlXDwr6GQyFhRSybzVZugSpbp+6LVlQgJQxvioKqUUeuitMcb1Wg+E0LIsq6ISghMgnHOjh6ZphqEnhFAKwzBs603b2s7A6dn00YNHWZZ7DASoEJwAtG1jnY3bLmBo2/bm5ub6ujUa8pxkWRZHZd776DaZJIn3vuv0MABjUBQyz3PO+WSaxmTO1bpuW7Nc16uNBwJIVNsN
gwbKAChYh8YFhLDqnLaAAJJBlmdCpQ7RuBimR3xAY91ggvNoHHqPZJc2R+/lmNG8nBLE4Iy1xgYfKCDnQClJGCGUOh+cD4RAmqZZURRFGULI88w7u9msAEORZ9boZqNtAMYYl4IQpp3VxjrnAfa/ci+pA4g54RAI3a7XUohEsJP5eDaqHj28SAVjJCjBBSVK8r5r726vjem9NZ22lLL5yUmWFVKprCgIZRhpNgePIgJ0rx4IR4KgXaId3dmH3AvYyS6egFAKXMaL9j3D6MNRFI40cyTmpL93Gt2HJ+E++NxjOPBICdw7oRwXUcqO1AjkfomxvZnZobU67ln/QLt2jD7uq/Xxbzy+MyLCkSNMLHu7X4EYNQkHswVr7S9/+csrK+OrPuog4bDwKb23VbPOe+8Fp4zSEHwiBaOkKIq6aeumRQDKeJKoPMvWqztw5oOnj4osWy7eci6qyWy9qV+9fJOmyXg8YpT0fS04zbJk6Ls0SSnQm5vb+ewkpMliuTLWMkrr2o6qLDjPGXv58q3RffCuqipCUEi5Wi7u7jZeg9bGWqeN7vuu7/pBD1oPlBLOeZaleZ5nWRrrX5Ik/aAZZQDIKKNAu64PIVRlMZlM6nqDAOPxmAvedR2SMJtNF5tuu92cnMyN0aenJ6NR9nd/96osgRBnrWGMAmCaJkmivHecMy7ZfH7y/e9/kuXFaluv1vXV9e1qtZZJkiRZ9NMjgayWi7dv3i6Wq2oy5ozPpuMnjx/OJyMSXHCaE9ysbsejilC2WK0DkAePniZZeXVzU+Qyy/Km2SqlrOk5p5yR4G2mFKGEUeas65s2S/P5fOqd+/f/8f/DGCtH5fX1ddu3k+no4uEFodQ6j0h+9if/4vPffCFV9tOf/vz/+1d//ZOf/KRu6/l8XreNc05rc3d3Z6z9sz/779brddO0N7d3IYTxeFKWpbF2tVqVeY6IXddF8dLHH3/8+RdfjMdj53bT7ujSchgAcUaPL+bj5g/2jOjjpcQgAI2sNg6EBaSBUCR00MYFcD4gEgT0QJ3zvXYmQGeQzc8vjo+37y25w1cOkmHG5aGwkYO7EqWE7HwO47f2aJ4BH6bj8WQyAudu727qzVoIVpVllmWLu2UzdIInFtEY6xCt9+B8miSjqkyVcHYY2tpZywCsM0AiJwKN9z5EMTr/9KQYlyUlTlHStxszDFmatl3XD0YmqQ9AKRuNK87o0HcYQkAXQpBSjcfjLM0ikMU53242bds6ZznngL5t223daQ1JLh48eDCqqr7vrTVZnisp+mEY+i5urIjorNlut7e3q7s7SBSUpVRK6b3iJ4IqMUI6sjeTBCaTUSx7Zcmdd3d3q9W61tpvtqZuQEiGhDe9cR6YYITCoKG34AO0FigBQgEJaGP7vtfWAQBXijEWHSCRIBAQgksl0cUw1fvTCdldPcQYMwzGOR/PvwG9MTjKFQHwiAhESJ5meVFVZVkKISgl11dvu1bPZ2PwfrlsJQeulJKJUDIE0M5aFxgXSZr5o1k02e2/0QCaDF2rpMgTNZ+NR3n66OJMMRLLHieQSNG1zd319TD01pim00KIk9PTLC9looqiDIQAIpfyHqbbJSoERCRsRwM+HBj3JQsOnVm8fzylIYsSPv6eV8sBVDk+csbD3XvwID1qDYMPB9VBxF45uxck7NZqZKAdNXAB7tGU6NJyPC84Ln5/oPIRPJT/9yYRx83ioaujQuDBceK4siLGwOsYhxYRy1/+8pc3Lok97n0HDEf6/b0mBBHjnI8zIjjzziZSUU6yJN3WzaBNQKSUJYkq82y7XqDVzx4/UELoYet9oEw2TXd7ezMeT4oi327WUtIsUUqJdrvNs0Jyud3WH334UQvh9u4uWqhs13ZUJW3XccYTxZSUTdcF77q2y7JUcFKW2WY5ABDOGaN8Z3fHGWO0rreEECGiMx+LBq1KKcIYIg5DL4UoR1Vw3ns3KkdA/NOnT613m81mMp1477fN9uHDh6PZhbX2
7u4uz3NKaVmWT59Of/vb14TYqiqllNfX133fx9BpxthkOv7Vr379zYvvfACVZJRyyvnDR09Go1GSps56JdVsNqVIts126Ide68l4VFVF19S6ras8zRJldU8Qm7apm5YxTnnSD0Y7lxWVZGE8qpbrleTUmD5LVTyhrlZ3J7MT6y2nfH5y5qzv9XBxcf76tu66LkAwxnBJN5uNTJKirAhlL16+/vrb53/xF//n3/z2d8+fv/rzf/Wvf/vb33LFlVKrzZoxZrTN85xxfne3ePDggVKJNna9XsfZHhPcGAMY4nlisVh47z/99NOvv/02z3O999g8kFyifoYduQ4di+IOl/Qx0EgpJRgIxuw95hC9DxYxABAg1tqhH4L3hFFGCRAaMBAifQA2Ob9AAvFjDwwBkHeGeOBD8N5ZG5xH9IwAp+TwEby1ZpiMKs4IBuedcUY7q9E7QM8on5+cUMbXdTto0/TGeJqPZuV4dreuKTAAEpwngE4PijGGbZGLvEipoJ33tXVdgIHw2qIBRpRMkpRzlOhHmXg0H3888YwYzsGBByYCl53xyETTdsG7RPFxkSpBMbgoaG0QqEqTopQqBc4BqIfgnKOMLpZLj5YJdrdarOtGKCYT+uEH31dCOGdYMEqAYoHDwIlNpTBD32uLKNohXN/1V8uuM3CSwels3nbDatVk5UgluUOSlaPemNW6a3vIUxhVScpFKvmkLGpIn79eLlvraLrY6rsGvQQvsiZQTUVN2MqSTWA14R0VNVIL3hKwAMgJkwI4s8FrG7yziEEQmkiZSZUIwQGI95UIxCH4kDCvGDB0IXgIPhAPBDyARRhCsICeUGT0fATjcZpnQlBbZvziZFwVIjjz+tXzzWYDnBWjCfLEEokqcSLpLNfIBkccUKCCcxmTYgRjBAMETzAQAAwhklrrZsMFGRfZk4fnCYFxln388GJWFJKSSqWm73OVIsDl5XXTRO/7elKVZ2fzs9OTREnGRZGnUkhnHSMkskFiYm1EOrkLxDnwnhPCBGOMekocBgdgEZESIjhSZhEQKOFCEB/lE5ITzqOtAiAGIDSE4BADAFIeCPVIXQBHLVLgglJOKIUYi8cJUAo0BIpBUCIYlQDgnbcGfY/eoDcQHCGeksApYQRpTNWj+zj54MEj+EAJA4RYT+Kri9xQEomhsWfeOWwjxKeHEOmm+1oGBzJq/IiPEb+iiKbBAXpKKDBGgKC3oAffNypY12y5xxAIBPXy7eJXv/l2q8aECQQakASMYxQanUujeCKEEOKBmIBgzAdnneNJHpiwoDRyz4R23mqTJ6KQgKabV+nN5eLP/+zngLhebs9OH6yWq+lkzGlwpqNoE8XR2izJ9GDN4B89fPr26kZINT2ZX79ZZDKnIBfL7uLho0VjNCQDUWo0rbUfnNPGEYZG9wiBECzLsVAMKCIJhFHCWADU1lXVxDr0nqZpmaYVpYoQmSRlQUXQXgk1rsbe+u2mBkAmxHg2G4ydz+feh2EYxuMJEGaNq6rxZDRarBa/+Md/brrh5OJRMppMzyYvLt8KmREh57N5UzecM05hOhmPyxNGeMqzL/75a4FhkhXB2GCcZLzv9Wa7AaHy2YzkJalGk8dP0tBq0w9Dv22aajJFkby+upmeP5qdPxw8DA6ESjmjpm+9MYrButsGQh48fhKYuFnUKp8uG3v+4GMdxPM3N4+ffrxp2vV2TTmWhWzb9U//6Ed//b//b5LTT77/g29fvqGiulppmc5Wrf/w+z9uO5skyWw+8kNtzeJinn33+iZLk08+/v6L7162TVsVo6bui7z8/IuvPv74e2mR3y0XJuhtu10sb9JMWQcuhLbvy9FoU9frzfbP/uzPvvjiC0CklMRmT3AqOaOAFJAfHcuOZw0AkCRJdLePxniEEOccZQxJrFeeBMeIlyQI8IKit9oabZ3zCA6YQaoD64NsHWGj+cl9eTuaGWK4/zx+PT4Vxvnh10fSV5ZlVVUdsJFj5zTG2Hx+opRq
mma9XgfvGGNVVZ2cnFhr1us1248r6T2ThSRJIqRCRONcnJzHY3jkiUXCuOIiy7M0Sc5TH49pQggSgamAhJC+66KdiuQCop+Wdd77AS3fmWExRiggOG/1MGitm7oOIRACw6CBhDRNpJTOQQg7N9s0VUmi4vl9vdoYY10A53Czbe8Wq7Z1hMDDWUoI0dYIwbMYhhK8EGKzWTsbKIUsgbLI0zQVnGEIbzfN3WJZb3XX6W2LJgDjHJnUxg3eGxdcQAeIsKOeUxrP9bCXe0UjMGaNj7OaeEraW8kkfdsnKSkKxbiw1noPlBOhlDZx9MmUEkWeVUWVFWmeZJOM6MH2Q69UMh6PZZp23bBabwISxgWXijFune/7XhvjHUZBzHvjK7KPwjkMzKKy03svU5WlSZGmeaKIt2WaPDo/FQSUZIkQGFyWKOvM7c1N13XWWqO7oijnpyej0YQLwZlkgvuAEXbbyeZ2CQwEETmJBBaIlBPYZxLFuWac/xEgdG8vAiREq8ZDrxx2VHaglPJomBm8d8F5ByFUo5wzhojeOmutdx5CwIDovLfuEEsZiVeUUmeHe+H50Toz1kZPk6O2LAKJ94Dq8Tzv96fvu3V6xN85dHvkSCl4WLk7oQI4RAgICNQjBh8weMAA3hMM1jkA6hB8IIvN5osvfrfh2Xug0zGb9PD54Q7O2xCixgQQozcb6qEniJIzTlEJrgS1uv7Jj3/krO2aZp9bFKzRceaKiIAheG8HfX5+fnV1Ff8WVVXVXQcBtHVpXjZdu902621DGQgundUYPCEYExcpAcr4dHxygK92Gd8xD8Tv7JZwH7VGKfXej/JMCEEYIYQURTGZTDCEq+urNEsJIUpJzrlzdhgGJFAURasHIeTjx4+BkC++/Mo6/8EHz0bjCWOs3m4//83rD56dDX13+eb1+cU5ARh6O5vNnPNVlU+ns5u7xXgy6br+xctXi9XGOpdkWZrnhDIMwXo3TWVsXBhl1XiUqIQylqWJtW4+P8my7O7u7vr6mjE2Gk+SJGmG5vLyknP++PHjxd3dcrk8Pz8jhGZp5pypt5uouE0SZcxwcnKyWLWf/uCHf/vf/l4798d/8i+uru9++8VXk9k8y/L/6f/5P//kJz9x1gDBm5vroe/PTk8Cyy4vLzHAo0ePr69vKGVcyMlkEhAuLy+lUufn56v1smmaoshDCBhIFDdzzmOo0MnJSdM0EVo47tsOsPkxhnm41Ol9vtU75iqU7hDHCDEe6kU08zwElh1uIZAQAitn8+Pydr+ojvtKvLfiFHsYM848kiRJ01QIETmN8XbY8jjn49HEWrtarZqmEZxlWRZnm5vNuq5rsWfHRHQFEctccs4RiPfe7qROO3SKUuqc9dYyTqq8yPOMM36WuIOq31mrtfZRyx4wDqsF4zsnG+cBwFPMkzRRklMajfiMdl3b3d3d9kNPKAMC1tl4YnbO615TSpQUeZ7keaaUxOC01ovFMgSCwAZtb+9Wd4suOMgS9uh03HWd9a4sS5kq753zLgS/WneMglJQZEmRFUWWA0DfD1fbrmmHvoe2g96AUCDT3CFt+kE71N47BBeHmhCQICNHeFcASqgQUqmEs2j8TZz3wfn9wR9HlZhOT0ajMRUCKJWJFEpSylS0YKaEUEYAnLN93/Zdlwusm8E6zPJcJEnXDovlar1phJSUMsJYCDho3fXaWU9I7CHut8X3xlH3fIe9uV1WZEWepVIKSsDZ2ah6+vCCAWapVJwRCEoIH9xysazrrdZ66Pssy07OTsfjKeOcUk75zkLlHv3fx+YBgIxfj786Qrv7ehK31F3ZA8IYE4xjjI3Zm4wAQNyyvXeMMsF57CUJAOM0EcLY3lsTnANASkAwyjnjUaoXmx/vQ/A++KhxdFaHEDwGhP0EgVDcZdNGQ01CCY8FJT5x+D3+y3tl7/5sGvvXP1Qm37N0OXwF0AAQIDwguBC884CBApIQKKBzDgmzCA7pYrP54suvNix7b3Z42I9+
v/5RSq2zzjm/c/sExhgB9NYQQEqQE8zTRDBQgv7w00/6ruuaxhiTpqqut0pKpVTw1lrDKGvqOs+y8Xj84vlzpdR0Oo3uHtY7Y11Vjp6/eLmpXWchT9jOkgNQMMYIRQBCGSHi9u11lmVnZ2d5ng+6b5qaEVqVZQihKAohWN93Q98BYPCu71rEkGRpnhcAxHsnhFCpyoucUhKJXXmeEwJ1XQvGKZAsz+u2kULOZtOuH169emmtu7g4r4rq/Pz8yy9+2/X1fD7r+8F7f/Hg4bffvoizcy5FkqaLxZILMWgznkwYFVxJyoWzDhhQynQ/JDRwqZAAF3Lb1qvVKs0yIRPOedv1lPH5ySmh7O3Vddd1o9EoyZPYYyillEjWy3Wapm9ev3r44IEQYrtZjUYVo0TrYdA947wo53XTJln+5u2VSLOLB4///hf/BJQN2vzs5z/7H//H/8cHz54F9E+fPPnnf/6lkCofzaKS/aOPPr69vbPWDdqMx+MY2zkYLaUsysJ7Pww9YwyQCSH2zq5ACCnLkjG23W7J3m8l2rXsJXDvKAJ+/wCN+8Su/VYDcUIRe5tochbHbTvns3crXwgEEVk1mx++dLyCAI+31yNKC9td8TEyOHI3DqYkB/j18LS8C03TDMNACEmUjKldzrmmqYdhiOKB+FzjzjiuMgCIyQYuBO+9tc5aC4DOub4fzNAzxkZlkaYJIJwqE20PvfdD33dd561DxCxN46CRkT2TO9LfWWyWeYxkc841Tbtar9ebreBcZalg3CN65zs99K0TnKRKFkVW5HmaCBJ81/b1tjHGMSoRoG304m7dNCgFVGU5KcSmrgOGNE0JpQhgnavr2ljIUlLkqZAySRKVJNbY7Xa76L21YbCh12ACqCwhQjWtbuKwE8EDeBpdkeNVEO7PQUd5h3zP0rTWOhuMdVEyyDn1iO1g2q4nhCZJGgJut9uu111nB+28tcFb7yz6AIg8eM5plqdCqLbtl+tN25sAAIQaF5xHF9AHCCGadUWwC35/2z3wniInMDb0UkqhRJYmkjGKgQZ3fjL/6OkjQYmSTDJKCXJKQ/D1drtcLvu+t3rI8/z09Gw6nUspgXLKGCIcSBZkH0IQf7XigtKonOZUcMainIwHRLYb9VEGBAJSQjjnQPAQGLmbJcDuSBeJnxg8Bg8QGAFGiXeWINAAFAgJ6J3Xw6D7vtnWXdcNXR9vbdv2bdd1neA0hHDIi2AsWpqxyC3kQjAWNfIHQiZ7r3U+Lnvv1UJCCB699uMfOfbwPG7RIBhCKaEckbkYUwCEUyAhAKCzFoG6QJyHu+X2t198sSLJ8SDzeOLy3i+N3wrojTEBAwBBgjH6AoMP3hEIgsKozILTozL/4OmTpq6b7SZ2BqvFMs/zVKlh6HU/CM6ttWenp8vlMh72z8/PV6sVEjCDJgSMNS9erQcPeQJZnm63tXeWEuBM7DYzKjgTkkA0zkbEsionkwlnvO/7NE0ZY97fN+ix8+iaBhEjxSUEPwyDkOLk5EQImeVp17VpmgrBrbVKqc1mwyWXQtbbbZKkH3z44e3N7eef/4ZSNp1MHjx4OB7lX3/9DWfs9Ozk7nYxmc58COvVejaba62ralSW1Wa7ztPixatXry/f3Nxtlovl7fLWOe+9u7tbMPRIaNcNSNhqtV6s1lU1Ch7brs2LIkkz72ycKVprX7x4MZqOTk9P27Z9+fLlg4sHwfvNZl1vGyVlUeaEYN/1Dx6eLxcLIfhqtZKqVCqhXDAhX75+g4R++PH3/vEff+khPHr8ZLFYnJ2eNW3963/+1Z/8yZ/e3t40gz89PVmtVoDApTDarjar+fykHzRjrG6atm3LqpxOpyH4uq6lSCKtIV6Ese3Lsmgig33fx9JwcLj8/RDmw/VMyPsVMd7l+Jx3uGfEI6210ZML7uU9nJC9pcXhsY5/zQEhgSPNfIwKjFPKWKsiTV/c5wrtnm78ZUNvovlymqaMkQh27R2KSQwZj95r8alH6xO3
R39CgLhvCiG8s1prtCZN1Y5083swC+ecUMY5l1zsnjTZ8X8corWWMsBgvAVrvTFWa73dNJvNJpJruRTeGSDcetQGCUCRqTxPslwJQZxzdug3m029afK8DEjtYNq6NYNnAKlkqVJN3wT0SGDQnQAvhAD0XYtpCnmepyohhABQa31vbNsN6623LgwOBhehJzbYsGp6R8ABQQIIBAGBYvR+DB4Y3Zl8BIIheGJMxLt3SHQ4TIEAAG5XQ6J09NRI0zRlXCYszR3XgwbrHBAA9BACMAZCkPnpPEsLwlldt/3QB08EV0ioc6idC2ipkIzLGPtOKMUjd57j2/FlZ62NwIBSKlBkLIZAhVgdhRDMQyyQkpN42SRJEi8hwe6Tzcke6KeUHLSrsZ87sFFCcJRSQgllgNFUi0UKqwshyuoQSAASgBBAT7gAypAwJCzs2EIBAZVUzjlrTfxFlBAMznk7LvMo7Y8rVvdDvJiTvckZY4xStg+rAOMsuadic8Y45ZxSKpQEAEAa/9l1Zu9mKRxauvfW5vHnNJafvYvKjjuEyKNAk+yia2MdCATcYasghFKKlFJECgSpBUqBcgIABJFgAPRHwOoxoeD4K3gk5gMAyZlgnARPgXjnvTMgUiHE0LWMASFUSlk364ens+j2YMzQ9+3Q9dHeb+9mxxDD6ekJIWS1Wk3GVZZlwRo79IER9KbI8zdvb7MEXAeTcWqs7rrIUXWUcMkpI5QIzomaTtO2beuu7Y12zk2n06Iqx9OJ1cMwDBiCUopziuit1VJKQnnddD6E6XQ6mU1Hk/Fms1osFlVVpkk+DMMwGCm5UmmWZX3fv3z+/Kc//eM0Sbq+m1fVn/3pHw/D8OrFd0KIm5ubzz77zFr7i1/8wiPKJP362+/G47HHAHSHPMDexGc8HlsfchcIF8hpNRoxwTG4u3W92DRD15yfn2eJolz1Nqw3q5PTifFgu95oLVQyywtCyDAMb9++ffTo0ccffPzrX/+6rmvOedfhdDp9/fp1lqWADjhlVAiVMM6EhOvrtx997/unjG87ven0crHIx9NHjx/crjb/y3/8D3/6J//y7u5uPKlCCMvl8pNPf/i3//Rl7Ha2221RjdaiHo1GlNLRaPTixSuRCMbYt98+//jjjz/44KPlch1bi7g2GWNa6/V6ned5VVXr9TpuEbFvOeSUkSMS1vHGcihs9N2khDhfo/ugy3hPa+2h1B2ErffVbTQ/ud+23kFVjrK4jrg0AbGqqrIsETHGDoQQojT7+FyJ92lEFDGGB3FnDSLGl2etAYAsSWIfGmWMADAZ5QA7BTBGWoFzsXAGjz44RqiUPI2HZcYe5BhZWPG4zhiTQkgpvdvFu8NeruustdZm+Y54iiF0/VDXddf2LuBkOhVSDsY0ddMNQ9P0AWA8LueTtBoVZZ4TQozuu6ZvmrbvdJJkzrquM/W2btpACKSJzNLMDGuy0yuDjFZkQ6t1GI/TPM+VVPG73mHTtJu6u27RGLAebADCQOWlQbJqBw8QKA2UebrfyyIOhv7Ql0ccCX30l9rVg0MGcIxUtQhJmiZZDoRa54x1gIFzPh5VSskkEUkiKEUCQUoyyvOPPvqeUGoY9LZujLGEiQBk0BYYsy54AEo443L/GuneqIsebe67WyTB70a2SsUTNOVMScEAOAEOeDqbPnlwzjBwBhRRChacIxSMNm/evK7rWlDIs+Lk7Gw2O+FCAFAmOGMc9u0vpTTqyuLKCdYSQiijsFMr7P4NIRAEAoRTCgGD210Y5BCMHl/CHjv03ntrvHOMBMmZ5IxTQjBsVpv1crW8W2xW677tgg+MUsGFUiq+xjhmjvWPcx7nVWRHHOV7TxYeTcUiERPIUaQf/AF33PcQRXKEJCPca+GPiyU5osC9W6IcAMHI8QAgSClBggjeRdoRItXO9xZv75a//s1vO1X+/pmGvKt/f/d5gnOWUEYpcT7Eg7zgTOuOYVCSnU6n29Xig6ePqzK/enul+zaEnfWN
5Nx7j95zzjGEs/nJzc0No1Qp+fDhw9ViiYjWdAiolNxuN0WuovXlets7C4iAIQZP8kAppZIwaZu7RKlyVDFKt3Xdt51SMooQvPOEkDRJYvcW0bA0zwXnvRmabc0Ync/nWZZaa/u+S5JESG6tjfBdnmdpmoZg621NCIkKpel0NhqPb29u7+7unA+MscePH+d5/stf/jogpln+7TffzOazEDBJEs74y5evxuOxcXYymwKAi+RGDN77vu/1oFebjXPO+mA9Guu6wWhjnA/A6HazCSFMxuMsy60xQMh8NgMGr1+/Pj87n81mXddxIThj3nkuRFWW2+325PQ0AArGN9vNZDINSDfb+uT03PtAOL++vXv+4uWzZx/86le/yYvCGDsajbqu/eFnn/3t3/zts2cfMJktl8skSdMk08a0TRNTgq13caQb2Qxaa8QwnU6buo0n8khoj8BeZIdordu2jfhnRAG9PwSEvHPIiyXN7ake5Oh0SAgc7CMOaOdBehRFybHF2pdAgoj3Ze+Y0hKp2PdmoHj/DFS66/NijE7sz2K8zuFxjucKWZZ772NIkh76iHQdl73oYxIxB0JInqpoCut92Ie1x0OiwBAIQRl1g0AoJVLKp2MWdxwhBAESQojMAmvMDgMkO71a8B4A8pwTIIxRQmnX63rbWh+SNAcg1vnVZtu2zWB13wETcHF+PsponmdJonxwXdv3XWetRyRxgqa16zttdCAE8jRNE2VdzTgNAZNElKPSWtN2XZ7xPM+kVFxwygQCGOPqpm0713huXYhp6VQqkeYWYNN0FiBQCpQEoAH2mXAEGMRBTGwNCCE0epOKWM0pi+KGaEznEYARoNT50GvTdLrvdd30uu/7ruv7jiBIyTmjieDz6fTi/FxItVpvb+8W26Y31g/a6cE4HwjhCIRQJoQilAQPEPkXgO+BafEWr7aI2sfha7yyo90tCUFxJinMxqMnD84FJZwCxZAo4a2ljDjrXr16td1uE8GyLJ+fnkxnJ4xzBMql4ELSff5C7HgoIYxQRiliYJzTSFvY2bfc24bR/Ykn+AAADEjg4t68c3/tQ8C+azE4QoABeOf6rmvqum2aVy9frJaLpt56Z6XgeZYWeZZnqbY6+sQ4bwczDLrXZtBGJzJhjFMuGBdRKU252MGySJHsU/7o7oO8W0sOy+q9sneofL9f9g5L9f7YetyloUMgPpAQfxIIoEfvgw+IQRvjkQzOaxOuFovffP5bk03oMZ78LqXlUH3JPUgL3nvKGCFEG4uIqRJKyOAtwSAYOZvPtuvlJ9//WHH+8vl3FDFRythhPB55a7UehOB5kQOis3azXY2qMsuyVKq2baWU3g0UQtvUQMhoPEqyTPfaWRtVvT6A38FkEpkAKirhtTZtPwghZvN5VY36YXj75q2UkgARgjPO956x8XxJpJKMC21sPwwBIUnT8XTcD31ATJS01lprOOchYJ4XRaFevXw1DF1RFPH9SZNEcLrebE9O5svVum3bH/zws7u7u+Vy9fH3vvf552/mJ6M0yyghlLGu7+cnJ0omhDFE0MY6HzhnzllCYDadvrq8S1NVlFXMCGu6vmm76ezkr//ub8ajSTkab+vGWnd+dpZn2d1ieXZxcnt7G1w4Pz8fBhP1oU1dj8ejRMkI/yOEsqqWq1VAwABSqddv3jz74OMkzf/r3/zN2dn5m8tLY93ibnlyevbs2bOrt9dfffX1z37+J998+93pxcOu6yJteNC6yIvlap2obFvXo9G4abth0LPZSdO0q9Xq8eMnXdtHhPOQHBsv5ijji4OteGbdl6v7S+vQ2x14m4eyd98Ivgs1Ha+XCBwel70Qdpmh/Pchi/i/4SA/2iu94qMXRRFCqOsaAGKxcc61bRvNw8JRUBHZz3jul+geLI3DnmNM9kB/cM55F6x33qPDe1s2zrlnDkEIQgDCMAxCsCxNOZfHiqv4ZIZhSKQSEUCDXXe8m3Zg8N44ACDcez8Y7QPNVNJ0HSFE
axuACq60GISgeVVS3Bx4GcZa4wJQrpRomo4zucuQYcABBKOKMcPisSJIKfM8a9vGOTg5GSEiIZGwxDEQ47W2wTigTAZ0PoAnhCAMzvfOaQAPQAGRsIAhxEAwCEhITC6Ofxe+7+ziBPS+8cLdJAkRRVEMXd8PhjEmlcAQwHsP0A6eAVCimSXBWms9JZgoMVi4vVtuNjUQZowbXIiQo3GeEs6FYEwExBAc5VwI4az/g91A1D7H6zuSnuLzpIw55yEEIvmO0UEI54wQdxgnR4z0sEIAIJLxISASJCESTHbuIQc8JN52pqOcwZ6WBbG7YTsdLEFAuHcUuwcznA2M7RzMMAB6BpwRYrSOMNdms7GDLqoJInLKlFSJVBRIzFBkUuwC8wCAkHi2ZYwxjK033f0vE2Q3j4zP/33Q8j0g8bh7e6/AvHc73ing3enIcUXEnQYXERhSunsDwj2mFBARSQAMSNw+IPQ9PJMc0cp//zmQXVgvDcHhzkoRpZTWW/SBUyAUsyxBxK7rqixN03RYd2WWL4el1SZLkzzNWIDvvvsuTSRjbD6Z3t7epmkaQkiUHIbh7qaezvNUsizLNsvNfFL6u9o7cAjBA7FIOFAHzBKqFAKnJBAmPJIACJQxKb765tuT2XQyGRFCqOIRqTPGrLY1tDAejy8ePBh0d319vd2uz87OLi4eXF9fxV0rRq9sNhsppTf1syePBuPWy2VeVgAAhH347BlQfnV9W2TJrz//fDQa/dmf/VkgEAhwCZdXb8fTybcvnlfVKFPJ5eVlVY7L8ShCBdum64b+xavXy003nXRKsoDEeewHMx6PlVJt2zKltrX9/Hff1G1zcXqqlFptakZwVI3btv7www/fvHzTti0hpO/7RIqT09Om2TLGHj9++Prli08+/Ugl6Xgye3t5ORlPfdMmQr55/fL80dPPfvDDX/7m821rfvKjz/7nf/8f/vEf/puU8t/8m3/z7/7dvxuGgVJ6fX09nU6+++75er2dTOcfPPvwi6++fvZ03OlhvV4DQJZl0ZAs48mrV6+Kouz7/rC3x9NYvENskaO++ShX6A8sgV1JOjrGHZetg6w24ouHMnmwoTiuo5QSAGDV/ORQco5FtXg0OxHsoOjlTdviPgCe7B1p76XQR6f+/eeMEGKM6ftecBaLvPc+ipcFZ2Gfr42IZVmCd8MwOO+lVITSvtfRM0lKlSgFgN7apqmromyaJk2yB4WfTCZVVTnn1qvV7e2ts3YymXDGpBARP4mW0M5apRSEnguJQNqubzvtPFgkbdevVmvOhTGm63WieDXKqjxPUzWfJNPZ9Pr6tm1bH3C93hDCGRN3i5V1wQy6bQf0oBRNlGCARDjGaFFkSaL00Hlnk4QTINqY2XSGhKzXjbG2bvVy1foA1421FpASoRKmpLahM5ZQEihBxncGJIwDYzuv/RBwD3IiQDiIjgkwzpWQjDEgJCAGDAjQaxsCxISBSGflFNJEeecTRTkj3nvB+WhUZVnqnBsMbLf1th0G6wjZWap7BM452ZUTGgB3XP8QOKNxsnhAKvq+jzQtznlMaDp0HgCQZGnwTlDqjeYQnj588PDsRDIqGMmUdFZzSpNU9V23Wq1Wq5Vu2vFkPBqPsrwcjcbRs1EmCURva9gFD0VFCiUUwBMSbYkIRNsvDAF9pI5Gwh4JgUaCO6LGkErJCOn6xljNGQFArTshxO3N9Vdff/ni+fO2rbNETSfj2XTiPGRZNh6PJ5NJnudpmqokicFsEcnnMUHgfhJJCKWcSyEkEyKas2CIU9uY7H7wEaUIO17se60bvDs1P4q2oBBHdwjBh93DIRKA4ANgBEzjR6SoEkaBUh5NvmMRi56jjILW1gdEIOumBSZ+/Zsv7lbrmii/x9CPt4+IJr33VEMI3g2MMkYZJYzsPQQ4Y0pyQEfRnZ/Mx6PyfDZ7+fzb9fIuT9Rycfe9jz++vb29vr6az2eMselkkqby8vLNqBqlado0mzRL6u3GWjOp
sqbthr63xhZ5XpYjIcRms6nKKgQzaHQQZcbeONTGKmiFUnmWMy6s9877+MaNxyOlkgjSCyEEl0wwxhmTibNu0D0QEJIzxnzwbdcNfTcajfI0B4iuWpDIZOh1krCyLFWSWmcAMc/zJE2stfPZ7O72mnPZts1vv/iyGlU/+/mf/Kf/9J9OzycIIKSKcJrz/mR2cnN9q6QsRyNjDBNsPptuNpvXb9pUufn5w7u7O21dUVYBYbOtfcC8rC4eXDz/9rvHT55sttubm5vZbNq27SeffLpZLySX/z/S/qtHkm1ZE8TMbElXoVKX2vKoe+65gjPsIZqPQ2BAgvy7RL/0A0GQ3WgO5t7uI/fZsnSlDOVyyXlYEZ5RmbVPX4COQlVUZGSEh/taJj777LN9EhJns9nV5VVRlIMx33337d///W/bvl2t14ujIx8DZ3x5c6t0dnJyut5snPNPX3x2e3vz4cO10qqazXyIr9+8nUwmRyenV9c3z1989vrd2+Dj8+cvXr9+c3xyenV5fXx62jRd07Xe71j3yVIFn2wVjFjguCmGYSjL8scffzw/PzfGNE2zWCy22+1sNhuMOUzXRimyMa5NqVQa6oSIe7bizrwcikskPwUAbdsmAxVjROQ7t3efHh42TOw/JkEwsNdy7IZ+dGyPQ9HDaHS/RXcNQwDACO+x1xgZY5lSwzAktkUylKZrYozIOCL6XWayyzIZMWusM8ZaWxV5jGE+m319qiaTCRE1TdN3XWpg8N4nzCjGHYzknEudOoIHJOZ8bDuzqdtt3W+aZlP3eVHM5otMK85BCiEZ6kzNJtMyQwDcrLfOhcH6vjPExDDYpukRyHlvjY8ISkKmBOcEbC9GBX5HqOWYhmBppY21XTcYF+um39bOe+giMy4ioa5KYGzT903fB+IOYkjSxkmAFHa5wSgq8FGUPVrGvfW5j5VkBklHeZfEAHHkLFEIaae8I5iWijjFELtAfd8PzsYISCymfmwAIE5EjIkErMW4yywFZymqStB8kubx3o8ih4eiX0QEhN5ZjkgxSMKL05OLkyMOkRMwiJwhJ4oQvPOr1ery8hKcmc9mi8ViPjvKy5KQRUSpNGeC7t8Zca+WAhDSf2A/x3yX9aQsJgJGwBDBB++cs3ZZb7y13juEyBl5Z1bLu+sPl3/+4x/rus61Pj4+Pl7MsyxjSDFGG0BnOsszLkSE6ENIcSXx+wEO8SDv2c844owluB0h7oY23N+63SkSppbyR8fIix4j3HtUZl/GeBACH0r3Hh4QXYjRp4sRIUYMMSIEBphGWkZkrfEB+Xc//nR5ed2JcjyH8W6mpPYx6QAgSSBEREqtvwmVIkTOSSCgt7/6xdfB9F998dnN9VUMfui6hIFvNhuIcbFYCMbBh7vl7dnJ6WazyZXO8+zm5sZZe3l5Oa1yM/QQgtIyyzIpuJSyqirvfdu0fRcZAOfofOyttTbwOKgsK8oyIvV97wOURTmfzxGRBCEktXcXg0/rIisKIPTBQ4haq7KqGCdrTNe1Sqlca0QM0advi4g+DkQCEb3zIQLbkYHjdDotivLm9lYI8e7d27Ozc2vNxdmFw/jq5asQo86yu9u74P1ivgjBX11eZ0o9e/58vVo1dV1W5fNn83dvr7Oy2G62IXils6qqBuvbrq8mU6VU23V1vZ3P5+vVcjqdTSbV7e3dyfE01SmPj09vb2+llC+ev7i8vDTG+hB6M5yfnTVdt5jPAZFz2W63jLDrus+//Orq+oYY/+rrX9Rt982331XldLVa98aeXZz/+ONPn3/xxWDcfLFYrZeb9ebs7GK5XMUAZTVRKnv3/n1a7CEEH/yYfgnO74tlH3NVpJRJtCzRT5KHM8aOq3dsH0j0kXssYY+IpvQprb2PyNgxHqr/931vjNmvVQ4J5DwsfT/2XmPKmYxawkkT2XRsxYBHRK/xwWCGQ0jkcGemDDSlt0mZJk25VEoB4865VBNmJFJ1MLiPJpYdhgPGmMSI1VoH64Zh
EIwHxkZi2wixDtYxjzaAcSEAkpDMBxbc2cWT48WcICxXN323jc4m1QAgtt5ue2uchW3T9sZFdNtN6wAYQRLw5wRKSaU5EUbGvfcuOGt8EgbTOsu15FI759qmH6wzNjbdMFhgbAe3EWdCCB981/eDB8bC3msRAGD0if4wahzfNynHvZ0FiAFdsvZh3xaGGPeTCna1GcYYAyJKxVsAYMSFkETM2WCMGZD5iJgwQUQPMUHcEQEZMY5xp4odidhu7g9jiDiSNlM4loCgwwFVeCD6tQeu6TCa8z5kSjKIxvZCiKqqiOgj1/Ux0xgA7ps6YD8igHb5ECLCge6lYDzNPg8QUmNM8D44/+T8zFvXNM1qfbder03XpeU9X0xnk+l8PhdCmK7fbDZtVzvnJkdP0niN9JVjCMjEqJSWtiLtG+oAIBKliX9Ae2CT0oJkABAwIiZRTowEESI72HSHO+uxS9t933j/o7HvFgGif+gOd39jCBEwUNytGkjhUIBInKHlGANjbLDBWOsPNM/GO5h+ZYxHx2M3lQIpFZ73oC6L3nWtzdWsKIooMM/z9Y0jhLquQwjO2ePjI2sG72ye54wxiGGzXbVt++zJU2N6iEEIQYB1W1vr1suNtVZJKYQIZtguXTmZPjs9JoC6rrfbuvcQIKbuSyKQmeYqQ65jCMCdBxh8bAeHETmRj96HEJzzIfTOM6IjqaRSSGSN6Y3LM5JSO+m3Xd80jWRMCJGpHBGN6du2ZQTWOy6FynRIOpPOciGHvp1Wxa+++uKHn376P/zTP15fX1prv/76ay/Zf/tvv99ut2cnp03TBO+NMWVZbjfNdrut1qvzs7PrmxsPMXfZP/zul/UQt7lervtmsyzLMin9ElHTDSdn5z989+1XX3213tY//vTy9PR0eXMb/PZXv/rVX/7y1+Vy+dlnn/Wduby6mUzny+UyAN7cLieTydHJyV+++fYf//mfhr7t+y4Bp7dXl8dH821nNMv/h3/6RxOikFleTt5eXvd9/81f/zo/OYVIR8dTzvl6tZVSCcG8xxDCZDLfrcwYQgjjFGi/1+o7zNXS39vt9uzs7Pvvv7+4uKiqarVaTSaTw/Y+POAKHC6zwy0QQgCEMRSjg6kjtJ9VJIRIuGvqndglhWE/Z/IwRTjEVcdPShXIw3jzQUR56MnvqTEHgAwelPdG55+qmumkk+yLEIpz7mxII3tSs8RBY6OQUjISMe6GQScVweTPE70lKQI8sBdpr7a9sSEk1WbrY4iRcZkk2H0MNngIMVjHGWVaSg7Wu/W2ti60xt6t1pu227T9tumJC2SCOCMOTILKudCMsbiTCKekPgCModwffd+3betcGJy3PiAB4xhCUAqk5NYNbd+lruZUFDm8npQcYHiId8eP8764J9AeBB8e0uybNI+YI0Xy3vf9YH0USudlIaQ23tVdV9eNtTZA5JwzKZDv0qlEItpnVRF28sohra2khpAAekSUUhZFcTjY73AhjZj7oez9iLGPQQwR5XlORCHsLDiN89ABorvfS3BQ9B7hejgggKX3v0cdfcAQCVEwLqVs1ss3r37865//8PL777fLpRTi7Pzk66+++OqLL9NgzOVyebdehRDKyezi/GkxrVSRcS2ZEiJTKtNCSSb4oZg1HhBAxiB0N2z94z18+OIxeztM4OBR4eDxBvzkvnucAj56T0bEx/NJ+yWE4GIAQuNs2/dm5EPvg+7xeHxu4zclSr02yNhuGm2qcSilzs/PvTOEMdGdnHNJ4ymEIKXM89z0gzPGOceQrq+vnz15whhrtnVZln3fHx3Nuq7zxkrGc6XR+2az7NbL6IbFpHx6cnwylxTBOdCSHc+nR4spU3kzuMvb5XLbBGRcqLof3ny4bIahty5EZFzqrOBKuRi7fri5uUmRt1SqN8PtajkMg86zoiqt8be3d+v1GgASyOa9j8gG6631nHOpVWr5h+DrzXro2+PF7OTo6OuvviCIQ9dsVndZlj159qyoSp1n
usgB8fr2lohOTk5ms9mrV68E4Xw2UVzcXd/88z/84xfPn3z95YuTo5whODM40wPAdLa4vLxUWTFbHEXkTOnvf/zp5vbu6PQ0hPAf/sN/+Oyzz/I8v71ZHh8fr1arZBjzrAwBNtum3rbXt7d3d3eAbDqdXl1d5rl++/ZtUeQc4fXr11LxX3z5xWq1Oj4+/uzz54yJ6XS+vFs/e/bs1atXZZk/eXr+zTffLBaLsizX67WU/OjoKBnwVM5PZiEhQIduYtRYSUzGVBNJCwD3cl3jckpd50kRZZxm/MDjPOhMGDfdGIMyxoqiSKeUno8xsnK+eLBPdsnB4fSHvdCGtXYs8x2O5fy5AxFjuB/0nCSA03KPMUgpOaPtdktEs9kscXa1EFrrALFre+udkpnWOsmeWmusNZJzIih01vVtpvNnk1CWJQB0XZd0WJJCYPA+VZUw7maSeecAIKJXOo/E15v6+m61rtuuN/1ghZD1drvdrPq2oejms8n5yaIsi3W72dZ1iNR2w+3dYAw4Z60HnWdc8Bi8c1YwqMpMSuajAZ7G1YUYPWeQ5TrLtZSSOFuttk3bh0iDdX3nIgAiNTbmpWJCbJtm0xogIIFDiIiUpBkBANNSiYgRAx1E6fvQfvdPGiWXCn64HyLOEGMkBMZ2HMbgfAheKZllmc5yKYXzoW3bruudi1Gq1Fe0HxEOjBMXAgjT+PEIIbXcMb4TjUyNmMaY5PN2rNqPtYXuMwNGCDGTUjDKJX96fnZxciQJCQMD0EpE74ehF1z0ff/27dt+W08m1XQ+WyyOJ9MpceFDIGJJEhohwUpA+9yTxG7yHhES3w1wBYjBuRh8jAFSa8Iw9F1n+u67H/7y7u3rzWo9nVRfffnFl59/PptPORcJNl8ul9vtFoDysiyrSV6UQaqIFJMKCBdMiDSKiIgBIkQcBRs4MoYMk88lwYgjS8VGAsCQRmMAEe56LdLzGPyDKOHQwTxAVvDnd+Bhinb46wwhSacBsSQkHWPAGK2xDGmwxoc4uNB0wx/+9Je79XbgxYP3hI979R64bYqOEJPweIwRdyrbHsEfzSdffPasb2rBcDGbru+uvTOTsizy3DnHOddS9H0HMeZ5PvTtdrs5PjpijC3vbr13EKLgPOWSSuk0VN3ZgQC1FErIoiiZlF2/9T5Uk6KcVIzR7fJ2vd0uV9tNvXXeIbEA4H3gUiZROWRcSaW15kIC0GAH5z1xpqWKEIa+hxiUUoIziNE7m+aaJoyTiHlMoUHgjBFhiDE454NjxITgdV0fHx+/ffNmMim3m81mvbGcTycTiFEKQUjB+eXdKi+KPNdSCmOHzWbLEiMCond2OikmVVWVhc5ynedt0w7WP3v2/C/f/FVrdXR84qytJuXrV++8N59/9sWH198fHx3/y//2r59/9tlsvvjmm29+97t/+Omnl5zz2Wy6XN1dXJz/4c9/fPrs+bv371989vnZtPrLN3+RSi+OFk3baq0B6W65jkBv3r7rrZ3M5tttY4wlxo9PT68u32qdLRZHP3z/g9KZUtnrN6+fPnkupFqt1tu6zrJC6cwY65zXOoMY9vH3/YH7WQV5ntd1fXp6mjiSibV/GL2lY+xLPgzmdqphguPHbezJ7SV0Pbm9lHKk5vIQ9pMc4ONqwfiOY+Y4Ul0Os87HJ/fJnO9BKX6MHFPudcheTd8tMWWS2hTC/Wwz55wxLgmYHeazYxA9Qqbj4NwHsX+6fMgFEA8RBut741LhP316UoojgqLMj+bTSVFQ8H3fWxd8gLYbrAMfYNsBsl29LSVbJEBIxgQAur3ETupYElmWSSkBYByzHhGcTfAi2hC4AK0lY9j3YXCpY5gfoFb7+5KEFP97x+OykGB8zKgO+y+Pj0/SXAXjwrZptk1vXKQ9TyHuF9A+VdpRK8e6cZIAi+DTbU2Lkg5088aFPiYKD9KOT7Y9pCWRoNEUDHG+a75JIPYoYhT9w5zjQS51WNwGH3Z+MURjzGazufpw+eqn
n7799tvV8mZWlb/59de//MVX89kkgnfGeDskjRjG2Gy2OD45KasKALZ1HQhdDENwFoInAMIA0Tg3rodx1xyWHA4TwZ87PnlDH2dv/0b3dng1PnlZdugLPMKFIhBjLoDxbkyUH3+Fx77wwZ2N0QNAIjoppfI8D95zYsn6tG273W6HYUjzmVNQnwxiCCEZgUlZ3t7elnmRZVnbtk+ePOm6TgoNkezgvPVaiWleSsFs30bXT6v882dnX3/+/OmTWVXq4E1Tr7dtcB5chE0dXr/fvHr3vml7rlTXm6brV5v6brlabrZtN4SIXKqiKKy1q9WqHXohhNTKhdA0TSpX51XJGNtut9fX133fZ1nGhSImAqD1AWBvMJ2H4Pu2LjItOF2cnZweH58cza/fv7u5uSknVTWdGmen81mWZW3fLZdLxtgf//hHIUTTbo0xy5vb09PTv/71r6ubSw7+xfNnv/z6y+dPn6Q5rpeXlwDw5u17pdR6u5lMprPF9LsfXv31u2+Pjo7W67UQ4j/9p//kvf/Nb/7uhx9++OKLL1JX85OLZ9ttw5hIMEZC2n7zq19/8+c/Y4Ttag0ASkrBWd+3X375ZSJndl1Xtz0y9oc//GGxWLx///76+vJXv/rVX/7yl8vLy81ms91uq6pSSh+iF6NhH9fe4UomotVqlZIwa21RFAk2P5x8OWqbpJ04LsIEPY7o45hHHpbARgmUZE+yLBurfYjIZqdn9+jT4UTmPToPAMH5UWkzfsyWOfxKD4LT9CCEOJY34z6SjTFNAybBWXJ1RVGkjGFS5DFG67xzDnbpKiZVTj+JAACAAElEQVR9DWNM37cMETHmOnPOzmezr05EUvRInenDMAx9nzC9TGsighCTPbXGhBC4lsRE39vVtjEu6KzQOkPGpeBm6HPJ59NqUuSzSc4Jlnc3A0Tngg9wt97GCExA20OWCSUF5yyCD97mmi1mpc5E8J6E9t5b03sflBRlWWgpAWG52qzXdYgYkTWdSeiKsV7knEthfWgHazxEBIfBOkCKgCwJZSFQorNggMBickvxozwP9lPfEHDsfh7vTACIsO8GyZQsyzLPc+e8MdYaOwzGugAIgosoyAfng0uylbgT3CfOWYINYgypKT4FSGZwo6peAi3TRzJ2PzfuMApLfXsp28sEe3p+9uT0WDKC4ARREoxRSnLG2rax1q5uLtNgo7KazuZzqTNkxIVEYowY29llurfEdDAwiO/WvXcuxjh0/Wa1vru+ubu5Xa/WfddZa3VGzy6ePn32NNPaORu850xwIX56+QoRq8lsNp8Xk0rpTCgltTapP5BzRhyRYoQkt0lAMaSOCgRAtuu5QMZFwv9oP052ZCDtOu4JEXcabzFGjvDYQcJBfW5Eina7Dz562d/MAAEACGNEjIgBMAJ6H4K33jvrLGPUtq1x3vq4rrs//PnPTTc4WeEB8IM/U/u/99DepIqs9z5EYIwlff2qKIa+TQ2aaXrOt9/8Oc+zXOuu62azmXOua1uttbM2xphpNZ1Mlsvbpq4RMVM6y7LNZjMtJsZYM/QIUQkuBeOcEUISMZBaq7wgztquX67W223vEfKy0HkWoutNtCYg+iRzREhJi8o7l8YBcsaLQscYnXXOOYzAGCei4B0hMCIuGKe0EQJFIMCoheBir4EIgjNGjCBYM4QAJycnq9Xq9Pys6VopZF1vG8YE5xCjGYbpZOKtA0BCmk2n337zVwSYz2aMU11vEeJmvaq3a+9dludcSKUzpbUPIY2tv729e/r06Wa1RoQqLzbrpQ/xd7/6/OXLl9PpbBiGm9u7PC9evHixXm+Rk7H24snFm7dvvvrqy7dv3yHiYnGkMZRl+fbdO0CaTmdcysFaofK3b9+HCHfLdZYVLsJytSnK0kcsC7nZbNu2ffH88zdv3202tQ+hqqZMiK7rB2NihBDDGP2MxRE4oNqlNGYYBkQsy3IYhizL+r4PITB+r7U7/iIeyDA9gBmIcCyiHRItQwhJ
ONN7n2WZMSYJ1CVhMnpAtDuM5vCgIBf3DNTRqX4yDn1cHUyvP6wNjJhs+vWxtX5ETlLIn+L6FAskGZd9zmdSPpfKfinDSzBx0mr5+KLcX7j0ESmQHFzSs+FSahI8xnhzc7NeNwmbFgwxxGHo1uslIiPkxjlro1BMqiwEoKT7ncQwOUnJs1xprXUmxxDDewDYz5yLsF6v2zY454LfabAJJYkoy7IYow9WShACnINhOJQED2OH2d8+Dr3L4X0cx6clW5moIovFoq7rJLJjnUvfAhFt2GVvIyI/XrcH5eJxYdR1PQL6hwSWT+YluFdSYPdXj6dRW7CXwE/3KP10MpmkXHkYhq7rjDEYgTEmuTismT0uej1GSJbL5eXl5evXr9+8eXN7extjXCwWL168+Kd/+IeLJ2cUIVibZVlVlm3bfvvNX+fz+dHR0Ww245w3Tbter1NqAkRMCK11un0ppiAiF0NK+B67h5/L6j75/M/txMO6WnzUIPs45/sk9HK4PQ+vT7pE6e+0Wpxzdh/pHq6xeEAWf7D8HhiE8fTGZqe7u7vXr19rrcuyXCwWQognT54AQFKrSDm9Uso5t7pbVlUVQog+/Ou/frO6W15cXCTWw2Qyy3UBAN7atFRyraSUDKHrGmeHqipm0wlC6HsTAnQOjAtEXOtcKUCAbdNfXd1EgCzLp/PZdDoXWqUhgpEwxpj6Utq2vb29bdsWERkJZJRIW0RUVVVZlt7729tba3xERkTW+r7vTW99sDtAZbF4//6tc+7yw7vFdKa0+J/+p3+X5/l2u40ISeKxLMtnz57lWt/d3T1//vzDhw8xxu12e3Jy8ubNm9ls1rfNanm3urvt2ibNVfj1r3/99MmT+fERY8x5T0SvXr2ZTqfnT55677///vtf//rXUsrb26Ux5vXr123bl2UppUyckaOjk6IoAKBt26ZpTk5Ovvvuu3//7/99CCFEl/TbhqGbTqc3NzdnZ2fJbYQQfnz5Umu9XC5PT08R0Rjzz//8z4mP473fbDZJbDPdxwT8JP7jYf43Lo+UPa9Wq9lsVtd1wurCTq/OjJDmuMFHC4Z7FCrN07ifeXJglBKImMzRrsWZ8/Sh+2xvdhSdD9ZhiBQhhTApLhVsF5UczjdyfoddjG80wiOPt3GMEVnSPkJiRCSQOAEj4jHETKkYQStZlto7E32faT50bVHkgtPQ94M11hofgpRCShm8d84AouS86/uizE9OjmOo9WSGjIwxQNEYU3dtYLJz6HlGukSeG+/8MGCwHHym0ZrhZt1shoiTueHFh7q53bStdULA0yfHz5+dJVDexlBO5jdr5oNarVprAxDb1H30kGWQ50pyPgwtRjg+Pp1NTkMQzrJ6exe9gwCZUuenT05Pzq2Dd2+vm34AjD5CICCGzjtjjVQx03roW9s6jBAjWA8RIGMZBMmiJGIIFKILEIA54FFQUikPBJERMsI092wEngHifviad95xjgBRSuGctTZMp2VRTW7vluvttpxMsqxq2n7bdEiCCx0icUExRkIuhCQk7xxEJGRaZcFHQp7pnDMZA0RPDEWATkomFeMCkUIEhxSIYgQH6BED5ygV05lQmgtJEUgQMQIBUaD7u198mbGYMdIc3dAzQqFUAEIuIhe98yVXP/zwk5B6PptXVbmYT6wdYrSEnsgjA0yT4MB7CgHDlAmwDp1XxEQE13bNct2sNt/+6c931zdd3QkuZ8fHpxcXR2cX1XwRuTCBIpMks8HFm+Vm23Zc6uPjkzwvUicixMiIpBBKiDD0IngePPOOvGPRcwgMI0aH4AF3fyIFYBFYZEzeV92T+JH3wTtOyBBSI0ewBrxlEAXtKHCPw4UH6OV4cB4RYwQfo4/RR/BJdPTB30SQBiuhwDQlKTgHzvHoJYaMoBK8Wa0IAueqNn7Tmv/yL39gMmsCS+2hcUfV22nhjuFsTPP99oezACgisAjMxwBIOpNlWbabJTl3NMl+++UXJQbpDOsHs9lsejia
n0br4jBQCM1mK4SYHp2QLLvA/uN//q+nz55Mjo6//eHH588ummZ7JM18IppmeXfXcx0Yl4HT0cmFBQghWtMzPyxyfl7J3K3D0l9GsMb1HiyQZ9wSNt5vhvjyenOzXXsQ+WQymcwIRdf3TdOSb5Xguc4Zkh2Ms4GzTKnCDrEfHJEsygkx0fa9jU4VOQNSXGKkoTPWOMYVlxqYDEhNP3AhsywrywzBKx4F8yEACx7ckAllnQkRltvV26vLbdP9/T/9k3GmH4a8zNa3d7NZdXt9FeZf3G7d/OS8a4dXP/04VfJ8XpyUsgC7+vDq2elss7y9vb6rZou//4d/evX2arldzc+eRpGt2uGu7llWfvPdyy++/rXOy3rTtF1fat133cXp8Xa5FAzzbNLZYCI3Ub66uv38F78xwYcYzbARFL79yx++/Oz58vZuMpuHyD9c34GPi/lxDGyzqX/1q9+8fv3m9Zs3f/+73/7ww/dPnpzf3FxyQussI2RIhOCcvW9g2+s5pLJIEjlhjGVZlprrGWP4KJBNx89RSYQUxFhkLAAEZAHJE4/EgcgHYJJJLpzro7fedJvVrWLIwLHZ0clh2DiGaaPk2F5acxc/uuBH1hbifYJ5+A4fwTKpeypiAk4T/AgAwTspOOdcCJ4EjZw1ACC5UEo5F5q+M9Y55yLsUgRrzDB0ECMjBIi5VpPJRIZNnmUYvDOGszRDx4WIxqTKBMcIYI21Q3QmBh8pRKTBo43gSfTGrrf10PVlzjPJpoXOleCEUglO1Pfd1bINIRgzWGtDDMYECFDkXGuVKc0IBeOTqtRSJc7PMDQpp8myrMhzAFhv1svV0oUEhiEQi4CJwS6EjEhdPxgDASACWZcQSYaMpX65EFPpPSJFIjKD8z6kPPLx+viZbBtC8IiolNBaO+e7rivLkjFmjdthC/siLrK0yDjtOx8Y40qpFJXHuNf1ds4M1hhDLIwZ2xiCJWJLevCAABWRMQRGwCEKil88f1YorhgTDKP3nNFY2HPed11Xcr5er1ebdYyxKMuqqnSe50UxJJl/BJGIuEISUQwhmj76yDhDxLquX79+/c033/z1r3/tuq4oiqfPnjx58nQym3LOdxVMiomV1zTNhw8f1ut1WZbPnz9PN/EQPEg1RRfCYc14dEuHRfXD8FZw/aAi/hiKiAcUlXGixeG7wQHI+eD+En6UY42/FfZTBQ5fH2MMe6rv7iySE4Pgne26LkT0EevB3iw3//WPf0ZiTUiEnXvQNX3c2Lp3mNt57wVLInkIkEQDgFKJPngC/+Vnz3PJgumPF7N3r1+bofdMQIycADD2Xeu9z8vJdDaTKvvrdz9YZ8qivLu5GfpuUpVFltvt3XQ6d97f3rVcxIuLJ9V0Yowx1nnn3T4KT9pA8zm9byjEELxjjHwIXW9iAJ1La7x33pvBWxucRQhKyFznq9t3bdMjYllWgLRd18YYrXVZFDEGM/TD0AUIUgjGEi/DjIIbaWKD9y4J543kc4DIGBOCK6UG4NfXV3fLVZ7neVFuttvttq6qyfXVZabzGLzg/OhowTnPtO7aDvOp5Gy1vJtOJlora83Q94zx2Xx+9uTi5na5Wm9efPbZdDr901/+kufZ0K2//vrr7baeTqcfPlxKxpumDd7OZ9NM6/V6pbTItHr96vV0PvHOT6vq6Pj4+va2KCcvX7+aTmdFmccYm3qz3da9sUU53Ta9AwrAi2r69vVP2+32yy+/MsYUZXV9fQNIi8Xi6upaSnV8crLd1NZ5Ioo70aX74tehf0kgZ0LscC8zba39JLA1ok2fOnZFHwBIMy5TguWsJUSAACEkIzr0fd/3VVkJIdh0cfx4R41g4Ih33bu91H+63/mHbu/BRt2Z3R0jAxH3E9wT29A7KbjWSkoRgnMu9WejVhkXore2aZpU4Qtxp9PvnBuGLsaYiKFayaqqsF9qrQggOKsYk1z44Afj+sESY4IJxAjOeWswOMAY0TMuA5MeuYm0qdvlemOH
KFiYFNnRbKqlYBi1VoCwWW/eXK+BEImFCM46YwJnUBZKSpHrTHAmOS+05oyboRuG3pgOANJUJqV41/XL1aptu7ALmSkiITEkzgSXUvXG9oMzFpCAuPAQnY9+1yqXNKJi2A0bAsZYDJ4ID31J3Ok/fcSVGm+E9xEAQgAhWFJgatsuQXzW2q7tR/Hy3e1jCVjg41oUQiYNodSSMXJP0lxhrSVnQnDJGGfEGfHxAWEaqUMxgnfBOW+t41JSGr0GUTL44vmzUgnFGCeI3ie9lXTqxtm2bSdaOO+uri45Z6dnZ3meZ0VOiNPphBhFn6QgIkB0znpnw9DGEPqhu7m++vGH71+/etO1Xab1V19+eX52fnR8lBcZI8SInJhgTBe6bdvE2ETEhAAXRUEHnUAflQ0OygyHa/7wmo+vZ4ylbO+wFv4ggXuQ1R26vcMX/G23d/jMGO4cYpv3r0m6FIABMYmVB+9jcLvxJoAu4Haw7y5v//TX75DxJtx3YoxEg9QO9eCdd+Bn8n8Jj9q79eCMYCAQf/Xl56Zt3NBdnJ28f/PWmmFyfDIMPVJkiM12CwCT2Vxmxbpu//LX709PL9qu++67m0nBikxzzo4nWZKfyTTjQngfXPAxgtIZFxz3aFgC84+PjwcxG7p6u+1jdETMBR8QhJQ+eueg7V1Tb43pGFKus7woONq6bZquFVxkWQ5I9XZ7fX07nVSIEGMwpvfeMc4YYUpi0tZQSjHGrTXOWQDIMp3S+xBChIRLR2td52JZFN6HrusZMYYMkcqiurq6Goa+Ksu2acqywAhKSmPt3aaryuLD23dFmXHBl8tVbwYfYbD2yy9/0fZ9UU0nk+lmu/nTH7797d//6vryzXQ6dc7PZrPNcr3dbvM8W69W5+dnBIAIIbhcZ5cf3k1nUyQmGXvy9Onb9x/ycnJ5fdX1w7NnT4UQXVvf3S6F1MaGbnAmYEQ+mS1ub663deOcOz09ffP+/W9/+7vr6+sY4/X1jffh6198fXN9a2zqmEK8l2W493kjTpAQYynlCE6mToHHqOGhv3xwjK6PiHY1O8YRMQbPiAEECHHv9rqu65Qu4qEmJ3xcBhjh1BQ2PujqS88/KKg82KLwEdH5EwXwsYCX9BtTFYf2bYYp500oyj2BkCj4gASjHen73hhXahmJe++JcUYCQxx9Nmc7if3IiEUQkhPnHJiUxH1wzrnB+wBmAKVUVVVEwTibheBCbPoh0czyPI8xDkOHAEwCpNifIQRKA0vhoOOKiJIYP0Lo+iQxjsaOZhEQSXIWkRHx0PSpUIIIQrACuYvGGRejT/MVIiQLEjmXjKGW6vCWj21VY0vK4WVPzpJztDYSUVI9T7wp3Jf9xjwvpfiAu/LeIYaOez29NAQ1NV3thwzAoTXEJLxJlMp1D9b6rjgUQ0CIlCjgO9PonIveC06HhSsi6vq2qiofw2qzvrm54koKLYasqJsNEk8jYFiM4Fyw1lt7d3Vd1/V6vW7b3jmT5fnz46dHi5PpdJoGCSVml84kAMSATdOsVqu6rtMw0tlslgxZqrmOBYZxxeIBVedBmnWY5x2mRw82xWNE5DF6E/cCCw+ev68d7H86ZnUPXn/4/ocH5zxJmIHHGGIIzlobjGG4n6/kIwA0TbNzqx+npPtFRQ8+aPz0nYwZG6UkYvAmuuCDVwz7ofN9r9BvNpuyLBGCVHy9MsZ4kpwEVdU0K8vVevvy7aWQudDZ1U+vageM68H47erq/Osz69ykmk1mRzc3N8vVZrBOaT2RSimltO66XXEo9Xv9j797TqHt6vWqBeBWSWECDCZIVQyx9S7UBuJtEz0QESA7rQrGeN0017dLF2hSlNb6d2/evnz9+mg+nU4rIcnZYbVaaSWkFJPJNM0TTf1hRVE5Z6y1qak8cQDS/ur7fhiGQPri4sKF8P33P1o7eB8Zp6LInj17dnd3p7V+//aduhKKCymO5vP5y8uXA8fZtFitVmVZDj5wEu+v70IIIitPnzz9
upx8++1fvbOLudoub7Ms+/Of//y73/1D13UXFxf/8i//cnJ0fHd3t1wuvXefv3i2Wl731pxdPFmtl+enZ4nBnqbMHx8fX9/cbTab09PT6XSKiHmev3l/A6TTSuu67vnz59fX169evVosjpd3a/ErUVUVYyKEUNf17c2yLMumH4wxjPMQQhJXf1BgjvuJ9ulBMjJElBwhfAxyjMkVfOrYvf4RpUBKyYkB+hg+auC2zkFANpkfPfBYo71L9e17axVCjNHHMPqkMez9GyAn7ZQykvwuIe6n8kDkjIqikFIY0yNinmvGmPMxREgCV8R4iBhjFEJopWKMzg4QohBMCplnelqVtr2tysm0KgVjtu9Tbb7r+sE6BGSMC84QYrAGY2TEilJFoNaE3sXWhtWm3tY9Ijx/MltMq0JLCJYRaKV7Y64ur6+XTgjSuQaEpm28i1wAJ5zPpkoIiIEhFpnmnILz0QfioJQqikxrZa1dr9d93zHGuyEkOSMgpERB5Jwx1vSdtcEHIA5CSuTMh+gjOu9DEhOLERESa5FzoZXEvRTqYSvo4ZU/ZEMQRc55jLv5xV3XhRAnk0miBYcQR5HoXRGY7fT0khdMMWz6ICklY2nwSu+cS+BnjG5MfQ5jo7GxZKSupMUNxDAGhiAJM8l+8cXnk0xpziE4jEGIHbRORBHBWltoVpT51e110zXEiHM+n0+lENYMWuvZtCqLIji7vL398O7th3dvr96/eff2ze3NtZLy+bMnz5+/mE2njGizXnlnuWC5zjOtEaDru6ZtLq+vpZQnJydHR0dZlidjLaVKA7P2X2cnsxdCAOIAuItH9sS0cYbDjvZ68CeB+o9D1Me+ZPQjj575iCbzIF8cf77XNaMH5/PwDyFECqn8GyF4F5yL3iEE55wHMD52Lnz346tXHy4jMcvUg5h4dHuH9LTDjBZ2yATtZht4F+0QhkEyCMZIDlrwoWumkyqGcNNs6+2GcZYplRfZ0fGxceEv3/247c10dnx5c3v5YYUBJoXIsqxtWtveVZPJbLGwLrRdn2WZEHK9Xpuh58R1lgnBGe5YvkRUTmeaM4TQtZu6iz4Gj2QjIhMxwczRew/WWO+d806AyXReVFWM2A+9D6CzbD6brdYr5y1npDOFENu2sc4KIfW+SzXJ8gkhlJKcib7vYT8cgzMhhEQE7302mZrBLJdLJCalurq6uVsup5NpluVd1x0tFleXl0gQQ5iUk8V8ITh//erV+elJ2/W6mHAhSaq3H65VVjRdp1Q2nVSMqMx1rsSP33938uTs9Zu3X37xBRFlOru6ukoMdkbIGGVaaSWstXmR394tlVKzMgfEwbrJdN5bg8SWy7tnz54xguvrmwD4/vJa6gKFNi7eLlfVZCKk8iHe3a2fPn36zTffnJydlmW12qwZF6vV+uzsrGs70w9McOddoiU+2AIhhJQij6jV2P4fPgYdxweHTM7DY1eJ2ClpMBzHRBNxxgAjhEgECGDN0Pc9MAkReNhLTh+u6RSe7DWj7yl24wk9YPodxrOfjDoBHiJC6SNo3+mVHqRrsesDAyK2H5a5c/iRiCIBJyY5Sc4YYybQMFiIKKW0DQzD4FyAEDHCoYQuEGLYxQR7gA4SusMYKEmff/65bTbr9brIRDYpiItu2Ky3vTHQdV1pixijcyEiIELfm2TKA0SWtDNDRESllEryRZlCxNSr51xgLFkHQE4+peKcRaLgE+hvnYdIQAwkskwLBzjUPcMISB52QEG6/WmCXQoLxut/GHbQx03iyTylRZMUeoi4tbaua+fcbszpI8c5uq6UHFhrU+nO+5jcHhFxJg/X5bhykp8bwYAHqY+Dj6K/MRcUlEDc3cAgxpjirCiKHAVj7O/+7u++++47Y8xqdffDDz9wzheLRbZcXr0Tzrn1er1er9u29j7mWmmlzk/PFsdHVVUJhoyj4IoIynKSl0WMcbvdrlfbtu+891rr2Wy2WCwStJBS1URCO1zMj9HFw68wXrRP
5m0/9+DQbYxvSI96/OHj9D0e0GvHHz346fj345Q0Vb49YGojJuKMeeI+uJDGaSXkYBgGxpj77838PLwC6ZVJLML7EH1wNkZng+3d0OaSBx+26+VMn5DGu7u7WVkCwDD0nDOtZVFVnPPeuB9evv7Tn9//+ndfmxi/f/meAZQV3S7rk+PFdHbs7fWm7pnYaq3n86PtdlvXNWOsrmutdV7lSmVE5F1I2Ymkm2fHlRK/AgjNn17e9uDIMZkNfQ/EBJecBJimdXC3ro31J8WJj3VZlnlWOOfarvMhTCeT6WJabzbvrt77cFKWOecyBNf35vb29vj4eDKZ3N7eLpfLYRiOjxdVVXGRmq98Uo9KW0MIgZw75wRjgiEydXQ0t9ZeXV1V1TSpU45jbTabTVVVf//rX/zx9/9ihq7IdZ7nm7ZfLbd3m4ZUjkK+/XCJiNMiOz89yRU3be0A8jy7u7t78uRJ9PHzzz///tvvFvPp7e3Nycnxy5cvf/nLr7kIfT9MZvO71eb56TxtwCTc9fTp0//y//vPv/vd75Sk6XR6u9qmG62U0p6vX73NtKzKad+9SXe8adrgAQWW5QQRP7y/gv1knsOF8cDzJVSA9nIqh4QX68yhmxh/a5Q+f3A8cEAxBaQAzjmCnVpQsj+JdtAONsbIqun8AYo68m3GeWkjgIaILuxKfYfjfsad9iDTjHuNxH1VL233CABJOmM6nRZ5NgwdInJGdV0zUt6Hbqd0dY/UM4YxRuds8E4KoaTIM12VuWlWBDAtizLL3dBZazCEwZoASMiEkFmmBeOJ+iiVcsGaEG1Aj6x3sNm2QPH46ChTol4vMbj5bFIWhbHm5u725qZpBkACrSUgdl3HGSgl7BCmkzzLtOQ8ldfsYKw1nDMuKMsyKYX3vmm2Xdc6HxJFUyjiQiDjjDEuBGDSQYuJtAIIaRoWEgLhYMwueUjSzxBDGvy755o/uOBxL3T5gOeCEEOIO/W13VLjKV1Li2Tk4u5sGYaEzRwEQDEBgwBgrUsDMbTWWmVEhJj4L4yIHeY6afkd/klJEnGGMRBGgaA4fvH8WSaIAwiOBCAEJyIfQsoNAaDe3PoQirJo2rap6yzL+ra+uvzw/t3bN69e/fTjDy9/+unm+toaU2g1LavFfPbk4vzF82ez+QxiDC5kWXa0mM8WMyXVYPrb25ubu5uu66QUVVWePHmS5Tkg+hgY50rriNAPQ1JOiQhAGMdGSbZTij7cdeN+OQxC718A7HCXxo+5Y+FA9DL9N8HIf2N7P3SfY+PeSE85eH2yMYdAZ8AQA4SkLxd3+vUIwTozGGOsG1x0wP/wzXfXdxsXwJD85Fd7kLmOXja1PXjvvLfeDNYMbujAmnlVsOinZS4IlCA79JxotV6JQkvJizwvq8oH/+rN2+9+fLPt4vR49uFqeb1s80wa47yD50/OlFSTjOqmvV0us7w4OT7p+q7ebqfT6TB0SirBxQ5MEZyAIALatsx1NamYEL0z62Y7eIjEYwQIECAlhojRBw/GueNKDIPxPjDGeWLAx2CMKcvcWTsMvQ9BMMYEAyTnw9D2nAul5D7MHRCBc16WBefcezcMQwipBgHO+mYYtNKCi6EfrLE604hsdbdSQrGkzhhjnhVDP9TbDQCcn8yGYTBmmC6OB+vWdXt1tzYhGBeyPB/6fjatXv7wvZb8aDY9WszWQ392etq1XQgh11oI3nddWRR932dSNU3NGOV5EWJAzuttfTTJhZTExYfLa13kQsi75e1isRCcpFTrbX1zu+qMJ6mRqZev3/gQlc4ur67yolhvNpPJLPEPlssVY0xJba1NRVDvvJLyk6orMca+75P2GOw7yhljxpgQ7h3V4eJ/EMYdLr9UW4px38mctIdCICTEiBFStjf0Xdu229Y6H/gYYz7O9sZQ9LBd73Go+7ePuBcZiTFipBgj7dSTY9y7zzG57LpOzSvrUw9+iODGnDKEgBFi9AiRICRaBCIC8HYwbW+OpsC5FMQc
RY6kJXcBBCcppUBEL4GiFLxuakAmku6nMYxRnmWL2fyHH34gZz5/fj6dLRin26v1atN6hIjgPdjg7/vMJO972/bdpCyyTMZIxgym7SAGSqrCe/WdsGP9UQiBWGKQM0BEhoylcDswhoIzwVzwAOgJveBMA0mCQGARXADnU+UsTQNnY34wQouH/cv3DizGEAJjZG1IjKn9jca6bpUSu6w9BsF3nXPOOcKolOJc7DBt70eYLgnUJZ9XlqXgKtXrD1fkCNkn/YXHJpIRxQMrn97ThKCFTNJpsC8TMkTGWJrvuFgsrq6urLW/+OprLXnf95vVGhEFMSmlzlSmdOra1HkWYxyGQWu9WCyIyIdY1zUJvlqtrq5v27ZVmT4+Pj4+Oi3LcoAdRL+PAz6a44UHDXC7Cxsfur1xvzxI9Xa/9Skw80EKdZic/Xf20SNuyzif7PGnH77buLs55xYj+JhmEcckY+9398J4ZwMEJtJ9TE8eAphj7+Bhxjkuv9GSMGToY0DghCQYcV5qYbrhaDHr1xtnrBD8dnVbrzdH82fGGB9dCMEYd327NN5//vWTq7v1h8s7nUkSet2YIwVC5U3TBGg558vN2v/08mhx/Pz5Z4LzpH9k7dC02xhjzsssKxjxEIKtb8PQMM6enR1vOrvtXf/muja9yCbWOPDeM5JcIlK0w+DDX767+fzFoprqTd1Q2y1m07IswQfrrc5z5Bisrbt2yirGMUlkbDYbYlCW5Ww2W6/XTdPFGIkB59z7JDh8fwHt0C27LtPF2enJ23fv372/Ntafnp0w4kVRXF1dZVmmta43myRi8v7dm//hn//xf/v9nybT6dVPb6wFF8NkOicu6qZj0RfV5D/+4feZoOOjCUB48eLF8fHx7//rf3316tXF6dlmtT45Pm7bVgv56tWr5y+evnz9tprOq/n86uoqm0z6vmdCLE7P/j//+X/9v/xf/5c3b9+/ePGi67pM8+l0KsQHxth2ufUi16Wy1l5d3wDgr3796++//aHtu23d/uIXvwCgVHp8cvHsxx9/XCwWSiljnNa6M8PPrWTaq4ilPtEEcj7I6j6JWBweqYc7Jool3I/A2zOlQ/LBYd+TuqvQzY9ORjwnySqmXqU0DyjuZWDGT/VxN7I1cdMPLexh/jHaZZ1p7/3QG+89I8Y5T1owjNF8Np1MqqvLD0KwNDYWACII67wP0XlvvANiabQBIXjnkSKEUBa5VvJXv/rl5bt3q7trb/3p8eJoNrF9ixEYo/V6g0jOeqlUURRi3y7Ste1gOqFzUroZXN10LlKIcbVcvn3TvXg6eXpxBsGv1htj3bZutlvjIsQIUnNAGKwRjHNOfWenk/xoPi/zrK2b7XolOcszhSHoQnm/6/ztuzZ5+64NJAARiTEuJRccAAJERHIh9n3f9jEGyHJRVpXSgnHW9V3Txdbs5h1YDwAxy8q+7+KjbpMYIVXFkv0d+z0ZY2bwp6eL6XSayCyc8xjBObu/QUR4L/YTQhCSOecSaExE1tpU/0szC0OIaU56CMH7QERpY493f5R+HmXnRoRzl4lyFpyD6NvN+mQ+eXZ+VmWSA2jJleAAMenPEREQcs6bzUpnuVR6OplyzpZ3d1rpL774siyKyWRSFJnSSgmutcrzrMgzrbIiL6rJRKe8re/X681yufzhpx/7YSgnk7Pzs8XxcVGWTIgQY4QkBYeMOBGLAWKElLnGCGlmWAwQAwQfg4+MiwfFs1TkSwqNqYMjpb/jTx9kS4fIyqEh2OszPHx9es29sO3H74CMAVIS9UkdMnHPGd9puiIi7XtxEW3wowQTEosQIXgIwdkBAHyMLsC67f/03Q/bzjSdCUI/SE8fhFmHNggRPcToQwgegsPo3ND7vjFdfzKbCIRcCslZWejtes05U5k2ETabbZ5lz54///bb77/94cPZ06frdrhZ1pP5yc1yXfdDxuH05GRaFq9fvoy+41JplTEuEKPSqirLSVUpIfqud85NJ9NMZ0M3
RIDZbFYKAIAAwKQWOvcRt123bTqpRAQI3sUk25gUMoMHH6WIUmtkZL2z3kKMSBijq6pKKdm1nRmMyrIsz1MYu15vyqIUXGaZ1lqv1ysp1Xq9ns3mWZ5dXV0fHx+PmHBneq2UDz4CZFmxXq9fv33TNr2QcjKpALAqK+99kefrzXYw5unJzPpAXGZl9ebd5enFs8GHu+VqW9dKK4AI3hZFtphNvvvu2/OzM5DaOX9xdvHq5avppCJEiCGTqus659zzzz7fbLav3rz58uuvqmqyXm+OS2WsnS0WP/z4EjljjLddM51O57NJ3w+buplMF9umv7pbdYPb1K3I8qEfFicnRHR9d7tYnCid3S2Xi/n86vImCSynMsFms+ac9caMmVVStLDWNk0zRm+JAIh7ObHUEPLY7eHBpJcRgxyZd5QaipBijKnC552FGBEjJ0KMZhi2m/V2u40kEZFN50fjAIRRuBP3iuyHIMbOu/qPQM5xuT+G19Kv102NiFpnQojgozEmJXl5ngnOEKFpaiEYIgrO+r7nQse9coTfD32OPlaTQnASRM7aGCzDuJhO1+t1W28xxqP5ZFYWwRqCwBCt2RnxPMtznUX00XsIPnqPGALg4IMNiEyGCKvV+uqqnpfw7OnppCqCDwFC15nNZjt411sQAvJcETFjjVSyqspMs0lVzial4KzvGu9sJlWeZ5ILoUWaHR9CGPrOWhsBiAWVScYYMuJSCCmJk/Pe2qEfAmAk8ozv8IDU4Me48GhDiMgAiDsfAMiHkIZ4j11fuK/tjYw+51Lz0o6CkWU8DXUahiFln96HpKsC8NEEhx2SAAnzvNdiTesqOXJE2ouO7qRk0rCIBynR4RseLg+WtKoABCP0bj4pnl+cpwYGydPgexdCIMaEEImMbvsGgBhLEjzcBz8Mpt5uheSc8bzMF/P58cnx8dFxmjZcFCVjbDDDZrNZrlbL1bppms4Mxycnk9l8MpnoPFdKCaUZ51zwJE37+Oj7/lAYYsz1AT/yPYdf9pDDeeDP/pbAzoPaWIoYH2CJY0jxyZgX6aNzeOCEDp/E3ezGkNz2jpXjY3DeB++s9T5YG4YQGuO++e7Hdd0NxnmuPvllR9wJPs7mrXfODs4O0bvgbDA9g1hIWkwrybDQUkkeY2ibxpjBe985WxTFi2cvXr97v17XWVl+/9O7TTPUrSOZ9cbnOpuWVVloydmrVx9i8GWpp9OplBIR3GBi8EqJk5MjYpikZhBJayW1JiQeB+tCb02IWFRVNZ2HENuuub3dMopKyhiCM0kYiRBIkzPGEYeqrKRWQggueAzBeau1lkL44K0x3nvnrLVWC5EwNkQUkqdI0VqrlCSitmtms9nd3e10Oh05aIKzGCMCCSmJ2Gq5fvX6NRCWZYVI1rmhH+azWTLxVak5F2dPnv6n//K/RuS9dU+fffb2w7vlcmkHo7Usi/Lq6v1iMYcYmrYFLrIsq4qyLIrg3ND1y+WyKsqqqhhj0/msN4PUSuelkJorzU3dtO3ZxZPletv03bNnzz9cvi+Koiiy5d1KKP3+w7WPJLISmdo2bdP1iLzr2vOLp6ll1hhzfX2thE6PcS+PkvQmfbhf4aOiZgrNE8g5pneMMSGEMfbTxYI9cTKOE2EPSwY7gdl9ykWEAJwxxkkwTgSpgaHve+sRAPihqRq3dzgYrf4A0HjshP/2fs6yLJ2ic877wDkvszzLshhcjKFt22EYskxIKQGic84NJoTgAZELyfZUKEyNZREgMkA7DKCVMSYGZ5xHgL43ZnCMiDgnYEVR9MamGiZjzKXJO8SRcwQBwKJHSn0MqTwZ4fi4ms1miNi2bZ7nxm9b44iRjwEIAlDca3NrrWWVZVIQ0ah8LSTLMiWIeQrGmBBdal91zhJnWaYjI5sa3yAAhhjABzMMXV17ISjLJQJL9T2upcxKf71SghgGQJCcIxchMkCGYB6g2w9wM0RMU+rSbU1yQcNgxjkg3ofHd218xpjAOXjyKary
3jMGjLH0X9p3R4cQgk+6ix8F/t7v6kVJTHU/5ZfSuCHOOXGGCJIBs2asGd8zA8OuK5Tv1Eei0Jmzfhis1vnpybmU8urDZbPZGmMkF1mmTDZ0mZJSEgEBDsZba9uht9YiI6WUyrNc6qOTU9yfQIB0Q3GX+SBC4hfjQVv3ftBgkm4cG1XHSPQwKIwHkuhwgEPG+GAw1CcUWB48ePwOo/P7pD/DVMEa7z7gvhB5f5r3T0RAZJDmzadRGmkI9h5b9jHEgMGDT7AjfhTyjk79cO09gKGQkxsChMAZoA8AMZdiUWYUvBIseCtz2W63Mcau77TWwPnx8Ulv7bu3H4TSIopVDUzB9OiEqdyvtsYH630AkEpNZqquh007ZIWrMhWRuqEn8FqJ6aQ8ms0Rcb3adm3LGGNSDaYvc11IBOsMUJbJrNJ1+6RuNvVm3Q7ODk4xFpCsMZ6xTOVVRle3LVtuj45PJXHiHBA7M6A3wzBInud57o1JQaQQQhMJIdbrrfcx9TAsFkc3N9eIuFwuq0lRFMWHD+/u7u6qqiJiRa59AG+NcUYRnp+f3i2ffv/jy++/f6NVdnx86r1fr9epA4oxttk2xWThYpxMJler7TSbCw4n8+lmtULw88k0xvj6zfXp6enJfNoPAzadc6Feb148f35z+eFoNuv7hguaTqeMYzJWKstfvX77hdInJ6fstlmu1zHGi4uLf/3j79PIw/3gckjIHxFdXJz++PqqLMveo/fx9m41XzTHp2cfPnyQXDAm6q49Pj5umiZF2ABwcny83WxGP5ci8sMAfeQzjvHlz/ekPyyEP3ClyDkiRtiNFKX9VJb94O2DHiofd27vMGkY077xA8ZTwQOK5mO8dTynB4fSOg1jI6KyLLMs40gAYKwVbDfQ2Vqb5zlEh4hhD/h8VG6MYIyJzlIMgIGIpmWVWDrBU0Cw1nsftVQsAESfaamlAK601kQYBh/3tSIfCIkrzq0B2/XWWqXF6WmWprv1fV/X9XQxByS303IzXArijHGRcSYli/tM1zk3mGEYeoKwGyDHeGNaY4zzJvXGee8jgpTSQWSMEJIUagSIiJEL5r1nLCjOhRAhRgCSWqhcFrnKe7VtnLWA5KXMI/AIZIZhzIYPc/FD23SosxpjNMa4nW5CmnOWmrcS6vJotuK+FJsooyGE1D4+3vER9E664kj3SP14MvFT3Sw4Uj2RS0Go1FgzHrcBMTGiGQm4UGzStm3qds/L/GhxwplsppvVahWD2zb1ertJQgeJ6CtQMsa4kirLiyLLy0k5qZTOmRRACAA+YkSIEBAQI0nOHuyldKoJeMEDGur+JD/Rt4ePivAHjuETDu8wXsRHNblP+sUHDu/+lD5WdRnP55OGI/1WiBD22V6M453F8fEoL3K49w8/+tANx4+bMhmXIQQCSCrmIQZOPM+UM0Nksu+GKtNJ6JKItNYsnwie/flP31gXgcP3P7wqK8H1FHQZSJgQnTPkTa6Q+Ml0Pof4Ydt25u37s6PZZ08vCk0UXN92H96+m81mp6enRVEsVxszDCT6oig6F7TOC10w54PpzDBUEn/5/CyX/F9//5cPq0Do8yzfuM55B4hZWfFla33Y1C1AYNMyywQY8BE2dQ0QlJBZWYxE9LquZ7OZMaaua8YQAM4vThO3ZRjas/wk6f68e/fuxYsXeZ7nRVbXLcZg+sEM7uSs+OLzz5fL7f/zP/yv79+/r6ppmi369u3bTCkiul3XkV97rn7zm9+Eb7+LENd3V8ezqj6aBUClBUDQuQJkrY11PTAlu67bEDy5uFBaHk3nXbNNU0dCCMvl8m65Knx8f32jy6qaLJ5Mp+8+fFitVlmWWWvv7u6m02kShuacp9EKN+subc/z8/P18M5u67KcvH///ne/+12M0ccwznHVWnddl/qwk8Lq4MNoN5JbSZcG9+LU4xoLu2npP3s8WNs/l3QdhpshBA8xRjsMSaSlR55RRDaZLeKB0HDKOh/U88Y9HxNun8bm
7Pv2HnvpeFB174c+xqhVNplMqrICgLZuVqtVDF4KHkKAGABCURQQg7WWq4KxVHWApGtghsEMA2Nohh5CgOA5Yy+ePeWCrVfroTOCsWlVHM0n01wRBO/TEBwmlFY6A4Ch72LwjCgCEoWI5CIOLnbGdYMZrIHoMyUFI2tM27XnFxfbbbPaNs4FoJjpLGltaa0YgbM2BJdpHaN3w2CHXjE+m1ZlmQnONnW9Wq1C8FmWBe+HYQAMnDNgIIRUOuNKpcIPEzzLtHWec8aIBOeME1AEiEg0nc6AMWts3dreBOuDszAYS3tR4mRyd/0qKXU9wMR2N8t775Po9r2uNyIlVifAQ5ATAJAC5yxxaK116c3GFggAHNWKU5oAeL9IRugVPs5j6GDcKzIiiEpy9L7M5OliXiqRCQHBKcG1kvesSEac84wr3OWLCIDIUCpV5MXRydF0Os+LQiotlORSKqmEVKenF7P54ujk5GhxUk2neVlJpZNmaJJ3AyLGOAnBOGdcchJjHe6w++2w9y49mfwEI4FACLuXpRkKAMgYH58HQIgIMfXNfNqrPXBg439DeAh7jmyIdG0faMd7BNiNZ//oARLF3VlARAgQDzPasG86jD5E7xKzZbDWWGsD1L3947ffb5ohAPNMfBJxGg1CODhijCZ6Zw14zxDQDeCNZphJxjESgBt6zlnb1UJwJsR0NlP5ZL3efri8ImI3y9W7yz6rChDZqw/Xzseu65Xg0dvgei35enU3m1XGuGZrCN18VuVZxiBgjN5ZKfikqsqq4kJ6HyKSEMJF8kCMcal0CN70HUQ3rcr5tIzObtcrM4DzlogQWIhQZTzpuVhnrTN5nk3KIq19Owxm6BFQSilYyofQG1NVE+8dAAxD75ybzaeJz8W5cNZtt/V8Pu+63lrDOc9zYZ0j4s77um0iUjWZLhbHr998t9lslVRnp2dCiOvrazMMSqnLu7vr29vF8elkNs3z/N27t5xxIcQwdATAODHiWZHPFsd3q83dqp5Os0xnVVkwJAw+eI8Qt+vNarkkxuqmXa5W7TBcXd8A40plT6dyuVr5GIXKNk3dtt3FxTkRtW3d98N6Ww/GE1fffP9TVkyzonp3vTLG5UXZD2Y2X1ycXbz86ZXzgREaYyZVldqriKjvusVi0RuDHwupJxbbA8WDEe/xe6DxwZI7dHtjvxYAhOAh5RAxhjiu7V3PM2DEGENwztqmrtu25YwTIKtmCziYh5BGk49tYYcwTtwNntmt78N29TFCfxyxcsGLoqjKCWOs74a6rp2xiOidzTMdQogxeG8nk0mM3jk3OBiHOe8KfN4H75USzhpCIIhE4cWzZ86a1WoVHArOykwuJuW0UOitMwNDEkpxJpFgMLZt6ggoJYMYlWKDdWm6XEDsB7Op6+26XswmELzgLHh3dnZxfbts+m4wxvsoJEcAJGCc+qFrmzo4u5jNCGJ0jiCWWTafTHKlAOD69na73QLGLMswhTOMOOcuBsaI73MOJBRSaJ1FkACABFJJrbVWKsu01rkuC8bV4ELdtG0XbQghoPOe7/ntoy9JR0InDq1SMkTORSKQUiFiwpmJmJQykd9GtzeaXSHZjmztXAhABIgU9qKdye3tI/3Uf3nfnPeAczE+OZ4qESEjjEEKRsFPCn12tCgkz6WE4LSUSt5P9cM0mgp5+m1E9D41dAZALMpSaZmVWVVV1bSaTCbFpCyqclJMirzK8oIrSZwDMkjsWc52IlGExBnjknFBRAw+klMZl+5ho+rH+dbDac6Pt2h8VBF/AFrCp3CR/V37tLbn4/+m1/iPJeDHc3hUYhzPGAAgjF0lMQ3GDSmasc67AJvB/umb7+reRty5vQdfGfbozgOfF2P0CBgjRyTwHIIiKiSXjDIpoxu8MxBjhCiEyPK8qioS+ds376az2fXt8v37enpUtNZ3LqyagRizzkopvDXOQaGha5uyLDnjEE0IQTJiFAVBnslMK9iLgkolGBPWu74fpkdnERnn
Is+zTCuMjtBLQozh+Gghlbi6umksIEShsn4YeHRS6Qixa9sQrJJcaTkpC86Y93YwfXQ+DXBOncGKM6WUlIIxNgz9MAyMk1JKa53n+atXLznnSsvj4+Pr6yvnHOORgLTWhKzrhm4wjImynDgfPnz44Kw/PT09Pj6+vr7uu05KebttI5KQ4vz8fLVcKSU5YVHkt9fXUqv5bGa929RdQCIhJ/MjFntGrOva1d3yaD6TxBmjN6/fKKW0ziNCBGp74wFtCNV0cqpjCrS5UDrP6qZ9+vRJ13WEcbVaC6k/XN48efbZf/x//b8ns6P1tgauI8S2aQGg67p//Od/+sPvf5+klbuum02m49rou+7o6Khuu8OdNWJ4h9WZca2GvVbs33B7h0DgaFtoN6WPIWLS5Aze7Yw2EhEE71NtD5EBRj5CTIdUzAe9qI83LTxCY8b8dAxIkyMsJxUApOaEoTMhBC2kUsoMnRBiGPoQgrVm9Kxt18PHyBhjbCfyQsQZgUNnbKqrMYwkJIA3xnRdZ60m71POyogNJvTWtG3bDX2mNIBIKBqRTSdsrXPWUgStuFIKo59MJozAe9+2PQK5CIxACAGEidNhjAneK5UTEcRABEKpLMsSbcQOJlEhiLEYI+c8LzRQBABvIF0H4ByJCy7SraqqKsbYDx3nXElOTAglRVZ0JmaZms0m5e1m3W6j381HD2EYbeJIExmbTEZg874m5ONOQ3lfhjyE1x8TLlJSGIIPIRKlgR27Kn1qxrgvYkUKIUCkXdIDDIERMgREQEJGSIz2M4aIIzCI95UhRpQIoodJTPpe/LANAxCB7eaQY2SMBQwh+m3bcCTkyIkzRpzHEFwIwZnIOEZijDggIktj1ERM8B4CIkPiwCgAAFCKQtLlSpzNXZckRNr37d33Hu1l2+Bj3O+Tydyhe3i8ez8J0cRHza8PssPHWw8fIcnj8/HjJvdH/hsRAYk8EcVDItK9xvShifmkNXh8ACDnnLxxg5MYyzzPBXF0CGHoB0DfN3U1mybKg1Jq3RgAatv+9rYeLFRc+qFvu74sSq7zbhhS32QA8BCB4bZuy6KQOnddvV6vtYCcT2JUic3nrN1sNllZFEXhIN7eLtf1kGsFxEIIkmOVSwLXdUMucDKZuwA3d+vf//V9HyGEgMC29aB1HsLgHIQQ3l9eOtcXv/paS5HnOSPwvanrOs+yTGZKiYJj13VVVWitY/RN07x//55zfnp6AgApGN1sNkVRVNV0u93eXd8sjk+Z9zGGLMtcZ1arlbHxs88+u7y8qrdt27az2UwIgYwxIY5PTvu+X29r50Lb1lVV1XWdSw5uAKLjo8X1cr1ev8nKyfnZedO1q5tNw+o3r19KYifzqT46YZyVZVkUxWCc1vo8LzcvXy6yxe16jcRvbm6OTk5MiKvVSlfF6elpmq+UZ8Jay2SWmJanp6dXV1frpp9dfCGEjHlMtIx3794lgZUQvBCiaRrOuVaqbVvOabNZjZnc4bjp0WntRkbvhSoZY8F9Guc8zMEOV3uq4aX4OgLF/bP7gt1HOVsIgcABwEcmEj6GLx4/+BuO0BiTsC/Yj1TOsqwoijRgYrVapQm5CV7bbDb7ixXGsRTpDZOW/6h7O9axDmqeIdFBY9yN4U6f3vd9OoHkb9iel5+I+4ceXQghlYIQ08A5KeX5+XlyXZPJpMgr413TtenTy7JM47Wk3o0UKMvy5OTk0MdzQQAwqnaNTiWp++d5nuQBRz0aqXiWqaRlkDZ/qqcOw9B2ddd1wzAwjkWeT6fTsiw5Z+OleNCrPuZY/f4YuxfSrUyElHR63u9A0bEF828Y0CSgPooS7CktdDhv4dD0Pz5or9hy6NLGVfjgp2nGXvqtVCi9VylL7YCRksD3ZDIpikpKSYIjot+NNUcSnCs5mcyS6spisZjMplmWmO4opBQqzQ4STArOeYqAHq/wtD3GNHrEGB/b+sMv/kkXhT+DcD5yPx8p4xxixQ8qZw/O4UGRHz/G
IQ/f+bGHhn2ae/hxD0710ZL46LsfWoPxcVonSZQy8foYwxBCdL7vewxxJ0TCeJ7nUsrttlFK/fEPb1YNMAbv3i0hUjfYveIrz7IsyyQBWGvbNjZNu58ICMa7ZMHSNk/LI22lpK+rtf7Dn/784eq67bvtdrteL733ueJKssWkoGAzJb764vMvv3zKkfqhl1I6gAiwwzkY3d25169v67qOMWZZVlVVAsOMMYyx9BXST6WUVVUppbbbbSp0NU3zxRdfIOIwDJeXlycnJ4i42Wy8sV3XtG3LOc+kGoZhvVyVZfmLX/ziiy++iDG2bZuGb5dlef702WpbF0Xx9u3bi4uLVz+9ZAR92/jgri7fD8OQ5/lscfTss8/zsvj//uf/8uHDh6bprq7M1VX3ww8/vH37dhiGzz77jHOeRmOm4XlS6aS1tFwu0xZ7//79crmcTqdptOR2u6sIpsrlv/t3/65pGufchw8f2rb94osv8jx/+vTp73//+1TPM8YopVIFMZXJtNZ3d3eHqyvVjM2eEJR2etrm4WAq3s8dn9w+6d3Mx8cwDKlaNxJq9sZ5X4GeHZ2MsmlZlimlvPfJh8V43zCPo+hiCMF7jMCJMSSIiRAWEIAQBeOcGCdGgNGH4Py27kMAzgVjHKIPwQcIxEhK7mOIEYz15WSWlxMucxtoMIP3HjBwwThF7030A0TnXD+pqr7vhM4m09nx+ZPO+bu6vbJta/uubXPOn8xmGiKZrtQMhR1svenXTd9GQAKBLmOYSxEF1xRjW9dtvcEYRC5ISk8QhAadR1WocrGqu007WBcnhY4xtkMfAXWRo+DtMKybWmZZP/TWOS3FpJgoKW3XtXWzNrX1NisK4gylRCGMC+V0bnxwPnLGBbHgvGSs1EoSDrLqrAHCrCgiUt0MNgShCq1zF4K1ztqh7Zuh9yE4yZ1BBEZAGCCG4EIa6U1gneGcEbEQAgBxLmjPRZFSxwjDYGKEUfoorULGSQjug/M+MIaTadX3IYZEvOSIzLkwNqXFuMt1duA2AWMUMTLOuOCMExLuOscwSiWI0V4hMo2dIGKk8nxSVVIwJfBoUpweVdMqC7YVnDgnwQVjIiCGyLhQeTZ1EiiTosxYrqPknhFxyXWGQiCXxAQywYQWKte60tkEtAiCO0ILMRBGlgZZABEhICfiiCxGFiMLQUD0FhkyQhYDBBeCC4SMExdcEjIEwn2VjoAYMhAMGKVbEBECwq6MwAjT8+OPIPoY0hhzZBjTMBJM/X0MkJAIgCEyQII05hIIKMS9aLyDGDENq6LIKBJG4sh5YCzQ+IcC4vgnEqXZeknL/PBPQPQABEDEk869C865AcAxQg/Gx+ABTaRlO/y3P/9QWyJVDBFjyohHwc8YAMA7BzFiDPcFxOAxBCFFGIahWeUUTuc6wwHNWtEQQzcpBCnkKhNFOT9/YVC9v6t/2Phv3l5dtr3jBJOS5fnGWOstY5HAlop1zQYJHYT56cWAZNteKMWz3IZQt2Zw0ZMcPJmAyLQnMjYAoFIqUzrXqprM3r998/LlT1yo+eIkAN/2PqAIXKPI1uvl8+dPfvzm92CGeQbcmtZD1/bFNCsm1dVd0zgIDFedP336VTU/bQezbdoizwijHepJpTxSMa2artM6I8E48ePj0w9vPnDGnlw8JeRDbwHJGr9cbSeT+ab2PkpElWeTTGbRedc1fmjQm2kug+/++Id//d3f/6aaFL//8x//7u9/oyhevX2lBXOme/7s2d3tze1qnVdVABZJfvHVL3U+QWTff//jTz+9NMYqUcyPn7x69y4STE/OF6endb2ZlNm80tv1zdXlm8XxUe/Agvjxw3LrSKqwGfwv/v63Hy6vjo4XR/P5u1dvCl0Ila823burFbBssjjtBmfMsFkvlRDN6ta27ZfPX1CI3abt2laQZKgYioj8brlpO5sX08i49VBVRdu2aZKw1tpam/47chEOID3cxdqPEM59BM/GOs4YLrfGcqmEkERMcNJK
CoTobK6kIgJnMYTow2q5Xt6uYoyAPAKxyfxodGmpcyK5vU+mfbgvTqba3shzHSP3Q6+eqgUR2f132L8JIhZ5RkTOWueclCIJ3qeeSiJCSkJcDlINPkSdqUxnzlrOeabVYrHo+rZt28YaDqQgzDJ1PptmghF5qYXxbvCut24w1tqAkSSXgkvrtiFE53032N5aHwGIYpqBByil9i64GN69fb+6W/WDYeAWi4V1FhHPL85D8Lc3N3mm57OpINSCz6rJ0XRaZDpY17VtF0yMsaqqoiiKskjTpLIsgwNYEglTFsg5j7rie4F2ay0RxhC7ts/LKtN5VlVcaB/CYDtnffQQhIB9cCT4rv0/BbwISeBuL/OIEELgu0lgwVoHADvJmn32fHBndw3R1vpPFqIep3QjDnlY1RtXwmP+xZ5qzCRnHCPDUGm5mBS5FAiBA3AiTgwAA0SISIwYceR48OvIGOPpE9I/dN9RxxlPYzFG+swI/44sU/gEOMlgP8hidOqPs5/7gz2sX/5cKWJ36eBQ1vIwD9vJBTzIydLAjf20rvQOCAC0q6On/qT74PfnZF0+WY9ARAgBABNfzHkfg0eIGONgOmucsb73cVm3f/jzt83gkfGkoZoIvvv3OfjMuKvBO+eStpIJHqLD4BTDUjJBUXGoiiw460OIQHlezo9Os7ys2+765vb19Wq73RrrCTESWmP7wYQAjKUxT9H0QwzB2liVWdd1trVK8rLMyzwnAO8MY6xQKi+0GXo7GCk4MXLWAkBZliRzRGzb5u7ubhj6oigm1URIua03TdO8+Pzz3/+33/+f/v3/+aeXPyhF1UQ4YN6nab3ovHEWGIF3dlKW00lZ5hmDgOApBiWYVlLrnDEWfJBcIAInppRq66YfurIsE1WCC56Wl5SyadsYY5ZpzvnQ94jIGNV1baxLwCYgeR+yvKjbzlp7enK63W6TrUhl31ev3lxcPDk+PRusPzo5/XB1va2bD1fX1WR6dX0tGB4fHV3fXBOjMtdVmeeSedMThuCtVHrTdNu66z101jKmzqds6Ifz8/Miy969fS+lIEDkPEZYreu67crJ4vLm9ujk7KefXgml37y/PD4+9i60bXd0dMwZX63XjPEQYuoOGtN0wJjs/KiNMlo5OJAvxwMGABE5d4/8xQPC1Cjy+WBJ0356QzJiO/OKaK1J6izpfbqu69oOYSdUTYdvFA5E/ccGndHPHe7tx3DKA1jG78VgHlqB/ZGa4kfjm3xt4lmM7QHOuVEOJt349ClZlnnvrfEQCSIhshix682mbn1ExqXz0RgDgAmLAx+sGZwzEUJ0HiOkN6yqqixzrbXcQ3nGeQ9orLc+9s6bAHXnSMiIaL3jSqYimRAKAIQQZTlJEKjWGolciCPNNSF1CcAMISSoU2rFpRhxQimlVnIxm5Z5FpyN3uVaCyGaZru8u+m7ThCbTsr5dFppzRC8h+TkIDXxMcGYiAGt8bsuunthcYweoodEKtuDzzuEc3wl7NV90kVOC/QBqvZghYwtB4de7dAX7iYtfFyAvMcJfRhRhLgTP7snEj8ADyMC27kzPm6ncZiDEEIIlWTmkv4L0EOi44ijPnZO4wclDCQhw59E/D65gP9NPu/RNjn0vp+kq3zysn+y5ve3j58759GaJLmlB/f0fmotfhTyPjA38aC/ZVwJqfsYo0+DO5Lo0p4YRdPpVGutVV5UU6n1tm3fvHv/w0+vttttjDHLZNrg+7ciQBYDeh+diyHEuCvGWxthuW3WTU9STY+Os6JyPjb9sK3bTd22fdcNxhjX9abturbr8kx9+cVnX3z2Ijj75vXL5d0NZ1gVpRBiPp9fXV397ne/67rm//7/+L+1bY8xVGWeaS4Zl4xLLjhB8ND37t27d3d3d1LK6XQOQCEClxqJ0z7ASgjZblEqtV6vb29vE4lfa82FSJnKdDrtui6BgaMlzPM8hJAmjP/2t79NZvDi4uL6+vr6+jqlIpzzENxvfvObhKyWZU4Q
ri7fX75/h0kumPMsU72xLkSpM6myuu1ubpfA+N1q0zb9ZDL57LPPgjWcYQxuUhYYvXfx8vLy9nb54sVnt7e3b9++l1mOiH1vtk2n87Ltu3fv3nkfq2p6fvbk7Ox0tVqFEJRSl5eXMcYXL17A/u6kAhPnZO2QVDtwX9sbBfTh4xExDzbOGLAebuRxg8DH4XiytOOnW2tHMmYqBo3c0UPh/r/l9n5uw3zScOBBuQgPCOujiz480dEeJdcIe5LFfev3HvCNexW05L3GL5yS5ZSScqZiwMH4bd0tt1sTQmSiH2w/GABQShU6k5IDBO+tcz1jjHHknAvBtBJaa8l3FG3jwmbbGOutR5IqkAjEkaDrewSGSE3TeRezokTEhIznea4yDYDOhsHtlOUOdEx2F3qUG0/XZPTfAEDBIwTOUDAuOCkhikyVRba+W66Xd11bg7OKUaZlLoETeGMxRHbfPR3GjgLvYwiAyA5ZCSnVc86lfA7u2XcjAxhwL6zwwO3Bo9rV4QrBfR3usW7WmF0deqD7nCbed4iOJ5l6GyLt80Vx7+0+eRy6CnxUfAp7wbzxuzzYQuN+G78R7OXB8GNd9X/j8di3/f/jO//GOz9we/BvPg63rb/v19wRxyHS+PUD3MvN/Jzno08diIgQCEAy0lIlAXQiChGFzoTOVKalVF03vH93+e7du7s7n4x+ih2LoiiKIj0QQhBnABBwd+oRWQwYETYG3l3eXS83JPXi9KKoqmYw7z5cOx+RicGa3lgpJQDd3Nwtl0shxOeff/71l18qpa4/XL5//75tts8uztNYAylYptXJYv5//B//abUcvB0Yw6rMJ5Oq0DqBagxgvbp7+/btZrNhgsf9+EnOJQC4EALEdui7rku6UsmC1XW92WwGa+MelPPen56eCiGurq4SzyWVwRaLRVVVXdfVdb1YLE5PT7uuk1L2fb9cLufzed/3m80GAE5OTr786vO+79ttLYR4/fp1Xdc7ctBmOZ1OXQjbuo2IOi+6wX24ugkRgdimqRFZnueLxWIxmwqMpm05eGvtZtvc3i6JiaIob65vY4wQcbnadF2vdb5ebZwLb9++O3/yrCwnX331lfeeGBRFcXt7fXl5KaWczWZZluFe2wv28Anso+pxr415zmE8Cge53WMqwOMFf+ho6KC1FPYEiGRSxD7UGBke6a4hIqtmC/p4sivsWTeHMWncU0vwQGR5/NSRHPh4S0c8wJfgvmdZK9n3vRkGxpjWKqUaxhgkklIikjUOAZTSgnOIyBgLwZth4JyfnZ6kcisAkMicsbbrKfhJpo5mU52pfugGa5ngQihAci7EEAmRESlJyMj7MBg7GGu8Ny4Y6wYbNnWzXtdcaiDWdP3Nau0CTjSPQFmR50VhnSEAnSk79IxgMa0W05mW3Ds79H3ft3awssw4F0VZMcaTmzPecyGJWAiRC7GDoYXkSvkYPXBrBmsMADACIOSMF0XedX2mc51lQiggiiE4O8Rg1n1kDCSXnPEY0Flv7a4tYRQMoF3fGBAyqVgiuaQEdB+AJ37QLpThnKWk0BiTGj0fpybwcZqyS7CI2D46e5y13AObB4cUQnAmOWlOVSZnVV7lWgnGERkhI0aMIxHnkgtBQgjJWRLJJ2LEGOPEiTjDiAgEBLgTZGBpgxw24e1a6AB3fXWfet4YN7rewxr4WEV4eLCfFZI4dFH3Qd5e6CTt2YOrlNoTP9q0iBhC2pm7cQqI6TyB7ar9qUX9AOT5mTlk8ef4NSG5vRgDxpC08ByE4JwFIOtj25ubTfPnb37sfAC6BzkPvOzuchERHjCtnLXJIHKIBKHSfDEtCi0lQ8nJORsDCqmB+HLTvr+6Wdc94yCKYico6mNAQEAfoneRcRZj9CFaYwUjgFhVVdt1PoCP0QTwwyCVnFaVFMLawfSdUFIKTkRCMCkVMQ6Iw9DH4JXkk7JEiJv1qt5urTFaa4xxsVjcXt+cnZ31bferX/7i5vrt1bJ1xkgp8jxH
pOhtDN5aQPSp52oxnXJCawbOSErFGBljCNEaG4PPdEaILkHHiMR5lmVccES03rVdN5vNhRCXlx/W6/Xp6YnWum0brXUEDCH0xhDjs/litV4HgPfv32ulfvnLXyyXy7u7WyUVIhVl+fr12xDj2cXTdx8u75brajrjUr17/14KZZKgcfB5Xjhvu6Z9/vRiWuab5S0RYIRqOiUultvNy1evs1wvqqxtu0k1XcznRZ7f3N5Oq4lS2Xff/yRVNhjvA92um64z50+ebeumN0MS4hh6G0LYbpq26xaLIyEkAPidF4jJDDFGMd67ut06cS6EkDRoHrg9AEgEgjFsHTM2OlAmw31mla5wMkdSSik4IiaQkzFSQgJE79wwDHVdD0NPu+0P90ooh7v3cM8/iPcPPXP8WDNpdMsjyJlcPXxcXdhjuLufJss4NsinX0zJ8qjiQTtNyJD6f6WUbdsmPVOtcwQGyGzEu7q9q9vOBY88ECULLhjPM5VrSQyc78dEIeWRudJKSCGEGYah7TabTW9sa5xH7iKLTCAJa52QOssKZ30klsqQQogsy3SRA+JgjHGWMaaLPM/zotiV9NI9S/5mTPXk/kh2VkvGIQqGVZEVWQbBQ/RaqvmkkpKDd4LFxaQ8Pz46nk8mOdccFHFJCDF6Z/a4OUNkB4aJxvs4ah/s8N49pT75PADgnO0vr3twuw//fhDXj7d1XIXj82MIBR9nYPdRVAQIcfSdhwHdfTyIGOneAT/I1fCgeWNMoB88fpx4PU4ED/O8QyT2v5uN/ZzPe/Aa/BmQ83EWNf73MHp4TH/95K78txyHHx3gwddkAe4F5Qfrh2FItulvgJxJjGPk9wohEl15MZtURSb3t4Mx5rxvexMiMsGJS2PcZrPdbrfegxDMOefs0LdN0267pk2svCTzEXZzN8GnYmSM3nvjkUstpaytf3918/7mprGWqayYzpq2u11vfAQfcb2tB+tmi6OiKPq226zWUsrnz56dn58jhLvb6z/+/r++eP5ss7w7Wszq7XpSFXbo/pf/+X9+8fRcSVZvtm29kYLNppXW0kUwBuq6vrq6Wm+3eVGWk6kPcTA2wWsJ2IQkiuSs1rooijGESrCQ1noYhvV6fXR0tFgsbm5uXr58mX636zrv/fHxcVEU79+/jzGmuklZlt9//6Zt20RgjjG+evnjpCi9dZeXl0qp6XR6e7v9/vvvj4/mkvG2bQlZ3TRaZREwIuucXW1rnU8icDO4uq45wcnx4nQxVwxZdFpneV40TXt5eX12/mQxPx4Gy4Wqt81suri7W5bFZLlc1W13fX3ddV3XdVmmj4+POae2bVPX/pu3r2LcdyDsDU5atGmE6mEJDA7GmzxYww8QpsONP/q8cV+MjmZMw8b07LCdNHkW7z3CvfTSLtuDPRA3lgcfUB5GewE7dU3/uKjz6X1IBwbosMEQwRgDO24hT/R9732SLk0BfKqYx+Bxb0AwRsHFxcXF5eXVer1RSnNZNHUdvcfonR2k4NWkYJxFCIkVlySZU8NDiDFxOnay3VwwzpEIgDbbxvgw2DhbHDuPTW/vlmuhdSWwyCtirOta6y1nxAiU4LNJdbKYF1kWzGD7ngAZ44QsoEdErbX33gZPjCUcHBn5EEYSJhciVTelzqIPzjsIcceRCzGEmJe5sa7reucc50RI1gzODG2QyUtZ42IIAISEO+Vov6NMpovFAAXnPu7Q4NRBkTL9cSWNvictDkRE4p809A9W4b1feWSjxzL1Aw+afksKgQAMY8ZpUuhprjMlBIEkIkJGxLngnHMuuRRMCBIciXZfCpMv50QsQABEgjQxHIgREkMiwe/X2wN7HT+WFNkXOO/BlnFrHTrjh1HdzziawxDh8AEdvP4w24NdFvW4Gy+VQxgRpRaDxCDZza0ERMR48HH/FkrLRz4vxB35GhJTJYQQIHjvbEAcrN80/c1q++fvf7KBEVd2Pzjs4K0DACTdRdpX9YQQqb9H8ADB2b5F7yXH
YJ3t2+hdkRdCSEBmXGz7YTDRRRciABPIWKKtRSTGOCABRMZ4CsVCCEnVr6rKpm1t4EAMKQkpRIiABJxTkWcxAmdM6gyRjLXImFJa7SBxCM5JzidVIQQNxlpjpZBSq+B9kefNtnbOHh0d1UPomrqpa+8tQtypIIG1FopceOdjDJOqVEoF74iIMejaripLRiwEz4m893mW++Dd3tz74FWmE+yZxCK0Vl3X3dxcA0BVlTFG50Oe58Y563wERCLrvRDi3at3Za4xQqa0j8H7cHJ6tlyttm1jrZvPj96+fdX1w/PPPm+7vm5rD9yHeHp2XretjxFDzLQ6PTlyps+0YAzv7u4uLp4A4aZpnLUXT560deusm5TVbD6HiG3bTqfzy6ubxclZ3Q5M6XcfLgFFPxgiLjXfbmshBCKt15ssy4uivL6+QSDOmZAiybAk+dckNkJ77tuY6iFiGkn/wD4k0/LYhoz/HdGjEX0MgGO8HmNwzoVUPkRkRN67JErWNI13XinlfAAAVk7nKbcbz2Z0rfFRz9B49g/4e4fHSJ/bOX84+G6HPAgEa23ywJyzhHCG1CYdY+Ls7cbQBM92PD4kBCHE+fnZhw8fNptNnueRVFs3iVDWNbUQNJtNskwjYYwhOucTmQeS7/OwU/VCwEhEgCz4aFy4XS29B+P8ZH7UGbttu7vVRupsIvlivqjr+ur6khgxBMRwvJiXRTapSi2E7QdrLWMkGI8APloiSs0xESDhilrrCJBC13TDUtkfEYVQ3js7DNbYCMCI+RiG3szm82bb3t3dDcOQhvxBiBB8Y3n0oe964wLEyPnOUiCwNP1rt0QipCDG+sFaT4Ra61R4T+4tVRqIKE16TEOFGCNEdrgc76OXj3ki4633+6r1g+jnwZuMvyWlpBgYBC1ZlatpkWnJOcZMCMaIM85FotwqoSTnYufqdozHvfdF3NntpMKM98Vq9jE75sERPj5ijJzJMU89pCj/XNks0qf93oMQ4f55ONQ5un9lcoGHKtKwy/biodsD2BMnD8KPePCJ4d9W3rs/pZh0qPfN7CHN0PAhRkS0Pmya/nbbffvDaxeJS2XCzj8eXJAIAHmeK6WkuEeG0+i+drvs2qbZ1tEZyRjDyIiKPHMuhAg+RO8jkOBSEDLvIymZ60wrxTgTQgoplZRcyuCdd8EHG0PknBFiVZVd10XSxg7OOy1lUWSMEGLgCEqpsizyPAveO++ZEIS0qWvJaDabKcHX67UxZjqdKC37rs3y4rsfvjs9PT06OvLOCSkIAQmRqbZuts3aGNt1g/cuy3RZTeq6mU6KtmnqzXo2qSaTCUBExjG6tmvTfINUqnDWFXkRgk8GbbBmsENRFFmWGWOkVHVdLxbzLMu+/+67pmmePLkoy9J5770frJ3NF+vNNgL0xpydnS2mVQih67oQwu3dHee8KPJ+sEVRvn777u9++7uuH7rBTmZzY822blxAxtiTp8+vr29CjMRIMDw7OgI/aCk4wTfffPPF118hY13XX199ePb887vVqmvb45MTwViR58vlqionrTGcy7ysrm6WPmDddi6ELMt1rjhnl5eX/dA/e/rM+3h9fXNycrLerKpyUlYVInrvkpkAgPQYEVNaPLZoP2YDJBNhrH1cpE/g3yGkNHYwS63TSt4FNyHQ/rFgPLm9ruuapokxCCF8cnuzxfFY8w978WylVNd14zmNRezkIA9t3LjoiT5ix4yPmVCJ4cMYS6qbKeVP2V7wQUrJODuYAbh7NyFkDHEYjDMWAMuydMZt1usXLz67u7tt2zZtZmNZ8GlsimEcmKAs02VZBmdjiF1TD11f5HmmdYAACNakGI0440JIxtlgbNt2MULbG2SCCYXEN03rgSLSL549WS5XPoYYvNJqMO2zJ+dPzs8Ep0lZKM6NGawZCIgzjkgggBjP8kJI1TRt4iK3bRcQs7zQOgshGuucD1zIspqE4CFCPwxd1yOxsqwiYtv2LFleQudc13Zt24bglBJD
0F1TN/VACELxGMGHSEjW+RADI0ZEzvYIoLVihJ3pYtx1zVtrt9smRsgyPQxDQlz3eXZq92Kp/+Ew/jpkVY0x1whhxY9t62M08hC+izEyJCk4QVAMjxfTo2mZcSYJROpuEJyIESMkTozFkR0lOReCGCPAhAHsrjYB7vxDmgifPppGUc1UYU0JSSrvJXXNVF6VMumjfhRvjiHgg6+z24j4kSM59O6Hu/SeEwRxdKiwl42OMe6qevGj8nkIQSoBADucZg8Pxxj3nQ73I9TjvoHhQZDxAFiGA/Q4xohAIQTnQxKjjzH4YINzUojtpiauOuNevrv85rufeF7WnSHB06VM7885V1JorVMx2BqT8ANrbd91TdM405t+AB84AgbPMWZKKcG10gDoA+zkQIEYMaUkcB689d4TYEw1Rh9Shaep6wiBIzAEa/3J6eL66s5GSp0s3hg79AyDlIIREEKe6b5tVaZ98PW2ns4XXErf9UppIaWxpu9a57yScjqdd32LRIMxRVkprbd1E0KYTSZZOX3/5u18Mbu5vu76yAVyqYJ31pkiz7q2W61C29z93d/9HTFunVtev8+yrKmbPMuctYzIWVdWVQyhKAofQ9/3RVlkWRYQykm1vF2enJxYa2az2c3NNWPs6upyOp3O5vPr62sgmkxnw2C+/+GH3tjZbDY0dZFnCbmvt822aRaLBRKvm3ZTNxHo6bPn3//48tmLz25ub5ttw7kOMVbVxHo3GOO9s0P/4tmTqtQYg3e2Kov37z88efrEh8C5MBZW6+VmvZpOp0+fXCDSpJpwzuuue//+kgn9+z/++eL58763q02dlxPnhzzPN5vt7e1tkZd5Xvb90LbtZDJZrZZt185m06Iotttt2zWpHjSSPGOMI9lynO1wuD5D6q7Zl7oOOwhGDDIdaSkKIez+rfq+d84qpaQQzrmkQZ2m+nRdZ60lwF3CA8Cmi+PxrUe3edjRHD5WgnhAYaeP2aWHSevuAeP3wBHs/LZSyjvrnCMkKSURps2TzMnoX0MCbZNWpOBJa2uxWHRdW9e1tVZKGVEEHyAVxYOLELVWk7LQWnFAO5iQQjlGIQYiMoNJl5tzIQVnRM4F5xxjwjo/GGdd7M3QG4dEUqqMOAIxzlbrpTUDETx/8uTkeE4YBaPovel7by1jTHDBpWB8NzgxFSyTWg0AjPlvjHEwJmlVlGXpnEus/v+dtv9qdiTL0kSxtbZyCTiAoyIiIyMys6q6WvdMT/Pa3KHR+KP5wCc+0Gj3krwztJme7ukulVWZkaGPgHK15eLDBvw4xImqpnALC8NxOFztvfbS37fjKecckDMujPFJkrgQVquls+5ivqiqkgP2Xhpr+662DgAC4wyR+0CMY/AkOI8AdIKhlAJD6KyOFSuMMWutMRYAhOA4ahHd2z3IGAv77NI4znBk6xx5dWMXcNg/DHpMYcZyXKUUhSA4E0Cp4ovZZFZmqWCKo+RccMYj5o0UXComBWc7Z4KLve7Zg0BzIdje/+M4vjeAE01w1jPbP9qT3tt42j+qEHbs0uHTCTwA4GwcJWajF8gHT27s8DH+6BeOS1podIUxqxAgnH3GYeDwEJyJMxEII11UVJsxBAKBtNbARN3Znz5+/v6HtyAz47xMk3jf+7YEiUDe+77vtdZG62h0e+9jD65zOrhAEASDRPBEijRJ0kTFp2TR/pAqTdMsz6dlofJUSRm87bu+aVvnnUrkpJh0fWeN5ggIwAUTDGfVbP2w8iyOe0AggaSUyJRQQsQqYOds13bOuTQvkiw3zoPuhRBSKs55oOCsRcaTRFlnKZBxlgKoJJkUBSBbr9fe0+Ji8etf/SrPss1mu9mSEDSZVl2vV+tmPp8C6c3a/+IXr7MsbZtOgGGMUQhSCGTQ99oYLTjngksh15t13/dxjJI0ZYwFFwBASiGldNbWdW2tAYAsz/u+77SWKimKUhtTt50QggPc3t4aY2azGTJutU7StKrmndY/vXv/7/7+H/6f
//m//P0//E+/+9331Wy23daegDxNqkpr2/ddCAGBXtzceKtnk6LermOYgSGbzqqubbetadp6WpaXFxflpJyWEykEIazWm+226bTV1i3Xm9nF1cePn56/+Gq2mNR1HZeRru1jBidC+BIRjjilAcFay0dF3TGzEFeGGFZk7BjhlrGD4rij3N6RuBERchHjpSEExlAIwQC99wyRIyMK1pgIIgPR8h737R1lCwFgYE4fNNzYmIXDAoEvR1fGq+SQR7HWxocfOhl2URdgnEtEboyzxjHknMv41WOLNxERRROegg/kmeAyy4OU2948bNuHukUmmUiVSAQqb4MzjgEmSeIC9cbG7h7vPRIoTgnHSSpneZor4U3bdw2SV5JLjsY6S2A99drb4ONr6fu+KDPv/bau265znnwAEDLJ8jzPsyxjnEMExFKKGALfDTByJhOFiFrryF8PEABAKRWrYLquM8YJIZumaXodS2cJvLfamp68nU6yy9mkmmaKAwVABMT9KGCIZ0MgzlFyZEiRZihm9Xb5GAbe+11l+d50ijqLRiiOY+Uxnn9Hzg2OYu5HiesjZ3H3lfORxQMA+I4U4NF+GofHYyterO9EJgAj5qeAR8xZxphgTMAeHoXYQQXjWGmNIxZHt3R2G0/yI114rAhPOj3GGvHIVhi/xrErNl4aTrtHxm7coxP4J8jd0Q0P3irGWCry8VDGrHPMHANiRCiN5nmM18UjI+xWhNCLzWpDDx8RAeMEjACByYDME9gQXGDW2kDIkXHOE84zKaZ5sphOMsmrIrtaVBfzyWySlWmaSsFZQLIs9tV7CM5Ljs70AIAUyPvgfAjeedK67/q+1f1626y3G+J8VTf369oSbLveAXrA1Xq7XG0AWFFMlEqMMV2nF4vLm5sbIcSbN29++OEH59x0Oi3yXCAtqvIX374SDF5/faMdUHA7fY/R6Ickg7bXD6sNE3I6q+J7jp1zsVB+u90iYppncfb21jysVwEIOZtMJtFi8N7f3NwkSUJEy+Xy4eEh8rItl0tr7eXlZZqmnz59EkL0fR9Xv/lsen19HULI81QINi3yzerBO5OnCee8bdtZNWEIDMlZLVWEwOWeoO665XqNMrGBrHfI2adPn6bT6ayacMmI6OLiQmv97t07xlFlKiqn3pi63XIhmqaRUmRZwgUmqVytVog4n8+7rovspEmSROAVItpsNr3uijIvJ7nzZizd8FhPx/7o1D0qFhkkYhzqpD3P5VHIZBDeoeDlSF74dLY4ql4ZxHKsjcY+5ljxHt3HqbVLOKIFoEfntO/aWODAOffeDShtgDsOnQj6HFFIvAtpmgrOvbOTyaTv2kFrBkJrDTESqSQK2hrOMBVyVpSFTBLOgQIFb6wlIJlI43dow5xzJSRnjCggYN/3RMwFr10IRIyJgOCcLbOyblqtu+Ds5eV8WhTVJCdvX371TLdtvV2ZricAKUTsS00TFl26GLMFAOtd3/fRf5VKSSljQS0i5nnOGIZAgcADaGPW27Zu2l67Vpu2bUMIZVkmQm63m3qzDcFjMuUMQgid6ayn6PvECCcRMGQcCYJPlUikDMF1zkX4U6211gYRhOBElCTJkKYdnPUQAuBBSnmYDHwPmHkUw9yTOh6zio8RUob12nsvGRecCQZlnlzOJmWmJAPJMZFCcCalFFJxKYRUTMT45Kh3NRYfM0S+61d47Oka7gdoLAYHhuE5moinyjafkkYPBybnUx8eBWGMbzLqBmSMPwYwx32EFFFyYs0VZ4zhztt7TPEd5POe8PZOlffouuD2tkFEuPPBReZLT9hp/9s37396f+tR2EjxGkIsaYkTxkQK3xi+ppHVGz9wDkCcs0QIyRkQMSCJzHsXWZqAyDsXnI256oAkOecyJh1ElmeRzkDrHogi2p1gkGcpAjRNz6QEJA4kgBCBI0iOUrDteuucmVUzY01vDHLRG1tOqwzRGGudj8GVNMsChU53aZqVkykXcrPZbptayqSYlHme
I1LX1JfXNx8+fLTOE/WddpOydM5Ws9nnT0vr4K/+6hfIcLVaPXv2fFZwYw0AaK2r6aRtWs4YAXDGqqrqdY+Ixpm2batZJaVMVVrXdde1eZ4DkNa661pE3GzrxWKRl+Xt3f3t3X05mZbT6WazSaVsmibWAfR9PyknvdHT6cx7arp+tamLybTtDQVcbdfTadVsNsFTkiYqybZ1wzgi0HwyQaBFNUHyXVMrlWhnFxeXxhiU2XJ1P18slsv77Wb9i5//PEmS1Wr5sNo8LNfGOuMcV1LbUM4qxngAGytx8jz33lFARGZtJOaUgchayzhLkgSQnHPe7Uy6EMKAxjnOmxyJ2MBAAqMoCO1LFk4N1hj9j8sLi7DUsMvwAdGQ27PWAgFjbOftxZOOVeIQih1L+1gvHt3QkRV8tA26d9yrOMhh1BBjrKyx+TnWz8YYrW3TdHXdWuMZCiLqui5VQghGMdqVKlCqNfbzw/JhuXaRYTIrGHCnndY6eJrNL8rpLM0zJaTkKDnmSlZlJiCkkk1SVUqhGAkkAcQpWE/bTdO1WqV5OamQM0Qsy1JKSUR930cPOoQAwAgZIBcyAWBEGN1Wa30IoK0zzhOhlIlKMsalNm69qaPDzRhTMlFpLmRivd+2DQAQMCGTIp/EVlBreqO7rl1y9LOqXMwneYociSAwHl0rYAyIPCAgQiBrTR/7E0II1loi2KmPUfnv2DiikwL6sUo7Cm8e+BBPeyHjkz+qQ8bkPuS7uwe2b9w7BBWL/ly8YuRD2CtpjM4AAUSkyfgvEkFEtgeggz+HzwzF4zFf3E7zZOOHGn84+7xwjppn7N6d/n/Wzzv1Mr/s6sG+X3gY08HzHj0yIwRPFGAPTs9EpA6OUQEiiutMtFMHOxgRYwx/3PXxGAQCBsiRS6FSxhMfwLlgvXtEdQmOvPVW277W7aYqs0xy9JZsn3CcFul8kpdFNptMLhaz6aQocpGlYlZNgHzCQAmecFASs0QVmchSkWdJnueIsF6b1WbLhcry0li/Wm/rpttsG+vIE22bru+NkMmkmhXl5P5haYxdLC6++eab2Wy23W4/ffq0Wm8Fx/lsSt79p//4P7X15h/+/t8Vmdput7FDwzi4uMizLPvNb36zqZtYdj6Er8bdscvlsm3b2NQBALHxPKI5xz50zrnWerFYxKL/9Xr99u3bEEKe5+/fv3///v1sNvuzP/szKZiSXPet7tvoZEdcm8ViEU8SRXu1Wl1dXeV5HrFg+66VgoUQEDhjoul6lWabuq3mC+28B5xMqnfv3jlP06pM01QI5r0zxjDJkkRaa+tmkxdpXW+yLEvStGm2ZVlo2zdNUxSFMWa9XldVJaW8u7uLkJsxCiMkd85tNhtjTFEUNAJ2Hgddvjx78RAB6nQZGf6MeeVxp9w4Zzd0LxzJy8FJYQ9aMRx6FJAZhHCsqJ+yjocTnr2h4eHDIaluHMgoXWzURz88hjGG9jgIsUBGqEht4gmBCa69X222n28fNtsmeBRcSZkwJkIAa31EjohtcwAQnIdAimFVFpM8yROVCBQISFZAyCRr2954J/ZBSO99npdff/0qOnPOuVb3xtq4asdnGZaDGBuJHwawHACIyBQAEGcw45KrRCYqK8pqPismlVKp8xRZCOq6bvou5gittZv1Q981nIWqLCbTQkrOGAjBlBJKKbFjpACE4L01lmKh5r5c6LFBbUSbdzzh8FyEczddTrzAsZkyDtaNT8j2/aRxuRz2DMnFo6tHTOez7tdZt2z87VhOxoGR8fFjzYFPbGd13vhuxx/O6v5hz2n56Gm8ZLji2aemE3CyL3ilg9wd3fkgRONR39/ho+gFZHtI3sewM45KxmHEmjZeNOL0ti5Yj8i4TFKhEmAiEO6MkkAQHATiCApRcFCIznZAVjHMlCjztEgkQjDtNk1kkaaplAJRIOZJ4rQGAPDaO0POATqOxCgABMnw+vqSS1itVkzI+cVlmhdcqrrRt7f3vbGAXFt7v1xv
tw1n8uLiKtZV9n2fZsV0OvOEq/V2tVoFZyd5Jhi8eP7sl7/8BQP6y7/8c+dclmUPDw/Pn8+ef/X1jz+9/f7NxlP48PnTer0eRqHpO6UUIca30TRNBBKL8h7LEdq2HXJRzrn5fB6pzKfT6d3d3Xq9jt17P/zww9u3b6fT6WKxiL+NZUR1venqhjGmlIrWcJIkfd/7YPM0q6pKcc4RbK9jVxIRBaJt00gp275P8yIQOk/VbPHu/cd4QFFkSapms1k5LUIIUkqRCK376bQ0xuR5qk0PDNfrdTz+8vKSiO7v7xFRStn1TcRYiFMo1jo1TRNxrNiYYnp/zBHA5oFC2hVePOIRnq18GWu+MbjKeM6PWyaO1d6pYgt7AtwYdRyvTWdXB/ii9h6vmziKMg36fyCbj2djKLyjAc0r3gzucbCGPvrh521bE3nGwTmnvQPBmVSecLnZrtfbpum8p1QmWZIyJqz1xoWIl8+lgEBW91b33pppmU/StMxVNcnzVLLgg7MModeWc5llhUxSKZM8LyO+QMQR9zs4x8BQCCEGaKLYNC0SFUE9hgB0NCmklGVZKqW01taFPYobQ+BpmpXlNM0K4MwG6nuzBx7bGe99U9ebtelbJcW0yLM04RxjiUr09hhjQoCUXCrOOcRgZrQVoueHiEKIwazhoyglPySqPR3oUy1yZOzjPoh9FIt4XEABcG8jSy4evZBzWi1inOzJUPcaixgQGzKmLHJ+joKxR7c0fkY4yZA9NW+PrMuxQXDW1fuy8jt7/rEhfHTmo3dO+yDP0dB8Qe6edMrh5MOg9xiLJNLGmIC7i8KoWJz2uFMxL37qvwJAQAbImVRSpVwqQhYCWe9j4sdaa6yG4DnDRHApeN80SD5L1KTI8jRJleAIwXsIbk9i4YK3UjBrDBAwIvTgPTgbjPZdG+ptG72x6bTU1vbGdNo2nRYq1c4FYG3bbreN7m3bdw/r1Wq7MS7ML66YUNo4730UWCKy1ndNvVmvnt1c6a7/T//xf/bOvXj2/M9+8bPJZGKMefnypdb69vYBAD59vH3z07v7+/u4TKdpaoxJ0zS+wLwsOr3zxpIkWSwWMYQ2YHR8+vQphFAURSQkiiKzWq0A4Gc/+5mU8re//e3Hjx+FEFVVRb7ymI8HgNXy3tj+9TevrNV5nvVtLYS4u7vLsqzI0yyRBD5TEtnOb+mNtZ4IwAfgUsUlqGkaRN7pdjqdRp4mxtinTx+007FZ3nt/eXUR4UOdM+8/vHXOOmdiIUL0QGJPPQD0fT/EcgACQIg6/sjk+qPeHh0GnAa5GFTg8BXfM1cf5cuPilGO4DZ30sEPET+Ha4+ReWFkGsOhpfllk3OsWofbPVJ74wR+PHjouh9QWnBg3N7HiGP/2eBNE5H1zjmHnCVJotKk7/u6buq6tjY2yWUjBAEWPQ8hOSICeSKSnEnFizSrptOyyDgD5421GpFJkUTA2bIs5/M55/zu7i4CKwy2RvSiYM+dOAxStDiiCzj8iYjRdtsrdY6MGRfDsICMcc6LYpf6nkyqmAav65ox5qxt27ptW0RK0zRJdgrG+V3RQcQEii2xSfKIyTkeCLYHk2OHTXjjtfJorE/9hqMBHY91NOjGHJJuvx05hWIfKzsylc5OqvHcHfudeOiAjhf6sdI9UgDw/8VG52KbR+GXo6scSc1T93b67OMwyenQ/FGhg5Gmf+yvgOOQUcDHu7LeDScZmnoBIKoHRBzq0Y+eLhYwcimkSKRUwFjwoJ01xsWAtjHG9tpaHYKL9EpZmiZSISNjjO67EEKZF9fX10hB8se4ulIKETiHPEuUQslBIHgPJkDbQl23q9UqUj1vNpu3b9++ffvWWGedq6qq7c39/X2ne8Z4r+1yuby/v4+9s0VRTKfTYlIlSUKEXde1dbNcLtM0VUpMJpNXr75erVb/8A//kGXZdDqdTCZv3rxRit9cpW8/tsvlsq7rONtjsX4sUYlF5k3TxPKfGLqMtT/R9iWi9+/f
x3u4ubnJ8/z+/l5Kudlslsvlq1evXr9+3TTNx48fV6tVnufe+7qu+76P2vHNmzfe+/l8XpblZrOZTCbb7Xa5XDrnJnmRZ2ksu90ZmnyHHSNl0hktuHQhWjA7YOQ8z2NLTN1s3rx50zRNlmXxzK9evXr37qc0TSNnqvc+UglmeRIVfJ7n8/mciGJn4bCwRCunrutB64xDAl+QvrGiGsdIBl9iKA4f9oxl/9gIGxWIjTdeTKrxJQdBGkC8hjUl3hBjj0GtQ3UloggjMoaSoWAoEDl5rUSss3AMIU2U4MxZE8XAee+DjycighCICcEEQ8Z8CN47AgLGPFBRlh4gshyoLDc+tNqKJGuD5yoRshCYeMu8xgBKyJIXZUdgucink6TI0FnqNDVdE+ok4VmWAucgJUtyDWLdaVCZCwAcBQbmeuw3yjRF6G9RqhwdtNfPSmtX1UzpvkaErjbrZROCKIqpTJRIpMyQeMhzxiQAeiDnrNFtA9ZlUqKnlEsOglwQKFKZek9d0607TcjKvFRCgjeJAAnBtrUzDafgbIeCq7zYdmbd66ScpyJ4b6zVAIiSB0IXAhDWrWaMWed77YBIioQAKGDbt0BBKpFlERcYjTO97jHyvTEkBE8QAIhxYDx4gIhdyWLBJDvCtARAwAiF+QiIKfb0H+OldqzeHj0wxoHRNBNXs2xe8GmCk0woyQgRhEKZgkpiLQfnIJEQMRLYMQaIgEC4q/ADAAgAFN1BxPgZY+1HvDfGCCAQOB8YFwToAwUCAkDG478ICLnzKQHjtzugIERAFo8B3J0KgRggg11jAe7oBSHuicghu/27A/YZSQCGPOoriAR4QAQYiDzQjq+Ss0AeGBJi5EAAAATknAcfEBBhNypIsb8psFg3GQgDARED5Bj5mwABgo8AEON6bM8QGY8t/0SBAniGKCTv+h6EbI3/r//62/tNY1CgTMD3kW+QKDhnnbOBCPd6OgAEikOAAdADCFF73yQJprnUPpalyc6hU7nlecjmQc0aSDaaNZRYVqLtEVip0nlZzossY4S2Z6b76npeFWkmsdluLq8u3374qF2wBK28RFViVnomLVHAQAwcIcgERDqpLv/l+89fvX7VWfrx/WcT2OSmYkUmitRjsKGV6BJJ3PXTTGQMBXkwTgJLeOp7t7xdqoQLLtq6npX5JOe+39juPujNn//867sPP/3w5ndae8fxYWunF7O395uiLO+2fTm/anqTZAogXF1M1w+fjWmeXV8Yp601IpHaBSaSgEIhz7K8aZo0TZx3Prj5xRQ53N1/Igqmb3XbLabTKs/Nti6lfPvw4eL6Kq2mv/nDm+fPX7adrTcr4anK5DxLtw+3s+mkM/5X3//IJ1UfWKbEpJr+8MPvZ9OiXt6ia8uEo7cXi5nu+zyfTGfz5XJjjUMGs2n19uPGWijysq77333/4+uvf7ZY3ABLf3r/adMakc5+fPf5btNnkwtHUju8uXqudWAs2W7b2fzixzdv5hdXWZE78MAgAtyjEIQsFtjlUlijyzwn7501D7e3i8urpmkYF3HNJwIXMBASMIIdytJgWQ5ry5Gp96hHEXEHd7gz3gCRE2qjGWCRZ3d3d0brRCXeWcG439cCPG5j43HMQDYsW0fRmz/RLg4j6ocj03isik9DPeNb6vs+Rj7H/Abx9sYxqOgLRgvLet8ZvW3qVvcoZFbkeTlFROd2URrvfXAuuk0IAREVF0VRXMwXl5eX00khBaaJ6tp6UU3Bh6qqtpsmy0sf6P5h9bDetF1PyKVKYxiTc76rdeYM+CPTW/TwYB8vit5hURSLxcI5Z7Wx1iLuYkpKicmk4JxnWQbEPnz4cHd3N79YXF8922w20dwLgbTuvLGcoxKCiBL56DMJzoXgHA+GbMjahhPYubEHw85tp17+0Xa0c+yUHO08CgYO2xGQ5kHw7ek6yfE0+0JccRxgGV9i7JzBE9HL/19tp+/tT4mdnN7k+Ksv3O3QI3V0hlOHmEXItNFp
Mfzxx39qBUDEWIK0/8w4Fyi4MS5yYGtnvSMPMcuIgNxY3xobAGSSqTSPLZ6bzabvu/l8/t1330zK3PVWd0AWdLPpu63rW2c1BOc9eQ+9IWvd/f19QPjZ65t37951XScFN7r//R9+bNsekHXaaONUmnEmV5v1w/2q7XshVF4UeZ5H9yXP8xg6AoBYqjabVdPpNLqSz54961s3neac8ywVnz6vkiT5fHu33tSbzSYAGW1DABdCXhbW+KZrkSDLMskFBwSArmkBIFa1DEQ8FHCxWMSUnlKpcy6m/JVSsQRxvV5P8qKqqqZpIgl7mqaXl5exGrwoiu++++7q6hIAInkFEf3VX/1VWZavX7+ezWZVVX311VdZll1eXk4mk9ls1nUdYyxmTyL3RXTfQwha6xgVGxw4AIg5kSiekaI9ckfE0Ku1dghlxXMOuQbaQ5JG6FHnHDIW9viZZ5eaI3UwjhUdOYvDgr9bKPaTlkXFxlgMqlVVJbg4Cqs+BjDH14O9e3hU2ELnti+Lx/h6bITcP640G57h9LGH3/Z9H8Ek4xlGD/AIPxqdeiIy3m3bptN9p/tN23bGoJAqL2RRJEKR87bX3linjek1hJCpBAOx4BlSnqqri/nzm+vFrMoSlUtIGVRlOikLIiJkPEm1h493y8/3q21nkEkuEwD0LjozMXDKAFgMNcRIAuxTrDHWAQCRBEQJHpxxpkcKHAFCAIAsyyaTSZqmaZoS4bZuOefzy4u0KKVgirPgoK5122zJWwjeWZ2kkjNACJJBkqghbe79YyXtmPJqP3vYUM0Lo8aDcVz6NPg53s6u46dr+ngdPFWo8bqnGTgaUWThExscWlFna0ZwFCTHUc7grBl39MOzGusL21llNj4zneQLnvrhWevw6FbPCuB4/1738KPXfvSW4qvGQzf9jz7saZyTAecoGBOxdFYIJbgSQmpje+t6bY312gcTyBJYAouw7brltm61czHwJlWSpkTktMnT5GevXz27vJxPk1IBAwDXgO3AtixYhiR2kDXQdT0F2G63lzfXGPwkz0zX9k39+fbh9n612rSddsaG1vjeOU98U2873YfIk8BBSjmZFFcXiyGPXtd127aLxeL5zY3R3Y8//vjs2bPXr59TCE7buKwlUtyv9GZbP6w3gksXGBeKAi7mV5zzpu5iFi2+ZylE2LHC2dlsFtc6a21d13lWXl89i9zU3vsYg82yrG1bIdTd3YNS6VfPnkcr//LyMparWGurqnr79i0A/PIXv7iYzb1zwNjt/X2WZev1ejabcc6tdd77zWYzlH5st9uxuEkpYxIOACLnX2wpXq/Xg6cRH4QxFoOcs9lsQPJqmiaquqGEhEbZsbgsa60jW1ySJDG0O15extupbUcnAdLx3N5N18Nkevw2hKCUmk6nUSUnMhnMaEZPmmyPZZa0xyyO6Mnwb0mQDFZ8jIBHUy5qgrNdgEenHWRv3EE4LF476jVi0ZIMITDBY+nUtqnrXjfabLVutOkBvFSgZJ7nQigiDM5bbbqmtp1mFDgQ+eCtRhdSJRfT6mJezauq4PD8osoFv1hM7+7uqvkiENs05nZVP9R9ZykwCYxbF4zblWj7iBxPxKRI05QrCSP3JZYLRaNSKTWbThhA33feRgIK5611zlXlJIK8XF9fM8Y+3d5r466ur5VSRVGkKQcC0/fOGITAgQQCA+IISvJMJVIw773WZry27ucNDJHq07d9Vq8cKYyxOoQnfLunziZ2QGPytFJrt+2wyHfYoKdz7KzS/YI1djSvTpXKUyLwlGr/wls6e8xT2uLo3Z5+hhNv76lnPDoGRo35RzyF7KTS9WgEwwgR6gsCfnQDI/tdsKj2YvUmE1wqZCIA84FMCNp77b0NoQ+hd84i7whqbVdtv266utfERTGd3NzcFHnKEIo0uaomf/PLX/zim8tKwYRDBp55B9aiD7FIFBFaDdV8tl6vV6vV119//dWLZ86A7nQA8fH24e3Hz70NvcXP96t1
Y4qqApH4AE3bbpu67XtEKsv84qJSSkVgh77vI0nszc3N1dXVP/3TPwZy/+E//AfvXd9DW5uXz+eS866HrrN39yttKQAiV9oSkyovJ9bavu8hULAeyKdSpFJpra3Vs9k0y7I0Tfu+j0quKIokycqyFEJsNhtj+iSRUWfEEtBqOivL0hhT13Wr+77vt9ttmqYf3r1bPTyUZZlIFaH6nHPv3r27u7uLarVt2w8fPrZtG53Iuq5DCDb4NMn6vo+UtnGpiQXn0amItxGTlHG5jjIbl680TZMk0Vozxpqm2W63QzCPRhW/McPHGIvFL1F3Ru/lqal1lIMf652xaBwZbbs9fpcXdMEPlDKxpk8bDZy5sEtai7EcwijZHjV8/PFQNoOIIex8BfjTlN9YnofihSF+dfrbg+UJHgVvHNhk+3JEKSVjwviYcwgUidc441xAz13wW92pbYhF0BdlodIko8C58dYBgDPWah24sxyllBLBW+d0LwUkks+qCQS6C21ZTadVjsY4bZYwnN4AAIAASURBVJRKA0s+3q8faiOBWRAmQG88KopLTUA2+NAMBecBEb33UopoX3nvgYLRPTLgnE8nxephqftWCx4okA/OOa1Nlpfeew+kspR3fdO2QiZpUeaJ5JwbH6zbdDZQcBxYmihtLAQUDLNEScWJvLXaOmCSMeSA7DE4AAyBAfjjMUI+0L+NhyNOiThw55b74xYCOHRZjpQBg6HucrfxUcPp2AyKX40X9CN9cLrijzXK0Yc/fftTvJx/6zYWVxi3JcBjVH+Q3viLo+canvvoDeBhTwWe878BzwhaHIuwH6ZAFBOmuyIyAiT4U0QbD4M0QIwxZMRCiAzGHJgAFphSFLwDBIoZHfLBYwgpCMcTCm6tTe+cEnxWTQpVRuZSImIhCPLffvVsXqQphDcPte5t25Nx4Agw4hJBvHNQMjFd+/xqkQr+8tnk06dt27m20eQhlQoJvON55izINFUeeWssdz7NIMlSwRggn06ndV0b3Qslo0c1mUz+/M9++S+//v7j+3c3L7/92XffdL/+/vOSEIJgGADa3n2+XV7PV4sqtw5s2zsJUqjG1dZ6RFRCegoCmZfcWu39znlQStV17RzftZwiRoynpm5jsDFJ5Of7hyQvjDHB28lkwrFru05rrZT6+PHjpJoXReGMXrb1w93nV99VSqnXr1//669+NZ1OrQuBMEmS1rqqqmLh6N3tx2I6ifrsYblKZjdsD6qMe5rM2G4YS+rKsrzfdjsHjgvb77ao0qSUbW+6rovoP3HnvspPCCGCNdERHFRgXMB9oNEMf5xUjB/gkB2F904twpjKGQghYeD8IxKcx9gsMAQPiBgA4onEWXkYfwi7TmfaQX9aMeT8xkL41DbU5Azr4NB7ByfqNr6goxsYLwrRjhgngXbtd55c8J6CR0/EiUFWFsGZuu+Ds94Z6w09u766vEgoJAn3wlvd7zxo512ndyR2ggfyptdCyUmayzmrMeFSVLPpb37/h/lk0jQNl/nd/bbVrkxzYmmrPTgrmYhFzJxzJgWXYpfPQ4wvUKmUMcaZFELY4J1zEMGgJWeMeWcjxxhQwL2ZM5lM9Gpdty3nfDZbEBebzSZNAhElQhZ5apu+d8Y5RMDgPQOmlEikYEDWOreD3xSIGOfY4KTtX+8hvc7unT7pwcBInw1+3gFgyBPhzfEJd5W5RLEWYzfXAWBflBFCoLC7YxhFOY7MwKdX4YOd4wl2qFcOROhoyp0K2OmRR9uTx5wcfzjziYjG7ETD/qOz7eMzdOqrPf7qBPBs9ycehC6JkO0hsPkO3Y5wB8Yaee3+eJBzEFg24kIDACSBGIC4d+BdYIwFAk/IhPIx4I4QFw4k1oew7JxgDIC3det0nypufax8vprNZsE6Y0yr22meyNkk+cW309v1pt4+LLfrptUeLKHxEPHTP326f/3qpiqL9eqh5+wvfvlz3fzX369slojO+I93a++cuL7sHH68e3h2ueAy
IBFgzGdjJM1dLBbe+77vYsmT7rqsKF69evV//D/87/+v/4//7fa//7e//vf/u067pv3hw4f1i5t5ooCA697f3q+r6cQReYJOe07gA1njBOP5pGiaxhrNCS1RLDdt23ZaTdq2nc+rAbJRCFUUhXcxSUZ5nt/98P2rV6W1dr2qLy6uptOp6UWWZV2n+75XSpV5sVqtiKfO2Pdv38HN/MWLF29++mk2m0XYASHEdFYh8k29tcE756M9wZXSq9UsyyKLXnA+NiG0bcuEKsuSiIwxVVV9fNhECFkhROecc+7+/n7QRhHO3lMYKredj0xAj+m6KH1D1vCcPbdfCg7hN/GwVvwoTjPeE+m4xtKd5BkBxHrdCKjG9kufeGpax/wTG+GHAsDQOReDuQc27B/bcA9wFatOzq4XADu9fbQkxYf03mPY6bl4DwDAgGLTApfCeGe9iwj9TCpnjSVwAMtO2893KBUvJ4r5RCWKBW8dYyKRCgNRcLpvlVJZooyxxmoCzznPUvXNTdEZ4/oOTZ8WE6Pd/f2y6TRgkhaVSHNtvPNNnhTIGBeCIOo27/iu3TBWtEYIcJ5wiHhQISCSYMgky5R0nsA7ay0xjsgFg0BuMinXTdstN8ClEMIjS1QagrbWAYQsyzyKUGvrLCdCAiFRqR15ntWGiBQHtm/bIIr+KNurwL3pQPtA5c73OuZJP6vMhs9EB9ruaLE+0nl71+Tx/OIEi4Eeo8HAAGnfNH3q3p1ebqxljg6LSd8jeRtOe/QIRzrv3+QyHp3qqZ8dqDd6Yv/uPI+d7EfycureHYVPh52DJ80ACRkAcMDAGDFGjIV9YJkdVmgTPUUv+Bg6Hg0rEBE6BgQALASwgTgCAjofJOMRgQ8J4n8BEAC7VldlIZXsu77rrbYmBFiv133fv/7quUCWKJGlSgAFb59fLubzarnefPj8cL/ebnvXaNdo12pnrdUO2ra9mk9109RN9/rF5cuvbrbKO2O17sl3HEEpJRiaPEnSljCP4tAZLaxIlRKSqWxirX148IiolLDWUtuWs9nPfvbtr373h3/97fd9W79+9XWn/b/++ifTt1wqYIIwLNcbAnQeuczItgQMUfZ9r7Upy0mHaI0pikI3PUCImbYY5CSatW0/mVTr9ZaI0jQFwhBC33dZlhTFZL3eZpOaMdbqHnwo8vT62U20jIkoT9XyYXPz6vryQr/7eEteZ3mZZYUxDhFjXDRN5HK5bOvNarUaIpb7thB8eHjoug4CSSmn02nbtsBMWZYRD+Ti+kLr3yX5JBbixcVktVpFHl2ttZBJNPGJKJZRD8Us1tqEsegyRYevbVuueAghosvvlwY8mlHjNSGCsESNcySk0WkZ7+GAKBAArNFZljEg5Dyi8ERM1AHV6fy2x/VA3DewD57WaYPUF1aBIZYVrYOh+f2suJ7mG4YtjAjmB88vJskgUATci1iuUkrGufHOkEcheVYQk8te364295u6084RAyEBJWNCyCTq0b5rKXjJBecsOO/0rvTjclosinxzf1uVhTV9IuSnj7cAqNIsKyZpklsf6rbrrfOAROicC0Cwo0zlQojIv2n2FC0xbjxQTzHG0kxlWRZbl7zRBB4xYoTudIAx5uHhrqubxWwmlIox57Is5vN5NSmyRGVKCgaCIecMGQW3i2sr9cixEEYgZIMngPCo5Mba7qj84cgKOdJkp4v+2RHEfV8m3+FNPm5D8w0ixuy09z64x6Kn03D62X7zUyd1eIdjb29sS35Br///daMntqee60jWzgrdeCdjB4xRwzjG/D9+8Un/RCt2/LqG3B4FBGIUMIQQPMR/jsADOkBPYddzghgQCdEBirRIiolMc5bmINPO+LvV9u2Hjx8/3d4tHxhji2qWJsrpXnH27KK6WUyvZsViWlZlViRCIDBy5CER8Onztt6uq+mEI9zd3X317Nkvf/nLLMs6oz2hNuHT57vbhzUxue1M09tGu6brN/W2bVsinyQyNm5HfSClhOC7
vrFa91337etXr79++S///M/VtPzum5c/+/ama7XRzjnnCdab2lrfa0uAITAuEyaU1rau6x0ih/dFmsYiEWNM9ELiUvDw8DCZTGLfoRAi4pkZYzjnX3/99f39/Xa9mU5n1viffnrXdd3FxcXLly+7rru7+3x9fT2bVolS06J8cf2sbdvb21ulVNu2RVHc3NwgYiDU1njvdW/zrLTGC5l4TzFXFytCY3Xos2fPEDFm76IBnWVZTPgN2aUIqzagM8K+biWKLe3L0yjic+4jqHH/Dluf6GjaDH/SHrRyqAWNKdJx1+9YR4y9Mg44rCdRtQMA5zzL8wGgI/6QV/OLs/7juHhhuIOYkxxV1EAcpyRJIjHmHjL4kas6kEPEaA4URRFCiJU/R9GnQfyiFRCvhUCDf5nE5m5nY5VjdMa99wgiBDLOOR8J5xgXXEphrI7djLFqW6nEEz2sts+qUqlUSAEAUsgkSSi4vm0icFe0PHwgAuJCqTTlIQjBGGf3y9W6rjfbxnoKgRkT/urP/2L1sPzw05tpmU7zJE9lWRRlmXVtZ40lT955yUQIYb1cdV2XZzkCcYazWYVAbVMjUCAAQu+C7s1OPBhTKqZhrRAiTfOu16vlutfa9ObmqkLGHJExrm3attfAWJrlKk2kUMbYvusJMM1SIZNA0PaWMc6lZGxXRsS4jE21jAnaV5wzxgkRAGOsfLycDZN4rALpEenxcfjGTtuOMGi08sYguRLS9F2RihcX82kqJYREcMaQC8lYRDGSnDPOGGfIAXmSDIIx7qwfu25jncHCwbwaJvah1n90aIZ2Q7YHJIoFaeNA+uESf9CTM44lHh2/uwSMFBXtTFtEhF3J2O4xhlfNBq/78VVDNLTH+49qMveys+suJyI3KpOO3Eq70iHvY9rOBwo+PoWlQFIwBOyt+/D5/v/13/9HZwLxxLkg5IFXNyxJR/Nh+J8DF0LGKDXngjEeA6ZKJRQD/hGkj8CGYI1r28aFkOcTIWXTdYyxJE111wNAoCAZ79sukFNS5kVutFaSAUBeTqbTCXLeG+eDz9LkcjHjGKaFmOZpKnkiBATvnbtt/auvX4IPH26XkhMXvG0bzlnbtXlepFmCCFJwxpELXmS5Mz7PsiTiXWw3SZpOqmkgkomqqsV/+2//XaU5ACLj33777X//x98ih7azaSKc0X/7t3/jdNf33aKq6no9LSc//fTjrJoVRS6EQKI0TT0GAppOJ5vtZjG/UCp58+MbKVVZlM75alp9eP+hmk3LsnDetW2TFtWbN2+zvGCMcyF/+5vfpKnK0/TZs5uf3rxZrlbVbKGd9wG2dXd9c9Nbs1qtqvnFcrWezRcPyxVDeHh4uLxYPDzcz+ezr19+9fs/fD8pCmdNXpSbpl+v19+8fvXp48df/vmfrZerFy9eJGnmvHv34WPbmfni4g9v3r78+vXn2zshpNEdAGy329evX9/d3QkhtLF5nhtrAQB3+YrosXHOOQbvnIugVLG7fzqbOedgT/jFGEM2quKmx/aqsQE3WK6DHR8VIQzdfxTbd3c9tVxway0XXHBORIAYWS+AQAwBQzgXOzprCca03B5EOAw68jHK8YSlfGRrP7U552I6KgrpsOQF7+LPB8qeaDvEtYcConOeBQCI1axJknAGjAUCYAjAOQoOkn16WBGTV3ymRIIBwVuVZNMK1quHXmsfAiE6CsBERJ2uN2uVZophmSaN1l2g3hjb61k5aZut7po0SZIkAcaBce8pQjNIzhMmvHON7fq+H+MXxA+IGN3fpu8EMhQ8Iuy1uh8OEwJlwESFSZl3Xb/e1Pe3HwRuBVdJkqWp7F2gpje9IRaSrLTWEBGXQnBFhNpq3RkCDCE8FjXQY2IPDj2ecVrvKdt/mHajI+nsr05/Pgw9BxSMHyyaseGa73tuaOhHfzQk4VxM/8jzAADw4fS2zx45eD9PnfCpaXx2O/uKotLCwxjp+B5YZEoanWF/M08+whdu7PQ+x2/+/NkC
7Xp996DZFAKOTnL6QzrsKhnvPHxMPjADInLkjAMPgQPAjsUCgBC5kNrY5XqVSEXANm3bIUihkrJstHP3D9Mit8EHwkmZ50W5Wq1kkmVZLtPcEXeepJS9NnXX56lgFBiClDLNMm+1MWbzcJtJVk7SFzdT2+u7uyZL4bLXSZJ8vHvw3n717CowvlxvvHXz6dx7t+88w0CuabdCSZWl5OH585tf/NnP/+//238tJtPnL18sV9u//suX//irtwAgGHrOP378eLWYZlJ+urud5klAYEIFQALgjBHDeObou0RfKr7AGPOMxmjsoouFAtbazrnFYqG1bpruq8XldDpdr7bPr66apimK7P7tkgFSCEoIq43vTeyYin0CsVvJBa+Uapo2z0spk7hezmaL1epus9n0QcR0ZlEUWuvJZNK2rVCpUBHed2fYRQvPGJMkSdu2EVhxMpnc3d1JqYYpgSP2Str3TQFADG5F4JgIVTNG/wEaJfBGhvXYVD27khxNvDiJdysS253HA0kp8zzfbrdt00QOAz6p5n+KFA1XtdbEJZsxFjsIY4HpwYIwip0i293WgN8T1cBT0hsIYqXsWPMDQAheCEHeA8BQUxtCINqxbAMAPdLnAgJ574Kj4IN3HgAJGRH3zcYDCimV3LEOMYREiRA8ARhrtbUOiO84Y6Xe1CJRXAgCQC6QCx+CM+768tL0XVevq2leZBLJVWWhlMwyabRhwLiQ3nlnHAXPAI3pI7qyEEJKHoInClKK2ljnA5cyL0rkXGujjQFkUilE9IGCc4iMI2rdbzY6kA6EAFxbu9m2TdsHApmkBKzrtHVeqUwmqbW+6XtnfYiLFzAKEFM4jHNEvlddHPccrZHcYJxsOzLnh06DscOB+Bi9HKuBwVs62hQKBmFRFTcXVZmIVGAqBOMYwVAYj55eZGJAzlhgO4aKo7j60WkfC+ifbsgZn2HY82VVOp75+/0HeuX0tYzfGI7Kvh7lYve22eNpDuD6/OPhu8uP7+fJzMJwnqP7iyd49Ih9iEYFBQhEwfsQHIXgvXU+aOPuVpt/+tff9o5ApD4Aoj+rMh9XuuM0/I4lY/e4+y8Zj89LiAhESBBBQRMhgvdaGwJCZNvtttd6Oq0Sla3W6/Vm60MgIOdCAGBcykQZ53ptGOeLi4ub6+ssyxDBm14yCN5GctpqVk0mVZJm67blSGWezqqJD3Zbd9ZACH2WZt5aIdikLJJE6l4DhTwv2Z7FEShoq02k0UkzYCxJEiHVP/+Pf1mvN8+ePXcuTCZl3Xa6a7NMTYs8TUWRpQzJOzspSwCq602eZ9OykFJaawIRcthuN3leAIAUMoSwXm+7rr+8vJJStW0jhGjahnOeZYm1pm5ckqTL1cb58PzFCx9C02wvLxZZmgCFNz++++rlV8aSVNnn+wfG2Kf1ZjZfXF5ePjw8cCmaug6e8ix1zgLRrJq+eHb9+fOn119/fX9/66y9W25evHjhrBGcE4Sry0vnHBeScf673//B2FDNFz+9/zit5nXTEmCRJTHCGR241WrFhaTHUAbDEQ0yxBwvwKDUt9stRYtfiEGB0SjZH8npxuptnNgaNOLj+YHts997ty9OThZhTmP9qWCct33X1HW8QzFWm/B0zmAcIBrqWWKiEgCcc4yJ0ZFnCqnZvhHwCzoP9i5trHZ1LjxmMoGUUigl7GNTUkpjjI/vAuMlkBgSEhL0umUEHgCC54jeY/DgJWycc3dLAgC4XpS5QuYBwdP84qpptpt6C9Yj48iFC+CtQ55QQEJMVbKY8swFIRQPbFYmnz/fJYyqIrN9o9u+vZkZV0opa+9rY4AIfKCIaF6WXdfQvixo0BDeeyHTruvQuLLgKk2AoTHGGj9XSjCUDBEolYLPK++9N7o2DplFpj2hddExjJDcbacNcikTRSiMb30glBJ2sCzj2rxHjAKA4+WeHfImHqyq++3Q2/sj3g8cxiGJSAqRSqW42FOmY8Qc4IdTL3p+p37SkTYado5L
Pc+ohNFdjdXSEG8/0lhPeVpHOmAsh0dnOL3DnRIYe5yxgRIP3KZ4yEgen3S+j2T2Uc8d1r+Ms4YIwGhX9D8elOgEnKBgwFPSOqRhxlePy83jnYx81tgyBAhDmwTsxEFxZG3bdgRlWSZZ4YzhKr/bbNtWB6MBOUplgHU+LOv++eUsdnyn2nGZVFX19bPLIpGl4uv15v2nj/d3267pvPdfv/7m4urqZ4zf398zdOVkwti1UmL1sPz02aTy9sXNZW/DatPmWVoWU8HgYbm6ubgMwUvFp9OpB7/ZbIL33uh8Wn369OH65vJv/vIv/s//l//b97/7zetvfs4x//u//et/Zv+83izTNM+U3G7XK91/9+0rFNxbI5NEW2OcTxIAhn3fp2XqnEOk2O4dvTqtddM0sVVuMpkMsL1CiBD0dDqVt3dt2y6X68vLSwiuaRq4ury4uCiK1Bozr2a1Jsl417QfPnyIDEQRcaY3mgFOp1POsV6vt9ttkmSz2cx6v15vF/Pq/v6nv/iLvwjeOGOJWHRjcE8DHsc0rsZJkhjrI8fCwEynlLJ7qgDY16EfzdJY+RJbJpRSjigGLY9m9a7S+IlC6xPrajfDGY5bAB8vPdAbxHBRmqaTyWS9XrvOEtEBVv0XVorxM0T3ecBMAYBTaNrxQgB7tRdXmS9DkY7zNONtYKMf5HmssEMA78l7CjY2KwbBFWcyds7umdUEETfIlnX308f797f3dW+JKeLcEngEJmSSFeWsmswXMss76+5Wa5mkxkHTNM6YhMOsSK6r4vmiVEHnjKZpknDWNnXTNAGQkBttm6at66Zr+yElAwBJkgAAZ5AowTGGb8lazYQyLmzqpuk6H4AzSQGNMW3bElGayCJNikxN8+SiKq/mU+PABQzApErLsszzMnhYLbdaW0SuZCqEcs53vTEOkMsjHJbHtxdwNEcBAIEQjp2oP76xJ9AWzk4e2Kf9BtSC2GALh84i2y/0SMdMAsNFj8y1U33whcje+IRHKhnOqfzxdlRy8qX44aEcnb6Zs+/qj55wfNj4wxfueXxapNHPw6Og7RzBIaGy+3emlOkLz8UYg9iYsmtPCQABkQCC9xbIRxRXhIAQoo0KzuZKSc5iDddicTGdzR2wu9W292iZXPXuoekbDxsbPm2b//KP/+Pd54fARCC8+/T53Y8/mHZ7PS9/9ur5ty+fffv1Vy+eLQDg7fu7n9592Lb6YpokwpNrnW2LPPn2m1evv/2mKOBuWd89rO6Xq+Vqo3vHeAJMGOeds1rr2L49m0yLIgMIEaaLIxPI/t3f/e2rr29+/MPv16sHydmL51e//LPv0oRv1g/Iwma7evv2DQAAQ+1skuXa2dZoQ54QeqNDcLuBQGyahog45xFAA/eMRUIIztF7CxCEkJzzi/ncOff73/8eEW9ubj7d3RrbA0BVVdvtdjadIFBZZOSdEKLruuV6o63rOh1zGbFbPC8L5Lztey5E3/dN0/iw0zeRAhARP3/+jHtinIGYIkYmYyd3pNaLB8eqmXHx49F8plGLSzwsIpbEWB091gfsPKKjohUc1RYcldeNF5/x0jFMzh2ZHUNPwXpHCMWkvLq53jUOng2YfkFy2B57bUB3pH0Bz1mJPVJ74ZBd73Qb6jPjYUOx33BR2hc3Rl07gHDDiCwwWCeQRXJugQKRQ0DvyFlyKPpAq7p5//n+8/1DozUwgUpt6qazRig5qWbVfKHS3Piw2W5lWjAhooYQHMtMXs3Kr64XGQsX03ySC93XpteRjIMQmqbRe1oWznlkWgCAWJ2FiEPmMloM2gdjfdvZ9bZpew0M4/71eu2MlpJPyrxIEym4ElikyeJylmaF89S2fW8sE1woGWt2ZaJkoqyntus6bV2gADjQXME5b+BUgZ1dr8dz7suq7uir0yOjnCdJclC9CcBjXht3e/g+yjdUbTwu3CdKYmwenSqGL+gMGJXhwIktdXY+j/ccKcjx8UdK9FRbnL2lI7C00/sfn/z0
mC+r87MniZoPRiCcQz/lWWtjPBOemhJHg74T/7iE7eocmEIuuciEghCQvGAcA4UQptPZZDrremM8kFSep7V1901fW3Iihbz4uNq+v13er7bGESJa09frh+3D54SF+TT7+Tdf//t/93d/+Ze/vLy+IC57R97WAi0n63XnXa+UvLxcfPuz1yrh6832/fuPn++W6029XtddZzlL2rbtui4iAEvJizQTjDtrbz9/XFzMm209m83+03/8nyHQH373fdu23unvXr/85vVLqz0F573tjFlvNx689z7NMutCZ7QPQMhciOgfEBdPYwwROhfiQieEirwrMc1f1zXs+pLZ1eUNZ/L29na5XMo02Ww2q9VqvV6mqYqHZSqZ5IUS8quvvuq67u3bt9vttmma6GBprUOA+XyxWFw+PDxY67U2jkLbtpeXl7GSLhZMRLUXF9vo2MUJEMnznHMRuSaqsahN4Vx85Wi1AQBrbdM0jLGiKMbHHBXKnUrQUzJyNJ+PLoqjOqyoF5RSi8UikjcJ+mN2JR4GfIYVc1/ItqvOoC+2N+EJMdtT2679PIQQAhu1+sXqXtqX0kYVKIQIJIGIkxveDINRWIgQdlD1gcgFB4wRV0mw/e1yxYMFb76+XhSJQCYAEBgPEBkHGHCGQiBTgpNSlnkQiAIgSUSmEm8dF8nHT7cfb+8AwsXFPM/Kpu0ztnvRbd8xoKxK8jxngfoeN4/Lt2cMUpVAoHttA7KA0PYdQKC9AaF129YJ5xy5APLgnUAosnTGi743m82mrmsCVDJXKi1ANq1hXAAwrU1nLDDkwAKh3LeIwr4/nXaQxBwRYQeYwlkk6zspWDi1sE5W8OPV/HTtO1pDB8QsRL9PDZIQgj3iBJ5pX4E9dOd4LuEhRgkACDgOBoxl7+gD7SnlTkN5dBS7e6JgBPFMVGf8565oZdSBdyCTJyUAdK4oDJ/wm+EwrHKkg8+ehChEicDRS4ChoHykrejwh3jQNX+wFBx8ZrvkOiAgxQsF2JcmYfBAxAgIgDOUnAMwZywHTJNEIBNSKsBGG5FmxJgH67lsjP+82rA0vSwW1199s7z9/Kvf/r578ezbl9eFYq5vNs2aJhMuVZqUUuUkVDG7NAEZl9NctrmqO4McCKnvGsbV1dUVR2F0V68e3r//OE2VZHwxKztjmXdSSiGZ1loIkFImSULGPqzX1zc2kEtV8d3rb16/fv3x0+c3P/z+5Tev57PZz7/9pl4tsyy5ve2EiG7cwodQTifLh7sdVKHgAZDtKTAZ24U9Yo1eJDIry/Knn+4vry68t3Vdl5MclhoRqqrKskw7u1ptLq8WRVE0TQPeSS4YmXqznUwWD+s2lSoR+OnTJ2BcKfXw8BA7uziC1jpRIkZW0zSNwSQp5YsXC+dcU68BwDmntY6ldtvlg3Muhj2jksuKlO2BpyPzbZSdGKTFXa3cON2LAEBh1z5kjIlYoHmeL5fLwRyP82807fypkMKojf3I9uXhEeWF2KPMDngxYp9EBIDYncI5/xI97FkpOvJbIxLmEV/aUyeEP0HtDbzn4ysOVUDDzrCnKX+KyC16xXLXKsgQEYkRkQtBqFTIpGn7d+/ff/jwabPdeu8nsypN0wDQG9t1nXVOqSQvCq2ND7veBmeN6bvgrAB/dTF/dnOVKNF1jeQ4r2ZcikjJSETamOVyeXd317ZtJFuIpMPj1sPI+ed94EJKKYOHtu+01hGlIoTQ67au665ptO6Ct0KIyaQwxlGME+47Dq331loXQgxNdDtDNVEjKvMj65v2WLFH27in+2gmnNV5Tx0GX/Ry2CHva9yOHMqj9T2MWLjO+kCn3t4XtlOP7Skn7Ow0Hr8xHJXGHJ38VPXCiQI7dS5Pvzp7/3/0sCNXb3zdoeTg1Ex+6g3gyQYjdXg00HBOTw/9Ht77EGGeCAAgVYm3DhntOm6ZYIx5TyrNCBkyJpPcAS43zWqz1T7MF1eB+Mfb5scff/zxxx8/f/jY922aKGeN1X29
WS1X98656XSa5WVn7MsXz+azaSpFniVZkmjdxTa1ly9f/vznP5/PF7e327fvPxCREGqz3g5Zq+gDMcaSJEnTVDJ+f3+f53ns4/7Zt99dX1+/efNTU28+fHg3nRSvX3+d5/lyufGerLWR0TD2/3kKAYExhpxJKdM0jXQrEWs+Rg63223XdbHfLlr22nSxZw4AIlrY5eVVzC5dXl7uHAPGlFLb7bbMi9hLtl6vu66LNHjW2pg+rKq5DX65XEYUzaqqYjfhYrGoqspaG/VQrC+NjUabzSZWnA6RvAjaGYNweZ5HVzI23T9lY+GeQgERnXMRDjTyjA4huoGTAPEgNEWjNqFheR93bx/9eRTeiOHZsOcngL3KiHjfu749PBcbOTvpI54eEQym4b6nijGGQnDGEDAABmTE2C7tF0k9nHMxSh5rYU4lExGRI1EgCrFaggCAIRNcWwuMMamIc+ODAyDOLRHfd75G/jIEQkAgsNbhnkfNxQQ6Aya5Z2kAzmWm0rQ3frVZO4Isy6ezqppUaZLotum3NQ8+RUyA2go2fmPI3i+XZTkHx1OWNsvmcrJo1xtkrCizJE+rywWmatv3n8PGJsmmow/3DWJ58+xbJaYPdyuv3aQoQtDOm3xauuDvt1vgisgowZuuX250S5OGqqUralzUNHVy4SA3Hom8RK+o476zrGeubzY1I+U0t1ZMJheeEIRHFSx1ve18AMYkYh688KjDLm23BwdmQgjhvAWkceCKITJE4CwSvwHDmIKMv+VCAEbGN2ScI2fx2yB4QAyIHiCWAnGpuFTOhwDR3ofoP8cQ11z6BM3LZ7P5JMkk5KlywTCpgAviEhOFXALDgITIkCN42HHI7Wm14v+Kix25XSAGyPfHCMYREOK9YuwJEvHSuya6fTcdUWybc3GyHf0LwcPu6N2/uJ8OX0sACkSeAu5J3uOri28PGUOmCHkgFoARMkKGTCDjznkaanaIIKLRAAIIhiKCHUQMovi99wHx0TJ4NHgFR853nIQAgSi69geKh2DHHMhYwBAAPHm3V0NIDoEwBOMcqvR3b9797v2nxqMWClQavIEYCWCMAHe8hpG5EABw9xXjggvJhPAeAQXuZxtB5GXkiJwxQYQ+ACAXKmUonKNV1wUuVFakecEZbtfLvqmrIlPgFQQ0vQg+F5J5H4xjHhKO01mFQLereqND7dWWkpDMnJzWFjvvAEMifCJ0SsuU1ip/5jxXSWG1q9frKs8uZpN+u+y29+D6q6tZmrO71ZJn2fT6emPdFK3gXAqJwBBQcJWlaZ5m9w8r3fY311e26yaZ3D7c3X98O8uTH+4bmRabun724iUT+NNPPzQtLObJd69eru7uJ7kK2ti2//b512Q99RYQJuWEAnlnOce6XltvGAcPYX4xa7qmnE6Qy167JJ1oQwF6AE2gvWnIt8+vLiTHLMlEmn+4vb95+XVj9GRSeN85s+bQrOuGeY8cpcrVdB54ttx2aVa8efPj9cXF9aJqm1ql6vPdMjB589U3G2OWm60FBiiWq+bi4tl8cbNedd//+Haz1dOLq093q977zw+rb//sF394+xMHAcizvOy1nUxnD8u1MVaphDPOcG/IRny7EMh7QBYIGBd121az+bZuQghXV1f1dssZ45whAGfAGVBw1vSCcwohhGgqAGOIALulaS+DbF+zzBgKBERiCAABiSIuORAxRCUk58K74J3njCOy4EkpYgJE1N5HQVXYgzkdBYhOjbuzVurwq8EvjO5z2NN8n5qEY0fkyD4d7mrc/TZskU5i4GeAUZCN9tVBg/sYQnCkIXBOXFAAht7Btm4+3t0XeQKzSZEkSVZEDNZol/UhcGTb7er6+rrdttMkX61Wk8nEetdp7YOrqqrkCATLu/uu78pSOmO9c8G69Xr9sFzOsjzNc73VxllCxjiL71wJCURKKeuBCHvjgm8C+t4jY+Ljx0+TTM2nyTTjKDxiUMhUmnFt0xSVNNtOG4NpliWJwjZIKYGR
O+AB9s6RSA5idI/BgT209FPje7aH4awhj+fCgLsfwoFDgDvMVb7nXohOp4gB92j6IXBkO46IEILgZxwLPCwgfMrL+aMO3FNeC47iqEcHxCucvrfxFQ++xXAkI2HPYXk0vfcfzkDbxPEa/MtxPbelYwCapxxi2vdREeHZw+KeMCLqC96fHW445+4fzaLHVze6SMzcx+mxSzs5vQulOGescc5FE7nvWhgxwIQ9aVfdNEpKlaXT6TQE13QtQAAfWHDXV/Pnz64YuNX6PjjLBTnjV6sV7fGEkyQxxrT9/Xq9jnUZaZ5dXFzEGFss0xi/BEQSQgBwxtjrr1/99vvfOWN8IOvCL37xs3/8p38qCpEh1M22ayFN01lVFEWRJiZNU2scEUUyspjGiy5LkuQ4ygrFjFqsdYxxzhAC4G6sY/Vj27YD3EnTNMbZIp8U02lcS4uiQMToeM3nc37bTKrpZH6x7mzf27Kccs7ff3j74sULREzT9PbTxzRTUvHF4sVqtWqdwT0CdQy6FmXZdjoSDA3hHyll3/ecc2/9QLBgjBmifWcDRcPEGHqRh4rIsizbto2Y2kOFS3TRhgVkLCOnsaX9NDvt7TtOE45nbFFNpZQCvhhOORWb02DI+GlplG4Z3mb0UmOl1im02vnF4uTqR4va45Kk0rPHRzSaMefhnooi+BAcEnIUKvHerZv+pw+3nPMQ4KubJElzJrZts4mhdgXgvEu4KMvSa9PqJthwkaW97bU1xpppWSST4nb10G62aZaWKt2ua6eN7vrl+m6a5ovptMqlllxrxzgXSjrnETFTSd8bliQIIRD0xrbW2dAb4onKem2JiAsQPAnBG7JVxiflJOt1nqi2pq7ZIlkpIE2k4iKAD+SRGCJHhH1v+sGrHg/cPi79OAo7BXdYsHCaojuadnDYzzC2eKLaw31gcx8M9JwnsZIT0TLGELmUkoAFIu89kGOSAxye52Q7nSq4TznDOUV+FAMZfcsP38zwpLsXc/LqaPTeHsViL5mDd7sfAqAjARmkevjzMHjLxo8zXDqMOPAOzLtDcTkbqhmW8hDCmFbhcYgYi9qTiMbBZB+8OGc34L7d+PS5cN9pP54JwwGD2h5C/VmWxUXTWtvVdYQ/HFqYkMvgCfdqT2u99DqVKk3T6Xy2Wq20MVFnkOubdpum6vmzK9G1TW+Qc+TJcrmMtHOReq3ve9e2eZo0ba21TpJksVi4qXXWNPW2qiptjFRKOk0YCLkkQSQYY4uL2fR2ent7e3XzLJB/9uzi1euXv/7Vb2fVzXa71c40TTOdZIvFQkme53lnNLEIxoYA0Pd9ROKNTxoL8aI/EMN9pm4GayPqs1g5GVn3IuMBxMZ2Ci9f5SEE3dvtpkmSrGuaJEmM6efz6tWrV7/5/Y8RXawx1DQNYOCcLxYL26yUElZ34F1VTmaLxb/8y78k1azMizGjbJ7nXfe2rutOm1hREUJIkiRm5sj4AYHM7Gv3Tp0WGBmCg6MyrMNd181ms7ZtY70oAAwkQfHDsFzAqOftyLw+1UH7Sx9XqB3YtUzIJBNHi8hZBXaqfvAwk/9U0mX8wyGAeyQM47UY9wUXR789VbTDU0VtCnsLfYjzshHyNw1NS4iCIfjggxWMM5FgEjpvQq3D+08BUCXpYlomadHVjbYamybNpibA7Opa931VVe9+fFNVlfN+2zYOvfO+73uZJmC9Qn5dLRhzG+Ndr/uu+/DhgdPvn91cp1+/4IkKLXIhuJTOGCkSJUTrOsOCccF5Ms43nat150imGai8JHLaU6Nt3/XMtQBFMeGTLEcm57NgDVhXB9eD04liunM2OOccBuJcBMYpcMkYYn+kqKIq2ltnBw43i40Vh0ruaP0dfxvHa7T3Se9qPD2GImbyhog4AiMAwUMI5CPIDsMd5VA4vSjsq7POqvNThXe6/3QRf+p4OgljxPT7U1P0qcc/3T+ew4dz+/y03+mhPaTh2Rs+vduxnO6yQTzWCyFjjFhs7+UE
IfrdsAdriTQdtO/DgxPFNl7mTheNA+neWzCMsVhMEBMfsf1uOp/GhT5qtejWRLzKWLYGABAcInrv+74PErqumzFUXAAikypJc2v6aXXx7u2PRCSTVMmMKWOcNcZ75yKnaySqNMbkeZ6m6Y8/vYn6Jq4Mfd/XdV0UxbZrmBQiEZxzosfYktb65VfP//VXv3n+/LnMU2P6v/u7v/lf/9f/Jc0WZZn3PctStV4uZ7N5kSeKYaRQh1iLwVhvNMcdQlPTNFVVDcV6SZIgYm9sDIF477VxQ/9ckqSMMdN3SiUIvOm2Tae/+hrWq62UMpILrh4ettttkiRN01xdvfrV7/4QvVWtddP280WV5Lnp20RKb12R5WVRcM7r7Xq1Wr1YXJRludms+l47awKCB7pbPjRtFxEu47othFyvt7A3l4ko1lUM8y2MaJyPPgzaYVB7MWsYdSrsuVSP/MWxtTqe3mOf6qy8DZNw+MnQsYqIsXr2mIHhSAXi0x7YU3b3UwfD3sIdGt7PLhBPeXtnNR8idl0XbcM4jQbI7NhWOIRYaV/EwVlwIThHDJBzTiINxIK37V0NeK/SHDgrVJZPq77eem+ZC5M0z5S8326Y8jJJmBCrZr3abIuiEIkwpq+3jAWapOk0y1bNPWlLvTWtrjfwge7efnh/cVmVOcdEIfPAeIBdIDI4ql3vPBgHwLjHYKxpHTnUMlHeOULGIpyOCVgbqdo8F8FDrvj1Yta19m5Vd+1WCoYUgvfBOQCOyJE4QADGgA5GCjFm+YZZ9UgfM6ixk4X4j3TzDWdjI7q+3RQa/Rm3XSxFSI4IgUIIAYJzTgiJIxojDIQI48L6s/PkVNmM9x/cyRNlJiH2i+8nMg5eY2x9hb3BGKO18Ojdjp+RRo0Qp0qRMXZ680ex+uGG92Rh59MK44XgaM9YzvHpmmrGGAGGQIxhwAj8hrgPN4OHIR7FObfhjI7/guQOZsH4iMiaudMx3h3VLzAuY6zMew9ckA9a6+Vy+d1338k9VXcIwQVwpnfOJanq+nZTb5WQ2rkizbhUjLHJfCbv7j7drf/lN98/u74qssSFsFy3qeRDJC1N0zgWeZ5/9823kQfGGBOcheC9Nc7oumlVmmQ+dSEwCi5ePKAHmkwml1eL9XpZVlMf4MVXN5NJ/ub92xcvXgiOSqk/fP/bV69eTSYTDuSdSfOMCc6EIArWWq52WLh931dVRftIWJIlcT2MRfZSyq7fdF0Xa3wEwyRJmq6XUkYV0jQmEHZdO6lmdV2naS6EMMZcXVy+ffsWM11V1Wyx+P6nD5vNViVZnmb3t594kK9f3Gw2m6LM0jRhjP/6N79LldgPHO86Td4KIfre7Jhvq8oT2kAhAHLsmjYWfsfVNcuySMU+FrrTGcv2ENWDXozr/2azybIsQp3FaGp0H9k5fjE6ASobJj8/oQgdG2rji8LO/wGGXHy5i+7IY4URpO/Rcw5SdyoV48OGlNtRbuPReDzBDRnCI2dFbg97yvhhS2M0G4fTDp44BQOBCNBYjwSIAhk64F3TyU2ffbxjjL1YTIokKzkGo7mnaVEu1yuJ7Pb29vLqarOpm75brbdMySLLnXPOWAGYCBW01ds6BdkRY4GkhKaDj7e3r7dfq3SKSnjvtbMMEYE7GxhA2/Y+oPUBmeACA/M+kPZhu2kYEJFIkoTLlDNmiO62+gUHa0PwPkuTIlebTUDSDBVHwEDkH/1vT0jBy0OMsN1CdLikjt8bwWOKFA49vNM948MYY2e9vfGUGNY7tq/85RCDaVaQQuQRk4whEQUCYodW29h3OZW0L0zgI1PxqRl+dJIjSzNenZ0E904V7ZdVxdGdjF8jPAZXj29pAHsbC/Ppgxy9kPFVHiVod6uEiOHYuN7l0vZT4jG4dFbkzwn7boY9fkuAiNG0N8bEYsLIM8A577pOa+2sBoCI+u+NHtp2GWNDSV4fHCIKpYS1ne77PiILMyEaJeR601w/f7la3v/w4wej/Z/92c+z
SV44tO1D0zQBKFVJzA72fd91XVEUaZrGPNMOOYExADDkTQiewHoHjoQVVnDGWJJnxvTffvv6X/7lVzbYy6sb68zf/4d//8//p/8llXdcyb5p7+/vF4vFs+trjhS8z9OU7zh3vHOOpUmWpsgpFoiGPdhsbDC31q5WKwCYTqewJ8BRSgG5Mi/qtuNSEAITythGW29cMMb4WE7Fubc2yzJjTLJP8aRpOp+L5WqzWq1M32EuU6Ws7pTkzlgpoe+aqqqapomgBJGyoJrPmrZdb7YBoaqqHXzzHpOTc8EYRvb5sixjRHpg5hlPs93iPKy6o+B5PCa2usdsa9R2UfEPRdGDXTj+8zSuEEbH4Kibahzk2Omd3WEMxdN8e2fn9FjrnhY7PLUNjz1eHZ5yEAnOX/epZYIJDgwDkB8aGRGQM0acEHwsN+OMc4aInoIkEhwDoQP0PuIHSsY5qMLx5G6r3Q/v2k3++ubyalYmacaD5YCm7V3wiCiEMMFv67rtu9VmHYubTWc4Y0WScgIwfl7ObUdKJNNpu976z7e3n+7v8qkUjFvtjTHTNAcAby1HYV3rA7PBE3IuOXIXuPXITLAQQuiIsQ7KpExSB27Tubm0nsCYICQrMnV1OVNlWfeGgvPWORcAGcZiWkLGHlmJR9MRR3bAscUw5PL+xNze2SAnEbHDPY+1/kD7AvrAOAghCHY1CyhEBCzDCM2FgKNenaPJMMzPI2/vaM9ZlTl+kJ3cDPbAvrkOgcEezHKPshVP8lTE8oBxl/Yc4iM45oOo4LjnAUa6MLbSjsV7N89HZuKwBDDGiB2jRQ8LzdHj74If8IgsscsIeA+BhoTbzjBnB97keE04ld/xRfcJ0DPpjCifMc0W+5G89zHBA5HLkwEiGoDI6MY577VxPigplFLOpYhICGme97122hCwpuu1NfNqZpx/9uyZ7Pru7mHd9B/vltPptFrcNGh6Y4lrzqXvNQBwqeq6Xm8/5Hmep1mET6rrOhayKZWA4A6gd957zwATwblg3MRGe8zzVJveOt1p/bd/+9ff/dMP79++T/Ps/uFWSvn58+dvvnmtnRFMqCwPpmeCo98R7iRJYqiPohejfFE/RRLUzWbDOZ/NZlHT7zc3mUxWdU3AQwAhpPHQ9l2SZN9//4fnz5/HdnXOedu2aZYoySPGkxDi6mp+d7+s6zpTKmqpMsu8M13XEdF8Pg8hrO5Xzba5vLyIuc+Lq5v7+/vlZk0Ik2q6blrrHO47p5kQgiDqwphrjNbMUKh45OSNy1KG6iTOeXzA2AIY62hoVPYBe30J58ysI2/PO380CQ8WinFXGxEAeAoEj2xcx/1bT+mwszIGT29sBDwzTu+Nl4Dx0nZqPg8ievbgwUyITYSxXIqIYqY0fo51XHEkeASDThTnMjAeUBCTgac8m7AkNx7e3z18/+b9+/uH3gWRZtVkGlmMN5vNYrFoei0Tpa0LAKvNpunaWBPBOc+yLFVJwtRsWk3yIkmSopggwHLTP6w3Xa+JSEeCQEQA5mzgnCPyEAL4RzsoFgxIlQbkm7r9cPdwu6y3vWsMrVtnnA8+MtT3KmFX14uLxUwJboxxOngDIQAi45xLwYU8GKPxh7HldTRfjzrSjhrsTv08OGf3jJ3I8f8Au+LG+NKEZIIhRxZC4ACCoxg8QQhHuup09MeXe+rDUz95ahIOOvWs4v+yXIzn6vD/0dQdOq6G90OjvsPxtU5/NY7hDw1P4wc88sVPh+PohGNVR6P+yFM9d3S2U5335VGI7kusmCiKCM1st9vt4FwesQOuVquoiuKSOiQynHMqTZMs5SrhMjHObpvOWJJZ7gl77ZlIHLHf/v6n//Hr37XGl9O5JeyNNdYZ66RKqtl8Mpl0nTbGIGdVVVVV5b1/eHh4eHiQacal8oTGud7o3mjjXQhhuVzmef758+fnNzd5mizv7zjSYjH793/z10gUnGu29XQ6XS6Xxpim7hBRcAWwa+SIIxt9oyFyi4hKqSzL
YnegMSaCocTC5kFPpGmaKhVXM8a5D6C1LfLy46dWJqrTFrlI07Tr26qq2raeFLkU7P72szP9Yr57uizLtNYxpxjnzeXlZdfqtm3X67WUMsnSyWQSaWa3TQ3IVZq2kTaAMQoopYoaC/dAnQPc0qAvxhbz0VQcJCKOdZIk0WuMzxvV3pA4PBIcfHob5vCRfXykO0ZzHQCAl9PZkWoZKmeOJGo4FzvEW6F9M3x8/rh/oOhjhy29w8nHU3zsOMY2z7EojhfQM2qV8+jq+RACUOwq41IYa2nX8cEi6i0hIGPk+gDggTMuQYiAzIZgnUsT5a3LinR1f+eMvrqYhxDSNL8s8/V6s6nrajFPi4KI9caoNC2KIs9L05umaaSUVTlFgq7rnTfIBADjMgEUxnaBHGOQJmKS54v5LOHCW59KJZhoto1V0rnAuHQB75fbprcolfXUas04Z0yEEAO/zDvf9TYLLTLOhbDWGmudD03daGN9QBQCuBQiCcA8xfAvCn6MKRWrFfZ7DvYjIvKDYPKpzjhaXsPYkxv5AbvMDRzkohBRuPbZzdX1fFqmMuNMMFBScM6s94HQx5YzBgKRQYDgkSvc1+KyETntELs4WuXZoYwdzd4D7QtARDyRbOD3Y9FRRkAMFGKnG2DU0sQ4F1LQPrTggvcUABE5i42Mu8QgY4zzXRcdkGB8LIHDbRzVv41KsQ5WkLOKBEe+uwuPWZPx8ae6cPcnUiwhCoGCp+C9dyb4ACFY74HL//xf//t93TkUnSMuFAR79ALhqD/h8HID/vQgv4KzWL4IAJyxiEcRQtB9r7UOuOtwDIECESATXEqpjLPaWGcNEcUGd++DMXbbbhbzeZZl3hMy7j1oY5x12piynMwvLlab9ZsP91ki5/P5+w8fJrlkXGhj1putC4ELGQgoBCJYLpcAOJlOkTGVpEqK5XLZ6YZxmRf59fW10brMckRiwIo877teKW6d9xQ8eUQUSj57+fPf/OrXnz6thcRtXb969erj59ubmxtrHWeYJakAXN7fff3Vi9jzaZy+v79/9uxZJGtljLWxb4F2BRdKKcZ413UAIISYFlnX6yRNH5brzvjlatPp7uLyqigLbRqrDYXw7OZaCaGkuPt8i0VV5Nl62715+45x2baN0f3FYlGk6uWLZxz8/d1tludEhEz89ne/t4TOuRcvXoRA3373rXX2+9//vu06Yuzm+Vf3D8sQcFM3aZZzIRjw4G2U7liPGquQoptxqt6GRGBd11G7R7DTiPDZdV3sVxFCDIidTy34cc6MqzSimuCnnCQIEFO2IcRk5DBpOefAZAgk8DBGT1/EGHvKyIUnTG84tNPPmoqnhuSRRA1C+9QNwInPEZ5G6yCM9hd4OFgXuBSJ4qv10oTgtP7tj++q6i8Nk5u2CciKcpqUuVCydw0FJEIpkxAIPJler/3Gm6CUgIC93QbG03xycXHhiG2btq7X5PxmtX7IpIRKQpDIGBMykVVVtdb2Te84Xswmmrj5vHpotbYghAIA4kygYpw5DxAIiH+6W1cVTKe5kAmBbbVutNOajDGpKovp1KFa93pdNywELoXt/NFbGi+FZ7y6Q9fwC5/Hy+zpmNJhScvRaO4vHXBfUC8YQ8Y4MmQkkCESi/3pR3N6lGk7G4DFk3uIH8Z9isM0+4IPh4cO7vgrtsfOHj/RU2f4wrwdy92XheIpGYGnBXYsNWNdFUKI1OdnLdqzN3x0/rOaeDggpmeic7Z3JV2sgAAApACwi5JF6z6cPAUBIEFE7XI20GGsTCnV9kYynyRZlglrQtf1wJlzfrlep2n6zbc/E0p2TXt7vxRCvPt0e3V1xUS6vF/fr2rGk6urjIlUppmQ6WZbv333XgiBQGmaffPNvO6Wt6tVXdeSi0Sw3ti+M8V1Zq1TSnLOiQXphJOBiJqmLufXf/e3f22d/vT5YVpVTdNoa3969+7VixeIHID1RvfaOhvyTKZpury7j/xr0cvp+x7w0YsYfF8i2uMv
FikTxrmyLNn9ylGwHojxdd301tzd3QnOrLWpQCVVVVV3fZNkkyIRkqPRHQQ/KcuLxSxPOACsNvXt3QMgzmYLJoT2wRMlSVIURfQv27arm6YzdlrNe2OMs54iCBeGEHCUeh5PnjjuYzEcvoqBjegXDblA2AfqYE+wGv2lL6zbQ653OC0dNhGMnMKDSOFwzngcUwoAxFmRw1GzxVPb+LGP/ICjw8Zv4QsLzdEN/FFpHJ4/fhgWwXFp+5njGY88YEQECCzCfmDgCHmebe4/ySTz5FabVhPf9u5e18B5lmVSKe2sMw4JU5lwztu273tjjA0BkJm676z1xVSu244lRZqWReFurq4X8ykDJ0E47UyvlUqIoG1bJGTAFEPwhgPLi9xx1RhngJH226Y3PgBAKkUAZlyIyEZNjUz7xDOZKcaATE/kuRQcBUVyKyQOyBEseO+PazIhOlNjF+3Q54Yxcczowzju/eVBHM97OjwVY4wTcBx8fcI9WTnyR+KqEAIyINjnx84N/an3jyc1lkffHokl7vpKz6e3ifxeczwmQQEwZq0448czk4bmA4yFHQjI8IAz8qwQwYkl8Uel40A06HzF9enZBukYmDceX0scBqJA4SlBO33bR6/9yLQ6UqhxrQ/ORghK5xyLq9guH48xqIEAkSQrQl6NKwCjNiXg29UaEWezRZYVMUfobLB91/fvlJLfvHoNGH744QfjjEzFx7t7kWZVORFJVm/Xn+7vmZKzaZVNprm2Hz68e1hviqLIE1WWXsmkmi36rlvd3n78dP/85iKXUhsdAuttnwgpmITglQJUou26tm5IbL57/eoPP/7w45uHiyu1XK9lknz+fPfi2bMABIwZ45xz1jshMiHEcrmMfRRJkkQGH5Vmw9uLUa6o9owxQggiL5Uyzk0mE2TcB/AEUqWbbaOS7P5+M6/quq7lJAcp8jzNO5/nalPXAsn1rZBJWWRK8Nls1jTN3ceP2nrjw6puuAxZMdHLbTEty7KUUvpA3XZbd621NkY7+74PxGPCyHvPGRtKnY9i8gMb3XgCxG1XsJok2+02ssVFJReXlMhwFJ3FWId4dvr5Eb3RuKwyeA8jLYCIkUdmwHZ+nIeRa4I7gMOSllML98jKPiu34wDp0RLzR0XxywJ25MOd3Y4iv3CuOny8BSYBYsQTgDzGHg4M1nSGU5IknLjM0jKV297rjw+stNPpNE9T52i7rZu6Y8iLrHDBb23bNj0RJlnKk1Qb01pNNrTt2hK/vFCc80W1AHRdvSpTyTyQDUwBAHRtTx6klM62GCwEZvsmGKqKDJOs1D58vGO9dRRi+45zjiNXTJXVFYGrtUUpGPMeCDmTKGez7H7VPSw/WxAkZQAKwVnrUp7Bodkxbmk4UgZfWOmO5sPjCc8Fnx/N/3NO3hCuZEiIHmEHjxzQASI6DJxYAGKBAAZle+TWjM3JsWcz2H5wourgCQfl7MZOgLnH1x38j7Fsw6EPN5ie8Cdsf6Law3P1k0eKfK/OGR0qnqPnjWNBjCHnBEhuBxg9NpzpiRf11E4AUEo656w1YQ+HOBBMjhP8R7PrcVhhqDKCYbXFsKsJkFI677S2RNT3JktDURR9WW232xg0u729LSe5YHyxmBnbE1HdwsfbJePJxc0Llea3t7fWvQ8v8PJqcXF1s6nbD+9+ImDW2tuH5Y9v3/37//B3lzdfEVHT902tq7wMgdq6EyyEPESiZiGEEjx24NWb9aQsyjy7usqM7dvWXE6rzWZTt+2kKOdVQogiUfGNdV23Xq9ns9nQpe6cS/dzaSjpjO8tljhqrVMhrbWcR2xlEgKVSj/d3l9d3fz04ydr7cPDQ5lKKbj3PpMs4ej6VmIgTsV0sphVcazfffhw/+nTixcvkrT88OlWKDWdX+jeVVUVa1mtc63uvSdkrKymd/fLXmvkiVDKu1j6/xjbG9JVbN/7CIdGz5Ayi36/lDIqv52XH8JAKsAYi0wOxpinpHJ4YzBa4dkI8+Hx
h6OVAfwxc9kOCuDs3B0LzKm7Nl56xttYgP8UUf+jq8B4z1NWgAs+pvBiSIxoV0P3+PPDov1AfPdnfHEAHBCRvOk2XZNnielMVk2fPb952NZd9zB9IWVRlFwE57RxzoVUKsUV55QlearaRuum7XQIXIpkUi6b29VyrS0AkxITiYwFAEvMBuet5hCSJBHSCR6AuFSsX0/zdNu7T3ef7lvLy/l0MkPpX371vG663lgI5ExM5AXLhZrM+nZb9xp4l6UsIHPeetcX5fW2JSJjrCMiEiAYMiYZsKOhQTxIbj01BEdr02ko+3SkxibIU2fe6TwGHIgN6oEFRIJT9Qnh6Op4QkIEh+YUhYOHHSb9WBfCyB3xX3wDBy8BkQA8BdyVe8Y2NSCEQIQD3ifsaWMJYmL57Hakrf8/c/XGb2C8H+DJYAyL0JoUxwKAcwJOgD7iaNFj3/HpteCcJXFkjoyNm6jzpGCIGJnCrO5pxzAnKEIMI98pWNgzyxMSUdvpWOMXQojeYTyt5IqnyntvtYkIEhyF9z6RXCnhvf/w4cPFxfzFy+fe2Q8fPnCZ9Matm7aoZjxJHeDHu/um06jEt69e/1xK59xms2nafrvdau2L6g9/+ctEJUWmVCBo276ruzLJUsWNcUxaAuBKSqnSJGRZ7kAS8vl8/td/9Vf/5b/9o/cghHA2bDb1fDKN7yGW8IQQVquVECL6eWN5iaZA9If6vg97AjxrrXcOfHDO2RAYY8ZZxrm2drlePX/+/PlXF67vVqvVVzeXEAhCcLoXZZlwuL6oasuqIp3P59779+/fr9dr40JWTFyATmvw4SKbPH/+/ObmRnAVMDRt65xjnKssVUlSNy0RUgCJwoNDopiUiEM/uF9jQTudouO5PSSwxyvJAHU2IJx9eUU6WgrOGk+Ph43qSHbpm+AAQNAT/tzZlesph+9IzMY/H8vM2QzBWZE+WgK+8C7GTh6MNPFT5w+MAxEGikzAjAECSYQk4dt1I4s0CJGkmUjy9ae7ruuXjSnaLi8NemBMpDJFgrZu0zSdFhNk/NP9w4f7W71azheLy5tnk8z1zq2b2v74Yy6LaZrPJ+ViWjHqvdPB2GAdCMmY8OCNtUUikiQBbN9/7jerVQIi4bJvXQjIOEjOfAgciYTgSEQUQHmmgncOOEopAdreNL12VCOKajoX2m511xktFEvzzGs2fieIGBFjwxNgV6fZuKeG4GiYiAhGCJxwMoK7e2AH+xlghN8UUgIK4pGEDzjHCHftD+fV0W0cVXOc1RuDdjkVy6NJe/Tt+HLjGsjxbB+rxqOJ9wU3GkZB2hOh+FIDIpwooafMwSNjdNgzqL0jlw52RjSN40XhTxDYsYuJiNGajm3XMXLlnXHORbUXvGeM7QIYsfCBHxKFRasVoOu6JEnSRA6jujPwIUJ8M63Ner2ONOJFUZi+VUoS+L5vQ5gsLuapkiG43gUO2Lvw7v0nHvEIm+7dh4/FZPrs5sVXL19Za3/zm9+sl8skK5IMfveHH7yn68V88bOfScnaRlvjJJPee9NbxhhKhmLnrBRF8bDxAFTmWZLlnPMk87Etoe/7pu2jxxa5CzhnbdvO53Mp5WQyif0SA0RZhGsBgO12e2lt7K+31g5F/5tOqywPnqRMVqv1er3Zbuvr62fvfvx9XdfGGJ8kiKgEzMr88mIh8urHj/dIEJx11r59+7aqqjzPkfO6bctJxVQWmHh+c3FxcRH9p23bGGOAszwvgWHXdUwK7+I8Px7/oSAZ90mQoxV4EL2wh2RjjMWQ5tH8iaHd2NnS9/0X5HEoXR5EgB2Kw1hYws45P2j+QQQk4Hk5fUpVnJp1p489REVik8DAoTqsNU9VYD4lRbQX5nOr6vn38YXV4fQpPBNAECggBQ6BoZdIEkEJRs4xxuaLRZJny9Vmuamtp8uKIePO2K7pMEAiVTBuu6khAOMiyTIUsjW66XvHkLiYLFLkXHeu3bZ903V1w72vyiLlHMkjkpJKqoSI
emu73pQpIKAl6rRtut4F7Ixd1c3nu3vnPBD64JBAcSE5h0AAwgVP6BknIZiUzLuge9e21noGKIFJR94FxwQKxSGIsVLY+VpsKObk7LFvL9pEMLbghhTgeOAO0oH8cXwjVfowbLjDTd/NvLh7ysPVxfxqURWJkCxIhjtNxwUghx1hFPIdIwQEkIdlqE9Usuw3wfnRzBnfwJnfioP3M/52COMMWoFiN+HJyYfZTichd/bEvD3iWBn98DyF72mIcrf/fJ3RgU88XCWEgAhAu0KiqOQiFAlQ0NZqH/7pX39zt20DV5oYAeN44PwN7+1JeaTYtQlDLbc1OpbVxTcy9GxESzzELCnuHeRh3QweEWPzJmOPEKDoHIUghYRAfdtxFGmScMY2mzURMY6cM2SUpqrIi6LMmt5zLl0IznlgKLgMFKx1wTsEqGbzqqqMdV3fMc6TNF1uNrrTfdulKgnW5EmqBM9VQhCAAiBwLpCjpxCIuOC3txupVN129/f3vXVcqCTNrHFZmikpyiz9/OFjpuT1xaLIsnq7SXIVQri8vIx8tpxzxnnTNNrYqqqWy2XbtlU1izmwvu/LXCRJQshX20am+Zv3H0SS9sa2XRdCeHZ9ffvpo+nspEiAiCGURVqUk+W6ns4XxhFwDpx9/nS7Xi2vr6+F4EKI1XqbZEVWTrhQGceiKLq+N8a8//RJa2O9l1Jlef7u7fsky5wNjAvvKcolgB9P9ZhCi/nIsVKAkUMSYQpiqDN+GCuF4UNUe1rrs9OK7dFexgqViBgc+Dy4zypEeyLAztQb5j8QQQgHQc6npvIXnK3xt0cq9+ip4JzPd7qN1xQcuYxPpetwn8P4E11JoDGAlscQIr6I0Z1grOua1998a4P//R9+QwicSSFnTd/121YSLqZVUk4BmO56a21qbTGfV1X1kgMpsayb2/s7w3iR5kVZ8sBDbVb3y/bhgUz789dfOWMpBGcsEUmRcOmDtt7YzjokUU3KWW/vG1P3fd/pruuZSJQCDjxAEIyTd8babegEC4TGWU3Ep3kWCdmRo9NOu564SpIEFXOgQ3CMJeNXN2gOONdah7jzvU71ylEsblB7pxg/4x9GYKovDHo8dofACRTIQRAYW/YQkB04P2PLjB2O+3BjQwvN0bo/oJyMz/aFqMDZ2z71Vk9V4FgKvqAeTq+7vx/4N21P3X8YweSePsvwZr582J9y9aPjY2vdwAKNiMHbiBTIOUfOhu5DFgflCe82crHuGO+UQsSY6Irn33tUu87rsizLsuj7nnNiDJbL5a9/rb0zL168WCwu7+7umPN5mUvO2rpmnF9dXW+3q+//8MNsNvuLv/iL58+fr1arh/vbpmnSNHM+fPx4B8Y8n88u/+Hvp0WxWq2mVW4NCOWSWHhiPHIulJRSzmazi83mV7/57YsXL9jtg/UBEbkQzrmmbtfrdZUnsa83SRIHZrvdEpExpuu6yWQC+87jOHUjWFfEUQshxFAnlxkRTafTNE1R5KvNZjKpYr8dEVkLq9XKG7OYTctCbtbL208fXk5mFxcXJbFe2/V6HZOFum85k03feRQk0ulsvtk8FEXR9h0ArNdrxhhKkaZJzMIKIQDs0JCAgCE81k8did64XXW8pAzeXvw/OrWDOI/1WVSiZze2x1uGvSwPwn1s4dHj8XHnrnY0CgUgAPDZdMYRY65wdzKgAfNl93ixrC4GYMKZnlYiiknRIes4PPxAL3n0mo6E8LFdlx9kngZRHC9bBwvxrmru9N8+YET7KrvdP4PgGBIyBOQBhEW0nnU22IAvXr5eb+vbz5+lFK7vry8XbFZ9Wna/+vGnzsH1zVcesOnaq6vLbbsWkgOQ9Q4Ycinbtvv08ePmgcDn1ewqm8xrax66bkNuac2KnEnSoBKNXOWpylLrtDW9IRBZ6RlveuNsQGLBWNvb55fXk2ImVOZBakpqxx8MPPSIDEGlzoumA3JpkswYFtZC23a9brW3gZEXzJMkVjA+1W0TPFHM
MzGOyIhxQgQmgEWqNk5cEuPAOTEegoc9Nx7j+3+M+xAQGWIsgoXYXweA5D0DEIxHRl/cj+Zu0rMd+dzAP5ex7uKyurqaF5nkPAhGXCDj3FNgSnElkDEPjoLjDITgjLhgu6ZAFuuQ9vZdCMHH20DGOGdSMiEJLTEKGAIGQiQGwBEYIueRdzAgABexlAM5N4EIMADQjrwPQkz4IUPGkfOBbQ4550IiEQJSCBQIImY2Ms548GHPacf3nH2AAMgVME7ICVkAFgADsACMCRWAecIASMiBCeQiHkaIwHbkfAEwAASIh535hwEYMsGEYIIhw0hmHoAB25MisigCsXKYUCFxBBABkQwPPrpdmrgo5x/X7X/+51+3Di1hIJCcO29pH4MZjJRBNh/jPVELMgZAbM/XGMn/AgByToiewBEEZMAFMe4ATSDBAkPa3TcSQQCM9xrpFMk6Z6xzPr4zxRCAMU+BS5FkCROcSz6pJgHAONd2RtvgA+s1dT11mrTvNtu14Pzh4f6b19/efrrN0nJSzvrOtHXbdT0iKSm4AIQQyDb328W0erhvn11Xn+9vVZaKXLEsCQkngXfbdV7mSon5bGa7PhUy6E6Qh+B/9+vflUXx4tlX6/uHNz++W1ST68W83dwH0y5mybRQVq+VJA08zfM8z9abdZIwztFb3WzXgtNsOvnw4W01nXW6E0KxRDZtr7ddkubzi8X98gElPqyXKMXd/Xrb2cnk0gaZZpVF17sQBFu1zWwymUyqutmw4K4vyk8//X55++FqPvmbv/4r6732/vr5V58fHr759rte97/59b8+f3FlvO2tdRR+fPPW2qCS7KsXr3/3/R+kyLVxAIwAOd9x3lHw0d4duumj6orde2wEOzCs4TGEG7FEooKInh+MOBmiUkySxO2VRVQoIQQMJBgnHxigAMYBGQEEgkCMIDI34ShxyDgbGgFjln0cmJScI6IYq+5xhPDfYPKNrEs4igidGKSD6joCghnfAJ2r2Tt1Wf6NN3hiqO6fkO1KDlFwHtFpI2ZSZCh21t/d3eneujy8//TxejKdpEnv7dXNs67rVpu1A8qqalpObq5dCOHth4fNZiMEqyaTqqqQoKnXwejNZtPVGzOfpi+edZ321gRjQgiJQAQCH4KzFJxkMCkyFCqfzQ2JjXad77xx5FEELxl5H9q2lRgkF4jYtq0UEJMocYAxOPCSIJL47KIQB3G/w86Eo1E4G17Gk9oQGoqpnhiLI+snzm/ODxFCD3+6t7gOME2G7x8tnkNvEs5FCPYHsNMnPRuNOH3ew/OMbvdc5P/f6iHBobH85SPxiznIQTrwXLHP+ePjT+AxHssYjmpg47XO5whO397YBj37mKc/HMcMdmUsJ6Nz6ozGnWWaxhV2CKBF6obYBT+eA23b3t3dlbPMWuu0i03TwFnXdbrry7Ks683d3d0PPyTXN4tEcKGk4Orm5kYIQQQPDw9K8gglo/vmZXYTAjnn2rbNUhXZ+wBgMpkY6xljeZHe3d7++bOvu66TctciKaWcTKeTsmKMdV3Td21+8SxKa9u2WRq9Ya+UavsOEWNzW5ImiBiI8jxv75roqKVpuqprKeX9/bZtW5VNm65nq9U0z4p84nTtnEsUA4BtUy8WCx9Qa51lmXvYKKLPnz8LqX75y18SsMv54uPHj8AwUh5yzut6U5ZlkiRKpbEl3DnnHDjvhgRF3JkmakjjRTcuDgQ7hJAe1oeous4EJw/5LNmeMyB687Rv+EPE4ZTx7HCoLMYzczwbd+shPu6Pplv0/MQgLY96Bf94effpNi59Hgvk6WpyKjnjI8PJdY8kZ3yS4TH+DVvkH4DTp2NZWsSeEmeD7u3z65vZbPb205vttsukCsD+8Oantpr98rtvOOfTchIYtKbvu870Wql0Vkz4NWy2vdb9/e0thLBYzK6uroo8aevN3ccP9doGo6vpxFutGM1nkzTNeWhZ8Ogt915SYIkok/KSJ+vWBOt825ht47VDJhMh
0wQZV94ZyXgqBAS9XW8SxaZlIaXkxjHnAkV0GoIQgjPipLCCRqngLyyOR+2Pp7moOGGEPMNdNV6n4jaoPSB2GGemoXpwvyIfOvQ7xgg4vtsYjCXEwe85nFGMsdHCPmBp7lo2B612FEKAQ8V5ZtbxSGi1D7HirrZzpJ9H/++n2vAP9gZl2IVPEAHHYRrY06nsdHq8evRyz20YzputT07/aFvsz0YMGTEiQKTIdzisUIPMwjm1R4dVu4OKYufa+OAkTzH8NviAh5bH8NujdWn3mQskiHsdeesDasPbrixLrhIm1QD03Ftn1hsQPoRgOsMYW6/XSqltu9Zdz2ZTIup78/HjR+f7Z1eXaZpPqgCpZ4xVldjU7vlNVrfNaqOKVBJh9FTqui7yFABj+k0p1etGKfXs2bMf3v1jqqTzFoAyJYH+3839aZMsSZIghqmqmfkVEXm+elXV1/TO7EIA8AM/QASf8OsBCoVCCgGuEMBi9pqe7uqqd+TLzLj8MjNVfjB3Cwt3j3hZ1TULmrzKivT0cLdD75OVUtV6XVUVKt1atz8cywcJwfpd11Xl0GrOGCNNzcxFUXSdD8XJOtuXZbm1NkT553l+fHqqqsp+eHaOycnxuHe9rYosLwvvasdunVWoqG3bb7//fn9o6ra73WzW5TYv8v/3v/1f7t59k5fVx8/PzPDhxx/fvX9/s14HrAz6VlVVxuRt34dQGu+DHIxjvZUzlhPthynZT3mejM6FMH8Zu3CkWAaJJS9YdLM8D/GrKWGP/bZwpiClMEZJu4IxfAEn0KXC9XS6E43tZ41oeJ1cTKE5xYFg+UzVz7mbIaWkcy1kgnuTIRfGGcmImR4IiLjZbEQkdIHP8/zm/s57//KyK4vVzeaBAZ5e9v/5hx/+/OGn1vNLXWfV6v1vfnv/+OCtfX3+4up6nRXffvttVVVN0zw9PT0/fWnbVmuzXt9sNpuyBJ2ZY93881/++uHzUx9CkgWUsCEsja4ys8qy2zJ/d7POwefEK5KV4krxWvkV2QqciA8sxDnX1k3fD5VtQxM7rTWhIIgK/juW+Q6npGdyLhOwHqts+Llsged9G+YnEvU8HDXFxeMIUBNIFSIGjjgvTTSX6eKTJ9OYo8SisPVV+IFzpWR+cSIEXGEzc9hLucJkCZNIsTguzXMeEZrWNFmeD57pmlELD+J2tFzhucVFZnrt4v7DTFBIj3K+G5NzjMXndDKi3QwRg68r3R9r7fF4jKGA0YwWPnRdp3WGiFmW7Y+HoihUZoDwy8uzUqpcrzzDy+tuXzfKZHe398xcluXv/vB7nUFVVftjczgcnPd93yudrapN37vjsTkej2E5oQH6ZrP5+7//+7u72+Px+PDwkGtdFEVwcAZjHYxK0+FwiDscOk4Ex2fI01itVmEViFjXddiZrutwDHccelVmar/fW2udl75zzrGznllCVpz3nhnCE6y17969K4vs3/ybf/j+/bf14fjxx792be1c37e1JjocDtbasiy/fPkS0ipCRyEei4hGzA0Tm+jfEzif5H3i2AM1HFmKpJOolqDthWpt6UtT2J4ThAnU4Ri1MKf5EzzSsoSx8jNVPTgvEvNzH3JOki7me6WPTcOsf9Y88ez/QXEOuaJUFEVT113XklaPt7dlsfry8spO7m8enO12h4bybH+s//FPfxalv3m8y1flZr1yzMFswiBGw7uHe/EuVKN4eXlpmqYssiIzt/ePt5ubh/s1OHuoG2NM58HtjlUuiJgbUxZ573xrhftOBN/drh3T7aq6W6/2Tdv1tutsa/tt5wuTCfjDYct9uy7M0M1SQBEZhc4DikdghUOB06lAcF4odrI/aREvTCT3uQQzZ07pFwMZorGsOcZsZfDee/YgguxBSJgZgZQyTBRzLBSIIqTz5uFCyAjBTjp4mJBgjH0M9/izxrkiQaQZDxyjqCMS+6oDLKh6KaYtMuB0yGWj4nVoT58/4V6cNOe6NhSd/NYjTCMiC8OoYnKS
UwiI0y6GhCIIhM6zG1vcxXWF50wkm/kq8IJSGH9Nq2bH58ho7IqAhHgWA0Xn5X8BoK5rIgohLdHaFlqqhmZGgX8gYrih77uyRDI6y4rD4RAyo8X5pvaZ0dpQ3zRd53vrnRdhdsIAcHd397kqq/XGdu3+WHu2N5vV7e3tZnPTNcemaV5eXu5vbzOt911bVmsg9f7x3R9++7sff/jzH//w+y+fnxRhczxQmed5HvLukTSZLJhkFQ6Og5i6QETH43G1WrXt0J+2bds866pV2bR16NAEANZarTNj8qapb29XVVW1trfO1fVxVREq01mvdfbx8yetzH6/t3x4/933L6+7v//j33WW/+nPP/Zd2xz2q6pA4M2qenp68t7f3T38u3/37+4f3lnrv/n22+fnZ++9Ik2EzBCSJo2hLMts7yKXirkoIhKml0oz8eyCLJVWeEiBREYfSrQ0pDtDSdUIOTcl4swqM9A3HNAHYKjMAaNzBAAsWwh5exP+9At43glJxoo1ETcmXHDC7SewDgChRPacDF2SPa/kLS3PMj75/O9FUVj2gOi9L4ri/vHBe7/f79fFDZF6ef3cNvXj4x0g/vSyd//pP/1fi/862xVeBIRXq80qL9h5b12+vnl8fAztE+u67rqurp21BoUfbjZZXrVuZ4rV5uYedH44HmqEHFlrIqUJdd8f60PtBda3jyrL1kWWabNZVW3bH+pjfVRdWWpSrj12RDKCS9M0MXuUHHtmJK/IgBakk29v2E9SESCGjU32ORozJ5RukucXOd+EScQXxTIKdF5Gj2lSzUchAggQkaCCcAWERr6Wkk5JXxF0o1nOHuI0Wio8hOjMuAonQ8KZVTOVDRf3IWEfX0utWfprfOYcbuNmns4l2duLLFCWuealgcGqSsicyIDj60ZdQVCfPe2SKDDfLpiZEya7sbghk82RxHo2EdtTaSDwv/CVkB5XFEXbtrF+IyI677rWIigG8N5b77TWoOj+8cF2fdvWbd8bo72Xfd3U+8M6y1rbI4LOclQmL2H7/PzTT68a8f379w933wHpvu9fvjyviyJfr733rrcOrHNus67+53/7b//73/3h8fExmOyqQldVlWUZ26H+SJZldV0rFCIKwZyhjkmWZdvt9u/++MfjcdB4Av++vbk5NnXk7iHpn4h0DtX6RunMe+fZNW1vlNbaAGoB3G331eZGgPq2JRBm3j2/glJdc1yVRde1RVkpkGpVfPh4VErd3t577w+HQ0DZ3W43bnhAq5NtJtYNiFfmKa0pDKfMbELDJ4QCRh4ZqFDUo2j8oohAErcJ5+ImjnYFhqQn7YztCRIA6LNqZmm32LdImktIHtneZJ0TnJwIhinnX0SY1M42kRd+1kCm8L/wG4XNAaiq9X5/FGaldVWty3J1OOwEoczK7X5ne1F54YCoXGnbfXg93n781Htm5psiL8tS+cw2bd31tm8JZbNel0WR5/nr62td1113YOs0KWtt1xwyk4HOX/f1cX9UtS2KqixLJNUBNtbv9k1r3b62RbUuVmsymUGETDtn2Pm7fNPWtRUps0yImX3TNF4rIKW0zgGdOBBQWqExwujHuMeJsk/qUi3KZS1nbkgcgP7c8RbBPUZzxVePvwYHG9AQKipKQXTfBb4yJHGBiIhHQYRQBJeiNEcYGCPBqY5ogAnCVKJM1NkTz1vodT6Xz+YwNmIbJk8O9wik/PncuRf7s89DPiDxZOPokKAlsSPePx+ctLFMj/gS/BORCA9mjvHTuAPs2FvvGRAgBIQuFMueM7x0xy6h5ISXnyYz+PYw0Ngoi4AMWmzgpSNLFW1yRPQMbEc5jwgQe+uznLTR2uRKd4iIpDwLgmqatigKETDGOOcEpG3bPN+ExpzKaFSq7R0e2sP+uH532/d9SDY4NnVV5q21rzv+8ePH1+1e8LeI5K28vLw83N3mJssU1c0BtSGk9aokkb6tbzdrETm8hlhfaa3zXcvCSmfOue12a9S7WI4r9Acv
iuLHHz+EHH8RCcVcAKDI8sPh0LbN5mZl/2pDf6I8z29uSESOx2OZmaLI6UBCarW+ca4+HI4AwB5Wq1WWFU3T5FlmshxIf//NO+vleXdkAM9s22a1WjnnmqZ5//7956fn9fomqC5aawQSRK1NXgx10UL+ZWAqqVcvMj8+b90DAEFpgzFoM9oSwivCzbGeZxAFgiofOzQF2BnoP8sEyCkpwjewPfawJKhFlgSxSstEdJUkgu6NY0IQ4ZyPzm/mC91SABcYocy61Maf/LWS2VdGakEriuL5+TnXSmttRtt6lmXsoK6bYrXOCnM4vCpDq7vHxv70p7/8lGnzzcM96UwJiGPvPXg5bHeDLquNoqEsU9u2tzc31vndbtc1x4e72y/bve/7uq5tpcvSbzaQVyvrdS+mZWot92JZOVZWM8Eg6Lm+72t7eH760naHTVFmxnSNdSLBB660EZUZ3wqD0jrLjJCqWwdR0wq+OmARyZSeyEFhpCVlUz4XI0I5qX0OAKROCl+E3fRMU8MFEcWsGiIiEiIJheUGKixIJAAoggIMkcvMIC2lwil4nfPa8c4hqxWu8Lw5NYdzm2d68fTqr3UsmTBRmMl5E0V50iniq9rbyS063h8tLpfwVAiTCByIGyWnQotnZccvbnuyJ6d7liaMiWEghQ0YGf/iJi/qr4wDMY3NaWMHuIB3xpiQ9AYAfd8jnnLFQl83Zq7bhsFn2pTrVVEUXde1nUPdA+lgIM3LIi+r190OERkpL3DftG3bhmAIz9wemuP+WGR5tVo3/W5VlJQVDw8P375/13fNzbpCxOcnFQL3hR3bvjCU5+Xzbvv6+vru4W61Whk9LDPYOUOzveAMC9H8PIr6Xdfdvf+Gmde3axapVivL/X7fH3YH883jerVpjlttaHWz+fyXl6enL998801nXVEUVaU/P33ZbG6fX75YB1ZEm/Lh7sYxkNZZlt3d3e12u7Ztf/vb3376/KWqquBBJKO9Q0bIsqwoKxE5Hpu2bfNsqEkCSTNYGPPk4kFE8p6ifwT4CUOKeYE09mpQqUV03KIBXM7FrGjonrMGOBfCwuehJucldpWyk0XMmwjIIfY0LgDOxf/5PE7BOYnTm4hiDfgJuE+MPPGeeXpyuOFip1wf7KtaRDxbRNSktNZPT09FURhjcm0A5OnLF+el7zvuMSuK3tnDsQGdg5K277N8xWw/fXldZR/Kv8uLzZqld5a9YJGb/f6AiLdVdXNzo5RiBmPqslp1TWvK1eO7b0D485e9s11RFB933R2VvfL+uGNmBm0pP7ruuD+aoy/LrqiqoZdjVqxN3nBmlPZ8pwW6tmbmTKub29uqqhiwsz5fecvoBQXQAzrXnjJaBnfxqW8OjpzqJAcpnO9z/DAXViTpCxNlQBzLN0jS8Sccd5Zlfd8XRVXkFdij1gjslTLMgAq10kSKCAmEUFBIgtDDHBPpGUFAcLDdA/DJogUAPMAJ4gkIKeDLyZknp1R3AYZEl41oOVnm4l/TbUwDDuf7Nof/ibgWu7FEQhC/NWFpEUnDnJXR4b4RdYkICYZaUAjI3nsZ6kgppXrv8bxWOyOwSO8dEP71w08mz46ts32HpvTeQdLXMJ3DBLvjiEQgzjkErwXjFYzieQxqz0Y4meyJjH6QhBlDSjeDeTMQsjjDPM8DLQquJmOMIhNCEEOAyf1jmefZ09OTCLd959gXWV5UaxGpW2eU3u52t7e3Tdvf3t/lZVEfDtZx76Tbuo9PT9axADVNJ8z7/T5TipQOqsnzp4+/+/67f/XHPwBl+7rebrebdfXNwwMiNk2zLgvHrm/a0K3aWlsUxZ///M9lWQ5RKkY/PDx8+vSpqm4674YmFah3u12e533b1YfjarXa7/c3Nzd//fDifb7f7+8fHrTWu8NxtbldFVitNi/74/03335++tK27b/+13+/WulQDyzLsrZrjk1jMrn/5n252rR9v90dNncbrbX3Yoz5/e9/XxTVy3ZrrTWKEDUi
WGudP4gIM+R5TnjS5+BcOYGkLUNw+wWXZNM0Mpqmo/02bFp4Quw6FJQE59y7x8fD4RDS229vN9vtNlRdEThFe01Yg4iEno5BohsSqBIlShK+oHFmh3yLZ29irJhTyUvCZooey7T1Z6qZE7Y3f+Z85qnEGoPHTDagX+cssg8Rvd57gZBBDF6AQvktJaRz33Hb2+2u/vzymitamay6uTVF/7L7YkLfkK7TOru7fUDKttvt8Xis2857L4y5Ua3jpun2TbcqypWusLqzTfNluw0NGM364bj/QGwPTkzrjamNyUOEWvF4Y4wRX4L4qqrk7i7TlGUZEHkvlnvf+bZtOydyocHpXH7/WSeeHlb6/MXP8+cECjZMSUSEQulhUopBCQQyF4xvgsJoTlUZr2gwl6EuBkyfuG9Ubiaa7lvGZCay5Fqfq4BX0CH9+vXbIDHpxKOUuRn2XByGGf8e8igQOIR0jjeesTQ5ffE6Ek1G+up0YhHf459GOX0hwHURwOBchk5fEX72fR90jjzPcSx+rTKdiiyBtuZl2bf1cAXQC6AQCnmhEB1jTJ5lWbXahETspmnavfv46el1uw9VGLu+sdYeDoesqgbJD9BoullVaAqt9ecPHwpjghkGEfd1w7YH8TG/kJNO2mEtVVWFbhW2tXlWWWsRuDJqu99997vfotZ5nv/pp7+K0Ovr7v33fzQvh6EZBViF/He//4cff/zgRHUOekaP5AGb3oqgiHgnRKRJMXNzrB1j29m6rp9enpxzxgzasLU29BsCNQabkUYK3TmAmTND6TFF6E39qXHDT56pc+44h6L4IYbsRpdhiIiZvHSCF3NSMwLGMnHQUfiaYsvXyMGc8815z5WR2mTPvqh+nk/xOvrNh0INAiIeRELF48ELJcjMzts4GWZhQBDxGKpeIAACaYWYZdI7L4z7uv70+aXQxjw+VtWKsvxWOhGsm+7YtixYrW+yLCNl6qZjwbyosnKNKD3Xu2NnLcP7zbYHd7T7ffPlZe+cK1frsiw70OyY+x7AKYXG9Fmmtc5s/RcAAPYgQ95lqI3yzfv3lrnuXd12r7tD11vUxhgjaFIgi9aGCZ29JLzPT3y+zynkzY0HMKHLHMkrRA/T6dXDBEauyKxo9OmF5NOxyqeks0KIXdFhcDaSpBxdojpLgLHWSyhJtGxRvETuQ0AQzwSsM7KOpz/JGFGJSYhOWMvpsUSTpwEk4sXwlTEyNf4eGdg5EqVlU2BMFgxFT2CkRyejCwgzewYBcOyjJyYVhiZbgUm68QLenat68fMgrZ+3jAEAcT5ZpUQ3HjMjkkhoFkGnl9OQfhEnFYG27azzorU2WWGyAo7H3noBImUAFQt4YesdIt7c3HzuOgYQBvIMJDTUKWXwjutj73m1Wq3X62K92XgvIq8vH//0558+fvz02/cPWZZ1R2Drjv5QbdYmy5mZCJSizWYjpLz3vW0fbx7LKkdApdSXL19E/LqsJoaEGHe62qxXq1Vgk6FxedDSda66ps7zfN+1xuTPz8/l5l1gDCbPuq5XSiFIWWX37775j//4f/QOqOfWsvPiRIFjVKbpbGd7IirL0gk5L75tnRcRDJGlANQ0zTfvv/v8+UvQDr33wsAIzIAUxLhrklyM9E5D2GDG6iCp6o7JiNAbS7kGVU/GIi9zTEw/YOLYhiXtK8JzuLKo7f3s1L1UmFpUfhfQ44K34+d66oLbc85ur7w3zHSCuiGkimWQWWJ0fudtCO8GJkRUQEikCCjjjFAA98f68/MuyzK3WRtSN+tN2IS6bvb7fdN5ARKBrCh1VhqjgNSxaY6tRZ1Xhaaieq37p92hrlvnrNYZW1/bBvMSnLDvvRPL7Jw4BC3ct01w52hC733fNSEmW+13wmg99449S+9Zozc6n2jPATBhJMSR2aSoeOWI020cDn1mr5fRy30FTiCproLnGTwICkkUKAIXmFk6YMqwz2aezD9ZhaS1WuYLxCsCY/quCNiTxcI5fZ9A+FVN
dAr8OLbrhMuYPHk+zfF3pnDPNzDKBDKmTgb6EmIWggdLfiYRSGc1WXJ89VwXjMRorgos7lukkpM75RSGysHhFzLnEDgk8AWADJR0vV6HGH0v4r3A0FAHPKJBaJrucGzqpnNeNptNtbkpi+LjTx9ft/DlZfvdN4+khmLKYceKokAUIhLvjaa26+vjvsjM3c1tbobayrvdLsuy9Vq7ro59doIoEHpVA2HI8AuhHFGKCnmEx+NxezwE05QI/vYPv396OhRF1bV7ICyK8o//8HdtZ3fHQ+8zUkVtwXXWMypF5Wq93+8Ph7pcrfOiUiy9R8/gWIRxvV4jolLae7/ZbF5fd0VR5HlVdy0DsaTG9gU30xxZJtdjrACcp3HHKJhU6GRmNVZ30loH9T2qejIKfYtwApEogUyQNFIqAAhhBBdLf14aKX+eaAwpyqXrXHzOxEsUb2P5eYxvni84mdVkjOwZTrF2gswQnAFAg/IUH+qBcUhSAQACFkXEQkqMVqQVWZHXw4GeVOe5LPM75d2QP673TXuoX/OiKlerR5O/bvf7wzZgJmmzKsqiKLxSbeeatul7Rwq10gLkmUHpoYpisHsj9cLOuywrfNeR1ut1FYrzkjKbm4dj3QsqpVSWF4Wgx5aIdJa7eYjKyBjSK9d53qWdTMU0OOcBV6SQ+K7RKShKGZahKDERkQKNqBBRwAcHD6HQkJ93ZtbDs6fhaMBcgts0VOQEwAlGXAT4+OHKnZcE0km+6dvHhE9MwDu+jmbdE8OvwYMyedSgJcnZcbCIFwHA0NTRCQMpQJSgZ8sybFxaTkpr4g7IbLxl1SmdiUhNtNyMIjpHAysKsS3r9bptGhryz3oM9axtX5alzoxj75133gspYGZBArcuFIuz7KHv1PGAiKvVqszLx28e91++yBiyr3WmlALhuq7X67X3XgCs7YKudjgcHh8eVqtSBIPydKzbLC+t4zzP03pdcW/7vs/z0hhT10cA6Pvee4+g2rZ9fHx8eXnxirKs/Pbbb3/4sH333R/+/MM/3t1/uzvUfeduN6s//OEP//v/+r+QMvudNZlrHdueGRBQ56X+608f+t5Wa8yygp13ru+6tu59XTd5AW3bZtkp9CzLMiLTOQuCeNa9MrjQzlr/pFqaLPme42Nj/juciDDH6/G7gckFX4+MtvG0D9+E9USAOeFpAp8poKSf9QIMvRktzwS0mbZ3Hc/niuCwBv+zIzNT6fg6z4uvhoT4nkW+RUI4Co9egUIhTV6EGJGR2bNjduJBOAeldGP5w8tz7e39ze1qo4ko06YsS3Nsj77p+14XlXVc1/X+2OR5eXN7S6Tr+vC6q22oJ8zACNZ66+ssy4zJrHXDISIAkOdQdIy9dyHGt80yZiCVo9GgTXOsAX2WZSrLtQEi57wn56KhHM4FFEwShONeXNm6iVSVHOIZUV4kbRNQOfsTqGBCCU1ZB/QiJESFhDDKhxdYMi6N+TwhYdITdkJ0YgkTcJqsfcDemRVFzj15Z5t8YdqLE0tnNRGB5w8/PSQ1lo5FMeKJx6iBhE4BxAyG8ylFfzaa7PS6C0h8ke2dG8zPOO45VHAqe84Y3sTBkx7KnDWGJ9CoKIQSg1mWFUXhrEVEEU+kSZxzrm3boiiKqmQBlhZAgRCHNkwwkN3g2wOhY9M5YZuZar1SvtdZ3nW2VBSskUK03+/v7+9ZAJR2vV2VRdM0GkHnede2Xe9CYUxmDhl79+/vQjRHiN3I89wonWVZZ3sRMcYcj0cai6dohR64qqqPLy/3377v0Hz33Xf/6Z8/hXCP4Cas6xoVWcefnp6rQreO903nWUxeoS52h93D3W14UV5UJs+sOBDHgL11ddM4P9i98jzfbrdd17Vty9wRUbDDB7KOJ+ulRKaVCr50bqiPeBHde+ltEQzmaX+xMlRI5CAiYaGxnUIqsMOMywxi1iwobI7Xms97I5zoAlwcc0yeAHd81BXOl9ZlSPcFfuZI6c6l7UiHAgylOgiJ
VLDxemZWygAAQyhIeIofYw2KFAEKhZZbzJ7BOWTomh5YKVU64K5pmr7rnPtWrVebm8xkqu2NMWUJzst2u62b7th2gEppA6i63r687l5eXlUx1GECAGbv/eh0CT6hYFgY8otZRMCjNhkg1k2b5/n9u3fKKM9inWe2TqAgw8ye2TkHQFVVwCxGDmbEfSI9fXWk/GNChmDGRVLKzhz6+55xShi5LyQTYGYaGgEjpdNbstdj6DyXdPgTEYSzZs0T/jSZMMxEscXdYD81s4RMwZOV4jx/HPHMw5G+9PTMJU0xZRUcO1pc5uupbhTF5BT5x6JnF0OYgnnt3AyzsBVyNX0eExtPtGKFhK30OXFpk8ZDyepi/apgaBm2NCQwhLI0Zw8kgGAYRGYkD+gBEVCrDEmcG1QN51zdNpVbVVXlHVv2wgqQAEGERTA0NyeTAYAgOMdcd9zZ589P61z1zr3uttlNZYxBYRQYUtmsRQHDvFqtrOe7u8M//fkvTdMIUFVVNzc3ZVne3Nz89NNP3m8AILSMDkbLIOtZawFaRDwejzcP94fDAQC01kVGx+NRQlnqQ7ter6uq+uGHH4iyruuKqmTmzebmhx9+yPP8db8VrPZ1n2t693ifleU//ef/aBSZrGDvneO+c711QKrIjRPqPYsbgnSyLPv06VOIenXOmSIXBhIOV1JYTYEzZkalJx7PN5qd0yD/qB1O0DA8wY8qYIwA994t0vMI8HBOviQNY6EFWgHL2h6eZrMA1ksdLxfx8PqI2m784lx6fcuIjtMrM5zMNrpMMVHDKenPhIPOQQBgxSEKkiIvAo4Z0bM40Vr1XQvAZZU7lGPT1D2wwhdts6wwRKHLxu1tVTfd/vl1bJ+I2+3248ePXddFq3csUFSWJQAwg7U2BqZPpu26PlTeC1Lh4+MjADw9PQWVEYJMRFprLYLqQs/VqYj0BhV5PkaAGz6n5AzPrerpkyc1QkPCu4gQKR69TSLAIiAswHApfftnjkva3lcBLpUM0tVN9I9FoRJmXP/SNqZ8d64cpzAZ558yy8nmpGrfRJsMkUQCJ8F5cjRRpn7jtszXk659UfdN53bl+akUm+zqVDiW2RvHtOUkGwxtKNNjrQ1qVmg+oG3PngAIBBlFBLqus9aD6hEUklbKoIAX/stf699/o+q63qN/XOWZVsEuNTB1z8qzgC/zlV35m83m48efXl62Rbn6zW9+Y4ypqmqz2fzwww/7/b4oijI3oTILEbnehibsznGo7fKgdd8fgz63Xhefv3yhoiCipmlAZff39//7f/ixWN0eDvVqs860uXt8+PHPf3p49/iXH37Mbu/qwz7frG7vHrIs+/z588PdTVEUP/34I6DKi77zIirLV5uKyAm8PL3AmN3x5cuXu7uH1WpVlth7BwIudI5KWh+7heI5A4eLIREAkJZcTsE4jonaF8+OR3CNHHRepQ+TEQHpJBfSybkgM7YXhrq5uYNzliuBl6TMLIFwth5EYtOtIHURonceAbXSeZZppYUZAYzWSp/65J29JZEUzjSDi9i04MycgPuZDH5eefJ0bOCYhEmY2IuzbD0wAwMBEIS2cAwiIbgGMROFjMhAgEgECoWANfbsHIGuymy1yfK1YO68sVaXSokuVXGr83XHtN3Vn1+3h7p3gj2r1nLjoQPlVcamYlMKklAGKvegGQ2jQZ2jzh0TgxJQAoqFPKNnZKE8zzprvcjN7d37777L8mK72395fsmLkrTSSt3f3R7228wo9j2Cz4sNomIgARQgJK2M0cZIjJsNMSUESECEMJZBAURSKmR4oCIGGeIkceiiF/rnIRlBEiAZJCsEQB6dzwokRIAhiCbSinLsb2/Wv/vNd1VhlPiyyFGDUtqCCBISkdKkQCGBYkBklQsqJBWa2YV+W4DIQwfIU/xRCHwOCYsQ1oMooRwlAhKGqQCCoCCFZvDgBb2AYGhoR0BDuztUemy2p8LFoRNe8B4OsaPDz/Ah/AvzGtrxgeAw
YQo3wTg3CW0Lx5/hAIhUyFzEocEujnRVxYuJX58Qyds+bHp4f8RKkVMzhyCWD2gSGj4M/TJJQDtUTnQH+uPL4f/1//nfDj040GjKpvOkslwTEcnYEor5pGAn/A9jU3iddKIZ5HfvY8vZFKNHXQdCAGfiHwFENEYjArNn9iEALazNaIobBMwooaMiEgA7ByH0V9hbSwhlkQM21tVEUFUFoXbOoxChyU1JaEgy8CCOgVmTyrQCpKfXo2OFujjUjkUVqzWiPh5enl797373zfv331FmsnK9bRpTrTX6ruvub9fsHAqD4P033/37//zX//H/8T+3bPK7d7eP7192LyZDhVZhV+oSEdq6frhbP395+v79O/EtivPsNptVSJmyTD/8+Knu2WQrUr0yebFer9d3Rblpavf68lof9tvtocixqrJv378Xgpft/k9//ri5fXc49nlRbW5vH7/9/j/+6c/l7bue9P33fzg6/vi6p3JjqtXnp+fn5+fbzd2mWjlQm9tHk1Uv20Nnoe2c9+A9bzZ3xhid5UPyOCKCIIgHxYAB32koIYhEKkBZpghFurZRiEYp23fb3T4WbAqpYpj0goaZ4JgrYmeB+f037748fWbn1uuNc47GCGok0jpX2gAQC4ghlWUqy4bKF1oxgPV+QOQTcIYQRQqRwRrPjYRRXotR0QNURs50WTpbtFKmwuxbBPav3nMu+p0VLfu1Bp6bwia/ynl1AGa2bENyCTO/7hsg1TNnWdFb23tnra/rOqsqRFFKKQEVYo2YQ2R2JBPhFTx2VJjw8vC63GDo9XV/f18URegufXt7G01hY076yTsCAOpc7VgMUp9rDG/cIjhXbs4fuCzOh2xWLUxELI6ZAU9FHMIepxY2mEk88zmP81mwwF/RKlKRa37xVwGhn7uHv2ykiDZfQnIuE0UqGniGvPLJNPi8F8RXpxpvm8znyiYE5S/CfxSLr2h16QbG5aS6soyuyiwzzOw9O+dEILSXU0q1bescO9/DqVs3MLP1TinNIF1vAeBY15vN+t27e+c8AqCAtf3upb4ti6IoQsChAnTOAQ4vRZb1ZhXkjmC/EUGjc89cFisFqlqVLQ5mqq7r+t6R0kp5GKN1giLocWhEEAohdV1nRYUiJoiYZdTZviiKvCo/PX3e7/da67ZtESkEsoaaL7vdTms8HA4iQ7NAAKjrWkSCgzBYoYKdU4CC05GZnZcQIBdKhQUt2VorqNMTiR+iDgPnit3PBeM0BCatwJLCxhmcwMmamkJUCiQp2Azp6hPokcRLeYLjBJo1TpXTOdCf6bB/KwGZIsnE+vErsr0Jog4cAgFAFExLMaFSwaIYSgP0zgaj5bFuBJRlyauV0ppJgQrZ7iKIoEgDCqFzjlGEJRbrSnXiNAlm4tRR2miT39zcbG5uu67b7g59b6tq3TQNiA9FBkkNFd8Rh77hseAFzESQ03tx1Aa+dgSLBsz43VTYRznzKaYTUEpp0ETEPoTYLLucUp53tg9J8ZdUMhCY+ucmk4x3xt1OH5Kesiz5At8Coun+TL47/+ul2b6RC4bsRx4b3cWBo4d+yO0bXzaG8AzRHzho7wgw9L8mIuDT2idVkxYFjonEMKUeV53uE29QysMWZej0nskc0rpuPLafzfPcmAzAhkoRWZYZkxtj6rq21gePI5GmoeS/751XOmttW3c9EXVd197e3NzdF0UufRco7Mv+5bfvHzZl0dTHTJMI931bZCWyWNt5b283N8YYh5Tn+b4++q6hm7I5HqrCINiiuAf2YauttV3f397eHpq675x00rS9ylgp5bx0Xedz0CYXkbpthPKQz6CUKsvy2NnQ3uHl5WW321Wbm6Zpy/IupL3v93tS6nA8EvJut1uv19vtNhbSDK8OsT/WWus4RBgE7y8LHg6HLMsywNAFCXGwgXCKCOcBSpIY3mUsxJPiWopil8A7RsqEIKDQ9/tEYRYOnVJoibaf6EuekJEwQw0JPVoE3Aidwyd3ERbD
h4l/flHqvLLsK9uxeGd84OSNP1fQWLx/TgonZxDCxjCpVNkJSm/9sTVesjxHJFDKVIUbPAFEGpVHZkARBo8zXh72MMQ9D4aghCmGBhGh6s92u93tdnDuKI1l/UQk+PAnexWsXMMDE3oUI0bm2zvZ55RkL5K/ZD1To7QiFYE7zCS+HWayPCavmOzSJDb1dILnkY1X4CrlbVPeeQEzrwDVpf2Zf+USClzhHJEwIAAAOY1JREFUc5N9XvxWyjyuTC/dxsBySAhZRKRt22BNEs8yqn2LeHdlPnNfTvxKKlvD18aiZplenADGBCADeDjnum6o2xn8lsHMMBpghxDu6LoCABYE0tYxe29y7Ht7rBsRefft+0/dX7quMwr3Xdv3vVqXw6IE+rarihKAvZWuafM8/81vfrOte5MVnz9/ZNtmOe2fX+5vNiXIt99+m+XaC2dZNmjY2jBD3/c9Q9d1awBjTGgDZa0UJYpI3ztT5CKyXt9kWZaDiDLMfDwe2QsABT4amqSLyHa7fffuznvv2LZt++03j6vVihkC9TA6D2xpfX97PB7brg/JgiIS0gfbtgWAIAnhKH9nWWa7/gTnM3ku5XAyq2MHbxPmIt/KsizEQHjvtTanc0+ydQXPynLGHM2Y/wpLnELPafocXlM3GXsnMx4zWeeZ8vszpdevokF6JV3YL+N2V5AtFW+Dty9ksQgO4jMAeBHxPngvSStEbHthFtu1Yjs6ktE5EJIyfV8TUYghBWRARvEoPi37Ozn7CG2pwhesHM6519fX3W4XrEN1Xed5HjMxUvEncOXBvD42pkMcOhiMYtypEsokqWsyFgl6cgoL96fLwdGsP7QzVSdg9UgMZ1GagRzxBSlvjkt4HoE8AforK0o/pIc++fwW0LrO8BbfO71yTju++sZ5RCV+xXZ6Jj0MghAiADRNY60lY0Laauwgv/DNN4f7RlHs0uZMbAAyC/+59K40JgLH0Ib4p5hGVh/b1Urlec4a2A80NFR4EJFQrhSQcfQOCBLD4MHMlEbybdt+eXr59tvvtp8/7Pf7EJTY1Iem0OIdg1VKOesUkTCi+Lo5oqg//t3ff3rdEumX3R7F3tvb1knPaFwvnrMsY3bGGMfgAdu2BUJBDNKgMXlZlgI980BuWQCYyWgvXG02eVE45GKdH4/N/tiVZdla13VdVa5Nnnvh5tDs9/vb25VSyovr+15Qkc7E2izLWAQIO9t7LysgQCXSW2udYyLSBpVSpigD83DOCZzk7ytwKEngFYwpMSkwpyLalWMNI5SqiWFWkJrE5PRAyy6lkDGJZWLnjJ9DzLxehC1EhFQKPof1lCiky0i1vfhrWvPlLcu+RCwmQp8k/shLxpC3YOaV9y4RptN1dv7EYMbTEhFRGZPxgLazzDbLfV4UWmvPLAOu+qGpy/DvrKZoPLyJ2TP+zHKNqDrbH49N72yRlaSxa3oG8V5QMQAJggh6YS2YVkYIbuG0SGsKkZMzmoPEXJWZnawsHnG6KEoalAzLhFCuAkLlsJERE9Hp98WRPvw0Bzmb85VTHlcx7U4++dYbAenS/qSfrzx/ctZXljB973k6PJ4bdeePmq8GRxdsaFyu8zFVjlDkrIjgIpzAZdxJqdVE7UuAZKGPKCaqIZzr3yK8OIf0pZHtiYi1LuQ+B5eecy4U6R41raGStYgAMhF5wd56AELSwqh1Zll+/Pjhv/qH369Wq67r2INz7nA4lEYVmQGCzJi2rZE9AyqkrmktZutN1Qn99OX55XVnNBzaDlXGpLNMmF2WVdaiUpbZE+rDsbGOgZx10jvHIkoZpVz05DnvlfdEuuu6u9vHslyhEY/q8/Out7y62VRVJYx5nocFNsfae3841EQkWgfrUQggqsoVYugBxCKy2+1CyGjf9y6EaYbq3trAGITiebA/MTMqcwKwREQmUhPA4yGH6nQ0KXG4jkoA0HVd6JiRQsXIUE8khd1ZuZLAJtMyN5MxaHsx2Df+QQEynxowi0jy
G6gZmznTipKwzMkK37jmSyNN1IeEsUVEwl9J4ZueQehe7VlEQjgshcakSogQHAzHEqpHiIDJJBSV9yyAQgpoKGYtCAweEUEYURQBCDghQHICACiAYymSsR9g0DBl8NGIiM6zuumcOBZEZQTIC5AyDCRADASogLSAhM+kx4LLNFq38RSHF0Ico54UZJwr4HhV1Tvz7YXoO8AznheNsYhojMkyNMZ45oBaI9sjIglsTyE5OGOuKSmc8JI527uyhDj/eRWVFKImX/8qul5S9eYKHF7w7XHyeXjj9UB/FUOBxheNAi+c+faGR4E/m1WqbIXKIGZEZFQoPMRTwJIcky5top+lJAURQ82tK8uf/JzwVzkbADMyiqM9I5WowsQCJ2jbPs9zY3LmU2QpEWWZJgpVocNzube+6y2gAlK951wbUur5+cX+/jc3dw9aOsu+t9Y67vu+zDNtqCiyptl77wFIae1761Exc1UV7V+7uukzA7vD8XZToclKPVgjiUgQlDGV1q+7rbWONDnHIY0hnn61XhGR63onNnSCvXsPWZbpKv/8FHx1UNe1MfnN3W1oPeG9d+yr1erY1EopUgBEbdsiosrycr0a98orpeq6vru7y4vKup11tdZalPbOKwEiQlLxqJmBGZRKth2mMm4ElcB7Qim4CZxf0XkgSfoMXQ9xdNlCJA1jtBuOPqDIehaowfz53kMsRZ3CoiRtM6MCF2evyFynAhNNa47b18clDE+r2ly689dleHFP4kYOh0pDsLmIoDpVWQuT8yOJ8YCgSVA5ZscsNEXUwZk1FsDnpDfHRKFJF+4cd90RAJBQgemcBQfGGABiBAUEihAVo0VUIYLm2tKuer8Wv7K4yXMdMdKoCS2LGoAxJstIayXOEhGF/PJx3acdkIXpTTjf+XYta0tXrAgpI0xJLczw+Rfvz1fvv7T5eNmkMd8WuWod/apcOInkDFrgRD97I5Ckn6/QILgc0jIpXpi4DKcvkjH6EZMAt/insixCo7jg4QtlaKIiEqiK957ZingAcM6DMGiNilzfFybTWreHY9O1Nzc32B9YwIdKUkikjTZg8kwkFFMFnSkCBvYEuF6vQ/oPEPXOm6I0WaWpDmpTmKfJsyzLdnUDjpE0aQm1OrVmYwyzlGXpWJxzwto519Qte1DKlKvNP//wAQCyLHvdH25v8tv7zefPn7t+8F9uNrdPXz6UZQlMRFS3jXc+y5RSIbRVhvAlgDzPQ9RMiHCJ7jEOpTnG/Qyan+ACq5tgWYCiIF68HRfCSFl+oISBSVvrTriZ3B96VqTZ2xOL+mScNR6StNgMIRH5xKiadsqehLSk7D0CaCgtEz4HW2q6U2H2IQASZoJbmmCYXp9HbC4yeViiXPOLb9l9Gbx3I/opCi5vAPBjatSQ4sbBKESKyLHzA0ZkHjhoY5oUCIEO5bbYs/PsUVFmdF8jswCKyYwJbTBFCBFCv+lphA5aNyI2ACBpkw376TwgkdKv2502GXQ9Ke15EPdprGzJo4QmCIMqFtLvIv+AMwiO2xUE9lj4KvqN2dsImimJDFfUtJT04DskIu992/boCNBhFl4cSn9JEHdJDe/FpHVt+jkeemoJCYbcdAnpbfHXiBu8RNBl1mEg5a9z5oFLbZNTOJ8YJ+bMKT5z0Vd3FVTPSqBFnWieGhWOD/D0ukHMEgbP3vvn5+eqqpqmUUqhMq21pEyif56eHxcyX0X63rSe8kS9jmgbQvWmQDLzpODJbQPB4AZJamDKrQPABNOlMYZ5yI4P5VdC1/JQHiyU4a2qKs9DB/N2COsoCsdApNc3hSJs2h4EP3z49D/89//d04//VLfdw7v3yuRfXrfv3r0T4hAy7UFA2CCGEOX1er3vXQgP6Z3zDIdjjUC3v7kZtGpjeIjtzkiZamV++OnDb3/zu6br67rO85IP9c1mXRTlp6fnvnOgbd02Qvj//d/+VyBDRI+P3/z44dPxeCyKQhB2u51SRtB7YST48voSgjBV
ZtjDy8vrt99+CyxPT0/f/fY3u+1h/+XLfVYI0Mvr7uHh4e7u7tC0TiATadsWcOjIppQSHByloVR0ogid6v12XZtlGbIPNuSA4N57pfRE7gmfA30IRxObBhMRioTjiI3mx9rcQ/QlJvkJzjldmjRwJur6ESQmP1UQkmJIawLWZwkZE04eq8tMMHMewLmIrrLk4TzDHPg/Z0yE7tOUJlwZB84nhMhLyQAUckLEC4uIA0ZGH9RzBg+MCJnJxUSpYsFaOJ8eXvDzpx9iiYT4K86MdVdE768K8pe+Oyff8bMkN8Sf0XOZ3jnoeMn9ARIXXzphD2cs8Or5TgA7qKM/A0ouj7QkSuSOk5m/RVX6lx6LCh+OocjBLp1enxQVS5PQYbLzM/icS5w4125n57to+4VzyeMSDZmsDhFFEFERnRGuSBbHKEFdFEXb1nXdBoZq1Fi/iYFEUFHv+Mvrq8kKZfLd7vl2swLKdnWXb8gLAipmQBQ3huyv1+vd55csM1prRqmqClHF1rghrEZELItvun3dhNRVZTIQco6JbCD3gX9Y70CIPSAqZufYWeu6rgNCleW99QJdWa6MUdQLAIWCi6HQmAgykXe99z7PSkTsWmuMub29j7sXTJ2IWJZlsAB7TjjCyXZzcQzS51gZ56ugHutQT4AkglwI5txsNs7xxMOVFiKZiK0RDBaBSkSMUgBAc29cWlFm/rX54ifvi7E3kzsnxsl5FZV5jtp/STIxR5jh8NJ/4c4x7P4UFUIIhMF7p5DU2KEDWYIcHQ6SmT0IKK3zIitKozOEhQo9MMPnM1UDlYSqK+cfgHT41Qt6QQYC0kA6aHiMQ8u5MH8Yu9MNoIMQ/10fcTITkX8OG4swEO6PqmFc72KeIpwLSZcmM9ES3nLE1219PwtO0l8lGZM0pvkGXhrpWbzln/zcf+fLj1uXtjyNf7oSufdV4Sw9lAmEpyO1E2DixkvJwvygJ8F7c9IRj4M9EGqlDJEOvVZwDHNXSgGwcz2iFEUWglyEvUIyKlSbGVUEnfXW/fThU1Zt8mJ1OLZeUGf5y3bvGFmIVO4ZBMY64AAKZbvdEpFWKjdmvV4DQFVVgKw0IihhRFTe+7Zt27bd7Q8mK0yWxTMKm2+9D9ph7ywAhEm2fdd0bdM0hCrUuW7bnkUAMdSnDrvhQWIpMeela63WWhvTdC1pdXt7S0RCyglsD0cyWajM2XY9jBSMQ7hA2E8kwWvwcCpu9zZkTO9PTzA00Aip9Pf39yFvPQJSBI9BEx1/TYXpifSZjvD2M+Vg7t5PASjicywwmtZXnLC9SfrO5LGLFGeCh/8led5knnOUm9588vfhBN8UikJRgDSaKAcjufeIqFVmdK6UAqDgIp6ADsz43KUxuRNGohO57IRxfvWZX92f+Z3zKxNtb/6cCK/pdycWwslkcCYKXNmrK/OHJXb1y3YDroqAb5z8/58MHI2xwfCTXh+K6WhNSQmhaJKKty2u+tJ1OD/olGmlezs5jgmTm3+eAHl6RpgEvAR2ElwwWZZFM1pcaa6VUUQKNQ014gOsMvPT84vJCl2Ude87D2SK7aF2Qh61yksWJGVEQKuMiLbb7Yef/squD84nozQKV1UVzJtZlimjUSsW7JwHJOu4KKrQ91UpZYwpy1JrfTwe275TSgkDEZVVxQJ13fZ933QdAGSmCI6kkJ4fLZM0NNDWREMU6+6w994XReWc6zobLoYKLMzcdd1ms3HjwITNLIqw85OVeRj/5a9w0lwezkluIJVE1DRNVVUxLys900vSG56zvTkwhNUtFMoLKi2fEYiU015DHjjX9mSssC4z/+eVB/2aOP3m8SaSF/Zx3BcYPZEkSZmAMZxQxmgMAkVAXpxSJs8LpZT3tu9c1zvvmYF4lKoYaHC9zFOmRpenJP3VTgroGPLHgL3ziKHO5NBh/LSuJD8vjNDHLuWaX90lSCg+hVylc5vkpZsHeKBT/BUCEhEIiaSVkRExRLQKwjR55m85X5kZ
2X7ZY+eSHM7qIc1f/Ytf9+uO+RxkzO882TPftu3pDSlSz8nN5AjOnjCDOFmKep28dC5MXJK9BmKKCgEBBsPsENegNUDe9711nYAH5LwwK1s45zx7JCTSREqc9c5zpvu+b61lQCB1aLrNze2hl86JAzJ5yeyMyRyDzgz2XB/22+32JitWZSm1C+XmNaE9dqHlnhdGUNbZ3jpSBhHJ6H19bNshriTT3nu/3e69k2q9bndHhmAsxbZtTWmDrQ4AyrJEss6xtbZYlSwcLDoj1pMgFUVxOByapgsOv9DVAUmXWWatraqqruuHx3d//vOfQ60WVGYIahIZlTyJvGARGCJNgOhIvjwkieNLcSeG9fHYkjCknWRZLknxs1MMC57BSQpsi0AVbtCLTAgRYSa2j7LSNKISRyUj3Bq1vRgFs8jwRJYDxNMevr/MDPXLxgSdTks+38R4A3NSBSCdPwsO9e0RAHmk8gAm1AMUEWt93zvnOF3fBIfnBCJ+giWKEGl6LAi0uK4pyTiXiRCv9VeLzsKzhyeRwJc4Smr0E5STkA6olAIeLb2jDk0htZ4Ez/k2nHOvlJ0koPImz+Vpkhco+yXYu2SouM5H0/P6P5f5TWS7KM53XRd9KgTE4w2XfHuXVHmYwUB6RvO149U4gCtfvHIu6W4TnIkjUbkMSlXQq5zvm6bpug4RqzLvOuh6J8AkLJIoMcp8eX59vN2QKZ5e9w8PD85DY53zmOW570Bnhe86Yww5m+e5UsTebjYbx7Zra4VjI3XP1lrrnfe+sy6E0nhAEdnvjiF1PSgl1tqub0K/3B8/Pu12h7y6RUTnXNfaPM+PXdf2XV4WpExdtyIyVPNCAWCkgSCISF5kQR0UESJtbdM2fZ7noRNFWZZ934fQ05DykW6sgMAbqHGgBkEzDi0drp9a1MXjKQfKQmNjHGZumma1Wm232wh+ABB7I4gIqZOfKNX8UkhI6YNGhHkkJ8AgJrwFvKYQPy5l4i+UpVDXSyEJMKPj8F9k0FKxKzmvMpMis0cPOGhnEpDdCwAIuyF9DUQRKESlNBFVVSVjedzeehZE0uFr1/Zz/uHC9UjOOOnMetr5eX7eyPbiE97SbwYT88XIBadq4rmF6nRxCPqgM7JIRAKEgswMFLP1EUOBdwH+OaB4BW4nG3Ja9S8FmEUFbi7hpglCk21cfuyFFVyaJ5/FoAyJepJ8hlEQCMURzBA+l7YiEhmT9mBkezIK+ld2YMLb8HJUy6XvAoCc71g8pgnRSJ4zVbXTzxNpDwAI1anMB2pCItIwtNrph3BB8HVdH48H59z9+lZrrXXXW88CyB7H3PasWv/44eO7uxud5Z8+/PX7b997gabre8d5pgUtaSX9oLvc3d0VRdG2bW6yPNPW2rzKu7YO8NA0jWMfLDd934coUxi67kGw77V1w8yosaqqsiy3261XH7/7bRmW1nRtWW7qvj8ej49VSbmuu77MV23LUaCFAe+CZ0oZYxik7TtEDL3Lg8k6yzL2AoLOuaIo6rrVOuMBZvDtRBgRldbBkDj0K74cYoZLEdHMzAjRqjymFd7v9/szqgIqkmuPnELyhErHEdmNClAxbg1PjJNTrBtH9FRN3HvzO+dAOYdvTBoFvCX+519uTMTwiWluPu1FtoQ4JjPIkHqtxjTtqqpC1ddQZS76EuZ69qJCMDnL+YfJBmLirJ088NKj3r5Ri78uLiTdzBQqUuhMKd0lJfXSAU3O6Ap+Lq76l8Hb5C3RXMPnzQrSxf6Ct/wLjUvnMmkwG2/4qm/vq29JL17f/MmJX8LBr17B2YgHMeIIOdfH0tshn6Ft26Zp8jxfl0VVVUWWxxSaPC+d91rr5+fn3fFIRLvdrul6L2J779gDUNCiYCTo1arMNB2Px/AEYBeyBoON0VobyqYEa2fXdTzKkcGb2Pd90L+11mVZKmP2+/3z83NomEBEXddlRY6IoQdL+NZqtYp1IZQywbcXNie+um1bEcnzfEwiakPwCBHV
dW2MCdRpAt5vBK2z1KarRs756YcTDG/HMfkteBxjJGdYWlhs+JmaHyIMTHSqlMWEi9oSAilRBCLo+fRdBhTUQCIip1w9pMHaCwJgQQTF08BmRSMIAgqLF+/CvFr2c+tt2PHl/Rib+WLyE2ZS4WnQRWEkJZ2TyLo0mhlPhQBOx3ySFh0jALJQSKYbbwvhmqkoxMFAqE14uPeeWYxSq7Ksqmq32znnnGfA0KZqeJIxebiz6yyRzrIMALxnoiFfLZ0nEYnvUrhJs+UGLVMhhP5xAEjoqIhL42AJ53C/AgBkFI7+DwQAnFZpPW0gC4fWmkAIgAzAwhgVCkQOCoYICEhQ18ZwOEbwEJRnzgujSMT3aBCAvbdkcgQFSgFpT4qZGVlxWIqDoaEXw9CC7lT9DyQJLZMQeXuWSAoAqAgRbejRjJh4SUGEc5VJ4rSfEMr5SPXFFBWdO6tVG58zhvueh2wAeJ/4v1MR8AKcC58Z+uJ3zXlw3dgPcPhFRhuViCAoILX33milVEZdA84pZCFkMr2F3mcNlMiZFQYCYI9sY9f4iWCRsswzDkOj32QAWSQ8FU9BHKyao6AtSqtRnk4cNgBKq7hX6QdFBpmHXRr7ErNwKkHK8KYwH+/7HhBu7+63260IffPt7758+ULaOOd655V1RNpklcDhWNd/+vB0f39/c/eYrx0e9nVd933XW84MtV2js+wf/8O//7vf/359/74FJJUdX18e/y//Td/sCq267vjwcPd8OHhCybOH3/z2r4f/+OVY/8N//d/+3/+n/5u5uXv/7nt4/ev6rmqdb8WWZXlo2119xKLo+vbhu9/8h3/+f96uHzo2nc2+bLfrcn3z7nbf8oeXD9nqHnV5aPtdbUFnXdf3TlCb27uHp9ctgvruu++Ox2NZ6eSwAqZkINI5AQBTrMv1XS9/rpt2YLqddd6XZVlUBbPP82y9Lo/H48hRTq3Kxvx1LSJBuBdgAKCBFgmLeBroRYhfZ++RlouEpbRroKhKKaXYutD4VOvMWp9l2Y8fP3zz3befn55Cv0wvnsGDjPZ2N4QsRf2Vkzr+URmIOM6mgFiTM+KqvMG0iIhpP6H5/XOtMzJkWJLU/paRkucrr4ZzfjbRfubSZXygSrp745JiPt+caCwKbUGIKFiQ0silxefMFaZ0XZHPpdNLCc6VzYkPTKPgAKZaFwBcMrr+iiPI10opGmtuKqWYT4ki4T8iIoSUCUzWKDJ0Uh3/tHDQ8DVxdREar3xlosDFc5xoFcmspimwk6NPdaAr713MJcDLvtjry4kIHKQdEfFO+OdgJSYW4+tAeP0hV5555VsTqoIXGhWlaw+FskLoZrQlxviLoGQYY/q+PxwOiJhl2Wq1yrKsPuyb41GR0Vq39TFTKsSAeO/73voMmFkASWtEFWqsE2Xe92VZeOso083hWBR533UaSRCMMZ7UoW+Px9pam+clZaXq8+OxVmT2+71S5un5SSmVFwYRbW+brtVat33vLMNYqbG31pi87RtnWWs6HA6h7vx87SchfsT6yBKC5y+YJeNWjILIKT4jPS/EU5sWWPKlz2H7q/B56c5oWUzfHuqyzin8mfR1KQg/hrRMpNTr0INjsTsACJF4LEzjRTkZlAdhltRZ8Y5LOC9v8Fpf2hc82S5OU01dXCkiXWF7sGTSTJt4vYVt45iDmed5URTGGOdGr3USIB4FE0SVTnsyQ5xFHiYFNc/YWDql+QxTaIgq//xdABBKNC2uazLP6yPlB3DOwgN90VojDvKB1to7AKLwD4O77xxoJ+xtUOaSHNCwGidnIRgAo2D6hvEWCJzj0tySeSZJXJVsLj3z+tzOkOVrHHp6HCwx8VRCr2kvABBs7+evZAgK/DgBmXlG4TJaXVrgomwnSZhSuH4pZRATISONsVq0uw4kiEhEAksoisJ7H6JXgnoBg5/PIWJZlk1nY+vm2/s7rbUm5fpeKUVaO+ckI2vtu2/e9X1/2O7satM7z4DKZETQ2t45
p6vK9fvVaqWU8uwPu11uMts2WmFWGM/cOcsCChSg6azY5rjarD98ekJUve3zPH99ff3m3buszJXOul19ONRZWR13NQc1Rw3C9GpzU7d9WGPoDjGJUZjsZyQUMYmIxg5NgUbx2CQvll+Yc76pkHd+rJIU/MIl3978rGFJVI1kPLDkCYBFeI5FjOMzKSnxeMlxptPFnMHiGKKyCMRD+YyB2w9kKN6Q5mSk70635hIf/rnMb3ETF+cMMxSNlWwmuBe/cgnV4TL1x7FbdNyK6OCNNrG4XRMAujTbdD6EZ2ZrnOUUp5s8WfIksXfxjVcSVNIHvoWRyHm4c7rJwzSAkz1ZzOyGlM0vQMv0fgzGwBR/4KpQNcHnry4tffipHOts5+escfH5kyOGr8H/2y0l83clxxceEto6gQA64WPbhBgERJQxjkhEJjsMS79eWoVcCEtZFBTeKPguYgrMQoeSYxqE8hC6WZZlrH6CYxusvu8DU8zzXFCFuLMgB4TcvtAnaPA7oBaRar16+vTx5eWlfczr3hoEUlpQvHMiGJxSRaYf7m9//PjJdpl4q/MsN/rm/mbX7PdNW6xv1rd3r617/rL/9OX5X/3Dv2maPQBsbm6BUEgYfJaRELZ9vz8cN3f3RcGCClEx90FRC4K1MSbP8/1+H0Jv0uWncqeMWddpUnXs0BRoFIxJBTF5LvK/9KBPZnQYU7kSlSD6uYnIXW0SsMj8JjeEhu+XiEDaKzHeM2f/E4CZdFdfIP0TYB1+vQSfiHC+8kvk9avw/fYx4XxRpZgwucjqFiXWS+wz1Roh4YVXuroHmi4ioep5mm6Z/hxAZEbWL4nPw6vTOQfleyBVMZElKbcPQEubgF+zY18427cyPFgi7jBj4bFxyUDNz4msjKgGS3LAFbFjznLeKBXNZ754uDj6XFNykOaNnDHC89fOzQnwNnSIwu8vsCCdvzEGeMadZM/cNF3vWVD9LLBYnP8Erc7ftQAVv2A5KUONKh0siXoKh4Y4fd9nWZbneawCGo7SORfiRwBAa11VVd/31tqmaV5fX/u+d73FMZ7e5BlpFdpi1W2zPzb7pq37vtLKA+rQfjzx9f72+2//9Od/Fl6J6+43j7lSWVF8en1ue39bVEpnx7rbH9q+k67ltnFEdP/bx9fX16qqLHcm113veuebpinWt6vNxgs79nXbW88yGFSH4pkxMWPxgMIuBY4e/dCBr0+qjuDgdDj5WSf7Pz9fib5bmpkl5YxxpGR2gkHx7ZP3DkbXqzFQ8gbb+PAW7+CKkRPxjJNPmV+yC2fbce7/lMT7Eh8+N0imn38uR4xEXBJT73WyLudeligHTfYRZn2U3kL3Uwzs+36uvKeTJCKRZXlkwvYWz/UKsZ5zmnhbCoWXRJzFfZ5s4Ff538jLztSvVBxRFLp5YQSeS+cLMwobGedkByag9VWgmhDKr2qH85sn70o/zA9orq5NQO7SeycHB2/AlDlyDdgHw24jkDA49tb7ummt8wAKUQlLiANa1NdTGJhfTKXs+VSXuP5SMt/XNn++0tSIN5FTQyg8EWVZRkRt26YGj6BMBDNaiFoN/j/n3G6367qOAEO3VGt9Va29swB4bLqms4K0PdZtZzViCPXUWZ4xEGlEZOdub28JkJ1dV+X7h3uD4IC9MGjFCK/bw/b1mJnVu/d3AkbpXICLav3p81+LVe69FQXb7YEFGGl3qN9/f/uyb/reDlZZnW/3x2CeDT0FJ+L1/PSD8SlWBE1VwwlAzkXMs89wduiRfkLyiqA1XrF8LIITzIgki3jvkSj6GlNKskjHYAn9h+NmhqjtTZAEz4NWFnYwfL58z6mezQzQMTED4tssS1dGjHWU88ps4fkTW+JExJgfQzzvxfpyk4CUxcE8bYMZHhU7UaTHg4iLRrz0CfOTu3IuEyCeAPTi5Cf3XBpvYfmL85k8JA0RCrWfPI/eU0RMfHtESHgxwGY07aYijogI6VPsa7RMvHGBb1ldNIxDIuTO
jyZB2+XNSYWbt+xnpFNTWPUXIj8vGDk1KWEfrwqCF/BO2r7zAoCnuDuSs4iFCQTOFz7n6HAZbBYltktwe/bF5EUpRi+uGhEFlfWONAkqVMYL1m2PyoQ8IxCwXnrHnfWISJoRO6015jkROfbOOYUDwbXOrW7Xu+cvnuF1t/NesqI6Nm1nfZ75zvm8MFlRBG0ryzI+dASwrgrbtd+9//budiPs6r73SB5pX3eHhhnUan1rPbaNE8D1emNM3llXlivr6t51+2NdlFVZrV8P9e/K6vWvT54hK8pDdwj+vBA6G3TZ0P0gPawJP5Mhwpyj9Bkt25NjijkDE2GOBydCUrlitJ0iIqD4cSxKXZcQ4fRrMtsRxwe+mjLRyEeAl0Eohc8UXyjU1VoExEtDLo+zhyQXJ3+9Rgx+0YgHlr4otXik2mequS8uAc9DgKJuGs84VeEXR3zRSNZ15MHxCTwr/50Cx2RpEya6eCdcYGaXROPFeJ+387+/5fhSs3ts7gVJ3uFkpFtxZSYTUji5cmU+bzmIyZ/iSOEqhZxLcD4Xtiaru65tL77iqxt+CTAEQYBYUBidQO8CdqAgeJDgav3qPsAMm+Cce6XvfSOETAByMqI1b+KlvkKaAgnGsd5jcBRhIpLGXOSYkhgKl5RlGSorEREQORZjMuvZMRyODQPkedG1vRfwws57EQmxWjJUPoOu66qi7Nvum8f7oii8t53nXqB1vD3Wbe/K6kZn1ev2+LLbN50vVxulTZaXWVlkRVn31jouq3W1vrG91yZv2w4UrVYrRCSdNW0PY7MCY8wcrxeRJf01UsX5DZeCISenPyenExI3gZPFn1dgfoLOUzC+wIzmvCkMHRNR41whybcd8tIS08HwuDSW9Ny2zrELO6KIBKO51qeG2qO8wMEePbEpT4ISJz8vRXZNfC3xCVGKofP62vGv6a8iElP3ol8XzjWACVWdHH/8kzImLhNGnSZAZNqKTMZ4KvYn+Ft8BSRcXERIh/I8o86Nwz+dmTMoHJ1/Mla9w5k9E8+F6zgxHNXxSNDn+xBhQPg051OX2PNnYpIr2jRNEEuZmUjH7lwatZASJCAiQIUy9N2j4LcYHO8AoLUK9pwZhT3JuamsKqkkdm4QQ0SFA3zGUFucRQnNifIkfniCbKmYMv814gucp11eT++Nb4z9lmEMSeClWvDx+SmIiohnhwIMYow5Ho+o8tXN5j9/ePr89Fytbw6WnR96K+bGFEV+PNYpxEYUC/LcHB1SdDsRlmT34pTGCZ/xV0ziMxf3IQ3qi88XkQAScm6zERHHXFSldW61WlXr1X6/d+wNZba3RVGwtfvj4VAfldEhmztQrYy0UopAvPcsSKQRBcWz4O//7o9fPv6E4u/v7zUpcc+r1arv903Xrlx2bOpQ3SVwaIWYZdn79++///a7f/+P/+5f/6s/Ng7qngXNbndgzkiXL6+7z1+eHx8ftcnuHh4/Pn12Ap8+f/nd779zrl/d3L5s91lRfPv9b45NV1ZryLK7h28+Pu9CZ8Su61SWVZtN7z2MMiUk0kP41Tm3Wq0A4Pn5+ebmpm3b4NiTc+uXJHGYi3SP04ZEMrixArxprYVdbFsYXopKR9djfFoE4/SwMHGrRZREHIoGnwmXY4wSACCfsYBItUK18Ql8MrNCBwA6zeNOXx9Ue0jUlAFYLxCCMzBNID4F8TmRhSVGMkGD65QoSbCf2nMny8arVqw0YGmytF93XJlGXAjOTNWLX8QLOtD5u04XUz4UxySBbxrE/rUdmGDXadpjQMeEoilQsar92apTY1pUGsbKZyG0eoT7U8PJ8+1a9kPDOXBe2rQJw37jenHmpVg81vnZzcnNHFN+wcAlBWvyIgQBYAZCUIDEgr1jDwiogIgEFSDzcozAnJTMF3tpr94+88lzrty2+MUzWQRP96QEPUoPcu7ICMAJEJo9B0rFLGI0CSjHgtY7FmBxDFrj7e0NkISkwACoAqi0CmofEa1Wq75t+t4Z
Y+q6xVyZ4ubYNJ8+vzivyur20NQAoIxWikQESffWv2wP+WqXGeMcepHd7lBUNx8/PymjW+sFqKiqtnV+1FzDdqU9vecjarox/GpCbOcXJ38CAJEpOME5DKfqI8DlonrnKmb8qS7X1MXErDp5TprNHGnaHOMAILQ81HzeVOKUzzjrnDBMNAVKPHuonM9miDI6b78J52wyBbiI+RNQviJ6Q+K9S69MeN5E7v7qMczn+bcPnLkVceZQnOuOS/O/5plbJAopz5sQ96modf7Ft6xrokyfljAs9oyyEJHCod3JIvtPJjPyswH2Bs5Hk8rXZ08QPG93HBXf04aP11N2lYpWEyJ46Sgnh7LIVlPtecKkIaERqXP6OrDNOeUlGJtLe6e3C59ChxQhovPctL113okCAEYSCGSRvZsGjl9ne5M1wrl6d31L54h/ZfMXv75Ix0FIGAk1gnKW2YMiA0KKCIS8E2FEUAih4g+aLBSDHZQSYSRyzrnQDcRa661YzxTojDIP93fe26ZphFco7K0L+YAhtQBI3dzcHQSapslMsdsdONdFUbS2/fD5WeuCKeu6Rshnha7WZVYYtGydP9b9bt8+3JVt1wPQ/tjcPX7/f/zTnx6/+b6znfey2dx63je2j6EMGCptnjcdS4E8VhWPkS+LjGERtJIrKDIEdQRtD5MjiEU5rhzWHInOYPsyaMU7JYmfkHP/+hw4J3TJ9RZiBwY45woTII00a0JoJmh8/qrBzKhGo6jMvJewJKnNSVL4ecmdxrPu5BMcw3Or6aWTSBMb4NfmeZOZzDdkKlssWeEXvxgvXgriShnc4gPnzHhOMb8KvktS4eAAnxSGEJaY4BGE8dO0l5JbcYjaOmWhwgJzDScLESEvzXMyJppovBKL510SKS4ylSX/a7xhcvrjFi1X/5pPdb6EOTe5wpZwNErToIgTETGqzvX747Hp+l6ME2JkBgYR57wwIJnF518SZGXJxnsFfy+NS/swEbPgMkanxCRYF4JpPXwOhtwoS8W1EIXW5KCAUWk91k4LFLmzFkUESFCAVJZlq5Xpu6Y5HojeE5FnqwgRBiMbEZVV1XVd3bRKmd3L6/Go3r0r69Yf6v7uYV1UuTJkHLK49aYKZWK6zgqozoJn1fRWkTF5oTJT123RtI7ZelcUhT42IZVQxlKWxhheAhIYSUSs2jUXziZ7vkiZIdogh7SEQfeKX49bilc9BTBq279gpDA/0RlgRvzn3x0yVc58e8l3OFEnTzwPgJ07bRCe3g0xGA9RktTI2M1kIllPoPYXM5hFbgoJvk3+emm7F5kiXrWL/rIxX2+kI3EssiiMRoPUuBfnH+85h4NgJ5yzPU4ami9O8u3S98kkwCcEmOBVuszo52NmUKMvJ92fcHYAocImEQWm6RMR6m/Z/GFWo+ErjROZ7PmEiEPCwybItvg6DqbEc59fOn7WQubK4mRd858wE2viwuOVvnPb7dZ7L6gRQ0F7IUQQ8N5rMikwzHnqXAJYBKSvnkgKJNfFrMkDJwg1xRdCFtFaCWFrey9MRrOIUtT3XdN3lj0jEGHonSmhViohoeBYQggRQ5iC9wLsiEK7G9BZnum+axvrOqNIKwxZc8FJ3HWdZxBBbfLeCbHvnINs1TSy33fOQ14Uq/V6dVOSAWubotBO3P543O4bx9Q0fGx80/Z5rtY3d9Z7ZfLddp+v1n3fC1CYW+qDv+IejrEUqVNjkectQtE0GHDAdBaRJNryzEH+dmA+A4OvIQQmNhsR0epU5SrV+Od0fhRrAC7125tfWZY0z4EVQgiMiB/9nDD260oLYsF5JsAVEg8J0l6UHfxC2Xi4QFCuYOAi1fsVB37NyCnnZvFLbHuyP5MbJmxsrjRfUUrm+7D4isUtXVxUKn+l4BgDXEVC5M04t5mRM759bqwfpzTztMHsnsS7I7OZTyr1zLdusjkTyrKo4qSrHoxCM/Z/iXl8lRFeumECBouAdzY9kFDUu+nal+1eUJFWAJpY
BERhKEa9bASbQ90VYHvLoq4vZ/6nS4iQjuHVo7BLREHSDxFVABCiV0LcRIyjGQLxoyaAg1QUehQIO3aiNAFg7zwzK0RxloS1UYiiQAgldLY7HGovan9sjDJO3LFpSZlyfb/b7V72dYj1QIKiyJRCADCZbtvaWuu8AOq+98y66yyqvFib/aHOinx3aFRZdtb3tidSIfs+drbz3usL1hdIwoW+CnVzSjhB5xG+Q5UfgCQmdiKcXTJaTspsJu+6aI1L7EYnJQG1ltmA88SM+ZLPfHvn6wTmoTh/+rgEZ8annEnKMObkDXsUjiQ2s8Dz0Mr4zMlf02OY2/0WMWFCyhcQ4KoMMgkNh1+Eq9cHXjByTmY+l6zPyMoFl5jMTKbp3sA5Q4IZmfjqhK+MlPTPFjUVQUKp0jzPleJQk1NEAATPldfJ82MyECIiqgS10ndNlx/Z3pWFxA1flPwmdDzeP2Fvk5fOheh0iy7t+XWou6wsTlH60oFO+FCw5DBD19mhnxkEixkLSGgTMpn/BJUWXzQvlTdRTL+66uti1mQVE9I8ic8aoSJ08EDnfIgKRvQi4Jzve+t9MHvQiCk+yFkQlDZEUEPYoVLKsQsiuLeubdu67TKTK4UmU5ki5KEbQO+5bbr9fk/56vV1+/j4qJTaH5v729uO9YdPz/tDk5cr1KpzHSOjFmPMsT321gMYlZm2967notq0H11W8Ha77XoOfQm6znrvj8djXlVFVXqG0J8okNkQ1TIBQjiPeJDzQPfU1RdZYzRKp/SfmdNedQN2j+LgJBdiuEddCEVMnDJnSMHX8DQYihhPEctMZ0GXEaIW1S0AsDaJ5JzAXPr6uH0Lcu6CgWL8H2IIbY8iQKQXE3EgJZdxv+a044qQO6c4S0LEV4j4V3nArzjwDYaaRXk2ru76blx6Ps6sVXNOHyM/38j1F7W90xvhjD2ISCgQZYxR5DBpajN3SKYRYjzWDAzJDnDOiiBhgQvp1VfZHichV5NFneZ22RV6iWUmf12WutITfyOIpg954+l8ld8zBNXEeuGwF957wNAPQ8K5zJW2+ULiu1LWON/Mt48rotj8mXPBbn5kgTRHA7skjqi03HxwChARSGi25Qh1LODp3fBY733Hruu6LNsYYzKtiYjFhSbdItL3fVO3hS6Ox+Pj46Mi3XTtN9k3n1/3nz5/AXSPj3ehhKb3VoC9p5eXL4dj9/jwHRH1vfPOFUXlvReBl5cXBu2cy/K8bjtmqLu2WK2MyVvV41gF4kofxFREiMwplfUnMvEEBeRUOn9eDuLMQTN56ZXzjXw3TcFkXl7CeaDi6fmTkMb45xCzKmMh8ng9sHOtTuGZAADCwsIRtwNTAhpa6gWREBBQBECUsLCAYwAoSDGzc5a9D2kPpAkRxXYi3otXnJHWqBSgAkQenVGECELAyEN33bMooGGJM5J0UhMZcGxZMygNkYyKiDAmgB5Z94TTp2g2Z7TpDangM/kizKhY+rSAWqkndZAGlEZSSqE4FBEHHP5po2H05J3mAICgFqcdns8MYelju74pY5uDXUpHRIQ5mkEUogrzjZEp8SgSyPOIqEghIohnZvCnXMxwXSMpQINkSK2L5n4Fvt+LyfM8F89aZ16ECQQ9K9bokTQyEGoMgYUiAEykQ9fegOEDKNMgWsnYf0tgdEKf5+QMCDam+2itEZCZVW7G5QggMAuzFxHbD/08iQgkoGXIZJUo5MKIOsERhsOOBwgEQAghO4H5yfAGAM8B7BOmMsboJq7ZybnMT3CA0KSPdloaQ4/GnwlkGl15a20P66pAYO+EAJ+fn8q8aEWYBSjo4N6TqEx7L2cu/PGfBI8GIieZXpHERJthlCpcEmF4TlXP0pNSKjxXNDFWFI0+pzH+YFAFxrSEePoZkc4y7vveuYxIIdqmIRFN1B2P3PcZEYgokWxIq115wKABCgoSOmbX9zrPnHMh3xEEiaTI8jzbqPVv/+mnH0qdfX7p/+Ffff/hhx9u7siU
K2utQ2FSerXpTfV8rP+y7fHW/vsfPpqH+y9fviivb6vfPLdtC8p773tnnLndfONMVd5R4QRY/vTTP1V333Yi24MtS31z9/j50xetFDj7cHP7+vQkgHmeE+q+cwFQx0gvmVjvU42HKPrDoHWMiEhaUEC8jAa8oDgiMI/oLMwgUho9RLN6H6hLKI5fFtlutws9bGN5awDIlEZEhSEojQIMQUiPEUEEEGbPsd8KaoUpQR/nbLQWEdv3boQuJUBI3tlUlAGRUEEmMyfhmPnUEr0oKwDQE90L/gVGjG0ZDmBQe1Mp+OJ35Q3e0X+hgYnG+Yv3Zy43XfrT3z7b9MN1zXVRm/lbJjNAUTKZ8e3TGqQhsDvN21tcxemZCeeekNRfZd+iteeSWBAnc/2Nkz08oZWc3fAvNOZzY54af+awF2hT27bW2rPouMQT+kbYmCtYX71/IiP+Mu1wonfG2UYsCF69uY4eXGJR1YuSqKKTWCkxi+bylPxAUcHyqSiXiGy326ZpyhsJWfB937dtu9/vAzsJGdyhN8vYaZ2MMVVVhSgVpZTSRESu68P94bFKKSB0zqHJEDH05o02s/QUUjrwswApZZOpghjekrYOxbHwzfzgftZ70zv5vPR/fPJibMfc6BKBah6JHQPIITYemnOXuRnh7bRmrjnFBEljjIJTiv7EKjJZagrEl5jHrzXmNqj5e+EN5G/yBFjifIvGor9xpGCRcpr5bBdI86+0gQBTuB9kvMSOsVqtyrIMytbpzoVgkxNUhO/GwivXA6OvTm+651EdmVQIgnO0n6iPi1rI/EWLv8aLv+LRLzKMNC9CksIRgkMeSLhirQ0Fl0WySTBBeiyLs025S8Lmv8K9JngxScScc+g3jrQUUWrXCjnjE5+TiLRt2/d9yvbCLgWZfHKsw5yTPYj4td3v9vURCx0jGBFJkanruuu63lkn3DTNfr+31oY3AkDsPh1qmvR9z+zzPA8TOxwO3nttVNu2XWfzPNdaDy0gjGGQvu/LvICEW8PI4+P2TgjCla1LxIXTyY5qokStPbyltX1q60odWJM6nPMwMThnhxN6G376GdtbpKKwhGipBJAmzOCsVrOePPQ6d7myfanEkSpJE5teUKhPAnu6tnDzuYwZP/wLaaKX4CB+XqzecmUyqXR8RaSFX1v8nwBTYitfiGn+VQTtpa/MHauJgZEoFK0oikJrjV5Ofw5WK1xuSilj3tW/BBikwVYT2XA+DQAIrQlSprIYOnv9oH/FVczPLjLpufQZ8EvGUoIAGPsMiDFncWrJ50XwuKwcT3E/vX+OF4uThLcJN5Pzmmh74UqMGZ6IJl3XxagHOEefS4i5eGqfP38+bPc5VgCglNH6VPSRmZumadu26/nl5SVMJmifIauvaZqu6wiFmeu6LooiMOO6roNFZLfbWSchUaHrOmbWWntha2117nNJjuC0fFgqsnh9P+PTRinzFBETMCXUo4kYDUn0Bs9q908Oca7GXfo8h+cUkOaaUirbxXsm9WJS3kTpxn31jN84JtOSpIJzGuQa7/mqRvJfcswR4BdMZlGimaz3Z9kB3vLG+MC5rJqeyCUW/ou3Ws4HzKA5jmjk/KooOhEh08fK18pXXpnkRA+eYEu6D3ORMz5hvpMTkrooL85x/lcc80OcINoEzWNDNWY+HA7zcO4r9OjK0lJ8WSQsc7y4DpzX35s+IdUtUnibCExxhqHnXKoHLK46hZY4yfRPu+2+aZqgNWqtUY2NMrQSBNs79tA0zfF4jM2Pwp0AEFS9wJgDRwkJ9Yi4Xq9XqxUzR59oWMIkzHIOh5hoe9Hj9VWom59CfGPKQiRxmk7kjLQ4y/yZcJnDzcVNnA24QLhSjpjisowmogn3iQzo/wcwr8OUs0LeIwAAAABJRU5ErkJggg==", "text/plain": [ "RGB4 Images.Image with:\n", - " data: 256x256 
Array{ColorTypes.RGB4{FixedPointNumbers.UfixedBase{UInt8,8}},2}\n", + " data: 594x629 Array{ColorTypes.RGB4{FixedPointNumbers.UfixedBase{UInt8,8}},2}\n", " properties:\n", " imagedescription: \n", " spatialorder: x y\n", " pixelspacing: 1 1" ] }, - "execution_count": 1, + "execution_count": 106, "metadata": {}, "output_type": "execute_result" } ], "source": [ "using Images, Colors\n", - "img = imread(\"cat.png\")" + "#img = imread(\"cat.png\")\n", + "img = imread(\"/Users/chiyuan/Desktop/bird.png\")\n", + "img = imread(\"/Users/chiyuan/Desktop/bill-gates.jpg\")\n", + "#img = imread(\"/Users/chiyuan/Desktop/dog.jpg\")\n", + "#img = imread(\"/Users/chiyuan/Desktop/schoolbus.jpg\")\n", + "#img = imread(\"/Users/chiyuan/Desktop/horse-face-2.jpg\")" ] }, { @@ -58,7 +63,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 107, "metadata": { "collapsed": false }, @@ -68,7 +73,7 @@ "output_type": "stream", "text": [ "Image resized to (224,224,3)\n", - "('Original Image Shape: ', (256, 256, 3))\n" + "('Original Image Shape: ', (629, 594, 3))\n" ] } ], @@ -93,7 +98,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 108, "metadata": { "collapsed": false }, @@ -127,7 +132,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 109, "metadata": { "collapsed": false }, @@ -156,7 +161,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 110, "metadata": { "collapsed": false }, @@ -165,7 +170,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "Egyptian cat\n" + "wig\n" ] } ], @@ -186,7 +191,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 111, "metadata": { "collapsed": false }, @@ -195,11 +200,11 @@ "name": "stdout", "output_type": "stream", "text": [ - " Egyptian cat w.p. 0.415571\n", - " tabby, tabby cat w.p. 0.230575\n", - " lynx, catamount w.p. 0.153771\n", - " tiger cat w.p. 0.129155\n", - " Persian cat w.p. 0.053005\n" + " wig w.p. 
0.430989\n", + " feather boa, boa w.p. 0.127852\n", + " fur coat w.p. 0.112760\n", + " hair spray w.p. 0.055442\n", + " cloak w.p. 0.019644\n" ] } ], @@ -213,6 +218,15 @@ " println(mx.format(\"{1:>18} w.p. {2:4f}\", l, p))\n", "end" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/src/callback.jl b/src/callback.jl index ea890ab2c269..9f3d85b576ff 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -11,11 +11,11 @@ Callbacks in training abstract AbstractCallback #=doc -.. class:: AbstractIterationCallback +.. class:: AbstractBatchCallback Abstract type of callbacks to be called every mini-batch. =# -abstract AbstractIterationCallback <: AbstractCallback +abstract AbstractBatchCallback <: AbstractCallback #=doc .. class:: AbstractEpochCallback @@ -24,33 +24,26 @@ abstract AbstractIterationCallback <: AbstractCallback =# abstract AbstractEpochCallback <: AbstractCallback -type CallbackParams - batch_size :: Int - curr_epoch :: Int - curr_iter :: Int -end -CallbackParams(batch_size::Int) = CallbackParams(batch_size, 0, 0) - -type IterationCallback <: AbstractIterationCallback +type BatchCallback <: AbstractBatchCallback frequency :: Int call_on_0 :: Bool callback :: Function end #=doc -.. function:: every_n_iter(callback :: Function, n :: Int; call_on_0 = false) +.. function:: every_n_batch(callback :: Function, n :: Int; call_on_0 = false) A convenient function to construct a callback that runs every ``n`` mini-batches. :param Int call_on_0: keyword argument, default false. Unless set, the callback - will **not** be run on iteration 0. + will **not** be run on batch 0. For example, the :func:`speedometer` callback is defined as .. 
code-block:: julia - every_n_iter(frequency, call_on_0=true) do param :: CallbackParams - if param.curr_iter == 0 + every_n_iter(frequency, call_on_0=true) do state :: OptimizationState + if state.curr_batch == 0 # reset timer else # compute and print speed @@ -59,23 +52,23 @@ end :seealso: :func:`every_n_epoch`, :func:`speedometer`. =# -function every_n_iter(callback :: Function, n :: Int; call_on_0 :: Bool = false) - IterationCallback(n, call_on_0, callback) +function every_n_batch(callback :: Function, n :: Int; call_on_0 :: Bool = false) + BatchCallback(n, call_on_0, callback) end -function Base.call(cb :: IterationCallback, param :: CallbackParams) - if param.curr_iter == 0 +function Base.call(cb :: BatchCallback, state :: OptimizationState) + if state.curr_batch == 0 if cb.call_on_0 - cb.callback(param) + cb.callback(state) end - elseif param.curr_iter % cb.frequency == 0 - cb.callback(param) + elseif state.curr_batch % cb.frequency == 0 + cb.callback(state) end end #=doc .. function:: speedometer(; frequency=50) - Create an :class:`AbstractIterationCallback` that measure the training speed + Create an :class:`AbstractBatchCallback` that measure the training speed (number of samples processed per second) every k mini-batches. :param Int frequency: keyword argument, default 50. 
The frequency (number of @@ -83,12 +76,12 @@ end =# function speedometer(;frequency::Int=50) cl_tic = 0 - every_n_iter(frequency, call_on_0=true) do param :: CallbackParams - if param.curr_iter == 0 + every_n_batch(frequency, call_on_0=true) do state :: OptimizationState + if state.curr_batch == 0 # reset timer cl_tic = time() else - speed = frequency * param.batch_size / (time() - cl_tic) + speed = frequency * state.batch_size / (time() - cl_tic) info(format("Speed: {1:>6.2f} samples/sec", speed)) cl_tic = time() end @@ -117,13 +110,13 @@ end function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end -function Base.call(cb :: EpochCallback, model :: Any, param :: CallbackParams) - if param.curr_epoch == 0 +function Base.call(cb :: EpochCallback, model :: Any, state :: OptimizationState) + if state.curr_epoch == 0 if cb.call_on_0 - cb.callback(model, param) + cb.callback(model, state) end - elseif param.curr_epoch % cb.frequency == 0 - cb.callback(model, param) + elseif state.curr_epoch % cb.frequency == 0 + cb.callback(model, state) end end @@ -143,7 +136,7 @@ end =# function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) mkpath(dirname(prefix)) - every_n_epoch(frequency, call_on_0=save_epoch_0) do model, param - save_checkpoint(model, prefix, param) + every_n_epoch(frequency, call_on_0=save_epoch_0) do model, state + save_checkpoint(model, prefix, state) end end diff --git a/src/model.jl b/src/model.jl index f28fd0d9f5af..5b42278f8db2 100644 --- a/src/model.jl +++ b/src/model.jl @@ -263,14 +263,15 @@ end callbacks :: Vector{AbstractCallback} = AbstractCallback[], ) -function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, param::CallbackParams, type_filter::Type) +function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, + state::OptimizationState, type_filter::Type) map(callbacks) do cb if isa(cb, type_filter) if 
type_filter == AbstractEpochCallback # epoch callback have extra access to the model object - cb(self, param) + cb(self, state) else - cb(param) + cb(state) end end end @@ -355,8 +356,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra grad_arrays = [NDArray[exec.grad_arrays[i] for exec in train_execs] for i in param_idx] aux_arrays = [NDArray[exec.aux_arrays[i] for exec in train_execs] for i = 1:length(aux_names)] - optimizer.batch_size = batch_size - cb_param = CallbackParams(batch_size) + op_state = OptimizationState(batch_size) + optimizer.state = op_state if !update_on_kvstore updater = get_updater(optimizer) @@ -388,18 +389,18 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra cpu_label_arrays_full_slice = [SlicedNDArray[(1:batch_size, x)] for x in cpu_label_arrays] # invoke callbacks on epoch 0 - _invoke_callbacks(self, opts.callbacks, cb_param, AbstractEpochCallback) + _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) # now start training... 
for i_epoch = 1:opts.n_epoch time_start = time() reset!(opts.eval_metric) - cb_param.curr_epoch = i_epoch - cb_param.curr_iter = 0 + op_state.curr_epoch = i_epoch + op_state.curr_batch = 0 # invoke callbacks on iteration 0 - _invoke_callbacks(self, opts.callbacks, cb_param, AbstractIterationCallback) + _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) for batch in data load_data!(batch, data_arrays) @@ -417,6 +418,10 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra backward(texec) end + op_state.curr_iter += 1 + op_state.curr_batch += 1 + optimizer.state = op_state + # update parameters for idx = 1:length(param_names) # gradient synchronization @@ -445,8 +450,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end # invoke callbacks after finishing each iteration - _invoke_callbacks(self, opts.callbacks, cb_param, AbstractIterationCallback) - cb_param.curr_iter += 1 + _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) # update evaluation metric on training set load_label!(batch, cpu_label_arrays_full_slice) @@ -503,12 +507,12 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra copy!(self.aux_params[name], aux_avg) end end - _invoke_callbacks(self, opts.callbacks, cb_param, AbstractEpochCallback) + _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) end # end of all epochs end -function save_checkpoint(self :: FeedForward, prefix :: AbstractString, param :: CallbackParams) - save_checkpoint(self.arch, self.arg_params, self.aux_params, prefix, param.curr_epoch) +function save_checkpoint(self :: FeedForward, prefix :: AbstractString, state :: OptimizationState) + save_checkpoint(self.arch, self.arg_params, self.aux_params, prefix, state.curr_epoch) end function save_checkpoint(sym :: Symbol, arg_params :: Dict{Base.Symbol, NDArray}, aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, 
epoch :: Int) diff --git a/src/optimizer.jl b/src/optimizer.jl index c2c8ba4c28ef..07950d82a221 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -1,22 +1,158 @@ +#=doc +Optimizers +========== +=# + + +#=doc +.. class:: AbstractOptimizer + + Base type for all optimizers. +=# abstract AbstractOptimizer +#=doc +.. class:: AbstractLearningRateScheduler + + Base type for all learning rate scheduler. +=# abstract AbstractLearningRateScheduler + +#=doc +.. class:: AbstractMomentumScheduler + + Base type for all momentum scheduler. +=# abstract AbstractMomentumScheduler -type FixedLearningRateScheduler <: AbstractLearningRateScheduler + + +#=doc +.. class:: OptimizationState + + .. attribute:: batch_size + + The size of the mini-batch used in stochastic training. + + .. attribute:: curr_epoch + + The current epoch count. Epoch 0 means no training yet, during the first + pass through the data, the epoch will be 1; during the second pass, the + epoch count will be 1, and so on. + + .. attribute:: curr_batch + + The current mini-batch count. The batch count is reset during every epoch. + The batch count 0 means the beginning of each epoch, with no mini-batch + seen yet. During the first mini-batch, the mini-batch count will be 1. + + .. attribute:: curr_iter + + The current iteration count. One iteration corresponds to one mini-batch, + but unlike the mini-batch count, the iteration count does **not** reset + in each epoch. So it track the *total* number of mini-batches seen so far. +=# +type OptimizationState + batch_size :: Int + curr_epoch :: Int + curr_batch :: Int + curr_iter :: Int +end +OptimizationState(batch_size::Int) = OptimizationState(batch_size, 0, 0, 0) + + +#=doc +.. function:: get_learning_rate(scheduler, state) + + :param AbstractLearningRateScheduler scheduler: a learning rate scheduler. + :param OptimizationState state: the current state about epoch, mini-batch and iteration count. + :return: the current learning rate. 
+=# +function get_learning_rate +end + +################################################################################ +# The learning rate module +module LearningRate +import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate + +#=doc +.. class:: LearningRate.Fixed + + Fixed learning rate scheduler always return the same learning rate. +=# +type Fixed <: AbstractLearningRateScheduler learning_rate :: Float64 end -get_learning_rate(self :: FixedLearningRateScheduler, iter :: Int) = self.learning_rate +get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rate + +end # module LearningRate +################################################################################ +function get_lr_scheduler(scheduler :: Any, lr :: Real) + if isa(scheduler, AbstractLearningRateScheduler) + return scheduler + else + return LearningRate.Fixed(lr) + end +end + + +#=doc +.. function:: get_momentum(scheduler, state) + + :param AbstractMomentumScheduler scheduler: the momentum scheduler. + :param OptimizationState state: the state about current epoch, mini-batch and iteration count. + :return: the current momentum. +=# +function get_momentum +end -type NullMomentumScheduler <: AbstractMomentumScheduler + +################################################################################ +# The Momentum module +module Momentum +import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum + +#=doc +.. class:: Momentum.Null + + The null momentum scheduler always returns 0 for momentum. It is also used to + explicitly indicate momentum should not be used. +=# +type Null <: AbstractMomentumScheduler end -get_momentum(self :: NullMomentumScheduler, iter :: Int) = 0.0 +get_momentum(self :: Null, state :: OptimizationState) = 0.0 -type FixedMomentumScheduler <: AbstractMomentumScheduler +#=doc +.. class:: Momentum.Fixed + + Fixed momentum scheduler always returns the same value. 
+=# +type Fixed <: AbstractMomentumScheduler momentum :: Float64 end -get_momentum(self :: FixedMomentumScheduler, iter :: Int) = self.momentum +get_momentum(self :: Fixed, state :: OptimizationState) = self.momentum +end # module Momentum +################################################################################ +function get_momentum_scheduler(scheduler :: Any, momentum :: Real) + if isa(scheduler, AbstractMomentumScheduler) + return scheduler + elseif momentum == 0 + return Momentum.Null() + else + return Momentum.Fixed(momentum) + end +end + + +#=doc +.. function:: get_updater(optimizer) + :param AbstractOptimizer optimizer: the underlying optimizer. + + A utility function to create an updater function, that uses its closure to + store all the states needed for each weights. +=# function get_updater(optimizer :: AbstractOptimizer) states = Dict{Int,Any}() function updater(index :: Int, grad :: NDArray, weight :: NDArray) @@ -28,6 +164,5 @@ function get_updater(optimizer :: AbstractOptimizer) return updater end - include("optimizers/sgd.jl") include("optimizers/adam.jl") diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index b910e2c3d6b6..9655229b4ace 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -1,36 +1,29 @@ @defstruct SGDOptions Any ( - (lr :: Real = 0.01, lr > 0), - (momentum :: Real = 0.0, momentum >= 0), - (weight_decay :: Real = 0.0001, weight_decay >= 0), - (grad_scale :: Real = 1.0, grad_scale >= 0), - (grad_clip :: Real = 0, grad_clip >= 0), - lr_scheduler :: Any = nothing, - mom_scheduler :: Any = nothing + (lr :: Real = 0.01, lr > 0), + (momentum :: Real = 0.0, momentum >= 0), + (weight_decay :: Real = 0.0001, weight_decay >= 0), + (grad_scale :: Real = 1.0, grad_scale >= 0), + (grad_clip :: Real = 0, grad_clip >= 0), + lr_scheduler :: Any = nothing, + momentum_scheduler :: Any = nothing ) type SGD <: AbstractOptimizer - iter :: Int - batch_size :: Int - opts :: SGDOptions + opts :: SGDOptions + state :: 
OptimizationState function SGD(; kwargs...) opts = SGDOptions(;kwargs...) - if !isa(opts.lr_scheduler, AbstractLearningRateScheduler) - opts.lr_scheduler = FixedLearningRateScheduler(opts.lr) - end - if !isa(opts.mom_scheduler, AbstractMomentumScheduler) - opts.mom_scheduler = opts.momentum > 0 ? - FixedMomentumScheduler(opts.momentum) : - NullMomentumScheduler() - end - - new(0, 0, opts) + opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) + opts.momentum_scheduler = get_momentum_scheduler(opts.momentum_scheduler, opts.momentum) + + new(opts) end end function create_state(self :: SGD, index :: Int, weight :: NDArray) - if isa(self.opts.mom_scheduler, NullMomentumScheduler) + if isa(self.opts.momentum_scheduler, Momentum.Null) return nothing else return zeros(size(weight), context(weight)) @@ -38,8 +31,8 @@ function create_state(self :: SGD, index :: Int, weight :: NDArray) end function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Union{Void, NDArray}) - lr = get_learning_rate(self.opts.lr_scheduler, self.iter) - grad_scale = self.opts.grad_scale / self.batch_size + lr = get_learning_rate(self.opts.lr_scheduler, self.state) + grad_scale = self.opts.grad_scale / self.state.batch_size grad = grad_scale * grad if self.opts.grad_clip > 0 @@ -50,7 +43,7 @@ function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, s @inplace weight += -lr * (grad + self.opts.weight_decay * weight) else mom = state :: NDArray - coef = get_momentum(self.opts.mom_scheduler, self.iter) + coef = get_momentum(self.opts.momentum_scheduler, self.state) @inplace mom .*= coef @inplace mom .+= -lr * (grad + self.opts.weight_decay * weight) @inplace weight .+= mom From a2beae8ee57ba1e664bf4639e226b5a0de2ca5a7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 28 Oct 2015 23:39:42 -0400 Subject: [PATCH 148/630] refactoring of optimizer --- docs/api/optimizer.rst | 28 ++++++++++++++++++++++++++++ src/optimizer.jl | 36 
++++++++++++++++++++++++++++++++++++ src/optimizers/sgd.jl | 10 +++------- 3 files changed, 67 insertions(+), 7 deletions(-) diff --git a/docs/api/optimizer.rst b/docs/api/optimizer.rst index 97b152375bf9..e9f29e4aa07c 100644 --- a/docs/api/optimizer.rst +++ b/docs/api/optimizer.rst @@ -2,6 +2,9 @@ Optimizers ========== +Common interfaces +----------------- + @@ -102,3 +105,28 @@ Optimizers + +Built-in optimizers +------------------- + + + + +.. class:: AbstractOptimizerOptions + + Base class for all optimizer options. + + + + +.. function:: normalized_gradient(opts, state, grad) + + :param AbstractOptimizerOptions opts: options for the optimizer, should contain the field + ``grad_scale`` and ``grad_clip``. + :param OptimizationState state: the current optimization state. + :param NDArray grad: the original gradient. + + Get the properly normalized gradient (re-scaled and clipped if necessary). + + + diff --git a/src/optimizer.jl b/src/optimizer.jl index 07950d82a221..2e6e58ac1429 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -1,6 +1,9 @@ #=doc Optimizers ========== + +Common interfaces +----------------- =# @@ -164,5 +167,38 @@ function get_updater(optimizer :: AbstractOptimizer) return updater end +################################################################################ +#=doc +Built-in optimizers +------------------- +=# + +#=doc +.. class:: AbstractOptimizerOptions + + Base class for all optimizer options. +=# +abstract AbstractOptimizerOptions + +#=doc +.. function:: normalized_gradient(opts, state, grad) + + :param AbstractOptimizerOptions opts: options for the optimizer, should contain the field + ``grad_scale`` and ``grad_clip``. + :param OptimizationState state: the current optimization state. + :param NDArray grad: the original gradient. + + Get the properly normalized gradient (re-scaled and clipped if necessary). 
+=# +function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, grad::NDArray) + grad_scale = opts.grad_scale / state.batch_size + + grad = grad_scale * grad + if opts.grad_clip > 0 + grad = clip(grad, -opts.grad_clip, opts.grad_clip) + end + return grad +end + include("optimizers/sgd.jl") include("optimizers/adam.jl") diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index 9655229b4ace..abad6770216d 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -1,4 +1,4 @@ -@defstruct SGDOptions Any ( +@defstruct SGDOptions AbstractOptimizerOptions ( (lr :: Real = 0.01, lr > 0), (momentum :: Real = 0.0, momentum >= 0), (weight_decay :: Real = 0.0001, weight_decay >= 0), @@ -32,14 +32,10 @@ end function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Union{Void, NDArray}) lr = get_learning_rate(self.opts.lr_scheduler, self.state) - grad_scale = self.opts.grad_scale / self.state.batch_size - - grad = grad_scale * grad - if self.opts.grad_clip > 0 - grad = clip(grad, -self.opts.grad_clip, self.opts.grad_clip) - end + grad = normalized_gradient(self.opts, self.state, grad) if isa(state, Void) + # vanilla SGD, without momentum @inplace weight += -lr * (grad + self.opts.weight_decay * weight) else mom = state :: NDArray From fe36340e1f4a73ea64ab2069871877b24765baeb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 28 Oct 2015 23:55:26 -0400 Subject: [PATCH 149/630] refactor adam optimizer --- src/optimizer.jl | 10 +++++++--- src/optimizers/adam.jl | 39 +++++++++++++++------------------------ src/optimizers/sgd.jl | 8 ++++---- 3 files changed, 26 insertions(+), 31 deletions(-) diff --git a/src/optimizer.jl b/src/optimizer.jl index 2e6e58ac1429..33a67ebd5896 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -184,19 +184,23 @@ abstract AbstractOptimizerOptions .. 
function:: normalized_gradient(opts, state, grad) :param AbstractOptimizerOptions opts: options for the optimizer, should contain the field - ``grad_scale`` and ``grad_clip``. + ``grad_scale``, ``grad_clip`` and ``weight_decay``. :param OptimizationState state: the current optimization state. - :param NDArray grad: the original gradient. + :param NDArray weight: the trainable weights. + :param NDArray grad: the original gradient of the weights. Get the properly normalized gradient (re-scaled and clipped if necessary). =# -function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, grad::NDArray) +function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, + weight::NDArray, grad::NDArray) grad_scale = opts.grad_scale / state.batch_size grad = grad_scale * grad if opts.grad_clip > 0 grad = clip(grad, -opts.grad_clip, opts.grad_clip) end + @inplace grad += opts.weight_decay * weight + return grad end diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index b555773a49a9..d8d1d2377836 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -1,38 +1,38 @@ - -@defstruct ADAMOptions Any ( +@defstruct ADAMOptions AbstractOptimizerOptions ( (lr :: Real = 0.001, lr > 0), - (lr_decay :: Real = 1.0, lr_decay > 0), + (grad_scale :: Real = 1.0, grad_scale >= 0), + (grad_clip :: Real = 0, grad_clip >= 0), + (weight_decay :: Real = 0.00001, weight_decay >= 0), (beta1 :: Real = 0.9, beta1 > 0), (beta2 :: Real = 0.999, beta2 > 0), (epsilon :: Real = 1e-8, epsilon > 0), - (grad_scale :: Real = 1.0, grad_scale >= 0), - (grad_clip :: Real = 0, grad_clip >= 0) + lr_scheduler :: Any = nothing ) type ADAM <: AbstractOptimizer - iter :: Int - batch_size :: Int - opts :: ADAMOptions + opts :: ADAMOptions + state :: OptimizationState function ADAM(; kwargs...) opts = ADAMOptions(;kwargs...) 
- - new(0, 0, opts) + opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) + + new(opts) end end type ADAMState current_lr :: Float64 # current learning rate - mt :: NDArray - vt :: NDArray + mt :: NDArray + vt :: NDArray beta1Power :: Float64 beta2Power :: Float64 end function create_state(self :: ADAM, index :: Int, weight :: NDArray) - return ADAMState( self.opts.lr, - zeros(size(weight), context(weight)), + return ADAMState( get_learning_rate(self.opts.lr_scheduler, self.state), + zeros(size(weight), context(weight)), zeros(size(weight), context(weight)), self.opts.beta1, self.opts.beta2 ) @@ -40,12 +40,7 @@ end function update(self :: ADAM, index :: Int, weight :: NDArray, grad :: NDArray, state :: ADAMState) lr = state.current_lr - grad_scale = self.opts.grad_scale / self.batch_size - - grad = grad_scale * grad - if self.opts.grad_clip > 0 - grad = clip(grad, -self.opts.grad_clip, self.opts.grad_clip) - end + grad = normalized_gradient(self.opts, self.state, weight, grad) state.mt = self.opts.beta1 * state.mt + (1 - self.opts.beta1) * grad state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) * (grad .* grad) @@ -53,12 +48,8 @@ function update(self :: ADAM, index :: Int, weight :: NDArray, grad :: NDArray, mt = state.mt / (1 - state.beta1Power) vt = state.vt / (1 - state.beta2Power) - #@show state.beta1Power,state.beta2Power - state.beta1Power *= self.opts.beta1 state.beta2Power *= self.opts.beta2 @inplace weight .+= -lr * mt ./ (sqrt(vt) + self.opts.epsilon) - - state.current_lr *= self.opts.lr_decay end diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index abad6770216d..84ea55f40f11 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -1,9 +1,9 @@ @defstruct SGDOptions AbstractOptimizerOptions ( (lr :: Real = 0.01, lr > 0), (momentum :: Real = 0.0, momentum >= 0), - (weight_decay :: Real = 0.0001, weight_decay >= 0), (grad_scale :: Real = 1.0, grad_scale >= 0), (grad_clip :: Real = 0, grad_clip >= 0), + 
(weight_decay :: Real = 0.0001, weight_decay >= 0), lr_scheduler :: Any = nothing, momentum_scheduler :: Any = nothing ) @@ -32,16 +32,16 @@ end function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Union{Void, NDArray}) lr = get_learning_rate(self.opts.lr_scheduler, self.state) - grad = normalized_gradient(self.opts, self.state, grad) + grad = normalized_gradient(self.opts, self.state, weight, grad) if isa(state, Void) # vanilla SGD, without momentum - @inplace weight += -lr * (grad + self.opts.weight_decay * weight) + @inplace weight += -lr * grad else mom = state :: NDArray coef = get_momentum(self.opts.momentum_scheduler, self.state) @inplace mom .*= coef - @inplace mom .+= -lr * (grad + self.opts.weight_decay * weight) + @inplace mom .+= -lr * grad @inplace weight .+= mom end end From 1f39e1d90d922b6ca8df07165366039352797231 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 00:05:33 -0400 Subject: [PATCH 150/630] more learning rate scheduler --- src/optimizer.jl | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/optimizer.jl b/src/optimizer.jl index 33a67ebd5896..2c784adc13e7 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -89,6 +89,24 @@ type Fixed <: AbstractLearningRateScheduler end get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rate +#=doc +.. class:: LearningRate.Exp + + $$\eta_t = \eta_0\gamma^t$$. Here $$t$$ is the epoch count, or the iteration + count if ``decay_on_iteration`` is set to true. +=# +type Exp <: AbstractLearningRateScheduler + learning_rate :: Float64 + gamma :: Float64 + on_iteration :: Bool +end +function Exp(base_lr::Real; gamma::Real=0.9, decay_on_iteration::Bool=false) + @assert(0 < gamma < 1) + Exp(Float64(base_lr), Float64(gamma), decay_on_iteration) +end +get_learning_rate(self :: Exp, state :: OptimizationState) = + self.learning_rate * self.gamma ^ (self.on_iteration ? 
state.curr_iter : state.curr_epoch) + end # module LearningRate ################################################################################ function get_lr_scheduler(scheduler :: Any, lr :: Real) From e821f9501da6272ceee86384290650b6a63e48c9 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 00:09:50 -0400 Subject: [PATCH 151/630] update equation in sphinx doc --- docs/api/optimizer.rst | 13 +++++++++++-- src/optimizer.jl | 2 +- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/docs/api/optimizer.rst b/docs/api/optimizer.rst index e9f29e4aa07c..bc45cf1744ac 100644 --- a/docs/api/optimizer.rst +++ b/docs/api/optimizer.rst @@ -72,6 +72,14 @@ Common interfaces +.. class:: LearningRate.Exp + + :math:`\eta_t = \eta_0\gamma^t`. Here :math:`t` is the epoch count, or the iteration + count if ``decay_on_iteration`` is set to true. + + + + .. function:: get_momentum(scheduler, state) :param AbstractMomentumScheduler scheduler: the momentum scheduler. @@ -122,9 +130,10 @@ Built-in optimizers .. function:: normalized_gradient(opts, state, grad) :param AbstractOptimizerOptions opts: options for the optimizer, should contain the field - ``grad_scale`` and ``grad_clip``. + ``grad_scale``, ``grad_clip`` and ``weight_decay``. :param OptimizationState state: the current optimization state. - :param NDArray grad: the original gradient. + :param NDArray weight: the trainable weights. + :param NDArray grad: the original gradient of the weights. Get the properly normalized gradient (re-scaled and clipped if necessary). diff --git a/src/optimizer.jl b/src/optimizer.jl index 2c784adc13e7..a5f0bfd5ec60 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -92,7 +92,7 @@ get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rat #=doc .. class:: LearningRate.Exp - $$\eta_t = \eta_0\gamma^t$$. Here $$t$$ is the epoch count, or the iteration + :math:`\eta_t = \eta_0\gamma^t`. 
Here :math:`t` is the epoch count, or the iteration count if ``decay_on_iteration`` is set to true. =# type Exp <: AbstractLearningRateScheduler From 04bde14aa923c2b259ad289c734200e721ae1841 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 00:18:41 -0400 Subject: [PATCH 152/630] fix accidental notebook changes --- .../Prediction with Pre-trained Model.ipynb | 48 +++++++------------ 1 file changed, 17 insertions(+), 31 deletions(-) diff --git a/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb b/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb index 0f0bd945c723..9f4f2b8cb1d3 100644 --- a/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb +++ b/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb @@ -22,36 +22,31 @@ }, { "cell_type": "code", - "execution_count": 106, + "execution_count": 1, "metadata": { "collapsed": false }, "outputs": [ { "data": { - "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlIAAAJ1CAIAAACdFImqAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAACAAElEQVR42sT9ea8tSZIfiNniHhFnu/e+NZfKquyqbk6zSTYHA4KiIAgQMPog+k/fQR+QACGCEISZbpHNYlfXkutb7na2iHA30x/m7uFnu++9rKppx8uTcePEifDwxX62G/6//p//D1VVVRGJMYqI/TmOYzkfQogxxhhVVQBijBJBRIJKjHEMEmMcJcYYhzHGGIPEEMKQfmP/iyIigKoaBUQgIqiiKIqI4NRUFREBABEpN/vKTh4dOMflT4b0FQCQfSocNYKhHCtQORZFABCkcn87j9Cki1XLZ31BeTqR3U2qi6OqpmfYwEIUERAt4xw12HEaXpG6t+XO5X2JgJkds70vETlGZpp3jXPQeOcbbDx677qGnHNzr8zsHTnnHCIzewfM3DhiZu+QmR2jc65hYmZ0iIiuHnlVACCGajwFAFBt3AYbPVUFIFUVJFUFpRjjaPMeQUSGGEQkDD7GGIKEEIJCjDIEiQF3YVChoBIjDiHEKFFAVXtxZe1FsFWqIiKqiFjPFwIDAMk05YrVUBJN84IwLbg4lPmNoOUYmcoUaz6pqj0hADASETEwIto4MCoRMAERMon3vmvIez9XT0TMzMzOk/e+cc577zwQkWf03jvH3ntHTETULhDRridyRKRkU58ehYg2zoqgqjsJABBFbGRijCGqKgKksQ0hiECMcRzHGGPYr8dx7Pt+HMdxHIdh6Pf7YRjsz9APdjwMQ0ibmcr+tfVpzXsvIsMwvHz5kplvb28RMYRA5OoNUsYwxmjLyX7OzF3XtW379vvvZrMZEfV9b7clohij/dCoQd1YAxE5YkQkRma2dQsYbCUTESIgYpqjqhvlJmU2y0HZjyEMABBURCQGNWpm14pIiJpImYht1QiNTg1UVQkBIOZ9jIiK01oijGfpWNnpzFw+EbFRR0Tk0Dlmh96z68h77xrnvW/atm1bbjwzO+eJqGNv1I+IbL/bWHlGGzciYgIi8rbeiIjIubLjyTtnHQAAozCpb4AAwN4V4nw0ngBQFkkhZYYdEkYRUYkhBAlDjDEMvR2HEGIYQwhxHEIIOG5FJI5hHMcxDCEECTbgAQAoTaWU8RTqE10HACUAUMRCz1VRVYNAnu3UHDPbjNkL2HSWNWF/FgSyqSaiiCoiCEpEgIKIKoiIooiIIAYhiIga06pCREywlP4BIOgBfhzBydlWcDGjiRSwNGqWLvjQfT6pHYzZhTO5J1JtqmkkL8HeGMejjZep9MGqOjs+eNyOzxxt9UuXfXDMETET/0uDMy39mpqUb+uTqSFoemOV6VZY7lbfJB1XsJQ+cVoPRwvjg/OVflK9I9Vjy1RGu4Y9pkQCnoA9x4ZzxMwkVFOxdE8URK7n5aifmRWciCkZ3/GxTWwkVWP+8yK79sTSemLJGVGzl7LVW14Qzu3iow4YnxdCWK1WwzAMw0BE3ntVDSF8cEHWt83k6/ir043z9FayHhrzauydkqoqgapqjFL2ztFPnmhPv8gTcwGHK7Z+6BFMni6hT2offAW7ACuB5NL5D97qj28/7TVLJ0tXnW8401wUSf8gc/dZCiT7JyIqJCLBRREJAjFG5MAOMXCMEUlCCCSRiCCGGEmGBAOIqGIjBYWYnI7mE6N/dHE5U05KBm9VpTQTp2MAcCjn5eOTS41fQKgp79Gsp0er2hIEVaPlkDZkVFVMhCyqKujENRsmxsRL6hFdOMWts5u2cCT2fT5M9IiIEI/55bOwd7alp3/KCiuvkTjfmgtWzLRY5RABy8VQuO882OeuUlU1BsvgAZSsox/ceGKrDg52bzWSFQ4RHjFPCZAqVtehgyz1OoIK9tQ550ySJkdE7JCZ2LCOFIyPLvO
gQGWkNQIgQiK2qoqSBG1VBZOkIQIAiP1AbFFP+F2tZgJQhahql5WV8yHG6MwygENCbzh3BHtEpHpAo483X3UfE0Cvlsvb29txHNu2ZWYTSZn5I2nCyZ949KAjVLjE5NmcqioiqyqX9aTKaEKM8SLTEOW9f7F7l8bhlNU4vfIslpRH21AzIzEQg0Piiact1+fRQAFEALZjTOvCzoOdP2JYAcAkGAW19QmiUBHbI17zLGFM36qgHmiwjkfpcAQvzU7+lJNr6PxtJ6hDADuodIQmzGpWXxThlLJSyJSfRbQ32GOJMUYy2IsUQgDSGCNgJCKMARGVkIiCJH1FjFGxqKfShJxKe6er8yy7cRYFAY6J1CkZNIlBq7FWeIpWRtCElFod1PdJgiaICgCQTNKeqhTYMzXdKeyVpx+RHoBL4DS9ctkDJkwQJdgjqlXEWuuKq8soX0xPAGG1yM6P0qEYN8GeiBqlUMFD/VjSKEgEVZUIUUUEVUBARTQa5glqWix0FvZqhVLdN3v8KWXRajOc3VFEBEzVKEOBvfpX8QT2CJAAEJUJmJEJiWSCPXDMzM50bmlSHE6qexMc6wmtB9/G0kAPEQG1Ut2K6tHekfxpBEIBFFCq88ftaG08vQy0UjkWJRBmFZHdTeRgYOEcZS+wp6pEi7Zt7Xgcx7Nz9ESrV8Ola86i++kFaceKIKJoku2ISETLJ+ULPtjP+n31sBtn1+cHW1mYtmGZa373dM0czrJtCgU8oJHTGMKJsqSmulr1oT4PJ+fr3z71LgoAQCBnae/RixzR+Uvrys7ZFxkLyVCiXDFJe845m2/TYZZlVMOeEVZbmioUY6RIQswSY2ZZgZSIFAGDAnl7TFAI3peeiWG1MU2550fjVUjAUUdP5+PMYMGHF5AecAeVbQ9r+Q+LSKCnmJfkxfo4K5IxvZGCGiIm6FNVg8NK962VRFRm90i3eY4ATa9fMO8U9pg5Q5oUdMy3BkTMP5mA8JQo1Dvp8kKmCvkS7BlTWCwg5bNGLeOh8hBAHqiJkAUVBdRK/auAIsVyoIgoEYkwbaNkR8kzkt9Fqk2SuNcK2BQ4wVvFCCBi1MniLNXCcvkOtZLTYM8xFtjzzDYLHomTzpMRwRFxBj9C5CxiEiKhEiojISKBMoKiUpl0FVBSSPKnjTLaKQBQSavWSJsCqIKkfwQgYob0eDCzH2pEVNvdz1Kls+0skTo6aci33W5ns5lz7u7ubhiGtm2998MwGDt+cRdXmp4a9mricArbZ2Gv7Lt8pxrdJ5A28piBcLItPckzf0DwPerGWUKnOPE5WLk7MBIjESBBshwmNR2bklYoIRxO/9KfQInwJV0AYJL8ig5ONa0m2ySK1U2kMlNV/9A6nFQ6oiJQWxBMaza9nABW6odKIkTrFGpRhigIVhu8OqjYOD2W+XDqs72Z7YpE/yfYO2oxxkzL0pQzs0l7zBxCEBEUIiKBiIhEcSRKL1qxCA4m6ceEaqJiU0kLC57kDsqt8JzAd7oZDhbNBWlv+qus2wsb2dRicBb8Kh7H6KnCJJsUeq9H2roa+VSlokT1BnDOQbVXjRvA7NpQvsrqjiekPTgU9Y6lvXp7Y8WPTGcSiXliimy0SVVUUUFVMj4J2knT8WZpr5i+NahKhGC7RCga7OVf1RhZAardPyKiIhpbVpaHXiAxRteoHuQEKRWMMROl3VaPAVF9w3S9I0JgE/UAgEDZoSNkR46EiJwDZvMeMH8iosShgyMmtGkCyh1J00pqCtZqbxfuVUhRJl5NAWr5T0glogIKKAII2DHYzkuE5hQGjnDuSICAQ8Co/yzzUtaniCByPez1+J+eV9Xdbjefz51z6/W673tEfFrDebj9p42F55Sc5eAU806RL680xEraq3afgi3x7IGnT1pnju5/1Jmjry5xEobnSbNOWgwWJ3MkqmhiSgaD/EQT9T4kEGjFNKgaezVpbsoQ1RcfnT8ivKfHkLaJmCmfFC6pPs8ORT558Kxq3Cq
EtZFITo3pvzJZ1iHnmaCiyRPsZTlMRBjBbH6qJJK4HoM9RIx2R7FDhwqIAZQUEJBYOdM7IS22euvKmTF9YiXpOSWncf2aXVqmr9Jlp+Nng3cs5+VBra0kBBVHcTTreCj+lzsYu5DJelRVjQIACoJZ1ZdAMQkwiUxo0ocU1sx08YmZUsi78EQVXiFY2RVPKazqzZytgFp9d2LsgY9plMVaE3JrlM8AJqhi1FdEVUDj6bLLP4yVs0L+V8RnA0UFIK209tmqpNMSn1wfzbUXNW+b8naTstFQKBNoZ9IGT0JwXhM6wUMFe6jCDj3TBHusROSEGMkRGqQyIjuT8JSAyNxxVRiIFAkRRNLeSK/N9h5KiMgIQIkvJjX+AgWATCQwGpcopRbJTzEfY17Ol5Cvpib1uXJ9WfIF545gr0hpR7S+iFPl/nl9xHEcvfdt24YQbME8LerVBEErh6njjX4CdXAOYI6Ij7n4mU84MOhk2zsAwvIKH+wkfrSSs+5bResmVbZd4JCcCXgoCGKGNyQBIUAgm2LbyEaWkq7e/iJKco8mz0NlEEUCEAXChHkGfpBte5BUWUmN86RtD45wEbSQLAI5hboi5xEIVevTBtg8I03RAwCgJmdX4Hcg551RjZqLISImmBKB2rZ3QHuq3h9NMxHHGDXZ6lBVWQQgAjkAYBERIiVmZhWTFE/5x2q+f4rzzxH+TTMBVT8/buf8hEcfrdejKbdhra1ZZsMzCRjr86oAYN5rh5N9vC3Pngc4ZhXxQy1fA0e3PN2K9f0/PCBKOtFiSRCVzgc7ebLG0lYRxQhmzUPFpC+OSR1wEDGSIZUA4sTfZ65/2n5YdeyUra6G1A4ICU78ZgscamYgyn0ojx4jIXDyj1dAJMfoHHkmpEhEzEpEINP1lH2OGAlACKAOuamWk+l+EUWVIgCoEBLbeURWgOyimShjZs8U0zESSEyCgiAkme909cLJgJxFQTjUtdi3xRug9m25tISONss0zqDDMABA0zQxxmEYQgjOOZGn/CBOt+QHEejpHTTNr8EzJNrPzLa0jb3+afQKftLmOpImERFAkAy0THY3sSYCEIoqah2vZWa8YtUDgKQST2AoycnlsJ1i3tE+ggrqTl/hCVHPxLts1YNoiDhh3nnhrzANHzF6mO8EoAQIiKwCWcQ6mDvXel/rkYryjb2HytWltCAJXQkRY7Q/ItEYI4CLhaghMJIimRLVTIMoSmR4ZB6Mk/XoCSYo9Yf5yXc+tu2lbXOq5ExYLlCi9JIXxOSwLkDlVkU/XfZrpteSz2e6LAIAnkhVgUizzV9AEFAlglEgVczqXVUNIcAFwKsX2dGbMhcP+UkjZGo658i5pJapmZjpVwzOuUMyV2v0pp4kgiICAEiTRilhkPnlAucRSHqsFFcnk0Crqlnjm1xaahar8AAxSogmAgKAebgUeYITqyYxB9EoovmiTS5biDjGcXJstaEjNKtzGQrJgU2GRmUc7KwNVlRkM8MhaWUNjTJCwjlEyKI2YHFpIUIkZSQiZebGrgEkQCZkZkIgBXZsjgmOkQkI1Xw4CR2AiWwICsm6R2ZmobTO0uQoKDKpxcOiiiNAQSWKwSQ8xRQbqyoBNKpIcq06XF1FVa7Maaseejccyu5JIFPV5XJ5d3f36tUru6dp5i81zEJhuQ8AoOp+v1+tVm/evGnbFhGHYdjtdl3XnRIBAGA6duk6Wq5HrSjAsZLsT7dYafb6UAUzSKU/KPuovMXhHWoCdcZQV5MLOGS2yisURHmSc8XkvSmKoqACRKTCzDGORMTIiIpJy20GDizCE6Empk5FgBCB0UieAqpoRDF/YmQgVQgSKcf5qcREJgBAxQyB9s6QVWloevXs1QAAqFFAVCOiIima1TIPKaMCARE4xkxgJXWVSCmAipEfMtkuma/rpVXsAgyEoBSy8aisAmPExSkUac8MJOUTKh7KzthOICKNaQJM5gMANk9FVRZhxDBZCOTstMFHcDpPEP0ig55
Zux9957QK8fj+9fmjXXd0cOl8ZbRLzoiaZZOyPaDeIpeFrUvtLOd46ef1Vvng4H98H04aZUJMJohU9jnJ7F15fVINYtYnFQASiKIgQAJRBcG8mlVVYz3eedAmRJeJccqo+zHjdoGUQPUnHKqkSrP9YrDHwBnzkFA9IbOFKBgLoszMgIxUnFo8MaIiKzMRILOhaba2ZoMHFou9+RSoIKBCREQzH+d3ksyFxKTrQ1EVQFWIqEksUIikkKnJh1fOKRN2drvVLH/RYR5deHb/nuIuVPuamY1XPn36kbxVwKbsr7MzfnZ/PSGpIGYP8xOB+HS48FP8Ti+1mtqck6JsWiOAcRUHghoqZEX3cTMuCZNtT0iL5lMAONn8Tt9IzxPGs7N26V3ONrMxkEI01r+S/OCcc8Wnk6MpaN3MEQCKyNnLcKIPznsuSicRU7hPWvhMwZEI7FtCjBFGVHOlHREFFEDNhscSvaDF8DACEJr//AX4+8BSOOKA6rGoBvf8+Uu2Pam3UApFQADzL0UBKlKCWoBVkmny0iyP0PPno6mSLCZPRSE5ZoidT8RaC7JSDls+4hGklkJw4lIdnNmKkDjHs5orPTlzSg4+KnTy0lJTlaRA02Lhq1vxFwAAFEUBsXwrWRCs9KKZfZHsDpoC1tLvqciaEtVWuMmCAKCJI0sjXCzdZviiJMebYrPy3bAZn4YSlZJFkIiBquA6Is05SNJcoEPTdiIiCGfYI0aHCfYaJERkhw0TMzITkhIgMyEiMXhKsJe5YEvsIKhMCghJ2AVRzGpdwmQ+TbobFQRJDmOiBKKipKAAtVWPQC65EeCFZpKsHnrtp32UlfkGeBZyV2tl9Enzz9FOn9YKoqkiikvd0dItPwGAbGw6Q2rPLnWAM1ygHqvspj2lTxoOjp51hAof4VR+xjekjFjZMBEFAEmVD2+ImhwgSYHUXDeZsu9n7dELRQuGtTMnmlMemUnQvlCEHDFkJkNTmxIAmcIBgEzNb0E7062h+FVQNi4DSHZWn6bycKYkR9cIHq1MTE5Y2ZEn/+6MyvMJdt+6ZBex/dbm7SBur/LiPYI9QUy6KaMQJkHashMRZWUFVWVhEWFJuYjUdCwn8t8TKvKzjE/ZEvU2qNdr6u3heTgHe+VMdXOZnlIvvtMIv9rOpOfPQ318+F5nv/0gE3o6nU9QqHq3F7vd0UY92rEA59nhn9DKypHkkWJgNgGhxeGJiBnzRGI08UQ0AlrMQwSSyRf6CEQPKMLRCslnpDA0R1/ZEBzFqJltD6phlAoEi2HPBpeyRImIpJC0l0mHCUTIaGHpwJxVhkiOyIIZmJGzAxFROl8mzhyrplk4iCaOiJDz3GVDcdIF62Q8FkyJgcx0AqIQzRR0tBovEfGz5L2+phwX126DKOdcSWd4dv8ebZazKGjXmzJ5coGu7jPdrVbCn8De0Xtd+rNcrFmnVf8aAIgmfWOiZtn5q7rujEPHMQrW5O5kk51lC2oKbItBNZp3PwAQapLYbJ2cC9kmBUAxxThmF74iYJ1SADz0ljviBo76c0R+65mFQz5mepCCqBJAUC1WvWTwO3zxumNwyKOcMwJNbwyQrHom+yoYQ2xGz8MsLZzcPdXmuPS1qDpTbgICARQRJVRVr4yiI8a0hwEdobDZ8slRFIKQA1x+Aj09YhXhwj45On/gV3nhoXXMTWbJjERbbk9By6EHOfdK/YgyJZDj9k7OF9cCoBREZvxPUVlNtr2zK+9TkK8+UxwyT668hJTH94RkQP40jU3aDII1Jh2hlESVOJmFcjMgzGJfTt9W2HeBlMwlrWYFTb6f0yBhFW0JSgAoGECFK6WXCeK2u9Krc3LahCp3AyIqYcE8ILS0nAaEioZYib0lPeYVjlgQzAF5lr0lQWP6FknBpfkCc6jDbCPBDNsInPRXCggMogAxybVmC7WRJ1WVzFhLcuKU5M1JmtKUEQhIBImnjgxnF0cV4pIsHYX9TWOlSkTDMHjvY4ze+91u98T
+hctQV3iLckHS+GZW6fSG+coEe2buPdr4+KF2dhw0GztFBA4GBIsN6OAmhwk3DJI/Rtp7Qvw9etMCH5oVS2mUqhWOYAsJTaA3fSYcgwVWl6vlL0l6Tp3kqCTnqaoqZVoHkJ2D+dgGecTTnOI3AIDESrbTs+9OKqoSJ/+sA5KoB6le6mOqPo/HEKa4PchxigoArhiZoULp/CQtzE5FFrMwS0RgTlzp2AkFpPDk2jrlAk4vOAt4p6OMk0iX10d1/pKSUysDdZHzAEC0DkGtRuDEn7A++OD5qavVMZ5304JLfx59dakdhpedc1c5mJeag/6jjBOHi+fAqncChJNuU6sUsSKiSKoQk0a0pGgpPOYBv3k67Glu8/ye8oxlFR2tzEtDXQuFKaLcNBaZ9CCmgNpKxj4SEy2vB2JOi2xzwQCQY7BoivQ9dnGcVjGiQsxa2xPv1sOdawpnW9E5fFbgApE9iwQnyAc1EagvIyKT80IIXdfVsAGXGdbTnpR7FpBDRHPmrP0Mzq2647VxdMOz74UXmEsD2ppXhDwOxp8dhfpUFPnYDIZPSHsnb1EurglaTdNUoypXNxSoMU/hEsQmq97lC07HwXZwIf5QvUjhSOFJAnW0LOvPYtX7+HY82vl09bD6wZTD8SGP5+TqmBlcAADHblKAZnsBAAAm/h1UAQUtKVP6VkEsb0sFe5izWtDUt4NQ/LNk64mlAIfYcEQXqjt8mm0vTWryt7frk7wLACbnYf2sT7TtHTh3mWFGJ5qbuY9al5V8jGtams+k81Mw34mYWBZEtSePxuSSbW9avpcIwce3I3CqRb2qoSpGBdEjw165vJhqKvZIa9vetIVShnVMsiZi8s4Fzg9DVASBlBzuEunUc3bNQuiVTriGpEOxT0JEyra9Ei6ZHCNROYdYldyJhGTahGQRTMqSqU3gBpBj1Rkg5UgUiACAZuNUNb2nqBKo5KQ2ZuFLfuqJixeLlzhjGLnguXPwwud4hQJ7qhpCKC+eHEw+TtSrb4VVxjJE7LpORCyS7xRXnlZynu4OPORy6nc5/7sK9o7udnzbMy5Xn2DbO8K8Mm4TkcxTer6vWRBDBVS26Tb15gQAB8fHCcpqOa/uGEHSpKdrKE/ouc6X/h/dBCAZIE9R8GNacT198qqLOTkhDx/UtNdg71TaO+p3oeOYMmKYqGfoPVnsPjjB5eCDsAeHG+bsrc6SKv042q2qoFAtuKqHhzc/lSousa5nGXDI3ixgKs8TERA+TjF7dlKPLj5CuNMHfczJP6YdiR1wYvM4vJJMY5l+IjiFFF3mkE43j6RZqxCdqhnRg6mcaPG5ntuoHI3wEd3EbAusAxiwIpSZgpvvNRIRapUH1WQ+mmoXoAmDh7l68UKs0hG5Pzv4+RYpjWedLwngIjwcrYf6rYsUe+6rJBOYIR8yfw1P7t+zrb6bERzzOaCjTGBHL1Lv3HMvdfoIOIHAU/KNWXWpJ9h5hP2XnvXx7QyiH97wY0DiaXnuIwf/6Y7BhVX3Qbbm40Hu6R7+UW93QotcHdZ9sOxyOCpZ0lNLVxajs32qIyg4BQFBEARxjBKTB5HlNWTAaMXPkgnTqagKqQQVLWkpERQhqpY4/BL6g9VnlkdhWo75BRJ0IdZyGgQzth9KgTolNBKzZwKApaeyLOVkIZVIU8YnnaS3+qAuCQZV3II/1OwVeTOlMEmJ9CcBnLL2GQETk44AAOySq9HEvKfosRZTSnSMEBkZSMiRYEAioKjACpaB2Ak41VHE5sqZ0M7KUcwlEaMIIpiPvSAoIFl+ScAkdGr2G7Rcw2h5P7LHiSpG8+EE0FTvTUGRdLDloiAqESFiDBqjxkFJQKJqEB1iiBEiAngOMUTQACpKAUQUgkpUUTZpD6KCIAqiEKqCWLx2Vk0kWx1AhAaAk/uJaVxBo8Sm8YTkgFCRxgAZeMRRgSgyyxkhgDl0kpUNAiCIoEBREcAhIhAQUs57H1EBSJzzzilKAAQmNjZRcK+KoMz
o2XPxIIsyIiJZEtpqV7ejJ0ph8oCokn2AnVXqEUQk74jILKUwJJOmKhCACKAio+9jwJTPkVQsIEQQkGKvMVIMbInMwJIpoRAjqXhHJkJHEcdBxTeKBAoS4hgliAggOI+73bjZjk3rFKJ33Xa77bpuHEcM24IciYBYROAF8kegw27r8SXG3oQMg7rH2zfX19c3N8u+7+/u7vq+b5pmNpvt+76YrTC5Fukw9t3sCnPwQ45qJSIabZyRAM0vKVMUzolbNQtnCqApMozR0i0lEGLmXHiotnoSM48aABRz+TVVUlEA0ixpIebIE9M2x0kbnolyIhDlz2xdEwVQRtQ6n61FigURy0Ybwag6CCqCiIuRlDwwkRIpqTplImVQInRgTlh50yM57UmIwFkEcOqn4ahGjZzf2QpWiqULgkOwLJNbB38nX1+FWOqLKapAFJETN10AEKSY7NzFA5lQlTSqKFs29km6QbIELhhsHSGgoACygoA6Y6MV0QydKqn8C2iAtIePFmLScU9FGDAnXD9ifKDmgPSYYayh9Ih5P+ZAEY6Gr/72LNRfYkaOGKWzfFN9cHoTPDEifryod6md3hMuMKTwoVb3+RIXfCp6nu2nXFIQHD7og+LCE88qkt8TP88LBI8MV/W3R+4wp4N50uGDun0AEELgVJuXBIkRlYDRhQMXp5Rxv1yfrTtK5IiA2VsWPoYcoCbR8j2zglIwP3NUYCTHRkAZS5i2oGS1v3cesUTNV3M6ntE0CGjrvQUL28SX0UgAQJbPTRJ5UXRIgrlSJkTzVSw0K91Wcg3oqr502ftE5JwbN1v7FnOe2KyQRBul/X6/We/atn3+/Pnr16+/+91vplD0KrL7iSWdIsQPV46ZDLfbraq2bWtEqe/7FCAhlRXQO8ttZh2z3sYYNWV0OHQFPKfVP7MxKzvQ6U/OikdPt8Jwny7XJ7YeAEiq/l2yPaSyJE886JPO/wnb08ThiTOnoPDH9uQo7lCPFfyuDvI9EFMqZ049tGlPzt8XstYevV5NuaqD8yhY/7DWQlw6f8QynELaWaqdlSQ1tqWb1CIaVDpeeBJOLrXT93oa7U41TsfDkicQs1tK0Q7Vgzn9CYdOp1OHJy3rKTh9jLo4PyhFKRwNxcEFcLAAjkboZMTKczHbF860i/yKRlRFQaLklSpqvpMAiStOmeZRMaYkSdmGnRJ/pqWBJvHlf0xIqC5rxc3rk1NEIDmC1rFjBxI9oyNwqYfmDUdWk6rsGhF7BIoqMU7TgWxs6mS4QQRQiaCCyQ4tRR4FRlVFgZKTdtoODkkoO8MSCIMqeJPqyJwSQxbpD9aMLSpHTIDYNFYAzzlniygLOt7scPP5fLvZ932/3W6ZeTabDcNgpdINOLXSgp5dRd57y1VUT2XbtlYFvmmshHi72+222+1s7us1hoje+9lsliSGjIWqymmop/jXs2TqaZY0j+oZA+dHNs1svR6y1DX/cXSmzAgiahRFiRFiJJFJaJ4GQVSrzDVU8bIHA1X7aooi2PL+o7SjT5KFYyPFWTr8xB0+8mJrcu49ysjXzQbK1XT2LL2uUdCSPOFhK/7HZ0WryVf90IvvgN7BsY3tCPBOsW3qMJyxrJ4C3ul8HN9Hq26fzNbp5J0O0RNeZ3DCUZaTeM5R5ek1MWEDmXKg/uoURapvLy9EOLf9tPLWqbxcDwfwsGOpib0spYGp3DLzt/bnYaWFcj67t3xwuZ+Zi1Tj/hgaMUcsgJKACCBGEQu1Qy6V8Apda5rGITnnkqemRhIiCJySdSsjMYNDZkJGUIgtWQoWYAESwYiqqpbwTEnFdDZomdEkWkICCxwyjbICAOMU7q0ASmi5Nfb7PZgClhAqvtPiiwBIQFAUEZBAo+QqMpJVcGoJGEzf6CrmNxfsTRu83teq2nWd1YMNIVjNTVvqq9Xq/v4+hDCbzZaLq+12u16v379//9VnL4loHMc6ev1oKR5sAYC2bS0tZ3lxIrKTfd8T0Ww2MwQ1cTP
GqNEqGroS257qs7ukeywN8TgH2KX9hZkATXsTD3aoyeXwEbzptHcqwMsnn8K8U/A7bZDTFIHoWSPwAX7r9Jl++8cZAj9qD16WOE+J0tmfHKHmh0hBCkoEk/Mg1aI49BXLcwoIAI5q2IMkABV6Zw+083TohV8fnKKmVnVr6+NEAkVlwpcDbqheDWfXwSVEfGIaDs9cmp4qBPicVHcJCI9ePBr7DKo5LBALScMpF4ltKc25X+GjNSembsrS95H3zYFsVOLeVFUurJ56IZpBgauv6hG+tKzzDfAwYuEQgwU0a2lUXNFWVhcnnVv5XVl9NXt+lmwdT5DGlF9eVWGCkJCkUnApLpORAdCxI8RUG88i8xgUET2TI2Y2kQcR0SEQYZui2y0ODxwBMxBqjOpAGdI0MKiVSsg5YRgAYpAYk8HQswOZ0mnYDlXVUUeAxNAoIUqq854FJqTKtw9LmRkV8/clQAZEhDEGFQUJEG0CIM2BikJEUQs9JCLHgACBgyqpOhQdIIhiFAVUiTkZbwjjGFLRTdXl8irG+PDwsN1uF/OVc261WgFA0zSqOgzDOI7FtvfEejZk3W63R3u8MNMGaWaxWywW+34dQiBIQp6qhjBafVrnHJO3K+s8A3jSagqm50S9Er15Sdr7SJnvkF6dIUenxK2cr8eBLD3ZBZpjiVoygVBEsxRq9gKGchnmxCuYN9jHvMVHtiNqWUPXKd9zdMF08NGWo9wqbeVhfaWa88Cc6wXRQU7yNs13DTNlji9DyPFrHGHeQWRyOpkOipNZJnOAOClU4RywnT+PxwsXK+SGQ0guT0l9PnnxNAEnis0nSL8eurQonAeYepCfOPPBTTU9F6dPQZCUDVO51Ag86jAAAIhqzBUTSEEQOGPEpGfT4nSTuZ8LPamQrRJDk5WOtKosVBvnMkxO20GFJOVuq8XEnyLtQR38UKVmMOsRoRMGRkdE5Li4C1Ku/uoQEAlBWZFQG1LnHFvBWCYimjGaYckyXqJGCy3whJ7ZM1vues+ezAglAuoAUETiEHLmcZ21HRwmUlSNANSQAhCQIrKSEjp0gMC+dUTOSg+JBomgEBE4plIMhV+wuD0UNXNdVDBFGNhxZcsrc2FVzs3xBNVynSWT0nRZIQh2/PDwwMzz+Xy/39/d3TnnZrNZ13VWIXY+n5vZryZq57cDQNu2d3d3NbmHQ3Vl3/dWnKjrumHcEhEjNU3jnCv42jTNETLZHXzDRXg9m4r6SPFY04f0Q0xOVUf+DR/cpxOZqvRYekJm9YKSs2b4pikWMRcRELW6SB/YG2I+hhV7nfU3H7+/PqmdJZLVEj2AQD0HckcnP66f2aCQQvcKn1GUvmTUwGDQHQ06ntN0n3YdTnDuCPBqYqeHdh2pS4ZWisqzK+aD50/h7fTiSwMKB695UDN2WjYXbHunM51GBg9mq94hesjNQSXzYdKeJO80LaiWT5Z/ERSNjp5Ie/VzS34TQYig/rIBNR3gdJPTzVwg5HQtHtr2TOA76kySAku95ZMlU+u9YTKvAYCSXlholyhOrTUltOpOVj+vseRgyKRIZlkTAcKoIoCMwgjRcuswauvAOWw9MaNVy3OMzDRnBNEYzRXZ6qqnCgwegVVCGCXKGEZEFJEA6L1a5tswBFP9IWoYLAKvLskkqtB6AiUltXqrzJ4cErkZdsxIQKAgVpwYBQGj9nlXm7cwqooKVjk5hSGpSxUMlM3/UQgkC3PR5oYInCNEj6IQBUVHSk53xUJhf97f39/c3Mzn8/v7+7vbh/1+H0JYr9erztt52+ZHBR/OrsCmacZxPIIBq0Bk42IHRfmMiI7YsoD2fQ8i3nvnplispK2tcO5IZwDntsMR6zztryfrITz9akdKziPSBCeYd3qllVkw0JVQPJDIZ+dYKzkExfsRLVlPIvBk+Y3su5SqFgGAVNM/y6unQIe1RvM4ZLt0tlHr5Zc9e/zT2inyXRrtKSsWHpwsUgHmYwRUAWUCk/aeuGndj0s
o/QT+1b864i+eRvJLXXqS3h0rDeAjwO9Urfr0RF5iT34Sh3LQPlJtctDD6lOy0GYCnJ0/hue8FKSSFE1A/KjHPalYNo1qrpxFlvemYJ5lWi+YBQBJHJRJ8ZszTSd+FJTMNFwvp6mixSVVJxwsOUyO4zRrPVCijFGjRoxxJOCuSa57ag7tav4kOGvnjtgzOQJEIVVWJBGHrKoIIiCU4/MYcbfbhxgGkWHfm/xhRL9ZXrVtND9DCSrRkj1QCGJRxhnxgv0k9AAAFsDAzM4Fckw0KoJzgYMvWbssQDCGYNKhvW805JtcUwvhVlIBBO89IkqSHARJLQG+SDDyzswAao6UIjJWKzzGGMKk5yxKSES0FxzH8fHxcblcLpfLGGPf933ff3AvmCdnKVpk15s1sRCQYl/03pu0h4h93+/3+9azxbZD9nu0jjGz975sriOdSiEXH+zex1zzwS3zMd/qObGvfJV0ZSHGGCWQekUAlKTIP30GilKpn2aFZPH4fN5uf7L2NM08e/HxTz5ZyQmTqGfEzQBazcpd5W1RRLQANXJ1jpD8PAArq2h53LNhyriBAOcBrz6ZLQHjMAwhqB0Mgy1ciFFE4MjJxn4GAKBPRd2XZz1h5nl6uGvpTaqfE/uJrFcO5XKh3OXp+cwfmoRWrebyglVu62KbKfOGhwfpMkrW6KSIV3AgiBgYXZaTRMQq/hIoIYhIIOVDLiOCOkg5wEREAGM03z+LTkHEUkrgQIy2+cyjhTn6cIIfQhZJZeTLD498NgFINdjBkSu2iCpMiTGJ0IL/RESy0Fbc6wHAPDL0RGta1oNVnwMRJGLH3jdt23rv1+s1IBuWABMDAyoqxCEyM6E2nrvWzdvGN64hBxhQggYBQvbsyaHFOeo4juPYD6Y1zXowDCG0bcvMjgCLn4Vq3w/b7c7kj9b5pmmcYxXdb3eAMtV3SL4StB123ntGDiHu+1R/1Xv/7v2tuel3Xdd1XdM0VjnC4swMjGws0DERgYBGAYlokZegCCohCCCAmBBAgIzUelA2kIshiGlAY4yImkN2kwq0adDycI7j6Jxbr9eGcCq42+02m81sNmtc94//+Ie/+Au5vr5+9uzZZrP58ccfF4uFOaekvZZ8WSWEMGua29vb5XL58PBgdkHvvelILQtM2fvJyOfhF7/4xa//4b977y0jKDMWcbC4s5T7O5wEvppv0xPVy+F+PW45VE/psNU3QcTKlpSIQrWbbEdXlVU+2qJPMHW76J9jjEIoIlyqPqkyKoEymCUrWshvKqwHlBMwW8A0Ui4woAd20IMKhXlHSxnDI5pfaEWtDy9EUj+xwaGBrIxtyVpQ51cREXQeYAIG41/zpJi5Gy2MPNmDiEjpuCzkeQSuQK7u0FE01YWWLsimglRWFOijZAw9p8984jL9CMn16ccBTPZC+DSm40/fnnivqOqynVtTvGex0mkEdXm+jHIFFW/ilUBEJAAHKMkWmAXEvJj1QmdOJ+LwZBL1zICY029TLgVuxdatEUwSnqklD9ga08VDztYBVThNPRqFgJbVaJ573vumaZBJVfb7Xb/bKQKCSkSJIyIqs/eeGRamzGz9rPNN03gmB4Ky77wDBPMdohCGfhdDiDHe9htIZIhU4zjGvu+HYSSF2WzWNI25HLZta58AjcowhG2MccS+bdu2bZxzz58/H4Zh6Hfm64+I3rOB+jiOSWRBBAATsAxHTXgyVsDUeiY7OnLkSCFKBEVRZEKxlIE2OYnIElT21mpfg3RdF0IA6DU7QAEAIi6Xy/1+X+CnovVguNK27TjeaY7uDRqIwOIZbBZMCixzV5NOExPtPlDJVUS0Xq+7rmPm3W5HRIvFQlXv7u7+8q++FpGu6/b7vcGbibgfiR8f05Lw/ynXF4/An7C1P/JiKAmIMjwUiaVg3sFvrBw8pr2EqieJyUD1yAvkz9VOqehZiPnJN68USJa0BBFByeoLUipDkTTOJLVLyxFRqxH4LGgfgduBobxqIUw
29BhjLun3US9TU/yzv7H5h0/RWhwM97nzR3e4JO39udvR65QDpeylaPpJI1oigoBiqrfE+5kVMKqARIqEiGSynZkPFWuzWcy7qMDeNPiVleJ0eaBiVm+WJXjGgyv7qjzt2VXfvODo0TVJ+DvyDyysGHun2aMBM2uMGsm7hGvMoFFEIPYg2LVt67DztGjIe0SIxprtxx2KGkpADOZMP44jYzCT0jhEcyqxTu52+6ZpGt+Zbm02W5jej9qFqpqrPSkMw7DZrMdx3N1sEZGzA2cIYbfbiMjq+kpEogAzMaUSPCHEtm1jjGGIEvo4yuBHm+e2cwldGAEZCFRRBBCIEZTMp0ZAlS11SOL0U7lRRLUyf54BIg3mBShB4hiDxBgljsxs4Ldeb4Zh0OR7uReRYRiur6+Tu5B1NQbvcbvdhhBevnw5m83att3v93Xl2DL7iDibzWxgDbckB1E0TWPunWYmXK/X3vsXL140TfPw8NC27f39/Xw+JyIJk0T4kVY3gPM2hWRrf5KAHD0o/5kSRalOGdou7Ws859Jy8XFF/XOZAhORWfhEBFKNPYWUbGiCw8zhaLrGwl1MiYeQFgZOGYERqlA/PYj8+1Q6Bhd45T++2ftJ8mNINSVAQXO5k5QFCdGSwCOdeHLCCdmqBbvaV+UIEZ8W+wocFnI2ETh7aG2lqtQOp7LOWTD4JE39B2FPNa0Y/ZBe/vz5P66aQX3/s+9VJsiE5kDCYFW5LWcziIgRu1N+xWAyF5KEiFb7/ACGRBRzBjI8V4MCzjFGCQGTQS5OICcJNA8ZuqTztPC+9Klq2bIst1PJIVOM+BeQsgh5SSFjJiKLGwMAR0BEjnEx60TCuFvvY2TC1Wr1/Pnzq+VKtm+987OWGo/MCipRY4Qo/W4MYRx60+mN/TCEPo4SZWdGJiLXtu1yMV8sFk3T2biBkgioKhFFFRAiAOdca5BLHELo+/0wDHd3d977WdeYR6Jp5urXNCGv5Lq04naa/TvMfBhjnM+7pumaZmyaDhmIyDY8IWiOvcsB+MleaNPKzFllbUZBQdIanERGERmGviTiMpjZ7/eGUqqa1YxsXRURjbFpmr7vh2F49uzZYrGYz+cGYEfcpN22bdvNZmMqTfuqrkOUFhai9361Wr169erh4fb+/v7Fs+chTIldikfukfLwg/h3lrxUP59cWurOl9uewt7HSIqnmKcfkvxUlZKSLkNXlOLvBdESIUhRBmrRn1j3NJv3Kv9PzYT4ie79adspv3sKioeEYvrV0/cEgNqNEwAQGYERU3K1pH/O8+UsqW5GnKoEg1XtkpQwX2P6d5o444PIV76t183TS0PPKffqoTkyBR/9/CdMm8hx4SH4SVyJfuirowumCKyjgwpBJxNgSdebPUjSmFu4MYIIEFEUpUq9OT3lcAyL+cyuDKqERGkccJIvTzpeL98J81SqKgrnmmDOjHeU0gUSWmtO+KnFBTTnbgWAqhjQRKxjLAQuEfRcygfEsrwSoTLAw/u3TeNmbbdazBZdu1wsFotm3uDMLxHIMmiqxDgOYCkxNMrY77eb3W43jiOIRIgEdPX8BhTNgATEKjiGsHt8sPmIUWPUcRzDKIrAyPPrlb2kc27etvP5vOs651w3a2KM6/WDDb7Z7Zxz232Ph7rNpmmapul7c5Mx0wuJaAgSowz7MY4wjjwM4hy5xru2YSaJgoAIlBVcSgBI5BBFMDs8WZyhqIIIWtybNu0QQ9/3zPthGNpm9vj4uF6vnXPz+aJtW1XdbrfONSbtqepqtXr37p2Z4hCAmft+UIX9fr9arVar1fv37/XQPiQiJhYj4jiOIsqcqL+B6OPj9uZmFUJ4eNg9e7b867/+ayL65ptvtruH9Xr38jl2XWujxCdQ95Hbv0avY1Q7IlbnKEyxfolVJMWjO38U//3BrtrioXPqt4nAImkSeQQ5GsLZDgJRzMWyTMor+Xaz9DaZyiCrPf+EyFd3+/T86QGecNUfuH8a6EmNmcg
XEQJZsl1EJHImCCMxnEp7p309ATM8UmxaCyGEEOJJK9KepjwmB1kbzkp7l1iw07VScysfKfMd3O3kpdMT5RNqSpybiDPtU5eRXnovrGfHZD6MiKxoUVokQgQ1z1GQkiqBTzT5VpKIpArDUD5KgoNqMM/wX/X4T+CXge3ssp56U5XWU6X0Z7EvKhX1dZEDCkUrt6mtQdb6cSAi55gAAYVVSIUQbq5Ws9Yvl/PVcj7vWkYSCf3u4WbmVEMcYohxDH2JA9vv9+MQ+3EfozKR7xqzpd28uFZNVXOD6NCHEMIwhOurG1VEdoQuxmiyCDO/u7u1WSAiGcdxHM1Y9ezmKsYYMMGbiUcA0C4WJruIiAmsJmk9Pj4aOpa0zqY4VTEPMlQdYmQDDmwxWQerwEAbPYdsMqUxS2VZpvRgSkUotIRkMeBqtTJbWhHLXr58uV5vVXW/3+/3+1evXr1//74wJWYXIIKHh4fFYrFcLpumKYJsLdWZXBhCsHADM1iaE2nTsGHq8+err7766ubm5u3bt+/fv29a8p5U9fr6erfbxRib1huIfpKoV5HIg70GUwDfJO2d/rZulAqvT9LEn1DJWW2u/L8S+lzRZ0QUmPITZbDEEk9lvyWFetdoVmyWPfVnkvNqulGD2enB2V89fVvDuTwx5iXmKgGciBwi1olZXD36Z5tUHuQilh0uFlfmcK6Ziin/VdDRboWmlHp6dE5FvbNjcQp1H9RzPgF702/xo+bjz9ouvVdERTxiiGAS+wRiSs5/jmWxJJCJ4y6iVYIhs/VZuV1RKV5qeK5jE/DkDGT1KoKTJX7pfH1b08bpgVM14WEcZLlDkfOKZ5dp//oQmVVEPLPz5J1vHXvH/9Ovvt5t1/vt4+7xXgd/vVytFl3TNLs330A2DY5jtIUbY9xvtwDomNrGGdjYaLx9906TUOLYNb5tmq5VxaEfAcgx+6Z1qjiMIsLkP/vsNebKAI1LDjgiQgTeN81yQUSik+2wLHtmNs9GVR2GIY7Jd19VUZPbjnPOajKAoKhKjCpjFPAhkHPIybyfslgZ7AEhqhwqEkUkjkFVo+XZkZTQGQD2+73Z54hov+9NqzmbzXa73gZku91+9S9+8Q//8A9pLgSGYUAE7/39/XaxeHjx4oVl3Sx2O8jcjNVkL+454ziayne/33ddt9vtrq6uvv7669ls9tvf/vbNmzfeeyKdzWbr9fr58+fb7db0nyEEy1L28aLeUTvlsI++LSu8lhHLxvwY8a5e6h+v5KwzsJxS5lra0wyKSdUBhwlZTn6e8rkoTEY+TeHtmJHkj7ftfWqrN/vHEOE8bgX2ci0eS6Nk3reVbS+ZQp7uwdGDVTVGiZ/SiienHRTYwycdOZ/mO06//Rik/MgRr5EGntwJf8zsfnyXLr3X0ewUtDhaN5dgD4DtxzK9e5Imj0YjH5+3SJ89PoW0S29XlKIAUDmCinmuQQakOupZckicYYnNVwhhGIYYYwQEgIHAEXcz3y0Xi8Vi0TW//vWvW4bFcv782bOrq7ln7ne728dH3m2KpNg4JlBmjJHm85eGPab+3e/32+227/vrV8+zdo6GMD4+bO/u7jbrfTufgbqmaZqmi1G22+3QByIiTlPAzItZu1gsZrOZ936/jUSW/4zZobl9dl1397gtHiImORkH2TVteVMJ0fCDq6phIqImI8YwDLy4WjCiTkmriiyCeMjpWvdMxlLRGC2YUout9PHxMYTQNM2rV6/GcXz//v133323WKzsfn3fX19fQyUDjaM4h865IYybzcam6WhRJSbcue3jo4g0TQPZrmxi33q9+/zzVz//+c+7rvv++++/+eabcRyXyyViNNn35cuXxvp474tHzBHsfYzABx/iletvTzGvCHg/AXE/klgdbbHTrp5lLp/eklMHPmXD/rT2xPY/fTu8gHyXWiZQkJd2gj0FQDjWEpXRdsUImP17LJFRsu1pzKrMEMI4xhgl9BqjxhFkjHGMsQ9xGMc4hH4
McRjHfhj249iHMGiMBIOMASSSCikggRCigEJ2agfIuSoyXY35JJbP6s8yQkk3Kjnv4kcyW1yPY8W9pIowlGTj9EwAJ9PoS5WGW2oYqO5vZWvgUIsCh9V66/PJzzjjjWT14HRB0aUYt049gYiyggI0KYWkZToGUEARCYIEKk6IaEjJRCx4i1QxqEDQkcQjqSVhN+9OZUQQcrZsiCgtJhVVILLM96aqovLeArYaoqIqoqBEUgHVqIqgKAIaIQYNVm8vwCgAI2oAHRWiagRn6fJFUWJQheSrGs1qqQ27UXS72wLAbDYDpv1+TwDztnPO9X0fxhEBGHAcw3zuttttO2t/+fWXr148j/129/iw2/WvFt2rZ9fPbpYQhoe3v3/crL3n68ViP78xxSYiNI2fXV+ZUPX4+Ljf7zfbHQC0bTubL+eLlYj0ipvNZr/bGtUWwbZZ0qrdbDbjuL0fY0nTtZjNAeBxs++62TiO9/f3b9++a5qm8w0APL95VuDWs+u6brlcdl239J3l9GoaJqQ+9ON+hBjfvb+fzWar1Wo+n0fAsR8GBU8cNRkRTAhzzgWJ49jfv+u7rmtnnXOOlKNAjAqg1KDzjfn+xWgV0TTGSI4ZYRANEoMEVQ0iIYSr2U3Yvd+sd9GpDvrs2bP5q+7uh/ey2Xrvv3z2fLfb/dP/73//V7/8xX/9r/910bZ3AzrXSYzbh+Fm3sKIv/uH3/zqy6//3//5//Py5Wo37B3R+4fdauXH2APJMO4Xy9m7d5urq2a5mm23m9m8VYg//8XnX375JRH9429+/fbt267r5otuu902hIi0mM322+3N1dXj42MYxlnbFXqlmHgmC2n0QAhIiqxImutqGtdfSVEAKWWrIgECqwqiEESFIFEVYi5cTgqW6YYgp0G1HURmuI6oCKqENQKZjMuIWJ56TAcutBEGc+hVIiBAZAWCSKpAEVCUoiJEIiTpCSgoKZBlgrXkKmEUFWRmFYyQOKqiDsUQiZlRSABAAJHIm8IIEQGFiBQIiYAsPawCFBOGUUICxCARECw7UEphpKhArJEABdkYrBCjmAMxYAQMiAFRFILCKBoVWIaoMWIQMpJipUe0Ypiy9Q5dQQtJxRIRAYEAQcRcu5x3LhnWzB3IUnU4+PQ2icknEvepYu3wJ4Z2n8AT/THS209rtZRTn/nnapdGoNpUxTIHtTOIVru6ViuaWGgRO3lWDsza9XPPcpdQyX+HEufxiB2pSC/d6rB7tZJz8uxYLBZmAzNdWdu2jfeIIDFsN+sYo/d+1rVtg59/9vUXn72etc393fu7t28agmc3Vy9fPkcJt7e3JIGJVtdXjpAQ4xgzgIklxDKz1mq1att2Pp+rqmk4U9k5cqra+NZ7b0lAzGhlubKGIWn49/v9er3u+17RmeTUdR0AxBh3u52IoEIphspIzrmHhwfv/dVyZko/06x2XWd+H8w8DIMJXiYvAsB+vyfvitbX5F0BNXlxv98HSfpDxFS3oa5s55AiopVjHUJQVWZsmsYEVAoBVdfrB+fc1dXSrJIxxqvl6q/+6q+++f1v7+7ubm5unHOi+vrVq7/7+7/f7/fMDQCIKhIau7xp3DAMTQO73U5QowgziEjbWqQgbDabL798/v3371Xhr/7q57vdruu6r7/+erPZvHnz5vb2VnMGjFpOhT+dxuVInjtapVhZ++BcpPmxfKkH/p/F8vRBMfQJIKy7dEk+u0SlDgetio7FP5ayHb1RrbDFIjsQEpCAsdHEzCCCiKNEIgJ1RKLsSAA5kqJEFEABVCRRUSTzSSU+KCA1PV2yIYYQgcESdyMxOWDHzEjOLHyJ0Dk8E67+4SbJpnrUjsIVjvzOtfgSZR+yc9g3XWOzhDml3VPqgz8RJqYeionIH1YO/LTnnrVja2VkhhPp/gnV/6TZAMj+kIpoakBKfEgyvSUgTP7N+eeTtVtAaXquwhnDu14wyZ5Ttx65vVAuRQR1oYYD6f3wz9KI3DiOzNi2LQC
ICBMulwvvfeiHfb8N49g48rP25ubm5uZm1mGM8f79m28f7sdhf3O1+sXPvvzys9e79T3ESOQd+8aRRtnvNpu+30tjGKNVOhhVnc1ms9kMcr6r4rE1jJGIfIPeOyZWVavesNv1zLxcNmY3Lcbv+8dNCRsI/WABbUT0cHfPTN67rutmXWcl5Zxzw7AHoAi62e8et5ti8Hvx4oUgjOO4HwdBMDBj4TkRqjIgRLHiO4mXRzDDutkIm6ZxRES0jwEQCZGRCMAhRiImWsx834/juAvDYKV3Hblm5t+8e9s2M9c1CBJG3awfJI5ffPHF7d3yx3dvZ2GMEu/X67/8y7+aLZe79QaZNErWkWo/xL4fg8Lq+ubdu7umI4jiHAbRVTfrxzCfz7///mG1GmczfniI79+/n81mX3zxxTiOP/zww48/vjEzoeUy9d6DSr01Jgv0n0LBeIR5WtKqn+zfIwVpTYhPNw7U2VgOc/1fog/TnxJB4ax8UvOLhdlFVDyn+EJVKlys1dbAQEgADKVSw+Htqzc36eUgUnbC/gO1XHpPO42ICB5ArFSzIiiSAZWoWrVnAFCMCCyEShppDjEqBxQxexjEiNllCU65jWhUJcGeWg0VZHQekIgIySGiZPMnEuFPgL1jPKhonGQ7XgnUizGahJcGW8GSXwOY2Ed5gA88OcsraWXZutifT0+OUE9sfZ9p0ev5tQsXHGc+pgenSzyPw/mvLl0P1XIHM30BprgDAAFBJItkOJK/YyBiwBTOjiIiSEQqIkqTdJjf9APSHtSgawdSeWbmULwjCE/YppScSbN4mjZtSdqJ09rw3otIjON+v0fEWde2bbtazB4eHrbrRxG5Wq6Wq7nJRg3D4/sfjN9adP71V59/9vp163izfnCg3XzeeScx7Lfr3WYdQiDk4j1o+UQwJ3+xQt45WjxY5hQA2G5TtfH9fh+DlJI3FjqSNyQXZ8iXL1+a5cl7ryEalHZdN7x6XZNCVbUMll3XNE1THm0BA4ho1rW2ba2ygX3VdV3gUEh/DMHcI5k5ZRmLKTrFOZdAHdQU5o4oIAZTfCN2bauqIq2Z1kREgBzS69evzNEVERaL+e3t7e3trdVSv7u72263QPi4WT/sNl9+9bM//OEPu51EFQF1ROZZYdv+9evX797dAcAQAzmOMTRNs16v+8fH+Rxubx9fvLhG3IjI3/7t3zrn/uN//I/b7ZYIF4sFIm42G4M9Gfp6g5Qx/8ntkpdAkdjgxHXztMEZGJjur7V7PZwBTjgLeIeNVAiK245VYDiot1fj61lDY4HGsjVFBD4yIW/VsSdEVQBzMZu89BFAkViiIGpEoIDCqqqE0eDNeRdDjBGDjzE2mrjMQruSluXCcDVKqigIWdRjJUQgZAeISoyIYIhbOan/FCXn2VYTuDSqUcCIm0zcfWHqCQBAct7+PEIAWXF8aKusvj5eDX9En6t8l1aHM+VoRoFUP6d6NfizaVz1EOnrkTxSHUx9UErh6aom0IlIhj2Q5C5oYh/kOIfksS6SEvaVMLsc4Tf1R0+2U/kLKtQ/xLwS/XwhaCHV5Cv3I7Wk8Dkb9QHEogAKkgoKNyz70Pd91zaL5bJx1O+2w27dEC5Wq+fPnzeet9v1br3diHRNfHlzc319PWvabtbMWheGPuy3Ny+eOYQYxvV6vds8qupyuVrOF81uLGFzpjk0we7x8dFwy5xKbOMx87PrmxKZMwxDT6gC5g9CRCkTRIoBcMw8W87HcXREJq32/YyRDL1EJAzF4TmleN7s1nUxgXpYLDknMrAnFBCI+2HnQFMQIQAgIDJqZOQxBpAIlpZQYhwHjaSq1LVkYQoqqGI56hRJxkCI81l75ZequNvtdv0eRH/5q5+vHx53/f7+/t77lkDevXtzf3/3+vPPvvjZF7/+9f9gdLv98Idvvvviy69++PHtfuwpM75WoTOEsNvvP//888Xym1HG0A+ekNgB8RDieq1fffXs4eHhzZt75+Crr74KIfz
n//yf7++33qfqfRYLXyIfpgWU49OfYosx7eUkup0GyGYpJQX12xK1VMZJFkHJirvJvgQAlhHgDAYeeiHkZ2QWBwDOSI1QA+cRjuYakKUdgD0KAJIiEh+5Wlrm1ewVYO6aaIE9gHhIO40Sc956kL81mQ+ra44i5AqcV28KQFQVEgXLkyKAhEomw5HjGCO7GGMUDSxCKbGswxix0rtwZt+PBioTkfwWiibqIRMgI3tbIgAASlLYS3Conw57p6hr9SoZKZiTIABX03xI+CDXf8csVh0sUfv4eFEPPl3aO42pqbSqepCaCE9loErjd8Gl5RIunr5LPj4UKI87O2He0W/rJhGtyrooWGZpEZFIwhaJLEqU/EPQ5bRk9klFHEx5as8962jGz0FaAjaAVCQdjuS/ag1UIHd0bLcVqMoij+PoiZm58a5pmlnrw9CvH++vl8u2cV3jNGwf1ts4hvlstlqtfvb6yhGPMey367BTXSwW8+75s2sZh/UwjEOvqqvlddM0vmFC9+pVgjFENP3Efr8v2kLrQ7LAMTvndmOqA2fKQ0QERYM9Zm6aLoETohWUGFWYuXHOLvbeS4hlkdidDRE1JRAQi8wrDK8VH+j7frfbbbdbc5mxRMzb7XbGqchARWjAHEcNC5HJcDQlfFHgXGUGVFHFERJ5RRz7oR+HwAMwoeL1YtW27bxhmDfLefP4cLfd3HvvZq1XjQ8Pdzc3zxerRQziu/bHd2//4i9/NUps2zYYrxACIynAEMP9/f3Pf/7z6+vrd3fvBEBAF7OFeaguFvD27e1qNR+G7S9+8dWvfvWrv//7v//xx/Xnn1/ZXGw2PSK0rQOA3W7X8rRKy0s9TSKeaKdqyeSncRhyntDsspx3cMbq9x4WNSuXgcrR0+ECCtZ3Lt5PjtETOuYck6ZWWPhChprMH5y1iv3UETvCPD0YSUslMTGwigiiyM5YblIGDAAg6DFE5QgSTZ8JHDFGiYgiEALmHLyZY78QyRbBXBMQUdHCFcyXpRWEXMabXP55FPwpsJedhDDXcT5oxbRQvjINZ0r4ohl1JFXYNLoiOPnSE/CRVW9aOOdahE9rpxyOPVwq2545EmJ2vTwY5T/Oll5jRrnhWXXExUcoWW0hVVVBRbSkeybtJV7K0kSpFiFMRIlURAUlZ2vNy1NQyWj09IJnbXsw4X46o6rwZNzedMMqXZmqGWgOzCaHw1I+lUCjBCSYzdpZ6xACk14tZ6tZ4whVxr7fO9SXL69fvnx5tVjevfk9qBLzoussUDqEsN9sCa1EDs1m3WKxcM71/X6775dNV+deKHvMQiMg73PrWIzxxx9/MG2n957IgfkERr26us6dT8ypyXCu8UTU9+N2uwZJNxmG4fnNMwANGkMIGuMwDAa3ylBC0EyryWwlMlTEapgMRJAy0qA2joiQ2bJSEDMRKEikTKYVVSUoKrF3zm3DqEIKBDGgRI8Ejmw97FTGvo9kF/p511xdrd5++02M8dmzFxqH/fbx1Wef39xcvbu7f3x8XC6vXrx48c0fvu26bhjHYRxFtfHsPcexD3Fkh41vAGA/Dtt+P18t39y9t1FdLBa7vh/HuPReZGzb9osvvvibv/kbe+t//+//9bt378ZxJKK2TQGaifads+39tM14fmPitAAFoOSiPZT/AAiN9OAF294hlXhKjXkWigq6OARGqJEv09gDUMRMkwkUUz4ktGoPBJKuAvP2UAZKVwJkyc90nVLZ+Sp73seF7GFx3rE/CQFgjEiUtItApFmtigDgCAMiAhElrwQixTmKIATKvDCK0BP0kAtpKpjDSojkLT1fQeU0pJYv8KctkdLY8A+RAJmoBsJqBQhgzpE64Y5exrJ/BgfO+tFn7W1/1l59ULT9iEdjijmHkgNMa3BVhcmYmirvUXFSOVJsQiX2PYH0tdCbgc1yv0BJzjk97sRvpQh5qmpGBzMF1F1pHO/3e0Ztu0XjCUTmnbt6/ur7775xRI2j62X3+sXz589
uYowPt+/efvfNfD5/8eLF89VqtugkAolg11pQc4yiqvv9viSEvL29NYmh5FIxB5C7u7vSh9pF6/r62vSZbds2TWfimnMuWKj7OAJA0zSWgQwAxhgAYLvdPjw8xDGY4c05t9lsbFSdc+y9peV0znXLWYFMyBi53W4tWNBSYj48PAzD0GVHGKzyqmjeerv9rtj8BMF8dhARxhHIKStGYQUlYCZmf/twz6DPVsvFYsXemV532Ozev/sBgG6uVwTaevf561fmYBkUdrvN1XLx33eb+XKlQJZvmh0SJu9R73nWzm1Ut9utaSyNOHRd97BZxxhd1/27f/fv3r17573/8ccff/3rX+92O0tY2jTNixcv5vO5hX+YtfXh/btpxf+JAO/S3eozR7IUnJN7oPLkxEPFyZFs9PQrnD4XEQmBsKg6lZEsB6VdXKorHd2K85lc4upjBk0+aDu6NFaa393O2QjI8QgwQAmAYDWjPrIgKQp6ghgRWUUS7RCBC5owAOCcRdbKjBKROXNaRrDEPlMCfUAkYlRwlnxWq2ws9U0Rp4JbB4x/RQ2JiBmYmUSJyBM3zquXGCNEaZ0fFQREVKIAqFgZZzSFmCnJD31x6w6U9z3KuD8N96Wpu1A5wdJuJXnOhOjqPiblmJ3PehKnH04OkABQ0j4dMWuXmDg81ByWFMMQJzmpXkMn+op0TYxa3NHzxCGAMNXQMrGHEiOokpMQAgiCEKAgCmCKlhaREERVkR0RJhsVTxiMoEWVZDlc8s09Im4f+lw23Tqjhi6AXKu2EZHZA1AKD4olL3lWPkTDSEQqLmeiKqHfr+aztm0I0EFs24YgrB/vQMOim796/uz5s5vWu367ef/+7f39/d/89b+wAkASYb/dmHdJ4yhVf0VsmgbJjSGIhFK9r8QSGIZZ2uUi/xnI2XlzvGZKScJs9MZ+MDXsYj6fz+fM3Pf9brMFgIf1vd38+c21RUQsFwurtqOqd+/ef/PNN+v1Zjab3VytYpxvwp6Q2DUtpBQtHbdt1/R9j4gtNcvVol4eEoL3vmsaU2kmQzXicjEv9shoMe997xDbxqsEFXAI3DZWk1Zj+OLVy/VmF0GHYXi8X19dXTnX/vDDm+V85px/++P347Af+t0ffvfbedf+/Ksv+yFw0yyWy1/+8pe//e1vZ4vlrGuGfrfdhJub58vl3KJNvvryZ7/97W8d0WLWfvvtt6vF/P7xoe9HQ77/8B/+w1XD/+W//Jff//7N//q//l++/fZbg/MffrjtOmLm29vb4nBkxtebX/zi7du3zrmu6yyjWxxhNpsVRXSZRxuiUSIzI6Gg2T7Bvo8xphDdIwVGDrhTQstXjGxVLRQRkz3PiEQp11xJn2jJIAFKoEiRgezKNE2HPynHdFgOHkoe2kPZjpm9d57Jqso7JiZiRkfozJ3KHDcIiMBqEEQZmZkUUVRQAMi2hmnRVZWBEYjIhRAQI7lGRAACAJHziIgEgCkRWmFfTDdWUVAqh7Z6KdWDQXM2UExxACIKQIqASCqAhISECv04EDtAoazetAm5hNYaU9XURNgBAFAVib2k3HJUmAIAUCEsfpU1wT2l2rV4Z4MuIk5EREP6E5iZg5SVVxpJzHK5mrALAqofZij+rO0nGwOO2v8BIumpsHXUeZOSJAJS8hHKaVOhmIKrnyQdaa5cq6UKzemYaOXVmaTFgyWoAGDMuAxWUhxU1TnPjCFaWnhFYKurnuM4KcudB/ppZo6qiVPOLI6IQBgamq1mjXcEMajEYdiHff/5yxcvn93cXC/D2L97824c9ler1de/+MqFXcPEgCJjDEElKoIqNm3L7Ljxs24BhOY2KSJXV1clnR7mvCeaI+1MoiKiEIJlj5x7JyISIVZFxm2pD8Ng5jcLsNtu9jFGZOi6bj6fW6mBt2/fxhCMdnS+UdXiMmqzfH29CkPc9dthPwYZGR05dOTdYlaOySGjQwYC9jESOkyZbSKBcw071wQNVsAdIAUdmyqm32y
tw2gKUTaa4cZ9zyqqwgKd8yQ67vYQRo3iGlrc3Mxms/0QmqZ5XG/73XbXDzMAlXCzXL6bz9rG9bstgY7jEIYdEREKKoRh3zXuarm6vX03m81ms/bt21tiePHixc3NDQP+p//0n9br9XxOL168+M1vfiMifd+vVq2Jm7YGLHNb4h6uVjFGy2Fm6V0ItM5Wc7pDD/j1iiGusQdO+M5p313w0ipX5pycT7hiYn2kJ+bzesed3seIakI7QiZwhADmeCKkqRQ1nTxXc9iYglLFpj9BtVQjWmi5BtQ0qlpBuJ5oO/XQJ+NCm5J11BeZoUORFNRsXVrEEkiORQoKCHKRYmc8Mz1nuTMZu8FWV6m89ZnkZKcieT2pKYETu6AgLCLCos45JwBALoYY1bkYY3RRzC9cRBwEVReCiACoiEI0g3EeMwBzRgIzpl3SIV8aUfx0+NQqJQ9msa3Y+RBAPw7LnlCPnH0oVFurLP2Ph81Dq9v0z2BJRDFlKwAraigiqi4dkCsaxaPO2HkRtHpdlru92PnKkkjEAmtKoQCZbCITaTFrWaqaQk0mdi/nIgdIWtDSDQvyjHZ7BkSMEkXj81W3mLmZA0QVFETlhpTbv/rV13Ecdtv1uN85R6vls6urq9V8sb7dFzsioSNP5FgV54vFMIxDDLvdjhwj8ny+dM71/c76Z1q1kn5zNpsZI2xqRnt959x+t7frY4w1Y8HQjft+s9n0fW96k3nXOOeaRZeYPtUwDiHtBIjDGLhnZkJUle1mvRYBgMcfHiCCoDh0zayZzWfz1XzWzB42DwyMDg3yHDIwMLCGESCCKKj5ySmCIIhnJCVRyeoVIFAEaV0ShoAd5AkLGsZx9E0XxwgSurYBjbvtWiVcr1aIaNVXHJL3fj6bXa+uxv5t7Pfru3fLxerVs6uum+/ub+NuRwCM0LUN62IL66HfzmfNF5+9Dir9fvCNu1q2n335xYvnNz/++OP//r/9f7/99m6xwGfPnlmBvWEYdruwWDTFtmqjbeMWY7y/v9/tduZDVCSqkpOz3o+YpQD7p3m/a06kVO6P1f6CQztfseeZT3pQiaARVPBImpnqS6SKAJV2SisahadpmA5pyBEFZkLHZNZcZizi7ImLTfLYJBDMpkQCQgUggWSNSinm0dR+iXLEJCdpIXwRgFQEUTWaZ3IAEFXTMrki6iUlkHl3J5XmwUgCAIEoYAQAEAIVTeqmNLxqSV9Qs4dHUS4e6R0vCSoCB/n3ytBlPrJeAgBZfXhs25uWwqGEV5pF2lseBxb14iKpMnrm6NRzjC56UcuaISJOnYg4Z8pWggBRolpU+Kcg1mX57CfG7R0ffKLcdqC9PDzzMR34+IuPGcMTpxIRU/EX0a2Y96bdKCKIrJCL8gEnFBRz2IZyZaXAzhsbjzufDxQANvsNAACSTfc4hBjGYRiIfYxRItRyZ2KTlcrPy91EhIiJWESCBlv6zrnPX81jjLHfxhgd42w+Xy1edI13rOv79ebhvuu6z16/Wq1WYdjf3r1rHBM5JQByDtX7lhyDYggxBBmGIDKwN/tdg4hmjbMABtOVmfD37t274mYC2VxnBkKotkkJ6bsbHow0O+fatrP8ZN77fRws8s+wE82Yx+xnc8vOrtlXEwCcc6+ev2AkcuyIyTEjIVEch9Z5tEQREkRAISgCKlAQZvHee++MWIjqMOxjVQQjljTlcbPsFmimoBxeGWMQEYjiHY2DaJSW3DiOcRhbcrPZrN8P/TDsdrsY1Tm36GZfvn5FqPd3jw+3t8uue3G1Wq5W33zzDQmwg3dv74nuv/rixV//1b9puPnmm2/ev3vz4uXL2/X9z372s3/9r/7m+YsXb35880//+I/ffv9wswRmvrq6+uGHH/IiAXOLPVX3WTylaZ7HcbQhJcKjzH9QI98hnGjlflKo3MHavkBka2mvvoBS4SEs+GfWh9MAhnyEeBgOe2QNqSnMgY6NyGWXQSIijIzEgMRIBFwIdfWrfJNTH9Q
zr2ZUIJ+LxbdHRCZjHaJWpb7SGUPdRD0EsaR/KjcXzEZ94wvoMrePiKQQEUgBiBgwgqJoACEFQTj61KSwNa8CQeTkMIkpP3geAUjHqlCHq58VrmtgL7Cnqg4pElllLGZmQBP7nIs+RnGq3ocQ1ImXEZyqB40AICrKIpklkpqI/9T03p8Gh6aKltqCaPZFsA6lnpR+PS2OfarAd4S1H68jra6n4q5SBCnzZDnbNxGwhVakvVpJk0U9422dCGCCyYkblapYzGkLQQAA2JIcpnVi9qTiGll8mUExlT8CSKBb4jiJ2qZj70RkN/Qi0ra+67oW+z72Me4b5uV8sVot5ouu9c3bH39o2L16/XIxmzeNG4Z9GAYAcL5F8oqS7IdhhBhEdJTI5Luuc64xXa35hnRdY+Gxmmu3WjGdvu/NpLRarYoICADb7TrGGEIiu+M4hpCcP9u2XSyW8/k8A2rYbreC0djzrvGBcBxHCSHEkZpGVJjI+2Y+7wCgdb7rugjRERmzE0TiOPbjqDE2XUcAwTiIFACEBDDvumJZsAkdQpAYd/tdZeUCAogxhhg3o6XwTrVtEaFBFqYRozMyigoaQSMj+NZv1lsAmrfzWTPr+9557x3NZ+2Xr1917B3o3Ll2hl3jPMjPP7/+bv24er5YrVbPb541TL//7T+9fftWoiyXnXf0xeevm3b247u3//Xv/m7fD89XTiH2fXj27Nnvf/970w8tFt57v91uS3aber013hvrYMIfZp/bSzuxRruy+8xee3TN0fVlZVrWk1PdJiT/kAnwpv1YXWmXlyPL7HNyART5poa6CfIPZQ/MSdLzFJsUiGadQ3MwRCIG89ssBXioLod74mKjEAEQlKEoYynF8JUOG4dRRD0090vV0nHNmJ+kCY2QbPgIqmLFqHNCLjwh01wsodVnKpqsQAhHn5Z6HhVy9GDJY4CQPXqmuQKIprY5JWRFSKwXh3HfBfbMdUXY8gqqVwzeq+DoRUQsGnqMEQBajJYWRAUhRNCgygBZnfXR7U8s7R0K46qf7KJZX38g+T2Z0+T05AefWgPk0f7UHG+nVfGE0+2nVUC6OfBUAp9qypli1RDzAq86m5MbHHSgiGtWSm2IYRxHS1VrgtTjekvkiIKIBddMfQOglMUa7IFSviq70US9pml26x8RcTVvb25uFouFJx4ljv123rWLxWK1WDJz3+/6vtcYiWi365kiEki00FwCAhFYLa+JrECrlILypsM0ACsaM6OnL1++NNdK2whmCxzHUTWGEPp+tHRZzrnZrHXOzedzY2RNEEG0iFtBhybtlRS1BqKW0rP1TQmqMy/o97dv+zJ/iETUek9tO45jyIPPaIY5QsSu6Ur1iSLbWccMC9E0ZYh2WUOOiExQUFUwFl+1dT6GERU8sYaoql3bIuJuO5pbhFn0CdAhzZrWE899++rZzTiOMY7jGBet++zF11f77dXyGlC2j9s3P959/92Pr17dvH792moVxTiuH4dvfv/b3Xbo5uwcPz4G5+Dzzz//3e9+p6p9rwCpxFIdlqfZ5w7Z24jZ61zahvWWKVFf9VY6Gwp2xJhWGorzHigmPECFVcysuaZJtYMmiOWDepl6eKsj15hJciujwTYXmLwI2ZEj9ozM7JIUaDHbafDsDFSh/Wf5g8wU23GiA9PogRShNkI8EsGTnJfsGikADCCRGtQiRouCJk9GIEABpRQMopXnoJq7P2oS5oAUAM37pUqDNn0mFS9kr5Z8IzmcsRRHgISgemzbg0M9QD2j1hw7VFCnABAAnHOWxr0hUMHGamGbJUMEAFqKRm5ACSikmBeUOEaAkiflvNfln6+lybC5k08wsJ251Z9TyXn0K8guwpknxYRTOW5PAI0hy2TCJdVlYkinW50If0etejRUpyrVWbbtTcQFTCmPCABt24ZgIUExBo2h+AlTSoVexTPYz4dhCGLBUMKZUs/bpm3b65ur6+trZh72fRQlwpefvTbk2u/7ZPgxs+K
o3pvyxzlHTdMpolmaVdGCvgXUoguYebvdlnSyRraM5lrA33q93u125jFo13z++Wvvfdd1iMhZW2p170IIm/XO0miZ26H3br1/KMEPxhN0zbxtm3k3M53qMAwSRlXah9D3/YtnN5bJ2ipLhMxu1p6BzIzoGIkI99vNOI5DGCdDl4qqXl9fE4FIEBGKKQWwxuB9h4gAIlEsb0sEFdCZW+z3PRJ2bTP0I4J2vhnHsevmJtSqqkYBwBTh7ti1zcsXz969ezefz96/v5237XLWtS9v3rx597vf/VPr2r/91//mL37x9fX19fZxMwzD493jP/3mN/PF4uHh7mrpFPHxoW87NPWyeX42Dex2StQXUaaoBI3+WMYWzLkFSkneFJt/gnxVAtiLe6putcOLAErJtECpcmG6OxEeAmdRj3GWO6odVMEenNlpk5ykyb5VpDpCKGyHsTJZuIfqGJmZCYnImduiFWywm5zkkklPVMHsSo3JzyAZPABAJACAFWRFCKl8cS5meyDgmsd7Nm0W2LNIpEJ3ErVBshwVB8MDQJqdXjTiJA+oRU6qKl/QN1kWlmLDm+Yioa4JjzYjRq8c4AVpTw8TYtVDRuYSrVIYQGZVRUcQnZoziwAUlxaPAQCTqQ80JexThPFTA83/LO2yEPlp7SOVnEd/fir4Hd/NOEc4gD3bPoWUTx4kqgBHwugkytebsKBR/SzNwQzVNaqqMSR3D1NyikgfQgy6WC2JRIFEeoqCOHmv1KNRzjjnJOo4jszsWtdkj/zPXr9qmmY+75rG930fwtBk0cp07AZOVqG0bdumnRl1AEglDsyy9cMPbyx5iohYmmZDF0QtUl39yvf393ZgVr1Chd+8+cHys1jkz263W6+T2tM5V0yqfd9b6SLXUdM0y+XSgJCZHRIibtYbu3Kz2YDIbDZDxHEcHx4ezInGykEUMFutVqrH9AsR7x8eAAAoab00O87s93sAGLKHqsXMDcOgfSzSA5IjIuAELWM/uFnrnNvvegBgz/v93pI8mTgrTSsiImG/3y8XCxHxRAzwxevPSOH7b77pt9uNjNfXq3/zN/9q1nU/+/Kr3/zjP97f3n733Xf7Xf/DDz8Au3/5L/8lIooEAWoamM1mFtVnJtKXL19+991b4wZMZi3KtEKzLXU4IlqhXe99KiV/bieWhX0EUbUIWMuCtbSn1R3gkAzCCVaVRxMeU+ga9ijF+RwbI5NdNqsQJ4EPpqBM4qlqB6JacpaMj6Vn9sODvn48R16uzD20FAFanlES1h/BXs2aJC9BywUlClo8gAA0KiKYNvWgets5+vbR7XD25UgFqGDLAwGS/tKlJyNYpoaSvwPT2JKIaAQEdejJ8TiOEdS0o0wooB4USeMoTaOI0BD2gzYIDeLoyG/mo4x773rpN0q9G7cKo4Zm3g5BxhjGqAgeES1/2hBHgANlAtV5GeBYLixeth/ZJgfcLCBn3oQAwNLXpfpYaXGHMqBUL6BY1vr0CQCh4MzhF8UpLdnMs5hY7MVHu+6SOY0IRNCpqQoGZjT/FabkpUzkgARIicD8jac3RkQmZiZmII0oORBBCVQxMoFXVZGYOMXJgZsqeyKKFMX7SAIEjKSqg+g4xlFCVP3Dj9963/p21qxmLa+GYdjudn3fj/c7UAUMYIUDkDw4RGbUfrf3Db64XjQkGvbX16svv/iMx7WqxnG3HbaIOO+c9y1blUiRfrdbPzwg8PPr50S03+914QOiMjVNg0T7YXhcP5oEpojMvB/3Bi2LxWJ1vfKCqaDkkBJGW+u6rlRRMMFi2Pci0mLbuKZxzlBKxjHKCAAz7/t+F0G8d4gYwuhbaWfdynfz+Xw2XzBzP4a+7/dhFJHZfBWiRMBmfqVAfYxDP4wjfvvd95YadBiGMQxlbcyvnkWJoNA0DbeenQsxhmHUbhVjDHGMQ0w1LKOo0mY7iMg49uM4gmjTNMvV/PrqajtKUX6KBImCQQHg3f2brutgP8hAXUNENPaPFHb
jfryazeZNA2HT92McIhGtfINjmLWtw/jlZy9k3Hz28ur6//y/bDab7X4MIQTRGOP999+4cXj3cAcS79a3Iyhq6HX86i++/m//7b+NIXbLzpP77LPPbt+973f7EOJduL1ZdLvdftY2zjmmSWncWEI5BCKSIagqI2gMfa/OOWRABmUQUiEVFEEkEGC2VLSUgSTp1g8r30JxKnc+71Q1u7QYDQSnIIBRQUXAHHgRKdLa9kQhLGTJvtNmNyVtTN8AIAuoMpCIgIBaBisADYGJHBEpgAKKOgBScC40zI3TltUxOBCHkYkaJMfoiB0SIxKSSXvJjjVpRDF5eFrhKVCqgtgElJQQMEUxRVBQ0QAApg6Jmur3ghJRBESIARBVWBCFrKYVAyA7B2ZEVJbENyuoDjikDJ92e7AsiqAxQNYEE6iCekAAGNFqQEoqg1Pb+c7RSV8FU0G5I8Ch9F8FnVvumwMIOTRNYeXsVFqxnDOz4pTm32nyh3bOxZztSVWbBhHRrCnBxGECQpaxZ8Wo5oYVVVLkkaW0wQoM/lQC2Z+kXRLRPvU8/FRpTzNqTtJbsVOiqKJCChJIQ1cJ/vWQ1j8smVJO5VH7aZHtjBYUxUW9YHLuzfTb1eJqjMGyk1iWPEAkomfPnoUgQ4jjqGNQiRCVGXG323VdM/a7H7///nrZfPHZ65vrVRh6kOS2UGl+SFVvb9/vdjsAmM1mhG6/33vvb25uaLEwSc7K2lks3TAMy+XSdJWIuFwurSLB4+Njh857b0XP7WS5rO/7/X5vZd5KHOr8qjVzYEHEIGNJb6aEJcgKGR37BlkB+74XwBCSA5Bzze3tLSIOMW42m7v7R0sc0zTNFy9e9H3/8PDw+PhIjC9evHj9+vVqtYpxLBL8btfHuI1xDEH6fRjHMUowYDCHTlV98/aty6Y9Udlut+vNAwA0iysLAEdE1eiZSxJR0/zHGEWT8tA559r0banKZF41FsXoByYG7zkVZ1c1I9B+vxuGsB+GEEdP3Hb+f/43fztG/fH9WytDuFqtvlytAOg3/+O3//bf/lsrqte2zTAM3HazWWeryNypLBON9aHt2rI+J5HoUCl14ExSa6oOryx7sCzm0+1mFzJzjAcFg8odSjh2tXdo2hcHlSxJtTChJpjZIQAAccIj0gNH+joG2mEdFU12ZvLtJHP4nF520nMiWuDEkcBaXuaUwhY6UxMNO7ITAJDTMCmA5vB/RFRRKoolwUppnGx+NaNfl2o5LwDACTCdPfM0jT2S6d1Z4nt0KVa5Wuo+aa5Lh4gRpvsU5QARRQXmXBKCicgpk3NOe8QxAg2IMUiCPhWwbCNalADlVj/V0fNP1U5H8JPOn52kCzDzVCMDIErXF+lRLR6J+DDlzXGeF6hgT0rJoQr2ah9OIso2ZmNJUSWqqvlWpFycySsYM3Oc2tD3IXE+avWojG4CjKoYowQBjYTAjIqqBDqbte1qtpj51y9uXtysUMNms6k91nIdu32Mcbvdqup8Pl+tVoRuGAZDr22UsR/MIDcVfYyyXW8Wi8X16moYhs1msx/G5XL5+uUrlwMKi9eJqXcsRNpC2k1rau8VdOj73srmmehsQbKu6RyAeeIAqSoyo3PNsN1JH2OMfRhDCCEmRdYQg3nq930/DrFxvpvPuq57d/vOYt69969ev/7ss89ms1mIkdkrRMvKmVSgcYhBmZ2AmimTyYHiPowyhuVyWXYQojnvOFOfas45IiIBwjiOjTP1L8QYk8s5i/kHomPTjoYQstOMxhjNMGn1jowJCGEMITiixrmBeMSkTXVMPuLf/d3/1ofYdt2//tt/u1qtfveHbx4fH/t+XC4Xz58//7u/+7vdTq6viZkj6BBD27ZpzREZSNsSMpVmlFFBs5sHAJrnesopSUnbJ0g5X+WhKrDeHafbsyLEZRdAjCNOucEk+8dDdro5gD0oIKp1/JkJWBVNSEZuizElshtOsAdEOdyFjVdJFj7nHBMyEztkh0zIWcv
tODmwmLSHxeUyJcg+IQgGrgCguf5MHa2RX6loXycfEmQCMMnF7g+qmPJtMgCYYjOkTGQRILsJAEFSluUgK0hZggEg2vGh/c9YsvzH5Jb4pORwID7hoVLtYgBDWRPZCXAKzJwmu/KH9FC5uVd0Mya5UG107bN3kZibENxA/RDGcRxigIimoEElgYhKQIpKgEg5OPr/+FZzgnri0Hh6XuHi9dNiuuCcefrc01ai63LaHjIuyrzh8TKOTj007yxRIsxx5VruWQFqmnrJfOLRW9hOUEgb2+oFiFh0tBrvz0jIDJrcIy0ZiokLYVQRkqgGoEO/G5y7+uLVr77+6tnVYrd5WD/caxy765X1ZBzHcSx3kLZtb25u5vO5iIDScrkUkfV6vR6D1Sswq1jx7Gfm/X6/2Wy899fX15aNWlXnvt3tdgaTkEsiYE6N0TQNZIWJPXrYbkrGMu+MBjVE1M3neYJgjCmjJlH8/rsfEdGIFToGBkAUkcVimWPbxaSZ7Xb7/v37x7s3V6ubl6+eP3/28vr6umkay8m5WCzsvcbRArcljBpCQEwur/Y6Y5/Q2jkH0Qqmm7uDJlXtGGKM4JuCJf1u2MnWCgGasd+udC7JcDkEhYjIsTMiaZpY1QhKoqNW8dpt651bzsY4hHG93d+rhBD+5V//i7/7+//6m39aO/67zz7//O72Hgi7xv3yl1/bCry6avt+j0jj2HvfAAgwI1hV5Ijg0GHDjfkTGIOVnPfxWDgo0oOes4bWG+F0m+hhKxunuDIeyUs17OX9noCnUnImaQ8qUKm0NaopNxgAAEq6T4kFKk65NiOWq4U4LVFHzAQlWMXWal7DUz1Cy8lJlXtLpgu1MWV68ZQip4LJ/HYlpE1V7VMtWg6RICoiIyooWQq0OklkhtKka5roybGLOBwN/hG1fFpAL/epL5soNp0oOc/eiKrSHmVx20muYM9VZVq1+m3UgBjFHkSgBECKzOSQAyEJIhArDbCHAUGHMZqLhjlWZ7/UXKjoj/G5/CPax4t6mEIECrtXDWkWq4pQ/MS0IZz/qqQCOnJEQSxMIhIiJbvhGSeuemXbslaYaFY0GzaAAjCpyT6I6PL7ls9k8hYAgKhRVVP9AlFVXSwWwzDs+mHowygRAAio9a1jikGHYRCMIYwhSIwRRGetXy66m8XMSdiv7/rtumGdLRea0jGHEmNgMDOfz2ezGZOXOBr53m63d3d34htVLXq8snrX67VhwGw2s1RhBqW/++4Hu8z8IxDR0kBnsq6l6GWK0hv2TdPM53PvPVr+aG6YeQgRwOBc+jCWYg6rZy+KpBvHaG73IYTd7sdhGDSKc65pQ4zx4eFhvV6/eHn1+c8+/9lXXzhuNpvNerdu29nq5nqz2e52u/1+L9FgmARcEJEhNEBAkliCvo9xRMX3724BlYg6b3VrTVCOwxB67Xe0a5pm1jXee2M7dputhKjSOOfMfxFFSVE4BQqa5EHojJaZy4/5EBKbTxN678M4ete03gJbmq5pLYTi7uH+L7762c31w3yxYoL5rL1/ePjN/7j9v/3f/6/ffffNq9cvXgP/91//AxOJhq5rhnFPxEgYJY4hMEnjfeObqCFqSMp0FKviBgBKIJhrr5G5UjAkB4dCOqmELasKTsqqGvmwOBDmH07qQYOWvFkEQGtNZd7OlcOL1LsY4ShuD6dPVKWEBLZtlRiouGs6ZIdEwA7zmE+hexbV4ImPhFqiCbSSxRFOYnArLRqrilo198y+q0mBNQHJB+oA0XJNA4oiIwKgUgZ3VJlUojXdQE0hfGA6X03etqY/kpwy5jR8LzMN9cHlVokAB9hZSXtFmakXdJ6FfJQDRCx5HQGscmWe+0xhiSgIEQUgJSIaEwPiQ8ReiQhVEJEGQFRADkEBAAVDyRVihXM/tQbwn67pBe0wXBTpLoqG9T3hkAepb1hfc6Y/ULyBowgwKiiIKqcM4+buPOn04ZC/s1Et0l46zmmBREQPIqVydc3iiwX
JSbR0L9l7AERUVCQpyTDGCECOG2iIQhhiGIYxxgjoLYxdNTpGx8zgCPFqMf/6Fz//7MWNjn0Im+vlzHmOQ3+73oaQFJDM3LbtbLYwr5PNZrOLvXl1Pjw87HY7ZpZDy02JZmvb9vr6+ubmBhHX6/Xbt2/NUaLv++LGQqWwuIhZBFMxINXiTXf1/Lk53BPRKDKOo4qGIOvN2oL2YhRBIHQqEMboZr4f+81mv16vN5vN2O9ijCDx9vZ2tVo9e/ZssVj4hs3qSUQvXl6ZlVEklC49PDz2fb/fDft9b/lviSw1KM6XcxDdD2O/2+92G8tv4hmbrh32/Wa7ewgP3vvlbD5bLhaLlfeD6W93u53E0fxLEXG/24/jGGLTdZ2rKi/GXBMRQMYhIoZJYYAIqZo2hDAAKDOTAqhEkThGIJx3DeK1975pXFRoWg+KD48P93d3189uPvuf/xIR/8c//vemaa6vr1++fNF13d3d3f3D/XzeIZmVJIoooTCzb13Y7o9oVA1I2dIFNIVyU/3tRKNONlrev/VXU3ahSr15wETmKpV1rZJKyVnR1QSHVMmaaEKVOXqI6SHtL2Z2jpzzzNGzSXhsUp8jZmbH1LDzJgsSEJEjQ8Gpe1XFBsstn2oYHRC3Q7vV1LfK0i91fPoky45ITpPPiyiKlXjVeICsYoUiqvuLBsgqIlGpVJ3hUCd8PMtnRb0PasuOfmh3PCPtncqPVAWtl6RBIoIwrSrNOWvMtJrGmlmQQiByyBzIITvEgV0MSNKPkViRwTlwI3lHIQQ3iEYdQ0hWmWS8UnOK/WcR9j5e1Pvg+UsQCJd5jpPepBwdqkRpxUw5CJKoZ3kcLNAm7b7DblSKlvKR1aeTAhPMfxoUAEYFFJ2kOVULQo+QuDMRURFQsMKK+35L6Ihd17bqWx56iNqHGGKUGEGBCR1z43znG+94Meuu5r51OsQRVSAO293+4eFuxIV12znnfds0rUlyKinhyziOIjqOgdm1bbvp96pTPwtcffbZZ/v9fn1/R0SOedb4zcP9+/fvv/75X5i/jNkLS/rNx8fHMmXmVTGbzdq2bTtb82yRgiHGYQgx6nq7N1tdDOqa1nsMIex2u//+u28gA7CIMFE3n7WeX332+WI2Xyxm3ntCJaJ523Zd5+Y4juOu34sIk48qdw8P79/dtu3MbG9hFJEBEU2/CsjD2G922363H8MQY4xxrzHM5/Nhv39Yr/vdRkTm3Wx1fbWYzZ/fXDdNQ0QhhHEIMeydH7z3Icg4Ruu/1XNI7I6AqibDnmAxGxm34b3znkO0bDVjjDL3TApBJIYRmZjb1vPg3V9+/Yvb+wdGaGdz9+bHtz/+IMPw/NVn3//4A0p8//5t3+9evHiBiK9evdhuN8gApAIRGRqzZjlUiMnDyMLJzE/K5B7DBE8Z7Cj5e2Sh/8i2V6pyH8GeyETiLZ9bjKJi9UJRFZVKMBqqAqdKI3WwOZcbYi0kmJKzMiIATLKmOV0XfGWmxjvnnHfkPLnUOB8479g7U7OTYzRvl+R+XViBCvYoZc484RiyuKeQPC1JjYVN3htQAt+S2jNhJBERBMBS60oAc95OJQAyii0p+3weZDH38amYXw6stMC6WDnoASBqlqNSWrMz0vlTsIdVUHVywomHWVrwQBK8aH8qEGjdKITSVXyEViS+MX7QiLJFCCFyQFVL2gRENDp2PPYMcWTCqEGJcBgUVEICPkXSf8aiDZcA6VPPw2X25FP7Q2qWFVA1FpEYuWg1SuHDsizK8SnsFW7iSKVe68e1yIlF2jMzNRfPGtv8VltD22YWgvTDIFGJyDvXXd8g4m4UGYNRWBBtHDXOe3b/8l/8cuy3+92jI2w97Xebsd96piGm5Jxm5FBVUzZKhK7rROTh4SGEaAkwS7So5sIIVuunbdvvv//ezB7mkGJZ/L/66isrpG4qTctMZsj38PBQKidY6N58Pu+6bt8/jmMchu0YLLBBhyH0Y+z7IYzS9/0YxbvRe2+
he9x1Jf+njEOM0bwPfvbFZyEECWMIoWv9fD6fty0z392/m8+Xs9ns7vbh2zffb7d7BGL29/ePjg2uohXumc+5bfz3b34c98N+7C1PNCvt+3G33W52+9Y3y+XSXFj73e77Nz9q0F98+bnph81tdRiGKMmtKYxjCIlQtgDOEQBYqlVVlBhFlQi9b6y2OxExE2JD6Ar7a5ikwsmRDTSEEIb9fu9urlbOUVTEly9n3rN3/W743e9/+8u/+NWPb354uH8c+/2727t/9a/+Zjab/f6b31vlUWZn4XlEyZvUOWfZDLRK6IPZ+lUEO2acfBwPke9oGx5KGJOmJDOCQYtOtWqJ0UzSElV7p9aHTcUZcmB4fQursJpcWowwoyqiOkfOOe+T16anFK/niD2xYzJVp3POO+fpQNiFE9iDbHzBc5Iunnj0TNTgDOWx/0tG+giIkJRGput2Vp3MyIOgL+kpbBAs4p9yGuvsvUKWNkQ1luQsAsCAikhqnyAIKCoI5tFAChH0bK7OdB6ATWEooISkEECPq6sfMQL1uBBNXrlmYrH4aMwhDUGTzY+Za9gTzIoFAAqBQIkgOsfMQxidIz/GYTBXJYxOkMY4Co3gCGLkIZhdRMdURQWIANhlVlRqk2mtfyhxvkftEhphzmBzNP1EH8DaIxVBuXvmT6qRTN6+CXDkSKo+ivM7F3ULAOQJQBmKUiUJebYHmqZpW995ZzYAysxdTAlkU6BuBGVH5rZHubqeDWlAMNRUUKapbJhKKEgJOWUwFB+tqkmuLQ5KDGiZeeMQLa2Rb9pm2TVNw4Ag6h0tZ/P5rN2u71vv2q4FibvtJgTLthVfv/78zZs32+3+xYsXs9lsv9+r4GK5zOm4YD5fmFYgBFGFN2/edF23XC6t7l0IYbPZ3N7emm+Iubp476+urqws39iHYRhub28fHh7MO9RQ6tWrV8vl8urqyoxU5s8SQtiPO0RUhFHidrvd74cwiiKFGPfDoKDz5cJxMwxjEPFtGzKpdc41beO979rGe795vO/7XmNYLBbd9Wq1WnmiEELTzjbb/d3d3fv374chMDkiDeOgCFEFkdtuvlg6AIox7odxt+/vbx8UZT6bbfseopB388XKez/s+8fN1jQ07WzhZ51GuH9cr7e71Wq1Wq2AKapIwMaTahBQJgYgc4pZ6KKdLfr9PoQwjnEcYt+PqjqbzZfLpemd9/tdqv+uwZZf57yqMshyNndtg4igm97tH+9vd20TQtj343w+f/nLr28f7r+5++bz16+++/abvt8t5rMw9ItZ9+0ffrdcXv2f/v2/+6ff/O7b7/4AosvlrGm6/X673w9t44lSbjZgLNJPN28R0Yxh6EpMNzrva+JwhHyZgNRRDakEXYghRo0SgYCQzFMLORIgAWuOv5aUIeJMgjH7f8GhJNwkScWea0JjBECIqTYQAyXY88QOZ771zlsMrtkv2JGpHxxRCmOwYFxMjq+V4mdyxWdLUXYiG6FSMVKQjYQjVdUQkodhgnxM7i2UX0RitMTTERCjIoAIiEQdzeBlQx2r0uKqOT0ZQIyginKiWyb29oD8GAABUVABVbD06znfPiSji4Llz6k/iUtHAfN9jF6i5Z6pAaP072gKD9Sj1dXTsFYlrw7MmFSlekkFUjHGqMZ/ASIOpo4jAuGgiJGEWBghBGTWEHCMqjpEMWVaYruKAgpORKg/iUT1025VXvbot8nvMcPFB29b7Gdl5K2ZRUoBiYGQgJEAidQzOiJicGjjwyCqlL3FYMpFkMqNxEiAlmxMiuaHKMYK6Ws7ZeVCA9VbhBhy91jVvMpVFS2QghQENBl+ERFxiCMQo0TfNI64827WNa1zI6FKGPYyjvt9vxv6XjQCwLfffvvs2bPVamUeKN57C1cYx7HvB/M9mc1mIvLwsH54eHj+4rkxyKU2gvmUmu+lud1bChhV3e/3t+/uLHqBmReLhQmIbdteXV3Zr3a7nWF/omsC6836YbMOQRAxjPK42W7
WW8sh2s46UNrv930/ENFqtWoXcwtxQ4nceAR9eHgY9lsRmbXd9fX1s2fPVss5AGw2m/1+/259q4rjGLxvHbfMPsbY96NjVoUYFDSoopXiCSG8u7uXIF3XiGI/hDCMPFpdAuq872YLCXE/9EPfO0/etX2/gREs4hYIYgiWPinEuO97e2XvWVX7cdhut1/8/IsQJIaQeYtogXe2ALy3oneOiJzjevH3fb8fB1OWLhYLSzL+OI4xDH3PTdO0zj97du1W+NmLl+boc/tw+/CwHmTYO3f37u1XP//y5aubb775br/feu/bzu/3e7t5cqyzQkjzedu2QUZTbDrnuPElj9cUJYc13UqrGas8W/n8QQBDIYnFLqiqzFh2aJbhppvUO/uUyS7oSERF8wIAJY6cQTHH5znnmJMvrstWPRPpHJEjdkze8k8kMQ+IXXluLe2lAmF6nO5DLyfLOiJWJsygYC50MA1rTvCY4EgBEEgBQadELCb7SlY/aQ7vT2rULL1pzPDxcZ823FlQnj5TkPjprxDkbCrqS7N1NHNY/KJMyVmFyGCWtYmIhJgCkx9pdBwdjc6FEIJ9mkl2dGPj2I8cx4DsxUc/0uCMwZZxJC/J4DSGOOmdVVWnzOX1PP0zNssfolIBXrXUikSJJXynWmWmva6vnHYFgg03WW45mjTmRIQoTWO2AFeiUzX5QicjTSWfKQDGGBXQsqGmPiuo6ohIoM7qZaRFXe3nFJwHAGhyntlaEBkxZQAiQEDyrVNVFSt2YXkiHCIuGmB03rOpa5hQxnE39m3TyDj0w9D3QwgixIiOGBpKBUVDCN61s9ms7/vb21siNp5XFTebnYmz19fXV6uluWL2+77AlWOSGBDRiuo558ZxXK/X2+328WFrZMXOG+ZZHaIS+WczYtLe2/vb3bbf9Xtm7rrOt12nKgrX18+YPTln0pISIpJrmtv370wwbduWEJbzbjnvpGGrYTufzz27/X6/3W7NiabXIKLjOMYIiChR+37c74fHx0ci9q5tmsaqWpo+ll0TxmHXj/shiEjTtDGOd/ePiNi1vm1bBtyPQxxG7IGobxgBYJQ4hFEJ4zBaNsXlchl3++1uM45jVu163zZ9P+73+2FI4fDMEmOMQReLBYA5FqYFZoElwiGFtwOMw2hFkdq2nS8X+/1eVR0SMBFq17SvX766+8O3TA4dScRZ63G5GGVEie/fvQkhLFfzr774/P7h9mG9VY1XqwVEolxxwGaNHAJp23giQsemG0xFx4lM4XYk5BVqXtuq7WQIIZvcECBirm3CjkTEyq5Zbq1MeTjj3YECJis5zxBPAMi5bONEkkHZUo5hiv703jnH3otz5I9se+ycc4zAjM4RO3SmXgYE5yYSXXZwlvPwxBU+SgBQFEE1dZrk2mPmfW2bWLMhEiAQAAhJYXGsJKGxvKUMECJHUUQOGiBH8uWLUzINcx8og4Nq5bD6s3T1EmG/pL2rtYAH5y1LSxHsak5tmrbDB5SL67i9QtEnRqwEixDBVJ4pp5ENxMwUuTjgBudG55hZXCAXwihu4IGH3rthGNlhGIWZhyBkSXeN7ZCpbsCfD+0u3ZkvFHQugHfEK9U20RNNyJlnHXGmkHepbyy0NKICkhIAMXjixnnvXcPEzIzTmKQ405RbDETEdN2Y624bhSo9OY3XqWBPVWIRQ3O8VOo7AKSSxkAGMLnaQ+GSHAEur2caxcifYyIFiWOMIyhFK+UjEYgdsymyrm9e7Pf79Xpt3vObzWYcR+ec8dchiMUDOOdMsfmwuS9ZVCDHj5tsl1J0xmh+jBY21zRtspHkZqyDBTyoqhkUrUrRdrtd70fn3NXqGeeS6Fer5s9u0r0AAIAASURBVGr1TAT2+/12vS3Dvh/64f14NfOL1iF6AIj9diC9vr5ePrt59uxZjHG/3T08POz2mxCCoThos91ud9vRDJCqaMHpMcpsNrMexhh2u/1m89j3Y7NcIlOMcdz3IQy4XHrfdt18sZi
FEBSAPD9bLgBgv9/3251QJEAFGFUgwhDGbb973G4etpvQD6JBQKXHoMINd0xv3rwxfXXXzdvWOWc1StFyhFrd9irQIwwSZ92i7Tyh8x6JHCKISMPsnLNsOEElhAAK3vtf/fxrG1uK2l7dxCtdr9f3jw8a4e72XQzDsxfPzdXl8fFRwoDQOufa2axtW0ugpSgxxtl8TkTAVItlImIG0WKu01LJobKS1MiXCVeicnZu2st0Ereaf1doUYkXPNrjl2SJVDdAiZJDJmU3TvvnStxe8udMJRfMjcVC95CZzZwPOSmXScPlKQX2jgia1No+ONDTTm9XmflMw1fqSGOpt6AKENXqKqAgsJIisiQeggsZAKSk6TWcwyKb8kH20sN2CcYuwR7ES2IsgcGeHlazfeK+BfPS54E77KRyrA1UApRLxCEiAxAARYxEVh8RCdAl8zPFMSCPwYm5trvReXI88kCRRd0oiDhiCJYUyrzp4kTiaz3tn7tdHq4MdZOt0FQEx/B8BIfHsyZTymDN4d5aFCsKSmQsp3OucSWm1eRsAQAQ1VwGGpL3GqoqkIICo4oqkoqIZut9sZVGZlVw9XLPnLGIgk68jClVICnbpwWjqrn75n7twVIOWqEhRM/cNc6zA21ijD/88F2MUSQAITlGz8CkiDHorFsws8lnBlSLxWK/3z8+Pm42u6Zpnj17NpvNxiG+ffN+HzZl7dXEzjwPC+YNw5BsUe2iLJ6cqXJSL5vTh6U6s18tljfee/ZOVS172RCDRDDZqx8HADBwQlR2tOyapmnatjObU9d1V1dXy+Xy8fHRAiTGcURg71gRRPXd+/vNZmPxGDHGmGzb0nVdFBlCv+u3j4+PDw/rYehVwfWjSY2gNMagSLPFcj5fxjjyMMQYiYldQ0SiKBGG8CgiMqgAMqMCIDEi9uOwWi1XqxUR9f1OQhhCHEKqLM/kTckMAEzee7/b7RARSUtuZEKyUBMcxqhC5FzDxB5QYgiDFZYHQmYYxxhUZPTev371ervdto3f7xfsXdM0Ywzr7fb+/n62mI8S15uN9/7nP/vZ4+PjN998w03jGhvSlv0kK0Sr3C2CyAJsziQAoChW9MW0bsktXBUJQcHSDqOYq5aCAoNTVYgRiCBOapIYABCBzFZEkCuVsyZpL/GL2VJ4pCOd2EpLT3IUO4eKDGaytJRyLjlwkmNwqcYCsktEM/nsZOfNXHQPEFNm4wx7Ux8y7B3H6VsqO8s7rFEBUHN4+7T3czppAEAUzYpEqSKPJdEKUUUgRRQQQeRQ6lQk73JEREFIOZBzZDFMTMB5ensJ9i7R4YvngQHAFXp3RIIp15g9Evtq5CsGHwAQrSo3VlIO6yTlZMcKZGaSSSmaatWOHDhQ4BACm7E6sHMOB3ZuHIMMHJWQeRxDDCFoND+rqZ/lhf+E4PepQrR3k/vy2euPpMBLUyUhQCUgQl4iRnpQwTlC9czcOt+2Zuc3A6pCpZVIyelr2FNVBSQr82huR1Or3+sg1lZTHXARKcqT4jmW1owpM4kRWQWI0bmmaZrWtcXFACkAKwB4Yu8IFVQURcdxJCLfdmzZTAgtH924H83kZiUF5vO5iL57926325nxbLlcWtkay//purTA6rCtomwooXiIaGpGBJezpk3qCpsL0z2aHcskldVq9biHfh/H7c6km77vHzeb/X6/WCyapmm61kyJlimt67pu3DD5rmvmq+VyuXS+FZF+u314uDc/iLZtY9DHx8cf3765vb1d93Eco2rJGZ9C0MwX1DB7vV7HOM5mXdd1++iGYWDm1Wr12eevDLceHh4e7rbm44SqpbB7N5/RGKIlXiH0Xbuczc3S2bR+1nZEtNlsNrt1CKFFIOeeP38+plp+GkKQCESiuTZCYpuUnXMxyjAOjryVtgDE2cwKS2AIEXKdW+OeMNdlfP/mLSE27NxsLiKEtLi6/vzzz3e7XVRdbzfmr9vNZldXV5999tntQw+ZI6kT8YxhpMwIYva
tK5JfvZ0PvBAy0z9lfmG0CNEQAmLhM1VipClN1eTbjGxiZb3TD6gsHIHfqUYHBMBySVsgBhn1S/U6WFJCMsr+nJyi94goZ+kkIrKYPK0LVlRKTqvzV6rfFdHW+q+HkcdlTBAO6CpAKkmd3rR60ahhuoOAVVxBjCJJl5QcCJSQNAt1lCAQIPlZKhGfr6Ie9NPOX/TwRIeaYY9y2Yuaip1iXqJuRTQsWl3LFZl/RVUON0VPFO2f5RViHmKMTlyg4IgDhxhj4OCIo4s8YnTeez8OsQ+j44DOhRD6Ibgo7OM4jsMYQgg0hpjX4pGm4k+LfGfbJSmtbKqzHSg6liOPlWrNTeNYb0uLXEZECFaFNJaINGunkq4mt1shIoiAiKlohAoAxqiYPW5pktOQsFbpEFj0FABhCV5IsJcZIygrmNIm9Ijs2DvnWmfVdnx593mbamGjahyHfr/fb9YpZrxtmrZF50eVqGKB+d67/X5vwq4l8bJ8JUTu+vr6+voaEdeP281mE2Ocz+fcSpmg4hdARDGOlpb6/0/bf3ZHsibpgaCZvcJlCAAp7i3RbDbJ5X6Ys///j+zh2bMz7JmpqqsyAYRy9Qqz/WDuHg5k3uoukht9Cx0ZCES4fE09IqVkLTnnnDMieSFh3Yf/q45a3/crttM5p+v+1+crs+ScVrAMAJVlbb1zhZ+LS5Kqqdp9W5blP7UfliuSwtgvkNFxt9/nnEPOzDyFdLvdXl5eLpeLKffeO+dMURQAoAgm51zTVCKioJ6yKgBAmRVfL+l2u1nr2na33++qquKUCx8/fPJmRhHOe6TXax9IIeTe+6Zpmro2Rs+4kvT7a3eLOVlnyZrEuWp2Od3Gccw5IxKCISJmUBSacE6c9DIKYRyGoSjmb2SEDBhZdLOrqlgP7zwWdQYRiyxK92LG2204v3xhgKqp//jnP71ezpzjw36Xcx7C5F3xxz/8YH+95JynMEzTNCnJEhkQZ1SHRbp3BA1tdUrwHhFhgaPjfey93pKqOaAJusqQkogsajV6n9zz7LzSG5aEG+9NvzfrwLuaBlflFDQAQICGVH5s7W2StcYSWLtqct4bnmpPYVWW0xhHCtXcIOcX04l5XVIzIM7v024BEbFbchiDgCR1q9gIVugKn940D+eBCCw6TbKAX9Y9FU6zxeY8xkMAWUMg3KGt2qMipIQbxOa/+TNx/u7rDPLd1zM60rC3xTK9K0HwLYZz+zoAwBtBrI152yYAyMLp0U/IOSMuqQTONe6aleectVOPmYgSREI0QhijQbIuC9nonLMxhRDAzMqHa+o3J6Xf0O3/Zx6/9znWfh8NZDaGF9ufOiLajhP0LlIpyO98jrXrASci7epYayWOagkuIspegt8pHHF5wsxIqAn+OtuDzIrk3IY9PS/bM444vwfNxlhS7tfM8mZYUmydjdm22RljvPFEJIKKq0wpWSVaWIuAIedhGG7X6zAMh8cHDZmMyBliylEYER3TMAyqB3a9Xn/99Vdj7OfPn6uqyTmrXtc4BGXsGWPK0q/HVouksiyttdtZnYJWtPijzRW+XqK6qVq06ZEPIXz9+vX5+Xnkqqoq53zOowbF/cPx4eFBFa1TSmVZ7o67Dx8+1HUtIhg67x0g9X1/Pp9vt1tiKEt/Op26rjvfbtM0Idm6rj98+PCHP/yhj7OCKBGlFLQZW5ZlCOPqwKdf1HVdSimbXV3XP37+XBTFb7/9Nk3T54+fPnz4MIVhHMccg9apiDh0Xdd1ZVlqh6CqKtWaCdMwDIPy1Yqi2O12ReG0qxxCuN1up9PpcrkhUlVVZVErJKHve2PMvPD57JzTUxwje+/JaXRkNdIbx7HvXVmWWrPqqg0gIYTS+b7vkbGpa/3tubvFKfz1r3+t65qILuczLKPEf/3Xf2Vq9EorisI6JyIq5qIEKlwc6e7L/XbWNfP55lb2NlNctRpUiADuQztZlBu2hSOv8ZI3YezvrDnb4IeIqhu3hD0Bnf3QHMOM0YH
0LDy2zW5niRZaZMkszPBOJJpFPb/f5DRmTlK3tQHAalN9X8nfrV3vduptl4thIbn/ftjLitwRSW8md8bc6064K8UQpX9off5d6MrvNkUTAFgQ1v8DuaOeAIDzeyw+ziOrt8NhgDmLylm0p7uQYUjQojGQGNhiZuIMwigZkYkSiyXKxmVLkXNKiQykhMnvIGeOWVLyLpFPJhY55zjGlFIZTIwxIkXEgJgI+3JKKU2ZU+TAkhLkmCULWwd3SCujmm8BxKztvnk2S4CLmmhA1FyWQEkXC91wky9uY8zdF3D78N9L7gCAOagePACKUM6iyJyWfM455jRXgcJpFsKc4cDAmPKYJ5w6QsRdRd77pq6895rLC8c8hqLeIQhylqjkzFki0zmLgMTq6qjBjDKiJBQiIRAhyUJCFsiAwSEQEVogO0s7ZFhEXJhzThomEVDVbZ2daXOGXF01CrsgosIWskhIowAhNlVJRJ4GkSySx2n68vX5dDqVVfWHf/4n54qUUuQMDAW5slANs8wIrixCjC+//DyOY1FXT09PDw8PX758AYAY4xCGkIMxpmyrpmlSyswRGZxzVVk553LK/TC8/PaiKthlWRZVVdgiSyI0SXrtiTrrkPw4xtu1u916X1TWtu2hRMTL7fX5+aUfelvXf/j452mapqEznj788Nj4siitR8ycnIViV+6berdv6oIMjczcNx9/+fpVHRsu1xACIGLqpl++fHXOldXj/rGy3gFASqmPce+nMKWum7ohjClnMMZ56gZfVOPIwuhcHWJ+PZ2FQ13X/6//+p/GcYxpjBIPbSVN6bzJHJGo3e2ETAjh5XZTkybny2Nbake0bdsQwul0Ynbt4QdmVpdBNaTtuu7l/HK5dFXj6g+fnj596LouxtEUYis2CBzj0HeQoShqSXjrxjhNnGn3uE85j5cxhEAozjlV0tk3O2TiyMaYPIU+zqub+Ml7Y521Tqyxxh6K1o/jeLqcQeLu4fE//Ic/D9P49evXfhyttSGOdV03dc2M/RhSTsb4wrsVXoeoznI4K/mT0UUNiQQMA3IGACCr0uFLb0PNKQFSZBFB8sYyYEbKcw8cc86ZIQFkBFZhbxZ2br7ONQUHAIAsd+bPe1YxzdiOOeBplxEFjCVH4qwYw5bEGnTWWSuFXUtANEaMFevIGAJkIBK0GYlAEorCI4yleX1bm+SwyKIgziJlyiHW7pgFzjkLMCEIgiCzpJw0MwAW4NmzM6fMzMZsxAtxAzVXvCeziOQNPx3od6Alb6MbLj85/WNhj/I945/rb91f2Sy/eO/AZYywzvZwmdi9K8nXCd/25MnvzPy2OdTaBDNvAoBsqroNanTRqUPEEijnbDAmpIQUiFSZwhubEnvvY4wx5BjjlCLHlDKaLJRTdGwyxMTki5Q4pjQfyoXukIBJ5lHqhlY+b7m1bg17qnGiYc9u+of3Dd50MN4FRbexHdkezG12+abmS6LV7b0E5LxOm+5J2ZKQdt2UUjKAxhjvi6IoCu+spcv1pBc14jzkU/NlnQXOY3B9HQEWwsn8OooxhhmttRbmyXzOeSXbKkyGiIg8gBDcC/Rpmvy8HYW3XjtaAODIrYXj2nFCRM6cc+6G/nw+9/1YVVW72+mlolhNEclJ1bCyiDTNTiGXxpjj8aiiIRrzlIGgi3hVVSJyu928L0QJ2kVRFEVK6XQ6nc9nYGnbdrc7GGNCCJfLJccEAK6k3W5vrX95Pf30t78MQ9gdHv7whz+dzmfdu2maXs+nruuMtU1TXa4nSRkAmqbZNVXhLOeUprDf78vCtVVVeEuA4zgqXW9ydDqdFLu/nAsiov/6X/9rCKHvxmEY+tPr+n4IZ0RCsmgsGG/LyhpvvddLcYzT7Xy5Xk6E8qc//emf/umfLq83Y4yiWOcOBBlrrfqqD+OkbdLdbqeVa23lcDg452632ziOq1zLMAzakLhcLq+vr7fbDefxp7PWemfKsnSGCm+tJRL421//ej6f+2tfVfV
x/6Cr/zRN6fV1GZLdx6vOuXEczeIhgM6sLce+vyGida6qKl+V3vudys0YkzhzTMMwiEhRFIremhIoQUIN33POeVZxIq0jVzznfKsu9y8R4RYuP3en7k3OpSraeiksa+tGkXjbD4cN9MMs1lR6+2ynxduFQhFgGvZUVGkNe2ZBZhra2uzd55Tbh9lYLtxBLsascmL3VWi71n0TalbG4bqy6Udt16vfW/3e/eH9Rbg3UeV3qq7fe7D8Y+8Hfj8kYiRYwt4csDYbr3Q+u8JMcINV+XZvv615v42F707wCp1fUTM6H5ovQSbtr+acSXRJpZQSIeUsy8zPuORSSjnzlCLlPMuPOzExUoqccgSbEoecUpbInCLHzDlJPw4z8jizcEIW5oQyM1fM7KMuRr2JAYvCISJZVPYPot4jaMm8O+vwTeN+e1jsW5DW9lp511SZy7skOeewaPanlDAn7bPJLELwRgwXCJg55GCiISNk0Fgg8IfDQURw1jLXDEsAgPJ9Cxe3LQFgIeXJaHY846wAyHoFnij8dsYji0hO8zkyFg3eWVDGGOdcWVbOOVg0HYiIOamOz8K6NXNSPE2qATaOo/f+6empKEuVr3TO2cITkSxymkQ0TdNqrl2WJRGpHZ33Xn+lM6q6rvUzSeS42+lvf/vt18vlolT0p8cn1RMJYbzdbtqNrOvaeIyRX0/Pz6/nMSbjnHE25cwIXddfu8s4jjEHRLDeobHdy0vhfV3XVeF0idRta9u2qeq2Lq0hxcJ0/Rhj/Nr/NAxDURRN05QV6J72fa/tzaGfhmEIKa5ufwWxSAZislCUhVlGpP00DsPQ9x0wf/z48fHhcDzuS+dTXdd1rT5KDMDMUwx9f0tZ+r6/DT0z13VdV7W2N49tg4hdP45TdL6s6zqldL31RVlbV7DgOMVhDJlht2sPhwNIyBxjjN5bAOi7MeUAmZ0tlA3S933hSiLrK2+9Z5AYI6LMat0x5pzM4k2t+5gTgYiYjIiKNxbDIGKRXFEURVnl3Lbt+XoZhiFOg7W2KnypVJOslC+xxtRlgYhBbZ60C6X6LPbe5MzLtUpECGazfOkmvSGniwjapXu5yeB5o72+TWGJiDgvdFgwBhdZDpoNhb9RhzGWvq32CJAMWiRnZ0KCtdZ5swA752GeqrGs+cTmyT00Mt4DNm3CHM2V3z1csdKU3mI7t8F70+blNRC+naeQyp/IIs+2Lnv3DED+MQQm/x3+/Pf/4P5Ri6AnA0Dm+/IrGyXSLAha7cGSgcoG1fJ7DV95C3XBbyCUa5ozv0Huf7t2wJkZGO8hcAl7ROSAMmULksjknK3NKVFMDIZyzkjZOJtYTHJWaclU55xdTilzFshJQuacxE6jCEhmySnnLFl7h2w3YY9AFBKMiKo0gYu4EZHCEpHgTdSX7TT4m9gGG2jP3w9790sqybz9aUZJUIrKAWdmHSBvv64s/RpFtHpIKRhjnh6PtGz0zFMXFhFjt9uvJ4MBwFoig25xdCYiFXo3Jq9xOS/fjgsgjYhMNgtVAomobVtajAvM4v3mvYfMRGSIlF3LzKoN1l9eEZFB9vt90+yapmERnWC9mxfqV3/58kXrtmUNzcxcFIVGzaqqdDVXybHD4WBQEHEYu5fn08vr15zk4eHh8fGx8H4cJkW+OOfatlVF5ts0PD8/v76ciezD41NZ1uMQfvr1l5T49fX1dHklot2hbZoGUaYxVM7u2t2uqRExTCEj7nbtcd82TWMJs2CawjBOY8whc8y8DDuVZZiVo/b8/PxyPiOikgH0GGpIcCjjOPbTxEJA1hiHQimlOE6QubCu2dc/fH768PDInPquU9NBa+00TcM05Zz7cbjdbiHOarbOu7L0GvI1neq6TmOhjjwVJqOVk5q8f/z4Uc+jc67vTnGIKUeOqePY3245RmMQWKqm/vDhU12UADgNoeu6aYo///LbOI7OGRWTm6aJc7TWArAlcs56r7ZE3joioqYu5u8qvCFCFmOM8aZ
ATClxyjxHBUIwxhi2EoPiA+eZtwXPzJkVhDUHmTXSEN6hvH8/7M3LdL6jHLd3MS+p2HojLAUDC+NWIAQJhEFBFYBCaJDAkEUCQkNz8fk+7CGBI2NVaXN2lyXFrWzr5m3Zh3dUBNxfvHueIm1wpLqB27A3r2Z8X+HXmmd7J26mbipjJvI7EWsb0r6tAv+dYe/f/PBv3r8pNN8uvfcotiVikQWgNwSGteZbl+nfW9zXf37b8FyjJs4E5/kq0S1Z37ZaYBARCN+B5iwZRE23s0GTTSAiI+RcFraJc86JYZ2HZZScxc4e2pAEXMo5i4+RmSWz5JhzFv1PxM/eb0CAhGJxDntWLRlnjgxq+w8ASL5T6n17bu6H9Xdef1dDrw/O875ozIsxYjRr2KPFdGf9Q2NUmsFYa9Cqol5mll9++ck5VxSuLEvvrA4RETFlXrfKCABkvRuMoLBq6wqyMDIIZwaJ3T3G8x3o5nVWikzEbMR7Mt4QmsIVKaWcEiK6siyKonTeGqsKnBogmXOIU3+7jePYdx0AlHV1OByqqkkphRgR8cOHpxhjP40xxhRnLOUWpA6KgFdjHee05XXXoBJRzHeOw/PLl+fnLznLft8+PDxVVYVgzuczM1tvd9WuLKuc8/V6vV6vr9dbzmKcd7bIIJfbreuGW9+9PJ9yzsZ6NXAvS69WSm1VlWVVlgUiVmVROr8/tLvdDgCmabpeb2PfLW7jkhkPh4N66f3yyy9dP+pJTykpBVtFnAVnm/WUUhYYYk4ZvXNlWRe+EpFpmIBl17Rl4QrrHBlgqatqXzfXIYAKGMWomJcwTmGchmnc7XaHw6Fq6rL0xrgQxhCnl5eBiLS3qarc3vv9fn+5XPRQq9eENpNTSqUrIEvOjgymFLAhb7GuaxS21hokSTJNE5AoMjNJCjkkoWvfhTh2XTcMAwECsDGmKopaa8+6VmBRWzpHtnQFIMUpTmOw1hpnjXfe2F3dRM7MnBQZIWCdRURMACCRIzIa5713yj1NPDccEBHQIJESiubxjdyFhYmsLprvEOCy+M7j5rFWBWvY29ySUTSTtAQZAQiZRZ1LFr0SIjDGEoE+v3/sJuwBiiW0Bo0hM/vqab8TNwEP323VGs83r74fr9Ac8O4Jt7krqwnzm27f+nwt0O/NQ2XgzVpSggg4Y1UIcaZCvSsHZIZofj9g/c7L/6jfACMiI8kC4lvamxrwNgDVtQZew952Rb63xb8J+LjYN8DbsPxuKX+TLCx/SESA25hxbyCsJryIiImVw54wJzbGMGSiLAicMoMVYiZAZjY555wbV+ScY86JhQWZJSRmhrlJmFk4afDT6qfQ+QoAChCKASRQh8Sl9LQ63lsVtGf/ZlzYOrN9uA5gZUOE+ea0b4/SNnS9KfsYmXkNeyEEtEY1O3LOJs1V4PpXzmW9H2DV7URAROvea+wuPaVvaLMsgDzENIsFsKz7TkSVC3NfhRwRIhGIEKNmriIAYhGdMYW1hap8rTSsGaoHqFKZZpGx1+V1HIdpCqqfqQUcIioQoK5rjWqzeXqGdfuPx6OIKNPOLCYMzPzw8LAembm9wxxjHPtLSqGqCu/L/X5fVSVzHqdRIJdltbZJr9fr6XqZ+iEIKfo9Mo/XfhzHfpxCCF3XPT4+Pn54KopCm8ZFYauqMhwIBCU31W632+lv1bddlV9iyGDIWku2MBZ+/fVXnaKN4xhj0vK0KIrD42NKKUwpxhhS1CgOAImpNH7n3G53aHc7Y0zfDzGmuq6dtXVR7HdN29aFdYAMrENZXkdKyqCPMXrrdrvm6elBqXM55xAgx1DXrdbNfd9XVfXjjz+KyO12u91us1iac4rA1Fu1LQsiK6K1YwlNW3oqq2LXtF13fX1+ud1uyEhExpmU46dPnzRp895zCuulWzhrjPHWrna+eh619NQlIuTEzNY5773PBRKWReFyDjEqUo5AbGWJmCzmJIyQcyabAYz3nplJVR30OjcGF9zmfGvIlpes3bx
7JTc/SGSj9Lt2+dYw8C48IJIIMtMiM6w67WgtbMMe0Rr23jskmBnbwBZJqz1DcOcqvCGhvpnzrffsOtvThevdsrzAW+6Gsrhpy0lO76q97epNtMVY0rp4z/XiRtVF5O5EgRt5tt+1y/5fpKuF33AN3pYo779RA9J9trf99Tb+vyv5V1UOeFvKwNuG3rpNKisOi3DJ+k68m9MTLu7eRGQp54w5Y2IbBbIwZLYMJJlEKEtCYcEMQlmYmX2Vs1hWrU5kBp9nYJWoy4VkyAlknu2VcRDloLAg8NztFBDhGanhEBexAwBANFvC41yM8t32YvsKssjbFvG67+v9vxyWOazqEm9V6TAlbbHqNCvnTDFqBw+WjNX7pNdSFlYrdLRkAQ+HAywaeiFFWaSSkO8AbiZBBDPrx7BkVIl17aerPRlCclm8JzJqHU0irCU7IFky1jpnvTXOkDVkY4xafnnvLdJcOAKQCqBxjuM09v00DJmjtWa3K4nI+pkGZ4xROcHz+ayy98yaV8xAGGcohMBppq856zRAekuouibCnIQBtOcd09Q01ePTHoRSStfrVUSI7OHhQUSmEG+32/l86fs+z+53TZyt27U3Mc8Uf/jx08PDQ7vfaSyBZUhZIjpjfFnWdeO9zSn04zAMw8vpom1hva9iUr45PD8/I2Jd123bpixq2jdNkyvLlJLTNsVmZXg5j6u3HyLGGMsiG8KicJJyVfoPD4/7fRvDeDqdxn6oHx9UvWyapr7rlNVgCZ+eHvb7tvBWePYiNwTWYOF8jDFOAQBKX5S+6Pu+u96qoiyLsnAeEbVqnPPXDAatAGaJSAbRhBC77vTrT78aSwZBtQKmYfjy9cvXr18Xs6MGAAZOZV0fDruyLJ25O1q7ZTSFiDnGgccwRmOMjhiYSC8qsgYJScQQopv5P4kTAhgEVzg0BGRBqZJaEungGhdUPoDZhJm3YW9exNfB1bK+yQxORFBeusCM5wZEAeVXI7P6DQAyCaOAoSQswopiZCRjNhRsILRIgmC0ufoG0jK3SN+EvRX+YwHvRD1cyAxvKLuzu7oe0kVLZRYJXOLcm8inkRzmIGdVWgVnz1sSQa1D1C5QBEWy7j2iKjmv8Ual7TMAoBgAQRRF9QPMr2/Vif89Ye8fDYd5wR6KrEQInSne4w5vJnEZAADnsEdv9VnwG+Thu7C33bhvN3Qb/7Z5BPyuPeD98x2gQUpEkAUASViMBJbaVlEgAidABsmCWZizJPSGGTkzg4Y9k+6sOBDWyIecNTsuJ0sK/eKsNAYSUFVBRXXQYhOhm5TTm73blr/wNh+ERZ9te1jWd6745nc9ihSZmXEZbsKihznzKZczsp6XRfEZiNCg9YUty9J7O41RIDMntcO8R9Z810dFmgXiELGqCiRBAebETAJz1B+HkKzNgiJIRDy3iGXX1MYAojLzvHLSZ52HmYYF2o/1hXPLRSIphzgOYxfTHB0BoCxLNDRNEzMoXP52u6XE6+eElFbQYxjH9TrUY6iVoiIDFdk7S3whElHTNAobQURDznurd3JRFF033G63y+U6jKO6GAcGj8EYVxSVCrgwc1UVx+Nxt2sQTYxTmCIgeOsAJYzTp4+7/X6/3x9zzq/n6+l0mmJA0OGi90WZs/R933XDGIac5ePHj9qATSkNY1DWIBGdrldmVsoQ0L1G976sqrqqChEZxzHHZKypC4+IvjGPh2PTVEN/e319lcy73S4yq5iZYkFjjGof8fHjk9Z5IU4MUHpfew9V+fzyxbvy+LCvq7Yfbv/9v/93QN61h7LynCHEUTUVjTEsSbXcnHMALIzWGyLqh/H1dOYUnTMphtPpRTmRhTNtWz99+INyFpWY2Pc9e4uI4kXbe8YYBrBEOvL0gtrgNcYUlbcqUqIXT2Y1vrGIaJ3eaJcwIM5edEVRkHWRc8qid4hxDomEjM7FFVR1D3vwZl3aLN/3Gu5drfNOweN71Z4Bo/ZDnLNBZBKrZ/y79zuIXcPeXO2
hwkGzJWMVnLLl6sG9wlOEJ33ncQdLC37T3QEQXJqcCOswb37Dpq5dF4r1n9vHu7Ud7gOd+a9U1WXzHtwulf9/Cnvb7uN2hrd+zNpxXPbXAIAdhkHXI7PI9cJsQJPNoq0Oy+L7rvKATWm59bfbtkwzwzp9ScLrtRVzopX/J4wr+DhPRGTRAN0VkwlwypmQrDEqEUcgBMKCFZXCmIWZISuXRkQEc87AWYCBhTgjZ4EsIm1BS9hjFEEBZEFd9AUYeb1Q5r0ulhV8g25an/Bb+XYAIOPfEdL1UVincAxZiCK672ovAHwX+hKaxSFDCLLEvLXy9gZ16OW8qeraOcspX68dohhAJCAyAAyIwsDC1nrd2sQCKQOAQAaQlJJIJgNFUVRFCcDDMAy37rCzgmR9McV0u50f9senpw9DNxJp252UG+BsAcxpSvt9a4xZ83fnXF2V3vvr9YyzmFny1ihfwlpbFGYYBjRU17UxbhiGYRwBQJ16knCMURh1TWRm54x2L0WkqqqmqYyZm8ApBb2cytJ7b7W+729dzrmoyqZpOEPXdbYoH45P3dC/ns+n0ykndmVRtTtjDKEtyt3Ly+nl5WWaJiKoqurh4eF4PPb9LWcBYGcICEvvyroqnH9oLSJez5fL5XI6X6dpskVZVsXnz59vt+5y7aYpCkJZNbYscpZxHLVtiIjWWpVGUwp513WZhYhCipfLRXfw8PhBe93GGBBOOTrj6qqdpuHDw7Gqqsv5PI692uqmlF5eXnLOwBLGKYTBe/9w3D88PJSFVzkeZ4hB4jQysyU8tI0xjoSnoZMUd01lDHrvSu+GYRJh55z3hTbojHGYdKQaEDCNKeWggdBXdVE4wFzW1dh3RFQ3ZVmWj48//OUvf/nt66/H4/E//5d/ud1uOedd3fz0009d1yEaZ6xmJ1WFKSWbpqZp2l2r1kJFUQBwjNEp1WHOv4Ws0bK7cEbVpXPOMQSPxltrCPNCIDPGoHXa3BeRwvv7UrhR29sMAbcsHVjMvgVRPeEUrIgxJkS01mg3L6WkSrfazSIyAEbErguC2UigrWEPAAjdcg8CLUw6AgEEi2QQiJD058I+XCu5Neyt6Oi12rtXFyjbRRiWmg9pXvdEqzpCnFs4gEQIyk/MCGTIEhrOE4IQmlnRVFAMIFCWmV6s6x/Aqq7JRCRZ6RQKShdhJrLbdXKTgnx/hrd60/87wyHfwxts67y5l6Y5io5jRbtcBEpg2Jzy94QMeYvkBFALtnstuO1nbnus37Zc371nzSk0R1j/Sc4Kq5SMJWECi4iEaAUISYmoWZABgZkQPJYikhmYOYkYZsViFfPmJmDBnDTsoUA5zdEOWFBEp4bAUlgHAFsDZUYmgcCoUjcqsgosq+yNPpfM629RgNGsKK9tnsgbTc63ScOblOoOdDbGWpsXDR1YqoGcszFudoxjDCECi7IMGMXMCqW01PqzDurbdIRERKUmiUCBPyqD0jTNNF7669DdxrIsfVGjcTFm9QrfN/WubZ1zOQnnqamqtm2naUBERKvGPYSzY8DY99p/1HzZeaMCzXEadUahO+WcY5Gc8zCcrbULAN3O61qMOU56GM3ieKB/qJFvQUgmjXkK7avrWhBuV8Xut0D269eXL89fU8wiYnxBRClymBJA/P/+73/RBKUs/dPT0+PjEQCul9eYZ9HXuvS+KtuqtYV15ADGy+V2vV5FsGma3e4whOl26/ppdLYoiorIdv3YTz0jEJHXVq1zAIDEakIUQgAdZIaccx7DXKU554axq8tKRPruBgAP+50hGG7XH3/8jIjXy6nvewBQst00Taq4FsbJe//4dDwcDk3TqKKbUi29JbIGUVIa45QYrbPGWU9EzhbLepSmMRvCumqKomBm1d0Wkbbc6eo2zyDAixciKpxXQc6yLJuqLkuv3iB/+ctf/va3v43jaK097PePj4+3220IU9u2RVE4VwDANIwppb7vQwjcX6/Xq+Z5ZeXbtlUqURHKRdEbV5N
x731rnDEmg2iFRygaM4hmqWZCJEA0pkC0bxN3ecvWhe8hOVU+dN3fVdJFr7RVDWpN00tXr+sYbzxh3i7lmxt8caqDufcIBoWAANHMYY8I79Ueyaby28z23sW27xZk28oMtI201QjFe9X7bpHffuC7NxBupn2bhhbDvV5cETbfi2rff/1/8vF23LZ+Pn77ur4y58i4FZjeMBa3A+F5vaY7Murbw7RGtc0Wvd9tfUJvFNE2Q0RBQRAQK6gXBgAQEhmXEQ1SRtI+p1HGBnoAyOroLbLijw0iCoAkyDw3OSWjQGEiwJwSIAuxAGcUmN+/kVdVH2DDZp3bbWd462wPMq8TPhKYBDTX2yY4sPjSwVsmg4jQW9djETFs7jePUvuXgAcAWajwpS8sAOQQU8wAYCyCKo6RkGqyLOAjZsBZUEW1W0SSEhuMqtKnmLs0TiGVlffeu6LJCadpygnLoui6KQzpeNgVReVcgWBiyNPYQ+YUxhhrVZAiREI0y9LAKSAKcxadxNi5ccCcyrLULVFKg4jowq37zjoBXeADOWeD5KxT6KZzzluHiAZJzYaIKISQY0pxViKedTKFVp7sMAzPz685cwgpMRSEFs0wDqqBGQL/8Y9//Pj0iIjGIoKklOI0dMNQet+27WF3aNrWkk2cUgjn80tKSRCMseoRQcYURTGEKcYYhlm9mojqZlfXdVk4WdRqYowaqFJKrix1mjuOY+LsvZ+VU7IZx8Ea87DfsaQw9sbbjx+OVemv1+vtdkWksq7QUN+Nt1vHnDllAGjb9vPnj4fDAXIexzGFsSicdwaFp6EP48SSUfCwPxRFoRWzCJC1KaVxnLz3xpBByXFShI4mFqchwDLXdK7wzpVFIZKHYQBBa8vKmar2zrlpGm63Wzd27aF9+vB4PB5SzorlSSmpIUNVlkREaAGgLIqcMx12AJBzTCkBUcwZEiAiD5JSinEiopgoJqfhp/pwRMQsbICFjA6qUcBZK2RkCSeWCK1Dh1Oa7sviG58ZTaHu+K9to093We/TFTG0pTibRfOsdPO9mReV7XcLMbwLe3PqRrBASwwqkpPnas8gzfM8NAbtVpts0+RcU2QiWmwMcC4e1+99S74CUOOkWcQJCGG2ZnnTiZWFeitbs527CKUREcTMzGaOK6v89LzqKFFkHseIzkgRBBa8p0aK36Ol/2PIT1l4fiKqcD3HIEYEEMG1xF9eV3EynZd8N+xp2/NdhMOl2lubb78XgZdfbdroy+vrcn8PdpuGOgARCACRqsogARIYOzPLAREpiZAICwIYAGCdiwgwQ5Zlcs3ZiBVOGvOAhQAKqjXsqTyPEX0bAAuyrNVeFtHUyLLdbOcmu1mHefae6wFAfguHXv9qhau9O2jbmwGWTjIsmiwwC8LyepqsqZ1zhmzOiRlyFiJaTq2gkCgJ9X5VkaJQRUTUJVY5dtatnAhddKYwENFxf2h2R+snZEFyKcScGNAcD48oOaVEgGVZqvQ7M6vvOQBrXCciAgYibQfoXWrtMrZkNt6oAEcIgRlwMQOq65qZhzCllBKxzvyIyJtZQhOWZIs2ehyzydsyN5UZHDgURXE8PGbhvh+v1y6lDGjQOEQZh2maLteu7/s+Mf75z3/+/PHT4bCLaer7rr9dcs4pTCSx9M1xv3vY740x4zh21+swTNM0eu/LojTGRc7TNKkw+jiEYRj6cULEtm33+31d19Z4ljSOo/oWTWHmoqx31mx+ZGe1MGtt6MbC27Yuq6Lsh0tGqcvyw8eHL1++9H2PSO2+Lcvq2g3XvruNoxIJ2rY9HA5VVSHLFMIUBudMU9VF4bquu17O0zA2TbM/HNp61iNNOSBA4UtvLIk1BmOcrt1FB4SgM1TvjKmWexZSCuPI2mMvS18UZVWVzhvEPE3j6+v5dDpN01RVVV3VMcbr5dZ1XYpRFuWd0ntjTE5SluVsJq7kTm+KpXfS9SPnWFWlDvCMMSbi6p9cpkjkAFFI13WRzDmLcw7FsLB
kYMkG0Viy1ia5i12tTU4RQZyRnGvSv0Qst22urDWcXt66GXPtrvcyj9uWmNlIgH437CFuGGJL2EMBJLIIZuEKL2HPkGx5eyt0k7Zkhu3rvJntobwvM5btuf9zDXvbxf/dT9giOXGmd+sergFsWcGImWkTTXT2ta2I1oz/d4Lb//gs8Hvdyu93Me0WYfju11q1iGzwEUs/mt7kTf+ux/ZCQFzpACIiZjP8nFhWSQFCFCRBK4gwq6lYQEyAFjAxk4jXC3T2NF+esyAKgqPMwAY5o2QRRgGPBQCAZMqzOgQIkQDNHlii3WIGgVnD837it5FvpXLrI2dgBhFx8+206XuAVp8EKAILThEWObvZHRLXpoqR+/xvBSmtSUZZ1MwcY0opZhYgh4ujngijEIDmPytvVdMgFaUBQ4Y8iUiYJhERyEQzUwGEc5Lnl+uHRy9CwFwU1WF3THEyxl4uFwI2SN6ZyhZl6Qu1I3ee76Iqc74nIooVNBadM2XhNUZqhWeMybPuqE77ipRmwtV6HeoHGmOQGJBZ0rwbPCvsMXPKohFFGRF6re78nmheL+MUxiGEEAXo9XRxrkAywzC9ni9TjI+PTx8/fm6cm/rh59ulKFxZOUAzpFFyfDoed7vdrikJ8zj012s3DEOMef/wGGOcUqIMghBien19PZ/Pr69n7/3ucHh6ejoej9b6cRyHob/crsqoY2Yio91pIurGEQAIrdoVqR1gCCGG/j/86c/emV9/+jlz+POf/tjW1fV8GvsOEaq6LIqin8bn15fX640ZLOTj8fjx01Nd15DzMPQCufQFogin7jZ2XSc51aU/7tvjYTcOQXtTjkh7sAAAzilvPYQgzKVKhXlPRGNSTi3A3D9fsn7jiqosqirGcL3dbrdL399iyk1TMfO1uxljSl88VY9hiuM4AmPf9ywoDJxzjPF660MIBoIz1hWu9IVxBAhoyKAdptFmy+AKKlCIRaxF59zQ3bwvyRoDRggwC4NIzJwykkFBQBDKGcCSA56hJd9We0uqfRfZwFkyHtckVVmjPItNvnGWXvuZkkmhMoTaCPvdTtgc8yDLjLGYx3AGBYERwSAtYU8UpUJEJBuVGXxTlm0C4JZy/r7DuYBZtP0IsKn59P0iqrSSl1Gl3s6yIjkV6qnP89zxNoiIkEVm30EEjXOGiHRVYTZESJLWMLmun/8L+5wC23O61HkaYOSNTBojAWLmt9Xeu2j3LVVl+Zq8zQvwmz7yuyf4exqem03fmt0AKZUc5mErGUYyYMAYQGI0BkgQQdAIgyBIAoBt2AMgFCADJEKcQQxxlpwBGVkMFUpc07Cnsz3DAMx455+IAWDQhts9+9uGPYUOwpIV0qJUYumuw6RdkXV6Khs+0HfTAljSrjvmZXMXadfF2WKaphQ5ZQZAIvWL5ZmZyqiaoOuXsGQ9+Ep9NcaQsYgYEGKMMaUYo8zFEwCAMF1vI6J4Z5wtHx4ep2nozicy0BSuKL1TkpFk713TNOr9NJOxSJRmqJphSPPdu0mlZ+hmzrrK4Fqr3W4352ahGIQZzoCIztz7DetgQwngiKgrtXNOXXhyzoUrnXP9MPz886/ny00EAsswTGVZd13f9aMg1e3+qaratq2qejyfx6kH4MIb4AycSufaR//4+GitBcl9dxuGIY3BEJRVmbMYskLQ9/3ldlW9UK16Hx4ePnz6pFDSy+WkqiVjmHTiSESARjb9alg8Iqx3OsgMIXx4PKQ4jt1IyPvDvm0KSbG/XcrKIxEavHS3r88vX18uglDX7Yd9+/Dw8PB4EJHuchmGwVlypQfh6/XW950BPO73h8OhLD0RISTMiBYrZ5Ok8Xad0sSRr7dzYYu6ck3ZlE1Z2GJK09RPOjEHAGbISURAZQGYeRqjqpueTq8pB+9tVTXHh4aZU2LVWMk5q5iOJad2UUSUpiAi+h5v/bywIADAbOFhbUqTIXLOqGHvegnFaUIRz6UYRkWVCBKw5AzAhAYRmAVZ9Mokt0F
Wb3J/7TSstNs1j9fQpimaIvhmpfXF1WEJh1mvWyWSmsVPYA0/6wWPK45aX1dghYL7RAlGgkCo0hmzctkiBGiM2VZ7WzWWN43TN9Xe+xVY3iwvmw0D2AybtlMneNvkhLe8vXXFUyjifACR1gmoBht6ywLclrn31f47Yex/pNp7U7B976/WrdLIY7cNorWxuXJZ1th233q6v7LNa9ZU6F14Wzq5dyeqb8Pem0YoWUQUBEESJEAnhAwE1hMQoGEkAUQBYhYkJarwHPPvn2oRFcMJklEyMKNkkGzQaUygLCBze9Mw5BhUng4BlJzLoP0KB2+q5vmJWcrXeSGjnEmBmrTeNqgCS4sy77uwN5eDb6+AbTYHiiHbJHfGmMRa2tLmHqb5bBMKIAqKABJpR0dyZmFAmb18ha3gGqg82xhjDkGhkohYVm0IqWmqttnlLMMwWktFUTw+HUtLzhlvyDpTF76p6qIo+i6ujVlJmePM9yoLD7NuBoeciGbzTO8KNW8DmKVJxmlaD9dsz7Q0SK21lV3HIerVgnEK4ziO/VCWZeG8QXLOVUWpYQ/EXs63ry/Pz8+vXd+LAJMRwWZ3HIdoTK7q9vD44JwfxvHr86sLfdu2dV0KcEqjM7jbt6o9xjlP45hDRgZnrCCgyOutUzjJ+Xp7fn5JKe12u+Px2DSNDiCHrrvdLmq5gGi0Q6tnfwpJW4jMrEQXVXIDQtWqrqpq3zbPX74wpx8+fdzv2u5y6fveOUvOssA4Ta/n7nS9AOH+sD/sH/7w4RERFR4ShmFdX1KMAFD5om7K43FflxVLUiRkCpFzZrTD1HfXPksqXFkVrqnqqimd8WSQc0xhDNPY7g4xpu42XK9d1485s7XeGr877KfpOk1D119jDEXpvC+99yEEPVkhjC8vp9vtVrryeDx6W+Sci2J1ZER97p3EGAEFjRGCyDknQETVmDUWETHHENMkwSCiMYgCnDMwo0VLKtQHwGIMGmuBKAsiIAkgi9LSYVEdgvelxhu+MgAg8rpYa+RbZ3tLX+c+4bPWEst25V3vZd4YeG3DnsF72APJAMqPhW/DnrFoDFm2tDpr4lICLpXft9UeGJRFIhGXCu++y5vFWUdB2yB3/5Bvpn0rfGxbH5u5IcwAIBtexFY0Zj2A/I22/v+SxzawMcLKkVv1WUTFqdfKDQG2UtTvY+MmRG9rNVwInutKtF2v4RskJ8D9xe2T7WP7J2QNzHhRYwiFjEEEtGRtBmORMlolQarjhGHWsLd+l8yOiArxypRJkAlIJIOQc0vYIwGhNew5Q6vtqoY99T/k35ntbbdcD8h8KfB8GWmSuE3KzNZiaskx80IgfXdAvg17GgZut3FJUJzytAVlNmnUDo8eSFajoXkLV0sHIrJm9voqy3JmT/d93/f6hhCSiHz82B6Pj/1wOZ1OP/zw+enH/TB2l8uNOe2b+sPHh6qqkKQfbs61uuMpJU5Bl6qyLGMcAIA55ZwRpCgK/brLueu6borBGKNiOpqVa1dNCLWWlcXrztG9q6zAEJUU0amYcqUBQPHciBiD/PTTT19fXsuyrKr69fWcMT4cP/7yyy+79uGff/yx8NUQwuVyiYm1cHHOZU7TNFSF+/DhsW2qnOM09jFGjoxoysIxwzhM/TCQrc/n8+vrq1oWPD09HQ4H1bdUjWl1lNQNSynlxW9SaQk6NrPWDiHoaXXOJc4iUhTFw8PD9fy1LH1T7euqinFKOYBkYUkJUua+H/px0Kjw8PHDfn+0hs7n8+vpOaXUlOVut6srR0Tj0DVNs2+rqqoIJMYY4hhjpCzazzTGKFWxaZrHx6OWpCIyDMN4Htcu39/+z/8zxtT3/TikLFxXu6bZtc1ep7PDMAhj0zRt2zpvcs4xjpfLRUkORVGICGSYpsmS217kKt6NiGG6ppSQwHsPoCinvBKlkNEYo1eyjkKdmXUnOWtXE5DAolVHNGctkony/iaFTTq+3nrb1Wl9c85hu+7Thsa39m/0hGp
DIo3Tds1cZ4Hrerj9KFh8NGeit65gS9gz97CnywUREcH7ak/p6mr1vP385Rvf8/bejpbeR4vtUfpu2INNpYgLknMJLfcYtkZeeYcOhTcfu/4t/q/rc76LXN+mMt/urx1DNhlsBmPEGjSGrRVrLc1NTiYiAwhEmTMYQ7YQAKBZw1TTAFhbgvhmZwGAtd8NDCAGVt6YMnQpoQATEKAQkEGAbIxaywJ5NiToGC2jUfEFFkQAI2AQrLI0nDb15oEqqOqYQOZoFQlsAQENg4ABFh+SUjwIBNgSC1JGnNGDquGcJK0Zii00Vt1BNzrkWIAn65yPdEgOQeubrB7WSCRIAJJnuI3+NyuUCojVjq5iTThLBszGCAGSiIQ4elupgUTT7G63GxoWZjFZIAtmWZBMelEpWUnzL2IAgJCUCmNgNl/GJAwZIHOfGG4TM1tbVLtjZa2IPB3aw37fthWxkBUjMAxDnkYOU124jw8Px7ZxiOOpswTe+5FfnHOArL2+oiissQjMHAEASarCq3z2NE3n305t85A4T9OkQ5cYJ87Z2SLGQGgQMEMuSrPf1+ol9LVLcQoCqTQOJU79EOLgLX78+JhS7vubtWV7OBa+unTdy8vrzy+/IBpsyufbgEz18QMA3PrpTz/8U9u2ZOByOXXXk0WsPSGGch8BJmuK46d2vz96X8YYpymnbNEUxkCKfB50OCdgysv55eeff75cLofD4cfPf3h4eADClNL53BGRKwrDnLPknMFa50qIGELoh4uqP+/3h3Eczufz2HWfPn0qSzq9/goAj4+PRVH015cS02F/qOs2hNB1Q85CvgQARMNxQjT7ujrud3VdN5X3mF5+e5mmyaHsdk1VeEMCKRhv/vzjRzJgkEHGnGNOkeMIOQ+3MQxDPwYiuzvsf/jhD/X+4F0VJHe36fV0uV7jOKWUOLIw86nrmXkYppTSw353/PRBQUx5ikPsxjQURdEe9mVZhhC6YTIQvHUEGEKIcRRmApNimOBGREMOE1HhimxydwsiUjhniaw1qk+NLaqhrsZg68ham0xFFK21zpVWJu+8SB7DCLk3BAZdFq7q9tY/33qu9w+H/SGj6frhchka+1FzTu88GiOiANFoLW1X53Vl9FTKNw8ASCkJkjWGcZnqaZwzcxGjqFBCQbonx/eO4qLFKUq3AF4NgQgyANiZ/wdEK41BF1JgxAwISGyQrc0IIOw8AiFaAFWJIW0ACYnZhhlYBh6LPguAzEL8Mq+fkjNnyAwMgEBGMuSc0VhClcDJKTJDIjRATCICwsIIjPNAHgAgi0EQBEZgyZmYQZhQInYiGVGpIEmFGJlZ7jKXsH1uFijeu4hFb2VA7k+2oseCCKi2t6DFpY41dT6pTV1empz3tuSmNDaEc1OT5iV42cQ7lGOt+bZJwbbvDACzWBe8T74A5+izrYzn3xISWVW44yWSKjwd4T0yZ77Ivgl7hEIChGD0TCOAgKAYdQdhARBAMQQoCsPQjFKv4LlLSURg3ii1w4IHWyZ2d1FaLe+shZyzWSLrXexmUyluMx2Ed6IP94xp+1hbykKIgkKIjIhG1XY2LQxAARXaUIoFfcPInPNBQ9M4AYDK8P/2229VVX348MEYcz6fvv72CwGUpa2cTTGEvvv8dPzx0394eDimcRDmfbtLYfz55593D3si0uUpb/wiZDm72i1XIRVYUOBrU6UoCt2Gqqq00brf78uyBIDb7RZCmMRVZUlih/6WptEb2u8OztDz83NdN2Xd+qISkdP1+vz8/PJ8Ik9DP47jRETWFIjoXdnUO+995hhizjlaaxFFM+i2LcqyrMrGOccMXddNY0wpte1uHMM0TcKoSoOXy+V6vd6Grqqqz58/H49H74ppmsYwMfN+f4wxhlmO0onIFEMI4fW1Y+aidGpxfrlcLpdzCOGHH37Q/o+KMitbjpmPT0fvvQrNqCgazFQ09SCM3hfNblfXNaJJKbEkJLFI3hrnjTfWe1cWLoSxcMYU3li
MUaZhvN1uwzCEMTb17s8fPx8OD9b5mPnWdV/H0xRzShxi1pPiPYbMzJwwqif7fr//4eOHtm1l4ZzUdf34+KB7ocAi731hZ5+Hla3ojDfG1EWpiZFzDhj6vieisizF2pTSNI0xxrquy7LUKkrbD0pgr+tajSPO5/Ou1KQTNdla9HhzWTVlWSKnsetT5mZ3UP3VON+n90BCi4j5u2pmvkfSoqH/loG3XevWnyIyt+W/94BNc+s+D9IeDNwBJigEC/xyrRfuN/7bRUB/S6iMoTVsw7//sa5F33l925MD2R6f7dL0biXfTvLm0LXZnrSsZLCpGv/Njfy9Ku3bftt33v/vOBo25GRgTkyyAYXYrVW1ZUMkmTIROeVKy1YCdX7AMtv7dsley2HdKv2JM0tfn9zDniCS2r0iibGCBsnNYtGk2pk093ABZsQRWoClUb30c0kABA0DAhoBAgQWEETOzloAQM02JFMWAm1vMs1ZAiPg2qwQ2iqRvwcjySxuN/PTl0t8vmQk3xX/RBLc+xK8HAEQROVK6GW3XmIK4JovdEO4Tk/RqDSeIChcFQCWqeQ9wdEPZ5gHf7ABDQkDCqQUcxYAmOI0hQEI67re7/d9dxOROI1xmm432dfVrq4Pu/0///M/W2PGcVSAweVysQSPj4+ZkrGoYy3FK8bIS5oCajSTc9RlUWVKZAOPVmQjrOJGiOrTdr1e+77XLL3yjplvajlUV0VR5BgBsCiK3W4H6G7d8PL6+vJ6vvWDYRyGkRmaeudtwSze+aZpcozDMKYwIal5E+mS6m2uq7quaxHp+zHGqB1XbVFO0xRDFkGl3KWUdIynTq1DP/Z9n4Wdc+qGqiWAVnuJMzM/fXhQWeecc9d1z8/PIqxkg5QSGVC9FRFBlKapmqaJMXZDf+u7nLM1Xs+nsv0QsSz9YbcriqIbhr6/GQTjZo0652zhTFEUpXdV7TimaZrCZZqmIcYIQlVRf3jal2Vd1zWhmWK4XcdrP4xTQONmqTlrmHMMKYQYQujHPufctu0PP/xwPB5TSmmcjHFN29Z1rQfqer1OYW7evp5Pxhi1oDLGEZG3TjvAaKhwTvNX653k2WeDmRFn40ZrLTMw8/F4DCEo/UN3XKVK+/MX7aBqgAQBQPbeD8NQtjtPbhiHKWdXVGXtrDEhs4gASQYBYDurn8131lvEAYkImjccvm+f01vnuXUwsmU7bGcZ63pIi+qKhgaAWd+SVOV9WTPWsEezqgutc/0ZujKHPVQ4JehasCz23w0qupPyFpyI+IZUsH2umykLhl+1E+UbRN6c8ev+kFn3VPj++ZQRcP4E5RBrwq4rl+r3oqJqZ0jhG5rd+uRtafeOh76+ci9LYHFgeHdA9J+Lgr4KnPM8ibVzqUPMbI3RXiYbdmQEF3rWcjJW7O827K0gz1mjUmuQOamZcTU8RwBAMGCMIJIqlKMBskgkxgiQYlvEWFqqOtxqe8516rdhDwwxgSVmIyCQkAGNMWxISZo8+30QADHoudJTuw17jFsQyqY3vVzW642DGzuSLJmZFXz87nLEe6ifT/C7U7JcOvdRxHo85zCsF4gGTTYLHzSjmDkE4iLIgCDMIMQw0/iXSbuEFEtfhBCu12tRuj/96U+Ph6OI9LebQb3EwTu33+9//PzhYdcej8f+eh7H3hSeEEIIriqOx2Mfu6IoVpi4WowaY8qyAABdyFKaITPe+1s3IKIOb3SyhYjK2tYcXO0a1HW2bVsmp4rWpbOmsIUvUkrd7fbhwwdXVCnLrT9/fTm/ni7DFAVN3w8AVFd10+zUFIkzxBitOt9KLqxzzhiLZVm0bWspEdE0TSEknWs6a53zX758EcGcZBzHcQwiUpbl4XDYPxw0InZdl2Im5X4AnM/nqqpcUTDzOIYYo3G2LEtjC1Uk6fte3f72+93j46O+v6or/XZErOuybdsQhr7vb12vGmbWkTKlUo7WUlWUdV0jyjRN09DFadrVvrCurLwiHgkQRf1GJIx
T19/GcRTJRVEc94eiKMpmF8bpcr3dbv00TYkByTlXMFBmSDmFKQwh6l7HGINMDw8PP/7448PDw8w/Iaqq6nDYhRD6vl/Dkp79aZqKovKFca6YVwMBnYoVRZFi6vu+KMrHx0dnbNd1S7XE6libUlKWZ1VVTdNkVunwMwDs9/vdbpfHi2YAzKwTMrTonLt1g5AxhS+cZ4OcwjQgM4AvEWBFzxIgWVDFrDVoLTcawyJdLZubZW1Xbsu4dU0wiy/reoNvo926Nt7RyBsbalwWK51QGhAkWUzP5vdbY3GlNs2KnUjwpiLEtzXfu+oKvolz24V6+wps1p/ZJO5trblN+vFOY7sXNvj2AauesLyBzGy/d1u6bY7um19998hvn8t3QuCbIv7dOmxVAwQgGRAScowikQ0bMZZmVCciWgMiwsQe5+bAqrS5rsjr/mxTAx0Rz4d4gziSuc+86AQAIBpAo0hOUHMRsgIWEAkpw1zfwDts0kpFWcLeLDCNQIIETFrbIQEJcrao7AgGBBIyCCiCBMh6jNQrWd3V9Zy+wV6KrPkIi4Aw3Z/LBnNoMjFoWoMEd21ozoiKV0YRRq1CYRF6X/IARsjCAMiwmBwRAWLKWayBtdojwaWxKYuNMeHdL0J1ZAgpC6PMligiAMLWusQcc/Jl8fjw2Db7fgzPz8+HIk85I+Khbf7w4+c/fPpUeBtj+Nd//dfHw64q3DiOzprD4eAMPj8/7x5bXe9ijCkFtdLeXtwppRjz7AbBAIunsf52togLgZmbptF2mVrI1nVtjOGUQphEpPAWZfaTana7omrGKV5ebs+n08vruRsmImudR6a22ZVlSag2xZYljf2tLEsAsZasJesIEZxBb6lpDmoYpDFPRKYxppTP5zORlcUM2VpbVaUaJK3LkJ7HMUzTNKnbrd4XWoioL8vf/vLX+arIXBXFYbfz3qcQYxjbpkKBaRhFctu2VVGiwNyKDFFzAiJKnHMWS6Zp2rppiGgch3EMKQVvzWFXzVMxxJSDpJxCTBOfTidF/Twcjgol1Vvm5XSZhuHW9zFmIrLOIRIDXm+3FHkMoR+DOr0j2aIsP3/49Pnz58fHRz01atRHRFmkn8L5elMVaWstEgrz09NHQ9ZYQiDapI+73U6VqUWkABnHgb33pS+tzzmP43C9XlNKRVEcDru2bU+n0263q5tSwUp6VXz9+vXQ7oAGAnbOGdWnTRlG8M6FYaScq/ZAzoaUmSfvPROgJn2cGDjrWmxn/e+Z4iX3uZFZhjICMo8w5iR7XuBZhADnRH72mtO34TIjWe5oEVoXxmWcQ0rug3udtIY9AlzBEmZ5I5HBlaIw2/W9D0lLKUSLud77PPtdZxI3keNNgNygOgG1AXOX5/42/MwouUX38rthzxgHkA3rhCqJCLERgAxJ910EGOfR0upY9G142zact294WyluO4sksvqqE8wCMbOVgg0pWjIzu57U2ZycFjrERsjgXPYZQYMEYnAZ2Kw/cWGVbQvBGcEPgIvqPyySqQq1FySFfQDOcUqHiiJzZa+HfN5unFFKm6J10zNYwh6JNs6ZBEjACBICEBAAZIHZzB1QNWAEVBgN16Myn1qt/HR8+0ahfD3p8z/hfYqnCSBx1j24N4IXBKzIfdApmwcsGFxhkW/SmXu1N3c0gVVhTQAEMjKQWabWDGgUz6Q7xiIIRlDAAGZtQFgAvl2v1po///nPx/2h67rT6RRjHGXcN+3Dw8PDcf9w2CPKMHRx6IHz6+vrWLim8NaaKQYAq71BDVqr/ohC+PREqwKLylxpVbdVps856wQr56xNP83iVbKEma/Xa04RgawzKFYFLff7/dPTxy9fXy637vn1cr7c+jHkPPsOtrtDXTdENA+HikJdpc7nE6F4b7XOKwrXVEVZ+lk+JczaoTlJ1/W32w2ARCYE471v29nA/Xa7+arUaBRjHMcp5ywgzrn9fq9CXMaYsqydc7e+67oOF+UOALDWqN3gMAzaJlUga9N
Uu91ORF5fXyFOax2srRSFsNZ10zR1URVhilMYQwhF6Zu63deFMQYRUgySksINU0pKqCh9oR3UGNM49jEmsUXMCEBm1nqmkDjGlJPEnNTxDlHquq7bpizqH/70qSxLZlZRaeecmhFOC+1E73T9VVmWlHMIIXYREQvnlb+BiF+//Pb6+tq27X/6T/+pdP6nn366Xq/H4zFPUSUL9JP1kkgplWXZ971AVoDrfr/vuk6/V0S1B9WC3CRJItK07eVyiSGUKYIxwKwbMOPKgFkAWJiTEQQxxiKimY1WNmGP8I0QB70RN3mDVF+y7jdjnbUk4sW++121t/yhengiyUxRtnMtKCukBTc+KhtD2dnT9Q1z73u0adhUe2uow99pcr7bQW1srrsvtFmgNobktPitIyLL+9fn53Bn7229e2GDYH9byb0PbPD28f5X79+P24HaXP0tgh7rPtqQEhMzWCIhygZJxGYSK+zIMJusaQYSs8kkLDPw2ugEcDFGN4tEzfpYEn5akSugCAtCZBQ0gqwoR5lrMx2lEhKqPTEjscKD9CQtFoqz8tus1T2jIgGA5jyMDRMRGlFrHICcCUEIgcHM82MiAa2XNHYhKiJy07IQIyKLWhmKgGpkz11vtakkjSwqZsCaHInAzPdnNgZVY1MgiVC6K3jPLZqsaSMAA2ZAlvk/AUKtd0XAIDmP1jFSBsQ5SRKZafpwn+St1d56mRiVV9CHKsVkEkgplFV1POyfnp5SiOfzWSB//uGj3H798PTwww8/VIVn5r7rDIgj07Tt6eUZOH04HpqmjmFE754+frz2LzlHkWwMWVsCgDGkmA6d9k1jRMS6akVEMRoaJjWQ6HOlImhZoFQHgLmwMyDGEApzis6YpqqcK4Zhulxul2vX3YaU2BhnLPmyKMu6dBbBcBZDDjnEcdIRsrPknClK15Tlrq2bpkLEEMbT6SaMIghCMeRpChqk9Vr2rtQVWUSmaV7slK3R932YojHGOAsAitlRWJMaGKnZwsPDUZ8YQ+q2IZB9YZXpjyhVVWuZ2/e3vr+VDoDQIhozN+csGet9WRUAPHS3YZjiNHrn922z3x8cjSIpTGkaxizsrXOGUOjp6WPOOaU8TSGHmBLnnIDBNxWgFYAppSnN0S5nQUMWbIFoyBGRDnrruq28C+OgzcycpReJs7Acgw4dlIqGBGgATTdO0zTlEK211nrBOSieL1dflB8/fjoeH0KYMoimRBxSWZZNUysLIsY4jr0CfZk5xNF73zSNzlOLoohhAKCUwiX2VeGapqmcY2bJbIzhxGGcTBZBsmQkR5wXinm8rdkwCdh5Uq5aSLQukcBv2pVrPFjjHy0ye0vqxttosWTG76lf628X0OIsJrpELF7ec/9p8d41QZx76Ws4RiBENbUkXKhK92rgdyZ8v9fkxAVeAZuK7f5ZeP9M2pgxbWd72rzFua94/wrDCIB5DoFGAFSrE+UOddRMIS9e7ct6K2+f3Pfjza/mQeI7rRZa8JnmHUV8DnspJTGGAZwxzCLGSAYrRstlRrAizMBEDGJFy7b3rlTrQdQjskqa4exxDzB7LMzFrCJtRZBJzRvnyAiz29CM2ARUTgsJEZDF+3bfL7XtLpHMCiVEC5JTshFQhXMtgIzcCTsE8/CNZEaCzHskd5na9VBqbbfp4/9Ou3mmdooxxig4iDPzG1WFLeh2lc+R1fLkndwDwlo06v7me5WIs+yOZjRqAQEEMMM4AYDQAgJJnvdIswsWEfPp48eHh2MK8eeff76cXg6PD4d21+yo9H4aekihKIqy9MQ5x3C9nh+P+/2+bZumaau6fAKW2+1W1iUAaBmnwR5glrQgIsUJK1BTe6HjOCm8pSxL3VOlcGmyj4hqoXe5XHTA46Bg5mmacsz743F3eLheu3/9P//vmPja91qnAaB1VBRV27ZxmKY4WWvrus7RXC7nnHPh7Z/+9AdjkUTIQFl6Y8ztdnl5eSmLvfMWAPp+uN06Nepzrsg5N03
T1DtE1ILVOdc0zfl2jTHqNKttW2NMN/TX6xXRKMSDmft+GMcRDbVtez6fu64josfHx7Isx3HMnJVZz8xVVRwOB0Q8nU7D0BHR3P5a4isA6PcC0TiOXX+NY0SLra+ruqhLD6HPOY1DPwwDERVe7cv99Xqds2FGIuucOOdA6OV6I7LknAOTk6Q0hpiYeQqzWpOx5L0rK1/VRV2XKccYAqB47/u+v916Y8xuvwOAvu+7vlcvQ+2yhhCaprHWSpE1XCFi39+66+3jx4+fP3+uq+rnn3/++vULMzd1bYzxpU0pvb6+OucU1Kp4zr/+9a+73a5pK60pc86K/Tke2nEcn5+/XF7Pg0ERqetSRGLfG++8dSmFnLMvK+E8XDvatzoKsyAMYjRikDpCI75hecHarPtunMCFpLtdAHGDZvh2BLW+5x4U8+JneydPg4IAVCWJvo09v/P4+2/YRsF3kePbV/6tb3nzXdtguRa+Ziv8tiEh8JuJ3ve39t8u6f6x1+ndr779XvOf//Q5s7AAs2RVkAQE9TTIwpJZZP4PQP0GdCzHqgHLWUDfoA3LeczECiTFpS7muTOng2hmzsICgMYgOkEDRIiWrBsJwVgwVsiwMguMQTKgeBejI12F7zKg6DMDxizJkSG0QIRgAEjEiJAwCRgAQ2By1r6+RSS1Vie1Ap8/CAk03SIzK1neWeOkmnV30ZnlgAoiKDdlLtu1848iahAPkjQrRFDlkpyzACNBYrMM9lA0HgEIwBSjcixCjAJgjI0xERm0xdCP4xSs89ZbEAFA5wsAQZyNSQjJGmesM9bNGr4GyRhrFHYrwLLbtf/xP/7HsR/+r3/9P+qq/E//8s9Pjw/eWUpd09R1XQHknKIzWFfVrq27W/f08PDx44fSez3bREKEKtKxSDDMMU+XyNPpdLt1zs2k8q7rp2luh+o8j4jatt3tdirLqevCGlQU4yDTAAJFVe52e2Pt5Xp7PZ2nGM+n6zCGmBIgVVXVNo2zPqdU+xoBhXMMYZpGRNzvmqenBzJQOrfb7ZqmJoBxGGKIhiwLxRj7ftAGGqIpilIfzha0mAgaY1KK4ziQsdqJtdYKS9/3UwjW2v3+ME3T6XwehoHIFEUhACEEPbWF91VZMqdh6IWlLIqmLg1RUbii8Mx5Ggdh9s5NY4+IK0PZOVdVZVWVIDwO/TT0ztinx+PHpw9NU6FAf36+Xa/TMHhnd4dD0zQAGGO6Xq8aMr0vjLUsOIU4TjEbK0B91//229dff/2t63pRAklZECEIGzJVUezb9rDb1VUVwjj1w8vL15eXl2kaC++rqjSEOcWhH7q+F86+KMuyVFDuri4RKLNklhATZ/beHx8eCu+7rlMA3e12BYCyrlik9uXr66v3/ng86jWjqM7Hx0ciss6cz+eyLHPO2ha+dbcUU1XX+90up/j169dx6Ou6bpvGO49EKSVgcMZZY4jwFgZCctYqUtKStdaigHOeiCxZbWYZMohkyBALwYI3m8cfQCoLpWIQMPvC6H+yDGnmd+pNiCSa6oL+c/6JoGMWY2dsxGK0RcZYmtc1rStEU390ZGYwAxAqXZ2MGvJteoVrg5G20WBtJ66Ra464mwC2BSQKb2B6C1ZPNgTwb4tXItLhQt7aLb2xE88AShZbpm6KRF8WTs7CknVKh3M79js9228D3hx6lxmeqLU9C8vaC8R54Z4PmzHGgBgQnHl7ACBGSO7NViSd7aHl2QOWmdmwXVj6991eOtcqf7W+vo79EJG1Z8Tq8a3OTwbRIAtAFiRkEAJgzui0WpO5721E+yf0PmJvc417eS5AIkga7QSBAZlw5iio+iouY2Sae+uwngXYQGaWz4SlQnvfE3iXdGx/u2lOLJKSpEZGb/72zVQPNhzYpavOi2ybAuuNMd3ltlqwKmRD1KBnwyvalpZLcmqQhYWtQecqZ+nTp0/PX75MQ/f50wfvfems1lv/+T//izVmmqaxj0Xpm6rKcfr166WpS+9tTGEYJoG
8a1oEl1Lqh16HNworV0llhSzqpaVRUESGYVg90xX0MSt5ImoU0Sfa+VyFTJumQTSIlgFylimkfpxu1/759Wyc9670VVmWlZuR8SAsZMBYL5KJJKcgkmOajvtH5xwZiDFOQ780Wl2IMcY8TVOMGWC26yvLktCq9KKqbc2SKzmrH9vsAh9DjJEMFUWhQ8rF5H1GOzvnhu6qRwCU8iyiZbHS1MjA6+vraqJ0vV6dmdWTx3Gs6/rh4QEALpfLOPY5pbIojsfj09Ojte56vZxOZxkuVdU8Pn4oiiKDdLchhJQzPzw8qq5CCCmkGEPWHuwlDIqN1Pnffr9XuZaUUo4BCdumeHw8tm0rwtfL6zj2l/Nrfzkb4w7HY121McZb3yGi9/bJHYwvyrL0RaVXbH99BcDD4aGuS7V6yHESkeOHx19//kXhqUVRfP369Xq91nU94k0P7/l8VhajHitNNaYwtG17u92897/88kvbtlVVMGmTyFfNjpkJJcZ4vV6dcwKQUxaRFA2SIBjjyAgTZ9bZCGXIRlAIWMTomiAzEuUOzfj7Jch6877753ovb9eEbQTCJZ4ujUEEbXcqsE4AEAwgiiB9pzbaFEz0rnf3P/nA7838NrvwhsO37jK+beS+iUZLcUwbZU6DZn2+Rjj4XvX5j1Z78DY8/5u1rA0xqmianSEtkoVtNkiSlNeZWAG1emf6RR9uRbKsrd53XlCzEfC6/hMZ1VdEFkIygoAoBqyIwhKZBY0gLSwFFEReeZRwd4mkhVyvYWWJUHcFa72GaLYskhkdKawpk8JeCBHn9qAWdNojFqXSzWmcAGVAJIC8GHsBILOwjisXQ0CcuXMgSALCiLKAsmY4MhGJ5OXSAQBhziLC6AGAhbNIZlZZzyyMhnLOSRiMGq7OgsvD2LVtW5Lv+54zm8Kp1r4Cv3BzzRlUyW5FG0lKCUS8sXVd1lUJzP3t0jbVvqkvl4twfnz4yMzn03MKERGrurTWXG/nseunsf9P//JPhXXDMMR+bNqqqipEGW63lIPzBhBZEhlXlC5FNSmNALiYgHMIISUmssbMN4/SFVaXJQ0JGva0nPLel2VZ1w0zxJxjjGPg67W7nG+n6zXmZH1RlmVTN7pE6oXXdaOIoDASWIPO+rLyTVlWVQXIOaZxGPp+0OLSWhNjDlMKIeUkzvmyqKuymc0HxnFly604hcz3qT6oEjGCLMY0GuC1nasUWEmxqOvKO84ph2iRqsIVzhiDliDFFIYxpSg6oErJVEUSJuGicFVViOS+78/nVxB+enr6+PFjU1ZTGC6vr1MYHJFpd96VQHYMrEYW1vmy8tYVMeYpTH03dkO/7Eu+DTGlJMxNWT0cdk1ThxDGFACxKlxRV4fDoWkqkNx3Xd/3Uz94Yz5/+uCcI2MAsnd4dO00TdaXZVka6wAgg7J38NPnH9Rl6XS5csoKkHHG/PTTTznnHAMAxBT6cUgpoAFjqg8fPhBh3/fee637VTXNOTeF4fHxcZompfBba3MWRFJYZVmWliCGMU0BgElq1WLJwJIZEqNCIufJPACAMrUEQDQOGu08AgBkkOU2FoVGrBhrEYHFnFtYYGb9goggC4iQwGK1Mpu84zIjXAujZX3fRDBZGmQAiJqjw8zZU0fzOwL0rY7/LM5NQrDM/P4Ho+DfCXhvI997svn6z3kJXWzO12HWvOPgUEjMfIiRQJ/nbACQFezOtGpL/X3e3ndeX+ZOmuTpJmyCIW5qGlq2F+yqlbwQFQwzZmMQxRojIpnILv5MRASY34W99fm22lt/xUvMIyIWrfbUVA8IDGKmbAQyoFV5TGFUKw8UyIBAJEAK9H+X/byr8zQQmqU8X44ja49CLZiBxSwfQ4gkd58O2NhzrKcTAJDu3sqwabt/b9b6/irZsBtnOwvKb/wLWSmTInlZ8ef270IVUo1z5Ush4mwHI3D3dwYkQq23cHP5GhTaGAKLZEsGEQpvq7KoSj/1w2HfVt7FMCHk0jtrJMaQYui
Hvq7rQ7sTTrfT0NTl//O//pfDfnc7X4auL8vyeDxWVdX3txBCWZf6FSq5iYtaTUpJqVda/HVdZ4xpmmarpq8P7SIOw7BaV2vpVlVVVVUCKCgMuR/D5Tq8ni7n220a4+HwsLpvIwBIFmYEnqYhhMA5OWeaptofdvumKSsPyDHGoICLnJkhpRSm3Pch58wZFIioGFQt8jTmrY0NWJJ6bcM658qi8t4P09j3fVFUiJhFUkrTFMdxHMOUc26LSudeitCpqqquK+dcVRXX61VVuIwhRfbXde286bqu8u54PCLiL7/8Mo5jXVV/+MOP+/2+rctpmrquG8beObPb7Ry6EMI0BY3BZVGiIRH88uU5Jp6mqZvN/oZpDCklMsXT09PT05O1NsRx6PoxTDmGZtfud+1uvyeU/nJapLTRE7RNVe9aRNMPwzQFNFS4whJaX1hrGCDGOE4hxpiz/NLftMQvioIQYozTNOSc/z//7f/9L//yLwTy9etXlnkNYeZm3+z3+xAmZa0Q0TRFPRQiglQ656qqUpRsCOHh4cE5Z0lXW/a2EE46nmROThwRSEbIiQOiMVg4YIZluk4oKMxIkhkICbPMU4yFcvA75QV8U9J9d4z37Wz+HYbzzjfW5FvXZl3j0BDwFvC+DXXbagbusICZpv1tafg/Ev/ePN/uxfuydV2gdGgiGyb7EvqXwve93tT9sAAAwRvu/7cHfHt4v/863L9xfv3vHok57E0pGwMZsmNAFGY2bCyDMeofmIiIFX+PbABZInFWGCcibp9r61TjNxEZEBL2OLf4jDEGjCxMCxY2oraxhGgwZy0Ak4DhWZJspjdoTxSVSz6nXLrXv7dvWjDODXcR0N4xCwEjyAyzuRfmTKJuvMsQT4+OZABBQlxKQQCGO88h3y9EBbuwbA+u7sFa6s0wH2Xez9APEJGkiEwWZkk6FJ0xmhCVu4eQsqQsJkvK8njY912nzSJjjAiLQKnmA4o51iJ48asEAOYMkL0l51xVloVzFsnWnsCdX1+A5T/88Q/euq+//DwMw//jP3xoygIAuttlmgYieHp6+Pz546+//Dx2vTN0OOy8t+fraeh6Y0xd+2maEKmuS+/VZG6y1j4+PuoxTimviHznXM4RN5h+WWxcZtxKzpryry3QME4IJjGMU7xcu2vfC2NR1bvdDoSsQWvQEYLkceinaeqHlFJCFO+ruil3u7auCkQJ46icdBEpXJkw9904DP2lU8pBWdf1zBRknqYYwmwfulodqaRIZFmbtNpq1h1R5GFOaZqmYZhijCyskEi1Ms85O2/a3Qx7UaZjzklbJKtBR4zRWuvKIgmPt77rul3b/PnPf/rjH3/srtcvX36NMVoyh12bOU5DL6ZFtHVTOFcAwDiO55dT1w3n81khlzHNKuS+LAos9+3hw4cPu91uGIbz6Tb2vXHusGvrXVt5xzmGMHa32zRNzpnCF23dMvPQXfWu9o5CisN1rHe7OI1jnxOLiMSsFMzcNDt1kO/7c9ddT6fT7XoJYfzP//LP+/3+cjmNYbKGiqLQdshut0spnc9nlSvTc3c8HnPO4ziWle/7fr/fv7y8ENH5fCbr69KX3mnYAzJEVBTF2CdOOVMiItV/yRiNMdR4zSh1cmeR0JjMkHNWE2cQ0SR5FVbeBrZ3//w7j3er87q4b8FoAMBZF5/8d1bmZXWnbXGDSHMVyiAkK+RuE/BolqT/xwPeupvvnq9cq/VS/xbRivPG3ePTNuypm4piOHkWVCQAyRr22BAIv6nz/rEOp3ZeliO/Ecuc9au2R2Z9vq32iA3M9jTZGKeO2Fvimc6QhLXhuYorrgXfu9me/lOWCm99mxhGMkImSTA6qgWDYJiSAUwpzeY9gkDabc8ksMz2kIjeMjM2I7f5KskCs0qUSkvrxSKSV4wyzO6LApsTth6mv98afne1f/fSf3f1b7vBxhgjuA4+hWXFqmXO65+r7yrPduhZa75xHHf7NoagmofW2hwTzFU4k5j
lwmWzDPhCCAbRGFsWrixLb61BAcnd9VJ65yw9HvafPj49//Zrdznv9/vz+Xw4HBDgcrl4Q58/f/be//f//f/gFKuq+vD0eDwex/728vJikB4eHojmcZ21NqV0u91SZJX3PJ1O12sHACortaz1k/de4Q8a6lTmAxaJHK0a13AogTPwOE3dMPbTJIy+rLz3CAYJrTWFdc5SDHEah+5yjWiLomiaan9oD4d9UbgYp3EcLRktQ4ksAQlzjHkck7qBK4NChTE1IM0yRUR6JYcQdDUPmQ+Hg5rqdbc+hCAI3nv9bTcMwzDkPINXlfyg8VKLdaV7T9N0Pp+0qL1er1oHM3Pf92Ly4XDw3r6+vsZxenx8/OMffnx8fPz6228hhBAmFNDBVEoppkjSlmXhfZFSulwuz88vl9slhDilkGKOOQGALXzbtm3blmXZ+Drn/Ntvv51fXqcw1HX9eDy0bQ0AMYxdf5WcLFF1aDWXFY6GrAFImdMUsoAlsqU3AIFTDBMLWu9q5723KfHX55e+75WpYgw+PT396Y9/KMuyqdzf/va3v/3tL0T0cDwUReGcUUDT6+vr6+ur1nPe+8NhZ4xRo3akhnnWcqvrWt+md5a3BkFCCt6Cdx6ru4qKQZBZVV5yiBkQUYxxS2MKAAk4AxpmtkTCb9T7vrvg/t49/u5mh+81e97NvdYHLcuRrHy1Td25/Mkdizj7kgJwBqRZP0zmsL0Gqn845m23/93zTZl6r/P0sc625moPv1/t8e9XeyJvWl/bQ/Ttifi919cTIIteDPxdnCqu4mQr/ERLE9Xe5KwKUsYYQyjKslqJeuuqvYY93Mzzts8TCxGRgfWdYjIgoXWYkcSgGCOEQgaBBWNKRsRouJbZS4dpYdjwinnEhe22ORYLeREWZiVzQkl2bscLCAMT3EW/ZGnFAywEPRG9Zea7SLW/ZIaiIoigZBRWaQG9vRaq3f3Ge1eVg7LjF/6pMcKMcxEgLCBZOC/ue6oTowuWiMQYGYQMCUI/Dk1bVlVp1L2WmQgBRXJ0dnGAkoWMhGAAMkpR+Louq7IkQs4xholzDOPUFMWnH36sSn89vQ599/hwfHx87M4/IXBVVUXx1Nblfr8P49gP3dPx4eHh4XDYK93KOVc4b4yZppuCFHLOo7bRyFprtRM4XzPGICJn4DxPxdY8SeXwNYoTzYgS55zqk4mIQT+O4+XaXa5dmBIaa5wzrkBrHBlrCUlyCnEcJQaQXJTV4+Px6cNDVZWWgFOYpmmaBipqvcJTyuMY+n7s+zFMsW13uqTqWE69XmOMzt1b9xqbM0cy4M3csZympV8KogViSmmKEQDc8sAFmMqS9FuMMTnHlILGdWXo62pCBsiAK0vlO+acd/v28w+f6rrsuusvv/zUNE1dFUrozjF5b3dNzdlPIZ5vt9vtdr1eh36KHAEos6Choqicc74olFzhvU9DuFwuzy9fhq6v62rXNG1TOWtjnAilcLMVgjEzvChOQ9PsqqoaQ7per5ySq+qqaWOMjoidI7TWOyKbOCdKu91htzuoR6NIHoZhHPoQQsrjL19+izF+/vz58HCsqkL9Gqdpul6v2g9Qx2BjjFZ4ACCQV7iKdol19wnY1pUhjDFKZvRQlmWYJuYE4AwRkGRBgzSGcfZZdSjGrctuZkZk7T8BIrAg64Lzplr67vr77vn6yraZuc6AttFi/atZUp8V5Yhz6Hsb9gwiiPBGpJDnGAKEtKT4mqf/L0C44EygX1AqsE3c7wEDvzfbgwVvoauu+oMDAG+8ypfVaZ7zMWdEmmd7ArQ4J/yjs71V3UO+3+x9f2Rwne2tW8YsWpkRkTCsVYghuAutgmzRm7QRanuH3lza94KIZGYGNzMLy6y1JcpIsIKJ0EpCM7d6iTkbmO0XABesCuKS7+ie5/VCAVBmnoAs4svvQFnrf3MHf5sU3Ot62fSL3136smC9lst9+/P72d/2WL/LdNSGYzsxeif6t9TKknMWBGVM68CpLEt
v3fV6jTE6N5u8aI8LFzw1IqoRrpYdh/3OOZfiOIxpnHoJ6U9/+kNp3X5fD9fbL7/+/HA4Ho/7L1++/OHTp3Ech2Fo2zbn/MtPPxtDx+Px6empbduU0ul0khx3u51Bul6vaIJ6JozjqNqSRVEYY758+SIiOirTQEKKYnHF3A9cBFxERMGEiKiwHQBQXAMAgDFDP52vl9t1yEjez4pCVVUZQATOMU5jn8ZRJFeFo6Z5eHh4fHxAxGm4hhCYk+JdEQ2ixJCu19v1OoQQhOHj8XNZlmVZbINZSsn7Gpep6io0471HU+hCHEJw1ltr+3E4n8+IZm3kGuNEJKY0jmNpnLY3tcbVXVNx59PpNI6Don7GcSQDTdPUu/pyucQ4HQ6Hzx8/eO9Pp9PlclF3Ot0STlmFx6y1/cTn8+Xl9VX73mVZQqRhGETQe182ZVVVbiHVjePYP19CCDmmsix3u51yA2KMCBqYK2NMCOM0TZkjINRlxSnebllRtRXLlOLtcrK+JETnLAAyc4xDSDnG6H0zTdMCFg3TNKUYAKAozcPDw/Gf/rzf7wVSXddNVfd9/9tPX67Xq7VGeZwPDw+Hw0HzjLZt+2HUue+nT59ijE3T/OVvP4cQDEpTlYiUUop5kszONpo/WZucLVhVxQBizIh6cqwAAwuY+5rwbVX3jsb3ewHv29++iQSbqd5SEsHm5l6D38oUvDcnNQwvZRy+/fZ5YZqrKTTzcvU/N9jDv1seffu2dTdF7nDMFeu+jALXQ/HGY11EVn3O7UF79/g7Zfd3n/x7tn/7sLbmnGPMGRi8cZYsCCGjA5clWdaenOUMCRkxOecQGRc8vTGgvRBt9poZUmKI0BggEnTZGGMBDQtBtiJL31MQhVgMo0AmEYuekE9xbxgJwBgGFCImQSKyYAmQCRwTIy7iEJBoRAFABgZCBgGEhCCOBTliTsTMEA3PAzqKaT2NernwRrBVx4GWlXM3B0ojhAzIkASE1cFAzZhYoZcMyIBZgJktYQZgFsmMwsRiAREwpWwBPamydxQBg+IMNpBjjFNKnENiYYbAmVmmlClbJhRXBOYpZDLOVPvX19Pnz5/b/Y5Bzq+nGGPhsPSu8DYMo3AqisJbBd8nRDwcyz/+8XOYpucvv1lLIEly3rUNAJRleTpdvv7ya1EUjw8fCQWYYpDD/qn0dhrGaeibonw4tG1Zt6WdLq8hDrUx4CHHS+AIyM41bbtXYTMU8NY9HPfn8zn0t7IskTmEYI15aGtdefuQ9HiHEFKIkplTHseRAD58/DjPnM7nlJLKNV1S/vV2+e3lhbPsm7aqCu0TjrdrXdcscjpfpn4oiqLefTTGfPrUNk1BnKdpmsYgIgAeRC7XwXvPDJdb9+vLS4zxcHh4eHhAmQwZJIoxXm9dCMEVRXvYE9E0Td3UhxAY2JTWeGeMITx002nKkZyPIqdLN4wdC6UYd7tdUbgQwhgmVYxExNIZAFM19eGwt5b6vhvHERFvw2ALU7s65zzlwVpry8IUtns5Hdp2/+kHa20a0ngdEXHfPCiO0TlXFCQiKaUxhG4If/npr4uSdU1Et9tNVdnatlXq92pjpM3k55cv+/3+6YeHsiwNilBwnpqqFsgQcxxPicVZU5bW2pqIzjdh5imEcRiFyJDLICnC8+sX7wpX1YiYOOWcAY2zxen6Mk1TGEYlpTRFYdsKEcd++PN//C/OmS9fvsQYvaknycMtNk8Ne/7lbz+N4/j09GRHM30ZtHkQOT4s/eQvX77s292Pn38IU8/Mp9Ppr3/7y2HX1nXNWNwSl2hM3XaXkxAdHvbqEvXx48fh8hWlAszWG29oDAMm2+4fJcehHyhObQsOaJomA9xWVeBSk/OceR65awTapM4qkjTzkRaVqHfruLCgCC0YGWYGZhSBpOn1rDmB61gsMyzTLxYhoiQJAWu0MOtJCWU0AGqNbRRvzorSyQAos6qUcoiVYbz0vwC2oZHf/GIdGK3/zRZszIoxEDKk+AN
hVhXveWQ3hzgAgDjLnQKiAKuusBGRKFGJiEgikBfhRSRnGcFAFkERm1LKKnJk3gyb1gpkDpw0zwvX1zMmADUU0BXdGAFEggyIxgASWQJLQMjKxhYAsL4oV/uPLMiJVRmFLRgwmbJJ1hi2NPd8FjdvVK3OlbRAs9+3VnuZyIpki5YhccpiLZMYECYyhtEaYgROBmyWKIJGcka2nLPJADqzBZit65ch6lwkEpHy7gAWaU2DkkWtblVDjJNkEsachRkkg8w2H+q2s83L/n7G8XvV2++lF+8qv+2ffFvzERHkzXtYmDMolDMzQxIxLAAqdAsMYKy1Ly8vIUxt23rrLpcLSq7rMkxD0zSGQGs15YoR0adPny7ns0JFuutZMn94fHx6eCycffn6tb9dHx4e6rJ4fX0dhy6lVNcftMdVlK4sdruq3u3qyhU5x/tOicCCwPRFoUUSAJhFeFPl9mEl3gHo9M4YY2VW41yRLPq2tq4VvK60ca3PhmH469ez+pgXdblr2rIsrSVjTF2rH+yAKFVdeO/bXb3b7Qqvqlc3VQgzxgiIDke/fPnS96O1Vs0EnCucc7X3AKClKvOso62VqNYc+iHako0h326/aUWrDnDDMCBh0zQaXfq+Vxq+tVbn0Cpc0rattXaaxq7rtK4t6kKJdDCTyr2xlpk/f/6sWJ71EOl9pzPCEILOCwHgcrkolbuqKgBYa1CV8iKi4/HovT+fz1+/flV1UCL63/63/81b55whgJQCCOuEtXAGDThQZKyklIZhyjmfrqzK/3qarCMSg8CHwwEEBSWx5JxCSCGEnOQyXCFD5ghCZIDMzINMTdv3NxXBeXx83O12cQrM/Pr8rEH66enp4eHBWiuZy7LUd2pEr6rqcDgAyy+//KJzXzWy77pORFRH7Xw+a7NdY+Rut/v8+XPf9/OoJaVpmnxKZAsWmaZJtRsAIKVk0BpjQCDnDPj+VoWF1r19sr2vv1u1rH+7XUZUrANggc/MbbolLgKoev38Cap1OQuPaVU3k6YQUTZF5IrOk7ctLvydKvAfrY2+fdx7oW/nmr/3ye/q4G01jJshqCwSZb+3qMqm6oW1Op/LR5UGgzfsvVnA8v3Dto8fYoxxCiEESZmZU86SWQSyCGY2AMZIovnmR4ka9gjQGENZ+3XqiRHdOudDK44EbUpsjEmJDZJBsWTIAkQhi+gIIRJbkGw5GbExU7QTs7Fi2bKGOJYZIKNhD8ggokEiYBQgIgPIyMRglPI/S8qwcCbOLEycRACQSSAleXcCtv/cHuXlLG5PJ/+bQKnl/CyyNqLTurx8g9wnfEhCBjFv57qylJDzXQLAM4tWma6sYL++H4qi9NaWZRnGfhxH4dx1XeFtURSRMITQev/p06dx7F6fX9q29t5fOZdFsds1w9DlZLruSiTH495ZGn67AcjT02PTVgDAORmkqih00oaIMab1htejocWHsVaR5dpzU9xNSqkqSwUlVlXFOV+vV12hKCt6J6cYcmZaNJePx6OInF4v5/PVWutscbvdfv3ly+ulR8S6KHd1U1WVtcZZssZk5tCNkmNbV+p+0DTNw8MhTL1282KMxjgRnMLU9z0ivby8jGN4enraVTUzozVl6Z2VYRi6vk8pW2uLwgshS1JcRkiKQbWIJsU4TbHvgkYdlalExNIXqoTZ34budkPEqi4IRXShNKZq5oh+u900PyCiwjlOaeJIRN6XdV0rS32/b7Wzqp1hAFBbNcXxg9A0Ta8vX7uuSykR2uPTkwbX2+2mrI+Hh4e2bfu+B4Cu667XKzOrTaAxhkBinELMBtA5W5VlVRWF8waFUEhYT02MMYUYYyzLpigK5wqdcGQQTowA3tgxxCmGlDgxx7ioikNyxhdlQWSJdM4mBJJzfH7+Eqbp06dPu7YGyVMYWOYWd7XbqVjP6XTqrjcF1mpYGsdRgU6IMAxDYawOgPu+j9Oowm91XSvy83A4WOvPp5eiKJqmOZ/PvnDW2inFYRh81Re
NZYEYozfkrZ0Vg1A5x0Zk5XkhoKqirAFvFh/RV7Tug4V1921QQZVH2UY8NbthvZ1nrB0iEjMiZla/aEacUeuGBVDZXKDYQiZAVC4WirnL4s+hgGd4J/xbnUPYBOPfa4/Oq9XvBDDZstRnyORGPGS7tIJRJvQ2yMFi2LTaNmnVJIKbZXOurec0YbbTVrDH7JChgMaZ0T17ralJPJiZ+b36j745Jvb48ccU4jiOYZxijDlq6ykbJMm88HN1IU9EFkCLSrRIDGLAMIhBDjlbcmJJL3eDljMa4xKRJWEjgsSImYASoHWUE7JBYJSssCoitiypmP1fDLPM5txLOgBoCQJmgwREymlzSAkAgO1MOdcSOycQ4kySMItA1jFqBsj5+zTSb8PecmX8vbLv7yUmb3v52w/f5jjGADOb2f05kQBIhswgIpkEslJ6lHwHLIlTURREdLlcmqra7XZcFefzeX9oLJJ+jnP7siyronTOpWv/w6cP1toQwqcPH3/8/Nlb89Nf//Z//eX/3re7x8NBMl+7mzHmePz4+PhYVZSmkHJSWH/lvUFMOUBejZTWOpustZlZV38F9Gs/DRbtAr2stdTTTHkVJ9OLfhVqIaKX59PpdAIAdWM4nU7Pz8/14QkAvL1b52in4ny5IkHbNvv9vqy8huGUp/Uu8t4T2Rhj3/eXy0VrL/Ul0G8vCu+c627P0xRzztaasiysdZGzTolE0BhjyIFQihxjjjEpaPN8Po/jWFXV8XhElGEYnp9var+w2+3KapbctNbu93stDa/X6yybWRSaxsHiR6irvL4+TWOMSSEtGu2810GeB6FhHF5eXp6fn2OMbdvudofX03OOPEy9ZNgfd48PD66wt+s1S3r+8vJ6fnHG//Djp12zTxzDGH/+5f+ui7Jt26ppSw14hChZKTlJMi+EE3VZGpLVYW2MOYxjjCnkFGJOiaeQxhhYr1A0pbfekilsVZZVXRfeC8AMxxX23n769Kks/NPTB2vN5XIlwv1+37Q+pYQ8E/UU4aLwcmOMM0avqGEYDrt90zTX28kYU9f18Xgculvf9+fzOef84fF4Op2GYWiapqoq1Qmaj7BzIadxHG+3G6CzZU3AOSYzm0EmxdkaOwsewga4sGp3bbNSvZLXmPM+kLwfyN2h2iKSGRGAZwKDEADPpjmsohnrOqTl3Exrg8334nadeb/oIN5Xqu1C/23/6d85z9P/D5uKDX6n2tsO/+5Hb8N1nks6WJgP33vQG/zEuoczfUIWAON8tObH4jq3rJSIRtAQGERU3ZvN8i4AYHdPP+YQi2kM45RCSGFKMUrKOQXInFLIKXHKzMIgxJJS1HZ0IiIDhomILJIwCzEwEllDkNEaiy7lZAokDb5a5QhZQEayCJII3SxogqyBP06BjWFnmVnUbnEGmQoRCREr/DPPpLRFmVzyjErW+VsWFOGs/ulZkjYJCZD5fmK2Z+tdt3MT9rS5/14i4U3lh7L+p29fKYMzYQQxS95UiqzpjzHGMyBbMZkNsOXMYHNmAsk5A+ss0RCpEQUt/d71miiKoj7sdrvd7fq6a9pp7F9fX4/7/Q+fPo/j+N/+2387HgqD9Ntvvw1d98c//vhqze16Hm5dmAZ32CNKP9zCOO7b9rhv69JbTBnFGKzKomnrypfCKUnOS4ediGb3CSAB0rpKUYLKl1LXGABQPkDf9ylGDVq6vqzFoiG7Qt1+/vVL13WA0DS7yPzly5eX15MtyuP+QblczjljiQiZc4w5x+lwOOz3rbXGEFtrAXgcrsaWgiBALBymaRiGvh9iTKpl7H0JAJljURTeUpyG2+1ijKvqwrtyNYXQmsw5h2RFFF+TQsrCqFibRZPFWkvjOF4ul8vlVBRFu6u1XFakWF3XTVOLcN8Pw9BrdxRJpjB76zwejjp+s9Zqw+R26ziDiFirKJjCWqsNnPPp9PLyMgyTtb4oKhF5fn65dK8GDCNXvnKGJMVTdz49n27DbexGV7qHh6fC2Zc
vv126Cwk9HY/Hw+HDhw9NVacU+tvtdrnGKVR1YREUTi2SUQCJjDG3l1PwnohiyGMMrLYhItMQsoDiusnZwpez4FxdWCRb+MI6IYwxahXorDnsd2XpRfB6PXddz5wQ8ePHj+rYUFUVEQ3DgAJlWd5ut9vtRgDK97jdbmGcPnz4oPePOj0VzjPD+fyaUjrsdrvdoe9vl8tl3+40NWnbdh3EqN8ToKuJ6qaJce5PAHMIIUqwVFnn1HcTcKYHMKDa/MoiyqjdF164YkzL/bj5iQCyEHPvYMBZFUTDZyYBQMmqzyKAM09eSEDX/hnDiSQgogz2xZ4AQLGfzKAungIgPAv0f18z7F1e/u8OeN+8KPcgt7y6/lii0ALjJCKTF801ACAGIhTDKuokQKrsu9hi0AzXAWaVv1m/TllnM7RxS0oX0v/N4W6uZoB0UUZanAppDn6KOLXt8Yccow9jCjHFKc9hL4Zp4pQ4hTgFTiFrj4slSRCApPpPCQCTAUQBR0a0bM05IRpKhg1jEhImstbq4IfQEhs0DJmRkSgBBkRjKNpoMqcQwkqT0GhNVkXOwCAykUPDBqyO+RDdarKopZ4wCAuCBQHOIJlZDOQsoGb3lOfTtiWOvIt/28xozTk21wr/fbjwtt3/LpHRXSBQxjoAgNUiXryIiNoeLda1kCUzz87LwiiAHOu6VupSW9eIeHr5mtr24bAPvf3bX/46heHhcPTe//Wvf9VJT2Eape798ccf/ulPf359+Xp6fnl8OByaWgWomqr68MMPTVO1bdtU9eXytxyTJVM6760DZM6ROWWOzKzuOWCsSBaclSoBYKblLVj8oij0etUhmTBrpzSEANbpjeGcIzSaa+csv/32W1FUKhH5/Pz866+/IuLHD5/ZmP8fZ//VJEmSpAmCzCxAgQF3D4+IzKzKqmrcsz2983R087a/4H7APd5PXaJ7OtqX3aXbuZ2e7qruKZCZEeHIgAIRYeZ7YFU1dRBV1WsUZGFuUIGosDDzBxDRe6rr2gcnJeecFLhpqraNsaKcR2EgqhFBpJBqzrnvB4te4zjmPHXsFgv4qqr2+72qWmGwqkLT1t5Hq44Ow1CyOBecCwqYUhmGNA65qCC6p6cny4Rsq8w61eC11lRbonvTTC56RoqYG34wDEPXddaIsi6XiOQx5ZJgyoDJOeddtPeLKHM5Ho85sfcxRh3H8XzurAx7+35v7LcQgogcjo9d142pH7pus93c3t5WVfzy+bO1u372i599eLcPoXKEQ3fq+76k0ZNrdxvnMDhPDqXwMHS2mlFV7yeTdAX2BIoEAE7AtySmz0fO9iVWlfceI+UxDd3hnItRO6xaW7UN9OV4ehj6yWrKcK02GJTI8qeprAOw2+2sZ2xODgbJub+///jNrb0hxhicH8dxGDoR+fz58y9/+cucxy9fvkQf2rYVLVNpIWVg9eRAdOjOFLzs94QIUgguQdH63+vLdpkNFnEJnX1mLjPDJe17Vum56OuKrEHaIibDIaxq0Lgpw7OAhypzWY6m1yzJU7Ba4Ryz1vPSkgvqzF17EfPwz+ICngAAgABJREFUOYMC/owiJ8yqkJfI9mpy+1rYw1ewCZtsrV21zgjfuMHMUpuURoxnhhe+udIMINJpeWI/hQQWX4gQHKJDBSIPRDDZF1yCtd/efBDmlAZOmUuSXEpOXBIPA0jhkkqaKp9T5aE4MGJfYS3MktkK3caudsqApMCIRJSJBCU4V9jE/5HIO/bqPPiIqt6glGCUdfSeR6fOOS+enJvsZsvM+TMTMnITuhPVARacdKWNvyAgRuUAUKeiUhBAoaBplwAWntpsi+TYkoC/LlDApRQgz1mkS+SznG9lbwcXTc5JTJ0mBz4AkOkbHCqgB0ZgdDQVJuJ8XhmRFVhVPatMuu+2SpwySEQElJSHMqZh6HLqbvZXdyht2/ziF98Dyr/+y6+Z+a//8lc//e7XJeXr/f6XP/8ZcDnc3wlnj/DtNx/SMCDizdX
u5uamroJzzgFIKY6wqmPdVMFhSXkcBskpZUNFeiIKVVDVzElnUSLnnPG1jf1WVZXkYtLMzjkXAiJa1W7sB1PQ9pGIKJXSHTv7oMXOuy8PD/dPjsLuat9ud30B7zUG553jXFIaWXIg3O43gJLHEScpAAYURBhSOXXDuTuXUiRzmWQhsa5rRCTQWMWmigSaS0aV7a41p4WcszUpASCEMKmX5TIMaehTzlxUEIhUgMUyoTKmcRxzGYPDzWZrlrbm8Ldt223bRueMcmDfqcApJ2NfXO93V1f7tqkQpOQxDX0WdoAxOO+jMdmZdRzHoR9z5vP5HEJFgGM/fPr0+XQ6ee+bZlMFXwVfxwAAx+PBhK1DCN9+/GAeQKfTkXP69uOHn/3sZ99++21/ehj7c0qDKa+2ddxut21ddV3nPAFAKpMgTUlJVa9v39nFUnwAACAUkSEV55yxyMjK1OCK8NCPPAqn3KeRU2aYRCqaOl5f7cc+MY9mBm+0DUQcE6tiSuXx8TBVj7u+ruvvvvtuHEctPPSpqqpNu8NJ9QO4CCKG4H0VmqZpmo2IHJ8eHh8fQwgx1qYA0DZbQyGNkouI99H52I9pOJ6H9ry7vuFcRtUYI1WVVZWZmWYYpHUVCElEFi62Ve3s36SmxBP+0bQ6F7SnFF6C31IRVVUwNUK9iLUgzhQ9AKtIgSl8EgAg06Q1CIspn9W9zN5lmpMUJhVQ42a9gWf5v1bkfBEFp7D0dm/va2HPASgAW84jADiV75z1d5aungg4h1AyIIoAyGwnqoiT5apZJKLBWAAFlNCM5VAQnPXBYEmY0FAmEyHdzpuVjn27fye5hDyyQUhzzrmHwqUZlRmklDRyHqewxxwYRaTMKBiXSymJRPOYFEBEHQBYV07Fi2ccWciJC2SUdkYtpEGU5+UWIJHLWEhZQ/GTdw+JIBHThJaZ6taC6NQjMRISFAABmZiWamRIKaBOhQlARVmcmsiskJVZ57BHK8uCdc6+FPFf3+sE8DXIsvyJ8YG4BoU5Zwbv4pxbL7G8KCApOXasnpidkFOvpYAHZRBUBAUPkynI0+Fhv9nGGHMaEXHbNoByeLgvQ//z777bbrf9+Xh3d1fyWNf14emhjUGD/4vvf77bb37961+Tyl//xa/2u21T1d/c3njvVQooo1DO6fF8bpsmkNtsNrtNG5DKOEhOxqpWVcPtGomQtdgMa8gLK/1ZQyiEUEQNdtE0TQzBZn9EPJ/PAGBVUJyJccMwfPj2O2G4u7v7wx/+ICIfPnyz2W1zzjE2RORImEvXncbUR0duU8cYhqFjInMfTSnlXBDR0gITYfBVVFUDKtuG1VW92WwA4HQ6qWpd13WDAJByMVk1YQgxxtCeTl1OY9cPfTeakFEREcnXV1ePj482sb5o0cHsoNs09WazMRjLLA1DiDimMaXUNM3N1fWHD7dWmO2Op67rJk2yqlomNxEYx/F0PHddN445xvrp6enuy8PDw0NKqa6bm5ub3e7q6joQUC6JgEQZAXxwm7bd7bcEdDofh3748PH9L7//JTr8/OmT457IN9HXu22MkUBTSoenp27orK3CKecyAsB2u40xdkNH6G1accF7H41r75yzhUB0GENg5v58Ph6PBdlb3N5tY/R1s9lut03TPJ2OjsgHQkQWMJSs996FymrLT09Pp9NpGAaHZGYgzrmUslnx3d7eXu33ANB1nUmnOme9OW8nwnv/008/ffvtt+/evfv04w8PDw/fffddCGEYjyklAbiq61CFYRjS0PXd+cOHD8euZ8kxxskpQpWZyT+7ite+PC9ylDev/SWrs/P+MuYZbFMuqiw0lzdFGBWsy0pTmRMEJ8+YBVyCiGR+1itH+IWm/edEsn9vtPvaFPciSVhPnvDqSZ1F1JY0eim2vXigcxqhSgKiinMvywxFAYDMntXMUhEUkCY37SnUEZC3kiiaCNg84S9H0v0//p//L/KRvHcuTmziGKuq8SHWdR2qKsQmhDrEJlR
VrFtfo4+V8zHEKsQYQnShsjaE80EReOGjEyo5hSIqrFyksDJrYS2ZswZiUAYuoIVEULNykSxUCUthNvl6FRZVZsbJn6AIF+EiUgwQGLRIKcxFhYWzTmloQRU19oIYthNUWERgNlpaig92MmyYrs/WVJqAeTkHE5dVhC0ALufRwqEtElULIhAQKoias4Jhw3TOQgFAQCfFduGpEWjUfSR0hI4weu+8MwkHRHCE0bvgHHMhML9AAJGSehCJ3h8Ph92m2bQVl9z3Hals23a33XhOf/NXf/nu5ro/HT3hz7/99sPtbR28cEGQ4F0dvCcCFUJs6nrb+DpG7xxIySkVw1ekRABVjLGKiFikiEhhq905Y1mJiHd+gm6K3N/d2YRiJT4r/R2Px5TGzaa92l2BwuPD48P9A4s2TVO37f3dw5f7O3L+5t273W4PiMxS1y2owV+hpDENQ1X729t3p+PTdtNcX18RYR5zyVlYhPnhMDCL1fRzzv04qEiMUQpf7a/2+72hSOu6apvGOTqeHruuG4aRmZ3zzvnCPPTp7u7+dDqfu86sJOYegb+/+ywiITrnyTnyJhoT/Ax/ABPgNgmenHOoQ5HS9+euOyPi9fX1Nx/e39xc100saezO55Ky91RVERFKyU1VE7pxSI8Pj8fDseSiqlLk00+fzqfz0Hc5ZVBtmvrqan+1319dNShwOh0e7+7P3akO8frm6mZ/zZzrUL17d/3th49tW6d+7PszKQaSuqqaOjqinNLQdymNWoqCKJecxjENIuLJxRCDD6BAaBACy2LUOYwxxhBicN4TIZY85jRwSY4g1LFp6s2m3bSbpjGxgvF8OvddPw7D+XQeuj5W8eb6ypM/HU//9tvfHQ6Hw9PTZrPJOe/3e1CwQNg0TXc6Wzn66enp7u7ub//2b4dhIHLH40FVr/ZXwzAeDk+bzUZZcyospWmapm67rh+Gcbfbb7b14+PjZrtNKXWn7i/+8i9///vfNU1Nzl1fXY1japtNiLHrunazBcCSMiF4R96R6X8hqB2B5U+0a1lYhUHAuKdcirIIsz3OKdmfyjI9b1OTIsrsScpmuCLKAirWVbWk0gqhNvejGnodLRFEg7/BtNAHABRZMJcmOPwiNq+BJ+t7RHwWolZ9O1MVg5lhsRR4X6AccK4yyiuF5HkLBBSEzYvTsmR7rCo6J8qioioCqsJJVVXQIBJztuaYVc1QB0iJEL2ZsE4gSued884HR8G5CSJH5JCcRSJC80y0fUdft1tmjqkunEopUrJyLYWT71FFSpZQchgpF+IkhYODUqQkU4sonFMZkzCnoVdmzZlzYc4wa+qY6KVlSlYZLLbHwgIJUJEIMxQChEIScBwRkbxJvJBzDkU8UVYkYyUigXNKJrmMBQUnL6FJMgBBCLQUQQCvIACkQoiGmaIV7ebFauXF4Jir4W8ujmSiiZAqXz4CMzB3KoDOhXHVyb5dJockREQTYN1smknjOOeMNA1inOqeWMVMmafVnABAGx2ROiiOnHfk0DuPDvC7j+9225bHYeiO7/bbjx//ClXu7u6++/ghEHbHw+lwzDlxCJxHBNm0deVdFUMMfk5/AdHVITmL7hbW+qnNs900VrDy3itqKaWY3qP31r5i5uhNo8SJiCVVkzTX7MmwGEdYFLQeWPCTG3jXdc6FTdtUser7XhFCqMZxrOuahc/ns4q8f/8uVu58PlZVAIBhGMTmHQGLr33PC9Eip1RK8UhEdLXbW50TVutxZkZwKsJcmFWFmWVIJWcGAPPhI/QxeiLXDUPXDX46CpOBhuXx9hwAmBvGpD2rCgCW5hpcc7PZ7Ha7qoqq2p/OqBCcFyRRRsSqqjy5lJIq55xKYUNsp7F0XXc6ndKsxXp1df3u3fvr633TtCWdTsfT6fRE4G5vrq521y4QCNbNJvoqVN6hY1UB9AAEjpgDgootQQunnFIS5lTG4LwLfrfZLmu+YRiQPCI8Vx9EIirKC7rHVut1IAhxc3MzqZr
lcexL4jIzUdFIJs45Qfj06dOPP3z69OnT1e37Ukoex2XvDIr58PCQc37//r2tFZ6enkD1y5cv19d7RBzHibfXNM1+vzeSHzOfz2fTf9lsNqp6PB5F/d/8zd/8f/6X/+Xm5uZqf/PD7377V3/5q1//5t9+9rOfDX1XB993p1p1v993wxhjnMub+iLPW88PS0q3fmld0lyaeZfV8/J+EYObT1magqpYMLvkfwgmRwwrFaoXW/KiF/Pm7Wulzj//hs+xmuuPv/7z8ouvrEnfCLdfwUBMkmGkS1hEdAKIztu8qeiQ0BzpQRFIzOENp4UBAiHq9D3TU4hW5CQ1Fj/4yujq5Fx23rH4XEqBUHwMICqctXAuc5mLRdVz0VIn+9PagVKYc9IF+ZkTlyJSVBVwRBsQbMsZNfLFwnlRYJSiwsQooJgGRER26r2qVyfkixKJICEyoJJjKQ7JUC0Z8jRrW+MU1BGBCgojAJsbA07FBFSgmRG3HPrlT5uq1jJ6NlQuQwfl8idOlD4kmdy2rJMHXkQUlVCRUZCKioCSOCJSKkhgxW3EwsZWVxACIVCHogTqqVAdXSpi4cT463Y5AZHlwR4hhOBNFkf0Zx/f55yPT49pHLfX+4CSS+Y8vrv+JufcD0Pl3K7ZGbzeASJq8C5452lWUXJA6KIVY5lzGfu+S0NXSkGAEEzfy6a8MuakACFUi0kpzEw+mEFchokgIi7FYicA7NpNW9WoPPb9LFjsEPHp8WkYBh/idrsV0HM/KGLd7sdx7Puec6+F20293+8Q+MzZQCXKoCKIjhCQmUdBH1AEWEopLOAoNHVs29YI4yosws45j6TKUhKRJ1ItLGzl0Mly1iKfaV4b4yKXJFJi5RcQBAAjQHAhhOCDs32JMcbgiIBZEEQUnHOhiZvN5mq7q6pKOKc8aGED4zmk4CdEKykMw6iCosVyjJTGw+Hw+Ph4Oh1UNcZ4fX318ePH/f6aCEpJY//EudRV2Dbbq3dXm3pTtKQ+OQKHqmK1lgIM6MCTc4BSSsnZmk+qiiAOdNO0dYi+8sG5hTjILEY5XatS2FWLyqTqLBE2GVbvbbkmLGVMY05clIi8D+ic9zHnXFhSHrthfHx8HPu+qSbjiF27SyltWkopqeLp1A1Deno67r7bjeO429UhDJ7cTz9+3myauq43m93hcLj78nBzc7PfXzELKRBR13XHw7mtm91uZ9wGH3YA+O3Hb3788cfoQ7Np89h7h6RShn57/e7h8UAu3F5dH8/dlNMD2qmZ6jmTdInhMxVW/1TEsr3p38xUsOlOX91sErFF1xT2pjW0Fe8AQMTQAIb4xEmAbKqLWhVQYFm8oT2eFBsVXkW1deT7cyLlnwx4r8ubX3t1nicdzu1Q1WKtSZzYqEDERGQNPiJRBRP6MogGWlI4aYeTklMgQqeECKQIiE6wTE5Ntmw3iKISklu7bdOil2YEBlOXAAAmcqUwO+cys/dQo/KkyZlTztmbG1xWYibmYBIbUqQwS1YWFGbJFgJLKaUkESEsoMrMwKJSrJ8rCCyi5JgIQkAfMAR1RORmL/upgQyGHkEEJQZwSIDiGM1BFhFHGXEK5+QIFIhRHSAokwKhFcqFAFGUABxf0Jswt5dsGTuJesyRb70eWa9o0BbyZMPXjvKlyUwIF7oPqoiYWKpVmCeQjuicKLghTRHXeG/eew6xCCNiKSWzNYcYgFRZFdGbRnAGAI8G+SNE/fjh5u7TZ2nCh3f7GH3qjjHGv/urX21C1RWhWMfddrPZRO/QkbmwOiTn0DlnQuGTCg8LAghzSbmkQQp7cjOkUCb/HS3M7ENo2zanhDNpb5FdLaWkYZKn8t5zKSYDZF59hlM/nU4pjTFOJLaShcgTulKEVQg9BXNW0oeHO5Dy7t3N1dUOiZV1t9uJsPeRHBJ5UGRF1SxCk5qJJKta++CrtjHVEjMGQsTKhxBCYeNyIhfgAtaG42J1cejO/Waz2dTbNJbD4TAMiQLtr3YkeRk
elpZbqDPWlzERFxlGC5ze+6oOTdNMlc80ljFVIQ7jWHKqqqqu68qHcRzPfc/FBr9jzn0/PD093N3dHQ4HIx3e3t5++Hi7n7tcXddBGvf7/fX1dV3XIpLSICJEILlkUa9e5oSDlNgB5jGNJeUBlEJ0dWx8IEJv6RFP/hPZqHtEVARoLTGvwqqkktPgwNWVq3zrPKogS+Yix8PR7AwRtAohxhiqxscgDPdjOp1OOXPmQkTv3r1rm+1hHEVkUzcmzXM6nUC17/sQwvl8/vTp02JE7AKVUn73u9998803Nzc3zPxwdy8it+9v3r9/P3bdnO73fd9b84+Z7+/v27b967/+68fHR0PMPgzju+v92J2bzRWpEqAULik3sZqItCt+3noGuPT4VyndIiyxvi3yQ+ss8NLbm2lRoJMCCymAN0ttNWVhpZnQ8CbIYKY3yIRR0NX3X96zzsBexzyd5Rj/jPj37PHLOAeKzydKfIbzXMWeldP6AiFct/dUVYyNQICOUFWQFBDQWbZnSBAEAEMCAngXAIze5hEV0AGig5ec7EmneJZc9U2M7Fw2RzHvRaSUYGfOikfIjM6BD8QsIlV09iqqrRZZmEVKSSMAqNFdJUthC3sgxSpKIALCKkIqADCWrAjsnJBT55C8OgJChoCTU6wJk0/ybzPN3FwBSQEZEBEjFQBwqCgoSAJIqF7RMjwLewAyZ3DgZ7+k9fJkyfCWJ5cw8LpWribdgga9IVXFVdca2QYliQg6MZ13VSUVYiAmRmJQm1aY2dmRl7xUSEopmbmU4hAAxNnSiAKSIjgXiIicBV0u3vumClVVlTRGQt82u+2WCJn5ar/95ptvxsejvTdGbwAWKewId5s9wWIHCMsBsaZqKSnnSSfae9dU0XC8UBARmcT44FVVdeezc87mdFPXXGSclqzIptGqqna7XV2FruuOT4eh652jGCM5l1IJIXgfc+HD6UjeN+2mqipVHM4d5xyCC9F5hwAEHn2g83H0FEJVc5HUl75LXTcOQx4YOReDlXoCgz+EEMzL3hIyciBaOBfOJQvlzCmVlDIXzTkPOZl+8XQ6OBVOIiW4KgSnJm2MujAvfXDOk+U6s4zXZRTFypv2mKqala4tyFgKOaxdXVWVNwMyVEcQKOSch354enp6fHw8HB5zzg5xu9nc3t58++3H3W6rquM4okgT42Z7s9/vjSlxOp3S0NupUYXg3QSkAi2FlYWFZehVMXoXY13XMcbaluZcUillzJPAjS1lvPdNcOu5SVnMDgukkCPvyNZRrJpLzqkQBfJexRQrSURSPwzDMKbSnTsV2bYb8q7vx34Y+n5MJQ3DcD4c//CHPzRNw8y3794ZvWEcx0+fPn377bemxG36AMfjoWlOtsv9uTPCyX6/r3wQUZPCOZ/7GGsiqqrm0+f//vHjx+54+v67nzlPv/vd7z5+/FhSfnx8vL59P6a+biJ6fHp62u2vsqjDpakP5NzcixIQgRm7qVxA2GYzFRS2B5d/MMtMX3RGlkgz8xpgLmBaNVxnOLoiGFmPTHhrjmhqhu846VKiAQgteMBFEX8dfpYy49di3p8MeIu/zWrqe5YDvGgMrcPedLnNtqcAOKmAW4pqz6AHACRFUSBHCgwegIFslaYGhzd6oiKhCZUATPVMIO8tt1MEh6QzhhMBdep3XmgYoDPH3Xvvac5JbSK2BGiRjSkqxIylELOqRmmmbMNybFERFi1lHADAYPe6AFJUOZnUGSOLckFl08ZMKSlAQWUkISeEQA4QC0RYeN0GxkUAy30VEOHiZ2j3OFXA5zUQEGpR8JNWpyXAylMFEnIuS0iDWY5hOUnrsziHvWeS6sugQTISGOpUe5g/XnAqP4gUYREhRVUtws45YmVmBhURx7a28MwsGmyFaIVBVZ1AvmRjyJkQJSIKlGjpBSpAjMG1bbupq59++undzV5EuKQY4s3tdVVV4/kUffDkJnwgKfoYY6hDdN5Ous5EKbANcM4pC+fCOUt
hmIsyxr9e1mgWS6z0bSIiRDTkMo6jqpqSllG/TcTLe9+27WazMbGMrpu424FCFlE1Up3LMnBiF6q6bs2Apu/73X7XtjUA5Jx3uw2goflDVTXehTSm87k/n4ZxzFy0K52qcilE1DTVbrezkJzFROCAFCa3ozHlnDFuQb0KChNzYlZmtSCdUjp1vVlhbLYBiFIaah9stY6IlslZejfrelz6FgsFzUZXKUVyAdFY+cqHNA7WhfJIOQ3DOABAXdfnUzqd+vv7+y93n56enkR4u22vr99vd/XV1VXT1AqcSwYs+6u2aZpt9ObFY6Vmi1UX22fn7LTaPSJG1SqGZvbUNU2vlFLXDTCrYCxN3Evp0uY4FhNSENTdZjuNeSmsqKqOMAan6gCAbZdFOZXCSQRciCFUHrFpNi54FXc4np+engZlVZVcLGI550xNzbJPK4PblXU6ncwyMOf8+PhoriCG/0TEXbuxbFhV0zgeDofdbrfZbLb99l//9V//9q/+2k7Qt99++/T0ZFBkyaWwVJsakA5dv93tVRXxAvC2n55ygHkz1tmeiDDDOpl7sUR+fZun4lemB7PnH05IBVj/ol2FMH0KL9meLTHpZZK3zvZex7l18Psj4mT/riLn697e+kKwitp0r88yPyKaZTBIVWm2imWjiJoSMQYlR+gUUa2QCVPrztGlMjfN0jhTywhpZcM7FzkBAHwMThEQtRQQQXFY2ImB0qy2MDud2pUTSpmyPSu+ijJn0VLVrSorGDaJLyua0YOKSIFiq6QCKqRSSmHQYt0ZIDasCkBGB7MSuc4ENZgRkwBKMpNUTClcyzRmVFVYRVkZFZQIFWQOe2a7CgCxFJp8f746Opc5i+hCczRS7eU95JehRnpZ+NgaoqioklMvImYY69QzM7GKyBz2mJmlgHcoQurVdEdNw7P1kdlVYdJXXSJuAQ2EDjV4cs7VwdfBe0cfP7xv6ggAeehDCFf7raqeng4B0FcT45gIqqqq68p73w9nEJ56wCYzKFxy9l4L55THnJMIL4ILqFpVVaiiqkLAGKOoHo9Hu/Ym4SXmMh9ee2wSzxbzjELQnc79uQOA7XbrYxiHfB4HZuS5pr3dbqtmQ0T9MJy73oCRbVujmbshFuZSyn67I6I85NPT6enxOAwZIRA5VSFyvnIxxpur3W63I9Q8JlVFIkKVUvq+H/vBVEKg2CINZkVfInSO1NYBOY8AEGOMMQiaG5QJU81K3DE6vzS95nYGIiJ6T4jIhEY9QcQQnCMKIUTng5/KoSqmnT3mYSyl/PQ5nU6nw+Ew9Ml7X1Xt7e3N7e3tdlfP5VP1nryv27ZpmsZxyamMQ29H3hu2EnQqr3OxEyFT0RLbqqqqKsSAoDmNwzAYtV+MKe/ishBEo9oKL7Aunb8VVGKY3KPQZl4Ah0pE3WCNiSlfMowdkCMffJQhpXEcsWQAqGM1hOpwemrbVoBubt6dz+cYY3fuU0o5lc1m07ab0+l8c3NTCpfCfT9sdw0RHQ4nIn97c20ynoS95FLX9Xa7TSnd390Nw2AYovfvP/7zP/9T3/c+uPHU/fKXvzwdnkopjvnu7vPm6h2wYPDe+8PhULUbMol+AMLJ48XkUKYF36t/wIqiwIIy6Xku/wDR0j1RWWjkeRGaBrB5Y0lF4LkL+LPy0qq3hwI053drW/Jnwex5wHsz/v05Cd8c1dbR7u0i5zqcrwOhke3Wk+380Hp+TESqjkxuUid6GLrgkJHBMNwKnk3LBS3nQ0WLdoTu8v0TyhFtHnZgLOpVzAMAG65+27TMHHJm5qIiIoVVRLLV3CxkMbtJSVVjKSvov3HUk4gAGj2zLGHPwpWrtiDCPEJh4YRFRBMqsBQFKCo8mSeokAMAb2eajAg57c10oEUt7KmadA2rKqXJGcAQUSgiyiQWsIERPE0QF0vblhLcusP3Ypw9WzusFFR1/RFcK6Bfwp6bSg0qIqhTwNDJEGTq9tmrJp3DqxMjIiEE8rgAQ+ZV2yx
RzbyrK1ujee/r4EMI3pGq5jQI5yaG7XbrCPq+dwibbQujWp+jlAIgJu0/jmPbBFVv6BrEKdUTkVL6nHMZLwi95VDUMYa6ZmYLe13fn06nTdtaKrOQ2CzsHR4eVdWoC9vN5urqCgBM0MTkMLbbLas8PZ6OpyOAdyGkxL6K+911bOquG87nbhzH7z7cWgX45nofPJzPxzF1VooYx/H81N3dPTw9nlRcXZNzHgGdc9E7s3UNIYxDdzwe27YxVQxmzmMax9E5rKrq8XQ2q6CFaGXnwrT/jV9oB9/FUFWVLVxgFpqJMdpae2EFLVUEQ3hO1QkA51zlo7f1lsLV9XV/7o7HIyk4AlV9eHj48uXL4RjM0jbGut3s27a9utput9urq42IKBTnJqc9Ezk7nzorSNpSw/ZiXYCaepzeG5+k0UyzdKrlecyCiO1mg4hmKzOBp6y7oBdA41xuEwCwtB4n22fRGalLtGdmFlZVdOR9jFXlffX54R7RlSLjOCJZ4beq6zqMXQjh3A8AkFLabreoUFVV13Xv3r2Lm839/b1R12OMlptawnc8HrdtI7O+69PTEyK2bdO27fFw6LrOeKIi8td//bf//bf/9p/+03/qTsc//OEPf//3f/+//q//e922Dw8Pu6vbnHPwoWmaz1/u3tfNSnnimQ7nul+7PhrrPG89b6znFlq5zS3ENqvEvQh7L1KTrwWnpdS1vPjHY9jrV//MIqeC/ok3vPqeJdtbLqVn9c+vQDqX21zTsuYOAKAQKnpVAPSMQKa3ad60cMkHljwPp+qgAwBrqK2Pp73bqwd05H1AJvODIQFV9cwAoIIrPC6o6sjDcg7nVnllV9froaCqvgwATsQtl9P0cSuZvtR91tG9JDYuQ2fNtJvqNsy1TEmGqoCSKmuBwuID5FxUWciRCdKQCyEkREKMk7wnCoDFIEcO3dSbQRKFoqCADt0W162+52fRttxCndXBS4w69QDEXKomQaFSHKl3l763faynvFxdysWhC65lZjGjH5lsGC5jyFkpAEMIsfLeexQVkWZfOeeCdxYSvANb+3A7Uh2Di5pSHksSrGPV1gEktW0Nmg/HB85jCBiIM5+P9091XXPmoRv2+12MMecMRLurLQXKaXDOtaEehyEdjzUiKmyaFgCOx2MZU1vXMTjOyYOmnLyWpm2uti0pHw6H4+Fw4qhhW1XViP54Pj30PYPbbNpxHK/f7ckF50V5uP/y+9Pp9PHjx8rdU/BVrVi6lFRGdbCJrj6f0vl8/nL/eDgcBaXZRPZ8ltO1R4Syrbf7XRTJp7sDEd1e71NhRTgP6Xw+51x83SjiufBxTKrKCIwwmyeq8yQFVIVUnHM++Nk/T+sWhH3mooCxdqHyI0vf9ze7TR6T927XNp5gHHrlwbtYq/fOBw8xkvezbwjQ4eku+KZutn2Xv9wd7r4cn55Kd64Z7zSyQ6zacP2ufX/zbrNtKvLd6aGtm23bxBhROU9MIYYmpJxzTkTUNnX0AVGRpes6TgWkVFW12bZVVaFCKeUkWYtKYRFRAtdU0bwGQIU5p1FVyXJZ9ITkfdQ5EUfvrfjpnBuzubXoUuRAJ151HMZYV7XBlES8ByQW7bcbdzweg4P2KqQ05nyKHq52Ug5Fz8eb/eanzw/vtzeV356eHnf766vbv/iXf/mXv/rrX77/+V8ex/J0eLze7ZrKH0/y6dc//PJXP+uH/MNPn/7xP/ztfhP/9df//P7dzXC8kzFu2vbjx3dfvkg3nD99kd1mu2mb3fb67u5ut9v99OkHpPK3f/fLP/z4o/L404//+rPvf7WJ+8Jy0zbjw8Pu+qoOIXHOQ45Ng4hOknMgPIoIMJOqVyEUQjbaMaECCcI0OxECkvrZIcEUJpdjCJouM95z2AfApLAFNKU1gFhTJhJCIASAgkCGWnUOVUGRBACUDCQDIuY89zoevw5405TCk+suPv+UWK3O2Ge2urFpFHmK22oeCGqIF+PVWwJA4BTUPpuRAR1
RpczIrKWAegAxQyCHHggFCioABVSKnBk8kRZ0RFDUmnmewZwGCRB1ZqDDhba2jp0WIAoiMtoq8+LEKzRhzr08E8N2MovjqaDSWl8HVFUoLIf1BT0FXsUwAHBlxi09/6LCbJAVe9UeIwC55+PgecF6rX0wJWEhKjEhackAoKRCpvrPWooqI6kjpFlSdoGlLZhsP/Ou7Gbr9EkObQGwvQp7uGoHrnd5kaxd5/vLq3YMeKKpqqrG+Qst9lrOxMwqpJcGOC9hDyYjEvTex+idc2T2tn5CdS6Mscm9sozLJec9hVBVsfLe1FonGzwiEuGSk0lHGurSUgr7KpOSijHKLP1nN/stYyao6iJcYmKYiFhVVVVVlpkZMevu4fjzn/98s9nc3d2ZAEdV+ZSS+ZKnzF++fLHUoWmaqqpExt1uUzeblErf9yqOvGPmw+Hw6dOnh6dDCKHZNgYiVdW23Sw0eQEylcWcc8qFmYdxaoCZmtr5fE4JDXXMzDC1HWBZaRHiMtGbCsQ49sFXdd0qoInkB+dps2Hmtm2r4FQ45xJC8BSRlPRSArVTX1RUsak3w5CeDk8P98fD4Xw6dsOQc+bEw2azubm5ubm+3m43dYgOUVU3TWsNVFzBCBHRLOWapqlCsL5sHpOU4pBCiEjBITHz0PU2DJKmeS0FiOhmQjBnE9eOgS5XAiISussAvtT+qfFxCXu66nXtdnGx3UBm55zOhBbvPbhJ4gcAWKSUstvt+m4cx5GIoo/m+f709KSO/vEf/6EfzqWU97fvN9vq//0//8//9//8f6vrJjr/448/Xm3a66uPP/zwg0re7/fDMEzD3jurmghMpR3r8xGBqr5//96Esna7XRqL9Z43WzaxDhOgWaYdw50ta25967a0rF5MVssUsXCl1njF1zniixzoMoU/D10vgtYfv6l+lbf3Zv73x8Pkv+ul17cXe7cURV88b5B4ABChCcBDBOAUQCdVY9S5gw5zoRhmBdElbsilyfecIAgIAH7q7gowMxEbBN2SvHlAP4twpO7FJL4OcuvDN83U4kmBwcqagqIMiqL2WBCWZ+yxkaffjCtLZ34ekYWZvYxIrEAMQECsavJjpbAym0MrTbgU9QQBgyPnnTfSsXMuOO+ci0SOZuYxgXPOO28YR1y3+p6n56/3d4Edr6L8dHnALA1j88I09VjFbLrMivd+WolP4KCyfP/S4AUAcrCAF0hFVf08VyGisUQmnyCqHDkAm3dCVVXRBwCIPoypmx3d1PpwKSVPZIBDizpmptO2rfcUYxQpdgoW21gBSCkBC7A4j0haUhnH0b7ByoxpHM7nsxGubbyZF665ECBiKWW73YYQxnE8HZ+KcFVVTdv6QHXcVE1tVUcRqWKTijw+3n/+/OXh4WHMpa5rC9XOkaFmFucHAVpcSfsx0croyxTRUkqloHnLiYjz5Jyf2PpOENF0Z5yVAQARccza1L5p28IypFJK8bGOPk5xOrjz8VBSaurKFg2Wf4fgvPc4EVpIhHPOh8Ph049f7u8P3blnRlN33G7a29t333777dXVFREoMwgTQh3jsshgzkYLAwAHWPlQxejJWf22lKzMTVV7JHIghW0HVZUA2Anp7M5CztgpRFSAnHOVDzFG55ybkR3TGn+2/gAAseKSIxD0k02BGldSmTdVAwDGwJmURVVNK66OlQs+hEAKDjBx4VLqqyvQ493jo/ceHVbBv3t3/fs//Ph0Pn377bei1dPTfYw/u3m3q+s6Jx5z9+2Hj1/ufgwIqno4HOxQd0NPRKySUkIXttutT9mi6dPT6Wfffky5Tym9u70+HB66rtvtdgc52dLnKg3bXV1VoZRSmG2tpwCF2XYbiSYbz+W6nt8z0X5WVpQvQsty4U9zGkzZ0fLOdcyDqf9i2Cgw6PoyvbzGIVim8Hz+eYltgefVyBcPvjZ7vxkIX7z5+TN/jArxOqJ/jdiAGJxlQUgKSOgUnCJ5y1uAdIZbTN88ibTJAj0ENB9W0yd
7pjmgAAAOLNuzsGfTuhji4jJfP5vBAQClvJjrv3IU5rOupi3OwhCBVVCBF0KuAtu9PQ9KVbAZXhammt1P9recRYA5E3kwzmOoAAspiIBQUiA7XghO0YGaMxF6JG8ZHlwyOe+88xh8cM45hEvCh7rEOVmduXW2Z7Pn6712c9jDlfTccpSWI7lUa2EOeyLiHDGz2soDGABehz023IW7sKmsbesucXkifhj1wtRSLFZVobZiF+cRIEhhs/8oIuYlLiKqZA4PbduG4MdxDCHUdV04wdyAlJVhnvMh51xSQsSwqgTEGSBvCL3D4WB9vu+///7u7s6IWdvttu975mQW4ZOaVwgRY13XTdsG56+vNznx4XzIuTT1JoTweHj46aefjqdzCKHd7pxzfd+r6vX11c3NTfC0AGIBJ9DmMAwpl6ZpqtkD3YKuc87yvCUd8d7P2bN3zgXviWgi1AACQNM05rAjklBZBQjEI/k6OlQpmUAQdPZFJ4vK3htiTZbU6MfPPx4Op8PhOI69zWhVHUMI33z3YbvdbrdbTyAiIOqJog+IoGxVWFEuy1LsetuKCOeS8iAiKOocuhCdzZmFOZeSs/UUGKCqIiqgI2ekT+8tsYOqsjzPe09wSeinCDkhCCaInaoqApl6F4Ci5XPPGmA2PkMIbAKeKojo4yTig4iUSXKJoc4tPx6P5MOQCjNfv3sfo/909+Wf/+Wf/vIvfwUAx+NRpPxP/9P/9Nvf/vbz3elmf3Vzc8PjcDgcbq72dV2dD08CpvmuOWeP9rtgKSARjSVz5lyGbWpDiE9Ph3a7dc4Bcx7GcRx3W6hCyJFNFX25/Jdr9s2pnIgmWvk8fS+53XpyWCaN5fE64C2H68U8My2yV9K/qm8w8F6EQ11VmNYx7/Us/WakfHNW/9pHXrz0R2LesuPrWKeqaoY0cCnMkpIqCKhDp4AKBOgAHQIpEJun3rpXZ8zp2dXPrIlss54ZFC3HGQms7qWqYlkROEExiXDhVUpnSD+65O8vjs6L558dDqQldC33S0iz0uazxx5MUXsdII0Ma9HIpB9sOCAIhhqxgAB4RAV1isxCgo6RFylwBGN2ghJ5IkfknfMODYXnnRFu0RF5RJvmnIG5dF4m2D+84EufDd/lCNDzsPcqF3yZBaIu6SAqo0MSV1QJxKmqugk/ZisaADAg/lSenUVWEdHKm95KrHS5wELApSBcGyKRWQqkPEEZFSXnnMbRuCtpHEvJMcaqmrxbqzo4j8NYSikmQm5hD1G9Jxcc51G0EBKIicFBCMHsrayIdDidh5S9D3VdO+/HlLq+v4qREK3k+O7dNYJ+uf/SdZ3liJbtNXVNCMNwHsfR+8Cg5+Ph4eFhsl7absm7ruuk5P1+f7O/qkMkFAtpMUYgb+VWAKjrWlVNKszAFxYa0zgAADn03kXnEVC0iHDbVBYDENHAU3bOd7ud997OWQjBhcrSI/KYhl5Vo/fBO1sjVlXVNI1dDiKSUjYJ5pzzl0+fhj6lIZOCJyRy222z2159980OEREkjxkA/LREQxNFAim2xFkq8ORcSnnsOi7Fe1+F6B25SQGg5DwaCpGct/EZYkWIpmdxqerDhKKayk841T8BIc3Zh1V8VY3joMFXIsJ6WRxnZmIuxSYXRFLvfPAxADhwDBp95WMAgJIYlRx6zoIKTVvtr3apaDeMKQ91DB8/3CJpdzref7l7//6267ovXz79j//pH8LnT4jnf/u3f/vlr36WCR/un673OwWoN+1YRu+ngrZiNgwqCBfhpmkeHx/r4AuXw/H44f07M6RFdCGYD9855TFWdVvHx4eDUVZsDbRUmGz9uq5YTqvAucADE49uudINC29mQTKL5Suu5uF18HuxYlgiJc8e7YSW2M2oTSUAkon/Rgv+hoBsYTy/GeeZfx0vLyHpRaj7WuL4Ory9ioz/jphnlcwVNn4qwwAACakCgYISgXMmLY0OFBUIHMIEnJ+Pm9kywJT04SxCqZPl+lor9LK
CmIQw1qnnlKDQuoF3qXkuZkivs+Ov7LdbH9OvHdZLVCC05McyPBFQ9apYCjkCZsekIoVQqBChOGSkAkqIY7GBguqQFEEUhMdlCE0C3c65BXJOROjNoomUiGZWw+SDhIu304sbrNL2Fws3NxtzvLm/L/I/VSWVy3FGy/MmjtT0+vOwBzLNVohIOndoiEwNefp14OXiKWWyPvfk6rqOwUsuUsbudAYUB5BKyWlgnvjyQ9dZkHDO5ZyrqmqaeunezeihaZfXVV8bSBZLYoxggh8pmb2AM6/2pvnhxx+dc+amlnOOMe52u7qu7758ub+/F5H9ft80jZnVxRifHn5k1hgjAD49Pd3fHSYNDi8WVnPOm0378eNH0zKOYVpxW9hLabILiHX9+Ph4PJ2t+Ga4x67rnKuMe+cNHKlsUgNrApzBweywW6uyFCaiKsa6bbz3IpCZpRREreMuRF9K8YRm5m4hdhiG0+lgnEWzczZjJkOth1BtNu3t+2vnxFSvPJF54CEiiKICIYJzRGRleRt73dMx51xKDuTqEKsQAWdl5MIgQoDBGSTHAQBFt5w7GzDPWyLTUDP9NppU7Zfe9mKepd6FIqwmYrAUIYjqOlqVfdFfteuijkFEXPDmNWFj0vJUILy5uTmeu9O5dwgl9998eI8I/Tj87ve//cUvf9Z1p5zzr3/9m5///Oclu1//t3+6ud42TTWk8fPdfS6b77/7LnOJ3tn1VUQWg3gRCTF0jz1AFb0vRUKottt934113VYhsEjfdX1/DrHy/mIwtLqCyjIpLwgGmBt+vJr+/3hepcsC4RXF+3L837q9SC3efOlrsQqfdwdfT9dLU+bN8PZmzvf6Dfqnipzr3cSV1hWtwt40HwLJVCNEAlI0h9g58zP1EVpBQ4lQwWjt9kViRrQTu3/aSFRQdKggyrSEvRknuRJOlbfDHi22tl9Pe58dKfrqquHNsGdUlhmX5EXUHA+cc8zC7OwZ76UUzyzgsJRCLpQcKEVxUbxXP0LxxQ1QvFMhFI/kvfM+ovPognNkEvroHJEzywSgSWYXjcsGSORnSIsZJLyB50R8rmGGb+zXi0C4fsnZK8bv96yqIF5VxRVVRV31ABQAwKtY1FsOoJvW4uSRjOwxcdMJiEgYFmcWH5xzKEUABFCIkIFTGmx5yyX3fW/GLk3TIGLh1LZtXddd11VVsJKPiTRam4yI8jgol+AoeOuZDR59CJEVcs79kFJmcsF7Ty4U1q4bzEd7GJIj2u/3IYTj4fD58+eU0s1+d7XdNFXcNHUd/NB352GMMYLq8XC6v3/ouoSOnPeAIiIEuN/urq73TV0jiEphdpMvErPKxK5bOpdGtACARX8rVDHGWIdoYBxRCeCJiDkTgZ9MrzwRWd/LhGVRwSHF4KMPqso5iXBtriTBE5IL0ebQzJozG0n/fDwfDkdjcXgXSynCJYaqqmJdV5s21jGAdp7Ie6NlzksoYXJAiog+OG/GeJa2mmliPdkhOGHOeZQ8zdR1XZsn/QJUKXgxmLys4xRo8r9eSRQRIVHw08R0GeRKiMAqxRQ/RRHRBe+NKUVTfDVZHyJSQlsJTVUHBU/O00TkyDFBP+58LCrvbq8F/Pl83Gw2TVcNY7dt6s8//uQrf3t7+/nLTx8/ftw29X6///z587t31977z58/lzRe7/Yx1MEbbghzf7Za5WazOafJ33EcxypsvPeHp9N2sz+cjs45IDemZDxO3ozOBReDEmZhEXEQyBGFhbCvqA5lMbZWUvIwTZJrzMs6BL6AoBdmnbQzX6yY10ZmaK/O5TtcP7N4rqoqqFNBdDYVuHWfT//c3t4b4flrofePRL4/Hu1gVSfDNbl1HoqXsEceJ0qY9dMdg/dkbukkFwjL/OvoUUFJFrMIk3o2hZs5GOsyMwuwqrHa5lWMGQtMKxqaIt+cpU6ALnq+4lj2eWnqvliSLJYYX1tNvHhe5ueN7LBQHozgP0Oq/CLuxYjoEpAzeq0QCYGQc7F
icmJhT4ubhJIrh8G5gI4IHZAjdGhlHhOhJUdoXVNCh7O06QrB9haNYb3ja+roegy9QLouL5GFSVIREUWdS9LMRlV8GfZQre00gT8RYJLPZXmBh7bnl6zFITkEkFJKKnkkQk8wSil5VGVb/fR9P45j27Y2STHzBLBMabttnXM5PxumiGg1T4suxsmzlwyfIrP0qPEFU0q73c7sxYlot9tvNpvz+fzly5eHh4e2qt+/f2+inXVdi5bHhwdy5MgPw/D0dOz7McYKnR/HEYFsWt9ut7EKKSUi8N5bhVZVh2FgnRS2xnE8nA6IuNlsRMQs0Ylot9uNWRdUQuEkIhYnRJbzjgDOe19XdVVVx+6oq24Nl1xK6fuBCPc3N03TWAJqNcNxHM0aYpxvZknBzKBFmAGgbqr9ft/EqqqCQ46evAs+OIdOQJQVABySqgKCpwlabAdzHEdPLjjvgrOKcc5ZcrKJvqqqTVN774FFJ9/2MK9aLiPZFrILZHE9vEXEuWBT6HR5zitTC2bTyY3BtOWIqB+zIVl8ytmUiWhSK7WxYak/AFi259WllIIPVefcfldEf/x037ZtHfzxePzHf/zH//V/+99uP97+wz/8w7k7/vrXv659+/333/+3f/o/iejbj++Px+Op6//77/7w3c++se0ps+65DWPvfd/3Vds83T8UUefC57svv/rFL51zzgVW4JJwxFxG5owzDGcxSsSZSQazlMg661I1cMulY/d6pbu0GKb5U+nF5LAc6j8SOV6kel/Lrv7o828/eDEVf+3XX3/kz8zwXs+Q62A8j7RVCERSQSF14BQQyAE4RufAGB0vN0OIQAEVlcQmSQRL94xf6lBZCUHMJBqFgRA8gaiqQyACRVSxX6JSimUiYidsrszxV+RslN4+ZEwvm7evk7/n2R6ussxLD8wWjzY9LePMOZcRhRyCYyTvvfpRg0cumkeKgcsApZAwgXrzKXbROnaCNKl6I4liVTVucrAga/gjAgK5Za1HBERWylEAmqs3L26OXu7mMmrfHHOFky1ukIz4pEZXMIEHMNdL5cv3z+hdRHR+qsqSAgQgIrc6OcsU5r0PbvJY0KL9cHp4+HJzc9OdDsPQVVXwns6nY9d1IhIr/823HwDldD6Ya8HpdGia6nQ6WNqUcwZUQ2AOY+8Q7BBJycG5/XZbshjGpOu6Po0AUIcKnVctRdhCo0VTRDyfz6Yv1Vbx+nrfbuq6Ct77oT933ZlUXKwOp+PpdALCWFXDkGRMNm/WdX19tW+axhoqogWUiciADN57T96ai5bnNU2jgKfT6Xw+E9Fms6nrOkZLtrNdBd47IlTl7aadr1JtqsoUzpjLZrM7Ho9aStO23tM4dDlnB66KPg0dgdR1HZ3POTNiXbeG2Pzy5UvXdWVMOWdmJfSHpyfnXFWFQOgRmio2dXCkdbT4AcCQcwKGpcVIiCKSS8qn0VYbqurQF07DyMBiF4j3VNf1zdW1nXFEDDNGAwAaE1fDS0vJBomtXZZu3zJQXaxlVuci8jIrJyC5uGh1zoAXANhs97ZhvnJ+pbG+IKHqBtCKzyLex4enT/v93hC5h3OXc77ebdM4fPftx67r7u4+h+ju7u6+fPmy2e6Z+Xf/+m9/93d/d3t7e39//8XTX/7FX/zwww9/+PGn2w/vCxYirOtqq1vv/W/+5b9t23os5fb29nB4nGRRu8H7+Hg4Dl1fEreb7W63G8b86Yc/OId/8Rd/Ferqp59+2mw2V1dXh8Mh57zdbruuW/ykZJasWrTOLZzJ6qaqwc0qNlNeMWHBHLzd9Fl4Ry9DBXlAFBUwSQRy6Kz16pGsFgjmRqNAOvfzzKzczuE65LwuabolcXwV/Ja3rRH7L+ax1fPPYtvlnXqRB5lJC0pEtp4zKMSaQUdkAE4VRQKn4OYnjcsIACjPimt+7l5OYQ9g0k+Y+qyCADrtJYDzARW8GXha6LYEWlVB1c10S8R1L8oEdxBe3SvIm88bjH513N8oeK4
PnzzP0Nflcrv2FlqhXaugVKBYGjodF0QoWR0iB5cduBFUPCiBte8iLjHTlJzIAZFBZBVRiBxNF/SLtt56wbLenRcrmvX4WAbBumW9HjcOJkSoEaWXUSdw6SLgqh2NOtsKIk69WAWkqew4GzpdklGLLnbjlPr+rFy2u3bsz+PQjUOnLApTLyTG6ILXFWOJZ0hiCJNAORH5cPGTm/PyC+LVmJl9349jLqV4Fw3UZ2dtHHvrnDVNU9dxmNhqebvdXO3323YDAH3flZQcoq9jV0pKZRiSOTyqqp2bKtaGFEXEnFPOGUm99955S1VLKeNg/jnJJq9SymnO87bb7SxKmebDJaiz7LKblgvek/e+jtFkboZhOPdjXdfNZpNz7voBFeq6rkMUlLpunHPDMAhPNoRG23h6ejLREDtcOWcuxTnXxKpt6u2m2bR1VbsY0AcIDknNrVRBxIanm6kLUz7NF7jg+XS0oWSZStM0m03TNM1URaRpgeithqFgXENd1ZmWBq0t7IAmwSeYCkUT1RUReZEmmiQkERHBken/TkM6OHOXhpUcueVPJPTicjYHYABglVj5tkTnXBoZM9dVbJvq6XgwzeXD4fDu/a1z7vvvv//Nb37jye33+8Ph8P/7r//093//97/+5/92d/+4321CcKzICpFot9udTqe43wMiqww5IalzLiAZiHeKNwreoQKlNBwOj+3+u7ZtLbxN28b84tpfBr+stItfTBRTbjcZVMGSJrqVw8OLyPfmMno94eCf6gUuGdh6/nwx7bxISd98/o+/c/XSy19/Ddj5c27r/SIitmFLpEqKk+tCMbDcm7nsNFZJca44KgGKgClykoKATAHJrL69qQkAoAqqJYaIqtOVZif0sv9KArwQDJ7f45vPO+QXceLFkX1xfPPq59Y1cZqVHpcllQ0pQjGmnTonnpAdFAclgAbkAiFqGdHk7QEQ0fOlXuG8Jzdf084jGDfAoQOz5cVF+honc/rpHnFKer8i6bk8UH0Zh17kfDQhZnmCy84AlgIewIy/1Er2tiFWNLZvcxP/UgDATwtqRUTrSFmUqkK0zVTgcez7oQMtTVP1kohA1WhdOfVDHsaS8/XtDQCIlBCc96TKiOocElFKSZRjFax5NtnpoSqIcBa2rZql900YnUygC4tINwxd141JDBjSti0oD8PAafSe3t1ev3t33TQ15zx0J2UJIRDi0+HU933KuViP16rS6K72eyPRM+c89jnnEBw5UlXLSsdxPHWDHYRSio86DMP5fBYRYwhYJHMzogMWbA6S4UVCcJumsmSFmfM4DN1ZwYcQHVHXdXlM27ZuquAQFbGpAgD0fS6szrmSy+FweDw8dl039iMiOkBVUFYustm2m7rZ7Ta7bVvXsYq+qnwI3pEAM3PSogjgnSMEjzCaScQ4qiqRl9kSr/Sjc66qoh3S7Xa73bYm3wUAONdazKcaFUxCfRqBNO+4uZW5SVGNVglfVrCziBfxJ0LEZCQK9DPDYWo4iYvL/OVWpVSljKIkgs6TD+RDzllZaqm5iIDWdSSiKnFPI8BQB7/bbu4e7r2nzHh/f7+/vqnr+v2H69//9nfNVcPDUDXtDz/88B//x/+0v755OByrqlLlpq7syrq9vf306dPN1R4RHQVVHPoUXHRVFNbgAgCIMqB671Qhj8PTw13YfKiaOuc85tRuWhvnsa5mjRWTFEQgRBFSdXNWYCYN5r6gEyhmXS5eSoLLn6CTgIuYiCo+szuw9yO6CTIKc8HJLNZnhDkBOgGdBFLw5fTyOi6+mIEnNZbVk2/OzC9C4IsHqgrwdTD/n4p2sIrr6IJTRcVieR46RefQiSgAKaLAQvUQABD0c6gDRVlICwps6MvJq2ApRuo0W06zKpD1aKd9UFUVhAuDx2ZjJSTDN/7Z92r6zIhuoSssWM03CAx6KXLSShyBZneI5VVLRBwB5+II1HkVh1yEHXJBzsoFuShHUEYVh4qILs/yLgTknDcA9yQIhEB
qPTFwTiec9zMNzxdrutfnD56r2K2zvTdHJE5ZHSxrQ1pTWdd5nmlwPw979hWI5k51AVguP2dVJlUe+yGXZGpCnEcQmVHRoIWnhhNA27b2wUWlxYpswzAws/PO0C5WObyMVecQxVDEMl3PAgCOAhGpAnMx7D5RbRrBiHg4HB8fHxHg/fv379/d7toNqqZhlDI1bodhOBwONtE7xwAKStHHGON+v0cEVV43F0MI5p9gvmvjmBZlgMfHR1vj13Vty3lDVzZVbemdXvB708kyHUtEtDTLNB7f374vpZzPZ+G82zS7zZYI0jDGugJhAIieACTnfOr7w+FwPB4X7Gthc/fyMcbr3a6tq+2ubesqRBeC956CU2BmFmABUD9bfRdOKQ+lTDOUQUOtWbip6rqud7ut1Wyt72uncqkHTIEHEQEczXITF5X1SfrHwht4h97jMtplEgJcbBnIhJSYAADJry4HUtVxKqc7P0PAAIAUFEBFbEGN1mv2TnKppCqusJQYvSfHQdU6GpybGAhgt9mg892QDPebUvr5z3+eSv7d7373q1/96uHh4b/8l//yd3/3d4fDY2Kz454kVW+udl++fDkcDoZt2e/3T08Pfd+H4GoAazYbLMiFYJy/vu9Op9Nms7GXbm5uENGEF/q+X67Z9dREMHfZl+O8Sv5eZIrLlb68c5kQXmRvy7ct64/V/PMSP/EisH0t5LyZw80B+E+Hvbei3etf/HdAXeCSD7yYVwlwgnRO+R84RKscrkGcFx7DTA2ZspGpw+cWR4s5hgGAI1Lw5AB02QFY0PIiunDQEEBlfg9+dQfWoKNn/8zNw5YvigpWkDYau6rYM6SgoCS6jBtY7pdnlj+NP0OEishIxKDEoIQcpHiQAsWDiEoBKSBsySwiRspTbHATcZ2IECa3eSJUIqVJEA/mYYevbtMaehXb5j/5a8cHn4OJ59Fsx9ikyORSDJlHu6rSNE9dTvZkqIFGdV/5QoDYEnEdm80Jb1I5qaoxlcPTYei6lIc8mh0rmzkDIgJISmnxSYeZmN8PnXOurtsYY0rDOPb2NpwufnLOKxAzcylc9HQ6ARC54BYMsQsuVN7Vm80mxtj3/ZcvX47H4zfvb7///vtNWyuXvuvGcSAHnrDrusPhwKBmv8RcRKSK1X6332w2TVOP42hCXKbTZr7nMnDOueu6cRwt97Ug0Q9jjHGzaW03DcnpvQ/RLyAR5QIAiN455wmaKnhPOec09jlncthu6hjj+XwehqGp42azIQKVEj0hyNgPLviqqhTL49Px4enJ/IAAgJBUtWRm1rZuttv2aretgq9qo06gJ3DICMAlmdLAVGxQKSmLAOcy1T7nBYqRAq/bXVVV9aYxzomqijKbdYbZAMyDHAjX6G9dlN5t5UukhOTcItZnr0YKfd+XnEXVVZW5eYiIJ7fMxZMo/rRQs0KKQ8sapwYPIDhkFBGrIyNiQCiIoThV9eSiD0LCrHWsxpD7/kwO6rr2Mfo6py/34ziGWD8ND1dXu3M/pJTuHp42u6v//f/7u48fP242DTOjSikMAH3fv7ve77eb39zfNU3zzYeP79+/H4Yhd7Z6MDIGS+E8JiIIMRIqKJ/PZ8M9Lbp9OFs4wVy/ecZB4pdQlHXYWz6yhIHFw+jVPPAMLHMJe5bNoOAFiGQtngu3D5SmWRTRMj+i6c91z201yV8i1p/M9t58dd7TlzH1jbD3x+LFJXDgqnR8+Ti6xZGCDMyJlxL99BL5paX2DA1rkqAwDT7LQ1FNgBm89f1wgsNeHOsR1cKknbYZI2su92ASL8/v8bIpq3sFcoCC6AAFHYravU6mgg5RGabHpFD0UvteWKLTAJu9AGFecDEzmPwKqhKhEHhlT8iByRGIMqsU1CnhJYUQHSkIgsH9p94DoPkaT+Q+IgRUU/l+bjO7DOV1qHs+xC+H/muLsnX8wxV499mDFayA5gtpOcxLdxhxkrcTeVZMhrmzMgskJgDx5JCE89j3XSmGKky
lTHcA5L23TpgJVSxJQ0pJRKyXpqqW+timWs7nXJiLOUbXYwDw3jsfLK9KKeVcAMDwdaWYZflQ17XpT5Yydt1onkSOKOfc9+dh6EKzV8VSUsni0bdtu9/vN5sNlzyl+85VlauqyhZ1AGDWo9ZiTPOtqirzxfXeG5zS1GcIYWEi285aNtZUwVK9pZdsQ+Xh4SH1QxX9tmkJNA0petpsNn3fM48i4hXT0B0Ph/PhkJhTSs65xazYmpr7zb5pKmfITAfBzGlVShEpxUCGNkhyyWnMzIzomDnnYkln0zS73VXbtjUFY5/C3IVawJM4ly6XBhsiTtjgVWkXEXWCMSOZ7dKyHESMLi5X4gJ4ERFaNbP00vKDAZGQ0DtzBLV513jFgqxFVcG480YRnMxGSjSFICoSK9+kOAwHIn+13zIQ+dht0zjkqs4O1eRPf/GLX/y3f/nNz3/+cyL4L//nP/3jP/6DA961zfF4rKNT5nHs27blz58ej4fr6+vNdrvdbp+GZJqx0QcRUJBSCmV0zhlRwUr3Rlq1YWMH1vtVcXjVmZO5TvMi7D0rJD7PBV8EtsviY10EepHt4fqZy0T04qa2xJi3cB1K38zh4DnC/I+Eveex7bKZr8Pe8nN/POzBKhNYdp/MKf4SQdZRzJJcfPGdiBNlT/EZuWHO/JAmMemVfgiAty/UqTgLMAl8gDk6TYuJiQeBy3z75j68+bwJAE6FTlhAF9MHzFDRAYKquQmHmSlhVfKVNKggEvMibgkigKiOHBMQBRWHwqAqQsgijqZopyIiaFEBIIqzdGkOcESLI69Vj6eTZccSZvzRG13l18/j7Ef1+j2wKmU8q2kowVJKtovC9Fnm6hAgqF7gLW4KfvaMwVhsHaCI6Pyl6BSdBwehiufzOQ1DCEFYT+eTiReDeuacVUspBpSoY1PHirmYtHGMsetOiIjo7fo39KaJSi84iPmSLgBeZ9HRpZTnQ8XMQxqHIS05CjOfzofT6dS27YcPH8wa1Jz5rLRYSj6dDqkfKh96VuZiqPf9dn99fW3UiDQOqhrCggnFklLOuevz6XQahsFG+SJCZuKcFhis8Dux/oERlQAEptjW1rFp6hg9gJZchNl7Mmxz13Xns9Z1vdnUIQTQPEm4qiBoCCEVub+/fzoejqdhHMdSOJXknKNIdV37yhP6tqmWTBpAnAsxekeQc8p5dKLk0BOqasolDWPKRoBTEXFEVdvGqtpsNm27jTGSHf6ZHUtELtAq8plJy6VE730FK0jL0t7z3ovNrbMO+8RYVazr2leVzlLjSBMgABGBLmAWmYgr3gGCIwcT3d2GMU59azIHLg3oQAUhloiIxluVwg5VK1DVU9ezwn6/fzicvPe73e5wPKtqyYl2OxH5/vvvf/2v/z2l9P0vbv/w+7vf/e53V7vNbre5u7//eHsdQuj7vgrx6urqdDp9/vz5Z99+17btWHdc9Hg8h1mVV2fJUPRIBDmlMWd0Dp0bc3Yh+BgVkby/zOarvMk5WNZM67Anz/T9X0wRF3cqgGe1ojeDIgCguZDjmsn3csrVFVx8/fiN0Pg8yH3t8auQ9iryPc/8XuwCvBEmvxY41kouhJOWDQGhAgpMA1jn/vG64rhecNirpLMJoSqAKIGqU1Ayf1bnAcCWMGtkqsCKxwO6Tr3tsfv61r/1/KtX18eCVofbHlvvQVdIpOXP9WlbBoQzG0NQsSRNVQQQRQicBlVWFlBRVYt8YfIjnuhQ04oY0dNK6lkULkuKS3V+PUC/MjpfxsXX9y/WXyb4v3RQRQTJHNvnpsjK8fnyJSt3LovTs7/rpQFpLPaqqu7v73NKu92mZDmfj33ft21zGE5lvgGAqaXs9/uHx88GCAwhDEO3IIk2252lekY+syJYNh7fcmrg4hQxjiORdz4ufT4LUEQ0juPxeBzH8cOH22+//Xbb1N35bOrVForG4XQ6nUhhv9//4f4kIghus2nevXu33+/7c//w8OAIY4xNU1VVhci
2Nh/H8XjszuezqlZVpTqJc7ZtOy3kCzvnbEdEZBzHujL5MV3CoamJEoF9oYiE4AGg7/vD4eD93hJHUHDONXUjkk+nUwihCjFx9/DwcP/4IOBtPpU5+4+h3rato+BokjZFBFXySCEEQi1lFBGPF0bBoqMGADY1V1W93W63u12MUQTGcWxCTURWqgXDOaOIyBRZ6XKl2KkJMSzZnph9sY2W4B0YK30upjlCRE0cY2xinHZnFi+dZLroUvqzQWDoR/tRg1xZ5MMykWxlCrWTrI96Y79wKaUoEIFEqaWqqpCK1hT44UkRqqrixwMiPj4+fv/993/49Hm/379//77ruuub29/97u73v/+D/8XPiOhwOLy72m42bUqdQ7q5ubm/v7+/v393fWNntuu6ru+3bVNVlVHsrQxOmYgoQRrHsaqqBUxgCd86HsjsQ/Likn8dV742T66ntRcZ1Yup5vnyev3qH4soywastxNexTZ4K/97M279OQHs9eT259+e7SZMkc9WZEQkXwk6f/JLYZWE2GPL2vGnPq137E/vp14kUxffcwBYiOYv3l7oj23x65/jPK2qRGQWikFVzcyqKhM+XkVEBVVVFv8qFmYGEZDJpg5VlU1hVGfSPHi0XssEDyEAACacFKgMdzoZS5lLr1SXQ6EMAKhCAN4jLfJgq5qySddb1WdaOtB6GDEA6Apw5cplOIrVgGTGdqqCrKWoEREzlyWwTVxZKaoavev7XqG0bVv5UEoBFO/9/fGhjlXh8Xh/350OWjJLKv0omh7v7q1HOozjbrcz5nhsd1MWhCIlK4g5IF/tNqp6Ohyfnp5EDPmCfd+3JCULEwDFwvpwPD8enrqh7K+u2najCIfDYewHM5SJMT4cT8fHp5SHj7e3f/GL77d19fR4/+XTZ1La7a4Q8XTss4L38elw+vHHH8MuxhjNnL3yQURySimlbVOHEOpYxRhFue/70+HYdd0pkZELF89bO1bn89mqmgbVWcQHmtp4adOEvhAADLfCko0OcTgc+v4MAPuripmbqtpttohuOI/KXMW2brc/fbr/77/7/ZfHAysi+T7nvu+vKBtwtKqqKvq2qn0gr9i0FedSV3G32wXCYRjSMCJirKbrq5Qy5pRzTkVExMeqaZpmt23b1odqGTM+XMTq4DknYUagXHySiajQG5h4nIH1RM9SQ0TUUL2amNZX9EW9BcEBAIVmfXXrSujLitLL9WuPc+nymMZ05iGVPPCYhqErKXdd99vf/h4dnbuxH/OY5O7weHP97rc/HL777ruf//wX//RP/4Te3d3d/fff/na33TJnp/I3f/NXP/vmfd+f923z/v1t33XtVTAxhP1+//Hjx5TS0+HRouN/+A//oe/P1hN9enq6vt4T0fu2fff+fb29Kup9s726/YaqTT+UqtmUwpwTiDpkEhDNwMIEzCwlX0h7nI2ZWkopeVztMouIlkvl80Wx8XmEW6Bwl4P/4pS9ON1TNu8uWASCy5tRVtP7KioXyZcZWC6RbwLQrUArdl9Enw+GOdd/ld3OH3km03FRqyllOQjrQzHYLwIBACNNToboGNT+LACKwDqX1lfpkEzz6uV5wRWQUAAACgjM2d4bt68F7Re5zuqZ/yuM/TeeJzUpagKa+2SKSG7ihUx6MYiopCqEk8mrKqCzTgwLEjAzqoK1J2Ud9qZsD3UObOAQBNGjCuBkaLAAhmjWFIUZVznVjBEBRKxCi4j6J3ZwXdxQeNa7Xlc27EBestv1l8xcqPXQX97JzOQA0TNztur4vHIfx76k0dhyLLkkLpy0ZEQszFa8wtnbfXXedUoacYKYGnRQFb13iA5Avffd6VGAkLxS7ofU970whBB2uz0ijjmBNfmcM2RmSoNo2TTtzc1NDMGYbYjY1M04jqCE3pV+/HL/MA65aZqRk5Uom6YhXZKeyrp0JuefS7Z+3jiOhcPSpVsmBQAw6MjS5bqoMM9OGpYFWrKSUrIEzvk4jqMR+Q0nCZDnt02
9T0cBAH788ceHx5Px8wQIZHLPAJQYY7upze9JtBBURoJcfs4apUaIHsaTnYJSSsqJmYm8eU2EuTe5HlrL7rwIb/ZgeX55p74V9l5czuvJdOkxr0bsmuC8moX1WYKyjGp67jM3Z7rTtznnxDlHAb0QVgXAbJXM6SIXnpJL1eiDUUvP5/Px+LTdbh8OT6p6c3MzDgOKgsPPnz9/fH9zOBze3950XZfGsWLabrfmM9V1namcH49HIvo//o//4z/+x/9BVZ+envb7fUpps9nYcqeAc3FDlZRSkPIyCyJemGnLRf8yXM3J+lKUmvddlo+vy1fLUXprUgVY8c2/NmdeZogXGeT6pVUPD1fp4DNIyyo0Wth7ocAMz2Es+JU66ptb9ToDfpFW/pkJIhpz3Fhkb9Om396GdWXY64u9evU1r352PUev8RXT0wCwRtTQ15L91X+qepGypBl0Y7DPGXfkHU3pHYqAdf4UgESCnTAhsc4SOAVRZ/bDluqtVi5kMgBTn1EAxDR9AWVO+GyDDRKJKmZObxspIGpvI1AjRU7fhFO/bSb0vbHXyxGjGRO7Cm8CqnPLRQDUmoyyKmPCqxKH2XIoIoCUkpxziCAiWYr3HsDlnFGl6/s09KjinONZy1FyJqKUs4iYOdFYOMZqVp+ZKYzzLJlzPp1O3bm38ACGSVMaMztHhJoT9/2QUiLyVVWHEIYhDd0IgnXd1nUNwjnlksbo/e27m/c314j69PR0Pp+rENE7FS7MY8qHw/F0OgP57XZP6JqmqUNE0TElZvbOxRibukJEEDUdltPx3PdDSjnPQ2+B3q0n/fVFaG/LeVI8scTI9nQYBpYcyZdSjsdjSoNNl8aUaNu2DrVchPlxLPmHH37o+mwCMQomJKueMEbfbur9dmPERwBBVB9IhKsq1nUUYeaEBM4RIljdmFVKKZmVyNVNXdd1u9l5712snHMw4wkBgCzdC2HJXxeZlYVmvgw/IvK0HkWXBxaK6PkNVtCqV2FvNTub+QtO1xesJsQ173ad1lwqhBRAVKQAAqhn5030tG7ddv90OJ589opOQeq6dkQxuL7rvnz5cnt7+3B4QsTrq6vfn05tFYexM3iwIypZXHBjyuPor6+vReT3v//98Xjcbrd1XT8+Ppqe5zAM+/3eouBms7PtTCmRG13c2FrNC4cQFXjCa6Ctc0EViGjh0V7C1Rz2YE6j54SGVNVaGK9vL6LdKsLJ6+dfxL/X8WM6p2vD21k1UOfepH2KZwvrF9ne1F7RV9//quHyenueBZRV7fTNUur6YlweTsCcGXOqYGDhS0gzCjoCyFfSrTnwv/wtC/P+RZnxZebx9cer0/xaTe4ZouZNcrtdH6xKAHYdyExzUDU0KQKAyqz/MjcpEQ2G4ObT6gBAWYhoCntzkXNSTl6FPVzzYFb1SQKZmIZTGBMCmAwslmqDmIsfgDAqANrIuJyGqd+24tLBvDZZHbc3cr7LAmI9nqzbhCQiaxbq+lxMkVhlydUArPGgNn13XceQxzSUPAZPoGywRjXza0SemWQAkFJqmoacU2VAJSLvZtywat+dzudzTmW323kfx3Ec+iQiVdWoTIr7CuRccL6KdZtSOZ/6fuhjjNYpLGlUVVK5urp6f3PtHXbH8zgMqOBcKKXEWGUe7h4eT6euajbtZoeI19c7B2j7knMO3jd13bZt9M7yofP5fDweu67juWyyTNnri8pyqQtSY44Q49DZYJyYBIjMYljBpQdpZHA7RNebpmk20fm+H0HFOTcM4+Hp1PdjSgUAHIKoKIAjCFW93+NuWzdtRQqoDGrSqBqcD9ERUSlp4YFNMY85cxEBg322201dt87sICykIeKM2FSadEeXyq090Bl9uh5UVvlaX7wvimbrwGkHqnwFnzX1pO0xXMqer7OW5RQ8T32WsOeZ2bMH5xwo+8RSmLmqqt12P4yFWSFzkTQTbMA5fHi8e/fu3W636fveAkyM8fHp9P5d8/vf//AP/8Pf393
d/eKXPxeRYRi892aFYWRHAKjrWkRub29/+9vf/v3f//1mszkcDr/61a/u7+8hNJPqGAGIDMNQu9jWGxZ1iEIEwKhokv8z1/+yXzCTkVbUyQvGW1VBny0I/kiWsyTNfzzmrWcDfA5pwTW8bh11Vr/LegmHqKsoNc+ZL6MUvh3wnkNLnm3V69uLl56/DQDMQ4YUzNuRFHgOVUig8tavrB6skKuw+jlRAGBhAPArCNIs1zKtztZVuNWxRp4f65x2rE/bapOMVaaICoTw+h5E3awaigA4Pa/GPgEBB6CEkyaNtbEIBJRUkVBAUaf2nLqLq5yxO6y3Z2d7XR+nS23CKA2Cz0KgBViG5S2XAocCCCmoM49koXktNAufW5zmOar9idsiHwfzu1FhCpLz8yiWz60GhZ0XK0eYG9dc/LTDQyCmKjl03eHx0TVukYdO4zgMQynFuO0KrMDkQAkZJvNoTyCCOkUCR0TMknPuuiGl4ogsNliOparv9/txzGlMbEjz4NA5RCxZmBWUvIsELo+l74ec86apPtxcbzdNdzo8Pj0CSF23iJi5jEmeDse+74F83WzatgXn2ppSSuMwcC7euaap6jpGR8ZJNwPb8/lsiB5yDssEdNOZA2NTkoEUFgDnQr7OCkZsqKrKeQQATyF6L6UM566MQxNDFStgAYBN3bRVW/mwXFes0nXd3d0deYeZbQCSsih47+u6vn3Xtm3jHKFoCB5BnENVjnVEVJEiWqyPOw7j+XyumpaVi4CVdne7q6qpnfdFBZEm0pu/3HIpzjk/K0aSaQ8RWWK6TnCnq2PxJVuKZnMYW4qb6BxOtL+5rjCD2l5Oc0r4Vpqy/LmM2zXW8RlYX7x3CkERhFQIsYpZGiWi3dX+3A+IKN3Q92P0JKyqXNf16cvp8fHx3Yf3Dw8PDw8n51ziYnWKcRz7vs9jGvqELoxjPwwDkd/vr1Mazuez936/uzocn9q2/fHHH3//+x+++eYDADw9PSFiKsW5MOGJuo5hFMVQ1UQelRyCEoHItGZ1iCpEBM9EyNbi/pejMS9BnoUoWTkZvT1LrMghbwa8F5+Vufe25Go4M33XYW/JwtdFznXYw6+GvZcb86LG+yL+6b/zRjqVuN7YTVtCgcEuLGN6Fthm16wLMUPWsjWGVhQBAL8Mxxdp35sHel21x1eab2/eaOYCvLxXQ9pcyo9z9XRO6icxronqi5PhGBAtnHo7vlNvz1nlB2wpBlNK7GBh7M1XtccpfxcAUBDSOedTMDcxnCudqgpQFudYQFsymCUggiggTdQIVQHFt9gLz4/qG7ih9XCcPitT1Reff4OqTrotqjrx4gWmDqaUUkyJxpSr+r7PORcqpIIOteRx6FIeVVkVFNjMWo2dhoiGV4RZHRSs34kXTRAiqmLlXMg5j0OeYN/ochn6lEURyTkHbDyTyadzW4VoaVPOYwjh4822rSPn8fD41J+7pmmij+OQmfXz3efuPFbNJtYNKGaR/XY7DE+cCypsNhsTInEwqex3XXc8HM7n8wIoXWbyRfhYVS1Or2trL3pLpvDinLPP2XG+v79n5rqu67o2plfbtre3t6KjKpZSUippGHMqh6fT+dw7H+1AISIhBIdtU223zXbb1tGjsjA7kui9TX8xEoBkY7DbER6GnLOPDAAhhKpqNptt1dQ+BCJvxBLvfYwVBb8Ya9Aqii/zzguNhWW8ERGsipyvI9byDcv89RyJPR/fS/xD/Go6+KxEtMTd9SYhIgB776fKnxRSDXWD6Iah2273+/3gnBtSKSV5F4qUqo5c2Hu6+/Lp2+8+OudU2Xt/Pp/bpnl4Ouw27W9/94ePH24/ffpyfX095nw+n82+cRhc13WmurLdbu/u7q6uru7u7rbb9vb29suXLz//+c81jwDAYNp7SSkwM+dEnqxbTgDWEUEguJD+L1msTcfWddaVL810SN2zIuc6G15PvMvjPznBLu/HJSosU/SzgLj63VWiKZP84Z9b5BRQ/Mo
Ud7n8nkWKl5AWeB4Ln02AtiPTdM8I5mzOoE6ggKlRA6mo4hSwBS9b+MxSdpJ9WwN5Llvry7wI0+d7OPHDdM7i52iFS6izB6rPj+7qsSoAVPiVhYzyclEtGY5O9U2TB1VFQItyaJkdEoAqMoDKBIyemlxzGVgnXRhFQbIDSC+uajed0YmrwDCRFc2WUFDUgp9t3rJItSyQEECZFJAUkO171eqoiCAKdFE+f7HrukjgLGMaAMS2m5eTj8qTOfDUlp72zQYrTm1GVWUwCpTlxrkoqTnm5jQYtqJu4ph7FwIo9CmZYggAGCy+6zpFDFWlCM5N7OzCiYVV2bkKZvq55YhVVdV1CwDGYSCiGKthTCmVkgXIyBOESAgupQRA0TsiOp378+FYVeF6t99tGs751Pdj35tpDquMOZ37IY2Zgt9d7Ztm0w1JAVLOue8dYhWqXbvZbrZENAxD3/enw7nv+/O5z5nNFlFVc+bJ003VWpg2g5vKGqyQijCng3UTiUi0sIAVGI29PwxdXddW2xRlH1zdVD64rhOAVFIeuv7wdOz7se97RTCrwiIaY6xCdDHsdturq6vohUCVC3NBUudCVQVPLkSfc2YuIlzyxJSIdRwz13XdbFqjTvpQee/Ru0p9CDHUVQw1+UsR0oAeU8FTp1Y/EAGgiUrjsri0eEaXmPRspfDsGrmA1Oj5+wHgQiXWZV1Pf2R2xlUncv09s+IuIaJ64zAAeg1A3sVT39XtdrMb0YXj8azMPgR0eL3b/uY3/9Zs2s+fvpzPZ2O5lHKfM28bfzr3sIHT6fzu+uZ4PO92V6qYUgmhmALDONoCY9hsNk/usN1uz+fz09Nxs9lVVZNSaQlVdRxHpBBqX20aQh37IewigThyqlNGBZYig744hksJGidlt2eRD0GNu/zin8jkK6DPpwiitw+szhPUs6kYJi/2ZTK+RKx5erdgvEQg1nL5UbmEH3me7V1+0V36Yi/O5rKgeb6yeSOwLY+XP1881vXEN++HgoqKmiSpTN0nWWNNV7nd5Fuwcj+1sMew6u29TvXWK+L18MXn2R68hXpZLgb4M26vVzrwTOpEAKzkRpfza6zyqUW8UjkRVXe5yFXVrfLIucjjLKaSOgVGIlWeoqUqgJt8lgBUGZXUTSsGmPlzaGcABMkBCAjNpEgCAuB82TW8RL6vHecXSzwUnRYBcsEar8fKXJnnKTtEMTBN4URK3pOhPPq+NxPzdOgIzRY9CRePIARjZsNVxroOoQIAIqrrupTCbEQxURWRAoakn4nkJrCSxsIsIcS6rlOXMqsiIZKgiROgI/d4ODrngiNVLWMqpVxd7T58+FD7fDqdToejiNZ1hejymFPhp8dD1W4ao2B7f3XV9ONwOp1q08zyRLO4tml79n1v6azONGqza1cIdklbaMeZjWe3hb2wDOntpjKI+TJVWdizyqfNgORwv99XVWX9RRHhLOOQjsfz+dyravDV8XA/DIMiVVWoqlA3cb+tr/etpgdEEBCQYql+FYOhY5YzagsLa4I+HQdTHG3qDRiDM/jgoyLEWMe6DiGgW9pIaIXoCXU5JxCvbzCHvdm36g1IMLyVqK3DFcCFvSBva2zNw/g5UHn5oRe/gojM6JxTwAKAKiAOHYUALGAHZHE5sO3fbTePj/d/eXM1puHp6UGBb25uHh8fiSBxqWJ9PJ632+2p6673VyUzTB1/QUQ7p+PYW/633+/P3en6+vrTp0//9m//9h//43/8wx/+8P37awXWXHLRnasaBcmly8ftdjtzmp/jNvXZbi4v0CsN3ldrjJfH6s0b/tloyfURnp+9vGphbBp1cqlqCPxpJOf6O9fyHW+mpy/+fAZF/3rAe/UNioCgAggExKgos3WrMiLJnCqtQSs6Q9CXL1/ngvaBZNhyk7B780jiBM16dlaYl1M4Xwxy2Vx8hfsSfmNp8OKH1qdouqpo5mHIJJdiiB3LwwlIFVQZYV5yKiDgOgGnReXk+U2sRjmBgjw
qINJcFIUp2bLMT4kUBQIpGIZzWn8AkYKozWKCqDDLwRhOyEYQzofFWoCmYagTMlP1cpqn+upUuyYFRVQoUlYjhVXVgQJCzklErMKJiMCKolkLcw6uSmN/Pp9LyW1VEVF3PHJJro6lgAWJtopZyvl8/PTjjx8+fGg2m2EYYl3VmxacH/qhrd1m0xBZ3lMsOnZd9/7dO6vOrXVzRGBIJcaoWM5dj1422z2Dnk/d6XT4+PEjIv7www/9+fj+/btffP+ztqnS8didTqXwdrut26brhsfD0+nUVW1TN42JQjFoFRtUES7e4367azatiHTnk9kJWdWUiCz7FJGczZYIYdYoJ8S6boxa7pC0cN203vmhH7z3+/3eDEiVxSE5B6oyKbUNfRqG3XbLzMKlbeqqqhw5KYyI23ZzPp/v7u4f7h9LkaZpUua+H8cxA5DpjW03zX6/DcGdj4+NS2MurIXAtW29aduqqpxz5+PJkmbLhMxxnpm/+9n3BmOJseIpjoEAtputc84FTz6YYKtd1kWEAAjmChzaggXm58xda9Xho+fduLkPTe5ZNrbM0BRMue0ydxMRgjM+7LK0xSnRR4aXa+UlG3gRCaZ1hncsBttyMTh1Rn0roWoyi6FRAOT773929/nL0PdVbP/2b/7qx08/Xe+2x6fDdz///p/++dfffPNNSuXTp+PNPt5cXx8OBy18++5DP6Z2Q6fTKaX0/fc7Efnmm++sBsDMu93u7u6uO/d/8zd/81//63/9zW9+86tf/eo3//xP//k//+cfPn0monHoTj6EUpp2ez4edlfXnuI4jqb7o6rDMPhgBk9TP8/Ixsu+09zzW/ZXTZd8rt/Awmt8AWCZgwrn9ObsPPdWV6VFVUTkddSTCyWO9NLtXoc9xbeLnGv0+3oRk8fxzUXVslUvFlLMl+bli5wPVg34y6pe2GZfm+AYFciUplGt9rh8yr7QrdQrlx7l5C805XZzunBZFvgpyLzCWooIokOcRE8QHaJYS8suFtuAWS8HprxWYN5nmPz2/rzb60UNIgK8tYT5egd4/SXTVr1+zwIlmYbK/F5EFQVEFWdgZUASmYvTS+MfQJUERXTh9qFMLzzbXcFFyvu5LMuLfN8kH21QThT1y7psHfZsIAoUUVnn2ZOyU2F2LMoigqIiAigK7BzmsR+6Xrg4UJY8DN0wDG3bWtMrVNHHaPhAH4KNN+sblTxa8c10LAEg5Xw8nFPKMdbee4uLRbiIonc+BADJuaSU6rpOw3gYhu50aJrqw/t3Jmzf970jX1WUM3f3j+duGIakSLv9NSISTX24/nwEgKZy+20bgpOSSzFMf1nM2ZfrTVeOHCwXR2KD5NkJruvapt0QQlVV5p106bjMzoIGcDXQYAgBCZYyqb3teDz2/ZhTscpeyXI6dU9PTwQYY6zruN00wZFTIUFHWkejlnvv/Xa7reuoqqaUbdKgdr4QKYTQNK0FPO/DPI94mlE46DyiMyEVMBkfVFpFqXU8wxWBb/0krOapFw/erHyu87PpganGL5f5n91/Wl+hy4OlNquT9qRJl/tms0UV7/3Qna+vr58eHmP051MO3nFJdQxIsR/7u8+fTIR6UprO+XA855xLVeXMVYWIaKorp9Pp5uYm51xVVd/3pThEbJrmeDz+27/9W9M0h8MkmPfp05fdbtcPOdYt56SCTdMQCKoQgrEalvnaapO4mnbm6eVZJo0vwUGXbtyLWevPye1ghmbgc4AFIvIqCYFVNIEVpGVZtsKMbNRXRU7zdMRX51dnoWZ8jl5Z7HFeBMKF0fwi5r35YNk0vehTTaAJmWqAxICKIAIyARz4ddgDAJ4gLZegOBU5lQHA82s0KKz20bCSC6rrArvX1RWkAOAcXc6rzsSWFblhJmq+wnqsj9AzFdAF1AKIbhIYm4yR7GCR6qS0tkoZnxe338IErf/DuTUItlae8JhuvrBV9NLZV1VCAQQVAnJzdV0BnJiRDyjhRDkBAEFAmSRPp2G
qIqsRBgDKGWA+dSxT4QGAJ1fSKYypqkl/CfKCwjLgjYW6wokKoLk4A4sWZRUpDqHrzt3pLFK8x5zHvu9THq+ur1zw6Cj46EMQEdFiyZKqICkhjkMehi7nvFjw9Gnous45v91uAeB4OAOhSb1UVeV8NLBJykPbNOfz+eHhAbhc33x49+4mBH8+HnMqVVU50Kfj6eHxkDOTD7FqNpvNMAwA4n3ImbkM3vv9bt9WXgRMmYWLJi5pHMdhmJVKQERykZQ5Z2ZmKcWKls45DECApu6/bTdW7q/rpmkaR66U4sk5nOghymJ1YEdUVxVziSEYMMETBkcAMI7jl093pZR+GJmZhcZcjDW43W6bGHe73W7btrUP3nkPznlUNihKXdebduO97/vxfDzlXHIui5RMCHWz3ey2V+1251yYahVoGpshhMqYeehI0SkSOSIEUBItNgMtEsU2vKeAZzrRdq3Si/noRVr2DAXz6g3WXaDJRwVgVo1Yvmz55B8jPq2XgFM2SQ7A8AvT+wwDV8W65BQjjX13dXV1Ph6Cd2UcnUMAraoKyB3Opx9++OH7v/jL87l3zu33+/OpYxZQ6rvx4elYNRtCz2XIiZ8ej7e3t6WUzWY3DAnRAdC7d+9E5Keffvrrv/7rx8fHknmz2fzwww9/d/0fSuktKVRNDiANIzdZothJAQAg50IUyTgtm5cD8Ibe9CqYPfOWeVEDfD4j/rH4t86Z1sf5ddib08257jdne3Nv723e3pr09exsriBR+qpl+2IFprNdOXwl1dMZU7quIcG0uEed2dBCIAqMqKCCJKCgEzOyCC5fVUy1SnHedV2HvWcqLdOvwNv3iGhJHk4m6TKbewPSLIoNgAhzXQVmQ7jpT5nXOYvznugUK+Z7XB5bc/d5jZeWkGPNvPmFl4Ll+rxK/rUVqCC9oaiiC+FDkRCMDqeqhIbZmogDpvUJQkYemSO0GTwC4RLkFzzVTDiccEc4Be9VXfv5AkxVzUthZh1csjcrbBbNuqgCTu1iNmG2Uoqb993Ye6rKXMZ+yGkwLnseUy7JObfdbkUEZ0x/KpklA0Dwk7+aqvZ9b/qWpunVnYdhGBDJ9Jxy4kmESZgIHQVWGYZhHHs7HV130pLff3j3zYcPDrU7HbruTCGiDyWN524Yx+xj1W521jKca0SMiLtNW1Vh29acxlLKOE6OdyVP3m+2jyKaE+eSF+dxU3ODWZbFFLQX70ArJ5pwlKrGGENQawraM4uAi6qsSX4A0Pf94+Pj09OTKhTWUiRlHvrMRSx9NMnHbVs3dQiESOwQh36wX5z1X0zPujNZS1WNoTYNmu1227Zb5+MlSyN0FHwVvfcueEBnVgkGoLVphuWy9F7uYS6CLcPszZLUs0tgFe3Wj+UZXfoNXPd6ev8j1936Cl1fqradbrpKAAiUAVBjXTGzCxERt9ttXVXbpnXOFU7bbXv/+DT2XR3i3d1nKcl7/9NPnx357XbLzGPfm3Drfr+n222Mseu6GONcGK/btgUQZt5u25ubm9PppKrGag9VyDnf3d1VsQEpgC6ESqUM3SnGGEIgHyclBEAi6rtxDnYvj+36mC8TPa0yvK/lduvYsH7m9ePXz1hRcfpmeTa1wAIjWtUVTZxsypLeIjC8LAm4Z7t2oY2t0KrrQ/HvLXJOjxFAVRRVSUBFQUAFnaKx/pFVzKKO8RL2ZLIVIlW1bI+fHQIAALaWk2mjvb4tTXIrT6/2ZG7qyqXWCaQrdNPlHgAKASkoocp0b9W/9b0STo8BUQQvIW1hn9DEz7PLQ2FOMvHimqsgCK+vK3ie8ykuHgtTwIMJpTntE80eEfPSy+IdgKi5Xs3TiDOOHSKoqCKKCqLKXEWVlyJANowuYW9SyZtagXb+eU5lVaRM71FlLUbDV1XRsoRAQUVE0+QUYWASUiQlVhAWYFAdhy6XkQi895xHA3Pu2s3CaA4hoHPCAgBu6gTRghwRER8oVj6l9PT01J3PTb018+6uG5i1yIj
oAJCZ+3HsxkERYh2Oh6dx7PdX21/84vvrm313PByPR+9cCPWY+PDU9V1yIe72123bClI/dNYxTUl2TX2134bgck55TNbskcLDMKSxAKAnslSpiObEY045cynCLJyL7ZTZ0ZmY5xLzbOYCmMRQYoyOSt/3w9jhBfCpIouh8cTCHIbh/v7+8+fPCxl56POp63MqFOLVZttWsW3qprFuonfIKixcPFEdYxMbUhq7CYzDKUfnDazTtturqyvbSOeC+aUheQNwTv089M7IgoREHi7gFCJZVS9pioqXic+GH6IATpHyK7TiNVbhcl0jTivgWYdF537efKGtg96k7wNfub2OiLZtgIjo0LwilRAIUNp2y7nYcsGD7vd7YL653nfd+f276/v7+6fD4/sP3zqHj4+P11fvc85Zi6o6F2KoU0gp5aenQ/muNn5eVVWfP919/Phx8nM4PBonJ8b69vb2D3/4wy9+8YuHh4f95rurdzefPn3667/623EcY6iD9+fzmVwYUz+ObURn/Mln2JHnJbs393oKLRNyW0lBZLrHCbc+VbKmszZNNKwzkWlZKL/42vXPicA8dJ+FPS38ZrZnGILl/csXvgh7l+CnL7O6ZZmFcy9zXfzUFS/wdaq3bNJSwQIpFvAElM2BB52KMgIjKILA9KqFvbnHDCKyoOFhbhLx3FMSEZjCpAKA/5rt4bJItN1Y7Z6fx/rq6tBLnLNAdPlTUQBQzR1HjYq4zigVceo8AsgEaZmSJpkrmaZBo3Lhzy5tsufAqjey/te3KdZOH5t+TxBQFQF48v9RQJgYAqqWzC05n+qsNWP8ckJgnSvhsES+9ckWVSO5216twp7YgFukbCekpp0XM2TRojqVQHkV9nCGNllGyMzusshi0QIA1pxzRKhTWuMAzaXavGS990LokTw6IgpebE1tlghVVcXKE1EaUinFGlRV1ZxPPbN674/nk/deGXORbkwiavD60+lQ1fHjxw/vbq4QdRg74eyiZ9bj6Xw4nhiwbTZNswHnpBQRCcE58ahc1WFTV6LcD50qzYtBNkwpAIQQhzEza+ZSsqTCliza0TZyetu22+3WMi3DeRqkE+ZM2hI7LqPpVltEZOZSJgdaVbUGWUrpcDjc39+fTqebm/dpLDkng5Kq0Lap9rvr6/22iq6pY4yEyKiiwqpiGt9mlnQ6ncYxiYhzAdF553wVTag6xno9haGJsHjvvX/mYO78otIiKxL0stBebutpBVbL8+dT0rN6yfp+Vad6NnfjW/289atvVe1ezv7rtek6TSJ0Cot3nVr/taqqseRd25ah2263T4+H29s2Vkb/4LZtn56edtubd+/e/fjDT8djX9exreq6agn1eDw+PVXfffedKep1XTcMw26/sWhqZzDGuN1uf/Ob3/R9//T01N9e39zcPD09FU5S0Huf0pBSuXn3XgvncfDeu6pWhVLY0DHT7L7M3a+6VrAwB75yexEG1s/PFmMvv/PZsn7Nq5M1vmCe8eemyets780i5xL2CPDF6cM3ot7lbOrzsueyEX8k8r0oclqvUSyfA1JARWH0jFOHj3EKe0VAJ0oZzPtyKXLyKuxNP8EKs/6WL/z26gyf6fledsa5C5Jz6fABqpvtwC1UwOXamCIAfhVj8uywTk7iMgNmJirDLO0puo4pJlq9fNXFfW5ZZUzLpNWlaFFhgaDNmZ3O/vEKKjpd3LMzL07OdlPOZx+U9fcqwh8txRt6RWnuLOMc/NRO83TjJezZ6mzK7XgFOMZ80Tmc2ohsQxlJUayuXUzhSVVLSYYYy1nHcWApKMycVScpZJxkcxFAM0sThJyTXMaxV+C2bZ1zVj9s29Y7Z+D7pauvpEqYufT9WESDj4DY9+cQ/bfffvvx43vmfDr1AFBVEUTOfT8MgyC0m81ms2PFMuacR0cEADHGTRWb2osyjx0qo6qUxJlBxNxaC7Nq6vtBBCzslUtLFIx7boUsi3lgOt2zUpdt+cxPh2HsCiecQSs
L7cFuVuc8HA5fvnw5d8dYeRQdhuF0Og1D1hlk37btZrOpq1BF9MAq2bTioqfd5so5l1PqzuehGwAgVtH7qKp1VbW77dXVTV3Xxv4iIvHVSkvaE/k5PyOdpcPUIJGqqurcJDLwQpNzmb/WoRQAiNxqllzjKteYzFXFUi42p8+/zaAN9nl6Pgcu3/kSYqav65w6fcF8LToU42Wpj4FTatt2PB/atj08ubaOViS8urpqmrv7+y9V1Z67/nw+769vfvjxJ+v1Ho9Hc+0ozD/99Pn29sN+vzf576enp9v3N+M4bja7rhuspgJAHz58YwIunz9/3mw2Hz58eHp62m2vHNJw7gQJEY1n4mPlqhoneCpMkpuFl7BHc7bwIqRNz8xEXTP1Fp2g+JNaCuiECF/C3lfcz9cnd32uecbCqyrqCnfNl3nmBYHhzbBHc8kKXgCRVnFhnQ4tp9tuq1XUvy/bm55RNKq/KCgCI5rnpAAy0pQLgqrqure3RnIuYe8SVm1mdQhrlZbX0/Q6nsvFV8LBZVV4we2saiSTc8J0FJ5Hg9fj/s2Vy0K00KmosqSTc+4FsGLL/l+5WbpmUVRnSZTZidfI5vNgwtl7wTLFOZoKTCKi086+insvTrMzvt+S562Scb0Aw2YyKbNdJCIis3eJqiU/c0FgCntWr2ARWhoJ1upbtoSZJRfjnAHzOI6+qg1qIaI5Z2FiKv9/6v7sV5rkyhPEzmJm7h7LXb4ldxaXYlX1YEroQgOCWgIE/eHCPM2LNBLQKDRKrWaRTRbJJDPzW+4SEb6Z2Tl6OGYefuPeL4s189CjYCIYn99YfbGz/RbJunFsWLUYI0KROJmmCQU3m413bp5y34/jOI3jNE3T5rolojQa9Q18cHNMp9Npt9t98cUX19fbD+/ePz7e77uNa7k/nsZRcs6h6Xa7K9eEaYzjNIoIN5xzdm17db3zTOPxcZ7n4LmPEmOcpmhByHtvC8w8zyKQJOekJXOrkcMAODbPs1PXijnrwKiqlblUbSUM/1IFOYtIqb2VFRzGl8gSb25upnEaTqfj8ZgTejMD6jqb2zFzCA7zHLMSQtM0V7sNgUsp2ZQ0RZN92YQQ5iltNpurq5vtdotAholvmiaxXweJJWbYiVcut09w4BCf2C9cnIfr8HMRlp6Wa6uY93QdqC88f/TFy//VGz5Dcq6uFqPSo4KCEnsXHM0Am9CdHj56RO/ZTDDiNO/3+6vd/p//2w9ffbkDgL7vN1fXzHx9vSOid989juPUdZ1z7uPHj+M43t5ei8hms3n//r2IDMPQdZ19DUMXf/XVV//tv/366urq/fvvX7169bOf/ey//tf/en11y8zDdNp0VzlnzQA8tGkLAMyeRYmK/S/Ueg4q6O/FMu55wfd8lbh4zgXd+yL4Pb9/0rv7ZHm5+jKr94cn71YWmMtD9qSyPBu4rwkqurKbr8Xwj4W95z9N1RIBMdEOBc05i6oAKlgujAZOETr/yL8o7AECgIvT43k3rS4VJAdLGlftxwEgxNX21UVoKjFiV86Ko6Z45p1cNEYWcueKAwpxKcO0ajsv/7fsNV0m4JA/yZGwb/VsaZCMT6NskYg1h2zLtjAvAZvUBBizedcTYLZDQcACuZpEiAiiSXoygAqqIFpGrMZkIM2gKllAQM8dOZdtMpekOpBVjz2r/qIaylAlSxSRqe8X2EW9Tgpe39Zxm95534DS6XS6QXcaT0rYT/Fw94DM25vrGCO3bbvfu6YZxxFBch4x6n6z2W72j4+Px8eh9TvnXJpBonjY+eCYGDPnuT/dHx8fHx2Hm90+0/54PA7j6N2GHaR5ytOpc/L3f/uzlMb79ycCuNntU4TTYY7RRZ1d0zaN3ziSNGgcHcyh8aR5u93ud1vOMPRT3+dpZlWdD4ecs2RNKc4xz9M0znOMOUlOSVJWIgcEKkCevfeYs29CaD17EshJlBicQ9+QSPbBbTYdEZ1Op9PjIcYIgoBMzAKSU8wwuUa7znV
dg8Cnvn/3w8ePH++ROobNw31+d38/TVNO0TPttu3ra7zajC2m62bXuZkyTNNEIjZTJBfglOZ+mIcxQOj2oQkdOB8ThN2+211ztx8FM5Jvt+BDVA1IzOx8Y7AdBQRgAHTAqKgZRYEICMmTAwAw28wFKw9oGAJaX6GrRYaqatFZfgUFzvIUuoYIqCqgg/I16pjEOFGl+V/EXRENmIacz2FVzssnxDgjItCTCAoAUTwAICgSmoIgOECmlBMoELvHYeb9q8PhINvXeqU3d48pTdfd5svP3/72dz/MWYjccRinP//57/7d3//P//P/Y7frbt7sHh6O7+/ub25u8pH+8z/+9v/2f/2Ph4fvX7/ekY6nuz9//vrt+x+++3c//+v/z3/9zccfDm+/+vzh4WGzvzmNE4L7+OEh8J9++fNfvnv3btO2N7tdilHiScB5h5hnjb1IVslM6ojHOUpKTESMMUbRzMysOecsOYIIi6IIS8o5J23Kwqxqc2LTq5JYa8F68ZfkuPDSoI7e0OrybHu3rLoWKEhVFS79U225ykhFC4pQDHleGuFldmjzRlUlBFUtPM5nHSzHpeYoqzuYTDJ4dmIkZhA2ADsqIs7J/BrPhSCUxmP9UTbkkZJOMYlatSeiAiISVQVmBUD0QGAacSLiLLCxzURBREhRFROqCmZ7cxAVTIrGElJVggEuIS1Pwh6swt65yTnrDGDYjnoGGzA60YIoQVFYxCBq/rHOKC9C0frBIgN9GcRWGe6SJ+qqOXNxWz9zqYGeF5pLlnHxWfq8RXNBK7SqzGjppSnxLJUr96gqWTIqFI73k966gGjWUtJZuYeiURJkiZLq+ZJzVs2qSOU/E2lTyQKoaj2cpZtHgCWIEhDRGGeTIoSanVnUXACQiBiC77rudDqZe47VDaX3mrPk3DRNjGbEM7dtG3xr12c/HGOcr652SDIMsW3bt2/fPun7AyrEGOM4zkSOECuzEKyuCt53obGOq9Wpi1XsNI4ppXGO8zxPcxrjPE5zSkKhWb4kYGnli0izEpuupY8u+B17YGideZ5FxBFbXmV1nqra66BANx8fHx/neVaFnGWe58PhQARNCF0TDMDpvQ/BL1NDmyx23RYR53meDqeCEXWOyZtsMSCavqgZHDKx9x6IpdpHXMSGi3LqSX9pJb+5fvD82vnxLc8v0k9x8j59JT6Ztf+bPnH5m5bGqYChqNlclfzSTN7v9w+Ph5ubm/f3h/0+HI9H58KcdDj1fX+8udmO4wySQ2BySi5mAACAAElEQVSDogSRcRyHeTIlBOfc6TTc3koI4eP9/dXV1d3d/TAMXdccewWApmlM5OhwONgRZOZpnMEzcuNDm1Kcp4lYhJjIrbs11WNhVUstfxPJKqBqiSzK+jkGpjf6da4rkgoqimZZtlgmQYvjiq6tBs7ssjImWrrQAgrKoLnAKHTpHWQEMrTFou5oM5JlYvPCMVsJ+SOiFuFjyAJIphgM5ixoZ6JArn00tPWr4hysfMhiYxqAskWTGcxlBVFJan4mKEiiCZQFRM67DqU+v+4+FEVRzJJVUMCKGFtXUVWTzADgqk4jIlYvDZtjlRkeE5HiudExiQF2z1ssNLKy4qrtuXhOroZ89cqEp/e6LhoXW4lnl/rLKN6XAMDn7Qv65vJTzs9ZTp2LQ3weyT5924rPlFKCqwiU092yW7SCUQVVLJNSVZQMJf0QsHvDvtrhzaW1DtkqT7EHKoAqlh9hNp4EMRBJkUdVBCMSIrkwTZPaSMnzPM8pzyop5qiq4ziO89Q0TVadUzJkv42y5nkGVesNeu+ncUAAe4yIFn5SSt45ERnH6XA4SErb7daxH8fxFA+GEEl5dkrbzeb2dv/ll18eHu7Pc4SkU7mNu5s3IoJQCPVM5BzZogYA9nFGV5imOaWUssSUR3txTPM8x5RzLlc1ICOy2R6ZOmLbtW3wwXnPzhHjcl6reOc9O0l57E/D6WggF2I2MG6ao2hCBc/Os8sxnQ7
Huw8fD4+PkoHIzdPcn8Z5njabbrvdXm02W7ODCKFpQuV4qfeh3WxdCPM8n05D7Hsicr4JIbjQeu/BewTa7XbBt+Q9EYHzzKxIOedFXHqdHa6DxJmowEZyOBPS18FPnooLnt+tTtFqRlL/Wi0ozxlh/cOLseoi7D27+vTimcunrJ9g1efqcssARQ4SEQkds6pXD9K27Wa3n1OW4VU/jNfX19fXj69ur3/7L+92N5thmvphuvv48dWrV3/84x9jhqZpgvcfPhyQoT9Np9Npt/HzPDVN8+HDh7dvP+92+/u746vXb6dpPpxOX3zx2W7aHY+PwbPJq5q23ziOllCeTicX1PlmHEclIk7ctE3DKUcp7Zma+Nb6xboUlo+piGTIWVXjKkwWYlIt/S5bf6CaStg7O8faEm1tJAAw2QRBAMiAAEgF8VBVBYxjmJEQCDSBIoggAQOrwCJ0tyBIDWd+1t9/ujIKnddDRlxEi1WhCiIDVbg/AoikpVVXtVPKYwAUzQpg91WnI1oYSwpZJCskEUUSRSBWSSYDlAsEAiKUvZGtpgAERQFMKQlgVlTFbAWKUdKsyWlhr9RMVQACALAMinS5tOzGWjCEhSHEVGAUUA0tCYkAlQEEkUGeJKEXk4PnV6w53mmRGjPbncr40/MoDk0LXEFBUUlRLu4FzvdKavekBFSGegCkajmIEROXiFa2qK6AL6oAmczJDwQL/kVw2f1VS3M5m6UICSwi4Gd1oHW1pyqomgw2b2pFgIqC7BQTZEVXEA1MLCKohX2siKBZK6LUoqu5BQFAnMY0T4g6jZMgjPM0TRMFb2Xjdru10sq+FRH5gsnWst2onSmZP1/OuWtby4IBwCKiVYrHKXrvRbTv+91m8+VXn19f76uxGYpInPM8TdM05SwGqNOUc45ZEoGaCIlDsmLLuHlTzDFWa8AY53mO0Wq/jEDMBKRTjKoKKDnnDChZGZyF82WwV0oiKgILFk7MaNsqWu/94vZoabi9ltmb/+04jjmpWS4M/XQ6ndq22XbNtttst9ttFzbBB/O3M6d1ZisBc9bTaXh8fGzBOd+0beubzjnnQuOawM43oQMrR71j55lZABHRsWFSzg52uO5XEmEF2pCRF59eWS8+vox8q38uIWedktZ/Wh12CZTHZ6FrCW/4fGJ3rucuJgsVkUh48SZlSWVGRFecGc4wJby5ef/hY5R8c3Oz3W5DeKeaY5xU4f379199/TUzpznVUROkBDHB4+FwffPZNPRdG/747d39/f3nX+6sqWDuDcMw3N7e5hwpRvP0OBwOV1dX0zTt9/umDadhVBlG50JogTk0RJI0e8t1tAz21nP6DCK55H1ZRLJkyRk426KgILkIySmIZk2oZPXQuSoCzJAL5K4WEhlXNQCWA4VnWFCzgBVsaSh8SHIZlMCJCGpx/0PVLGKr4bIFAEhUzH4MoUo2lrVXWJbHCYTRFWUvzahG5kZFZMCMwIBKFatSIuMCGyyTKlOXNmiPjYss3ReVCCAqWSFrBuSyi8FGUCigoDTnckZZRBVAFRTErAmAVFFk0fJXcy9HJRenablOzMK2nKnp7GOCXCb8ADBBAX+jmDrf0q0v+DezLicSAEUUTE/UH3485lnVbLvVAp5W1AkAnLeXx1bEEQApwuW9HSz7KwghAaraFKh8BgCiCiqgvW8xfhAjGhS6gQ0vzO6vkOfASrkFkJVVs1XYWAOXWFWX7XwqVSBURn85HVVqv76YI4lRKUrQVUUyURo7tRVQERVZAbhowSAiIyZATEmIyHxtprEfxj7lGNiNKa5XE1vrTScs56xSQpGFPZOdJCI1z+9xnMcxpWydN8N/G1d9nud5TkoIIDlHFQnOX19fX11dkerDwzHGhIiaJcU0z7NkMIaApmhpAqgSk/c+BMdIOec0x2jM8yQ1OE1z36eUphRTSoUZg0Tn+X+2sIeILbdd05pzngE1bcS8XoJzjvM8mtGunXWiiYkBVMRAJd48b+/v74c
hAhiEXcbBrArzbt+FEJgxOGqbpm2Cd0SoAOK9N96eAJkCnIiG7SaEzjdNCIEcO2uNhsb6IFoVlq0T4Zwzc4XiYL6kkUTABAtihYkqld7SrouiSleYgueX20UYe/qcJ9ssXL34zH/rbV1crgsaKmf4yqbA8nHn7JrPOQMSEnsXmtBqt9lutw8PD7v97WbTvXp983gcc4YQ+Hicp3Hs2jAqzPMcQggNpBEQ4P7+4euvPjOnjnme33/8eHXz2jn38PCASDHG77///u/+h7/tum6YJiKKWWIuynwppf1+P0yzgV98OLm2cb7RlBOMWbKCZMgCRU1JKh5NygjDEl3IKqKiEsHQFQW0XZZlMbeAMr+r+AVQdZVGonUHkhXNCLA0uhGrX2KSpoiAQAH9laJQgKy8ts6WKJhlbDXyXrZYkAORkv5XkGcRFqFsc6xK8jN0AyAGLHEHAdH82UQBYcbaJDsXlACaKl9QQER0odwAqoIomriiANp+UUJRyIa4R9BCaZOKvKnua1pQ/8QoAAhCUosZAFQSAQJ0fX9EJSBF5CW2qV1pQMhAaFkzIwMBI8TzhVqmJwQA5AqMpTZhuGi55DOv6CL4wYtp6UrhGxHzckEayUcXhczKJxf6hMoMlrmaKdmgotg9lLIRitOxlXeolu7A+h5Aqw2jaMkcMhRaZCIFgWQi1CKChRiyrvakwk5r0/9ZtZdUUDSDgrHzQAUUwApzlHOTgdT6mSZ6i4oqhAQghIiMMcXGoXMkOc7TmOep8GABYozM3G03zjlF4OCbpjkcj7ayeO83TeuIrZk5ToOIQNYY4zAM0zQx+RCCzcPMgo6Zh2FIKXnvQ+MeHh4aH77++idvP3sdYzycjpUoA9m4Teh8y1ZI3T8cCRAJmLDx3Iam8QUCDgA553lKwxz703Ay4cp5sp2JYGgMuzyQyJo5ljwiM1s1sK7zll6dXRoppRiz/Ypl+Kc610xXDc+Zcz4e+sPjKcYsGUTAYnDOYkE6OPaOnOPAFDx7R4gQ2IXGdV1H6E7jMI4TIm03+7bbBud9E9gF8s754HxgF4AJEIBpUZxBdszMZSkjqN5mTGxhz667xVevZJnVR3Adz5ZratmOT7GdFyFpKbYuwt7zIuyicbr+3OUTnz8N4HIasZ57XbyDtT1LUNezRzl7F0LQprm+vv39t3/cX+H19fVtPx777wmACYjg8fF+u92B0sPDUURCCOM4E8LHu4djf+o6EoBuszscDsMwtO321I+bTbff7u4f74Zh2O/3w/39ZrOxa8GIEGbX4BzLHCXOcR7TNGc/29Q9V0VGEVkagylnLCpClaJQPVNiSlTJwYs4BSCgTZWA1uMZBCBf1HYWR1/zOCkzG6Al7GHhVGwWLeSCuoeyfFgJaG9l35wswRc1GcWlCl9IVst2C42ooJRLeKvBjKqOj9mxLZ25cjT1iVrQ+UGMAMUMHNdzULZZnaCiSCZFUCGLZ7aSg2atvQgAV6W7BEEFqZa5BZSIoGKpIWZQUFIhAHCn4yMDK6nBNRe5OavklIDIgpsrYQ8LaLBknUQlzzBVl/KQiJwVuyx5uepeLPiehb1P/vXp5bHwSD6pMrPOUpe34oUCWC6/coJJGaGdwUWliwArFYOcLewRgIp1jC3smVR0VlVfg50uZ5laLiUgClqqQBGwSzrlJdJClqX8L/1+01yAmikLoEJxliBAAbFyD5VVZ6NwzfM8zyOSokiaRkXoxwGZdtutQQBdU7jqzGwsHO89KqSUJKVpMGkxsBaQiARHTQjH45GI2GOSPKcYc8oqEuehP4HIbrd9/fr1tu0+nPppiiEEAh6mYRxHBN60Xdu2ACApI6jzzhE6IkOEqOY0R1U1Nvrp1B/6oR/GfhpTFM+EhExs1XDOFkkl5wxKxGQCH8455zk03jGylUYLSkAzIApklWTUK5XsnTO9TUZgtLaTOCRSGPrh4/v38zDPc5znNI2x78d5io5913Wb1m03m5v91XbTNcEzs3P
siUzqGpTmGPt+mKfkm2a3u3LkPTvXBCt2nQ8ueGTnnFMgDt57b7g1BCkK1PWIF166YzzLlZFVf4hYXHA+cR2tY9I6TMpi1LLmSFzM+Wy34QJaeIJb+VS1V7cX/KaVbedLtTB5F+z0Qk3LAIgGRS7dpvNHKLIiKwqz9z7ltkmj399cNx/eT9P02Wefvfv4sWm89zDHvNu602l49erNHDN7VKSsoADOwceP/f39Y9feiuTXr1/f3d33fR/8ZrfbMbubm5uHh7vf/+5f/u7f/Y1ZX71/9z0AnE6npmn64+HUtKFtiAiQRGSeBiLSrmsQTS7O2jQZDDyF+UwYRi2VkYHvCdmZ4161FihozCeyzmtdU7eq2AAQeFljbRK7hDdbOQk2bG5kAKabQ2gnQ+kuaMV8Ug17pJDhzKpaxntFaEpqZWDpISZzpClnhfnSVIAprnp1ZR1Wf5HolMeurKvyZOKjmlgV2LCsBQAgAJQkk2pSoDp3tJMka3XCgtrktLAnAkAV4woqmM3ADj0AuDj0aXVVmAYEAAETAq/Cm6sFNTHimRxLRMR2KZZBNDmrAJWUCzbI0HQlEJber03+QC62LB4OL8a8F9o1n2bvvRj2qErurp+wPiTrYwBLM/oc8+yqzZIzSt0CGaX4J7h1drNU9LVBWsauSxr4VINVVaVqqytARig62Iu+ImIy8SEQBWQkc7ZdRq2iMc4TSHLEGdMQR1To+77pWva+pCnMfd9fXV+3TRNjhCyooDnHaTJwox3TdZpikM62bVM01c0p52yEv7v7D69fvX395tYHrt6zwc6zaYyn4xBCuNrtTRfGpBEbHzwjAZqpQYrJfN6nKQ79dDoNh9NpHOdouErnCLiMzUVSkmmKMUZQQlQuqNUnYN31unw+rCoWVud5JirjsSek1dKGwXGcH+4PqjDP6XQcxnGcpigCbdt2Xbfb+pv91e3NTRc8goBmBjCmYM55nuM4zv1pTEnajd90O9Nk8SE0bbGNBSYAYBcQ0YXgnEtZDVC9/kolWa4AFqpnwmVF9glE9BL5Ls5/fMrYe/ZWTy6ZJXC+2Odcv8+TS/Ivu9VrxKhBT6rD5zEbiMiFEGB2ruu63e7qeDze3FyBaOuDd9j3enOzOR4fnSPnaJHmQQJkOozyeDh88eVtEri+vnl4eHx4eGBqPvv8y2manSOF/Ovf/Ornv/jpfr/vuu7+7sPC4JymcRhOvgmOEAhJJc4jmmuSD7WueuGHCxbvb0EABONHkWvJvLQVuWo9Fd+zZa6JJRYCgLoiF2BB5Jy+cxGrW05dC3uONmCUF0RBWPwRrZQsSw0CCDgjE4sAQJHVX617i8GsVtyplQtcxNLO7HJcmaNSwUWdwYwkDtZIwLokMhX6/GW1Vy2cUGxJlZqUZhQBlVy0RAhRAciv4KwmbmdhL1W1l+VM4/L7EADc6eHDIu4ATBUgUXBi9pdF9QgRKWwZiZy5NtsBKC+sOanVvEwOGZ1jM7+GDMkCGxEQORGzbjkHwhL8nvl+lR1Kn9iO4eJqLOfcUxr+Kn98op1zruufXo3LQU05gRWDqjXsZQLJOZMKqNhsD6WwRGI2WFcWKXBM0IyaS9+8gL5SlgKXVzaCumTJOS8i4sUmu3iYrdoeMc+MxI4cs2cERY1TSrPkmJKiJptGSIog2TPPKV3dXGfVeZ5D20TJOufQNFf7/el0AoDdbicpPz4+TtPk2fmm6fv+/v6xtEab1vuw0AlC07F3eRzaTcfs37179+r66usvP39zewNZeotIUzqdTkQkAsE1bWhVdRxHSckzb7cbRmRGR0wghplRkeNpOB6Px9MwTdGkUkwvselCmvM4F5sew7nMUyLHzrnAbDx055xR5UIIzDzP4zj2ABBCcJ4KPU1s3gxqeyOEpmmY8Hg8ItDt7a1z7v37jz/88F5E5jmeTqdxnBExBJezhuC2u831Pux3G+85S0SF3ba72m6bppGcp3F8PBznOYXQ3t7uu82G2bWbwOj
IIZIj9uSd957YW2WATAKKTO2mszPcZGVKDV/oQ+eGp5mRQ8FAYu2PPUkK12f+krWcoxc+X6PLLE1VDXyhZeKIRCTxrMqxfo3Bd19ITJ/MFKt/FgAxG7RjubiKqa8WRcL1Jbn+iAU/ZcvZ/urmcPzTN99886tf/Srn/M1Pvn54OGya0PNEIIjwhz/8Ybu7evPmze/+8Ke2DUzY91PXwg8fPv4ifRM89H2/6Xanfozph5/81c/6fhjH8a//+q/78fSP//if/i//x//z/f39F1988atf/er6zevf/va3f/3zn5lps29aIEVKodsAAIJsN93702PKiIjsKIumnAGh3XTH47HEAUUEFlNoJAD0huwwFLYVZEhckl1ENB4Al/2Z9HwcoXTAGQCQ/RLq6lwPASDW9RCRi0YqISiaSota/xgAuBb3drhXe97CVUoJFnApFW6DqhJWlR9Vm7Vh4dtZYSUAi7iLLdDn9VlXN9OMNxNgpfOSS8U8XFGEVMBwfgigUVlRlc6TqayKwN7O6lJmCNQoi+Z/CataGQCSCgC4/vARKzYMasdy6a8QEbNHd3ZwhiEuAkgAQOhsPmEG2MweEWVVDE4YHZHViGKYVwuDRARwGfQQ1QU8x3I0YAsB5mdb7DmZlIANvQl0RnXa42ULaXmO4CXqbB35npThpYoXNPtYUYUMklGzgEJOohmhTPWkEhggF+07NQqlJpBMpc9jk+xUxYGyQQSXSl+MEm94LRE0eU94wvZgRjqvdPayqDk75pTmOPXTcFJJnlkBZsh93wOAC4G9Q8cOkZ0LIVhVB1niNOcYJWUQzZCJcL1uGv9dVbuuG8fRipLtdgsAfd/3ff/NTz7bbjsAGIbTMEyL6c88z4zkuy6EoKoxRgJwzlULSLSWSc45zvM0TafT6XA4PTw+pqRCTOiQmYjGcbZoZ7FQstGW0NJ5uxndYingRMR0K6n4qhMA2DvY/XpCJiJNaJlZBA6H0+FwGoc5payK1YpBnXPbbfP69as3b26vr71nRhAm7Nr2art1zs3z3J9O0zjnrEy+abomtE3Tet8oEyGRcxw8eUeOyXyFEIioBolzVV01Np+EPWtO1kzoaScTzvXc8uB/RQV2URcuV8OL3C142pR7UhE+ne3hRRn3FCBjOTiiKj6ZgDyPgogMJOQQnXPOAdBut/twf/fq5tp77trQNZOk6BBiLE2ztg0xRsmcAaLAMM7jNIXQNV0Ape+/fwfqDoeDVNGftm2nafjuu+8+//zz0/Hxm2+++f3vfvvVV1+N47jf73POXrJzzjE6BABJcRqGE64EWXK1dbWkrazvKkmymF+2ZxEuEBJAUM6VMyaAAFTV3tgEj6E6BpTTA0iAKybWa+kioxSUi6FY2GB/i1WOsRrQWqy1vhQoJAfAdB7+nQs4oGoYdMbu2cHVuIKoEKgYC08QaZEP5bPXrohbGQ8oqmYQAlByZXaoYgxFQS2mBYX6J6oF1AKgyMSCQhmlyiaAszAMSKoZijbIsm4/O8MRUAGdkoI7fPzeAl61WeHSIyZCYKwsIvMAQ0SgyVYTgyUQkZFwkR3Z8AeXMtExs7qcbTiDpAgZEJkEaQldds9IQJgU1Kd1YFvCGzJdhMOynVwJbMDLfW2pnrcDSA2Hl1xgWJqQL4U9SkksrzGxG1HUDCql51mnepWxrpzjuV4U0VIgmti1gKhogoJpTiIgkLTogdZVhixhgsJ0LF3ichY6JgYkUsAEqipZUpQY28alNMd51BwZCRFinofxFHP23oe2CSEgswUMM/ohIhUdhiFVxrpmOQ6HeZ7THJmoDY333vhkgNi07ek0mBFr3/enYdru969e3W63HTvUSSVlyVkVCFmSmA1CYBfjnGKiwCEEAwRlERAG0XmaTqdT3/f3Hx+O/TCOo3NN0zRMPqmIwKkfbcpYVUBBkcj5EFpL14y04L0P7BgQJKEiAROzcxRcIX7kec7znEwJOrAjYFQCmedkL5/G+eHu8fBwnMc
5Z+37PsaY0wwAXde9ut2/fXNzc7O7vgpGqGDnNpuu7cI8z0M/HA4HFSB2wbdN27ZtG0LLLgghMzvjtIdA3oHlfCVxL9pNiEB2/ZL1h87DcFOOeMKiI1QEc9BapIHX4efFqFPSqaWNWWVWLJ0q0AymczMKVFX5E3IQz4uzGvaeQGywZpnEKKJY+ve6fE+yJZsKJQfRIIdlNi/mAIYEpMyMwBp8s+lyzle3N9+9++Htm1fXV/vHx8e2dVOaQwOHHnLOY5y7zab/cI+mGaUwDHAcht2+tZzPJrt3d3evXr1uUjPNw83Nzfv3829//y+fffZZ0zTOuWEYEHEcRyO5QsUoMEHOaRzleHigq50WtsIiQKF2NQGaqg7OSdQ8rYizApmQfOnOWRLLYieB4TOLzxwDgBTrNgZAKd1gRGQFAjTxfLT3KBIhVoygro0yAIhpFerODW1EWWkpn1EqoJie6+lrLaeWKEh6dnJeeogK+WyMU9xMQRfgJ5iLKYKYmQ4RKJhesSioK8R5lPKfKqgysSqSZqVqClc+jhey2dqBgZ/18EsG5wQV3HD8UFrnZSZHQHhWI2Ni9gZ/KGEPByLDyjklZPIWFIGQ0KFzhG4pE4mIQpW6p3I2c9XSXfI7Ilpo/TmL1WkM9K/ekyJwNkaeKikpAAsKqaGKSFCgAHbEtmcgBAvBjFTErI3tD0oKWQUVDOWUQaniW0RVNWdUFU0kKhoL/FeyrJQUQKK9Voo9rIChXaxSNP5mlpyzGLwZEgDpYvywIGlqAwQAkEznDwHAWdNElS0uakQVEzgAAGbm4EAlTYWdtttdOed8CMuutmxUcg7sEoCN9ByxI86arfq0+skkEE3oeY7Raqm+71NKx+Ox3ex++ctfBh/t7PCBfWCdMKXZ5DCY2Vf+GVaJS0gxi+ScckySs8lhHI/H0+kEipvNpm233LSSIQ7jPBt8VHOOIuf3seCt1R7PpFLspLSYYpmac2f5aYtVVuM6qwGLXCAROsnQ9+Pj43HoRyMIWgcYAEJwt7fXn3/x2e3ttXMMAIxEwQfnCWQaxmEYTqch5+xdG5qubTe+acl5NeCA944dB2/V3hmHudjAPhW/rfHgEhJZrj4t0/fnldxFMvc84H0ydC1dUyKsLdAFaPCpYpFWAowX/RJ4VmheVqirJamuKutdsJ7cn38aoUNWcO7q6ur+/n6zaff7vUL+6qsvfv3r3zjGYda29Y+nGGOMgt1mw0yo6LxHjOMMp76f533uAID2+/3pOL1///6LL77c4e7xz/cmr3p8PL37+OFnf/XNu+9/+Prrrz9+/Hhzte/73kjrqhkk55xUIWedem73W0S0fo1WdUpLDYmo+DUjJslGQqjiSoXmayxck7xXG8FgIUArAShRoZsjIFsuBISgVQCSa7Qr6QvVQ1pj2+JXWjIsAoT1Qa0wWzVmWMn166FcHPjOx7d0ERffBsaqq3UmKFvnVjPA2fVb1ajcQqqK+uI9kAUsqxkKpKD2NKX2NlVW4Q3VZoeoBeqC57W7Vpn1pwooKSMquvH0sJxYT6ArZZEkZm/tpoK0Vk/EzjERKzKzZ2Ymz94xO2LPzOWNnCOiOHo6l5K4LEnL9X+eGhIBQCaxoJaBgVTQgYV4hqy0bJEa/lCMwA92r0BAoIqoaFsAMhCoJtuuSohMCpZHmG87ANms0bIG1WysGwAqKgTVpwrN2U5BFpZfFY+2wyySAVYyRZBBFSDnHEu1VzJDEUmqaEktgnVBCutEyzpgvR/jp5c0C3MkRSJkBQRBFaPPS4qoSqBZNc5znAYA2Wzabr8DgAyaVKwfbZMSBJAFYm3xAwmybNpWRECRmcFU5+c4z7OdPMM8RcnkuNl0t7e3X3zxxenwZwRJs8RpjjHOc47TnGLabvYiYiCXUmI6R0YVyCnHZNaww/E0HI/jqW+axtrpAGyKLMdhHMY5qopY1i+qysSmPnZOvZmZ0TliRmasIbCOqwFSitNUuHo+sNWgJoqmql2
3ReRxHI+HoT8N0xSnaYrRVMq4bcPtq5uvvvris8/eOE8xxnmK3vumaRkpxngcxhgjZAjsfAhd17Vtx761C4HIkQvITOyJPZIDq2usa4dYyfKF5AsLdGWFji4cPiqs43WEWMeS5xXexZ9+vNv54pOxQsBefD6+hItZfz390c8qj8uPQytjliWyrmsIAGJkXFZQhcT7q5v7+3tm/uLtmz//+fvPP/98miISkIL3HJooInMcm7YNXRtP2RMhJBGd5zRNkyV219fXx8P379+/n+fZBz/PM3Oz7TaM4XA4qGoI4ac//ek//dM/5ZyZQHLULDmmCLO1QFQkxck0xInNBsXCjqrKnISZyXkgJPagkFVzEi7m9IYzrqhdRFMRtAXc+AywAH2U4CzIUxuhjMtpYm9TbXptS9W+WfoDBhV51rLW+n20cq0M0gm0FGhPXNfB2QFiVaUSirj8IKjyLngOfhnPYpOqitUVPeeMT8zwSorDUMgV9kzU4o+ONh19hrSHah2nWthtbJCZUneuf7ACaEYgRTf1D2UbwkIAUSqMEEKHRTaiXJDsmiWMKbLp3Vt7k9lhaYeedRGj65g9M9aJXqkUmdFmect2m+5lCkaoYGBgcOiKugrDevv5MZFVe8uW5X693aETFAbOjhHTKtyWo7KILKx1y4uMGIDJjKnpREsGBQXBrApx0eS0fASKK7oCSCH2iBKISIIqQQslrGq1qihsS7QtCKgUnJOSfBRiPpaMSRCRQBEINYOI5gSSNGNKU5znNI7T3GvKXdtsNps+ll7AUm+dw56Izeo8O4t5ZrxARLbY1e5iOaFPp9M0TW3bXl1dMXPXbmOMRuk7HvvDYz8Mk2RCRQT23g/DkJIhRwIzE6qIpBhTSnGex3E0z7oYIwBsd50KD3M8Hh8Ow5giTDmlJK5tgURzEXnQyu/Wqiy6nJxW9jmHzjnmEhFtrGhcPdNIWepCWy/adnM69ofH4+k0TFMcCi09heBCcFfX+7dvX9/eXvvAIlkkOmTPLjhPAP0wDsOAoiG0RNSFpm3bpmmVHZPzofXei3cXZDuqbQ8rbJYfUuaQZNLSfBH2kjUPnzJgLwLNen15HmDKP5/Cm8vK+/R9lgL0R4Ll8+1Yu6br156XS738Vs/jtK6w02I22WdNwTpCU7WsBQA2m03Osd1urq62d/cnIhCRtiVVzjEeh3Gz2TycTjEnU+CKMZ6GaR/nxvnd/ma7PR4Oh4eHhzefv+66jgiur6+JhnlOf/zDn7768vNpGneb7eH4cHN1HWOMabI6I6TGey8AIsnMHIpuuC7WKMXfqgQhJla2Rr3zBrwwfCYbEELR5LdKuYYIQMXrhWyAi7XhjaJkTDNAIF3VyVLme9ZfoSXtqBAYKmHwIuw9LdPJqM5VtqpsXwZm9WzRAu8sax0C2OPCXKwhEACWBVYLTn4RKDgnN0Tn6aF9TQILb7gQzEhYz7cnwtY17FWVj3U4RKFVZwEAojgEcGnuS0sadDFWKACk8pgsRtg/XbCQ5hANb8Q22xNEJmuHeiTHzM4FZs7+itmyb0cEFgKZvQU5IjAk8BL21LVW7TG65Z6AkQGVCneQwbajEjKtG5+MzuZ5NtuzqhEZrDrMSim7JebRsznfky52IfSdIScWYMDm1pBIwKgLRksgi5o5lpdDJoXqhmGEazUDqRr2RCtWClRtRysSEoLaQlmT4YJTQkT0bIsfEGTVpDJrjjnOU4IUJ0np4jw+Ho9N07gmLBGiLLKIeY42eydCFTVoyTSPIYTgWURSTiklJA4hGHmcmQ0tudlsfGgPp2Prpr7vHx8fj4chZ2Vq2tAG32rOmtR45cF7Zs45GoUg5zzP0zgOY9/P84yIbdvmnOOcjqf+cDhOWZhsaRNkIsJCaEywKJskFe/80q6k6jdEEBF1IWBYaDcv9RoXi2Y0MzrnNEl/PB4eHsZTn6aUpqRJPfG27douvL65fX1z23iO46gQG+873wFATBNkiXFSzUyOmbu
u69pNaFtml0ubhINvU1hYQAzEyIwVblP7edayYiPa52wD3TPEo/C0Sm1w1jwqS55elmgvdiYvzvMnseclvvnz4PTjwW8JlnA2X4EiNVLCla2dAmeMhuExTGSK7QW2UAqQqahb0DBwuJiQiIiIbLdbo5zvdrvT4fiLX/zsf/l//5PzcDrNTdcKeJf0eDxeXd+0bZ6mkwgIwDin4/E47Ltm751z19fX79+///Of/3x1u3/9+nXfP3ab9nSahmF49+7d55+9iTG2bft4+GjSDSGEpmJ5iAgki8h4OjhHzm0BbGYt2ZD0hFllnmciYh/IsRGtBY2VV7jpYIglEGQSI+wRIkKhKVXyfmVLOzRmHCg5LlIVQEaQMH0Qjw4AZE3TgvMqp58AKJ0jXI0YpWdbkZznlao2RbVawNXXwsLWx5U9XF5peNo7WbUni8hLLeBqNLUUX0qns27ntUvh6oV0ln8xYsMT13urKdY3EkcKTnPSlS9tKfUqXq7uMcYqMj1OcUFsKhAiMTOhI+dr2HOwCnszw1oR3zAIS9mx9KmWCEThjLomIsR5nR2/sB3P3tNElDDaAfbi69WVKJNVeIgYqxL/clsvCutGtt0vmnuaDZYpKhlFTUZ1CXslcwAwbcaibSAmGFc0yqrFbMXTogBozpbh2pnNhKBcMn1GzHg+gexKcQZZLnSJDFlyjprzmCbIQiwhBGKd+sGklfq+d8615tO96ko559I055wdESPFSg0wWL93IaWUpjTPs3GrrS4049ZhGIjIh9Ykevt+qKbnPvi2a7omdKfHk4ggoc2GETElleKTV0KRDducc96H+8NjnHUcJxXsuq5tdgl0GKZYtUahIo/OGcmq2luiGsS4Xuixug+u4ce56Gc6Iur7/nA4HA/9PCejaiFi23ZXV1fdprm9vb263gHIqe+9591u03DX9/3UD+ZMsVTGm82mCa0LAYCzWWQCIKL99vWNKt0YK6FbCz7R9vNsNdMClVz/Xv0ESPJ5BHrxTz8ewPAZb+9ffdULz3wGFrvII9eHhog0Z3z2ElObRUFCt4wbyyWpmlLquo6IDofD9fX1t9/++ac/+cn/8//1T4g4DNrtHVDwUabHmYg2m43InOJcqr3TaRyv8yZP02Tys999993Xf/XV7avrYTgAgJlNUvB9P7ZtYwkNAFh31IQdbH5sMixL1xSqedMyZY8xmrBfgfvV9pJqqd7AhHVREHDRDS641po6PJ176gplXzQLa14EhoAjPHcIlrC3evxjx1Qr70Ar0H0926NqRbr8FfT8mAoI3ToKsiyna2mgUrohqKpbGSqpFt5CUS9YRbV6MlvbsuzkOtvLAMUGWSGryZedg7fAgg+snw8AlD0AOICxAFFNKC6feWNak0FdEkwApBYQBcczpQFIEdEFRcxEwEzklDkTCSK47wURbfDCrMzCXhFd6CzpBSKo40NEnNzNEtsKgrxqUiwXzJNazVn7qDCxF4TcPBbiPJx1EBwRzY09n85hGG3R9GBTWVWwBrTJE1AEVdBMqihJs4Am1JxTVM0ERXIItOjneDCHvPNFb5pieG6datZUhVmhKDswIZE6A7QZhGaC2mgmBVHxknPOLhgQQ3KOGmfIY8tIrRuGqR+GOUfsuq7dOW4Op+PjMN7cXO/3e4NuOoBNCAjQxzgfjpJSIBKR0+l4PB5zzj54CV6bcIrx7uEupbTZbAjxNBwfH+5fv37dNM00jRsfXu02jiTHof+IwyHHCZtuu9ltkSlJPE4PEUduabvtbvcdA5yOx/FwiDFSAs2aJ4DEvrlqiKaU7k995q04odaHAEocEVUhtM3xPscY+zjFOAtkdjhh9jnudhtyyIyBqXOhc6EVx6cE2waJFCkJYJ7neZ7mIaZpt22IYiAOwcU0TdOUZwLufniXhpGS+NMwjv0AAFf77urq6vpqe3t7e71tUz86wjfbV85zGtL98IiIIEFSyuJCCJv9brPZKLnknLCJlllHBDKPVHwzCIo
qrCR1COjJ+FtESMwOiROwCFGzXccSXRdhiPTU0hoRwTE8u+HC3jMDtLpqAoCXp7M6hRW0r1xh5Z/oAQuk+HmcOzcVFlgmAAAkDE+WHiwgC1Pix0KuyoDIzjHSjJUrpgpc5EcRsQFz/dWUYlZlJgBSyaM2jG1UaNtXrrnrT/Pf/e3f/8tvf08KKK718btvj1/9ZHdztbm/O318/7C9mvwmzifYOIiz07Ade8RX4e7D93/3dz//+D78l//ybew/DJQ/v/7iw4e73a5lvvn9739/dXv1ze6r288/+9P7H2ZN++3u93/+4X/8+//h4+Oj75qmaTYti6BmxeGUUYJz7CDGWVWtkdl6JISUZskSfOuYmCFlBgQuqPjl2AmBMwA7ggLmJdABr8UdmchMCRHIVfZ6hboUKwY1FsD5VeWQ8bL0wyoYWJ29HK/6AHOOemakmEMMqGoxskOzy4YiBaHq2GmdEBnHwF7qsIEKgSkbaRkDgRbHsApkV8ViI0pPpFtW1cjCDAFwqupxUela6jxrgV7aLNtfyY0A4Gz+YQm1VH0/ADAw0jrTrAdoXsQClgEJAuWcF43OMswnQGT12ZRduA78iCZETimBEjFYj5TZ2/NzOMtSrCzTVp/1NOxlLDPIhcRVc39X1USt/VIk1iKUdxPjVzAbbmQpE8lopTVxSDITQLITPItKUhUCUUkGJ7GO5fJ80VQUyRWxtDRrqlNOrEsePda0DSuYxVYu20iAgOCQMikqOMeqIrkUMXGeU5wwJwBt2xapBQAbwgHh1dWVuYlafQMAMUYViTGCiFkoiEiaIyzTJgADPVqlYrAUKxmtOFNVaoov+TRNDw8PNg405hwQMrIjbhsPooHZxoPzOM3znOcYXAsA7BAAYpwUaLYJItIwTP00IrnQGhFwOJ1OQNdzTvMY55SBwHlsmrZrG78CRi1rh/2K4ppEGkcxlr2RHOznHI9HYmiaRkQOh0Pfy/F4Oh4P8zyZPfp+v9/tNl3XVe20ojJnteBy1JxzDtkkxwpvtU60z6gt5gWxeQ5X9dpZDn85mYmISPTlem7peVx05j8FG7k4wf61Z52ffLHlxZbpRTX5/B2WUH3xANEQDfj8N+KnITDrm6kTqGrbtrvd7v3xewC4vt5//vmrP377URVCgPv7+93Nq6urq0Pf+zYzcdsqZZjn+fExdw31/c6zHh5Pm82maeDjx49N2Jis60z6u9/97vb29v7+ngD+6q/+arfbWSPkq6+++v3vf/+zn/3scDh479+8eXU6nRR5mZEbYATF1O4FijQHFXsXBCLiilNFRKAlGgGyIXsAsUiFLBXceZXA8240nl2hWddK3c6y9UEvD7Subedb7SWsmgpPjywvkckALPU4nole1fk9w9NZIIKunk/rN18X/VonN1o6n0/qyyX6PO/APXkfeUahefr44oHtCGdCwLV9hBUvqgT09DVlsc6SbM6qxUu9it9IBliFK3RGEtDkDbpifIbSkgKaR14UW2q4srA3rQPbcp276iK9LvgQMWMo/8S1s6g9dsb1KZkSMxGnWB+jE2ZhjuyWXAmBHZJxIKEYO01ZVEDBhFdEQJKAqBoJXapK56LWcw57Ntpd6C9gw17UkmMX0jbaKAcIkM1joZyVJdjrgsqidRsKRTWLmbJCiiE4ds4U65NkRWhC2G63UE95771jtpgnIsZbsP6M7ZDlQTHY854AFkHqm6trAMg5m7NP1c883R8evPebdts0gQgEhACJyQfWLKCiKatkRGycF6R+GHJWC2n9NMYkSSTGOGXJSTMooCbRnPNpHMYxIk0xTTlnVGCG4Hxw3nvvmDwTsfG9FFFL44DZOdcGBwBDOp5OpzjPRlVExKnvT6dT24WmuUrzcP/xbhz96XQc+yMAdJvN9fX+9uZqu+02XbvZdN67nKOIJEmkGmNEDACA5JjIRL1D07EzJDOzC8xM7oxkVl4159ERFuYorcCowIz11H3ShnqG1bQ3ehJa/oLw8+S
fn9Dy01XgWUcgxSVxe+H9zx34hSO/WkYRSgWABTWPqopFpgKBivnfeSVWc6gGWOo/a5QBUOl+gXMOAVNKIYT9fv/dH/84TeOm3fzkJz/5l99/ZIbW84eHsd2l6+vrjw+PfQ+7jsxhap5iGufdxk/T1F119/f3Nzc319c3f/rTd5vu+vWr7Jxz7W63u9psWuPVTNP0y1/+8je/+Y3xAAzzZWfvfr8XAXSYc47TrKrIDgBMpERVqaxayZZwJHWOzv6jFTxh8o1MDEpVbxPL/lkURkriRAXXDUhIxde1IjzLIabliEOpBQEq4HONaiyPMy8DXVxpqwDxQlB50qm2tpv1roBL2/l8UlgoM/SMGK20qIUtLc26xV7HFUle+weaAS4D3kXoehIIk3wqvP1IOHTOMQCKZFVe7gGQeXF3gcWUAMCO0ipBOO+RjMjFTEdAsWCQclar6iFLZtJEwERK2TsDqTjks84nQIrDRWCzPNeUHpfrf8l5lTo8DwjP/EIr7wqVHtE6pkSknhE5EznnlFnJk2NFhoKPcskKxAqIojxZVqC6TPIiigLmLAqYUAE1i6pARgCBjJCtGy0VRGsXw+oAWJomAMQO0CA5SAXQSQpKgpmRycA6khGt788pDQiiOQEKojpiYGc97JSSyVt3Xbfst7ZtrXQLIThmE3kiIsOS2F7NsSjLWHph0ErnnKQC+DbpLzMu2Gw2wflpmo7H4ziOWaR1rm1b8s7ERrGQj8vKFWOUGPMcU4wS0/39/TRNfT+ehiFmMTnKKaYparvpmq6NSY7HYz9FImhbf3ff23HxHn3DzpGqxmlst53tVUQlBmZ0nrxHqiPMlFLf98fjkQk2m42qmgugFW3jOPbH4/F4THmb8iiSmqbZ77vb2+vrq13XtV0bQnBMIIJQtJpyztmxt30bQghd27Ytu0BEzntmR84x8+JMwsyyEtVceymsEzi46Fu+NMA7d72ebf9E4PskbQ5+9HbxqouI96/WeQvk4WLdWb6/1hnAp77JqlC41FFSVVFxzosIae42LTM/Pj7mmG9ubq6uQkzaz5IA5nlmF9q2jXFMXthQnqIF6S4YQjge795+9tPb29v//J9/++r242dvj8yeAP/2b//2t7/9jfFWf/e73/3H//h/+uGHH0D022+//etf/vz777+/3u1zznd3d7vdDldEPU9s9Thb7wtBGDx6s4pFFCYPi0NmyXipoqwBzfUcuYh51LCHBfaCFeT5BHK/bEezcUBZOnBINWiiiWXXEGoS2WD1pSJUB1MtHHIQBeJi4SIokEFI0XJaISWlDMoKGQQKHw8NUeKWfibSAgBFrT4JNoSrR3mp6jIAF7Gx6nWqxX2wds6KH+ozl3bMhXstWM11M5TVlNZb7DnGz3beESgJqRqTUFBITUZ6Td8+k7gRbVT2tPa0w2b/KYKQCeEIEJn2uihkRBLgatDkABgqmUCBTXsl68mOqKzCGwBodi+GPcAtVBb8udBEtIaQc45KPKstU++IXCZSF9Q5cEEiETn2ARCVnZTqkMvpIKYUnm3CB6IKSUVBI6mRI0EMtFkMcuc1X2QZmNS1oSbIqsD1eUZRBVCGgvJCJUKHaNruGcUQyGQwN3snBUYUAwcpjf0JCVxB2BpUpyh1AcCSC4DtdKgWNgoLj9sgFVL74pqzmQ2hwqbbmLDFpu02bZdSGsdxHkZN2TXOt548lWEzgGP0nkkFCCTqPE5xGIdT3x8PYz/0mk2WJQtYmpJUUMgTuuBtEZlzShmcJbVYiUhEBIhatf9EUDOBekZvZHnPLrD3hbQwDMM4DCriQggh5DnO06Sqm82GCU6H4+FwFIHxdMzz5Bxtt93N9f76arvdbkLjvPOIkDQnLQAVBckqDhmJyJupQut9w84pAjlPzOwdsy+KlsxQqQul3168FM4gF2BehPwQGBSIz2vik/DzNN6c//lpOvnzSPav3n4krP5IwHsatexz82p9sNWtnICqWg25EZRM0cqKSlj
6HAD52SeiKCmknF3bEZGk6Jzb7XZ3d3fv3r0TkdevX3+8e7w/nhig70/jPG233TiPoBhjxiTMzCxgSQzAOE4icnV7kxXuD48fH+5vbl7JEN++fQtAMc7M/N133/3ww7s3b97effggIjmVWsZSqLZtC4CywEYAGRkZAJhyzWkAhFTB8jNUhMUq3biaZUVDk05YWhdQFDrLpvM5s0qZLuAuiFjdxmwdLn3IgoWButvP1kRLZ6q4mJa1ywSACKQESEVCAUXC2ruy6R4ogbHMiRCK19tZxwAAZAVRgUJLBLB06ix/hssDEIDiC1d8T6GQ7otffHVIJQUFJQEudWr5AlCFuO3nlS2FhU8owgDovGcUzcCQZblH0aRV6YwgA6FoBiKFjFVTTszUDqWgFJHKNBUBmSAD4lL3QGFcG/URCFCSAqaCL0VexMbIhJcXFmddrHM+630sQRERs874dNq3yMQQubTQ5JHZESE7z4RMjsEFcE5cYGZkh9IaDREKLoYFARS5pJxFVFpUQTKgYEkdBQzVWUiagJRKtMM10Xc9pLFuQ/EFRLcwEkTNRAFBNTORM1vwynZAJEcIjJpFNOccJSWVhJoBJKZ5t9s1TTPP86kfRKRpOiPVWbdNiiVrtgcmlTkN4/F4NPqdaXQdx9GeYCO9lFIbmqZpYoxd15kd3VD5dqpqxqo2biFA55wPzIAxzqQgKacY0xwlpjTHeZ4jahZBIu+d45ABMXvXgAt+mObDsZ9SJnZtCzHGx8cphOuUUs4x5TlnJG6IXBscATKjdy4E37ShaUJo2BcwZJqm6XA4GOSv6xrnaB5VVYswW5yGYej7Pud8OB4QcbPd3L66urm53mzb0Djv2TwssyQRWXKXwjl1zgeLeZ5KS5Od98zexnu6aldQCXtF85aZF/U+rPx0qjazy/n9Yrha14iwaub8eAD7C7d/Kvgt7h8X1dvzT6+Pf8y3oaITcb0yniNmec/L6c76mYgIID5wykyZttvtZrP57s9/TgCbbft46HMGRzBNce7jF1/ukRtVzMNMAo6YCOOcD6d+mPYxp7u7u+12e3sbpmm6v7/f7a6I4cOHD69fv/7Tn/40z/PNzc1vfvObf/iHf7j78OHrr7/+/e9//zd/8zenw4Oq7vf7cRyD52XxASOuEQKwExAQUkDnkUGMgI3KTLbK2w87F2oMoAbKtCl72Z/ECoBKQIveNKFBoaCs5Gs5U0nn+Z+czyQwuKyVXgY1L9GHkMEEm0HwfHyzzeoYpXhya2bbrsYuANWMYNpxCmYNUgY99uvMqAEQqG5cmpW4alvi8teS8mey6rDc8xK6yhYlOP/VEnyhjEBCQoIZhS7/lzEv94mEhFxwhMXrGrMKGEdKlBUXa8H1fQIkBUGzgMRFBiarcS+Rq6yWEpBCJGVAQSXVKkgKNjkz+xwytraC2bKruYEDrBGkAEDq1lfaudrTaMmMINpIdxkCM3k904QLO1AICB06Fh/Ee+eCsAd2GhslZvLkvNGuanw1hGcu0MvywFIOAVBAKdaw1oHHmRSkCOudlwNrsms9y8/LhI2gGWsLA6qBRSkKxE4cEQTzFc8gOc2FcZBTZFRGaJqGmVNKw2DKlg6IkDlPadN1FrfyXMD9KSUQzUsPk7jxgQDjNJsAm6l5aZbGB3vn4HzXtEQ0DMPp8TD1g9WIt5/deO8dsfVOG8egklKK40SAcZ7nYZzGMadkLJzT3OecyXlmTiLTnACQfZOzmjJLFvA+KFKSOM7gJZm/BCJ4YlJySI648Rycb5rQtL5pvJnLOk/GjOhPx9PxACC73bYNLqcUY3QuOMI0p2GcYsxxSn3fS47b7fbtm1dffP726urKOUIE1SyIWXJOyY4RVkct5saH0LStDw0VfQbHvnHBW3uByNXnExEpORu/VP0/tkCCbtHw40WPCgCU3JpDcA4Yy5Zl8IZP/u9Z6Hp5iKefCHvruLKe8DEVOIYW6PEZowxQIObrV0GR4kcFBWMmF62
BrKX3BIKl/49VTasuh2f0PJVcvShm2chKVYnA2DIhhH44IWm3aSwbEzGDe4onSQIIkFJSIhUw4AEiS5Z+ms1D2Lvm/buPv/jrn33xxRfffvvdhw8fXr1682p/ezqdrq/3r1+/Pj483tzcPDzeDcNwfX1jQj+GgchZjdlkIwCPhogUEVRCYnTEc4qiOYBj5gwioATinFNFY/4uiUVd5ayZabgX0qKNDGaWRzXVVwJQpUJ1OG+3dXEZo9bjsvAclnlLkR6zs03qLHAJSMtJVeqz9VFfI36XvhAirEjoddynBl/nEreWIeA5CpbWF+Dqc6Ga3lqvEZY6r4pHqv1VxaIGIRISOQRUElshEcFE/clg1GhUWUazxyMkct6ZMBaogRizCYcAI50ddfFcCLPCagvWviuYxoACLULS5fkveTqrirH3S4fA0kOt9NUn0WIVPNYZnxT3euZke3gpD5d0UoggFw6DNZSUCNTAfj4nR6nJvlHHQA5yQGJgry6wd1Jk8gks7BlgqLR3M0BVtkMprrWFYQEEGQEIM1a6TC0WnvqHXYQ9NHkqQMTqawVshjmSUaOZIaNqmmOOc5qnOE0pTiAJCIF5t9tZidP3vfNhs9mFprFPMTkuUxIsV47qQpvz3m/aruu6eZ5PpxM3AaokoxHAm6YhwKurK7OUM7VPE3CJMe52G0anmiUKIRBqjGnqB1QQlTTNp8NxPPXTNA19fzqdHmTy5F3ArNiPw7GPjl235YfHY5SiLprnJIqi6LweHk+IwAQhkHPOEYOophy2bRN8G1zjQ+PZ+YKYs57tOI7DMDSNb5qGCUxXumkaUDmdTv1wssL38XFsWt7vt69eXb16ddO2bUoppVkEichWUqyDYWZyziOwd8H74JxDJgSGM4Kz8HAWMXdCt6xJ6878ov9QevJ8vkby6uRfP6CnBNOLWPVS2MMf+eu/evtUxQnPKr+L24Iv10pMtm+dcrp41b/16y3PH4Yh5xy8Nx8q51zbtnZCh8bt9/vD8OAIvaGINQJQBhPHZJGYothgu2mau/v3jsPNzc2//MsfHx4e+r5/ja/MbOTNmzcf370fx/HVq1fffvvt3/3N3/7jP/7jN99888MPP9xeXyPi8Xi8vb01cTJrOIKqQgZwBKiWzmsGVGJcFHfZkQogle4foiCDWSVXmazSraxSWWKGQ2bPgKDWNLW53Xq7NcUIYcVYwaVHjrj0n8+BDUBK2YHWt8sLIsFK1XpvNtkmPCaFt254FvOlYQDzVVApPhA2GLLJl/EFVaxHqIhg+odW3YKIQmmyAiQFQhU8uyrY4yhAKjZCNL6eIKIIFYCMxck6X2IT1xKAGmeLZppYH9cFx8U512leuejGnMsYVJ9sp1L5WbBUJATkFXxj2Yk20wR4Qita0g283PtQwEV0Hnfr+hrXOK+vw6UcJPdE2m4BpDEzKiKOqBUeroSCkACZQZ0qpzTm6Nk5ch6lAWRxnnOA7Ng7JUJ0uZ6JZLJRAEgKADlHVASUArdb9F11rlq6BarHeGbeSEHTLYM28I4Bih2lQSEWIascUxrnnBKJOkJVyXH2THmScRzG8eQIt9uOAGOa3333PREJaNu2m+1ut9tl1dNpcAoSk7hkJ+s4jiaeWcjdoibcZZKvZvo+nPphGJxzPngicsSbzWa32w3H0+FwOJ1Ox+Nxnqarq6tvvv66a5rj8SSSd93WMQ3HU386ScpN0x4fD/3xJDHlnB8fH0+nk6o2241miTkBCLLf7nzOOIyzoks5xmztHkyS4lwhylIIPsTQtmG3Cd6BZ9f64L2vAE5UlHEeENz93Yf7+7umCa+ubzzxOPUxxjY0jvh4PE3TBAJ3H+4eHg5XV90XX3z+6tXrN29et41PcYox2snT970NRhHRe0RyBCiKm65zvjGNTZOWJseiCOQEAVRUKtwAKCs4E+esTPm6JoEjQmAtIwsz3cJ6Ij9JjC7aiZ+KFhdp4tJpXL/JsvEvjzHmN/n8g55SR86
T/mmKNcBTKfZMJhhdgU1Ybw+thaEkZ5L1mQkogmzq8sCACTTGWGzXAJqm4YzzOIYQdrvdhw8fnHPHuzsAOB6PALzftx8eRsnJN3mckiOnAiatAeyTDIfTKWW53u2G8fjHP3379vPPiNk34bvvv//s67e3t7eHw+Hu7uGnP/35r371qxC+vL6+Pp1Of/u3f/vrX/+6aVpVbdv2cDh88cUX4BAUCkCsbdg5sW4BqA+spUkszjMrq4KmCEBs3c5FUdMWRTC6HgNZD1MRgNmAryB4Dm+EwlxY4VKG3yXIkTtbWS2nm4Uha28uBD5EBCAh69gJKQgSigopAWRFAsir3p716nKWdQGiBACMClkiVRInFEKfnXhSISWlyy0IxBhFyFjLWmjN9v6e0aA1WMzJgEpIrN1HyxGWeaTkpVhcXxmGqn0O5iz4A+MvMBTlmOVxLcNsIFhgrEuYWlj06y2rMHZ+bHRVrYGu3CvULGMJk6A2lJT6N1jQR4LICnnZsvwVkVX68g6AunLjQ3LWciUlVCIlW29IkZRJhISJEmpG8ZCzxIjIKKwaVJ2IR2ZBJApqOSyRLpJCdqihjlzRTsgisGkHCdT66YzlO0PtZ7AhVkqrk4xtZu+hRAqQEdCQ/IRi6wOWYa5Ow5hjJFTPxKiSctI8z7PJLhOiY18tQ4GIgguIuHj3LAFYVY+PBzNVMXV5K/7mYUgpGaq4eLe2XQhhOJ7MV32aJoRiNe69T/OMKowIKhIlp4QKgd146udxmodxcWOviyMKYM6S1aajnJMkwZglKyI5BU4pjbOmCACQMzCD97Btw6Zt22Ci09p4Dt43PizMOTs/D4fDNE3GI4Rqs2doJgMy2Ebn3Nu3r6+urr788guDs8Y416m7Vt0NMuKN/eecN4rCQssjU6DFQh80+Y6lI2qITV7JuJcItJpAr1WQamJ02RX4/6/b86+9DOqWJ8DT4F3bp6Wpq1WMY1lV1g3YLGLDBUS0Om+73e52G/pgtlqompmxaXiO2YKlIqiiCuasmou9zvF43GwcM0/TNI7T7e3t4TCo6sPDAzPv93sRUdFXr16dTqfdbmfSDVdXV+ba2LZt0zTjODbbdpomreBxS5vIOxdY0MoCKSagWExky6RNFVBqcgzIizakeS2UKSbxUtQVu4ZaEVgDCVhNcVdLOVBHTHVXrwOeRcR0DoQgNj1jrFO1gh4VsqCLCrWRaWEYIdcDZSQNsKGgxVCAs77BKmklhzWykDIIADgEIFmmduYmAECaBcz8CoCrqAsgMqj9gBLgqik9lFig1QfXYBNU7rU+rtvNvs5ZtfEUlnkJGj4PIws7sG7RdTq5PtfXKgBqsy46fx0iBWCy/OJsQmgKpBkqFNUwPIUSaYevSIpXCA1YngKgCAwo1qe1FESlBFYlAEKlqnLvoPSgRQUpZ4SMyJoQyYkS6qzi1LEKAyJQY8sYYAlWWvlJdl4uobysVhbtFBC5QqYQgMrkmbi68NYlzxqPte0MUOgiNjomI7wTaMqYs4iMwwmrgSpISjmmNMdxYiYRYXZt21o3MguQ4zYERCzM9JS1qvDM4zQMg4g03juieZrmefbeHw8Hc+chojY0V7t90zSq+nh3H2OcxnEaRwDYbDZGk0jzRJIREVJMKcs8YRZybhz6HGdrnE4pouPQbWKMQ1abi2TTc9CcFLJgEhClLJAV56jjBCnaYAOIoWl5t9tst23TeOeICYyT7r1vXMPMACSSU5Lj472kuQ1u0waF3PeDpGyB7XQ6pTnGKR0PPRG9uX1zfX399vVtjDHOaZojmhuRYM6GkDbASnA++NCYZiaHhp3jENj76v7AxB6QCRl5Bdfkws9bIA9idZLlOKa5a0Xe+r+qPbWEAVvCPsnPu3yI8PKWOpP7BHFPP/EJFXn3coRbv7xMdGA9gCRRUYNAVLksG+ogkUpCRDRZjFXuXEKdqfspigjZVL2Cy9ahlBlDcG3b1gwDRTIihuCywDTN2TxbgQQwZtW
UGUAF7+4eXr+6DqE5Hg+Pj49ffvn13cOv55zv7x69a968/qxr8+Hh7tWrV3/6059y1mmKTaNXV1en0+nx7j62yTk/9OPmaosFzKGlkickAnaICzuOlLCS0RcUQI1hAoAITOfZnvHxbH3gEuUAUGqSZE4tANXvAagAWAq+sVj0AVQkZ12Qcwl4lXVua1cdVZXAUasrsDlF/cYl+C3ng5VGZL4HluzZulvqgWwnnlRM45K5rLt8C90TyscDMFbaV/HDsbhROpXVIWfZbnhVwDPmU6HKaRdEKJSSpLwrgqJzHlBQUFEoQ66PDYTCglK3k6CgYC6QF0Blm/wBISoZxcF6xKuqjtDpxZYKoDXfc62vogX0D1S/dJFtX8nhUAHPWoQBVX7qt26HtIxBUQDM87fuG0AGNpo5WgxVtaBJ6lAEiUkzqaNMtiQVR08lQipGmAXDW2pO490wkJbUyQDAaLk8gVnGKJqFL2QiD2z6WQgAIJnIkVOiAvsRzZBBJaMAltQsZRHNkyYVEUZCFUNJkgJkWdagIsdMNM4pa/nnUuoZwcgkJ8Z+8N5v2tZkoIsvj/dlRO+9KZsY4HM89dYLNSa7Zdne+5yzA0miKpIENGdJOc9znuNwPBk97jj0ItJ0LXoHKnFOIpCSCKAKCJAAZiUkzjEPUxSRmHKKEAVQYN+CZ/CNa1ofGueYHZNnDc4HdsGkBpRy0mmepjFabdd1XQjBHAcJsOs6Ve37XlLBsrZtu9/vTaslpRRjsvE8M0uGnBWITGM2hMY3oQmd6bxwCGd7P2YzSyIiMH/mYrxcmp+4op8ux8hKvZdjjCqe53+XNRP897h9quL8kV7rua5dPWe1EJ+nlRcZ9ktd08vPWjqnOceU5uXcNlKpcw5wLjq3qimBIKgjACpWbWLsCDycjnOK2013d3d3PB5/+ld/Tb/6bzHGw+Hw+vVr64I8iNhVMM8zYTGiyjkfiYZhsIrQrCVVNU1zcrTd79i7ovAOoIgCxR0WK9qz7qbaJiraLGXG8zT/seqKSghdqjpUqOWdgX/KqUUgJeVQMIGUpXEMVlnaC3UBttg8ipa0g4oUNKkq5ErQzot3A5LUVR7R+pNQO1uGVShDy3LUaKFpvdCKrJZbpZCoPJYz8WB5YJrUSw/yfJIsggdL2bXCiK7eoeDny7s5T7VaBiQ4PwYiVBJQgIKEkSJ+AyiaoVAWqcCzKMkSl2lBbJKikry43bq6YsBTBKMFkxKwMXbKvfmS22GroWZJYQRAkXjxWzczHmuRFhdhQmsSEKD5szNZAFqSbCXMVLkSpMRAKJGQKRuaGMEGyZatQVVHRzRfRrLc1T5dc8UTM2Je2lWKiJTP9EYgRG/tCGQkBCYktAF3cbfKhSlogjAZJEtOOWXPpKqiKcYoOTkEY5EZYdaab6Kac9ZynaH5hNmKP8/zNIw2ithsNm0IRlfw3sdpPjw85pgMtGlgFgCY+uHx8XEYBlA1T/bWhy40njjn7BxqlBRjikAKOqexH6ZpOjw8ZpXT6TQMYyZQR0huTjErZlFRtm6nAKqYl6CLcxyGnHJJUozA23VAqM4Rs7W+1SG0odm0bRua4BpHXlRTysMUh3Ey4rn3DCCahaC4yffHU5xmg/DcXt1eX19f7W8QcewHXcjUoiIpK6oio3fOh6YLoQ0hhBBc03jv2QXnnDMnBybDYi7oTSBmYjL5RcNwggJUHk6Bf1FVJ6EKaKTlgkS+DDPPQ8j/vm9Y/yudOhU6J4tQGuzW5lBVgIjVUtIKEFrh+p50kApR7GzSthByKmFcrZmcUpzmnDMg2XpXaoWiWgmwSOu9frVTxNMwhK7tum4cp4eHh5zzw8PDN998s9tdpZSurq4OhwPT7nQ6tW3bdVvbknPuuu7h/v7q6kpRTqc+Stzud8w4DGPbbIjIutcG/rPLvLjYmzgH6dLmzqBYp3dAdgXYfpQC2SvgEFvehRexm3OTE8ugaDHntTYgVPgBVOocVjsfxApCASg
V/yoROQ9bz2FmKf4MxVGOEZYZHgAUHZdKHyzhBuAM3bSvKBaJl88FAFWQBd21im0AttZXLHH9ngYjXskIIaqoETysMgU0LArZYzVAjYJzzMBg5uzVx2ix9QYGV4bSpPaYidU02gT4XKoWuh0DAJSZKQNUxPKyXSz8gxKKFH1sLonH0q2tSJpFVuD896osvtqiZ9GyurwYjq7MsMpAr95I/FkXgwqxmBABhAEBhS1cSvE1Fgv0TKAFBWpf36EDALIyDsoQH9AcIRhICFltNL00rKzbQrY7BJEQlYBL3mf6ZmYdDIJS7I4gZciSU5SUc04AhKoSk0omFWJnfQqTDbOBVtKiTmlwxFhvs1kLTZOI7Pd7E2QxHTLvfX88vX//3ncdInrvN5uNc26apr7vx9rbtGH+drP13heikiTQrCmDalJNcRqHwbiAKaUomT0DQgbVnKKKqrPETAAEUDLMSVJGgTjMcY5gCxSh856ZfWhGEC0ASSJE9d5v2866rFba5myw0jxPxUoJAOZ5NnpiCEFE7u7uxnEkorZtr19d73Y7k7tTBSJHjKqQcxZbZICc8943IbRN0znnvGu98941uHIUUbLDXr3xwFgKT7Q3z5isutDUTWsB6DPY5EXUyX/HmPdvrfbWczi7L9jOQh3DZbQMy8T7fCsvBThXFxfAURFRyaqFDL5WPy3lBZGqxggpATEIsApIFgIA7xBRMuSkOenhcKCv3zrnhmGapun6+rrvv+v76f7+3q6dm5ub+/t757bv3r3bdA1VWdTr62tDPCHi0A/b7ZY9WeWX5ugCq2qOCYOnijcAAGIgLlMpG5vAwtjUs0k62khkmeFBMWpA0HNVUpp3Yksv1DXTeqfnnQjLGBCWFAKL755tV5DzgStTGsild1roJMuZmZeehfUhEUkhV7ZZXrrpqLKKRFhCHpbhVEHwGMuxnAZagU6A55nR6lN0gTcai7Ss/6pabZC58CKwlpZ1R6gWXKWqLq7hznv3fIa3BsBcNCLgjE8+Xw+Fal5uTwRP8bxfy6yunM1Lva/r5xv9/UUa7MugapQAl2EPAIDZqn7bX7rgvzV7ItNyLOeEo6rzCwikTGAMTguZAjMZCRCJkFaaeuVDqUgtWLKGjIWPYe0LskIRgDwpkKmjKCGWEY5yTd+0DEekRART+8wpS9Kcc045R5CcZlFVkUxkRQWnOM3zaHVMCAEUSsMnBCYf42DRznqb0zQZViXYok8UQtAs/fFkdkLWvSwilqLjqT+dTkVpV4SZt9vtpttAlbqI0wQpE6rknGKO45SmOU0zKlQTnzYjzSlOOYkKoQMCATX7sRjTNOc56jBKEiAEZRJlUfKu7bqOaFRQJGWHwbF3HEKwgOd9IGJVTSnPc5zGeRxnE0JEhTjNOacQgqR893h4fHxkZk++cc3V1ZW1c733zjWmaypZBdR2KiJbp9eiJrJH5yrrTld9S65CqUDISohMQKxIUqyyVRmXswWUqvIGFYneeiqBKVdWZALCk0IH8JMTNj4Lm//obK8Sr+RTYewT70/68vNrNbJ6vQIoJBCqopLlpxkcTkQBEFkBDP9sywmviBmWLkuJiwBQFik5WzdDlrRcgitCCHZd9/BYTlTnHMAsWqd6IjFHwNApApAIxKgp5bu7h5zVe9/348PDw+3t7Z///F3bbr777oftdnt3d7ffbruuE0n7/b4/jZtuBwCn0+lmfxVCa+p31upnT5vNRhGHYQAHNllABg6BqEwusQru1qSokM5Kn0BhkfhHpkVj06Yx9SU2naJqsQCLrg2AaAGELgelqpQt9Z+ttAvEtxzGBeILUDEhz88XrcLTssi0gJV0iyirjfSMgH15ZmHpecpyotXlfzFWq5nQ0zBc46IhWqGeBmd+IZ1njef7Jx//tGosw6AFAvciquWF7bTeeM7Q9En0WgkjPZf9Ls9cMRvWJhFPpyAX988fkL4c9krVX7acw56U8UsJe3YAiMhVqB0RIeX6lOrwQMh
M9tL6AVAzqSXIldwMCvPGVjhLyIgYQVltQk2ElaZYvUJXiQUAA8bayYFsDrcW7WSeZ9vdzOyZEHGeZJomIuu9+XmKqurYe+8JOU/Z6jy7GTl9WfS32y0R3X34+OHDh5zz7e2t77rFjjWOk5HfLVLaSwzAaW9FRDCMZjQgkCUmK/JqKJqTqusaVJxTGqckCD44UCUSySASc5ZxzvMEjxECgGscoZ9TFlEibppWsoAqEThn9uPsA5uuNFTUZS1k4zRN+53z3hPgglwdx/HDhw8AsNvtAgcjY+Scp2ny3gNojHEcppwz+9AEZueYAtXZHpOvrYMz7eTpdaV4Nj2ji+3rQgaf3lYnc+lPQNUfXF4O8N+z1PtfcdPV0nBxwZ7/+WzL01oPVdWKtnotn2vi9Q5Z2pvmB6tFK9zEdGaAEidEJAJQ1Q0RgZQgZzCzLVsAT6fTq9vPrJL79ttvf/azv3r37l1wrm3baRpev379m3/+9eeff24Q0KvtjpmHYcw53769iTGSw7ZtY87H4zFjfvXq1TSPXhms+kTbIUuncn0mFO+9JeLhGd+7lpdb1DmoLKKml1c4IXVRArF1tS5uuNpOFeT58kHRc9tTP7H98vnGF1vPZV9IkT7N7zw/ZX1wRdZPW77Ak+evTgNaJYTPg9eLvwgAHLG8GN6IqP6kJ8rctv6WVX/9k3gd3s63lOTZth+r51SfTPuX68FaChcXCSKSzM+3w5lXxJfP9xVJ/PS2yLIgIqJfHrFbYiqcbScQ3VPAwvnnWLREIaJSCBKAAuUMdSBdaXsIoG4v85zmeVZhR4TgRCRFSMOggqqaBXJKMcc5jjlnxMzsnQsILsc0jXGKDnj3+WdvQwhz3x8O94i6v4GAcRxHTToPD2kaGu87384zAUDTOO8bg1kS0bE/9eMQQui2G6aECI2HOI3ffXgnon6z+fjxY47z7dX19dW2YdDcB0iBomZ5IA4+qKRDP52GwxSnEeaJ5EN/H0KnhKchzqKIDQcvMTc5JQHJKFGHiU6DO8w6R2zaLo5xnmLnddf6QEA84fCOkmy3cNPwhqGhvGmb7bYL2wYdgSf2KADz8XQ43uUUtxve7xqVaZxnxNg2gVDG6XA4fnx1c7PduKurHSKeho9t2242NE0Pp+hEQKhRVKQg3LjQsvfOBXCckYS1aZxzDh1mzI3pGAAhMnlPxWOBwTlAVmJBNMpMaSeptbkXMAuxukV91XIfLq6QqqqJlyuu2FUXHZOUEbEY0i1XueqaDn9x/VcViDrZRwRE0SfPWS5h+kQVSJ+oA2tEXg3hEBRgA2w2kihadR8ZAOpqTSoq9SupqjakTxepUlXkKGajjAJO0ChjOTeCc45OpQkekntIcxx6SbPmTMCqOk85pbLH4ww9z8RE7ATpUSeHiK1TlXSS6zdf/pf/75///d//u8P9P6XTIbzavb3Gj/cf376+vf/4UVL665//9O7uTkSur68FcpL48e7uiy+//Pjw8ebt7a/+l3/+9//+30/T0Xvfdl3wrWOZYSbwmmnX7hFB5pxSJIfeG09PchVfso49oliVTOSs2YOIkGlBMGBR4K9mvFjbv9W3agkJZZ2k5hPH6+Xj61fV1YsnxuUDjD/+/Iumw+LVdbH9U5/CLv/Ip9fvALVGLE3d59tf/vL1gWN+mcDwqVeu//ji8y9uNkF5fvuRfPDiaK0PLTwLe0wvxEIA4Cf2jOvtS2/9CWRu/c8nKbl78v5LtVekGVYeESU3X7nAF6yLrXZ1KonwJFAOw2AqR4XdBSwCpsmyOiznmy0WKUpKwzxOMWYAsuJsnmcTsGgaDwAxphjjeBwNhNk0jXfech+TMfPe56yPj4993zdNs91unXM5zd77GPM8T3Ga55Ry1hzTdru1Oo9AxUxkgFRzCA2ITtMUx8kKLBNJCSEwcwasvA+CDAC5H2cBjIJT0hg1iiIwMeZcxKts74hk0SSApIAIS7+xqTfvGBFNONR+tcFWmdmmet77nOT
x8dHc0W5ubvb7vVkxlHZZzimlGHUR7MbqMYbINjWkFWXFzlhjNBpHyZCfqjZxdCKiKoBs7bpi5p6zjZ6gpm5GPilV+zLEOBd2T9Pn5f5p/nduRi1dl/qE9ctfnBReXF8/fv1ezOqeb1//FU2sxFYgKApERkcTseXMYv/LNUR92ycrA64avIhoB1dynFNe1IIWyQUb0NpKuxgamlK/qBiEiiqCe57nceYYs3POUuGu6/wwee/fv3+/2+2+++67b7755k9/+tOf//zn//Af/sN/+k//6Ze//GWMcb/fPzw8/MM//MN333233XgR8eO4KNNKyvM8O8BqoWhoLLRZPTo9l3a4TLwWprICGLfPSlUCzAwsKNV9nQTF7m07A+vq8aKrdXEPz7Ys2xc6eYEo2hZcnAtAsCQxpGCfniETFGy/2pb6rS7uM8qZora6t/cXKp/O9VMMI2pENHN8KAZulQC33l4EQ8SQpSSQCUixECVf3G4cBVcH+89meC+FSliAOn9ZzPuRJyzV28UV9bwmtVtZmJ6FMY+Xz1yi2ifC3jlFuvjrCzEPinbi89sS8JbPKr939baEy1/tRDMNTwUkQLZueIwRkRnQFK5yUpNvtncjBTMZXryGzQU+xTQM4zSMqti2G6PWmbwyu2InNE3TNM0muLzM/Zc9L5K99+YTO47jzc3N9fU1EXm36bpujnkcRwDIMcUYm8ZfX1/v2q7xpCliLieBKnpP8zCOp+MwnOZ5SjHGaZ6mKbStiKYCEgEANQH7OYkipozTLOMkU4SoCOhtKlOpjJJzFo2EcLWBzWaz6zbbtjOxscaH4LwPTkSGvn94eDidTiLSBt81gVBSjKAUmmaeTuaCe319fX11u9vuTJs0pZSipJTGcZqit7CHRqhMjmJAjM57RM4ypZTYN84VzvumbRFxTsLMlDMzg/NEkhGIHHvnGJiYkESyqEBOpiODxWAFswlL0dJCUQGl2ukyx4/SDKjrIiIQG9xMAECzLsFAz2HmIoTgcjU9ub5Wl+b6waeqPcmpRtA1HgfMXHR17SzvtcyTFpkGrVoUtcIrOk0AABDx6Zcpi0al8NscxSnkSjUzkklc2uyWwy22MCKSs2EogHAJcipGVUd0BFlRBIZh2DTUj4NzTiSmJPvd9cfHj1jtt+7u7n7605/e3Nz88Y9/3O/3Oee+70MIX3z52cePH9+8efPHP/4RwJvbc5zn0DQGIuv7vsPgHFM1tSaybqQSFeMJSwdX+3BBV8IyDwM0VSwhW+hVqOAjzMdBGBVVUA2zKIU8ZQfr4l7wxe3FP6/SxqFaJFxsQTAhEUDJoErGy0BBJdMrNtQaolzckwqak+LTe4baAwFlWziNfb6IlyhQgcAIAlUVF6NBn59j2EBFoz4UZQJDSLy43Yh+52oPPlHzPQtvCJ+IiP+mm6x6uM/j35IMXlR7z8OYw/W1d77+1mSpp+HwhQ7nRXb55Layt12XabSu6tb9JVpVk6W3LjXbJQQkVBFCLF0L7z0AOXSIpAI2qIgxWuVRKpFSYQAALPBLEQtmvm2DcdSG8SQi23bbNI1IsnXB0IwGvFzGXfYNzTPP7Bc2m03btiLShNB13Xj3MAynnIus4vVuv+s23jGBJlVVJQV7IDEZOnSepnma7LuhgiadU5xjjkpILEA5ac46A4LQJDomHaKOGTIonoV4ilUeymx42+vr5urqarvdhhCcY3NHsl1nutt930/TVLjkzDlHRARA+2vOebPZvHnzWcG4ppSKAHeOcZ6meSoAeEHEKFkEDOYwjPNyfMkVrh4RTZsNYnFOYKMxOM/MiuCbpt1su65Dcgu83ns2jN/SHrC6ZDltltLTniD6JB1c8nGo6aCIrThPztvl+esL83nMW8eVv/D6vRjUPX+Hi4s3r1ikcp4tAVL1o0Z8YmGNl1jN5eOW/W+9EDmjgTAClMtklRMXeG3VAkWs3rmqICqlJiYBJYScYE45ZT0dB1AapjiO49XVjX774XA
4fP75533fb7fdH/7wh1/84hfjOP7zP//z3/zN3/zxj38koi+/+vzq6uq77757+/YtQlxIFIjIzDmneZy6zq9LCPtogIWlXj2GAFbAJssTaj6hBJARlFAUiw4GFkHg5R7tHjQhIkImFQAyxZOn9/ridoAnTnVYpZ+hetQt1SBa/WS2MzWDqeE0AViwgct7vRS3O++Q8qCoO1uYQVCDWxLap1rNlgCBIK8f218VlDALAILlzTmrGpDyR7Y7Kr3Rcr9Ijl0AYJ5dHWcszY9fOfiJLBJeinarx0/i0LLleSRzqy3rB+vG45Pw+axpuawXL4ZDfKn5Cc+aoufv/yQ8a0X6SSHlFIDoeSITSi+eRFSyLPLHkARq1ANBKCMjVcEUxS74zWYTQktEqvnUHy3CmT1Q389xzpIBEa09GGOcpsJJt199d3f3+Hg0l2ojbtvFNg7z4/3D4eERjJnHJuiMKBpzTDFqSqxiK/upfxyGYZ6mPMc8xxwjAIQQ5pjinFM2yXDImk2ZZcwkojHJKBLVoF0ECMRkvPuco2J0AN7zpmuvd+1+U4gKjjg4b2FvHsYYp2keYpoINDg2cyWJ0vo2pXQ4HPrjsQ3hZn+zadqkojHiNMcYpxRTlJimOOcxL31LwJRjkmmemT15BwCgRkl3Bhxl5r4fiAiZjBxtYY+Isor3PrSbruuarvXeM3kAQGws1TBB8IuwV9piItZ5JiJgcxtGJCocHjzzcmTV6H7pfDufh/IMGrAOYxehCz4d/56PPJZojS81SKV+Dpb6RQtmtfjqmbqDVboEYFJZRaZZq/sIAFhtR2QEKkOjsF0RiGiqC6Z7l5I1UGERgdO6AoNpOddvqWqgUi1aFYhEdDgdNy2lJPMU377aNj483N1PwwgAj/cPjvibr76+3l/96ds//PUvfvH7f/mXH77//ovP37ah6Y+nq6ursX9QzfZ9mrb13jOokWhLhZTFgJfERESKYnB2U/+qRfuilml7xmS5LLaZ1wBQcZ7HVcMPyhYFVBBZiHYrRaxVc/DF7apiOlpoUdY0tWqldLGdgASkIj7AcKC1OasvfSoIpBe3m2yPMdWwRpHCvF9oCmiBELDqOC+QyPMOhKLbuejLFKYe/tj2YlCw5HT/apPzzGH831bwPQ916/t1tfc8QK4vcn5p44+FPX457C3vfxnhPjHzW0g8F2sKMT/93NKjLzjPZ//NybIcEYGUsiS1ojBLlNVt+VFWr1j0smFbjHlRnmzb1jln5UWM0SgEtrbmnG1ttYw4xmTMhKurq/1+bzWWAa/7vj8cDjFGIjW0Z9s0KJolpjjlORqFVqLEGB8fH81ayJQ8bb5CxCJJVQGZyWekGCWmnAEPY8wZYoaYrLXHRFwV8wBATNSdHWy69urqyjlZMgyTzGBm0Xw8Pi7iMos2TYyRVG28dzqdYoxX+5uu60QUCWKMElOMeYoxJck5iYBZyeO5aicRUE2OTaBARQRFchaiDADZiSpwkfyQmBJmVcIpziICek++CEVudts2dDHPFvacc2tvQouaFyctImYRNoXfLOuAt74ufiRW4Utl05OwJFUa2CQdztKA8OIMZv1BP/Lp5xKNnmAFsdIzSuB5lgPbFVTrM1p95zXM7Ty/tNO7JJWq8zyLSAjhsR9s3KmKKmViuiwjZPZJqoqQFVEkOCD2GfD+/mH35VvnQkrC7F+9enU8Ht+9e/dXf/VXj4/3n3322YcPH3a73S9+8YvT6fTll1/++te//v7777fb7VdfffX+/XuDkiZNzByapuhF1Au2ZK0iK0txs46R0uMz97vyzUuhrLCq+FVqI7t2KamyBmqvsohN1V7py+vtSyhLsOW8dEGLh47RwasaTO1Pa1o+8el2rdt1/a3O91r9Ry/vrdCtrf4zHw5RSTTXYFxC73JPQFkzKiEKKqrmJfRm68fiAsV8YTsgEtSwB886nBeX2Sq7/ORl8OLtL3nORdSBp2HvR2IeIvKzAHkRRC8eEJ+ffwH
CfDG8KfHz8g4qpGX9cvvO7NzFu138zNXbFNoCmpqaiIrEmOY5aSyHQpJo1vXekChEHAJZ6xIRRVLOMQTvnHPeOpkl6s3zvGsbRKyiYt6kyE6n0zRFVTV4CDObZywizlO6u3uYpuh9Q0T7zfbm+rpxLueoWQwGQqiqEGOcxnnqh/54mqYJ66Te2nsiIICKhMgpQz+naU4A0EdICVIy5QIi9kk0SyYFQiAGQvSIIXDbht22Yx6JCEGYwFSoJcWxn/u+B4A0RxB1jbE1YE6RCdMczZKta7e73a5tWwAk5nlK45wMN5uSZLXA2xphI4TAwZuRLAKzd7bLzb3DuWDVWNs1RMTsF7SLkUwYSVXmecyTjP1xPPWHw6H1AfwTVnXTNEY6dME3TdNA45wr5mSESEo2C6221uWcqT0h28l1xRAAqHKWz2Z7uoguolGay8tyWi7dpWpUALIaS+HinvB8pV907cr90h+yjz3TpdGMtqvSrD1fAEDXDD12uvi0adVTBlATVzR1Y2VQUIyK4FyY5xmBnQsANE9JRJqmGYaxoF0WKzuTrbRaAhWKbKOoSFYQginFw/F0TOOXX7x1Pkwx9+Nk2YldLwCQUjoej8z85u2rP/zhDz//+c+/++47sxP5+uuvnXNAQURkimmap3EM3lNJK6MCk/PL4pBTBgDPigrEQMU4zvbkKt4XFZXzgSz/0irfX//+/B4UrEf6whr7ibC3xE5CyLW0wkrkq4/Pf1V169CyBJXle17c4yeqvUIz1GJhVxWXAZWXSSEszVVVIjO3k/UcEa1t+Sz0LvXi8+2Gmz1Xe0tWtSRWF6GrLL7PkJw/Htt+5E8X8cxu6zD8l4Q9+gSx4cXnA8CLs71PxTxEFHy5yblGiq5/rG2HJxEX7PpR03stCw4W7QaT+VBCTJIhxjgPU5xz23YLjOViL9nSbL1KyyuJsfWmIiHzPKc0WwdyHMc3N9dLUtwE1zSNOQelJMzcNJ0hApbZ0jiOj4+PKaVt2yGi6ZRIimgaxllAFEAUMWdNSXJM0zQNw2CtVEInkucYY0ZRUnQZeMppnPIwgSIkdnNOswAimD5qSilq8oAmz+sZA4F33ATXnt0Oio8BACwjSTDNjiJJzIUBoGDcfO+arut2u33btloVk3LWeU7znOKcsmZEavd7q5ubTdc0TfAtMwPR4lZR7KAqqtMV3vo55plzRbvd2Pe0FbM/HU7HR1Aij03T+KaxbxtCuL6+3l3tiajddJvNZuEgutIzNRUNsOG+6dlneAL/xdU8bA0Nu7haL7bXKPUkY13C3nnL0/tSJ60Wh5LerTAB6weyGh6D1nkNAFUjGFxNHJdr/KIle/7+z7LGRXXI9vwwDKbAMI5jjDEnXTu4ngEaxgIkMp+3rJoEximmaUyDxiSBXIzT6Tg8Pj5O03Rzc/Px48fdbvPDDz/c3NyM43h3d8fMb968efv27cPD3TAMP/zww6tXr+J0RESRQiFNKZEhshIjgapbfqBKEhHfeizmALkKHFoVIqCkKHVLCXJLtWcx0uqwF+sq6+EWdeBnVbtJqrxYzS8Z0xIqsDYbrQO5KKMszcYn2/Hcclw/p95nVCKUi/s6ucugRCiGfzCpRwUFzQqKJsSvoqCoCkgvbC+KBqs0bskSXtpu4d+tLw1EXBWdenHC2fbltF4WdPiRVKK+6vltTUhYP1hjZ16MYReRzH2imcmXzcayXWq1V35AvbS4Qlcu32dlCnOx9Hwqsi5vbGeiGAIK1UxSa78RSMk5N/TZORTVeZ7TNJOCc04FUSFHQUTvmxSnaZqIcLvdZpT7+/u2Dfv9XnI6Ho9I0HUNZJnnkcpQXft+zFl3uyvnnHUsN5vNpttazLDl8vr62vtmmqYY4263I6LT6fT+/QcAU6eU25urTdvM4yBpNrN1A7Oo4jiOZml7d/fgnLu6usk5D+M0TXGKkgCdb0VgjjiM0xB1FkgKc4RH829zCEoqGbIgqgdmlcYjap4naDq4vtr
st62k5BsCzYjaNJ4A+uNxnqcYp/1uc3d3F+O02+02XTuNg6q2betFIEv2bRtws9k0PohIjHlOaZpiP0zGMiZy3jkmTy6wD+QCmm27c6Fp2LmmaaDIylsMKtlMynNKKU3GhkYRSdNszkqW2RARiEgsj9MM0zCGtrEa4v7jx9Ph8Pbzz7uue3h42Gw2r169Mnwpe7fb7dJUuB+SswB4LjNLV7h9lnSYftASxnRpbq3zJGYmJjRR1lXbbZkmYiWeEtHiF7NEHQtF8rRrhvViT3N8QvBY4l9BfpKoCCREJHVEVBChRRrL0L2kqux86Q0UkcYnQwpbWgWSLcjWwGRyKck8JQB6eDh8fH/3+vXbYRju7x9izMMATeePh9i1HCWZgGIubpI5QxntseM5y2dfvPnuD+++/fZPv/z5T//lv/3uFz/9xWevNc9xmiZNmQCG0+n4+Ng1DWQhhe//9Od///f/h//pf/q/q+rp8bDfbJsmPD48NE1zPB4B4Hg8vnn7tjQ/ACXlOCuStm2D5JImVAFVUXTOMXvPHHOGrFmLrStWjKXkpVRfw/aKs6kpKuoZuWPK1aCYEWoDdXWvnwhLUGdjeIGStAqZFifV4m+XC9WnRtz66bhQRRbRTsNtYkFRXtwbuhKXz61G22KF4znirL+pnTNQw7Q11cuJWdO58wn8NPY86cmdO3LwUm/zR8LV/8bb8+7f+v5H/nnxgD7R5FwjP9cPCnD8+fZPVJMXRd7yzE+GPYSLt637jVU1J6ilCSnkOOeKMMy1UnSS46wZgKx0GMeRULuuI0LT0Nput13X5JxPx+M8z4g6jrFxnpkRYBlrmVvePM9EZO4BBmK0QeB2uwMAQxjajHAYBjOrM1/1JgSTqpQc0xxRARVyFq0ZtzmYE5FkmPKUssaYYwZFUuUpaz/paZzHBEk5g58hRtWMpUqoxiHgAInUAaGIIwgBNp1vHTnWwIWxZ/OSGKNBZU2SwwhwdrPulu1q51xL5p7B8zzHLHHO94fHGPM4zTkrETl2TQjeN4YFJMchNG3btW0bmsb7xmyV0LG5JdgIR0QaaFJKcxwtciyjwb7vQdSZRQQTiqY0g+Q2tEkRksR+jDHmae4VPuK77X7HzATYh1OaY5JMRHGaNy6ggtrYj2rqtJo+nK/eigTBlxCbL77qxatvefLFRlxN+i+anCW6v9T8JNPcXdqkZcSmjrhaU579WEpvf8XhW5Wq50mH+XJauR9XRWcJzCLjOHrvh2ESJQBISRjBjpdZvSEiAeQioYiqmgEdYlZIGfpxnqaYYr77eB8atmqybdu+701+s+/7zaa1c75t29evX3/33XcPDw9v3rzp+yMzi2JK6f7+/ubmpoCxD9N2u3Gh9ADO5XIWWJiLUj1SURkQ7L4A+kueVY2JbD0xqonFoPNa/uS+tL7hL72Hmsh84l6xWENYNDaiy/PPBX363bDO7Yxj8BffQzUL/PH7pZXw/MT+S8Zqbn36/kUv/jcCWJ53WpYr7SLe1MvmEmOyfnARkwCAIb+8nZcXPg1yDC9vR4Czn7vWBEsrzfzy9inIjOLZKRtWawcR2xVu7FUAilFizM5hSinPUWYprSwAzZIgEREqaBZuXNd1eY73h0Pb7Dbdpts0p9PhcDioqnM0TQOjFnXN/tj3PSI2oWubzeH+hwUJMo3zMAwLCtRIvpvNpmkaM0D/+PHjPCVmbrrG0DGqKilLmrF4QJmzUOr7vj+N/Wl0oUlJUkwpSRJEdiqESuMk/TA/DjJnUI8CLokbc8yeUIxEJQ4KOc0hBEZMygi7rbvZtdvGNYxtQ53ZHhBpzjEnImob770/Hh8BIHjvuKjEombNCkqO2HtHRFlhHMfTME5j7PsxxRxzylmJPbGyAgP5pmmqH/1ms+m6rW+bYr3EwQXP7C28LUSxnHNIDQCAqIik7RhjvPvwUUQARXIUkTxHFUHEnGYE0KwpRQV1jHEc3p2Ox+O267qUZs0xhLCIHL3d30hsbee
jY7CpnIhmO7tWV1ZRINaL9cfWApFsIpCLv4+djebgWDGWiCAGn8uS9SILtIydy0WvFeetBXWpWfLy5CXyOaHlBeYmppBEkYucbYbFvQQzKmRlQC4+pCZxr0m1uLCrtfgQFRkZGFndYEAtALDm/Dynu4/3u91VjL8nR8wwxxwCz7EOqBBNXJLQiYgioOQ5Js88Tykp9P04jBM5/+7j3c9/8mWOSbM0bffh48OXn38xnPpTe7zabTXLPE7Dqf/J19+8+/6Huw8fT599fn27QcRxmF/fvno8HtI0P9zf73a7x+Odc9xuGgDPSARo6pY5CjMjEippFinm8qsl8KK8LqpmsNRAWrZ8CrvyI423l57+ie3rc2aVl+gnPxfO+crTB/+mrwNnUc5P/X0l1GnnyI9vf/F7umXh/ktKPfh0GPu33j7V5HyxyINPhz2qPsIvVmDP/6T48izwk9Xks7D34mvP72/uelhw2MsZAwAVHkEAaDwfA4bkrCnlGJMqqrBNlaxcKzdPItL3/el0utq/UtVpmsZxNuAGs7d0Muds5rGquul2ZjZraixWLRngm6rz+FIwpZQeHh4+fvx4Op08NcG5EELjgyPSnDRHzRLnaB2tHFPf98fj0aIma5CsKeqcsyJl5SwwC/RTGpMmhQSYEkXVOcGcKYfSN0GArMqIxMCgjkESOIZ911zv2rah1mHnyWA71U2Nba4JAIfDIQQXQouIBciK6pwzJKH9RhQVgaGfTqdhnKNN+Ji8803TNF27bZpuc3Pdtu1uv9/vr4286JrA5K22cME8bCGlNKVo4i+QxXCqlhPkOKWUuqYdh+F0OsRxitMU5xmMZDnPC9GwCcEFdxqHh1M/T1Pe7yHLPE72bayZ2QFbK86HEDQoEhIplMoG6Hy1lsLoR0u9pSR6PrBfLnabDlosed7YWFd46wryYvC8vJXoamhHjpQAQFTjWpsXyQoHVROupkrnf9KhrYTVcgXZSc5NY8nHcj6P4zgO083bt03jBTxzTBM0jZvmmYlXHEdYHO4EaZTcCEwpKsA4pRjjptudjoN1+0MI1vawyGot/c1mg4jjOH75ZeHtff/9919+/T8ej8dpmr788ksX/PF4PBwOX3/99TAdIUuc5ilgCC4Ebz1hkWiDajTTn8q7KMt0oSesbnIOP88P30sL9L9tHf7xp68DXv0OL3f7lrY5PC1pbLF/CdSSX9yOoP/G7/Py9h//1Y5KNnfWOqoPPvHB+V+Jxhe3TzVF63DteXgrn1O3LNzV5a9V1NV6lvBy1fWpsLSM3eDpY/xEU3QJexd//VS1B+ZAiFilR8upJaImbVxH31mEEHyckr1WVeMULeG1togKknMhBEmzgcecc943j4/3MU1LT894C5rjPI/H4yml1HXb/X6PiH0/tG1rtnnjOPZ9b9HUe28Pmqbz3p9Op48fPw7D4Jzrmm0bnOEsjJonKYPmceob54lcnKb+eBpOJv7ixzmmJFFUlLNiynmIMkaYZhFy3DAlirMMUVLGDE7U/OSpEG9JCYBRWdUzbAJsWte1vvPUBmpDCecA6hy34f/H3p89SZIm+YGYqn6XmfkVEZmVWdVVfQ0wB0DuggAE+7AQWRFSVoTCF5J/LN/4QhGuCFZWlgOA3B3M2dPV1XVlZhzubtd3qCofPnOPyCOyKqurZ2YxMKmO9rQwt7Dz009Vf0ew1hZO49inOHXtRa12ljRLKdZ7Y0wTmqUNSaIsMcZhGPp+nGJyLoSma1fr1WZzcXG12+1WqzWEEEJo16sa85wNxtkTiMaCqYpx5FQbVVVtukZrwTOXlOYcU0YloufPn4/D0Ho3DkPfH6MqM1P1UGItUioy0DvXOJ9Ds++PcZxqUTo3TeVcAkB/OEphZfFNEGZCdM4hQLXaqdFE6b4YuDyHcFLdPL3CFSiBp64bnp/C+v6+RkkCXZyjl+0e4gjPgkFvRDgAUJF74M8DYWhVPfEd+WR8L7KIsREAgCGLpEiWKGqpU7t
KP1WtOSdZu8z/Xp/RI6Cpzw+zjMNcWTPTNF0i7na7/XFGBAaoerZQVbVOvaH6+uuitwXGOBHwBnKGcYqX61V/ODZN07atiLx48cJ7fzweq3D54XDYbDZVhN059/z588Ph8O2337568UxEYkwVUJpzztOoqpvNBkBjjIDFe9s0wZJREmFCIaxkVTCEZBAN0skB6OF4LQAgdD9Y3ec176KCnG7mhzWhHs+Jlhqo6msfkN79h+t9e2OnqqpAFajyxk89eZW+uR4Y3rO85sZ+rtK+Y/37cr2H2R58v94efmBv77HAe0Y8wruCHzyIKG/8882w9CDbe8+H837eqd4Cj/cC3wh7b5/RG7MwAFhIOa8bKiEQkSWyVfqEeQFKnKfh56k0AllrCEwFSlQr9Fq33O12qjpNMabBmNrtwCogMuelaWetP/H5CjPvdpuKc6v5GTyYiFSJy6qlNM8zIrZt24XOkbGEAMDMmkvOUXMBliSJOVYNzJxZpCo9IKAxFlWBC4sii7DU0rNDtIJaRHOpEraGRQCwSu5Wv3kCrcCwJsC6C6233kLjbRNssPdVnUozIAPjNFeZzWr+Xuf+9XMVaoFFjYWneT7eHYdhSKmEENp2s95tt9uL9fbi4uLy4uKi6zquBkNtE0KwxhtjKnKzDta1qIcnY7Ra4VzeK0JjjFqr4J2xiLhuO29os17v1utqzCsiDm0FwceYZxYD6Jxrm2aeppoNnN9caMV7X20BmDnEUFnPS9N3ecyIiEDvk7C3kZwPH8uHidpJ7PS1aetrzcLTB3zQ43g4IDz8WU7LOfSehWlUFYCqY6ExRhWLcNUgRTBAZ0aHJSI23nuvqtbaOtCfVPQeFtbw7aBbCaaVrqqqt7e3V1dXd4cvEQhBcs4KVc6/6n4onLoedSx2Foy1ouoanCc9HoePLnYppb7v1+v1fr8vpaw32+PxuFqtamZZ4aOIOI7jRx99tN/v//Iv//Krr776xS9+0bSrw+GAhpqmiSV/++23Hz2/yjmVUlLieZ5Xq85YY4wRXtLiehGWrsZ3DaFvp+CPjavfmeh8/+WDUqjzb9/M+dRUTOibP5cw+eb6e6O673c8r/WAv1+qB7W3986O9+OR78OKtY9ne+8OJG8UP78z7JnHw94714P5gDCJiPSa/cf3eaROZYpF42dZnHMAyKzCCoqERkREFzTg2a+HiFRARFLKAKCF8xxzidXfu2maYRgAoKpwqUjXdc65winnWEtwlW8XY1SFpmnq577vh2Fg1mqnV0tGlfw+TdM0TYhYw0bjKp6eURRUuZQUY55jCH4apuNxGOcpZlZVEc05s2vQGEDRVEqROXEqImCQbGGZCo9J5iwZgIGIKAubqrOAWJtPFYJtEFat266b4IxBCN60jXVkokAdIC0ZBU6J53GahvHyaue9JwVgqSHQWiqlzNOUYsxFSuH94XB7u88lN0179fTparVb77abza7p1qvVumlbH1ptnLXW2VCJayJSyzhFM5zmzmgWJjsiliHRWXdOANE4G4yReZ4R1Z/UXKwxU+NFBAvknA2CMxRLLjEKF0AMwWcuIIyqyjyNg3CB1UpzAWEpOXlfSjaIROicI2voZNH3kN18rom98XA+JAK9Dht5LX68H6KiqmhOUIyTBFfdWw0Dle65VL+rbWFZNiBragmamTPzPFfsq0G6F7gxxhTfeO8fPpbe2zM5Z1H+rJh5VEXwzp1BTHW6VssnX3311S/+4I/g119679uSY1FrXGYBUBDRk5/MAgfVqgekWUrjwjjMx+PRWqsKv/nNby4vL7/66qs6dzwcDvM8W2uJTLVsXK/Xt7e3z549e/78+eeff/7y5cuf//znl7vdixcvVpt1ztmRGccxxlWdqyEu+C9buTFAzIpZENQQIRgAFZHTMEOnMeR8I0/aTg+nIIjwSNaFDw3dvsfy6Civ8naqB/DAl/aNhR4c22sfEM6CZw9+nrK9N9cLPp5Wvet4KpnjXcf57jOr6+9
R+9+zt/ehSM73b/92nvdG8fDhlu/O0u6roK+tf3eFs6bR71r/8K+8c/0bH15/RO4nX6qKeO+AtTAmBV3jSoGcswrUoCOSmQuizbnEGHNmOlFHcs7HQ991nUWqU/6u65wzdYZrrbWu6ftUbQeMxWGMdTYdgvO+YeY5jt6H9Xqd03GapmEY5nl2NtSsqBrGqup0ckJfBCGraNbpRODES5vnebNe9dwPw3GaMlhDZCXnGCOLQSzCGmMe5jRFSAICAqHhrFOcp6gJQCEgkuhiDVKvcb1Cy6tkIITQNsFaqkHCOWcRU0YissbUTKsiUSvQrsqy5JydN03TVM7ieByYmQVSypViYYJfr9fr9Xq1Wm+32/VqF7q2adoQWu99WXK7e8j+wsQ9mUNhtZuS1/256ofCyiJSRAFJRZWZjQIAVBgtIo63vbfObbartuvHoe/7WNt+ztaJTmUmpHkB0EJZKnvMDERt24bcVDk0fcgTeIB4fLjm4bt8/nxu8tVY8vBZPUe4N5KJ85JzrAdTo9o5nzs7ONZkFBGX5I+XAGmMMW6q63PO41wNwsgYYypiyFpjzKBYKfw1NbTWVo3Z9XoNAAD3Gj311JumqbhNWMTWY30Gbm9v//erjTHGe9OBy/1kjGPNIrnOXVSW+p8ggGgqHGMUKm3XMkCMSw/i66+/fv78ed/3V1dX9fZPU+0UuL7vK+3yeNz/5Cc/2W63Xdf99re/+uabb5rQqWrbttXJ63K1GcexbRvrLdKiJlPKYm6qqsxaP+PizKCWvmNK/XAIev8Q/SMuH/q38HX8C8BJv+D7//xBx/Ohx2kffvOND+9cvnPvb+ynztfeXv8YpOU06r7zeBZb1xOdsoaxOs68e/9vBz96hHjwKI+Qcv0HACCea00AZ5GBU3dw4UfmhV5dkzYVQ9ZY51SsMHMBZmUVVWVGZioTDkNOM9eSYyklztM8z6ExoimzAooxBobBEE4AAIAASURBVEA4qzB7pyJpGqeScmg8kZUEFrylMKd5zjMiOkshCOJcioyjHPv85VfXqvpPfvmJcz5O08XFxTQchSXPcxl6Hvtao+u6gOklIjbBGWP2N7evXr0iosuri7t+f9vvB04UPBMNU5pLRmd0jOQcohE1E1PPwmTZNVPRQUovHAmQCCgXFRGxFFQKiTqDHpgKWIBAcNnRrrGr4K1BY1DJi/iMaAMbY9oQjDHTMB73+1zipm0a4+Z9j6AXFxdd06YppXk2CoWhFJ3mdDwe98MM1nXbi/XuSdjs2vWlW20gNMVYtj6RLax516gqqnhLzqAFVE6aSvAGWZgziiKUs96jmTbMLAhkjaIUZiYVY5ixLKK9Ss4agDxrmmbsrCFCRMOMjaFghmMfY/QKhERimgzOYGCCiQ2PHz3bDcMg02C7ToX30wjT8NFHH+XJhBDIB0BWRGsr40PhgYoPIJJZyPUppYoVMYhwVv4s5UwSQEQDCghLEgdcu2tnSWUFzSWvyszMmcsi7RZzTDEn7qeZuSq+ACuUssjfIIVKOEFUgKyqMU7zPJOBEELjHQlxHDmCMWitbe2Gx76/rWpt3nvP65V2HQ9DCCG0K7QoyIhorXXe3ZZbWDuIQYIl5/rjMB7Hxq+cHr/6/MuffvrZv////dms8OTZR99ev4qiBcCQgikIUhREhZSAyBApYlF3O0S08GKG/+HP/+Kf/fGf8Bd/NvfHn/30ky9++/l6vVqvwjQNzoaS8fmzT/p9OnTDqg2/+qs/+1f/6p//T3j42S9+/md//p9+/stfbC83X3z5m5/85Cc3t6/64U4A2tCUWC4vL2OMhtvhlnc/uXA7EZFx7KeUfAdqJOdIRGg8M6vWSZ4nIlUQEYRqK33uYZ1afadh+I05uuKHRcT7sfV1rst90rVEr3O/zrxze3hz8xPA2PJ3bf8GM+c7irdvnK9Ieedmj8WpirF6txne+y7T9yj0PTzEM/LnseN+a/v3rX8sh3vPn3h743dmh+/cwxu/+v7
nXmtltaRT/znnucqaqCrSvatD7WpgAEQ8Wxmc5+b4YIciWvtY+/2+7/umadbrtfe+xLlOw/FklnsW3mTmeY5fffWVqj579kxV+74PztYZ+hzTNI4V+Vk1VpxzIsk5pyr7/X6Mc9M0iDjPaRzHlJIIgK39UUJEUHKNj6kM8zDOGhkQgawD49I8lMIsVeFvKTDJWRQXBdEigrXgCRoL666rx9A6a91S+61onXqVKiSnCoduulWMsbruAUAttS0tNOY45ynOmdV771xYr9dVDyW0jffeWGtOscEYE2OGZbIlCIBkLBJYNw8DqAALCCvwOftZIzGzIpI1glCkiCpaFBEpqZSCINWO3SKpc3BS+TFIGEBZUKH1IcZIiDllVVXv66zr3K9CxMyMzJW72Q9Dt96ckzY9gRpEpKqfvPE8v1GrhNcSuPuwV1lWS91y2cnyGFdqZkopp5G5UvSrnGmKc46FU0pzzFOcU8wMJ9KOEkBMcywlV8Ko99ZaWxEcyrKAMJVVlRlKKUMuIgJUq+4WEc3+LjRNaNumaZpu3bat9c6eOoXQQIUfhxCatg2hnYdZWec5HY9Hv96GNox9jDEiknOsDDVRFBFYZDqkIm4IAauZt0IGmOfU92OHOE2xvlmICIB1DtG265QSwkLWLFyOh+FnP/vZF9/cee///M///Gc/+9lqtRqGwXt/OBwUMcboG1/LMPM8+2BzzoIFThqkXNQYqYqypyx8uX1V0YKZrcN3Dkrfv4/1QcuHplO/7/W/p8V+aNHye2Z7D27Vu3l4j2//7uj4mPHQY/t5zJb2sZWPHcB9nnf/xYfV84ffrUe4DC5ESmQQjNR6WCnMp+KTgsgyspSEqFB7FVVUHlSdc3mOYE4KngpSuJLNK5OBmZumaX3QwnUoSSkZY5yzddZMRDFOwzBcX++Px+PV1dVut5uGARHbtp2miXM8Czs1znfNopJM1jvnh+F4d3sQke12q8y3t7fH41ANHBANKFU3FBFFhSnlftI5g1hQYwtATHlKORUti1CQChALsMJZDoRUCMEb6IJdN2a323TeBue7riEjyiLC1pqKwhMpMU7zPBbOTbtumoZzaZrGEMYYI8+IOEzT3eGQBVJKsWRC27Zt16032231yG2axntPxtbIV980TJmIHBkSllK44i0IsABUhR0BEWSWXGIppaCpQcJaKwhFsoAagyJSShJmAvDWOmMNoDUkIosepjEAQB0657TwPM8xhHEcOeWqQaOqMcZxHI0xaIyccJI1nDfdipmVWVVloeuBiHjzjoLN22/rg1Jn1UcRRJQqb6bMwsiACkQAqqIVejlN0yRzYuaUqyVvnlKMc06lDMMQU5nnOZYMAISWrCGyRkUFU55r2IOuMW1rjAFmUSliagZKiFWjmUsBAGectw5NterVUkoZhpTSnFKMbWi7WhQlotY1iGScb5vVer1tuvaw73NKscDN3f4qdE3oeB+HeQJC6z2UUnV7BBAAawdJgUWQCFUWxwgGmCIfjsPa+kM//sHznzVNU41Ka51zu5YUI6KmlKQU6/Tm5uaf/JM//Ob6P+x2u88///zZs2dPnj795ptvnn50FUIYpmmeZx/cNI6r9TrGaB3FGK2tT5irImtI6pypbmJEVMeHh/fu4fj8cKQ6kxfhRwp+76hP/r2u/70u9kMv2Xdu/1YU0cd+9c6Vj0Fa3h/G4JHo9VjMe/tXj2WN+t5dAbwjij9kSlW7US7VVrtyT03VZ6itkZLunY/qV+pMNo7TQ1RCbWvVLh0RXVxcrFarOhJV472U0nq9rrT0+sV5nu/u7m5v766urq6urqQURLzcXXhvj/v9NPQVUt+27aoJZ2cAZ221LI85eesQMeU89NM4TrkIoFFVBi1VkLPoMY5cFAx4Z9nYUrQf58OkGSEpiAKQUUR5IKhPWBXOFACMQR/sqm3W3cqiIKm1RIiZBQCstadGTg2EAgBVbmq9XivAnOI0jqUUZanYnCEXYbAVz9q2Tbe
qPTxCS0RojLXWWH9CTKgHYwCNguRSOAsAE1kyJTMzlxxrMsTMMSUu5S5HZkZU46wiMLOgVDVUYFFhg1TDnkVCWAxmT8w0JKLGeXDgrF11XesXUStjTC3XLAgRImYmZlVlqTqrBY2x1c6OFirDw8cSX7c+fjsbOH1+U+1FdbGjO+uWnVTRc0k5JimFY0wLQ26epzjnxNe3NwBnUWkASDKDqhKnWuRsnDXGODLAhbmgIRVREetcnXYYAFAkZyoQt2maKtyM1lhrWbQ2glNKinTWjN3nQ7BOWVjBBe+bzlo/QXKeDkPfThMYWwQ0ZvCeyFoLzCwAxCqVtq4qCiknMRZVzAkdkosep2gv/DTNvglNuxqGoygDLCqvbSuIOvSTs/npR5f7/fHjT55tt9uc86tXr1JKiFgbzxcXF7XlWJViLi4v62+nadqu0TmnEHImVYhz1Yhfyj9LIxOqxWAdbd6tVvr4NP2DhvP7fcLrEejx9fhh2+MHbv+BJ/BYPHr/+g/O9r7ncTw4sXfPSr5PeINHAtvDu/6dyM+3v3Le4DsTPqjYs/ud3H9Webi3B+7qWEFNWCFYco+BWG4qnp+bApLOviRaMfLuRMdeDk6hDnw1F4wxphwvLy936zUhMDMRCes4DPVSeBuISKTEGIfDcLw7eu+fP39uieZ5vry8bHw4HvdEVBkL1pha26w8NQDQLMdDPwyTMc65MI7z4XCcUsqlqKISFdZUcow5JmaGYwRrDTUejYtZpjz3k/YCRMCKQKRogFAXjb+q0YcEi9IyIXpjQ/DOm6odX1FtiGAsWkeaAYRrZgwAtRBasXzH43EeRlAthY/H43DsU0oja2V/r9dr64Nx/mEYqCwI45wxpgiUUtYAtGhcimZOzIiYDFX32nmeK/Ckegxx5qSVgSfGGCAUEaQF04EKBtQQWTK2+jSKtt7jqexsLTkyteVWSvLer1atKpecqw938NY4MsZoVfsspZRiqgVuzmgMGqunUjYaIsJqKk6I9fN5Ddr7msT5AQRVc7YeB6iuM7g0Y1h4KUuoMrCUnLXwHEtKaTwtdW6RM/f9cCZ7LDODoiLiykRd57vOWouoRkVZuEi7XgEAIREiAdHiQ0nWLkVgEVHOIoLqAMB5z8wpZ845peS9d6EhouvbuzYEZ2wqQtY736glVuk225vb/WEYYxZFUCSR+vYuJXVdrOSqKNqCgmUVBTBkBHgusB9GvNrkwsM4t+1qGI6lFBAE1WHod7udIzv2fduhc26ap36YPv7441evXm02m7u7uydPnlxdXs7z3HVd1zT1nQUAa0zwHpbwaZ0LzoYa0nJOkiXGXPV0ai74kKf4/gTox0r4/lGlevB3kO3d90jfC2n5PmEP3hXGPjRMvv9X73iMlkrnozES3qL2L6NStUdX4aUxhFVA8aEvbx0QHzrq1c6B5LIkbbrQofJp8njWDKswTmttitN+vw/BVzWQWis7HA6VAfb06XPvfY6x67rL3cXt7e3+9na329XGYYUjsuWz99sU4zAMKeamaQDgeOz3+0PKDECAVhRjLDGXOeZUQATAOSFbgGIsxynuR54EAICBgEiRAA1XjSkyiIoiBFINhiyqMei9bXwAAGOQCBQY0XjvrSMAWK1WOecK2DNEq9XiJTQO4zAMJaaa6R6Px+OxBwFoO2t96FZNt3KhQbLGeiBrfSDras63XNtKr54iEQmCCBTmxKUIq8Cc4jDFpaMJWp35SimZMuciwkQEtJDqvHWAQoCOjKWqmrNoj+SYai/KWeO9V7u4SRRhK2K822w2uUTU5UmOOakqn3Sii4plNs4WEcOMIjUpPz+ND4tdZ0cOAKhkOHh9Qv3wrcST7fjyU4RVq6xa5cbklOI8D/08T6kfDn3fV8LAknoicmFJi8sUApBxlkzXui547xARCNQb45zTBruuAyRdkhlEMIiIhLvLC6nNPURWqYBRAKiw2FRq549KKUHBe39xcRGcQ0QQXa02vmlEYUzZWAv
WDnEeYwECQJNSsVXHnNCgAeDCiqKIQMZYNESUM4sqATJAATiMsR+DtfTl1y8udxt3vIsxo6IxpvZcG+fnefahqQJJ19fXV1fPhmHYbrc3NzdPnz799NNPv/72q9vb27ZtAaAG7IqUrndqHFNt5hlySBXVWVQ156rmY/Sh8xwiPGJt/+io9eHh70NSMQDAD9seP3D7R07gsaD495/tPZItPaqx+cj2j66HR8LPY1Oed37lYRh+Y1fvPovl3j3o7VVmzGmo0XvZXISqpUqERACkLKWwCKgCIqlWIwyt89raLdECS4NBtYY3yaUghhAMUsWLSynKDCIgEoJHYVV2ziHINPbTNAHAuttYWsLh3M/DfihZmtCt2jbHaK1dr9fzPA/DULuDRMSlqKo3y0hcS6bDOGUWRCMCKcV+mKY5iQCgVTLCkhKPc04FpNpu+S6zTrn0YxxmHgowVJ1HYiRGqhKQckp2VRIIEKEz6AwG55sQ2jaoSvC+C9ZbY61x3iBqKalru5yES0bQpm3W61WlqA/DUJOw4/F4PA5jP4iAMRTazjpPxlofuq4z1lvfuNDUypuqZi6ciYiYlZnnKYoqA7JI4hJTnlOcCwvSuIS9XJhLkTHOMcZsc47LuF8Xb533XqRUcqE31jsXnK9/cSt58fOzJhTOlXWGYJ2txVIfbGiMiBgka+2YcozRgFprlRaMu3PuIUpFVVmFBPREtHjslbzHsZz+uTAxqoaJ1Pp5rlY5FgmkCOeSM6c8TP08jP3dNI2xhr0YY63aLSVWVRSxLITQOB+6tgvNujOtb8mRCBBRaLqmaawLaI0xBsmoalFQXXRcLi4u9GQdlVnrJIOczUVUNQjnnGvFW7koU9tsvLGIqK2s1pvNZtOtV2E/vLq5Q7KFdUwZjWWAiaUTpSLOGW9sRiycRQEQDFLtmiuSqChSBbrkojd3d5eXF59/8dtPP/tvVsfNcBxrr304DGmaJTT1aG+u7y6ervtxDu18ubvIMd3d3L548eL58+dd097d3S3i7/PkGtP3fdu2Oceu63IupTBRqalyJeYTWkSuhlaVuXu+Zeat3t7b49j7B/rvM27/o0r14EdHcr4rvL2bh/f49u+uN74f0vKeyPfY9h+0H3g9vn7nJVr0i1lFQFgRjbVVn0WZhXmRNk4pablvXDMzp1z5W/bkUl2zvTNv7Kwu1rZtfzze3NyUUtbdqmmahUucS63OqWrt/zHzbrezZF68eAEs6/X6xTffEmopxQAaY5oQas1wv99XS506Oa0ze2ZlAXIEYIpAyjlmYAG0iGgT2ZTzmHksMlV3IQARQEsKpECqUgkbiz4OAwAQqEHjDXlrgrNtCCjJeRMaZ4msoxAC83KJKlDTOdd1Xdu2UqT6wlfgz83N3atX0VrYrm0IwTYdomEBBXK+8U1nfTBk0VgFKsIlKhax1opAKaXkxKyFOTJnLv0cD0M/zLEAjnMcxzmmwqwxxuPQj+NYnNRDwlMzpnE+hCCFEdGRcdY2PnRd13VNCEG9cWSc58aHrNAIu8JIcNXu0jxmBkS0jlSVgS2ayvYT0KZpBCmmRNZU3bLa65JqnVOxLSK+cWdU6sMprL61vL5eatdaWVQEWIClknuqAkCOKY7T0PdxGPOU0tjzNErJRsEQEuIU59rFNM5Xa9aKG7IklXvOgAAQfBvaxlgPhmrHE4CyMABYF5xzAFo1MEMIiqaUogjGmEpMKsLzPPfjVAEyqvri629CCI0PoGqtXW02V5dPY9bPv/wKyIloKYLeCi8nWwSMoq2Vk9PrSQZzyUSWmQGI0BqLWgqSvz2M6y28uh5C0/qmI2s4F2stM1RA2dJi3O8vP9ow8+3t7U9/+lMA+Pzzz29ubm5ubi4vd8fjsZQSQqh47Mq1jTGuVisEo4sLUmWqUR0h3WlBROEK9Vyqv+8cGN+IEz9WkfMfyPrf9/JokfOx9Y/NMh4LYOeW7Pfe/rVtHitmfud+Hlv/2H7e+OJ5cm3ogTf
TiYQOAJVhAwvepIoqkTFGjbXWisg8zaWIIWcMMevYD0RWGGJMORZmlqLVXzqlBCzWWpBqsOfa0ADAMAzK3DRNIao60QBwsdtaa4XzPMk0DSIlON+2bYwxWAeK0xSHYZKiIYTGtznli4sLVLi7uyulaOGYJlVNeSm+IWJtX5WY0jTv744hBEQchmEchpxYFEvhi+3m2I/HfkwZqhO5InDRDLSP6fYQcwEhYAO1eVdAtXYmEQjpYcHNYnXQUkNQS5lpnrYr771v25YASo5jf6wCZ85gVA7ObDab9Wqbcz4cD8MwoOI4ji9fXr+8iSrQtuSbVgHa9cbZ0K2a0K4UjSKJCGsJZlESyTkVkeqWrqpoTBHep/lufzxMw3GaX9zcXt/dFcAp5mGYhnFOacEmqOpEZ5UrsIjWWksJcaj3ruJWnLGVgh1CKBeN936zalctO2+doXXXrFftYRgJ1RMys7HovUcEUeWcQ1h0QZVM23VkF4tdEQFmQ1hlBMCQ8U6qdRmCgIownOX36ORKCognVJSI+OCYWcriHrFwYREBpDBrycKsUoSzilgyVhNLtJoaYuvAAAoCqa7Cur6btWTXNE0I3lqk0JC1aF2z0M+99c6QM97R0te03pAhV/udDImIKlRKka0jQAMA5AwAWLX1Yi74r1JiP0suo/YlppJyCGF7eXEYxsvLy+v9kZzdXe2+ebGfAVaNr56sYyxjLFXGLwQSgaILc79agCUuiAaty6wUQNDOEV7d3P3yl//kV3/118HbFNN2uzrs9wjwT//on97cvNhcrm5v99vtWnL59quvn33y8b/4F//iT//0T7/67Zdd1zjnSsqWzGazWSqcKW9Xay3cNt08z9auu64tJdbccRiGj549WVxqF+kWQARrDdJro+55BvOjYzK+9/j/Yft520D4h/3dx5bHjuex/dTr9sG9vffYw76/2Pih699Zz/yg/fzgPw3vysffdcXfXXZfZJwUCS0iVgVF5mriCAQGoFRxRWPMPGURoZO4YgjBkvHeV/1MAFiQLCmJSC12VTGLE4xT0CIiVr+9qroSp7nCQRFxtVrVMFxtbEWEc5HCKaUuNM65qidZX7nKiyCyOceUSsmiitZ6IB2nOMaUCwiQIYdkc+GUePaQRDNCocX2uAACklYlLXyX/JECAZCCQfKWgnMhhM1mvWq8JRItxqC1vtaViahtW1W11s5xHPqpVttQ8Xg8Vhhk00BteQLAdnfpfROCs9YX0TJH55yxvj+OIbTGWSJCxZoQT9OE5MBQTOUuztfH4WZ/9+Lm5np/6OdYREuubAVC4yuENDTWGeucq7gaZ+oOqd4gYGHmXMoYy/W0B4DDl/266z56cvWTTz55crnpbDOLwBwlqENUS0osUdmyMWSRyCyVLliCnSFrqgOREi7aMY8s52cVT8brb5Q3K+fvxGSQxY++as0wo6iKKIuyoCgqGEAq2UBpDfnGIyLQojPSNE21Yqv1STKm2oroyQRcWQRQQYTEGPZsFIxBZGYuQiRVpUWpIGLtctVCKKAsnb8avZcfWAPVH1xcTNO03+8zz/WNeP78o/V6++wnn/6//4d/9x/+l7+NAG1jO+enXKY5+lNv/kTdA1Wk2nRARjib/CkLqMrtBJcpdyvzq19/vu6an/70p7/94nPNIoKl8Gq1evntC79yKaU1htpHr12DEMJHH32UUrq5uUHEzWZTH8j6Xp9jVS1dMCsXrX6RomWe5xSLAi9Tlge9/4fOGPB4tvdjLR+afv1Yadzfcdr3wb29x5KkRdjpkSLnw5WPlRbfX5/8YbndB2358J/3XVa9fxBPW9ZyK8Ay1zgfqAElAWBmyZWWvtiL1BadIa1+EapazZoNGMnRGgMIIlIDXoV25xirJFSMceyHeZyMMcF5Y9A5g6gxTjkmSyYEZy1551NK/WE43B1z5vW6bZrOGLNarQAgjlOa5moVVHLMOVdEeNu2wQcRzvMSWdt2FWOe5xRjzlzxisYgXu+
PKUsWMNYiWVGTSplSGiHPWZIsSb0QVhVFAFCgE4JQTu1PcAAWwSBYU9t71nvfBHex21gS1aJciMh5Q5ZUVXK0hESGWfq+3+/30xRVVVj3+/04gjXQrrz1Ho1DxH6cL0O3vbhardYikjIzsyJkLpoiclkQkIiKIKDf3twpwjjH6/3tt9c31/u7u34YYpoT13tuTPX7c9UaNKlaJOO888GRgZObjbHBACiCQ0QwRRfPGsNmyCne3PSFnw67jy4vrnbb3WqFlsUaQmCRqCyFrCM2xpEFRVUxgoRL0wuNEb1/I4hO2JBTofUNgfVzR+AhWOAUDvn8X20nK2dh1pKX0MisnFUKgSBI11hn1NvlVgoQM2cWZlZABkUwyApFKQGicb4hVZJMxMYYY8SrGCNzYWuzcw50EUirEOIa9oiKGGPEkgtoDZJoYTUEpGTQoa2NTxF5eXNdSrGGLrabcRxfvnz18uXL43HYXF79V//V/25zefH1y5sXL2++vD4qwJPt5vpwNIQGSRBEAVgWpSUCElRYBP9rEqWqEYCFLq6effnbbz59/vTnP//lN19+NfMIghXm+uLFi5//01+kOKXMMRerWEoZj33XdR9/9Ozbb7897g8XFxeVaKhaPRgXCqa1dp4nY0wpJaXkgwmhETXHI1SGg/MIAAosyvS6hNZ5DHxYsv4RF/zAThv+SB27D93+R1keDXvfiZx5O5w8Uq78jm+9/RV4K+a9d/+Prn/nMT+2/kM/PIQYvDHpZuaiUns/rJoLl1yFFkFUalCsNnv1Zahxjk9s9pqZVcuFqrc79P3CT2/bEExVlx7HUVW7ruu6zpGp5s43Nzc55zqg1ITvvHGVgCk5c06llFXbrlarrmmJKE6LtmE9mxjjMEy1OGPIxsJzzMdekICcIRsEMJWSMrPQEOOUCwsoAiCIoiJhxe7AEvMAAEAQARUsgTPoLHljvXXB2zYsXgq4APfE2Cq+VXtvuV7waZoOh77v+1IEEXMqMQoodCvbtm3lloQQDn3vXLPaTKFZeV/rvoBojsdjSmmcU+UFO+eqLcBhjsM4vry5/ebVy9vjfogpqyiSab0wiICCYQGVAgKqmrBoLiCqqUyipdS6oq1DOVnjm65bh03TGOeI7KWL47Ef+sORS7k7zoXnwpHzc3OFiN47NQZUWBi4Tg0SEZGwMaYevFpySnrm5JmaCNI5wj0MdXAqcp4Bn/D6yIIs5wdVRFCUmbWwMGsNZoU5F2auOZ8PDhFVYylSVApXvEtmAECjZAgRDaESKoHCOM31r1tc6hneZ2s9qnjfeO8JsDZ6FzO9JeyRtVZqSgSeiFgKEaE1ZA2CISJLqKqfPvt4GIb6uMZ5ZC6o4r3967/4y7nw8XiUXC4vLwvry7t+mqYCiGrIEqjUBBeRFm1MEFVFQIMoUKVv0CjMqewuVizw1Tcvnl1dbbfb413vrW2akHNGhDplHMeRDOy6tfc+pWSt3Ww24zj2U39Gb9aEL5V87k8fpkMIpIIxZtHSdV0tEZ+7hudMvWbP5/t7HnbqdXvoYPP6AP1DwsBjYMvHxv/vD87ED/67H3oCH8hEAIQf3Nt7OxjQI4ijBy5Rv1Nv70PD2IcWXd+Zcb71FXrw3WrqXcWLa5sfARbCUB0EUat/bC65eOsAYDESSinnLKVKEFVyQ/XfAhFR5jTP0zQZYziXaRirEFdTlwA553EYckre+9W6bV31Rj++evHycOjX6/VuvSUiFPDGzcNY/WljjFzroqUg4sXFRVVgqkXUeZ45FxAdh6k/DvMUAcA6p4rTOO6PJWcIDXjfCNo5l2nOMasqzikn0bPuE9eZANbHsXIb6yUSACAAFABUi+StCcF3XbdarTar1oAKLMmNtdYYKpJLScZAKXka4zAM4zgzcx0tD32fGVyApmutDyJivdteXLTtJYG5ud33Q2yaZrVadet1CG693ZYixk3TNGUuMad6WW57ubm9/frFtzf7wxBjQUBCNcZWafySuRR
WMFDNwcE3YKRgzqDAzDlGAPCuGaexiCqCcb45duvtptusfWhHR2azubrYGkQomfN8PY5zTtbaUjpTkfSGwNQExNSojCrGGKl9UbbGGDS2hgdjDNT0jggQpR7wQtcDxIW9p4SqJylHRBFVQlFwVOuJNfjpogKwAIZZSiop55zzHGvzrPbUpjnmzIW1qBbWxJJFBRQJERlI0SigQUTW+VScrU4LGgSNEUtYRIsseuvGGEVgFWcVEdFUX/V6aEpE1e0J1VlVsloJf4DQ+IAgwVtmBs3VTsvEFBrHs85jf31z06x2q3V7GPopFwXHaBAIUUAzCGh9ccsyj1EABGsA0YAqaoGbu70zutnsXr68/vO/+ssn2+0L9wIUVk176Mftbj1N09OnF1MacpFpGLumBYCc83a7vbq6klsRkbEfDJL3q9qP4Fy4FKxoL8S6fS6l69pt6NbrdUrxjdFMgVnyyavktfnNj54V/aNK9eA9SM73x+nHIgq8I7w9GmbeH5Yexjx4PEx+n/D5fb4FbwVReD8/BvBsKPzwUEspXGMhGhWouvWqaqxX1pRKian25JillCKCOWeD1YHMiEhO6WzpUkcdAKhhz1oLUGrehoht23rvhSXGuL+9m+fZGNN1XTXGVNWU0u3tbaVb1cGtTjmrnmdwnplr8TOd/mjfT9UozlqriuOchqmMM/gGrPeEJrPOcxzmzIJElLmIgACQAUVU1moicnqal4SPFBAVEAqAFSBU71zXNKs2rNpmtVph1ToxBlCtrbw6EBF38tEexzEvhkcS59T3gAhNMBWVY4zputVms+k+/lntetYa4/F47McREbfbi3ri3vspzvv9/ng83tzcfLPnu8Px2E9F1HgnokVFitT/aRZSsGQ9GmsMASpFRAgWG29KUVMQ0YTGSjKJOGVO85jznHhOnLquC93TNvh2u10FrznGocccDeLtsedcUAXWsgqusYYsmDMEoJbeDNsHnkE11SMiIHojCXj72X7Y2zs38157XOv6xRyWSymaSkxzjqnEFKe5ZlRTmlNKcypFhFUVbBHIgqm2/1hZmanq8CgiKmdrrbfOWusEGYoCWpWskDPPKVkiY4wzdimzN4aIjHNQzThUqx+6b4KePEBQ6XwR9rfXBGidDc7DdlvdzOd5RpWu6z755JMp5pe3d9OcUWEbzItsFIlrtxkNGqxW9aoqspi+oQqae/3bMeur2/3TP/zp/tX+13/75fN/+V9/8vzjFy9e6Ekpdx5G8/yJ9Q4A9vv9er0ObVPbz5vNZpiH6pQ5z7ML7iyDV8Hb9bWqd0ZUYoxcWu8bOB1VTYVrRVdVS+FzqDtDdnHxXn7n0PzBMeCdqdt7qn3fP9V7/+E8sp+/k2zvsV+/v/j5dm70NgAEF+TkD8zSHkv13tj/d2ZsHxQm4fW7/uBXr+V5Dy4HASDSPapzQTlXMAvclzGttXUiXb1aCEz12fDWiQhSxfJhxVnUqFbRMVW7qG3b4DyI1sxMpHRdt2paYBmGeZqm4/EYQvC+aZrGmAUUczgcDodDZdqeha27rru4uKgNhhjjWBPBXCor/HgcVbWqeg7zdDzEGMFa2G66AiayTHOeY85Ji6hCNdIDIDjn9UvvAeuAe77sCqioYAGcgWCdc4uzWkXeL85quMA/RYpIUeU4xjjPaR7zHIsCIuac5zkzQwhQmfsL5cNaIrK+aVduAdPOcyVZ55z3+2NFSFa55/1+f319fXd392KgeU45M5FFFZHCrIggwqpoyQRjG+O9sR4NAYw8WjSdxZU3hbBPERQCoTS2Q5dFp5IiFygxxzEb6HNKCDqMgNg40262AXVlTOzvkug4RwuKHDA0IFYMKKioYG0s1iRMREQMPlgexLyH752eSOhv1zbPL5TWsRW4mi2cpERzSklinuMcpzlNcw17JeeBUyllTiUXKcKpxDGVlBmsZ4GsxKIFENEoGiKa88E51wQXQgjWWUfO2NrERQWH2PrQdiGE4EoFfFlrrVc
1tU5LBFwYQdUDgEpBVC1w7l8+ubyQXKYUp6Gf4+y9327XuZS+7//qb3/97YvbDABoQvBONKVMpltCHBEZNYDKRatIIAGcetIglX1bjDGoMkZNsbTt6sVXh+vr659/+tnhcCilNN5P06SowzCtdm0uSVUrC7a+yES0bjtSmHPKOQ/DUBsN9bnt+95frFiqIJkhJGbJOVtH1traXK/h84HywAJBqk/COex90Fj//uUfW6oHP4C399DQ5+GHOr5/UF0RHg9C36e8+cP2/571b2/z9s14+4F7Y5uapaE5hUYlRDHoiAikglQUdennEVHTNJWTVFGa1dyu6krDyeZtMRYnYuZU5to2qKneNE1VMiql9PTp065b8rxavaygxzr3dM5R5Zk1zeVuN00D55KmOcYohasUVt34ZMUg0xT7HhShXUHXdUMseYoxxlJUBApDYVB3qm8CAQHwW1dMl8rmUiQ0EIJzzgXr6hy2kvSNVVym9gKcAUw99+PhUONxKaUq/TNzzmAtOHfWJeEa/2qSZ+19x7qmv2cE3RTnu7u7/X5/NpQfx7mUwgoKUIt8guC9LUUMoDXGGeusdWhQVEUdgCVsrGmCy1BmhCKMXAyicdYiSpRY4jxz0UPO8Zo5OP/kakfmWRMu2tYFRAdidOOkFJWU0kzokADEiSUj52dPyfAp8p0fvzdeDX3QYIYH9YlaOXhjJg6V9P1gOSuOllKklFrerFOrOM8551E45zzHHHMZ5/kwzofj1M/pbn/MSkmEhRipKrIRkQvFe981Tdu23plgnbHo0VxcbpHVW7Nqu0vYICJYBQBrlif8NSVbvXeNX1oAtNgiXh8PbdsG721HaOh4PL548eIv/+pvfvHLn//1rz8fRiAPMfHE7A0YA0AWWUSZFMhYAmZFLWoMqQgiGKjFThZFVVUg79wY083NzdPLLmf4zW++/Gd/+Eer1ep4d2ia7vZwa5zZ7/e7J5t+OF50XX0+m6apnNrqHajHQ+2pA0CdidY+ffd0l6dcSvHeIXkAKKUoYAi2jqL16a0vu6pa684ydec7VZmOHzp0v2f50MDz2PY/1n5+34s19t2X77EJhUKGOtDVh/L0wbhH+XDvWf/2ft4mSMB7t4fX/3mvHA2PJHlg3rm+lGIeOo4uhUo6mxUvXgJV0g/AWssgVaQKLQpISiXGuHJmSQ1ZORWj5KizFMosd9f94e5orW19EBEt6o1PeTCE3jXE+bDv+5evUkyN89X6p5/7EEK3MoCxWGrbNt+Ytl0RWgSXoqQI45hub46bzYUIljRbawFzP9ze7W+G8aby8Jyzotkbs7voNp2ZpztHlPI0T8ecSimSE+YZSqTd88/6vn/VjzHGcYQo0KxxdfnseixTop7tsdBQylyAFRiwSFWZlswAIoSIJAC8IBeAEKFi0lRRVVdYdk4v1rTtcNuaTWuMyXGO4CxCsQattxYpxdQPx3EcyzwwM3B2Ro0IF4YkVsFYCI6crxRxZ1dN9HiTRz5eX5iLsG66ZuO3qzCmm+P+cJjGot63petEWwxX2vc333z79cuvpWmnYcglo2DOKRcgAItyue4AIOcMUBj4bo7GmCdPnnxl/uWqCbBuRi7QjLtP/gA53rx88c2Xv/n42bObu/1nP/vFT4L/X//sL6xtnG2bgUMQd5BiU3ZG/S4D99NotYM8dUZd532go8amxNZ4w8E5RwYVoOQYq36bIcmJiYyUrBZEAB06a4zzpiAiUJWCM4hGCEWJjBMUkIQKAEKiKkIiFiFYW5TjHCHOJmVIOcWkcxrmKQ9TP85DmodUhiXUxVzkLqZX++NXd4dXx+GYJSn5bte03W53uW67cTjevnw1TaMDOGZvMsIwGZ0cmdb6xpJD+mwyVtEaWDd52/PFjjer1jn3NM2I2LXtZiPrAnXQN8YgJUTUKmxrMxCJIUS0jqaS2FEWANdIWO9nkLD7X3711VH8TDBNQBaYcFIlNi0MLKwsxhhvPJEB0YIwFwEgRlA
EqZwLVACYicViyXCXdYWr5mJ80cf/8T/+p3/6y1+q/eb65lvwNmo+DOMXX7+6uroK5nA4HLabz3IZWULJ5snzZ/v9vr9Ohflic1Eyd7ttmufWhnmYy+G4DqEUnva9X/kZgGVY7zZSwLYdiptT4jiuVitDME0zmgYJrXcu+IfF6inOD5L/++yf9Qcmgm/Y6p2Lhw/GvfMY/OD3p/HzXl7xze0f+3Pvpturvpvn9x6QzffZ/4MIkuEHZHvwe065fk9/4rH9v/NX32cCUu0LakMOEeW01JinilxKFZIgtSBwvNtXlx88YTWrwJUNxhlTy5vjOJYK4bN2GPrEpRb0KyyTmadpyjk757xbKqJ9P1R5QO9913Wr1otISnPNAquUSXUas86c9TxzzkNKJeYiWkqZpxRjTimr6jTWgDdPExgDl098aFeqWGHrOXEuNfmoiPBHL9S5q7Rc0/p6KgRfyAApVKGZxZJUtZTiLNT8L+VUJY/neXYP75QSANdBvlkZFxrftOhs0642F7vV7iKE0CcYxzGVQmSN9aFtL3cXlxdPXt7cplwki7VW8vHm5ubm5jrGqEZRGIVrQ8UbWK3bal2LiIfD4e7uriorfvzxx7/85S9//uRPpuHYOLNpPeTJaFk1xv3zP37xzde/+tWvrLVPry6e/eTTy6unf/YXf5Fi6boG0YxTX14UtKZpfBtcKcUQNE3rscwxEwt1DsimwoEq2L0mOkylMHPiYpmr5DOq1mK1eS33Q8SFSnHyqIDlMzAsRYclR+STVEsRYeFSSsxpivM4zcehPwzjENOUS8wlFR5ZD+N0fXu47sfMst5eXHQr47v1xSWrEpGy8GyCtRS8tXa13gKAcJbCFtAA5hiPw3i4uW6c3a673WYzp2mch7ZtrTGw9dbanBnQCC9S4z4ApoIGKjljScNg0Wk7IQSXvLaUMs/zzc3NOI7MgACIBlFBQGFBjS2h1BAsJXR5rCUkoCKyTOlKSSWPM9xc333bfWsIV6vVNI0iEmOs9KFpmkJor6+vP/m0e/Xq5ieffTb2Q9u2XdfFcZqmqXYxV6uVM1RfPWstkgKhXbgnVMeEOi+EU/pOhqqJ/Buj1jnUndc8pIS70L7nfXzH+X4gnfx7Ln9fadz3OTAAsPiIG+9jceExHt7DNfA7hKvv2Xt7+6+/8eGHITkf/vN0297d4+QiRFTBFyJQkfeIeE71JDORdTYoa4z5eDzmXCpFV6qrSynGmGCctbZkqQr3ImLQFJUxztVz4GywWbjEGCu3r3Wh2gMdj8ccUxV6bJqmaXytUN3d3Y1DX3I2VfCJoG3bRdBSshSeppmq/DFixZeyCpKdx5hinkdIEXaXcHX1VNHcHodcypzLOMc5V5lpo6Ly+pzuVEerV0xUARdCFFoiY8ggtU2pc4VV167WbWi8MWAB0jxY4xCRmeeprz25UooP+JCnpKpE4BzUK3m+0c5VPbDu48ufWOsVcZ7TNKd5nse7wxyzb7txHPs55szj1E/9EZQvdtub44AgCEKIztnGh6uri6urqz/6oz+6ubn567/+6xjDRx/97Je//OVnn3327Nmzn/7r/9Ov/+avN53/b/71v3z+5OKbL3/967/+i+tvv/3/9He//Nmnf/wnf3KzP3z07JP/+//1//an//4/fvvy5a/+6q/GYbq5u53GPo6TN/bq6goudprz1aYxko43L0oeAEmQYpqN98osiCha3UtiyRijDy2UYkVIFbTS9tAioQIhQu2hVuzDyQPyBGCh+8kHkQqAaGWqC2gRnkuJKQ/zPMQ0xDLEMmSeU4mppJJ7arNDdxmeXMBTG9QYRssCiVlSylxQWdJkgJHUGYjDYIyBwsrsQli3bSLDc5rmIefYz/3L2+tu1Wy364uLi67rYKBV2z65VLBOFAPLmoz1LmU2gtYv4byOSERYSkGkUgozIFEppR+H67vbb799eTwMRcCYOngtD80yMztRPrTieEAIFyWUN/ISVkiFBUAUmFU
FGeA4zF988cXPf/bZZrVW5swp5Xkee84xkF2tVv00TtNknC+lEMB2u92u1vvCNzc3Xdsej8f26dOcWRUr7owskrPn7qyqllyQKqVeGKSUYrBi3N4ctSv7okqNv7HAUp94x/Io4eHHXv4eO3ZvHMY71xO+V6UFH4978F2R48dqsH2fL37n9m+GScDv3Bhf5z88tpynaaXIAtc09V0yCgLAiGiQYo593zOrMcaSq6YztRVakxsRKTlXLAwRceHargOAarIDix2PAkAl+qBiSqkmf0TknK+twUr4Ox6Px+MxxQkAShYAaJp2s9msVx0zpzwXLiLCilKZyqLMUrKoVme1BABdB6tuQ0T9FI/9GFnnWMakiUEMqIGimkEemxbcX1IAVCawVaLK2hKs6drQdV3jHSIiijV2FgFlZkwpDcMwDEO1VTqrSNa9VdCGNSe6iKoK5MxnmNwKtGl8s+pAKeYy9NPN4XAcxpgLgEpJfT/M4+C8+fjZ07Zt+fMvPUEqptKn2rZdr9eN9/M8157oZrP54z/+43/zb/7Np59+2nWd3TR/3l9/+/mrhtLPP/1kPNwebl4N/d0/++M/AoD/+v/wL/7Df/xfh2nuGv/s+dNSSvrsJ19++fX1zat5nm5vXn351RfW0ZPLi26z2WxXHtloiaMBC4ygFCqJXhlAqw8T25zJmlgyFMfMhgWtQb2n350m/2axaMN79VolRDWECqpsFksbRZBKLiEUwgKaRA8xTilNRTNQBsrgMmoCtbuPLEALqAgpleMw9fvjOI7Xr25ySUTQNR5AOE0ppZyiVTIu1EZxKR0455Ccc8aucykxTeM03cXpZuzX49B1XXLmcrsratS4yNrGXFQVwDnng7XglNAgoSE0VFW+VKWqVqKxqjpN0/X19eFwSEmhnpcuZBjEewcDVsET+pHg3ouKABUBVEFBEYqACIhCYc2sxofGjohwc5N/8kluQuODNQopSZzGu5vr9RMPSrvd5d3NzU9/+Qd3NzcXV09ijOtNx6XcXF8ry3G/3202Utham2J0zlkw1pAIIIsYUmbRqseioMqsKSWD7o1Y9fa4d655npdUCnzI8mOJnJ2PB16PfD/Kzn/UReE9dPXHw96bG/zoJcq/r2LpGzHvXZHv/lqdcMlYmXlnkJUWRanWbs6gLYXHce77vkrtIWIVaBARQ2AIpJRYSpxT7VSDyDRNd3d3iBBCaNum8n6MMTZQG0IXGmYexuFwOMRpBlHnfQhh3XallOM0Ho/HsR+qSwAA5Cyr1Wp3sdlut44w58xFuagIpJjHcUy1wpnzNMWUOCUoGUKA3fYytM1xmG6PQ8o8Fp6yJIEMAHVoBmBQAoF7K8wlzwMA1VqFE5SqtqsO0VsilRC61aprvSNQ5UIGLBnCivcpaZqnaarodu+9lPGMvD9P4U/2ddaFoMZWtXsAFNFhPKpq4myMRaLVOoT1s6dFXt3ebi82/pUdhl6h7LZd13XOhnHs5zlk1qZpNpuN916RhOFwd+ut+eSTT6r/3JdffnHojyGEr2//3X/80z/94je//n/+P3Dd+t169ezpxXq9JoXffv3VX/zVXz55+uzZJz9h5p/97Gcff/zxby433tg0jbeHY8rpi89/rVzgF7+A3c4a3bahWW+IUPKUQYwPgNVuoF5FQaHCSqmifkot2T14IO/pzESV0k5QKaQnNS+sM1xVVRIANCfYEaGiYaQsOjHfDfMU02Gcx8QFQK0jFxogePKspDz1/f7m9vb69nC3zzFK4TiNJSUioJyRNC7wY/h0d+GdyYiHFMvU35bMAv04qCEwhMaYtilaBi77mxu4uSnWX17Os9Asuu3GxoftMPababvp2jYwcAvBWkuCRi0pENWZJRKRdQ6NSynt9/tUVLG6PIIIiwIAElkAOSMhKzUCEa21vMwbXoegKyAZYRaAxNLPs0dyjQGEIjyOc/BkrfVkDUJM06uX3+7claHQOC8MlfJhvUfE1Wa9WrWbzYYA52G
cpsG5ENomxyM80IqjhUWo1lqDhKhgjCqgoAgQvTYCnfkn8Lo28jnMqKrDd2d1fzeJ1z+QVO87lx+S7b0nzMCPnfN9z6Lo9z+kh4ZUb8+e3lP2fG3R5VETuQfa1bDHzJXiVK2z5mkahqHE4r1X1lQlLiraEwkqzy+XeV6wyynGfhj6vl9tVta7pmnI2Wpb4Jyr8K15nvf7w/F4zKlY66twewghxlhlKlNKFQFYqQjb7baqJZWYqjRMzoxgco7TFOc5CasIJOZ5hlQAAdpm1a7WLHLsp3GKQjYmKQxaIQAEgFBQ39MW0Gq2REQqhGBRDIFDRdKuDV0bDIGIkCECEikIkjNLzlVBrXY0nXPzXEpmETHkAMpim0NYr7Czwbah7dar1Wq1WnnvnfdkoBoUsgooskIqsmrapoGS8t12zZK9977pSOHjp1c5Z0GsfEdjfUplnGcXGjKuaZoi2jTNzc3N51/8JqVyc9i/+ObbYX9TcjoaKk+vCMrNzavLy0sR+fLLL60Lz37y2RTjertLmT/7ySfzOJUcX766ub7b98MwjkN/PHTeBk/aBtesEHHuNeZIKiuPqBXBi4BoVLMwcDlBOkFE6kgJJwAnIp16e4bInt9QBajdLABAFQQiACBiVUVg1awyFx5T3s/zfprGKe3nlIuC88F3oWmttbcqqfBhnq73+5tXr8bDUQpbAGQpcS7MOc1IVLSgNT6EMh2xREDjSYEhpzlmrjXJggAEYI3xBqtLluqrPmXowd5kwXV7XAV/OW3neZ5jt1mtKnbRe2ssWVdr+U4JQZgFLICqzvN8e3sLALUaqnqWDKy2TQAAAoDV8uCkhy11borAAKQq52HaWC5CqrlIP06tMQ3YVGJo4dj3TWO7zgKIsejBpWk+HPrN7mIcx6urq7ub26fPPkrznJsmThOIrru2aukdj8eLC2eMCSF454AURevZkSAoWuMRGRGstQYq3f81FspZ27Pe68d4e48VMx+FePxIwQkf5/P9w1oUAMDW0tm7TuOx0/sO/yd4KxT9sPW/e2/vg/bzxq8enM5b2bAubeRzC5rIQq1tKgJQirMwe9cxS/Uprf08LhJjlFwAxLulNVVVoatI5jzHw+HQT2Mt4ZO15KwxxlgK3jtjDVKcF3B5yRkAKy7UOx9jHIahPxynaaoVV1QAhd3l5urJxWq1OnmFiwjEmI0xuUhMJaYsAlyEBVjBErTdquvWOZXDOI1zKqwFdC5QAMCCQaNos2oBFZRT+v/wKi05HwFaRDJIwkbFqBhQZ0zbhMZb4AICjffeYE4zAHBKcxzPHc2arHARZobT2744tpilFV8zwrZtV916tVqFECKIMaZtGgTDoIUl5sSqKgUU285/9tnHV1fbUgQIUWHTBQBAQ9Z6FzyhzSIp52GOhdVQ07Sr3dVlyezvfM7ZeNNYOu7W09gTyNXFZbdq53kexvny6UeKBqyzTXvx9Kn1TRKYp8PVxWZ8/hEzs2RrCZS//fZrb6hrgwCwALpmfWFLmodhEJgVlFUBBEUyohZRYBZ4SD+HE9kc4AGjb7G2RQAUBBQFBUQBJNDaxkIhBK6t5xJzGefpdhhuD8eJpRjjurUnT6El54VMUTz0hzjFvj/mFLG2bQuLFGWRXAABwZkQnHerbbfabLpXfzvFnHJBQh8sCII1fr1mkSGmcZ6FGRGdccYaRIxyPExRb+7GWDZd2LZtlVGt2W5ovWssYu0OgxIIuvuwZowxprAOY3VlAjlhEYmIFYowUeWUYu3sAcCio7fMDJbXnE6fRFHRoFFGmCNbj9YSCKxWzXGcL3I21B77a0uwXjcRobCkmFebdQjNvj9yLs6ZNM/eWhVYt908jMaYvu9Xq02M0TWB0FZFnBijBYsGmNmIqBQisM7YBVOuIIqWTqPNO2gq8AZ27HEI5WNB6EfMyf5BpXrf0dv7HXf6O9Yhf9/rv/MqvD+9w7cBPyd8sMiCta0R6rRSEKgU4cyGmAukOWtR70M1G9L
CqurdYo9Qp2wiUuUQK+s85+KCP0O8jDHek/feGKrdu3NfEMmcU727u7vD4VCN6KrCCAAYYzabzXq9ds71fR9jBgBVzImj5GmaY0zznLhoKZwSsMIqtNvtznp/ezzuj33KwmhS1lTFV8gqogIKiyIAPTqhUwUgITCmUvpVrIhRbqwJ1hCocEGg4LxBmVLEk4l8FSesXc+qtFJ1OupDqwr1qp/9ybxrqnBb27ZN01ztLr33RLYSnRCNIhSW/XHImXeb1ZMnT2KMt7d38zwRmdWzp4v7kqoKAiGQUTTTHI0PwhBzGcbx9vpVSmm3220vtus/+ZOL3Waapq9/++UwHisp8NCPL6/3rm12Tz9++uy5b7qb27uuW6+f7NarNjgvIsbgnPIwDHe3h9988eu28atVW6lsV0+egRS4vub+G1BhBRUlAFCpAE6tKVotVuq9HvFp8mcQjS4TjprtacXlGzCKAiBKqFLrziAiRXjKaYjpOM1308hItm26btOsNmDDnGTf9+M4S5rG493dq5eHVzep7yEVFEHBpm2cc+h8t9nQqqOmaTfr7cXuD3+5+/qrb7968WJOnBk0KgI573MpvhLlCZ03yjr1/RzBI5S5xHIcp9g3YVqvicgQ7HYbXp4wc65pW2tD04BWkylqu1VtdRsDMS8xT6HWbxfLBWVQQmvsMpM/NcLOL7tBhIr6BACAqoxLzhoAkaxkyBgpaJwbxlkA0dA4jt7iR08vg7OS8vE4PPv4k+Px6Mi8fPny008/2d/ePL26LKKtbe7u7pqGDodDhXG61tQHuwgrLfKkJWdjqWghAoMIxuIp1JE1D96pc37/DufhN87rjeX3hNh8x4D59x3wvvMI4T1Fzu9sdf6esq4Pql6+Z/s3+H/3X9d3b18fowpLqXlGFav0bYeIhIvCUIVEi5wnWfXrci6Txik1PqhrUizzONcdPmQcnxsMIFxSjPPMuZRSqmfQPM9AFKxRBBd8CGF5W5zzYIhIChNgPch1u7rYXQHA8Xi8vb7p+77GjJzmcRguLrafffbZuu0M6DyMeY4iooJTzMM8xznPc5qTZNaUOSVAhNDCbncZQrg5HH/71fVcoN02zPrqECmAkGHAwho5JRUFEoSlWIBSM4E68iKIMVT7FxaBFEjAgnTWtg0ZLUa165pV66WkOY7DMMzjUaUAiDPGewcA4xyHYYjHuW2XWYIqO2fB0kJCNwYAvPer1Wq9Xq+6TQihW69PecAKjSGylTp4dTUPUxzHMeZi27BqPhEtXBShingIEZFxNnjnPJKdU86sMeUp5baxm3WXUhIB23Yi0g8TEf3BH/6RcybG2A/TNy9f3lzfZWU1/vPffn3Tz9ZaujlsyjWhJeSu9Y0jznqx3VxdXH7zzVc3N6+I4MnlRdu2aCh0m+e+OX6x995bS1xSHCcVDs42TcO6NI8BQEQ8kTXWkEFRtIvHrJwzAEIERFUAJAWkaseHQmiIcs5FxXinCLfHw91wFKSw6rLgenex2l0OU3YkjcD17fHuxVcvvv5m6o9GgDhyyZ0PjW/v9v1/+9/9H7+5ubl6/um/+rf/rVtvhMy3L6/XN//Obdv2and7d+zH5IYIGJ5+9Ml+vz8eh/3+EONc+ilnIYAVQK7+Ss4KQsxlTimzINlDPzpL3SqE4Ax5Yxeczt3+uNlskAiBxnFEY+d5vrrafPH1kU9oT0UUVQA06IQKKJRSqGqGnjwRl0FAF/NePNcoEJxzAJK5NNYo0BQzZD1O82bbzqmI6HZ7cdy/evHiBXP+g5//AREd94cQXNFkAIfjsW3D9ctvn338k5x5t93+6m8/3263wzQpYi68u7pcrTeZU9FS3Qpzztbb4F0lpofghaEwO2MrZg0WDPMion2Ga77RncGzKe3j4/kb2eEbA+kbSeTby/dc/4MrnB/6xQ8N5zVX+Yee7f2OF+v7rz+VK0909VO2dH6wzi7n9QXx3lee3hv3u5SCaFRQijCrFmVWZZ3nGUWVBar
mVC71Ec+n5VzqtMZUERYiYhUCrFwmEclzPOtnLqasRPM8z/O83+/rwQ/DgCDPnj3b7Tb1FEopMeacWRhKKSnlkiXGnBaiuuQMKuAaaJsWjTn04zDO1pGzEJMMpQACKzIAAwpiLe7KW5f2gbsegQKpACiqEoAFcAgOofEmeOcsGdCS4lTSPBzG/uAdMdybBpyzGe/xjB5aJK4XVII3NnjfVFJjHffrfOWcJZO1RITGAsDOb0OIbQjMLEDnGxqnY61RqyqrkrXBBxtCJ1BEY+aUUky5VPNB1b5eRk4qSMYU1mlO/TRmLk3XNmRKKb/+9a+z/G0V7PgIBudcUZmGnkCb4JAsIv7hP/mDeZ7vbq7H/phztJaeffRRaBvZXVTlNgRJXcxxKjmd6+QiIsqqVrWCXgwAUHU/WCp1KLSM4AqLHDgCIJICAXBNo2OMwzgP84SGdhcXF8ZHBgVD3h2G/vZ2PA7DMMVpmtLhpiW5eLIjgR5kRGkdOYv/9t/+m//z/+W///yrb7+5O/rQfvzpT3dPnl18/c2L/++fXqwa1zbNdvPV1y8Pc+y67uNPnn322We3t/sX37y4u7nuD/t5GlDZGXuDrjoCkTHWGjQmcTmOw3bTxRjHYRqDd5aMybXUsb78CNHEGJWM8wEA5nm+uzsSgSwFPjyliVSLu+fAtlyvN4YCBYPI97NzZc4qalCFIQNba5q2Y8lzzJbE+QappNwpqvfNfn+01jprm6ZZdeuY5jRPxqCq3l6/3F0+Wa26n/700+vrWxCIMbaN08JS2JCtdIVSClmXUzHGAKAKKIsCVG0aBn4jsMFbUlk/YPnQtOwffhr3Qct7eHuPneQPUWP5oPXfmeG9dag/wpHoSekV6sMoUodOBEJY5DGrFLIxaK211jOzSFZVlXswlQiAqnCJMcUYU0w5szKM42jRmDqmsxQszFlySdPMzHleCNpF2BkyznbrVaUuWGvbNjTOT1N/PBzG4xBjRDTr3a4Km+3v7g77Y50VCheD1HbtxcXFdtUhIpLGOc1zKqXEUqZpHvoppTLHXLkKuWhhIALrQmi7Irrvh+M0k/cG6NiPfQQwUABUjQCKKp+kahARdcEFvv2UqCKqIoFVcASeMBhqg2sb3ziLIFI4Kcc0pRS9a0kFAC0uNuhVkKkLwRiDaJjLQ+gQnrwaTmqcJ3dWQSJTDTydc4acEgKA98E51/i21tlgEbzkebLWOCDMmWOMrFiVQq21RTUXyexLET6ZZRwyiABzrsoEzIzWKGFoW2EAQgGa5/k4jFIiJzpMtyEEMsYSrLsmFakFxsuLbc5tvenffvNVihOC/PKXvwyXV0RIRAjivZ8nmnqNzEWh6H1Fq15hAnn3zFVp4buD4ELRrlokpIKgJIp1skVEq9UqtOvbYx+6dWYzHvo5jt98882rlzci4AGvnl5d7bZljr9No9XUdW3wq5//4iertR+nw5/++//5//U//jv0Xei2r27v/vXPrtt25XxbxDICI7gmXFxdXe2uLrbHddO+bNtXBvdScoqkgBaYFYQZSAlFeZqmW+DGUp4DqniLTXAhOABQxQrqAUREq4rCUFRZa5SDrAuWSnFhMKjA0sl7fTzDZcvlJ9V1qjXssYIlU0AwlWC7pmsPt9ftOgzTNMRkALe7y3nqU0wxJhEd+mm1Wl1erIVLTENJ2RDc3dxut1vvm08++eT6+raydVV9SsWkFNrGkGGRUooXy6Uou0rYWboYSMY4Fj7PvM+j1jsheLDgyL57tPygDtw/qI7dOw/vw7b/YcZD8AgS8p3f/V3WP1a9fOw4v/fOHy3GnkAT91r1RHSWVz6vrAqZ90qGItWKdqmtgynMcYrTFOMYY8xSVERKzHRKXKpEtRau6V6VmZiHcYrzuZRRbbW9921oQnCgkGKsbDYAaNt2u90aY/d3x8PhME1zCGGaJkP49OnTpl20/jabTUxTSrmUJKIxxuNh6PuRuTbzSoyKCM4v/RMW6YdxyoUBCGw
qZc5QGMAjCwkAY9W+IKimCg8u6BuiSBVTgwBGwBAEA42zq+DbxgVrCBWkiBYC9cZIcJziwwpzBeCklK7WF6p6NmlCMMZY50Ipxbg6NbGE9ozjv+c22OCq7Cct9xQBahuv3nEmZuImXCCiApZSarmpiHBJwVtCNMZ6IRFl5lK1gFsPsOglojUIZrtdDxdba+2hHysZGY2p2qo5MfXFWmtO5Mtzcu9Iu1XzZLeOMb66vemP+6++/MJZ+uUnV5V4JgWYVRTRWh9CvTLndwHxfpZ2uuK04NCWmFfHKQBFJKPEAKi0uHXX/LgK9alqEe66zjq/P+5vXr66u93vb272d3dd13328U+unlxebDc3r168/OZrDn67W23Wu7/+1X96ub/+i7/5/MWL26nA7WFqt08uLi7/6i//tuvcdvt0vblUXcQWnHPNqkM0zpjGO0dFc7y7TSUXJIcCCJX/mhJIspgM9H3vjaaSE5dUcinFibXWDsPQdl3XtNZ5AazE7YuL7avrQ33eqkgp0kLhqKWJpRUBSgpLZf5dAzgiEmhRMNUaiSWB5FwESBR8tzreTd++um4dfPL8aYxTP8/bdutdUwrf3d01wThnWCyXYqmJnPrDcbMzxrjNZjPMU83tpmkCAy5446xoQakV1vriI9QCbRUMfcCEexh+3ujgvA0nfGz5ULDl/zbAmR+yLGHvB9AV/w5wKH/3Cd8btTU4QcVSWVhoAGRMBZg0zrl5nkspXFQViOAcL9OcK2d8mmKJVbGSCO/hdgRYTUaqyVmtc1bjhaol5pwjaxnUGNM0TRMaAK6VTGauDkTr9bZpmnmM/fE4zzMAEGITQtO4zWYTnMk548mltnJ7S+Fpmvu+n6YMisxasrCA9xS61hjDnPtpvOvBOOusG+bST3NWAAusyAo1z1NAVakC0whv1gQEgZRqsxMADIIhcBaCc6umWXXtqjXWkYhwzqDFGapchfE4EZGxiKQlc1UmY4aaVbMuBA/CmmpbUa3Dd71i1lrnQv3PWuuMt2SJDGJlAAOXgtUqA09zFEBAMt7VPVer2yorHGPmHNEYZwy5Wvemqg5mwIhAQWAGRLAWHfnGQAjBk/bEiBiCU7XTBPOcbPfRORjXiF7/XA3qqrpZtxcX22oT8e1XX37yZFOzVyVRQiC01pKxhHBfzoWlQfWelxcRBbSC+hWX8p8iAIMBQ2Du83VAVb68enJ713/129/8zd/+dp5SHNO2a54/f/b02ZMnl7v1uosxh65NZbatsx22rtvffTMOL9cNtmpjH2282UDz8ZPtarVpVjuyLZfcOr9ery8uLna7XWlK23hjNKV+6Pe5DHGeHREggDBB4axJCgdrELqu2263u92u6zoAmFO23llXPSC1lGKss86ioSWLqumc0usBjRQBAeG7OlKISHVkB7UARGAtsUIWiDlNMa23O1Y8DnC3793V6jhOBWC93Y0xed8QwtBPh/bw7NmTLjSHw52uQuND3/dNt8o8PHv27Ktvvzn2Y44pE5KzOWey/sF0rU52ERANEouKii4a6a+13PAty73vn4f9Z5bq/eDFfngA/zEZ5e8Pbw9/+32y0u/3V969/UPHAwB4WPBU1So5VtOI+sy99sIQncNe3/cplXmec2YQILR1nHXOGSQQLcKAAgBcSu3SLYL3MQJA0zShbUMIUEnZ1iJijaPVeIEqF0617/u7m/3Nzc08z01oc86Xl5dN40opBLJerwFkGIY6tubEx+N0PB5rZ1AYcxFmJQLvmxCCiMRYYsyJzappBHDMw5QALRoyKVVsKAHhIwhpAaBzngGnyIeoxlCw1DZ+vWp3q65p0JKRkkpKhIxkQbUCXM93IaU0z7MIeH/fVa1FRjJUQ50ghdBWQbKK4Qwh1HKiIVulp14D7BlDRED23Kmtd1lUzm4vtYdqrTVmHqrIVg05lowhNdXm2zNzSpJiFhQDgFjQqEV+sgkXneXay1W9XG2JKEVYCJRIClyL55ZM5iRFWUt
1ZueiDOzIJS7GO98Ei23xNpHJZhRmb0wI4ezTVE9qOUGlc5FZkQAWp/t70WBEQaq6nPW7pZQcUw32xnobguRyuNtPw4giFmndhBDalQ/GrLI4VhdWm92Tp2rEN44s+IaajE8ummlMhtxl9zGiXXWhNf6iXYd2NRdA0S50V7uLJ08+CiEwq6gWFbTGr5tN3jap2Q9ZC2sRYGEuCghcVNU6qt1r7xtDrr563vv1dlsrw0im8w0ADMPw8tVk3MmuDwiRVFAJF64bKFbM1QOJu+XiPBgnlkxaSzWlBVGDygACMo7jk08/Ho93M0DK3K42N7e3TWuef/Tsm7/5AkV3u42zOAwD523TeGbu+3632+XK62X96PLpbX/oh6lKT5RS0pyNIxtsNVtwEJahFUzFbFcvMDFyHm3eGHbOR/49s73/kurBdxY53/fF+n+/n8j3ncjPRw/rd/u7Z7xlLTOeA+3JE5JrknFW1HwDMSwiOZeUli7a0hq0RgWFpRrCgSzOOAYVERciXUwnRl2xPlTaddM0zllErOW+aR6naVLV1oeSVUSGYz/P893dYRxHg2SImvU6hEAgUoqQRURmmecZVbnw4XB49equH6OKUYF5nlNWATDOkvUqmBPnIizgmkaAppRyEUBAsgikCAJcB4ulYwcACoRY3oyCJAgEgKd0xBjjrW2asGrDumst3YPQagArOdUs1hqsStkppZTUetjtdqeYx+dua12c8+1pqSotSxkQCIBqE4gEARAIifBhkVBVBSreXWcUQQDRKmpDRMZQ2zZcateWiwgporHGWUsGDaihjCYTCIIhJ4qJ0DjjTFiAJyXVHrAxps8h5ywliUjOMcdERMH5ItQ1K+tNNe4gsk0X2tBdH/fOUnDWknGOnLFxghKTI7IPYp6qSmFUsMa/Sx6umnIDAyCqqf8gVL5/sIkohOCbYF0g515d38Y47bbrENr+OM1zIrQqJRaTDpEFjPW7J8/IYxugCYagOLdatebu+i5NatGVLBbTpl1v25VzbePcqr1iCKvdbhzH2/1dfxyP/X4aD1GS79qtuULQdDfHeU5zRGYS9oRt07gaGFKqhlPqLIIBIJHF2OvhdDDFogDV61gftPFUVVTfBrGo6r1C4YOHt1ZCUcEaAMBSCqpapMU8j8U6t27gOAqgGefUduspprZdHY+Htg2rbjMMh/1+33UfrbpmjvM8B9d249i3q12Mcb1e58RlOhLZiioyAV3j6om4YGv/4t54CKCUIviaV/DDyPehy3+Wqd4P7O39jn/s95rt/ZCz+qGQzodJADyAdMKDSdbSE2KOMZ6RdecvnnKUmkZYQiKAUlWnU+q67gzbtASIWNGbpZSq2yIirlY1m8Z7b52p2wDLPM+cCxn03hNqKWWu4P4YjTFtaNq2vbq6Oh6PcxyrW944jinNAGCMiTENw3B3d0gF2qYDgJR0SuAceFM1QkulAxJB13XjFOd5VgVyCESicKoKqurDPOM7bkQdYGvp0hvrvffBqiYDKKDOUgiuDX4GNsY4a4jIWFuviQh47zebTTwO58IgLpGUzhoute53zrZVFYH0gVAhnKRTz0tdL6+7lgtLvR1VXtxau9vtSilpnkopyqqkhGos5pSJyBoyhEjGWitis80Lj0pVcDEFTHme+uh2P0NENaiqBsEgOUtN0wzDAChc1BDsdru2bXPOfX94+vQp55LLVCGpIQTUXJCqxdVDpPHy7Jnz2H2+H0v18jQnw3vxuFOCaK0NIQjhkggCbjarrml4C21SzmLQ1vxsBjsMvWpZb3zTtcZerlZm3Rop0RFebLZGKE4ZMgrDbrObp1vrmyiErmt2V1PCr27ufvvb3/bjMM0pl4lQ0Zr1dqXQEACtdOyHcRyQxaE4pIagsbYi9VOKMUZviIgUeJ7nMZfVarPbrZxv+3Hs+x4Rn1w213eznks5hCALQ49MDYT68PQfRr43FwEkrILQBBDIIqFw3u/3nz5/2nj4m9+8GqYREcn
6b7755k8+/YNfHe6Y2Vl7dzft9/tPf/Jss9nEmzhNU1h1wzC0q93hcFhf7qzx33zR1wZ/RWxVcTLOuTY7VPGMSa5DDdgHVPQHU+2HZc8PGh7/MQM4z4vFRzTcfnC8eQOB8qGan9+JZPme6x/b/32l4x6PQQAAxoICVH9LMCoLEk7LZImMs8agaJaCQORDEBFAi6goaMijMKaceyZ2qIoipZSUYilFRQg1TUdEnMcDM68uLqqtubX2y30ZhjTG1DSrJx89vXxy5YIpHBGwXbcG7d3ttSS9XD+xaIZjf3W1ubm5GY77ecxIbr0J3Wa9Wq1SLoAY2tW6aQ3o1A9liii6p+mL3/z25avRWRtcczjmcc7GtNaJD64JDaLGqY8zWwvdCr+yzWFKAxr2oII5pzkrCwQDAlCwMFQymJYq7Y8WqrZFjSIqSGxFvAOPGhA6y7tVuLrUVTOByM5f3d3dFY7b7XrVNdM0TNMEhjKnYAPn0h+OeYqdh5ULGnOOiRBVMc4SHDTrxvuGC65c04SVdy1CUDUqTtjkBG1gg2Cts3bhL8gp/omILk87AtkK3wswozVgF9bmklnmArVd2nRwpjrMPI25CStUrLNFVQARSxR8l7lUKCBUyCWidStjOx0O/mRzYVUdgLLyODfOq2qFG0rheZzIus1m58boABq0aBAAihSGhpGCRwETizpnvSWyBg2ggQxewBCTRWPAWEVWBWHITCfVMlUCVVBHapPvolAyKZkM5GsGKSJj0U8+/XQ3juM4bi+bZULG7D0ZcwmGcs53+/aQmHMj3bZ7sgshRCRmjr70fc+q0/YSrn5hjCkxHfb76esbh9S2/pNNN08mj/M0glHYbje79YZL6vueNq7v+1cvXt68ejVNU+fs1cXl5Wa77lbrpt2s1pu2a5pm3Xab1bptW2mLc8jDXT7edutdPNyRUsoQ1TJ5rmcMTJAbyKAwVQtIRECsU1JEsYhoEGoZBsma+iVhFgKDiowkKFllFEYCMngXe380l1u/DfAXf/7iv//v/uSbLz6HMd4dvrj8qBEY9kfYXW5SKX/zq1/99Kc/vbi4fPHixRU8lTzH8Y6ZOR0//fTTvmv6vscJr66uGmzmPtGsvnGdW+1fXf/zf/HPv/zqc9uazUV3O996b626nJhLbekZQgOKIkp0DkjL+YmKqvyOfLvH1v8DrnC+m7f36Hnh75DtPbb8WFfnBydtP3j/58v0sHFSWz73WYIggIqKClbGnggAs5TIuVTB/pwX4ZVzyfRUmNBhGHLOIYScc2Wme+9DCKUktLW6uaoZHqf8/Kc/4cLTPBNRuwne+RKTqh6Px2EYco7GGO9dCE3lOSyZSu3wpxhTYmYUvdvvY8xEgGiYpZTCLIQagluv18bqOA5zZufAeOSiBQqzVsVqAKhofyRYDMp0cYEBoHc+WBVCUq8XIhCBMXCuTBLR2R3QOfda2wCRmXNMFaHjva0tEACY5zTPCQC8O1UR5X6Wg4uQjW/a1lrrvL9n7D2YCS1K3yfyw/0p4GsqGNV1T1WrL/Z5OYvmzNNc+0zGmKpdsFS8cUnCEFFPOSkAhJVDNCxS+7gikrkIoCnGetc0zrmgiKWUeR5LkRawum2ceRr1CRTODwrv99neG7PK74Swn1whFzDL2TBys9nUSsNqtYo51bIEMzfNFTMzaCnFN2HVrwqq840hCM6LwTjDVKYck6Jwzk+fPCkpjyUHa8A5b912s75Yr1btp3EeYz8Kl3XXrNpORKZpkgDjOG7Xm4vt9rg/aOHGeQA4NzK1asqUEnMioikPTdM0zhtjU0o3NzcvX7687WfA7wvNw9OFwncUn5YiGOnCzz0LXxLRNM+bjlYrk0c+HA65lCdPLofhbr1eu0WMt3jrjDExRh/sdrsdx9F7PwxDfbWZebvd1spQfXIsoSVjrS0phxAOh0PbtiklxFW9NVBOr9XrwM4PGu7+y/LG8mOGvQ/txn3ofn5wpfT77P+BzP/y6NfBxVk
LJ/GRU4mBluFdFx6CMqQ510Yds6gqiuLrUaF2uR2ZEEKKsZrJwbnF1djFfgxAC9dRL8apxNjY4K2rU2/r7eF4M8+zqjpnfOutsQZAcmYuTQiWMMc4j2OaY6VG3N4MKQKCVTFxzvPMokBGupVvWptLiTmJgm8CWYzTHLkS51UYanGoFiqrdO9bywlIDYK4OO3VmhIiGlBnwFvzkFE+jiMRNU1wzsmJDoeI1rmK7Y6RncPNZuNciDECUIwxJWgCha5Fa85gtnuugnMhhKYJ3nutSsM1IV2wecu9riXS8whS5yQG7otFWCfOVQK4gTPO9qHIDrMqszKrtYu2M1T1U8ilVDVUsgZOisDiiRDJQGicccTcVZp/KQUURTWlmUFVkACdoTRPZ7hNLbcaJOdc5PzG4/0Q6XB/Cg/YOOeVD7/1UN3RABEscNDq9+uc67quyHLipRQutop6N853XXd5ycwsgJnLqgkFVVNprNGubdvw7Nlz33YTTCTSGltSiyqbbff0Yts1Ledu7gbO2RgMzgNA4124aHNMV9vt4fJyv99X8xAUDY13xlKdohBo/Q+1Co6vmtZaFxlSSsJ5ZfFY4CRn8PrLvtgPfq9hgYiQtd5WPAmWnYmA0zSV4tfrdYT9fn9k1t3Vky9/c1snXlxYxNQ5yjRNhM3Fdnd32K9Wq/2xb9vWWptS2q43JeU9c0oJp6kh9I2p7b3Vtr272z95cjHmoXa7EVGUz8dWB5z62P9nWXv8O1t+AG8P3r/9d1Ypv+f+f1/B73HqApwM8EBfGx3uxxcARAMIiJaI4hQlFymKSsDCKccYARwpnAtrCLWUIiUlEfHNoqsCAN5XS1gRkWB95QKKiPO47rbH/cEgNU1DotM0SWGL1jmXUlJl68hZT6CiRZlJrHVkLYHonOIY55JSnOZpHKdRRByozjH3fU4JXIAQbAg+pTjOEwC0bWubtpQSOY1xnKdUMjy4ighAiAAKiJW8L6oI9JY3E0qVgDKgKIAGjDkxt8xClck5t11Y3AFjzCUBgDHorKvdNVVo23az2dVsQFWFgRDatqtSbSrom5onhxBC0/gKaTkhbEFETqA+PWdp3ntEFKAHyABUVWNeAwuccblt29b5OBGdK35aPTcyTJIByFoKoTXeGHJkgJlESimCJQGQACvDbb6pGaQxjiwacmDqCsvMUphZGBYUK4HBE2jlvsbwuuzi+aHF1wkMb4+DjwW/h50hOOV/Z0IIInqEeu6llHliRGTOaOyiDw4AQALqrB/miU00TbNr2+12+/z5s+M0W3EXjW+ahnMap94Zs2nbVduI+HXjpGQFqZBfFE1ayDmzXXc+bNereYxpHjmL5GSMJYtoDdZCpAExaBFzzpMCUToO8dW3L/q+N4agLPMS1HNC9/p7/zqApX42p/krVKw2Yi1ryEIbuIctl1KUgYtCMKFtYk4EWrK0rQegmNkSWOsRkVnrBO7q6sp7TwAGaZqmzfZimqauWVW0cM5ZpskYk4Ilmz0BoY3TICLBuWGYmqab5/ENXMl3Qlr+8SWCP+R8f19Fzv/NJXwPqwc15NVJ92maf19SQ0IE8tapas4ZlZxZkJMlFQtWRZYi1EleuaQcY6yssmmK4xStCwbpeBhizIt5eikyZGNM1643m83hcNetVhbs3A85ZwIsXMZxzDkCqHPGGBRgyWycNc4RLND/GGNKpaQyDNPd3R5gq8wpxWHI8wiI0AZsWg9Y9sfbKfFmvdlsNkVkHuaUYBzmnGu2AUQkDFqNuJHwrWwDlACrA9yS9yDWeTkgKCFYouCctzXmCSoCij3B8CpVsSZgFR/LzNZC03T1gpQsKRVFCMFUeTZm9sGEECqGs+u6pqll3sWw3ja+3iJVNQpmgZSeqE4sKkuYWYRdQLQKFIuemJuiAJkFACwZ27Rd055RMLd3NyylqHApXLSkDASI1LZNLCXnxCwLEh5VWZ2zACDAKgBJABKfIq611gUfwqItkFPKubTOVzTpA/R
NeSvmmaqaYIzl1391/nSP3XhgxqaqFonhPuyJCCLUHAIqtb1KuyERoEFqvBWGXGKq9k+mll5NCIHQWgBTuLadqoDeReuyBWds13XMebCgyp0l5GQRvbfkrGipHgsENE8TqQnWNGu/WTdatJQkRY/7W0JLBgxaIjDGKGnRYsWKCCkAlGkaxqlPCRArX08Y8J3j4DsHhfNVqEn/yb6JFWnZi94z/nIC64EVximuHaEhInzx6tXHHz2J8zjP03Zdp2WScw7BTdO0oDdz7rqu73tUiPMcu1j5uCJSeLFHRgM2mJQSIk5TvHyyvb179dHqo7GoPdGoThJ9PxzJ+V+W8/LjZ3vwelD5Xfb/ewl+j/L87gtED/sfqgJazUsqRRirsJ+IVmtyS0AKUlgKA4viot6yQCRyKSmnlFJKq9VKRA6HQ0VgFi5nkx1jTCkJEZw3leF3sds5MmnOIuKcMWj7w+Hli2syFW1hjZHaP7KWvDU55xSnOMW48OWnYz9MU1bjCus0lmkEVeg6aNvgjM55TJkBoOla3zTj/ngcyxylFBABRKgqHgJFq9CTIYAzHYwAFJRU77VZKoHdqBIqoRoES+AsWWucIUNUbeScc9YaAEl5LiXVuqOqxBSnaVKFtu289ymWcZhzzjEmRHLOI5pSRBjsyrdt26y6pmtd0zrngEgRKv7bqDtnQnWpuLfKX6xlWKD7Gimeqqxv0DFrr/Tc+jo9DHqx2Za2dDmXUhKXGGMd4IbBExGYqgptznyJzXZX1cjO06DEpVpeiNDZoPgM4QOBilOt3PZSCtf06q3lnf3p7yxyPrw4Zwu388IP9lDRPed6ss0lg1ggR2iM4RRdgHWwjVk3oTNEhdnS/5+9P2mSHMmyhbE76ADAzHyIITMru6q7P77N25CfCH8/F/wB3FC4ekIh+fVYVZmRPpkZAJ3u5eICcHP3iKzMmqu7ICEuHuYwM0Chqnc69xwUgkIoreh0Fq1eGzMFglxmVTXmG4BG0hooEV/1OwBgpJVkbn0K336zYW5ba9qkqYBoFz0ABGYArAL7YddHGBuZJqU1q9OXhSBRrbsGdJNfFzOZyxwAFEK1yX35xgrATFVkHsfhZu99dOR/+P7+H//hdpqmKqrISKxVq1odPT89Pb3/+KG11neDZThNHcz6EWutkrOISG01FyltHGcXaWEZFFtbZHFha82mhI3PBiD//fbV/1rHX0G0B3+zAd9zTmkTPl52Fl7dQEvvLPnPeZxzziCKaO3VqaaMANqqtgbSlswgIxEwaHQ+Om9kK4hLtxYQA1cXF8rrPsbb65tdP0jNIYRWamslhKBN5jGN41hbjh5rra0056jz3bIMWpmnKaVU5lJKmVM+Hs/nOYnCOJZS8jiV1qAf4OpqCB3XWqZpDD3GbghdnEo9zeU052myNK/oWtVY0dIkFx1RAGDanABrXQuEEQnUfjoATxAJo+fgOTrPvIx2CN57byONiM47AMk5Z1Omdv7q6irGeDqO5/NZBETUOcd+oWUJXdzv9/v93tr1NlFNRGTnTEIPFQzEaA/LsOxFGiIS+7W9D1FUtaEsElxbnGB2gsPS8K5Vmzyjk7oYGImRGrug4olJwVq7uq7ruz6EsHFhM3NuRUSKtOUrmEhJuR2GnbHnqDQA9S74GJg5n5OlhRGx1tyamn2ylACzI3KGNJZmt/sc5332l8sc/vNYAXpiAXTESkiAhhwRU3q6YOPTmg0zpdJIlRBUWi615hwIPaAjGjwF5wWCI57zGJiqSsuJoA2evWfv8NAdaq0CzTln5GtIFLxvoltHpluyAiAifd8DLEJLqiqlFmlaW5pms8k551YKETJCq83ErQBp3Qdl6cP7CfviMskV2CrCLxk5lkEDQOBaZZ5BFBXQha40eDydADD4ThVrrc4Wi4hz7nyeuu5ERNDk3fXNeD4i4vHp4fb21vtoXeqllNACNZznmaEFcPvrXUpl6Pdpzt4FZsw5b4wKG1buS3Kyfz9+/LC
qwB/f7Nnxx3I6fq5T84eXJPVVU9dzZtMaEwzUIPN5bKUSICnlnNM4LfRXRNBEiRgQmBySugZeQmDnnEWBIcR5ng3lpanYFzmi/WF4f3MdopvHaTydtTbvfej8+Tg+PN7N0zwMA3G1OhMA2A6bUkopjeMpJ9GqIppzHaeUsjjnx/tUas4ZyEHX+WEYgOtpmkqFq5thf31bhaenecw1N5gyuL1TLcsmv2iZ41ZCx7UF6nJvXUTeVAgRVRiEEByBY3SEnp1jcosZMrYUQDLya/be1zpvqFcf/G638y4+tnMpzUyU40DoENk5Nwz7/X7f9YN3kZl1KcMoMLngzae2lCkR2fUv5MVLHEZbsmgB6NNSV3uVmTDmGot4LukL5nm0xBSAidM6oxd4OD4Nw7A/HLYa7fJdnlpr3BZggqqueNrmPAc/mBevC1CmDcNg9lJE2oreJCJHz2hYWD2S1pq1Y/+Uwt7ixKyz3W7KOaeG+SfESwHINfDtus4xL1IjqqSQpzlN8zAMDrG1lqdZS6k+eGMQcEi+a1CNHi8EZ8hYRJykahVp1ZweRmRGx/6ZYZzYyMYAwJ4jugU0C91Sg9wPu5LzeDyO4yhSYwi7oStQz6UiCC6CE585SF8wlCEAw8IUaHIHi+OLggpEIEsUCA3A4FyCUKqUBoKURXfsKbqH+6erqyvfdapQSvNdZKLWWhf9PM9PT0+3t7c55w8fPvzLv/9b3/fjOO73e3YBRFspACClNscplUCIqIHjPKbr26vTdOr7yChwAbhbqzB/N3t/0PGzk5w/0a68XYdfQri8ev2yaP9Tzv+R7708c1v59IXpEmNMKVnSCVcaC++9Z56mCRkdc5mz5dlyLt775qXmeRxHi/xaNQqTMo1n9i7Gbs5zSqnkllLquuF8Pn/6dOe9B4CcM5NLc57n+f379+9ubr13u30fQkDU1tp4eupCJAwppdPpVEoh50LX3d3/5nA4DF2nqiXNzNxKmU4jA4LI/dPx+HRuDV3oIevdQx5HVW3Ow/4q7odetKbpnFLd73039AJ0PJ2+uzs+nlNRoA6qNmY2wTJzwHOTnHO321UFFa11bR4nJiKHUGsFECQmUBVRAGZwCrvod7HbxbDfD51DkBq9d45yzgpLTWqaJoDqvX/44Y7Qffz4cbfb3f3w0Frr+/54PImiIgEtxNzDMHR9bzh764KIMaIPIpLSVCs78pseoe3jlircxR0iSpMqZfVk2DkX3LPfc2nL53m2sy4aHgztGU3hT1VljUUAoN/vAEBWLTRdJDsKcCREi97WqQiw1ow3DVVEFFngT0RLW4J591XVoocLF40RFyaa2HkiIseX1wlb9uJiITCxqpJwAfDO0TBUS2OKRueBicAwPtxdZD5OT0ervppWi0pzjNzHaTzVeQrO9zFGx96Rd6StlDIxe2bsmBtoySmnGQBaa+xcHz2uj2Z1JrgsQCC0gWIfELGpIKIgLBV0RGL2IYjUsPOB2TH/+j9/AyDvrq9++8N3nSep2ESVl+FFBM9snECL/t5zLU8B0TlHgDXnJtbUJ9JaH4J1mgCR955Bk/lkAE2gG4b7x/nT/ePuF9+k3HyIP9yf/+f//Pbf/+Nfn06n9++vW2vSmjYIno34aRzHw+GQ5vGr9x8+ffrU7fb39/ffdB0RdF3nQnh4eLgNH3ZxsMD9/v7+5v1VSTk6X+YCAUxoxfalzaV7uwcuieufsjv/AcfPjSu+VIn8Y8VFXyp0funz6U/Rt/el449lXP+QYO7HS8GXqgvykhbBJpwI6NKUbX1gUquU0sqcypxKztKEARXEfHMRSdM8z7PJW5pk2oInFJG2pFK/+eYbIpjnqetu+uBBVWrrQ6Rh55zLeX56OFpcaLv2JSmJIfCMxprI1SLSAMBJq7noecxPR1Cp3mM/xK4LRNBKlaoAMAwDIeeczynNZrgMjr+UfBTAyOAJQJDUlFrgEuGGYmQXiAgCBGJlTwfACI4heGbCllMtBVywK2d
mJAPNhgKq+qzZu9/vY4wlt5TKNM1W0mDXETpmdhxC6LwLznkih0zARESCwM+kMIhbY8nqHS/9T+s02OInoE2WDjZH5xIJsp2/TQZEJO+3cFdAVABQVQCYQFFA1vKnvYqZAV9Imy5OGDObXpXtVgwsKELPcRisG9lmwreLeXVVm0v36lJ/ZJ7rhbokADjnGrwIJmBNe8QYiSxYR1xue8k6GnNeSck5in7B04bopBVpJuywpCUQcX99tXHMVq2taa0ZgJhVVzpAVV3iaAAfAxHBWnMVkVpKydmDa3UpEnddt+vjbhgO+1DOeSWYQDWKAruRVUUFV30sS4OjKIhajEsrvEVV2XrCLWdKirqNOehSmIRc4WkciQbX7c4/nJuCd9Ff8Xffffqnf/h2nk6H3T7NWbQi+lLKNE1d1zkfLfCNMR6PxxBCB3g8n7uuKylLbeQQkUlJRI1hEIAAFjDLAn5B5AtS1r8fv9/x5zB7f6wq3c+q3n32nB954yUtGcKCj1DVKlJFaDEHC86FiGqprVQp1epSNTdScM5JzY7AM7ZWzCJyiEw0jXPXda21nLPZGw4ewQ/7Pk2zZbf2+32rteXiGJkJROdxenp6EpHd7tBEjudHJofIoKSirbbW2jxO8zS1iqVqrdKqpAzjOZ/OMGXoHfSDv7rexciqyfhCA3Pf7VLT8zSdximVJgpCCExLeR8UBMAaJNRu2Qyf8WgL0tLQsG3EBMiwJIg8QGQcgg9MrRYp2e9674gQiAHxWS0aLhqor66uHIfj8WjE3DlXx459JO+9i94HH2JYGKejEXNegjssU7fskrUZfQ4ze/ZE1FQAwFKZsLyLEFFMLGKNjZAWnRp8OWF07SNg03PVZt3Mq3wpqRAAKRm6VRB4ZT9ektjPsxdAxfLGSMvYEgAhCoEo1SUoBEVSdkjoAQDEzNKl/NBFL+nnXDqLaV6V+p6z9wu4Fi2JTarIKiKGPBIRQUBEBTIJKetdAVDAhoj7/b7WmtOUc84pz/OMJzWiO+v/syPGGPrOe59TcU7VAnETPFyI09jaE58nAxjUlUAWxQxAqLrwtlNPUpu25r0/7IbDbn/YDTdX+/vxzlx/1YVnWhUu6affDs42FMbvbnaXQK0tDgRU1BBwAuCIAIjZM0IVeDqePbv90DeB77/74fbm3dPD3fl0Ph6PTOq9n8ezaA1MUuoEU4zxcOChj7VpYHp8evr6m2/I+buHh2G/SzmVUjyt6lS5MRcHxtDWLM1rOLi4SlD9oZvyf9fDvJ8/VZLz7fl/EeO3nfY7Ub8bfMO8w83hvTwAFlUsh9C0gvnOVVpVFGVi732RumH2RKt56/be1to0TfOUrDW473eqejweg/P7q8PN1b6Lccyp1awVp2nU2tI0EQgyl5LGKZ1Op9ubG9WlN6iWUuaSxlSmchwTKNeGKbXTcT6ecirABLs97A9+2HlEnadSa3bBx74D4DlPx/M0zrUKCKMSiaIzpS8EZFBRw60sBTkTHdIGKLhkCwTRrTpvAABM4BCYwDF2MXSBsQERhRC8Q2lt3d201lJK2qp6McZhGGqtj4/HlBIiMnHf9xx755wJynrvY+y7bghd5yx8dGz4SSSCNSICc/r1RTFn+5PFeUuItSUbL44fny2CREqCBgpVNL5rJUQWIARFYkRFZEUhYI/bJKQljFRQQktgoiziHgBAgLbPX14A0RKKSDW50ddh32em6ArMebtAtgj4knIWnvONqIbStA5FUFWdzvNy4yKKCKIKggrAFCg4T530C6VLybW1D19/hcgAYgx9c6m5nRB5vx8ACJGtb2S7/tKa9zEEx+wt9AdSRM55BrDuEqu3EQCgLuhWrc1FjjEOQzfs+j5G78g1JeNLRxR8gb+6DPVebA6qBKhEfFExZQSHuKBpVgtqCfMmQA4E4Dy3OE7MvNvBv/zbf/7yl7/87jf/ud/v//PXv/2Hb79qVWHBloJVked53u/3XdfNqaSUVFVaYxeGrmulikJ
KBZlCUxGouRVulgcopXRdZ3uIAWsvsxE//fhjJRX/+o6feV9m9v5Ml/YXCvhenf8j29nm+iGuibvl9ZX8Yd1tSKEBGHNjM/2eZetkUmLE0louRUQdcegjkpumyaJFw0fEoSdyIYRa6zRPX3/86uuvv940/FQ1l9JyGcexlBpCKE1Pp9OcSowx+A6Baq0plTTPaZzO5/N8LnnKQKFUHM/58TGfRyAHwy5e3ULfe+el1ipSETHGOPT7acrnUxrPqVZAD0zOCvukgKimICgIUIEW+Z0F+qgLOlwAQUUZg1nJRfsAgAmYwBMGps478tjF6L0nFGnNewNrLGgRXUXtu9tb59zT6Xg+n62UxYQhBBd6IiLnyYUQOhNm6vvdJfOLouLSp4CoSrbrrdUts6zOOXuuAMDW2LcGiMbBYcKkW2H51VSx3j4jrhIgQAAkUDDlOwDSpogMKACMDBbtITDyRv3DBKKKIGoADrCAQhdaQbXSsEXZKqoLMdZik/hCYxaeITavzB6+Ce+eTwMFAIOhChIACG7O3FL6sgqibbL2FZ6irmLuK9pLUGGcTkDkORLRYCl3rao6n+fgfdd3XRxCdN5F54nJj9OJyW8Vyu3zGxIgC6A2FRVQM4nE6FRRpYmoooKgI8/eW7pCWiulIOoQuyF2RBSdn0ojUUFFIkRGbT+yS1z+afMDUBSkIjki0lZEANZhV4TaWpGmiEhQi57HCRH/4Xr49N04jmM37PvOPfzwg/HRO+cMxKuqUlqZU02588E5d3d3NxyuUkoR+erq6tPdnetjzpkchhpqrVSIGZ0nbZxb2u12ryTPtkzJ34/f7/jzRXvwlwv4fvrFry7zc2RgZGMAACs/mahIbTnXec7zPJecVRUVtLYMGVVqKbU1WPilXG1tnudUmirmVPq+77rBMvWtta+++urbb7+9ub1Kp/F8enBWOGuNkbSJ+Xem7cAu3NzcMIbWWk5lPJ3P5/N8HudpKrkBcM51nOrxlM4jZIC9d4f9zbA7MmtrubXKjjx67yORSynNU8oVkICDR2IRFVFSxKoAwIwASgxAEIIVd9SsHejKgIjGcYUASoom1+0QPIF3xEiOObKLMTKRSlsGpNZtBzdQPjPvut5qRVawdA5NUtxsm1k8U9fruiGEwMGzSYQbAmQNYrQpAOAKmoCLOH5LZsLKXbm9/mpiXGbAXk0eIrdV7wQZFAnV2rztFMS1y9N0zVE2zjZURCAjigEAUmy6ab/RGg6+jlAXCSd+RjEQPP++Fp7lOYDD13W+Ly0EvKh5rwyfhM+UzUhEwXsAsJZ5WAG+qtrvOhVpWjetLouib24/WoYDREuraRrrsaLgfj9YlmK9JUPVOoPQwsp+oKrSULGt4AMWkVaXrwiOc8oG3Gi5oOqw64ehcwTMyA6hNkQG6zv6Aknx0rqHSBbCijAtTR0NzaIor+19ljTfyJsMRgSEiJobnM6Tuz3sd/Afv/7N//jnXz3ef397ezNNqbUWXJzHqZTivbc0z/F4VNUw7IwcIKVE7HeHvX11SZmZc84xRw6sitbfYk3ABq/bAvHfA8b592hvOVThzxbtwV/C5j2XfC5IK37n5cHLyK/WYiHaBm+RplZmsCZ0FWFAQZQmeU4AcynJCoWiAFDnlE6nkwCV0lJKwzBYwqSLw37XffvtN0blME1TKYWdl9ZyTjUXACCAlFLOhYhi533g+VxSmufzOJ7P5/MxT3OrVYVF9Xw+PzzAeQYEGDwc9te7q2uk+9oKiBJw1wWmoErTlKoheACYiZyrSMTiFDEXJAAFtrZiAmQOITTYYgsryiAuxSMLsWwQlygIEZxj0doasYsOqbUGKq9EDYlItW3914+Pj6WUEMI8L4q7RM5CkL7fHQ5X+6ubfnewFPGG5kciNdNAqqRq1L1NkEhRiRY59c883yaIKJ5hi/ye/7rEhS/euKQETZfcNj/ZfmfTcSUFJSVLXSoAqRYkxjX7uCRedcsaAD4bRVCgeoHEWeA
2aslNAlhE4ZZL0mejDhfWfbuLzyY5bfy3tlSb2FbP3rqhqy4PiJlbdbACnaxQZmHosOu1ttyMtq3oyt73dDxbbc8RI5PzMcSekRVEAA0Uxahu8XeChk0xChxsQC2RJszsCEszbFNzzqmjBhq9k0w1Jc+0H3b7/X4tDTYCqNBg49tR/ZLCzKVLtOZ6kYgQxBMDApVGqkQMhE2htNYFx8410dKUEZCwAZ7P036/N5OGyF035HSepmn/4R0pWGwKAKUUS2MciPuuqykDLwnbw+EwpbkukJ1caw0a7CKlLclVANjkigDAWPp+7634v/OBf84k5/O3/oUgnT/9wjas5pKEeSmk0JpoXUhY1sI/MDMLFEvbtdxaM3b/UltrmlKa59zvdqW05yo6883Nzddffx12rtaap6mWYpHNnKd5nudxsoluTevm683zPE1tmqbpdJrGKaXUaiVg5/3xNJ1O8HSEAhA97A5X++ub3XDIpdim1EcjlPJzatOUyTsgR1TEMTELKiohAhY0whoksAoJMTCzSLP073q8CEp0dZ/RUp0IBlqT2iACgNRaEZr3Pud0QYKzIEUszjsez1KbmT0EDiEQuSJg0d4a6i2KE5fgxoWO6wW4UQAATO3vguUSXjzl53gOX7z3RZz3Ok8uS6gHS1/z8vtC1goEuEB91ln0dtvFbfRQSWFDjfJ2SWsAhwu4Y/Eo4LO+7StIy6vfL6NbANhyy6q6EXNb0Lx1NIK0zexJW753y6oZlGaapk3x3JiJ7Pjw9RUAkD4jlRiZiK4PVyJiNWmtDRFVUElLqXaRG+B203C2F7cWFBC1FgvnXCMqIujYCOqcW65NVLXJsrfptpQB4LUW78sU8YviLhGRgrkAzKyEzmlpzcrJqlAKIAEFYqLTqez7jpnv7u4Ou/7p7rQbhtPp9A/ffkNETXTbRqyzxXf9zc3Nw+PR3N88p8PhcDyfBJ7H8DJNzY71gpbF0j9G3fLH3QD/Wx1/KpaWt2bp7f7yI2//6ed/qU9l2+OWPXGd4hEXuBQAXFBrAQJoFWnNOcfkqlaj5yClwF2ZS5vmrutiCMfxabw/lixtlI4H5zlN59SKas2SfeugYiqFiIPvSilpqiCa53k6n3dDHPoQnO6H/dXB7wbK+cmMpA8oFY/TnMaShE5J9/uOWDVLB67rulrz8fFhJ3l8Oh7vjnMB8TsI75JCSvDrYy4gcwRtsLuFmw/C7rd3T/+fMAIQfPh63++vn+bcQOvQP5Z6nqfiCIYOREsVAIiIiPhwysPQOe9TKnPJxC50O/W+Jom7fQCdn2ZRAAERcAyH6QTmQ0lTgA7xqttd7boAFHnYH7rdrgOPrifPqJpRAkoDaFjVKTMwQoPa8piC6mlOtYqVMFMTT9iHfojDMAyWHA6hoxDJex8G5xyTJyUBgAbQAFEbIxGxM8QjI5LZN+cDIiLwUrO0yaMqrb3KCuiKMrUi2tLNDdCWXKQ8T86L+VlK2pKEDAgrgXS4AJgQERgZCipIQzTEywZLqaoaKVhMBqKoyKsixNoJwABQl9CqqepAg10pgxIgrvZJFlklRCt7AoCKSAPw7BjFjI167wEqgPhAAM00CB27JQBT6LytFNgosWFpwLA4mhAQlB2SEAuIZ9kWrAqtv9aUTciJ2CE4Xoe0kh+W1aeaWkNRETCt+VKKkbbv+r6FME3T09Nj7EM9TdjaYehTmgj141e3pc6iOaXa7+KUZSy581EIA7sFnoPQVEHJytCMWGqJ7B2Dce9qm7Nqq+B7klb2+z0yfLo/9/G8PxweH8eA0E5H2bt9549T8d0wifZx+HT+QY7tw+3ucdL9dXz/8avvfv2v+8ifPn338av3Dw93nz7d7W+uReA05q+/voVWx+NT7xlqklG7YdCxfbXrGsK//Nu/HuI/4dCmu1MHQSt8/PixdXg8n99/eNdaq2l2zo3TCfDK8gqECGp0QmgSoeTb293ypV/0eqf8wv75+bO3kvNPPH5ufPI77cK
b83/e9xL9Cfr2/tTB3B8lyNuW7vbI8SKjteVCNyeLmQBAK9Zaq6h5o/M4Gm0jyJLzdITOuTrXbY+zTzAZZWQKIXTD8P79++vbG2Mg3HIgAKBNn5XWa2utnI6PBvbrus57FiFE/HT3YB0HVVBqq2U+Z5lTk9ryDKpwdQXXhx4RW84EEDrwHpHYUkVjyXNL85xKbQ3Z7k4BN9Ij73mpZFgUSwuyvLWGwJa3tYdACEQEWx0I1SF1Icbo2Rksoi1UIxocknOoKi1pqUlbCyGAUi0FFXyglIpxSSuyd84FH0LX973HGGMMIVhzuvPehu7SPcKLKt3lKsC3icovzKjLc/Siae9HptBl8vztJ8BGpHLRDwpbJm2tn739zMvQbfscCxcs1fmqZrmcf5mtWxPOl/e4feZ2ndv1fin5j28Qg9vlbX+6vOv11i7SAOtE+pHCxNuvtse5MSVZLXEjMLO7NMlFi/yGYbi+vna/vSeq8jLSfTvIz9+yBoSILy7jsq7JKwOOc465orksC4S76io5+fg47jryBLWKYy2lQO/Gcby9PpxOJxfD8Xh+9+7d4939OI4DdTafZVGizn3YdV13f3y6vr4+Ho/9boh+OJ/P73dDrTWntGwgRqmjavBOWH0FvBi3V3H/ZyfA3w/48+jt/fmrel8650de3JIqiIurJdw3tQAAgABJREFUvnImsZXfq0ittaS8kBrDQrABspxp/x3LvMUKpidpRrG0ent7+/Hrr29vb31cmtbneXZsaxtbLnme53kuJYu0fojTeUwpdT4QQU7T8enp6fHx6f6pNK3qFFytMpZ0PM9zarlCKRAj3NzsDvuulanW7AhCpGHYI1GuTQFTKk9jUvaqSkzEjOxIYVOVs8Yg2z6cc4436qxmctRb5csgDKpiHQRMFJ0fou8iR8cxOO+YUBGXng8iNIleXT+ckEBVawHkWmtKqdYa+66Lgwshxrjb7RwsYgtd14UYvfem2/Jqz7W2EyRaYStMxOtGzNar8PbJwyo9IxfYXXq53W/7Bb1Mgb795fL87cxNVRHW4tmXpuIaklkJGbYufkS2jd6CxQ39b8augSooXzLGXZg9XMuP21cgEagi05L2RDS5VVF4kcRGVLmwjaq4Ni8+/4KLfuF2Algroo3WKry17My0XDM+52CWXO6lk7ENS1tBYYvevQgihhBKy2gyzrpwdPV9f319HWMkmtv6pc+p3TcHrUNhI3B500RQK3ivqg1RnVtOCyH40lSt5KFoQG5QkRYjnZ6krPX+m11PRN7FnPOUk4tdzpnIWTvj+XweSGKMsDIBtdZcDCa0eXNz85+/+fU4HsKuH8fxPUDNJWttrdWUQwjsnE2DnHMI4XKW4lpf/+x4bgXCvx82Gf/4SM4/s/H78at55ZK/9Ya2aSGiW5RzWcyLMa58hAqiRh9lK0FqQwWpTVUJcOHLXykcVa2qNxuxUAjx3bt3Hz9+JKJxmpa15L0jFZFSaprnaZqKsRApBOczoEMKnkHb8XS6u7s7Ho8pA5ATYAEuVeappNRyhlSgD3B15boYoYnURgpMFPuu6/u5lJIqkKvSSml9Nyirsif2QAiy7CCqGnxQ1VYrwEJOpohSW63VAdu1GZsLiDKRAERHMXoCZFR26gOHwCFSCC4EH4OL3jESqNRaVU1i1Dx4YeZSsunOlyqK3HVdvxuI3NLvzF3XLQ7yRtJvSJatvGeJi+W/hIYX3SAbRmYp8MXkie3tX5pmb/NFb20evNxnL3++XnUXYdnl5LwM9T6/WV/c7+WZF418z3cCZlcIZQVE4trGtoISjY8GEBXJvcxcESKCmnruM3PN27t7FerZVRk9lipejOclk9bl689Lkt4MvjGPG4mLrLw2RJTHtAS3L3ttvfcLyGqB8aJBLz/v6epqKOztuBhLQGhrsx0RheDs9xBCKA2Rc82qwACtNcWNckEA4Hg+PZ6O33y8CbEvIo5gHMevv/76f/2v//UPv/zHH3744fr2/Zz
TOI5d15nRWqSyzsE5F70n55j5fDztrg7keDqdTWmWyJVSF25So9QxEiKA9TFdGr/nmXnZH/mnTjb+pY7f73r+THp7f9mA71WM/1nLt63treQPa35jmlJJ2Wr7245jCYqaiyFcGKm1No+jQkNCQCmlTGmc84SoXRd++Y+/fPfuhh3mnFSqddoysxZoLeeU0jTnlKQ2RGWiNJ2lFe+ImXJO5+PTdD5pq0q9AFbRlNuYW861taXf7uqqu73eg9RxPjlS0z3wsSfvpbYitdQ6pVIEBnLBu2aUGyKtKSo4YnILiMDulJiJqKpamoXILygBRAKkpXUBvOe+j1CLtIokzOADEjTnfde7Yei6LnhPosYUTF3f1VbOT0dtNXhfq9zfPwIAs/fRWWAHQLbIfRd9t9i8S8Loyw7rrXuBiIDdxnKJRl/GTETS9BkXcuEs2adZNW6J+cwC2Z9FYYFM/liZGS8S469+bj1wcMFyvvUbvD1UbTe27NxzaMjkLxixn83eYg6ZYKUaAbLi5XIjAHApCEzkVFGbIhlDi1qv4Rok0ssltkTPr+z05Xp5NQjrPguX1w8AaJzZq2W6XIyXn3PpRlhofOHc0CVwf8kht2bCjRYiK+kCSVFVBSuLwgUP9atdQ1VpxcgCgEOo9thFiKjrQ85ZpHrfO1e8j9PToyqEQDmLoohI0Uoemsr41D6FH/7pF1+72J3H467j2jT4rouDrqrowfkGZS65z9ny9iLScjmfz4frqymlq6ur4/l8Pp/ff/zw+Ph4dXUVhuCJLeJlZhHwLpZSQAkQnvmEdHnKX/Du/n68OP4kfXt/TuP3pau5jPEv19JnPfHLk62ZeltpunIKqxAq1NpaqdCk5lJSrrWiKAE651DbuLrAlt40OgZj6P/2229bayvT2O76+trEE6C2Nuc8pzInaW2FiIvU5hyb2urx8fHu7m4cCwAIdaWUac7TnKeiuQIAEMFhD4f9EIObx6nlGns/dME554IvtZamTeQ0p9nwKCI+OhEptZViCNPFtb/YX8hYg7W21hYK0aW2p897HAEEj96BADJQ8BgDeU8oJXgaOtf1PgZyTKKo3hujjZSa5yStqMg8z9OUYoyh6/u+j93A7Jk8B29sTLZBWKi3WT57PEq4pTUXws+N+RNxgTD86HEZbMHlnn5RP9u2klfh3aVn/Sq3uU0nYnplM7b+gbc1PLgwHq+qhs7x5cmXk/ZFqEfPgB28qLNtJ68mwRCkSy+KffDzcgDzZ15zll5+1JdqSLAGjojblmyvXz6Iz3RZWPyGWwx38ZmXaRhmthIbr2GuWb5SSq0AAZgZ10S69V189rkbKNQ6LhEARQGBluZUqLWGEEyFo7aGqOwwRt9aI4LY+ZzTxvjS99BaA4IppcenUz/syjydp/nq6uqH+4evf/HNp0+fdrvd6XTq+97gQpYT6vveLs8ITnPOQ9+P4zifR7ltqeTxfN5fD86FeR5Xkodql7GO//p0UEDBCOS2Z335jP7Wo72fez0/Xsv88+nt/UUCvlfv+vHsE678zuaaLYRApWpbPPSayzzmlBIIWFV5AW4sm+NSsBDVJpItg+Hczbvbd+/emQFjwBjiEDsTcJmnCUtLU87TLK05ArOsqVaR5plV5Hw83v1wfzqW1sA5qMBzrqepjrNKAyVgRmR89/7WMdScEKSLLoYQYxy6WAmP41hFlWicSxPo9p6DBya82Kn1AhJCzB6RGZSedyvLLNHaVm18viAaPSAqSAOtMfJ+P+z64Fg7F3e7uN8NXWBAba0oiHV3n0/HcRylNhE4Hcd5nsl5IB52h91uR+QAIIQQh9770HVd13U+Bh8DB0/ekWNihi01iQiG8ljJqYkWgff1vi7jque9GJRAFRVAdYmHFHBDbF62jsuas/tybe9VfvLZzKy740UVCQFwEbO5GNztFUS8TBIiMAJuLFlrchKtqdF0l+xRCqil717M7DXiUXuYvEixwnbDpploUfNSGLtcOF9gA8E
voFFMWVAX3TsLGY3s1K4ftpWoBG9yy7bQ7IyNIBAuAmXvfavZIC2LRor31v1dFXBjtRY1CRFtYiSzzxe5FKdRRWCzEuv1MyuiNimALsbeKvOw6rFY02oIATEpAKCwdwRwnvJh54Hku/sf/sc//XI4XD18Oh/PU2n6z//4y/v7xxC64/HoiDhiSmn03sXQrygnRDTMamut67qUs4V6D3f3/tYNQ6cNQNC8YR66V8Muzy6O0Mvn9aXQ/L/tsRSa/0Sf/scKIv/oQedbD+jVX1+I7RFZix4igmhJaTqPVqszPCcDgqrp7RmB8hYybqv0cDh8+PAh5+y9f/fu3YcPH0II0zRN0yQieU7WfgeroG1rLaU0np7GcTyfz8en0ziOiND3tN/3RVxumIvWAg2AGZ1zwbubqwO0mqbRE+/6ITr27Pq+r4BzLlUAHQsAeRgO+27oyW0936utuMgmbaHV9irAM7Eh2OaogAo+AEFrNaO0GNxuiF3vHOn+0F3t+64Lnklqqzm3XFprDuH0dDw+PgEAEU3TlHPpugHJdd0QYw/I0oCZ+36wKsjGa2yXtMV8r57dqwLGFlcZJuJLk+SVJ/RZx+htye3V+ZfnXFq+tx73lozVNwd8Dhf6HMZdTE69aHTb3n5ZkLYT5GWI9WY5EALh0n1/edhe/FqA8EcG8Mc96y+ttcsxuXAq9PKOtsVoa8r8SFir79bQaUcpRS6+aIv2Pn8xuvwkfX7LSgu+fBcsVLSr2jsIO1QAIqNwgw0Cmms9HiGEIEiffrhvCq7rhPzj0xGJU2nv37+3dvtpmkwvZRxH2y4W9LJqmubgXJ5TF6I2OT489iEej8eU0sJ+qARAqvClBvwXQf/nJvnfj+34YrT3O0fqR+YxfNks/d4P4Isz+Hd90avVLm1BQiMiIm+rawv1dG3UxaXJtNbaai6tgve+7yGlx7vHO8ceFaRUAPDe15RLTYzUtJaFTrO8+3D7D//wD/v9PqVJpLamJYNKRUSVWktKKdVjRtTofGut1VJKAZHgSGM/juPT03EcZ+S466KITKk+PJ3neZ5mqQ08QQhhtxti9PcP36MKaQOlfhhurw6EOk3pYTxRiKW20zi5EHznkZwiImDooiJN06StGoCzlOJjtLKlKpK3KnqpuaQZeo/Rh5ISNEFARiQAR9B3waQn2EEXXBeDYOuid4zBkWdqtcwpqbbow+nxwTMedj2AzrkCgAveeb87XBH52pS8d86hc7av+Rhc8FsntW4IW2al1YQQbjEEAsMqh+R9tLwfIsqGHUQkZFjjD9ECsDadwyrLjlDXAici6sVW8iNciHjRpbDtPnSZZtg8qpcTdSUmEREx0ZzlQ4CZNuFAa4SwvxIAbZg9u3wFq02uH4tASwV2VU5HAKaWKgASOQABEAXL5zvja90Y+Jacp77wKi5Xk5ro1KvYUoHQwfMd0sVtLoGjxeKbma/2NLe0Na4SKCVvZg8vGLQ3gWUGnefJ+9h1nQV8kWQqAtxUtbbGDNM08SqZBIjWiaiqTQXRgMxAgCBNFYjBEVdo01Rvbvvz+fz+qw9EWJucTqfQdwDADISutYYIh8MhhFDz1MUgLY9z8g7TrP+/f/m3/+1X3x6ubv71X/7z66/17u7+arcLvmulNoWmkuZ8OBweHx/xoF9//fXDw0NrzT6/67rj+aStEYTvv/++67r7H+53/S76kOfUD1etle9/+92HDx9Ky4ioZGBa2vau4PmVe3SZJf7px5f31T+tot/PT8b+zM9ZCWb/mBf3cy/6j/U5v9+XvnLPLTJ45b/XWudxEpFd3yPiw8PDeDxFH1Ch5bJEaYAiUlI2lVrjon3//v233347DENK6Xg8wkoNZcIJW8OA9eJYcaK1BmjAzlZKmec0jnNOAuiZgqibx2Y6f9Yq3ve+6yIRSM0ESqDec9cFozJpKimlhlibpNpSrqnkJBURfexEwWqWhj7dhsKYQs0z2ORgXoYsL2jsgwNSQG3BuS5458g
56LqwaqWKtIraHLFpEEprwfPQx7UJiWPo+36HwMwO3XMQagwg/PLYEBy6Iji241KE6NXxKlC7/O8FTgS2Z7QNyLbzvoo8fmReXdo8xM/04cGXnb/tRj779ld39BPDrFdz/uK/Rv5Cb8cBkY0+7fcoKOAXjt95/uXIbPv1q+eLiJfMdpdP00RCLPkNK3jp1fBcXsbrCFeNXxy8d4iwTQDvPTM0qSaSfnV1QNRS8rCL8zyWUqycrAjF/CSCOZVPPzzGYbi6Hu4eHolZEAy66b23CO/x8dGSRibDaRINJWUQIUAQ1da0CSo8PZ7Op6nWllIZz7P3cb8/fPbpb9mFt7Pi79Heq+P3qe19aTH83EXyx/qc3+/i7bicQGb2tnMsqXI+npi9Ya7GcUwptdYIGiiWUqQ1772aPGbOROR7H6S76rpf/OIX19fXOWeste/7rSzUWtMmrVapTZvgWpM3zkORlktJKZ2naZxSbUgcXNgButM53d2XOWOtlQi6noehj9GD5Fwys/rAQxd3w9B1AZjyXOecJPhc85jTVGpqEBwCWQKz1ipbRxQRESAjpVqtTkbsmNn0QkXEajW0SDQsXjMx9iHancU+DLsuBh89Rxd2fRi6jpFqyVbAL6XVlKbj0zDs0TuD/ABgCJ0PHTrHLrrgiYzDOnS+G+KwNC14h45p+0ckcJEwNIfeknYWyV1utRayfCb+QABwF9FYg4vdBAHgBbYTLqAuX5pXl997abo+e/KrdOh6vtO1h3q9TpDnQuDWPagIjAj0Qvfp+fNX7jUABEF4hrESrVQNCEvvmr39WYkX1AqRCEBLcuLiY7fv+Ow4yFtPegsZjSHTGGYQVHHrq1gHZFmScEEdvtzEepRawMwevGj5qLVaeRQvihSIanXZFxNhjbdJAUTJKoyLAiCyQ0CotYhoztky6q21/X7/3XefQohnmHLO+/3heH5EHPcH9t4TQWlqVGRTzr/5/tPtzT9fv3v32//4968/fiWiwXsiYsAE4L0/n8/Wh3A+n6MPc6kppWchLYBaaysFRMpc0jQNXQdN53HaDZ3vY87ZkDoWxFlGoYGoVW2/sNf9/YCV1cv93KF5a5P+5ozf22hvdeVXtxEdABgz7DRNv/jFu8DuP/7jN4/3D0PXeXJP90/exSUCqC2nNM+zqhrs8Pr6+urqahgG29mD9/v9fhpHC/Usutp+aWmpPInUUqslGHOt59MEFPpdh+AV/NM53z+Mn+6hucwMMbq+77xn1FKltpocuz52+/2u74Ii5JxzLU0VvSspz6k0BRfYxYBMrUqMvchkjrNzLvpg3nQpuqQNFz9gSZhsEDtL42001MzGtEJ97A7DLkYfY9h14Wo/9MGDVim1FSGFmvLpNEqeWwgikqZZag39Pvadcz50g/eevQMA1KVl0JJXny9A4vbCYvZwAbP82ITRF4zkb1oOvoz+1pdQz985ky+v4dlkvrF2l+cs/1199suvVv1M+/zlG199hVhL2edWECKu9SETFxfLVlx4CZsdvey3+6nrEb/w2mVK9vLWlrTwq1BYX2glvir4GUKX1oGy+VlKrQ0EABZPjohQ5bmmuxlRwMV1MykJVb0M+DefAxHGcVzYG0RqLTnPWoslnEspIbh5rm0Ado49S2ulqUNogHPKD0/Hm/0eAB8envouCNEwDNPpbBCtx/sHRDTi6a8/fsXMU5prytJ1jogAS8rzPIcQ4lUsuUmDELqmtRYJ3aITqYTW77KZ+Vez63Ju/7HycJ9/wl8+/joDzd8TyflfwObZsS2bjRWCmUGprMcwDM65kotp4sTgpC3epX3aRi/Sx3B9uGqerm4Ow9DnnKZxJCJiV2piRlUAaW1RbjDIdc1TWbJ2gjmXeZ5rbVUEnY/djjBOY324Hx8fT8djUwAUiNHvhhhDAK215ibJEURPfR92u95IHErOpQo573ys7ViaInO/P4S4I3K5lm4XZQaLw4Lz3nsAUUFHHgDMMKuqNFhAPTZ6AAS
ISCKCqgzYahUR58IwDLvdboi+CzzELvpAAKUs1l0a1CqttMNuYMZSi6oawXQMPTAP+x2ztywWKjD7hUKaCfh1HlJVickIM5UWzbx1+39OML5EIb74fdVBwE0FCbY6nJGxmb6usWvST5qErwwbrkCJbeqtW8CLTBRalKNgyeNNgUHX/rb103ip3yHqKicIa61RcInmFhDq0optBCxrc/brJYAbrlV1bV6Tt3f6eeP35eMCPfSiaeF5zC1KeWu/YSH1RgBQIz9aa3uXByJsb94SM8zkndQVKYqIACoC/BLDiYBWk3zlChit2kLP78B7LyLzPFrXqSFF+77/dH/sYs/cTqdxdzgcp+OU6w1ijLHm2fTYgZwL/P0P91f7w7v3H56Ox68+3haAq6ur+Txu0vMAYO2GH96977pumiZbdD4GUyCax8mz29/s5jFN03QdD4A8TRMH8t7nmre1YOZ+oZx5Y+T+Og3PX/b4/aM9+PMavz/Ww3vlHT+7kIiwEozVIkulDeDdu3fjOE6ns9EgpTGlKYUQ5ikbR0nJyWDHh/1ut9tpdKawZS/GGKXWh4eHXb9o7FlyfyvmlVJ8YJSlOzDnXEURuet3TH6a69398fvvHs6jIkI3OFHt+hBjYIJSS63ZO7WG8X6IsfMWR+ZSGigTWbtdU+hiGIbBh74tDPtqdt16yIjIII/kAqw8/a21VnUVg33ezbdNmYjmOYfghm7Y90Pf9330XUchBNU2p5LTLKVB1VqrNvXe99HlVHJKIuJCXLFFzrTml7LiSrOiKx79VQVIABAv+EdenPFjiKrlocOFfXoZS731ivALXGU/8vmXH3KRn3wR0Fz2k+kFBkHWu3m+2jffaKbuRVXyxyOwi3aLi09eQj1VM/yfLwj9lPX+4l0vGjNeXMXFx74Yk89+xSt2ze0nM0srTdrCybMUkCXGGEsRpaqMrZoPIC8/dnuUhkPWi3ukdQgNHGt40bnMMUarkdda9/v9/dPRBxbx05SIIDKUAoJgyXmQ3EoGIPY4zeM8z+/evfvtf5znKfu+60Psuu5pvsdVWaLlgoin0+n6+tpYn2rK3vu4lgDHccTzGELwT08hujjEnOs0pv79DboZ1lI0LJIp/OqpXQZ8f4/2Lo/fv2/vbzfge3Wsteugq56kmR8R8d4TQMtlnmfbv6wO14q01mgFUHnvh2HYD71zDkMwe8HMnh2s7IKbtdvw9GwNZw6bSJOSckq1VBVFtBhjGqcf7sZPn47jBIBEHEDpcOW894QgUlQlOIqdC8HtD0OMXsmCFELH0LSKTLnkVpHBEoaI6JgxumeO7BUsoGvP4jYmlsQF0aWrS0EuKKkQgBDnCfqed7udqQL1XegieuekpZRSTskpisg85daadwHalFJKKRGyd8G50HVdNxycc7lKlYaC5Gm72gVVawgZQiBUA1u+fIJvZstlmvEizlsIV55XI5v9u5gJ1sumBu59lmxdAiH8IrvK55f3Np6vzimlvLJtS5vaEtrShSl6jo0Q2Pa4Ld+r9LuvYRsOXVN4CotqoqoubJmA661vVwuwRsBwES9u4eOPLqoXcR4AqJpn+ZmusucrvEj0yqU5v6ibMnGrudVqursbzmVRXVcQRV052+y7L0064gLqtbtYW21tRhkHLwKodUq0Jmb2LD2DSPv9vlYjAuWU0u3t9Xx6tCb6LkRtfCo55UzCXYjH03jou93u8Pj4uO/iOI67frg/fzqfz/thJyLamvf+/v7eOjGsvaHbDSGE6P04zzWXp6enDx8+lJyPj0/e3wKjbU1mno3iT0E3vr7L8fzrNDl/Dccf2rf3pdn/x3Iu/nRJzldoqI3X33ryFnEs70+nk+Ul0jTP82wlblOVhLU/L4RwOByMT3YBZZUCAPM8393dTdO02+02wCSsqVT7Rkt3WBRosY4d05ge7p/u7o7TBMTQdQOhy7mauph5eY4gBBdjjDF2XTAXFRHZkwseEWtrKaVWbVWwcYuSc33fbyO
wKdLZOllyki9jkVfb0+V41grGpRJXnmhv1XvmzYiKQErJZOjNyVVtIQQTS7u+vv7qq6+An0HYyM+XdPnzbdgHX+i0++nT5hX+Uy+QnG9hnD+yj+gXDstctTdHfXPYi6++63KWXkZsdnwJufo7J//bX16U1v4Ako4vjcNnv+Xt769u7fL37ee27rb/quo4lnmWnGsppcmyyuhzV/5q/rw9oVZJaaF92cRd53k+n899H42JfrfbzXO+ubnB1TEKIfguAlBKaZrnruvO5/M4jsMwPD09qerT05ORDZVSbKuxZXI8Ho/Ho3POdh5ocrkXpZS6rmPmcZxrrQ7J8J/bgtWXrcaffXB/t3+vDvelEfmR/qSfdbQ3vBWvTtjWwXJB9EV/8PM3sJ7/arkqPGfwrWfJFpCKba+rY4/EjMwwjWkYBiQcz0/TlBA5xs65MJU5BJIqjiD2w+Onu9Pdp4PnGHE6n1oqfQyHq32MrrSSctrBDgu0lussvotX3T7n/HT3lGtZxX1UwPSapTU5HX8QsStkqTHNAuxjOMzp9Jvvnu4eYDc4F/xpPDvvP/7yHcG9gooWRmWE4GIfuO+7Xb8HoNKkVUgFSgah4PtYniYfDh1z3/eMkYCcgBPFWvP5DABd1yFTlVYVchOniIilgKSmAAzOoTqspTVo0nmfW641d44YSdPY7YE4cczDHocIJT1hwdh3dR7b+XQ1dF2I/3b3b0/Hu12/b43vz5XdPkbyvnfd4erwvhsOIsTkmSE6cM51ITruyHU+9sSe2FtXtYrxRzlEIvQIRBgIScFU3wiUZOHPvJx1DS6UEIjosgFjoR6+xALYRKI15lrToUvn3GR4yCXXC7CU71ba4qaqTVprTUw/rzQAEEKLf6pKBZMdYDBYrAIiO0UAJMSGYKwrCCDLVVhOThBBtx5CVCBUBKe8XY8goEmtqqJnAG1QRVHF4icAAMcVwLhGFVBUlRRVrQGMECClNM+jVV5DCDKPiMjk3ZL9VmJH3tdm9SRCRLECmpUxNWwbMV2aMV3SmxuQxAbcyfKYtkCPlpql2zrHW2sCTRQVSGpC4N1wNZ6PzP44V+ChYn9MIMyIXhUcSK1VqhBAIEAEE3YvWgFImRgp1YwgDFCkOkBySASg9XRq+30/5TSX8vHj+++/v8ul7Yab77//4Z//+Z+//+GHyL6U5qLbdeywIkKaxuuvPrSUdn04gvTe7Xo/T2PvKee8i/tvf/mru6fzro9j1a+vvjniMT3lEILzvtUW4mGa5WksFHbs3VRAJ/FuFzyO4xgLP/36/h//x6/mPD98f//NL7+WVqbzqT90DECEImgCvJ6YkQuLvnGSLvv2Xu3DX9pgv7zx/jy78FIK7HcfP9/f4p/5OQB/OKTlD3z9T328dX8+6xC99Uy3xiDvveE5SynAuDlW8zwj4jAMFt9skVMDNcC9TTVz5HUBWK8RQK055zmltlA8OxHNuSHysN+lpE9PT7/5zXettRCgScVGu10f+845l8aEiI7Re9dFPwzDbtd3XYeIYgXCNVa7TFpaWOlCsBHYAppLz3cRLgCvalKfpQmscUnbBmoZRgVkZSXV5oPrQvTsENUTq5aUppoSALRSjylP02T1DABAx4rI7Nn7YRj63RBjVCAjtdpaET4b2L0N8pZfFjLlpfUbLta2ftnrehFwvPnMWturiGoBRGgDIFDRFTKKqoqgKQNAg7UDchFpWo7SpKkoQCOCRQtp+VhSIkDSlbgMXwfWeok+xeftbL33z68yfMnI9flDCWDJQNRaN5isKbqZeIgHmz9CAAJkLpqz7UNJsSmwtqZLjhBobUrHV2w1F/p5Px43Xz4dvWjlt3dtU+Lyc15u4mtGFJbuiOVPqAjbRS0lBoTtksDGMoQltd6aTNM0DIPlY2zJW8nZ8kFEdDweYwRmLil7ZittmMfQUtquUHRhCyqlBLdw65RSwBQEEQy/HcJSVjeVok3ncp7n89OxgQC5nLNiba1pW/wMXBt
RlnQ6/SQzo/+96cr+HH17f7Yq3Y9c50USAODCFdqyBLDBolpDxE3mw5pspilJrkqaczYDkVLqQhyGvfceFtVQZHZNBZDIsaqWpTG92FS2OEOb1FzSNKecWqmGtqsCqTTLex6P5++///77T3m/g77nnJtq6/td7GNrVQGI1DnX9/1+1+92u76PIYTSams1pZSLiIoANNWtI957H2N0IW5ZO8vVMD5D3SxXCeJFRBWZuUitYi3tYHy9oG3BToIwsGPyDvZDf7UfomcmJAJtUHIuKSFqLvPp6ZjmOYbgA5eSpKlzRN71u2G33/e7nQs+V8C18W6BbjLh2vH+pawUAKA2QieigKYngUu0txSpnufAyp8CW9+CoVkIXlA8b/uxwQ3IRAUtvWaZA2jWSQfPFSMFwtaKqhrHis0jXXoBfWstSTWhNvSBPCIjewebcDsAAq2MWc9UztsGps/2TdcK2aIHpC9pLb+UU4FnTOlGML2sBPtFRJeqIbL30fLSteYeqypUZghKjhfGzdo4eAUEQlABFFwpsIk8mDSVrm7Es9S8ktXTNtE++/N6j68uW9cJvFUHzCZv0+FldnR9pus4WL9CA11VJJY5YZycy3zQrZINS30gsKp6YoF6Po8xxqHfl1KYcZ7Hrt+N4ziOY63SdcN4Ol/te2aepqm/uWk1ee9bmp1zkrOq5lpTKZ4BVgN2uNqFEKzSD4jOOSQspUzTZEUK2yiY2RE5IkTMOT89PYU+BnA5Z3KL20pKyGQAWFUVqaoN/O+IxvSyCfW/q+X72QoMb0/4cSOHv+v8n3j83sXCS5sHLx3Di/xVM6KyDW+yunXCzFIbiorIOKd5nEjBe89IWx1r3eoAEWuTpk1KhjXYMrNnKTGrvZdSWi4mideUa5FaBYBa06fH093d/cNDIgAiYO8BAJmItdaSc77qAyJG77uuW8RXgyciKVJqzbmW2oTQJDGNReKS0NLonayKYBjOS2k6733NBpdQdAzVQhZBBJPcBABSZQBC8I6id++v07ubq/2uZwSSxqgKWkpBacw4nfPpdCKi/X5v3nHJcjhcd32/O+zj0JPjBti0ATFitV4Ocvy2b+HNBNgivKbPlFrw41HE24BDVrjfK7O3vWg+kaEeCVFVAMQShIL2U6BBbXl1oeqaElAAVGwi0KCJIjh2VtEhT7zEEJa2XHq4kam9+P7PXj++ifAuB4cBF3ZKBcLXH4FL/4PAwmxpuP3G7GEhKsNAEUxWp2lKZ2mNiKC0EIKlU6UJSANCJTZTBIpECEitVFIQ0O1nQyAFsZ9vGFAvwSavinyvyhZL7G91atTLAVnGZNXhW23eZgkVLyinEZFARAWgIYDFr4I+sHOMClJrNUTVPDcDrJVSYoyllN3BxRgtr9P3/TRNzAyKc0rM3AqGEM/TrIIhBKg1pZS8c33H3jWVaZpk31tVeyvoOl6otIdhsOlWV7VLIgoxtlbmeY4xMJGURs61potKzCaguPV3vuGTezlzXhi8/7Y1vz9T395fJOBblsnLNMirF7dMYPBLv9pm9ow0CAqoonOuljKOc0pliF2M0Q9he++C2SZUUEGqOc8ls6kRkWOCVpUXsEyrqbbcQNChQ4Ix5XnKqsjs5lR/+HR//5ARYb8HVWiSur5zzqnUUidV7boOAByRcXd573GBoixInCYKyIrYVKuAZ9ryKiCVVoyctIqgBqxhy7ctLnQzOMuWZlFCcmpfBKpIygqOYRfdMHS/+Gb34ebQLxROjUCt2kCoBJhz1tp2w7Dr4jjOZU4NvO/i1c314fo29r0giIhtz0iEq5ro1rT3NtVJuiZytIEYx0ZDQBBBIrRa1+bJbiu8CVzurWvrgsJrs7csDLZ9QUBAtKmC2jVuVlNUQEFUoKECaNWF+bqaFjGggNIZvVHAuhAoeBc8B4/EiGRfbKBDUps/RPpswm0LW7UXtjjv89CMNbx5fRCAXKZCTYzJzOxy3wpAhChaRcAQgd5HEUFwKT1IqaU
18E2lMTsCVCqtIgIjEzpGcugYmQhdFRETulsqrotL8sLEXfQsLiwqut3PKodr0EpUQF0kY+2t23kX2U4RYQZsz0b++aMUDb6La+ejcazqEnE+l3WZ2YcwzU+tVe+d987nBk1EKzt0zqWUWil9jK0BMxBBjH7bBKwFlr1XglzL1dDl8TznNM4Uog8+qEIudZrnvu/7YcilGOuTAbhSLaUUXzwioluEHpnZITC7mouIqGAphSLN89zvOhEiQAAkwmcA9htagFfG7++hHvwh0R78NCOHP+H8n3L8IQ/p0s7hm74TC/IoLkhoi/w2Kuo85ZaLYwbRPM2IOPS9cy74sBFaGuGvNFHVqpxbba0pERN6dqrKjcXa8qY5z+mZBrPJ6TinlIg4eBzP8+NjLgWGARR5HBs09Z6853meAVrXRTNFhng2UhiRmmuZU0q1NJEGgICiaG2+zi9aCrqqm6rCJabUYGOqWsVySqgry8XSw4u4dNG1plJxtXnXV7t3Nze//Kbb9YMDxVZBWpWGrWotIK21VlMmoq7rLGlGRPFwOFzfHm7f7fZ7IK+mjgrUtCgh2H+ZkQmf5R/e1vnWTRQFgFGfbcOXHFi5qPldnvmiRggAooqAChw8iIqqxURbEUzQQgRB0aoCi+qUAICKtFaMfG772MrMjOzDwjoTIjIDmaGmLUUpqzSg0zV7+SITuMQ32zhcAlBfGT8FYEC9qO3RCwS/qau/CAOXrxIrXS9FX+8jkcMxVJNZzhVq85551X8FQmZGduiYfCAiIVQIC35mTQvbhUttZoNeFR2357I9X7xowwd4Yd62d11Q9shq9piUQF6GMqpbi+fLwqcyLqqslrFuIkq4eIcAqsrMMTpLP3ZdR1QAYBxPNzfvLK8zTVMIQXVRaJnGtBsCIcTYlVKY94jYms5znuMcnWdmcjpNkwGe+75PKeWckanve0Q0/Rbza2utNjdSSt77lktOKacEKC76PKWrqyuAitbF8VK2V1/28+jL3lP9HKnCf7fjz9e392cO+H5nkvPyr2YPtt8v68nj8ZRTqkVyrlLbELvdcFARY1gARUK27i7j8RpbgSZItEGQ7dsNFzOdzzUXQzzknGsp01wACJTPY346nUoFKyc1Be8BGQikSQKsweHQe0PHhei6PsQ+sKM8tWmaUkq1LWBFc58VAVaCSkagzTC0WvNspzGhd+y8r7VqSSXNCNHStRuoBwCIUaS1WrGBYwgE+y68u959/eH6422vrUItwAQqklNNM2iT2lopucxMBADH43Gapt3ucPXx66uba9/1yqwWSiITktbyXEpjGwRABkAGZDWu4IuDVEkVZenvesZ7yLPDf/nL5fa6vL6VeS8sH6mlJkHFhBObNlHQJkbfj00LmSkUsbqT6tL/LyKwZjgvLDURMSHbL+trTi1hvE5CIVoCHqmIa0/3qrGuqkhotGG4KO0983MBgBIqbKCYdeYvg/JiMCyOUpDl83WDgSxXBiCbwA0z7fphHqdWj7U2LTl6dsTGJwCIyg6IyDGkhEyIBH7ArdvyMketSzUU18DtMgoHw2VcxKxLm79pAiqo6HMWz6YuPxdllzUrK65mPURhQ35sA2PlfefJwKFSLc8JItAAg/NCrE0QKQRvcJKbw9VEU3A+pXR1tY8egqPz8fThw/vWQKQ6506n02H/gUB3u53kqUgz/qA55+Pp5In3+yHEfp6eeBqRyQXvgp/SrPNs7Ux5mq0bChGtlQVUT8fj9fV1qSWlNM8zMkjTaUwlN0VAEhXLFawI5It4F17iof4e6m3HHyoz+3ODxb/UcL/63m1j2so8m+986VSKSJrnPKfWtOWCon3XdTGaEp733rvgnKuydFy11sZxdM51PhCzqqZWpVRpLeVcTFqvVFJorc3TNE2TQoyhV9XzfJzGhghEUKsoYojEzimUkpuCRh/YAQMTUQjBtOgMEjZNU261CQgikhMgARAgpaW96dKbtprf5gRsPXaqWmsNvkMEJAU
hWEVwSKGJaAMVYIJAEANd9+H2sNtFmqYitSF7Uikl5zShgraWc665eB9b09NpLKV+eL8/3Fz7flDAUgWYWAmYQRVXG3AJ5vzxmbM+qWUzJFoqbVZJYsClnkRICk0aAwohAyohKahV5mxj1EZKaO34KqCktaigagUx4AOhCiJDTU1RpGqDJkUaiFYj2QF55l4hi1mtSLyqA6qRJTMBAFthcgGyWO/AcpsvA9Y1VLNw8CIkwgWY3r40PgCwGUO82A1fb4Xr62jCPwBwIX4UYvTOoWhOGUvByoLkiN0QERhaVWKpBMRiPOCFFoY/fkarCoJZKfNc1idttdVnn+PyFjbipO06F1V690La6aXb+np62EcLgAC4i4ElkMBO0bWSBUAMsdyWSt4mosLe5awAk5E3xQ7np3Q4HGIM3vu7u8zMAIIKhDKeTohMhH1PY8mlFMtXlFKmaT6H0O/7GOP5WFNK1uRqGBYjvHbO5Zwb6LYSLWs6j9P14UqbaBXJIh0wsGWLoIhlymmdKPYkL1fHZ/e9P8H++jd2PJu9V8HQtoA/GyS9Pb6UXPq5Onk/95lsFstylVuMv+XHXuWycsm6EuDaQmJm48SzttDW2tXVMI7j4XA4Ho8l1avd1ffffz+dx6+++qaP3f39fSnlandIKY3jSEQuRBGwhlY37EMIhJRzbaWWUtI0pWkGgDwlEWD2ZU7H46mkzMzD7mCyk9M8G3IEGIhpHOXD1S4E9/T0AAK3N/vY+ZRSVry6urq+PvR9rDUbs8OSh1QEkdxaFREgEalNQnTffPXheDyXnPdXXWutERNgms77oRv2V1aiT/PUark67EvhUhKy++HuzgU/5RRjX1MeZzlE8ASQAQCud/37m/2hcypl13WttZpnENkNHWh7vH/YD8M4jiH2XTecp5RKfffu/dff/oM/XDvngUzMzytQFVGF3CozMy24mwWDQ2TOL74pxyKICqpsLcwkqgAkQEsOb63GKYIosGOD0crFX1EBqIEKgIiKghi7ISmKglYRadoUyeIikaaqkyqqNq1qGoqtFZBnzCGqIjzzaKecLXW2uBfMrgoBuujMjAlolYX+5lmuDxgQkGHtYQdAISIExpWMY7EEqJcOwubGoWzkOwZjWV8nsVVmg6arPfE+qiqu8MvtSoDczfW78Xg6Pz40aZWRBaIPkibnAzkWIBeijz2qlpIoOiUy+WNwa8CHINoQUd+02F8iMi6T1JeKVyoCIqhqdDWllC70Mcanp6dhtx/H8f7+fprmImy4aFU1rCYjtLY8Guo6x1RrVWi08Hg1YvSopcCclDl5H2/2/unpqbbqPDnidzd7IPzuu9/+8pe/+vd///ehj//yf/x///f/8//l//Z//3/cXtP333/3f/rnf/73f//3XR+XDnSkVosPodS6i7E1vr29+e63DyHMj0+n9+8/xqE/nk7o+ObmhrzbX19JqU9PT+/fv7dhqSm3WEIIITgi+vrj+zyPTcQR39/d3Xy4meesivf3Dx+//ui9f3i42+37YRhOp+N+v6+6gNIvE5uX++324qtl9ePb8k/Zh3/WPv+nRtP8+I243+/Nfz3B3Ntv/P2uYStNm2e3BYIO6fR0zHM6DLvg/PHpaRzHq6sr42ExtOc8z4bPNOeOAZXIbF7NeZqmeZwIsLWmIqXUaZ5qrVbGGud5HMfzecoZQIEIgFBV+x5rzYhtGIaqudSkc2Hm26vbvu+tU7DWRZoZCFGwlppqVXTIrICAYH0Odi9mTkopInXFJ8Jlc+HaX6itVS3aWpGKIlBrLdK6CAAAArsBvn6//+bjzbur/dUutjKJaJOirRJSa0KABnsLvqso45xbk6vr29t3H1zsDGAoBsFfgfiL76JARJ7sARBfZFl/5KmtS7cBIBr2xP4EABe8yLiC/wzMsf3UpYdajbzZGhAEgCzMUiUEUGAAEUDVkusGt9uOrS3EqJOZPACgY1IqpdjdiYg2gdIUiqqWNjL7RTgQYFELBQS
y2t7ShwEAiEzUFlq4rZtiM0s/Ojhg1U9Ym991xY5cRgOIiAsmAtXaVNYRUwXvXPNW5ys5tVSxNXUBug6auBgAqdpEQoYmwEWRra9RtSE5IQUldAjISAICgKrIBvmBn7b7XQbBl0ma1ppVyL70RstkbBAYsnYKqYQe0WqrZvfB0FVGX8C86E0iYmk1pQQAMcYpp3meAaD3ywC21g6HA0gNIT49nn7x1buxFeecNHXO8bD3jpBgLtmn+XQ+vzsc5nkex9GyNaWU8zSZA22BpvU2bNSgDvk4zezdPM/X72+OT+f+IJ5DnvI8zuxda1pyq2HDD//YZPjSJvnfLf/5Y2bvr7x6d3m8wjL8HheAKzWXZWNUdZqm8/lMWUtpiMxECyFnFa2yMvdTrXVKcy5FkIgoTxmKWtRYa5Ui0mw7BWZfq85znqakCkQsgMfxPKd5ztAaeA8uMCKLyCUNUhN3Op0E9Oqwe//+1jnnGEXqwme9UAlDkValkfNqEHNC77v9fi8iteYQgiNIC1NMtTA3embU1NozC5eZ/7osaURoIqrgPLQZHMFhH775cHt7c9UHYC1NaspZpDJSA62l1JyJKJXGPmgrc569i+8+fLx+/xWQ04XRn+GCTVJVGZgYmK1jiR0xETl8vbmrKqEa7BMAQJuKbO14vGDWP7f46flBX74srcKaENOV7B8VmoA2gzooIrY1ydbSLCKW1jborMkW5vqc3vQe0DE2JYJSs4IQQmVamqyaKBYOXkVJmJgU0cwsAjR9ztaZlTPMfqmy1b1UFuul+hnSjM2r/8wAqD4r8SGgAQEVAbC1YuYQl9ZAXBFghOzIuaXgNE1aq7okOWnfg+6InTbNDcB5ABCciRwIKjcUJHTIAEBITkFWcK0AisGHtr5JvMi4bte/NjUKgCIIIrTVV7MsaK3VCttfXNfkSGUlUl/YYmCpBhAAQRMxChmBXJs2ZQRkx0jEQMxtainnWssw9KWU6TyK1t2eShFCyLleXd3kaSx5fnx8/KdffpMSE2qrmZlDFwgkBKgV5nm+f3r8sH/vgj8+Pp1Op48fP4YQijRBeHx8/PDhAyrM45S73uZ/F6JnLqUI6HQev/3lP9w9PSLT4d3VNM/jeQ5DhLXDHYzr4OVjfwVggQunYTsBLryKNzn2/5rHF83eWxv2+xm/nzuEf0gc+QzX/PlmbyPf28jxpml6enralS6wE+fPx1Mh6kP05MZxtG+pAimlOaVSihIzc0kVqxQmw/ghYhcjE0ltqppSSjk3Ee+cAkw5mdRq7EAFmMn7gIgiYJQNzAscAxGHYXj37p0RsoA+8zqqqgCWls09pOBzlVwqso99CCHM82zlBFVduqpVAcD6+XAFzlmlQVWQVGtDJhV1jpsAornz0nXw/ub6/bvbIXCrKUsGbLVWbdU5X6Wdz+eSjDCQc2lzzoC8O1wdrm99jLUIq+qCKScAUgED+AAAKjGgM7OHxCvI4dUzfU7gKCiuYasColYAwfjZ53vJMno5nZa05qZuI6vZaw2aPHcyixpEs85JRHKrpZS8toyIyJzrokfvmBwrO0EoKgCiTQokUtDaNFR0nohJtFEFZmTjg0NlInLgl/t9Tlcu925xqG6x2hIKfyEe/kxSCwAQDQKxaq2aYUMiEEFAoWekDAIIImkFEEAkIKeKtVbIRVTHnMwyse+UG3KjICbaJViVWVzjyuoaLX3VAIgiCNQQEaBt2/DPWqf2HK2cwcxFZKPA/eK2gLBRigd2iqCtmnUnXXsVUZtKKpkohhCMG7W1Zj1CpZTz+bzb7UIIT09PtprmeYoRx3Hc7XZXV1efvp9EpORGRK2oiFTVyKSi/W6XplFAj8fj3R1aPmkcx1KKQTqJ6Onp6fb2lgCNsN6w06UUz7Dr+vM8zfNsMLqcC5MnqqU0iynXLeJFNWebBq+qPDYOVg2FF/mSz7Aa/Vc9fmqS86854Hv1XPElB93P+hxTC7LfLXXZ+WAsTaoqTdERM7RSAbm
1lmqptTZRADKoCDQSaFKrrGwv9nOuNad0Pp/nktmxD2EueZpTrjAMLjhfq9TS1sIkmAFm1PF0qjV1Xffu5ubmcNNqtrtrrYk20SUeKaWQC8F3Qn4qs8kd+NhJq/M05jnpTqxOzojaGkhjBOte11Ycge312pqVTxCxtUqOa6oAQIDdAB/fHb76+P76augYsaXWtLZkvBIikHM2E0voQuhOp3Mu7XB9e/PhqzDsBZxgdUqoRKuUHQCAoSThuSLrjIpDAbW1NaH36kkhiFr34ZqWBBQCFHT4OY+tlfri9c3sUTUAZ7MckakNq2pdICpG2K/G8V9bTbm1lmtJKaXSSilNpakAOSIKsY9DH7roXEAiBQiepam2mmrT4qBk8IHJZ5jW9sS1W8M5Zq+7yMzMfiUK1ZWY03x2AHjuwcG37v3FSlRVXLrinss5DRoorO0QYKMMqAiKi3AhLF8NoqIiSsoKz5XyVmtW0VpM/SF2qNyEqhfA0KlkRARhUVZhUmZmQCxrc4pu/NELd4p8/vovH7rln+2xaEPV1sToh0yi8kfMXhElVZFFQMM5B4oNVFUZ1EgqbChUsZS2+n9pmiYA6EKMna/NnY+nXT90XWSmPM2eWAScc+fzeb/f33791W9/8599359OJx8w5wyqKaXoCaTtdjtHrNpSLr/5zW9+9atfHa6vnp6ejufT1dXVbrdj5uPDY0opOA8AJefqvXNuniZkvr65Gn87a2u//fV3H775GgShqVFa11qdd6qltVakzSXjCt6+tGdf2u6+dM5/bZsHPyXagz/M+P2po71LL+ayWvtzzZ7Ropt/V+viCoUQIsbf/vo3KaWb62toejqdQJSIxmkupRQjayJGxFZbKSVSB6KqsmGxzEQZ+PP0dAQA770SVmm5gSowYwhOJDepKtW5YLUW68kbx5kITMSViFKtlwW51lo1HLaIj845N+ZaSkH2se9CCDlPZrNV1SRLiNxCwEgLm/t2QimlNRXRlBK4UEpxIbYm1iS83+/fvbvd74fgCLVqqyqSa7Yxz62mlKoldVHnOY1TIhcO1zf7qxt2QQSc75fnsmQ4TeHarBuyKe0t6U1g3OpQLx63qeUttTSDK1jAh9oAlBu8RPrhRQPD5Z/sv7JGUaq69NtZhc/Yj7fcb13YdjTnWuuczOpJFbFGkf1+73yMQ98NOx+CCUqoKoWgtdUsKrW0qrVJKo4YVQkXGn5gcj74LoKrE1TvfQgLv6uqIuAFvmNVjMOl0te+YDbWWPF1CkREEW0IcRmApZhnNTd70zMNJlUV1WaUlYgEWForrWqrKJOqNkH0ASioMAChNbxy48bNsRMnbg0sFi5SUoO2Lo1zLwXQL67/MmDdUhS1VkfYpFmW3lbWj9T2RAQBmyrBIhsJC0i5oXPbx5o9ttKaKUtbykdV2TmTbk4phRiGYai1hhAsBZJzOp/PAMDsY3TjOHYaVNXKH1VAm/RdVFUAmcfpdKomXj3P8zzPVqfPOdsrEBQRU0qm1TBNk4txt9v1IWZtd3d377766MhPU/JDOE3Hec4DR5FWK1Sp0zR13fDZnfnt/vlK7/5VzPdf2/L97gaGv6GA7w/5XnMYzR7UmjeIyng8j+OISIScyjTPMwiq6ng+i6ogXeJfrPrDBGhtuU1SyiYvl0o+nU45S997clxanVMqDUIAZrYES87KtCxyZi6ltJKY8XDYX13deBdLbswLBrVZni3nJqooljnJrR7PY65td7g5HA4+xnF8EhF2hLTqpKOWkqyijiqt5M08l1JU2T6fTYttdbpvbq4+3Fy9u7nuggdpgEKgJc8cgvcetI3jOM+5SgPLQYF0Xbc73B4OV8y+NVUgxwxaXz45MpShIW4M0uIXHjWDQHz+cS+FSOPUlgoAC/uMZrhoZza8iCUpl+kBC3PVMk+awCYKYBZUlRRqKUa2osbBUWrOueWieaq15mpEpRxjDF3Pzu2vr4g9d8GHjrxDWsIIQQAgBG5ZtLSaZhH
NVqBcvxAI2TkXg/d+vtp3Xdf3OYSwsFyiY2YbWNHnnpOlWsd2U2+GqDZ4qX/CSAqg3nrnrHoHCkZtoq1VRJQF21U3/SlqjqTN81ybIjCzJyJttbU2l7lIEyXXqbKKogL64BBRKwozi1ticSaxrKyQOkarKQigJR7WK9TLxXvx3yXOXR+RIslKcptzPp/P49i+BAFHYMsPyzLWbPPTuIlkbXMgx6IIqsfHqY/RULiqYsSFOWfHYZqm0MX9fi+iXdd1HlAUBVoup9Npv9+nNArCOI5XQ1drJkLnXCrJ9gfnGJkU4Yf7e0Tsus42Gc9ORN6/f39/f48KMUZDlR8Oh3EcpTrLo9bTUUSOx+O7oRvH8WYX5znP89z1voFFtJJzDqG73Aa38XybMnmLEoLfK2D4Wzx+at/eXz+k8w+0fObb2lI3z86SJ58+PQHA0PUppdPpZD723d3dEjCFuFWMl22uVPFGrE4V0T5tmqamRrwJRl2dUkqpNYEhMjPWWnOW1sAxEIHV9s7nc6u63/fv3r07HA5Wehw6vwoYLWDCJgoEzjtEzCmP4wjsuq7rdgOT34g3LWlpg1Nr3R12IYSt59eyna01Im9pNDM8zjkiRNBvvvnm483+9jBEJ45bJGqSC0jf9yGEWtLxeEwpiQgqtdac9/v9zfX7D323K2ZuvcNll16e0uWDY0SCpexhTet8Cej/XNPeZqr0IpKrWi+L9s9R3RvSpstvf44nFoCg5pyhiVgwWVvOuaZcStE8maUlRBfCbrcbrq5j15EPyoTsiBnYEZFaD1xroIDOUdNSRaSU1KS1mqplxXPOpSkicvDee/erby2e6LrOuNkch7dmzzknAsxMn9ddWaLbyz5CSyOLKNHaJk/PKdIFybl4VGVjIMLKIG2esmG4rMOMAGpOrbaqwi4qO1ACLEiMBIjGJ7YAVAiBlRfEEAIJWSPC8pgu+vO+FGdcPqPt7uwwpYhUAMLnx8HQwJdqGCoCTdA0UuzbSYkIgVT1eGq3t6UPwXI/sCpLd4fBYsoYYynVOefcghEFgPP5fNjvT6fH2MVxGj/eXotU59h7P49qxWDvIzMDw93dOTj34cOHy1rM1dXV999/nwFjjDlnw9bZ9Z6Pp9sPH57OJ6ssvvv6o/Us2fKHtaxjXbqvjNaroBk+x9sCL5Ev/7XjPDtc0yUXbP6vwpb3X3Igz7BvtALK77Bzr7wMhJ/Xt/dluZTPv07LHie4gbFNmE0J0UgBTXBEDNqOUggRmK2tzUiivfegNJ7Ph92VzPX97ub/9f/+f0ppcky1tpubWwB4eHiYUrbZlkFyEwa05EWesoFcuhiVjujalOvj02nKQOgLxAx6d/cwTfDtL969e/f+/v5uPOXr/RDehamEnPLpdEoTOAfOLR3ookVUQ0/DoXeRqubWUFXnueScx3Ga5znVJuDRE5Afrq9+/evvH05H50Ichs6TF5E65Uw50+3tbZPqnOv77tPDd67D2MP1zVBrfXxqBtcuuXkXNe6PDw9MDkvbOU13x9sOvn4//OqGhpD3gTvPNU0/HM/O4/7d7ZXU8fFpPI4wJaq1FRWGGHaH23c31+/3t7eEIdeCQA49KaljdV6ZF44vFSRhBHQCTpChOc2kwBpsh+UG0BRwhYDaMwVTa2mlAgAas3ZrRFSXoJGcc2DS8IQAS7s0bFCRdR+cpBn+E6toSVCbSmUB1owKoq3UUvKUU8otC8h9ciEMoe+6rot9xzG2LmbnyDEigluaD4HISnKOQoMqWDiyc6HgPJenOs5tSnmc5qdjPk8t5TynZGXRr7/68OHDL//xV7uvvx4Oe0QsKqoagmNmcAwAjYCZnffOuTIvwh1EBEqbnSulwdodsuQWrW6qNmKWagZnGVTEdjotkVlrVKqk1OYEteq+a63FCNcfblIK49k1T2lOKdB8OpfzeMz5Og/X+0PA4NqR9JaI0DGDYwYHSOgIYZpGZCL24hx5p1bUROSKImKBJAfPzNXaH9m
04wkR1wx0U9WBJaWZHCuFjDFDd2rUH/hhbKqNAJxCQxCFxg6RJY3eeyZtBeaSg/PETl2XBVrD3W4/16fr/eHu7t5HPlwNT1AeFUTB7Q+axynNXRe++ngrpRyPx338BtH/x69/HUJ3GIb7+yMH7LrwL//6n//X//1/OsaSxy66WtJuCPd3d7fX+xjj/d0DAOy/evd0+k/ub87H0//x/bHQ8NW7a0KtNV8N8enpu5urWGud50dkfTrd4/d4e3srjo7Hc5fnd+9u//Xf/61zBNOEJben868+fD2N+e63x4/ffC1C40Q+hnFeuHueCbtRAcAt+7AAANJCb4SIjraQenMuGWDr7cE3++26T79BVHxh3/5SD9KXatJfTNrDH+Owz3Gv/v8TTf3ftEewmke8HHrn3DAMj3dPnfP39/fMPJ5G8yVtcBatEIDFwZzLMAwG+GxtaZmfpqnrIKU0TiYrC7XVccrjOItIjISI4zimNHnPVpSe53o+T6W0vifvvWqrtRGhqh6GYX+9v7267vd9cC7XWnN+Op1aa6XUZvQrgITOIC1iTv0q3Q4A1tJkNiCXJW2FwMYKSEQbYlzXXrT+EKLzFTMiRO/jVb06DB8/3O66bui9I8zzCDX1Qxcco+p5muZUirQGisjI6rwPXby+vt5fHbqukwbN0AOoRAsL5dv5gxc8k5dUnLCuye06X9RyL0pRny3u6trHZ34xXHz1MuFV1Ko6KtYWrSLQtJYKIKIt55xSySXnWlXVdzsfQt/33dDHGNk7YxBdiLOfs6uL2QMF9EwVmARJNAZXehGppahRw3WBiMg7ij7k7jfn03kaf/3puw8fPnz77bdfffP17nBwwVuoUUWaSlUhx8bI2jrw7HwMnh0g25wm68MTFTC8a2N0jZTRKbVlHAy+I2rglZwzKdRa52kax7HloqqMxM1vdGum5iExsgKCOMAMWMZ5HEdQVd3t+yHnzMxkz7uiIrAiOrZI2mRJsLa2VBkRhbf9ZOmiAYAtOl+Z554l96CBsdqg1FqP4/l8Pm96kG8Pi6gUdElpt0bMIYRWrZw27Xa7WqtzvM4fmOf5+nDw3nsHLJDzTCGoqvf+/v7+m2++VV3wk4hwdXU1TVMXIOe82+1ODz/YbalijNFYL/I89X3fWtvv9/M4qmrOcj6f066LQ8fkCdVVL7yQPSksgICccxh684M33YbT6dQNfa2V7ORaU0rYeMO4GibOCpnbLrf6Q0ZWoOvaWvpEL5fhK3v2X/JwG+3fFuS+tMMXMe8CQ/hM5//f1rFtpttGaQGfd+679H3puvv7eyssp2mutc7zrKsOnyUYTZokxoiI8zwD0DAMiHg+n0VgHOdxytKoNT2fpuNpylmY2M4fx7G11neBCOc5q7rWWnBwfdg5547HpyYao6+17Ibu9vowdNExMgKjVmnn87ghMJCYiYkZmKdpsiI/O2e62Kp6Pp9zzvv9npnb3DbN68BhiAMjLkBwJBBttUpr0OwGixL0Mex3+4/vbz68v+m7MHReSprHsyftdr1nnMfpNE7WPlhFhJCdi/0w7A7Xtzf9bhdCTFmyVGkqqA2aI//K7OHaQXHBL/xsAhfJvWf6TDVsBT4X48TgPbjshi8Mm8iiB7toP73EswAAtgpKIlWbgIi2YtJ4Bv+pUnLOc8mtNQFApGG/izEOwxD7zntvCA4TIlj67Ew1iQgJAaAhQxNEBhAUYOQASMyiSCFyF9qcSRZFdq3t9Ol+nufjaTyd/+3+4em3P9x9/Phxd9j7EBBREJamCsSRJ2bGg4vOuxg6H9CxQ7KfpZZNGkkJAdtS1UJlQN34GazdvracM4rmnMfzeRxHVY0xDrHbRpyYnboYI6kYyBiCT8RPTabjUy2FAJk91ck556GriFZwagLQiJgVwCqsZuIWUnWKZPGmgpj1IlTEtiQ/F6mI9VEqwnO6smp7enp6eHio9YudUgvxHigzA2gpxXnq+v50TF3X3d//8NVXXz0+TjaZSin
ew/HYrg+zeu8YiGie5xgCIoYQPn369O23vzS5Me89EVzt9v9293jYdymlvu+nI6c811qZ/TAMKaX379/3fT8Mw+n8FLtYs3POTbk9PT0NnYuBd31ErTHG1hrk0qxQLZLLPE7UH4KV+UVkt9sdj8enp6d+N+SUgAkYoUmaZvAsCNqkQtuWjypslDgNLtN2igjGVCNba+z2D34UAvq5LsC/9I7+sw+nLzGsbzO/r6LAn4IR+ms+LmOIzb+ziSK1OaLpPGptNRcQnec5cLTTnHPMi4idc243HKyhx+DmlkNzzpU8p7nWIoicUzkez+cRiKCL3nlSbYQQO8+MrRWR6nw8XHWBXdfFWiuieo9d9G7XH3ZD5wJo0wrgiRS2ItZy4boSdjCPT0dbbBtdZ6ktpaTaiEC01pqZFxY3E3BQVa0NTWxGRLWhaEqz1AoKjnDowrubw/ub65v9nrG13CRPDjQ4dqCtlpKm85xyzq1ZG7ELXb+/ur463Pgueu/ZO26FaKkYiYhblZJePZQN0vJs8HDBu8Pay7z4oWJRWns+cm6t8cJv8sLsqSoQvqp5PH+UKkoTKdBEazMMi5mfXIs96znn1ho5FwJ77/1h7733XedCQHehjkQEhLj+15JBBhIBRyAKyGTxMCF6N3jf5jmnQVJRqShqrNa/GHaImGt5fHx8fHz8l3/717uH+6vr6+vr6xCC72KMccEftVlENC0awikuqoomuPGMXCUlImUhICCBilVV6nO7fSkF2qK/aLVGBQgh9MPQ932hZcSQgNA0AsUxY6vsAgHWlEua6pzHeULEvkdVRXZE1FRJVRTJBqoJgho/HBDaZBb37Aatho2YuYkKrqjdF7G9tNYkqwgp4Ol8fnp6KhfBnq4BotGubkOBiKBaaxXxbsVwqqoV6VOS0EHO+eZmOJ3GcRwlhKGLwaOqdl1X5rnWmlIex9Nut0up9H10jksphvyqrdVab969//V//Pt5nENwPniTejaUpnXmee/7PtaS5hnu7+/74B2BZxq6LtRquSWbhFYp73Zx02ewRtuScyt1nmchDENH1rykAp6l1gYNL3i6AWDhdZBlywAAy7tYtP1KAWN7Fl+CSrw1cn9b+78dbrkNy+Kupe6tX3ZFkL/Ot/6N2jwAMAnvjcZTN/iy6jzP3vvz8RRj/OH7T0TUSlEIVtkmIoBlA7W04TRNlnpqrT09PdkEyrPUAqqsgim18Qyi0PfkAxMRoHZdZOfnaUxpZEc1513fO+dSmudpItSu7/oYb29vd3303qsKIUKtZZrm08l7X2utzbqnlUCRHIgYvD740HVdjBGJi+TcqndEqDUXqa2LgRClNkby7CzCQwACFFHU5hhLSd7R9S72Mby7OXy4vbm92g2BtZQ2J4J6teu6yFBSmqaa5lRbrs2gcSGGYb+7urraX10hERAgKpKanIKqmMXaEpKXU+hVbnNbfssEu0y8rDyNm4pvzdkUE42zDV45oU0BgJfV/wwDtWvwWqGtyaUmJhxRa01zKdJyzlUaIHoXumHX9z0Og4V0itYBB0y0FqIQLItLuLknze6CEBGrAqBTQGJHzBi9pthyaVakbKIiV643GtKr02n/6Yfj8QgAOZe7u3vvfdd19ny3loa5ZedcjbVGvxxhgQWto4rATKpG+a1Va60LaCuljVltcyGY2aIT30VHDASyDA0uonnqmwI7h6I+ht3+oKrpPEoqT8czc2/Il8ZI6sTASozUFAgUKwKBOtUFqKnQAFBIYJP83YL6VQ99sWS2VAFaa9KqoFfy5/P0+HRqutCcLbHhcggpmgjJOiVgLewKM8/zPAzDNE3DMDw8jM65WmsXh+DH1sQ5F2N0hH3fO+fUuZxzjOHu7u79x6+Px++GYQjBPT7cxYC1FgA4nU7/2z/96re//s9xHA/7rjnq+36e54/v393d3UETKVWXT/ZTLueT3seHGN31biAi9sH7mHM1B9FQRY+PT/v9gYhTygO7vh/mnKdpdqq
AFLrOkZMqQMSKNVchIDLOWQV3uTkrrL2SsND1IACa0gUibGToy9hZxyd83vj9rR9GzLEGfJdcfC9u+AWB6duB+BsKdfElGe62S7YlNlKtjUOYp4kUAcAyeJbZFwFLG8Ja4bOmcqv8Gfgqz7UUaQKtlWmcWwP20HUdERnsgBi8d/MktULXQcrZeVIoOaXW2m4IV4dd58N+iKYH6pBaa+Pp/PBwdz6f9x8/AoDxYzUVRhKqDpmIgIS9H/YHH2Kt1bzF6AMAlJIAwHtvXqSlPTcTDgqtZm2VEaacovdd6PZ9uDnsr3Zd5xBbJm0CtXN06DomOU9THsdWSqvQKqhS8PGwvzpc3eyGQ+c7my5Sm7YKqgiCQICw5YovDdtba3d56AVn1VIiujB7FrWIiGN+/uubSvtlTXDDAaoqS2q11VKkgaqKmYTSUk7NGGCQyTuOg+/3vu+1C1YREURrhmBCYAI0LSBSwzHiypoNlspTkwElRGBSQuc6bYG63EqtuZitxSaxsV3bcHXohyGlNE1Ttp85G5w4xjh0fdd13jktoioNShHQ2qRUKczM3jMAIAGiilFgeyJ0uYjNjTmnUkrVLShcdBxjjP1+F7sOEauIc8ESlYiL2WtCzEzeQW3UqBt6JprYjU/H6TxutWRERFanSkSkIK2BWKpBQdCE4gCxml6dykWIbPpRzzI6honVzRlXqVXVQ6n1/vHp4Xgy0jKxKiDCJRLDZhohbQ6uiJhUnuX/n56eYoy7nSdCo6T3Hkqp3oUQujpP3vvz+dyHQESHw+Hx8fHdxw+I6hz1ff/w+HA4dFYHyTmTC92wOx+P05wRse/71rL3vgsBtbessqWOQiy1wHlK4zjv+r4pMrOPgefZGo2sAHE6nVJK1u3QWnPMnjlNsws+pdTV2okWrcGxQ6q1NkYRXYM9Vm4iCGAp5QXPsrK7IeLCUnTper5Ydy93eLzoc7jcUQH+xmqBbt1SjM3BWP62mA/WdnPaXtnM4d9wwAcrQcUacyCiSEPR6Tx678fzWUqdc3HEudacs23HpbRpmqywN02T5eVs9zTjV0qZp1ZrK1VSyvPcENfOvFaurvagNaUJySMqIohI8FDSLI26Ll5d+z52Xdf1IRJqy1kQlXiapof7+3mevXOlSpPFa6OFsxEBoOu6ovPCdcQ8TdM0JhUkB4Z7d56Z+Xw+5zn17z8E50spbI64tJZLLcUTE8DQh5v9bujiYYhDYI+CraCWfReiY6lzyaVMUy0pT7PWDhp45/f9/t31u93h2nsPoo4ZmlbJ8zjmuSAiOt5qbG+hK1+aRW8L7FuYbuZhI4MW7wFAoBlwl5D4QovcUA+0sfyv3NOSUyut5GzhX2uaUppzrdKIPUfvQnAhdF3nQyR24ngTkzPQikGG2TkBwLWkt7BIAzpxAAALGlEWYkgkVUHP3jsXhEJpuVCtKsIVaq1kUt1IXa1xnPI0xxjzNE/TJLVVLJVdc44VQgxM7JBIERqI1qai1Bi9GjWaWsBtBTTOFWzQpDVQYRtPQnLsPCNzCMFHb/o+ZCoGuMBomzIRMHnxFFXrnLQJMIZIIlJzbU3mNJpYhEVU6j0zoxORDIgqjoSVjT0dFUBEa0MsyMzmGIqYIvoztaHtUVabaDUjIjMV1WmaP3369PBY0G0Unq83X3NMEUSaEIKt0/P5fHN9mOfZuaVU8f79+99895v3799N46nr+qfHyZyq8/l8OPSn47EPwXvf9/3xeDydTtZkst8PMTx4x+cKJWdrMBiGYR7PpZTi6HQ6Xe37UsrNzdXDQ53HyblgadWuiwlSzvB0PFm1mMh5F52PqWQRIREVSQnu7+9vr66NXHDhTEAkopxLSbnEXECcc1YfNaqNreqNFRARSZlXiL6oEigbExLIsoHjKl7xegxx4cL7vFbt3+Tm/1OQnPqjnRx/i7f99pqNoYmIxnF8f33zw+kHbTKdR0+c4bnfa9kvRJj//9T9WZMlWXImiOlyFjO7m7uHx5q
VS2VlAYVGN8DmcCizCMnhL+EP4K+i8I18GwopZMu8jMwMu9nNafYKFArIyj0W93D3u5rZ2VT5cOxe94jMrEahAVS1SUqkR/h2r9k5R1U//fT7eBiGxWJRv6UUrcDUOI45aUoyhHEYcvWMrTh7ztl7J0W3h1EhGyJjQAQWi26361XlbLVcrRZYZTydI4WssaQsVMI4xhhb58/Pz1/uD7VEIzRKTMxoDBq20Jok1nrnnCjEOFV7gIoEp+gYhrGywqrCBRExoIiWnDQLGJo3bjVrzs/mnXOd43ljLapgKiF3rjGE++2uP+wBQLMOQwBosLC1fuYX8241a2YiUpIwI4jmmMJ+jDECGe89MRdzqrXeKfi+/5geBrz3dIofRr57YsvDr//eD6zOGHLKW4+Fo6RcYiy5lFJAMRUJKcWckQyybbrO+tZ6Z5wla5VtISAmdIYemJtXMwvGySZQ7+XDgI5+dnKU1yIAQBhjMMRsCAiQCSwjqAqOYWycc26GCjFGVGm6tuqGRO+ttTkmQrTEUCRJamazOr1HRJOCl4iCaKHK9lEt1ZOv3u04KU5DHeOB4yeA2RjD1hpnjTEna/v3ezxVaUWVncMiJQZBAARia31TFHZ3t8wcOdYWMgC4nLQ4UQEmVBSYkhGZAlt1rQczVYjV3LewNXJUr6vTivXKOZNxSJwF+hDX210EcLVCBBRAUIUH2lCnai/nAojMLJpjzCdc11rb9/3z58+//Pq1tTZbO5vNbm+Gmk6llGazJ2EcAaAqpxDhbrd7+vTper2u4QoAVCGltFicrdfrtvXG+QIoirv9/uJs0ff9+Wqxvr3p+/7x44WIgBJ6LyL9Ie374Hf7R48eecs1OT2FlrrEt9ttY91sNqsdPgE1xjASiJSYYoxJpW1bRsqToBwqT33VI9YNCAyTQ++0vUC1VjqgBPQwsP3wtN/v+tj+27z4//h/+K9PNc/037HfXx8n4jToetwtxw0A73zwzt54CC7B/Vf+YP/mvevHPv1jb+A99YH3kpFT4/r0cX2oRepZqSKiRXPOTLy725Di2A/Dvo8hQtGxH7abQ214IOJ+f9jv903TPH78+Orqup16ckEVKhLy5s31YZcAkJgJ2bV2NmvYcIyxbRuozWQEBXHWNI1DhKEfvbNnZ4tZ20pJMSZE9M6XlIkIAdfr9Wa9btt2tpgPYYxQCybTNA0ShxBE1Vh3t9443zx/8UHKOYSIbG7ubhWo86wqtZVjyMQYS5EPX/xktVht19sUo2Eexz6NIxJAkSfPn8wab1AJUme5cwYkxn7/9NFFCmMYDqggRfa7/TCObKylOSl2TXt58Wi5WBIwClo2oLjfHm5v7w77PsUESs66rukCaO2V1ltaSiHCk9STm9pTxhhTgSkBJSJT/e6rMss0XpZDCDGElFLJGQCctd77jMhMR7c+RAJAJUItmeo0eYU1RHLJUIqGoVJQYkr7vu/HAIZs03aLhZ91rmlc2xrvm3bWdF3bzsQRV8+E2jer/zEpKBIiT9XeaQUa8Hx0Rqd6IKuoCCEyotau2SSuhsxsyABhUVWEamJeQQVGqlVP4/18Pm+8L6WM/VBAENQ7a5mr9AsBMGJOUarXXE4phhRjDEOKIYSkUojQGEZDxrCxbK1hy8YwGSJCxYlOoqioCFVrBhEImag+qSIqRaUUhUnRUwERWUpOKaUcrTXeOwICVQBlYhEplUsjqqCVbgiKhMSGT0P01SpPTjRcmArDopJzlpSWy+Wbm5vV+cXb9eb/+U/+u+ubtRAXJEUGRJgs7IEQAbRpunEcUy6W2VmruQDqvGuHoe+6TlWqreaLFy/6YcPMMcSm8SUfDvvx0aOzedfe3Lw9O1uVnC8uLlJKV1c31pmLi0ellFxSibnxbrMdENPq7GKz2X7wwQfbzTrF4J19fHmhWUTyxflysZi/fvWKEA+H/dOnT+/u7lrvrdEUJIyhSL549Gi1WpaSVQoiqhQAGIZwcX6hCn0/nC3PLs4
vADDnQsTO+VzEOHd+fhFC7A/Dk8snh5T2+94Y0/i2PwyllKZpK8SioqLlSN2YQHktWbUKJdXhnRpq71lE0yle//f3UeT9dmDpQ/L2X+dV1S82+gOYrJ7+eK/P9zAFeC/A/D5cD1/JjyUpD8NkRau0KCqkcep+5Son1g8l5UoO9N7X8FZKqWyC7XZ7dnamqn3fV4yt2jWMI6wWKxEZxnEcQyHw3llvbIWzLFWYBYCZWUoqRZnx0fmKiG5v3xpjnjx5Ytnd3NycLZa1nVMVT5AolxJi7utAgnFjiimL8d5YX13xWmsnhWYycRiGITjnmPGEEdX369hMxqeIhFhyKTGBaOcb25nVrHXONM5YAs8IEg2IbXy/31lGZHMYD30/FgU2jaqGw2iN73xn0JQktVpV4L7vi6hBH7QoKKFFMDkpuPc1NicVjMbBb38dUxnEo4BIPo7O4RGdQUQ6Vg2l5Ppx1ZgupaBIyhJCCDEVUPaWjANj0TkylrxFa9A6ZQIyeiRD0cMcjum0/B5uv+loUURRAARRrYPCiKAAxoBoUdAiiljlQFWVvYGiIkUFpAZJEBBBQ845FAEBw6RZiJCZ9/tdjKGU3M2apmmIoJQUYzaWq694KSXlVN2mAMD4RaWbTu+lThzSNHdYZxBPn0IiEhKRmoKoqoBUiiYzFyZkAznrJA5ByMR2KudDihzYWyWiGAIAVBormKPluoiqWtM8fJq1mwoweSYUqPopAAAC1V4YanO9H8PVm7eb7V6qSxSiANWCum7u2sqqOSuB1I4yTx2Ne0Z0jPH8/Pzq6mq5XNYuGpFhsklTjGm2mNU5hFl1PgG4vDy/2276/jAM/WKxHFwopTQNAMAwDCnnvu99O4txHGISAWQ8HPabu/Ws84vFrBS5vLyMMT59+nS3XoeYgQEQ7jbbzXbXNA1bB2SYixrT970xZhzHUspsNitlarI03k8K6appDGEYS8kFdLvZsHNERgQqGqoKMeQiZRqhQVCt5TXX3cMTbwVPcvDHiPCbJKp/P8//v/5lqlvZ8b+jCwciiL7H7QRAVTjlZKcb8bt95w/7cw+Tke/3hKYXzCQiUiY2BzOLiuQyDAMjsnMJhziGvu9r9igi1Z2nklYqPNj3fdfOD4dDCKFKgtSt2HXAZFMchyH0PYAF78F775zBycQyS47GMKJhstbI08vHKcQU4qztmqYB0QKl67pY8t12E0JybNg6RQopjyGmIr7p2Nn9TZ9S8V1HRPv9QVXbtoVpINcehv5wOFhriUEFDLMURkRm9n5if5EgKUgukouzvFzO592MSVizQe6ccQgk2Tvbzdp+vyXAkMp+34eY0fissj8cWpktZsvV4sxaLwKgpMglS0iSRQ5jvNsdUpbZTMF4tF0psZ47VTXt+0jab/v0T73VY2FURwQQEeHoGTShW7nUTLZ+qqqaYtFpMk/VWMu+4aYBNsZ5NtZ4Z61n45gnQUUyDMcc8yF0McXX00ghAAIS0kQwxKmXWPE3QizV6ggUUKsCqqgoaD+OWkC1oKBBICUEAYQCGRnZMSuhCgA7bwCaNGYpaej3hMUyWWullBQjoQMQhZJzLllKmUBgaUAQFEEIavyteT9ZQ8xsDdQofpq8BwIAUTk9psKGFJUA2QIlINbaJGDDBmzjmCmMY4yBAC0b1ZKDEBGxMLkKAwNIKaII1vjpSYkg1fCPACCTLR6oSsWOVQRUkXQceyI6HA5ffv3VzfpOAbIoGFYlUBUoD/Gf2om3jJKLSDHIdSfCNMFNKaWnT598/vnnH378kxACAlcZthhhvV6fzbu2bd9ev5m3bUqJGB5dnr+6ujscDjnnpvHzWbter2dN04fxcDio6nZ3mM/nfb8fxrGosGvC3eHujtvmyaPz1dvrzWq+uLp5++GHH/Z9D4feWIMg+12+vr6ZzzprjLUWtYAUleLbWdUqa5omjqGkXFTqsDzVAchcckrWWlIJ/aDWI1DOohprKZ6zlCKICbGanKDSUTo
ABY2e5rnqKV7L9poYIMp9nYByjI6/R+f/3+wypzHeKmaDR9eeOuePD8DJh0ye997z75DG815V9/18BL5X86lqmczw+JR45pz5iLzVISooUI4NpNNJDQAT6HSE4JnZOV97+ACwX+/HcRwGLQV8C97brmvatglVwjhGLYWZrWmcNU3TWcZURgCYz+dt26Y09RvW6/V6s8tZF4uZIYwpqWoCOJHXmS0qisiYhs1u59tuPl9K0VIKEh8OQ4ypztVmycY4IVAtlpicJ6ISk5SiRQi18da52cXZata2IY+oBUtkJSbQlIQKebOYzesk2RgKsgOwQxz2Q3l0vlislk3XFkXIggYk5X4IBfn67u711du7zWaMoenmHyl9vLjIua/HaT1cTqNmv+1zPwWbUgrWZidNLIx7fLsGVwUAYCRCrSMHdVorhBDHkMa9FC2qyoasJe+Nb9Aatt44Z50zxtXM5sFc7wNM/kGH7+HH95+lCSpEOmK2SqSQSwYoVTSPQBTrjgNAEVCRBAUE6j8LFYxxpIIApaigCgCSpc40zWKScCSiIolkkr7s+161iB6leY7VsDIqYzkqvDGhMDITG4NMUB0S8P7tHLPgd5wf5DgFRIYpGiEVFCRCtq7xYkwpZUg9l1x/VLU1VgBk0VKIEAkraWXSAp3SlBoSpy1YswdlqqJF9XkSQ0rJWrfbD99+++1+nxGhFKkiiIJVVUNOM1ellLZtneVQxiKiRIgkRawxIQTvrR4FaKrnSQq5ZHSuIYqbzZifymLZAUAVaVqdLUop1kJKoWk6Eem67ubmhpmhQHVvGIZhtVrNZov17SAiVaRi6A+Sc9d13o81ZNbxickyM2fj9W69nV2/ffb4wjknMSCB995YW1uM1ZO96zoBPWx389XSGAOWK33XNI1Bk0LUVBCxZJFS+0tUSlHFnASpvlM8zgFVikA5baVaV08nv0xwiYhM9fbU+vrh8/8/rch3H/bqXahR4P5dFagarXXnVJTj4Tv8vXq3p2Gm+tcffBhHHedaC546CFrHabWIlIIAhjinFIaxHtBwPNfqJEPbtjHkOkZaoQREjDHe3QUJOYQCAE0DTct1kuxofYDWerbYzRrvW0PMJDdvvrl89Mh53/d9n3U2m40SX758PYaw69UYKIrVzJSZ2di2bWOMksgY45lzzv0wllKWy2XbtvtDX0oRjX3fA6FrOtVQyQWIAEVqpEHElOqMOXjvreWucYvFjJEa35QUDJM1aEgklxJLD3tn/X7f90MAsoL20Ichabs4n5+d27ZLRFLEsJLqIeab7T6U/PmXX337+k0qElNR3CTjZ5fPF77Ux1TvKhGhs9+Xh//rXPcwywN1q0kGGYoqTELStbwTrRKgWiTnHMfQHw5xHMexZ+utd2QdWFZiZWJnjXeGLdnJWBxq9+p7jQdVrUl3bcQS3JO5CZGqUUV17QEAhQlSUqBKJxAFBIXqniiqOu/anCUlLLFISZJzyQkLiOQioCVBUpFMwMRAwF3XOm9Om1dVqrzXbrdT1ToBRkRN0xjvvPPRMTKxJTaEhtgwW4NMaAAZiQC5TnWBVEKmiqCACoAqkD5EUAiBCA1DzgikxEBSRzrUkKiGHGMMjKSqRgQBco5IggTVwVwJc4yINSugOtWuRxQa6tS5KNTB6gd+v8aZvu/f3t4lAbWkRQFI6hTgg0dEk4/8O3O6dcEw8ziG+qn9fn95ebnb7bquy1GYufHzFIbKx3RutVqtYoyA8mL27NWrV8vlTFW9t5vN3ePlI0QUKVUjr+tm+/5QjYQ2d1hJ4N77HIbDYdd4e3a2+vbbb33XvXr16vLx09WSb25uci7n5+eb9e2bN29m3nftJOkwn8/7cdI36vfDvunn86UhuluvXdMBRgOeFIfD6GyDhod+NAupZNFSCjNJ0VASYj0BlBiq5OZRA0Co+iwSPtDDAUSt2stHeTOkKSmb6v739uB/cpeZ2ul0tAej+0aFHn148UECe+qmfP8A+p1
cpyQU3s0+fjDmPYxSIlJzmfrXpmnSMMYx1mk8BsxQ88qufhcAGGMqm4uZd7u7OistIqpYxTkPB5g5MqaQgaaxrvUAMgz9MACgMLMztvWNs14EhhBSKrOuM8Z45wh4jGGz2e36w/7Q74eUMjgPyhhDCbl0zpvGO+dCCFnBOscAY0iq2nXzSiuNOalipXE2TWetBQiqlZIDKQVE65xlwChCRG3jHBvA3PrGORNjREmGyRluDNsqslUKALx69Wq7P2Qla9wupLvd0LSzyycvuvmCnROkTAKEfSnrvr/ebb745tuvvrvaBXAORCEmMW/vmm+/+6PnzXw+997DyebJGiKiH7MS+JHrXnoDoCpFVSbgw2pv4ouKokLOGVW1SEopjmEcxxRCzhmt823nutZYXwySMaZrXNMKgjJB5R4iw5Gk+WOS6PAutH6/XwhVARWVilbGiaiCOHZw9DoodfYAVEVqN7eUUmetyCigVZYKSRSRlGPOefKjBwjp4H3bNI6IVUtKuaLxU2PneHO8b5umaZquNGKM4fofM1tTTXZquw75AeZ83FYP3t39ZsejRCcW0SxalEQKQZ7QWlDCnPPusC9FvHU5Z0AEEEZFwDLJpXFKdWKPmLnSl6rMTR1dn1osUm99nRosqCAim81mv98jgHxvpwsQw731RCnlJMhaIRtLE1RTFZfGcXz69Onuu+3xC5SInHP9Pm2327Pz5Ww226WERLXHv1qthmGonTZdaOttzIksIpH3fnfYr7ebRxdn9YtTSk3T7MZ+u92a89V8fl5f1Xq9Ozt/ZL1j5kHAWue97/vx5ubGP3/MzCDadM1h2NelnlLa7/c559lsxkQMWFK2jWekElMYR+tcjLGEMJvNThsk5xxTYGZrhYhYsXayAEi1YKV8nRp+026qY+z3N7BG9Knkg9+v8/9vdpmSp8HiU+Q7itZMRS4AgOCpz3fK9X5P3uoJ5Hwv8p0++xDqPCGWdTi3lOmvhpgcyxhjjPv9vu/7qjpNROxcjTS1vbxcLolos9lst9u+7wFgPp8DQAghZ20aQFBAIALr2FpGxCxZVXOJ1lrLDpFVsWQVAQR6cvk0hJBC9k2TSr67u97se2M5BEAE46wAZUBiy74h59lZAwpFSylDjDEmNtx085N9lzEujSHn3M2XcBz+rWEvqjIfR4kRm6ZxlixxysNkYCNljId5N+saZwgkF6qiYknu7u5ClIImZh4zsHHkmqKUCUpJgqQCh2E4DOtX129fXV198c3dvkB1TwCERHC134fPP780T2AqMe3D8buahP71r9OxXqXhvD+qdh3JLDVTOY09VCxUchnHceyHklK9A+g8O2+dY2fZsnGeGsfOSs5KU2+JVYlICXDihh6Rf5jEkqfj4wT4HDvhBFBIAKCe2ZUdeRyC1epoTsCMSoQghKKH/V6rKzohAVVrPC2QVRREtTrdaJFcsU3YZ2u9c8ZaX/tfNeYBEDExW2OMtdY6T8YqIJnJzL3+B0RYDThwYnjBUb/xKBB3v9EIJzSFVAWFmcE6FChFtIgWAQAlVRQist6F1B8OBy1KMxQ4MDOBLwjGkGQF1JRzowYAVQVEUFGItAa5esgiKBAWOU5YC2JR5TCONzc3wzAQQiiKZEvt5CoBvCNLXbVXQDIBEpGkIgJsXSmlbZtxHL331jpVXSwWtVVfFVKIiBm2293d3d3F+aImZsNwcM60nU85DMMwn89TivP5fD+GpmlSlqrdc3d39+jizLczKSWE0Hm/R4xjcGxijBcXF9vDwVra7XZVsV1Vt9vtanm2KdfXt3ePLpatc+NApNC2bd/3lZx1OBxub2+dc7PZTERSTrZrfNPEnEIIbAwDbjY75xoiUsWaCZUszJwne2qsylzMXA9CnnRbCECO7joV1a6dnXq61ucO8ADk/D05//9mlznmpw+5/vcg4XsczodV1I9Fl9/h9V6v8fsx70EbZnKbrBJJzMRoEDHnPAzDMAySErOp9m9t29aj2Vq
ez+fjON7d3RFRCKFihswcQkgJvIdwyDkDMpwoM5UUQBkeGtIaNl3TWWvTuJ53XSz5+upq1w/MzIZubgoYMAagTpsDGWeMs2Qm84QQc427uejMz51zQJRzyTk719SQUDVZ0J6ifJULoWpSQ0SN823nSEEOQUQYAQAM8bzrus7nMQz9IY8hxjEOEQByzocQBLPr5vPFStHcrjedRQAA4iK678fXb++++vabV2uIAATADElACcjxmNPu9dXbR9Q0zWq1qlgrHmVzf9tnfWpZVU2Qqr6NiFJn0R7UW3BKjGoJmEvOGVS9903TBGRVHVM0qIbbieqQUiUvFhEmyqBW8WGC9V57773X9rDVPfEEoACiylEJWEGlTAGSyaJVYyyAqlpjqihoKUVyyTHFnCWmnLOWUkCBsFrByaRIFw6Hw6l7Z60lqlXv5EaEiCrVWgEiJZi3iJir1wSirQ0zIj6uiuN9Ot7AB2GvRmytbAgmAgWLUMRaqzFX3rMxJhdhZ51IojGkMVLUTsdxNM46NqhGVUVFUhLVxk1CUVM+qiqVeMykqidp04mPI1IpdSEM2+02hADHqb4fWydElFICUWcsIpZjTyvFOJ/PN5t1znm5XIzjuFqtvv32266Zl1JUi7W2abikMo4j81l9Gfv9vm3b6nBy2A8vXrzo73Zt21rLyJRLqFvvcMiK0LYtxL4UMY1lZpXUNM2bt9uLi4urm5vZbFb9Y1erFSp88eW3L54/C8Pu5uoQY1x2ba1TF4tFSqm+vc1m8/bt2+VyuVwut7tdyMl657yPMQKT815V1+v1YrGoirunzWWNG8OBmaf1+8DGUs30TFXpIQ3wuNrh4UGqOqmkwu/l+f/Xv0wNeABUOw1KE85ZMVzi2rhQmuSWUPN9o5v4HlOiB75Nxzmf+tdT4Lz/8zdc8iMg0umMewhhqSobhqkbeeIuV3mMOic0kTZP32vI5pwLlErhiSlJKYUazSEmSIOMu+KxFXLjYVg0KzV+3A0l5lW3JOsOuz6GvOjOXl9d54TL5dLwbLffF2Hv/W4XivVZozNku0XbNpLyGA5SkqTYtn7e2ZlXywExWhwdObNihTQO+ywpp3Jzc9iP4B0kBUbrTbNaLOfz3A9bSXvj2lFc0zTb9XZI6fp2ePJkbht/drbUknbbveZc4uihtKRxe3v+9JEEs2q8Jio5O+6MMY5pHNfW0bx1zqmmUOI49PsqK/XJ+ZIpaZ9zH2I/bHf9ECUpbweOsiit/dP/7L+cXz7ZD/nPf/lXuei/2VtVDWO8226ub+9ut5tdtplVSFSVQVugGcIs01yVBX/57VvpLtqLsjRkDRE1iaxmBIOslErhnAkURNFU3pCBIqlkQmQEYkKqigGQsyYoBUG8h7YdnRcRysdFolgk5zJ1/gQ0xND3/RhHpWK9xdaCN7Fa7zKBA7BFdJQAiGiahsSgUQbHzIadsZ6ZI8ZjPVkVLkCZiSgJEJHC/YlQyBDRokz7ZRK5fsA6Pv3L/aoGDXZnLFQcD6pGqCTMst1tQFRzSTEOw0GyJpJiIOcsIEWLFBlL4MyVx0TWcOGYrUVrvfPsvauqnW3V9qzp2inUOeNUJxmZUxhHxBFEABVEQREigipGQHEWBEQ0gwG2Sq1llBLBxQ6ESg4M3rdzEEql3K23s3lrkQ0ojKPEUD2btApV1/SQWZFKKaXOBRYGALbOWgeGT10GhaBEY6L/+Zd/eZdK8iamDFRYswFgEs1Qta6BENGYNLKCMcZZDhkCZFAoSEG0KeWQ4Ilhldhvto9Wz20emZeqggCr1fnt7a1B2PeHMJZHT568/u5l49rVahWH/sny0Z9/9+dWYD/ePXp0uVz5lFQhvXn57Wy2MAD93X7WNeIpRtTV4uKZ327uvrkdfnp5cTgcPnn+rO/Hddoe7u5YYda2zx8vv/nyi8bbp5fNd1+/6tfbZ88vLfFmPzCaKny4XK2GYXhz8/bR0ycmjxI15GCHg3EcDtvG4OPHj69eXcH
Qg5ammW12fbtYoNqDaLbzGAMVnRmyQJQLYLbEGmtuLwyMWVnBEDIzlErjVVWULEAT6K7MRzUcrGT/esJL5YjQw1hYAIDA/p0GsPfYlA8j8W8KeycsqDJ5HgxCTSp8RATHEfWpu3H8BQ+HB067vRLJ4EdSgL9xXvCD7+c3/6iHpd79NUH89109nNzWRFUrXWoYBhKsVBQ09gTEaSkAMKnUq3Zd55ybtE6YAbK1ANXNznK1a4hxGIZBJYNAthV9AuectbbSzUPst9vt9dXtGAqTc43hlPsAq7PWGJNiub29I4tN0zQzZy3HouMQdrvdMAyI0HXzqhORUqnyK7PZLMZojEHicRwtGmvIWM4xhDgS+OZ8cXlx7iyVFMZhFw6HFEdLaHxbh7mrw1w/hGFMWUCUkoIoDSmv14e//Pzzs+1g29l+v1+vt7/+7rp2fVLJQ8qhZDmKKz546nTSPEopj+NYRRHBW3EeTh6wP/JMJ9The+2E01qtx3f9xhMpdOo8VZBXFQ2bYmpFSFj9NBgA9vt9LRaVjeRcF4QAkSmASEBKRQRFcxIsRNzSCdvUafhHGYGBKmETAKQ28VTpKPCPD9jOeORGwQOI/sTGYmAoCCQMLJAJkIWUyyN7Wc3qSowprsIw1h7eYfM2Z4lxjLmoFkQ2hoDNfN4xW2uZjbOOvWt9Y63xMp/XovCEddeXd+rlvLdljKIAFigwOTgBAQkqISoDqwURddZp9ffB4mwpxakKF9VSUsp9Djm24kspmHPtqauqVcfMmjMRwWQta4jIWsPM+Sj7nkup+29aGAy5TH5Dm+0+ZmFmdi6H8YTlHIeN6WEKfTqs6naumpyLloZhmM18nYebzWZDjM6b/XZYzGZd59Zv42JJ19fXv/ijz5bL5WazOTs7E5G7u7vz8/OU0uXlZSVw3txsuq4bh2ytRYTdbrdczFIKTTMXETq2Eq21QExUVqsVsrm+vpb1+oMPni8WCyi55JhEmTGVPAyD6WaVCHpc8FO7rormVxqz934Ifc55s9nM5/MnT55cX18vzs7317cffPzJq+ubx0+eANMYg6rmNI5DVCVHiISlKGFl0hqAQkoApeLLVXUBCyJV5jAct9X3zen+0wM8TUqTLWElcVWy8DFsIxEoKSkoqRICVHx/gkHqPG3t+Z0gihpZTufS9wvh/5jI994P+Q0h/QGeee9HU4UJdWp+wBH6AihSzTaHYdjv9yGMDi0RppIp59osKaUg0smcRUTm8zkRjeOITNbaAQIRxr5XBWGbUio59vvtOCohLGZYsZGjTr5F1JjT1dubGJIoFsD9fhhGQICuYxFRhFhyDrGd+1W7dI3LOSNQjHEI42HITYNnZ2fdYq6CmksIwZMJIdzc3AxDODs/A4DGWYOgUlDFG17Mm/Pl7HwxQ5B9HNIwxHFglNZZay0SgOg4xs12fwgpCYWCQ4E+6Zvb3XbQtxuI+OX5+nB+8fjld998/d3Nvt7HSQgYBaAKMU+KwAoCKhMwpgoQIxwOh91uZ4wh7EpTjg9L30toTv3a0/rRd6PdCRmr1Hwkqq7cExKA90Iw1dOnyi5Xj4KKfYUw5hAtsSXmaaJcARlBjCIhoqKpU/44ecK1QqhTXFdBAGCYxuERp4b/sSOSiShn0SOKO63e4wkodSi9vlk5vq8aPrWWHDSRq1Gml6eqxhbfdO2s4px6eZaTxDSOMYtmBGaDSMbaI55tLDOe+n/RuYdZwgkHexgVoGI2CgDI9cBWqvJqAqeeyPHBVG0aRrZWkTBN8JrmwgiScxxDDLHaz0I1jIXpqbGx+cGoCYhaaysS56zLOQvUp6wigkyEkIvGUm5v1rvd7tBLAQD3o6l93fdHwanTG4Scc9v6Ybc9O1/2u13d1NUS9rDeM/vKVjs7O7u5viKit2/XInJxcXF99aZGmtvb29Vy2R8OLz68/PXnX64uLiSXtus63zjnnKfNZv/hTwgAJj/nkp1zMY6b3f6YkHEdtF2vt7NZe7Z
aQsmb9W1KmRxVO1nvvfeL09Op6tExxu12O1/O6ioCgJIy6HR20Xm3XC7fXF198ukfXL2+evTo8uXLl81svlguLZccYwhRFMg7YyjX+UWFSU2WhJWRAaAoINWpTkEkEFFTRauPVDKik3pn3aT37ijHbfs3oWf//VwmJzmFPVJUUqSJ6nMfV46HDx75VLXoq+1ffCe6vGNR+/AUg/+4yPfbVnv4LrflBOYQTrmtiCASISpM419Vmr2KvWbAxjnJRY4xT497/Ui0A+fcOI59P84W82PypeMIiMCcUhxFJEZFBedhNm+7rqsFIrIppQzDoe/7za5XVTLOKKWSQwZrAInW2zCfl3betu2i6ZqqPBtj9L4BAGMcUVosFrPZzPuuisUgYtu2VSZUBLqua9t2Neskl5SDd7SYnT86X3VNk4Y+hsM49FhyY9hbZ5kUpMQcSu77ft8PQwJhOxZe9+FuP+4TbEc4APRjKre3ojQe9gLgugaqVYGigBKoKpTqMHR8Xnr0OUHElGEcx8Ph0HVd1/oK9BUsxO8oDJye8vG2v9tzVj2dBUy1qOGT4OBpwpKqjaJIHVdHREEwSMcRjlQtxQ2SQYIiUgqhQRJE1iQCBQ1KIbSooqWqfuSEx4JCAFFB2TJR9SiAqZjQ05Lrj1P5p2VfhVFq2MsnKVGcVEvc8YA+vYWKuzCg1pJLlWr8Z2MIfNc+lCQ9ElFQRPDB4B0RsbOGWK2tsmgTiwGZCJktgACQSFZF1aJaMxkyqqKIwAIqdXb+6FhSJvcrKAICqkRAYp3T++pMXNP4MZSYKluEj+LppzJ3gthqs7aa8eWcSp45j0wExMxSB8yraEzGkNL13d2RqAoikoexztU/KPfePwpOqwKO0i2nbLiWa+v1+vmTp845yaVt2xCGs9XibFlHnuDqzdvPPv1kPp9vbu9evHh2fnZWDVvqQqqiFiGE2bzNUs4Wy1ev1jlnbzmEsFycbde72WymWq6urj7++GM2brfbOecfPXo0vnr95tWbmfer5bLkuLm5U1VRGMZ4OBysndfudd3giFgjonFcSlHQzWZjDDnnjMput3PNwlr/0U8+fv36dbtYbDY779urN2+d9wDIbFKMacwRuOZqBRVAUYVNHUUvhGbatQYEGFEQ3vHnO+2yh03tScnzuEF/z+s/k6vTLiiBcqk0rQm6qfMcMnmOTAoUMvFfpzF+omOOXFO8ybcJTyvwlGr9R5Z6/zHXA/TjXjuqlIIqRAYAQDUOYw5pv9/X+DfG0RlTVKCU0255WIJU456K1zVdWyGjlMA6QATD9c0WIvANzOdz50w1S5uMimLeHvrNZuObbr3eIghZ6xoDLCnJ7TqpQiz5rG0ePToHxmE4FMjOuZhLFvXexxi7bl6KxhgPh8MQorV2NV+IiDHGOZp1bdc2XcugBGAcm1nXtM7m2Pf9ASSBaMNMhggViuQUU0oxhn6IQ5KoXMRsg77ZDld3SQg2ARBgefE45xhCUCgtwKsYYCI/1xXDlfyByKpZFMox8kFlBCNUNbI6y1iFApIiG6N0b4P38Pp+2Hv4TGupR0RVYaj6/hzP+vsFYB2fpGG4Wt2OYxbBLJhFY86qAmgNUeXfjlklq45Ehgwjcn216RhYtM6eQw0/XFSICIFPAZvZENFo7jHMqVQirBSS6h5Vvz5X91SRkPHUt1ZVnt4Teu9VBe6RUq2xsM7dngo4Aa3b0Rh/QlylCIoSlDzNguuRcCrMgIZQZHpDUM2U6KhlxQAZAQGQkJRZlQqgoIQcKmdSQEsN2wiASIYpMzBpKchkvWu6VkSGw86kVKxjZkLUInEMqKAtemYyDISoVSpaU84xRgEltsDETHAcPLKuDbvw6s3VkPLRPY50kpoEOP4PEUVV3tWkBgDC6YCuohDDMHjvQwjPLh9/9/KrR2fnjbP9fn+2nF9fX5+tFk+ePHl7c9W29Pbt21/8wWfPnj3783/37589e/L8+dM///M/vzg7TyGulksUPVv
Ov3t1/eTx09u73WzWOrcOIawW7Xa3fv7sMsZ4tlq07azf3VUuFju7Wq2YebNd73bjzc318pNPVqtVv12Xot6wiOz2vfW9tRaZUI9JgMgwDH5wdTTl7u3N42ePvffM/PbtW1olZzwBpjGmvGXfrB5dXl4+2e0O3nsma40rCWIURrTWlpIm3pVM0stl0msHKZpRCIkFFFANCiAcB97qnX9Q8wEAqFQHrns85u/5qP9rXiZmZQBgtQSgoFzPLAFgJJn04pHhmOUhHXUOj7okdQceBwNqFlC/a0KBv1/qwW8s1H7wQnynGngYwH7wegjavPdl9ZmVUkDAEJKCIIYQQj9ut9uqm3DYH5qmccacwND32DTGmIlBDlDpzsYYxOAcgFTGpU8zAACAAElEQVRTo4IqiHXaz0xnE5osRUtOKeUkOUmSMSSdzZx1zW4XY5YKg/kWaph0zoUcU0rsaD6fv327DyFUX7SmacZxpJz7vgcgY8yh363vttu7PSIw83I2dxxms3nrHarGOI79RnIsMbXOAgkAapGcY0ophVhKGQ6HIaQQdQQeoNwe8tt9ejtOS/uzTz/8L/7r/yqF8bBfg+bD7k0UBYQ6/SOAqlIQgKvxAKmWkzaXIDKotSAi1cWwTOPkIiilFCE+eTN8P+zBg+H00wqseNHDqdNTMcHHRjQRmSKi2ThLKdaGT0WxYoyW2dZVm7XaEgBqKQmN5CylFBWs02y1MB1SnhaDgiKcwt5RCfS+YabGMrM2jEePeD3ihqebRSoAIAhGAQSgqAGjigL5KLmtCrX8ipNg8CmBQ0Ugg0Jo2AihURQRKCWJANkpdIlA0YzAxMBktX8ne6Oj2rVzDh5gnlwtHVBBBSalGwZEQGFD9VYgskBGUAItXEi0sJakyETMCRPWUdemAdGx34tIzKmaGNRHXym1iOi5QUQmZmcRNIGEFBWBkUjk2J9VEQHf3W73v/r8q81uiHXCmskYlhQVAGnSelGtii06sROOa4b5aM87jp33/bBv5nOR3LZtStD3PfsGsZprSoxxtly8fHk1P29ijLvd4dGjR4cDbDd3l+c/9c6cLechhGfPntytt23bIiobyiU22JytmhhHxC6E0I+DIB76sWu70rW3my0i1unVvu/rDOt6Pez321nXzGazcTggMiKN47jb7ZbLpbW2nrTGmFhy1RhqmqYWmuM4LhaLunNKSgx42O5+9tNPf/XrL3xrX7168/jJ08OYRKBx1nETRXJKQYHQQb1hMPkwANTD/zjoX/Q4viZYGAEFHs7z6cOa7z2AbTqffy8Dn0nVPVpBCRmQgalqsqGiqpG6HaukoE6Nhno23Pszwb17gygSqgDWKd2pZ3PCfx4Gqt8a+cV3NWR/M+v9/tB8h2uDuUI1TKpackEmIjZIqFBSGg69Hn229NGlb9sQy6mo16OMWUrJ+iaMkZnb1lTagve+aYY+SLW7JFAlYAZmJKKua0+ODWMYc84xFwE89CFn8E3HxoV0OwRYLt2ChZhn8zkZsx/6EEYRaawnImYrEoxxXcfL5TLnzIjOGET03pUUh37fNtQ0Tett13qn0ZEalJySpIAlGSjGoJYARXLOJeWqmjaOY4x5HOOYtE+6F92lctenXYARoGuctX51+ezx8w8sFs0hDvuX377BavYGLBNmJ7VFfOLkqkIBleptjmTtdOichLbhiJaXUkRoCoNHYyI4PuX3envwoNSbUGtAIqpVFxOjHk1tmAxzSlKPeAAAwpTyGEM/Do+aJQFqgRRjyZpCzrkMfRDQyhLCo4LatJBCngIDTJApGraViXh/2SpNYIwp2eppMKCSoQ0jUU4FABShipMAACoykCMHiIJmgn+PmUGOBUAeBKzaUFBCUKxGCEUmMo0AYAwjAlX3FEVFLMawMJYkJ9Wxeg8zFyKSfB9Q6XghosckIlJfOVe2KoCiMz6BFEXVXEQAUVEVNeeMCGRqsAckJFOLOarLzFrrjaPqk8DUh9E4S84gIhl
2zITA1YIAtBbEeAQ5gfB2f/j61dVf/tUXh74AAAKLIFsS+A9fNfUEABQNIZwv5mPoQwhd50IIxsAwDA3h2WohJXWtn2BUgjoP+t133/3pn/zxkyftbrfrh/3jiwtQ6Q+HD56/2G73/aFfLWaSsiFMcVwu56WUEEbn3O3tbdc2r169/uyzz4z1KWdEHEPY7jYKcna2PBwO69vx+vqanlzOlgvRXIoYJs206w/dYt44i8KaFA2TiogMfe+d2+73L1682KzvLi8u9vv9arFIRTbr9Qcff3Kz3v7Bz/7gV7/+wrfdd9+8PHt8nkLSonY2s6bNscRQjClKhaeuHCqjAlXFVFXU075jJCWsBK4aHaZEtJ6udOR2HqFOfejP+vt4mVyQAEkRGCdXnsmQkJCOvTpVqG6lqoxWFfAdic6Hde59eYtKqoDTGNV/LLb5Y9Xej3ezH5oN3f9qeWCiVA/Y6lRSsf4KoKtqSkqGZ7NZyvtTuD2Bcjln31LlgxGZ3WGfUvKubZqmD71BsATOGWIjIo3z3vvlcjmbzUi1MkVrqZFSbrv25mbY7g6zGbnG+RbbdnZ3twEU5w0R9f0hhNF7i4jjOLZtO/aDMcZbt1wux35AJi1yGPZny5X39my++PjDD1TVOQeSO29yGof9FiQ7y21jSU2Jcb3eSiklV805TEWGMfd9n5RTlj7DPuR1zJtBhwICMArd7nbx3/97JF117qcfPB1D2AfIpmQUZkXBXKoYBj1kXWodOsYqfDT5fFaj8Br2avQSjfoj1/ervXpNY5FHIqJilSA4xtEqwSNCRMpE1jjvdWJbTQ+6fn3OuWju90MYUymy3/eb9W6z29Zqj5m7bkKnAcDzNN9WMf8j956Z2RIfFbGttbaGPbTWEqOZXicy1Y8nDRRCMgatwWqGojXLBIVpqZWcU8ki4r1F5JN14mlJS4o6eRc9EOJiJK2Wt5OdAvOxAwr3Ye94rFPFtmpt+d6GUquqelTgRCUSAMUaq5UUUBSOtJOaMlpj0DAzlzwxlWpMylKw5EpOsWzYGmfs7rCvT6qoWuSiUmehrHeplIrG6hFYY+arm7uXV29fb4siOGcKmpTSpEk2DZX8QDb88M0CAKikMnF9h0NYLrvD4eC9qxZ75+cvrq6uZrNZnZzz3qggEL9+/fp/+Y//5JOPP76+ennYbs/Pzq6vrw+HAxFJTrv9djE/O/Sj83YcwnxxNgxDCGE+azeb3fnq7O3b25/9TJVo3nZEeDgcYoxPnzzpuubVt98xwt1dnM/2jx49iqPv+14R0HDFk4ypvhb5lOsPwzCbzdbr9T/8h//w2+++FpH9fv/8+fNvbg+PHz++vX6bFV9++91HH31yt9sNfWhdOw6x5CStOusGpVJyTgoGAbDAfQsBCbBgRelkylwFDAjcNw5+iKvxt3PO//1cZozFIBkLApAFMpKxRFS7vkimBvBcSk1SKQowsyqQ4tTsZkLGoxmLHgUroJRJuvQYour9Ot2y91HK48c/nLfJPYOIHn7jj93l9zji0yNVLaqt98MQuq57u3+7bBdxDIRkjXn16pX33ncujoEZHj9+fHN3W/JEl3LOCVIIoToQjeNIRIfDAZG7WVehhq7rnDfX19v9AZjGxaJjZ+fz+cXFRdc2iLA7HDb7nSj6dpYFhjAwJjIQS146++LFi81mMwz9fN61s6bv93d3N8zUzmfOOS0SS9jt9iBq2V+cnUtOqJJjar1bLB4T6HZzd/P2rupHPH36tDGm8RCHogTE3DBrzn0/DH3PzI33KrDd7t9cXW/2PQI559aHsNlpX0CdKWDHErMSW3/x6Mnw6pu7Q/92vXly8eGf/fIvvvziVxEADIFCEUElNGbyVRUBJCAAFshaRKOIZ+OsI5oYfTXs1eeSc25aV8cnTtVeBdiJiBBUoZRSpBCRM1jJRPVUqgVZ/SHMzM4yMzEzoFZWverEibSGpJSYUgpjDFkKGVaEMYZhjPvtYbs97LaH9Xq/3ez
W6/XhcEipLJfzZ8+fXl5eti0QkThTlU0EoJRUUpIwtZBqlYeIBOi9r9ri5TbZer5aS0RAU1OwEtPJGO89EJ4oqeqPDTmRUgqoeiaypkBh4qPMQqp2x6o6cw0gKqHgMdUTFS2MVG3RJxnpUpJIngh40wapAS9PUgx2+o3valOMcWuMAeRSiiB475uuNdZBRpGiWUuSklNOqeRYMZiazRAROJuHUCq7m1AnP14ka6x1SlhUnj5/HkIYQmiaxjibVSBL1WRIw+Ccs97HnFSVDKeSQ9F/+i/+pWUQZ9dDAkIytqToDVlihFxEVJHZKGDOIjqVawAg1WoKkREZYb1e+8bO59j3PRbx3msuBPL2zZtH5+cicrfdisjjx0++/frl6qwdhvz111//9JNPfvUXv1x03ePLi9bZVNzLb7/99JNPPv/8y6dPnsUYX768bRq3W991XffkyeXVmzcxxtls8eTJk/V62xpzGMIHP3n++ee//vnPP1uv1x9//OGv/+pXqxWOo97ebhFxuVy6ptvtNlny8mw1hDHf3lxcXJw/uhhj2O128/m8cw0ivnj+/Ksvv3x0fvHdN9967/fb3X67uzy/cK3zptnsrr/75utPPvuDIvDlF1+fn58vFouUShyDMQ2xSyGggoqCgkUCJRIsWUsuztZEgRmQCVVAqFI+TuYVULHQKnLGbBGrVByUMvXZYWob3B/U3z+3v39K/61cP/bz67+bnEAZBcGSIqKyQEEDlXcHUmoKqURa6uwinmDGCkHRqas8tTS0PIQfK0PvIez7MCP4e8gOvt/YEwBiHvoBEeuMNijMZjOsksBQUkoXjx8dDofFYhHGSkPPIpKk1NGFSt9iMjHGGJUMd10nBYZhWCwbESgZrKXZvCPkOqXHzLWhVV9GLTIA4HDIVUJlGAZjUu3Z9MPu+mZHBM5DayyBEIi13rFxdoaCaJBAShhR1SIwaeccIjI0dLESWcxms/m8cwyhHywb2zU5xP6wLzEZxqo1c3319m6zTUWNbWcLe7vZvn41qoN9hKEAEkTEIigASPT1q28W3eKzn3709HJ1dXN79frV/pCXHbw6Qv0P/ry/2aAkWOqosyDke+z5vm4upQjdY9fwAKA+lnpTF+phn/hhqXdC/iqxZbrDoAr3FiIAOpEGTd3B0zzJbLbYbHbr3X6/7wl5sTpD8sQuFRFAjrHp2tl8cX5x8ejyvG1bMbZ+IyPWMktVUaHv+wq/TnxwhQiS+sPjZkmAZHiSPylSzc22t2sAsNbGxlekgZmLMTnKw6hDRBMSikhYqHojABliZUYFVgXAAsqndgNX2WwBAUHB6kiA1WwGTyMTzAzHiYJSSh7jiRF6qgVVtcEyDiFL5aCyZilZnEtSoT8GA5gFNJUYQs6ZVaSqzIjKEaTNUshO7W20hpjB8MmzcKqVvWNnJ3tThCyCTAKT1lotd4Zh+OVfffHy6vpQgFNF2hSOSIxCAdWKxcG9q8P9OfMQHKqZPYBl5pyTqjKbnIvkgl5yCnWOvn792cVyHPZdZ9++ffuTZ48vL8+vr15/9MFTBLFsvDWM1DU2hvH8fLU77Pe7HtG2bZtSms/ndcrQu3az3s2ezG7Xt0/y42cfvPjuu+/+5B/98TAcPv30083d7du3bzfrfrPZ8CS214pMpNPadgGQtm3zYtH3/Zy77Xa7WCw2m83HH39Yrd6vr68/+Mkn337z9SeffWYIlvMuo/viiy/OH10u5vOU0nq99s51XeMth2EYhtBwi0BCKKJSoBCiKiIJAJ28hqc/9cRNe+cAP+52fdeZ/QfP9vf+8XfFeTExZ1ZlJSEmVlPPH5mYHqTC0yQDgCgwZBAiUJ2MiiriWIs/RFBR4nfe28m64WGcf9ie+TuNfKeFfv/bmeou6sNYBcZWszkJp5QrnyWFjIgfffRRjPGzTz7+5otv6zeeGi21mTSEqKqlaK1Yuq4jjCklQm08IMBs3nlvRcA607R+u90eDof+MIgIM8eUUy5
IcD7rTgCa8W61WsU4OxwOwze3sw7m866KIREqlIxMpNh4b5yBSZGyuvaII6wZcbNcVNYoIuacckwpj0USalXMpxDift/vdrsx5JDKmGQ8bELSMUAAiIkPsQwC1mImUmJmZGsXyE8uH33yyUfjYXv99ma7P9wMcN6+c7cnB1cgVQA++Q6ATnw/rXAWTN5PpUKdOWetEiFaI8K7lwjhCda7bzfU49IYc6+KORE9EI5q/Tqpy0/cDDLMYibKFREZw9Ym0giiTN1yOZ+fGXZ3b9dJ6Yl17W6XQuy6ZnG26hbzbj5fLBYwnxljfNUjmAxsBQEOh0NJOecIRUQkp3To+zCOfegZEDKd+n9EhITecilFS0pRM+JEMWWuH5yWKxGBEhqTUio8taZUFaRUQo0QTdQhxcq9RAZELpqPw3akpEfbVZIc6+3Kx6Gjo4bndAbVu1pzCgDY7baHwyGMiZnJGAUga7xvbdsYY0xjEDFJLilhKlREYOKmllLKcfin7riCaKxlZ8kasoaq9p+zdQiSnTWmMnu0TiwwMxLFnAS0cTbkdLfd/MXnX97tcgFgY0kZja1D+ippIvvexzy+Hwg9WqfCsUk8aXUCWGtTGlTVGCMplxRBupQSKFUJzQJ6eXn59Vfb2eJsvV6XUv7ws8/+p//xX2jJj87P7r78UhcLAFkul2/fXn348SfL5XwYBtGMpDXsHfbDer1dzhZv3rwZY/Lev379+qMPP/z8V79er9c5xyeXF41/olL6Qz8McBh63zau8SmXLAVytoj7vhfJZ2dnAPDNN990swYR+91eUg790PlGc4klaknOgOS0320QzflqmXS73++7ruv7PqdkjYGjth+TLbXaK4qERCoFMhBjUaGCQGXyagDk2u87ClVPpzsRVfbm8Zy/Z+9PUeN4tH8/HJ5i3t9/8DMxCDMaA2KUBKsyoxplQWRIBFDbP1wzgKM1xZRxw4OIMuVuUpD4HVzxN4T93wkWnEpWgBCCJ1s3JBY+xLi9W3dN+/r1Vde0XddV2+Wvf/0NHqe+WNQYow4BYIwppZQzGAM1HSOi+Xxeyn659IjqvT2O2miVddjv9zmVruvqkB8RNY2t53491r330kUBIJDLx3bedu2880eZmHpEHTY7d37edL7ebe9M1Zm1BnMuVSKIteSxr+1DLLHkLCKWyRlKKe222+1uPwyjbWe+W/S7w802bPZAFpoWxmhHKEUBlStUwYyWsI/j2+s3jH/0i1/84j//z/50s775v//f/tvbtwewAEqo1RAVAIAUToAbIAKyYhFAASzvItWVHPTwzH033smknlPpIDhZ1h1bRXAUnNSaEcMPLqdqFE6kovVAL6VAmtaqtVYMuVm7JHbsu/mZJWf8DG1ThwtTSt6abtagtRm1ELAzaC15x8YwEiJW525yVnPJJWqRGtHBGTO4tB4TAComKiRkylShWlvlMEVLmXqTNepgdYGttAEgIFJCUYaCgiqp/nwRqTPsxjX1e09dgPqWle4xJX2Q9knOKiiaVVCh1FH3nOTQ7+q/EBo2yGSRVAW7FodDn4o45wSwFDXWwpzGcaxODsC14FIiMERZtM7CFlU9Rr2cs1RpZGPYmBr22FlrrbGOmQtM1kLV0p0BclG2FpAkCRl2zvVhvL29fX11kwEYiU1DkgCplFJh8FOHCR9cp2rv9Im6upgpFjglGXrkBpcYUUtJka1z1p+EKdi4yu08HA4/+/jDxexf9P3+g88++8vPf51CBJHnT5+8+ld/3u933tn5vOv7/nA4XJxfWmsRoCoozefzN9dXf/jzn//qV7/89NNPL59e/MVf/PlHP3kRx9B4N5/Pz86Wd3fblFJKyRpXs7p6Rqlqba9UtLxKWl+9ev3kyZM3b9589tlnm81mtVod9uvLR+dvr97Mzy78vAlxePHs6ZffvvTOFe9VtWkaKDKMiRAWi8U+jgqowiJwFGqlQswy9fZwUvYuVclFHtzdk5Lz9875h+f/fYz4fsC
DHwh+f7chsP4WM4TELFbZFmZGZSQLlY8CigCEjEwASoVpeo88mYfVCycnyUnbDBFV7mempGScRM7wxPfEChE/IIHipO3yt0b/ee/4Oz2qLCWlBIKlFGRHzIYYClYO/5PLiy+++Ori4mK33yNRPww1Fz7R2+CUKDGPQxCBtmVEvLu7Q+Dz83MpwVmbc5SSgLFrFyB6/eZ1HVMzxjRN41yTs1ScaTYzpRRnebFYNE2jWrIUbLwx6Kz1lhs3tYUmGk6GWdc03tZ+kmFkUAC1VfsDSk45j/VkzKoKgt57Y0wMw83NXb8/FM1EhlwzjHEfxzEJOGgWMGS4G0DJJgipkvMFVRVyAY7zxmz6w+tX3714/vjy8gNmYusGOUwx71jqVZyTAIoQ4P3clIAW1QLTyGN9RxXnPCrmwBHUPAY8nRjSlaL53g6ZiJ6IRUrt6glVtq3SiTNZMQkmQhKB2pObfhQCGTZqZ+erZi5hTCkUYQPGnz+brx49vrm56feHEEZAUS0JJEkRnIxfS82E6o8CAkS0ytawGi2SSySDxrEsZjqvlu6Tz18pJRcRKUOYJkaMMdUHh5FYBPv98VCBmk3bwMw865qJx4+aQYoWUGAAzJHq3RA5QSgFgK253wVHJBgASs6SNZeYkxRJMeQa9uqfuUQpIJrrnyWrsQpFqlFbSjnE7JpuuTwzjp1zrrHGEDEis3VsrRUD1hhVlZRTSvUtp5IVtCKZpvXoDDpD3hrvDTsSqaJlIlJVQhEx5Sk3QSayBg33w/D6zZvrm00GYOQx5ZASEEHJgsXhj50DkwpMFVk8hb16q2rz4kRns9aWAUpJQGQcTrMxWUSk67rtdu8NXl9ff/rh86ePn1y9efPzn/70008+3h32IYRnz54tl58jIjF6Z1Oy+/3+g+c/qUktM8dcZovl66svQkpN09zc3Pyjf/SP/un/8N+nlNiQSJ7Pu8vLi5zj7jBut9vZfGGdRaDYp5oTpxx2/aGxbj6fbzZ3i8WiGo/sNjL2Q47JsnFdG4b+sNueP37Seff6dgPGPn5yOcZSh3Rqr7zeWiJGpImNIViEUAhRaQJdUKrQ8STPUoFPuY97yHqUaJdSJ3fr9Fo9zP/DEl2/q4LPhJCYRUSKUcsohlTFMhrWyjpHRaVq/EGVzHL/oo/FbGX4iMipDfNwmL9+wYN5vt+lJ29NjWOYiBVN04iIZL2+vq4ki7r6Qwg/+clPanr1sM8vIpXV0s7mwUTvy2w2M8b0fc9km6axBjabTeV3LZfz+aLbbQ9XV1ftbG6MYTKIXHta9ZVUc/FZ11xcXHhvh7EfhkEkf/DiWR3TabyvwsH1t58tL7yxgpBDTCmVkAWRVPb9XkrSo1AOg3pnnXMKrpQShnF/6Ic+5Jyr303KcghpP0JQEOOQDYRUJCWhICQqRhkFJOVcCkI+uziPY7+9u/3Lv/jzr7/89Zs3b7787o4Appg3KeNPDteTBtQErIFUk1fQopOjb50Vq2fNuyPq3w9+k3DV6cR6MCd635qq0VFEThoux84R1QLoVGIm5mnLMgNABnWztl0sU5QUEZQNOmjweTfbbreH3XYY+83mZn/YjnHMqIvWCwAzk3Dd2AxIAEAIgAZRWakAM/siAEArewo51e1PUpZcdpsNDWNKiZHq5smqApIOmykbrIIsABUY1TI3VS5NjlJ5ohkRmOp9PqmrCBRQAgaYyOg0zUyCAFCKUnlA90OTIiDiarNPjYCoWiUVISF5e3sFACGku7v1zdu7/TBa433bLBaLruvmq/l80XVdM5t18+WsbVttmZxX1ZJySfk0cWSMMd41s66ddbZxxGysNd6RVMFxjDnpyRYAQBCKCNHUkU0p3dzcfPXVV3e7nQIoccq1RGTFzMyo+QSvPVxK00l1j3NOfy2lMEGtq5xzmvI4jhers8SYY6rAOIg654rivh8fP3781W7bNO1ut9tutx988Pxf/6ur29vbn//85//6X//r7Xpzdnb25PIxGYp
Z+n7ftl1Ku5xjKdp1XT03nHNnZ2evX189ff7s9evXH3/04bNnT9+8eXN+tlot5tba5XLZ930/hqEPxjrnHOjEzqvgRAiBFBaLxetXb25ubp5ePh6G4fLy8vXr123b7na7RwsfU3r27Mnt26sxp0fnT9+u786eftA2nHMOY8g5N23jnEtx7PveeP+gMq5rSQFUBZVABYFQRRWhTqnI0RLgdLCfbIEREUnfm9t+ML32A6Xe6XE8+OCH49/fVqSYqr0xRGYWMaUUYRRhFQRDKjXsCQgKCRHVmSpj7hmSNFnuVpCqnks11Mtpq9Mx4X8v2j18838Xwe/7t/j4bJGZQ9g75zRK0/kYYwny5s2bnPOrV68q6O9d+/HHH9/d3Z0O39qV4QcsAGZ2biI4dF1njffeI4Rx7GPU5dJVLZUqy5RSmigVIopgrF+uPCK+eP7UGGNtFbA2iI9iHIfx0LbtlOwTn7QlEbTzvuSsRQwToS0xgAIzlhxLiohoHeMkRJFK0pDlcDgcdruUA4Ma62PJIYb9EAXJNSYXHKKMJReybtbuN5SB61AH1c4SAIi+vLo96+z52bLSpl++fs0M3pt9AlQ4Wg8c7zQA4PvLdyr4ikANG0T19Mn5/nB8N/jd/60KjMFxnAuPo9anmdnTmhRQQDCVx/8A6MNjGWGMQcPAU6stEoBh17R+ZqSYFCX2eRzDanGmTO2s6ftDKuN6e3e33xYSXKxSKvV1uTqQPlVTU2vSGNP4DgAk5ZxzYmusbZx3zhmiUkpJWVJeHM7DMEpIjMjMNZYBQOq39bZIyjFGKQkVGDCOfUGqEmUl5ZSSFlFVZSQ0xIDAFZYsUO5niKqQpmRVFBQVVOWTZ5O1tvFNfRbVN/Ve5OyYIM7mLmd5e3179fpqtz2MAUbs1+t+fXvnnGtmvm39YjG7fPzo+fOnfHEG6C3xKcFtnKuULXbcNM18uZgtFmxZQNEaNAwRmJkMY6JSWeI4Sc+UUrQOjKnu9vtvX738/MsvxpgAkNhCzkBkrY2amBFLrtVHOS2k+kfFP4/ki9OxkJJ2jnKWEMJsthxjGobRPX6SjQ0hVAZAjYjI9nA4PPvogzevX1ZmzW63++jF81//6pd3t28/+4M/UtWcQs6xaZp+HGo2RkRN01xdXc3ny4uLy6urK0TcbvePHj169d3L1ThT1evr68vLy/Xt29evXzvzARskNOfn5wK42e6yyhgzSjDGFJUxhrZx9ba0bbtazapwTUrp0fnZ9fW1cy6ldPP26vLx08bbr757mYmfvvjEj3G/WXfLM+99nVeuu0DFxBgMOwBA0pOFTt3CSiQgPDXST011FJnEfx7UzQ9C1zsnPJ5aq7+Z5PLeB38Pl6lz/giqanDqzzOBqCKoIGqt9pj5+AYmp7r7cHIExx8QOB/Mw9L9u3ov5n3/FiDi3/Vbr2l+hchTGI0xfT/kWOpiev3yjfdtCKFr54vF4uXLl3DUeKzhzTnnvS+lbDYbQq5JnPWuhj0iGocQQmCG+bxDxM1mU3uEt+uNNc5aS8RsXNM0bdtaaw1j1ah9+/atYTo7OyPWUkrf72vMK8eTfTqt1NV6dD6ft85GQimJmc9XyxCC5FiZLNvdtj8ccs63G2nbtm0soQnjPueMhp1tvNeonKuWomFCTIJFRdAqGkZh6wgKM3MRRlhY+MmL5967fr9/c319c7PJAqnPaMzpmdWu3ntGn6cegEzKk/efOpV0p4KvUlreJ7aAiuip2iO6xxgeJjencvx05j5cXzVlUbifxa48xvOLC1VMOUsphtk1ntApmZAiGzebtauLM0Ttx8N+v0OmcRwBwFp2Rzc7nsjxIrkogXOuspDSGMZxLMYa733beu8JsKQcMADA/GzVti2Idr5pfUNEXN/LuCul5BCHYRj6PgxjKQlV4ziY+otySTiCliJFVEIUZkEwbIiQlBQnxjWpalEsUkBUSq6FMFOLSMDknOu6bjabVRUFqE7cMVZk8oRPHiKnVIh
vSymIMGuJjE0pXVxc5JyHcTjstrvtuuTUtbZrvWst1N4bs7e2bhlmRoNd17WzWdu2YDDnXA9araJuTABAAoiYVEopCii1YU4kItVh9epqlxLDVAseGyJFM6ifGE1HhOFU4f1QRq2qBcAYE2JMKVfgIUaYEtMhVjxGgWZNw5Y3m019IzGOvnV938/n89Vqtd1ut9stIrZNW3dBCGGxOl+tVv0QnHNvXt217ezsfPnq1Wvn3PX1zdnZkoi+/PLLf/Dzn9/e3j67PP/000+/+fqrzfbOGLOYr1arc+MbYrPe7odhYEiLxaK6Lsy6xhhTYgKAZ8+eDcOw3W5Xq9U4jvP5XFW990M4IOIXX/x6MV+dn5+9fv2yXV4MScZxnM2Xc7cchqHaUDNT13XCFgAA8wlYB6UHU2TTx98LTg936IP56WOH773a5j8I7P19g5yb5FnIARsAC+gALBUDaoWskkW0aiw1Fq1jy8haCtdhDmAUrUeJCCMJ12EpIFE9tlGoNiAAVIEEFI+z7YSVaKdEJNNNRwCgH3HZPnXs9ShD/NesER9mr0QEITXWrZouh3gYRkPUOL/ebG5ubmpo2W63jx49+uijj169eqWqY0mC2g/9itk6MFDOFp40GGpEQICIrEiWYoqD3TBqQjbdYtmdnT+OMYcoIWpI+7abEZFxfjabedfW/pb3HvOoObFSZ2cx5t1d33b2bHFWpM85xtTnJFqQqWVyhrykjYSgwGnIBy0AlNUOm9D42Tjwbld2u12/X6c8GpO9tz0RGrMbRiJ6dPmcct5s95ZsNIhscz8ctgchOJ+fRS13t7tLOwypjANgn+35IzSLdb8BkPOZf/Hhs//yD3/Bo/zz200gukbZZQAARSh1rOXoJI4ArvrMT3vEZIMCOAI+YVLirLkh6jxrOYyHbTlbZaWsbAsXYgSxIAzZShziHBiMMUBVWZqECIiMo5QSkCVbUompmNYxY2HosNQZ5+rkRkhVo9KqqCoa49pmJgV67WOMsitEZJGZGbVo6hnQNRrHFGPMatq2/cnTJw7h6s3bw+Eg5497gH0CuRsuHy//+Bd/PJvNXr18mXM8bLYGyk/On3aPH0PJ4+1NLPuRE3XePr10XTvGpLnoGPZ3a4iiSI1tdLmI3iWVbMk1HjetNUZSKfuDSeKMKQp9DISZnFMjZQx5v9UxwDhKiHqbBxBubXu2NAuXRYoko1jGwQtaIcoSY5Zc2DjnXH9BmrLm4gQtMjqHbWua5jAOxtnDdnP39oaUWf24O4x9vkW36df/9vWbV4fd6smjRdMx0dliefnobAijYE4qt5u3b/oD9G+NefRcY4gHms3aZRMC9n0vgLbz8/l8Npu13RyYCaltOkQWEds1WUuQlLEkTghiJHtLuunPmlWOSH6+J/tvv/jyv//l1SszHykCccmBGVBFY/K1BM6gbAVNgqRFDIoFQc0FLBtGkFIKpWyMMSTFgCYYU1QCMbDeb5cXq/3VNS/mu5fp/Mnj/X5fSmocpf3d2fkqef3i3//LP/nFL3755/9WCl5d3xyivvjZP/jn//yff/fmu08+++TP/uzPPvn0427WpNyill989vN/9e/+bePd5ZPlN999e3Z5blpaXM6bvf3m81cvXrx4/eqbb7+5+ejDF6qz69tX5+cvbu/2ZysHwCjaWXq8nDmIfd9vh7Lf77uu6bpuokqB7PrdTz/6eL1e361vvrt6+Ys/+MPL5fzP/uzPTOtnZvb6y5uf/PST292h4dnVdru4cI0zQ4z7YaSYjHFN1yBOnCmpEvBSwxUgIGshBcxIpo6rFVA96gxSYUNIhKxIRVUK2Afay0f5loxIU69D7Almq6q0U5L6zqyTPNSZ/7uIc+8FCzOO0RhStVrFhWpizgqGVBlgIkVDxS4LsKkE5TpKDABQ+Wz4ANisWcOppXfiT9HU9qtyCfzgfeoPvrjvv+53OFp/jYD3ENOvL6LCFDWFrHWbqp6S3No6rj4JehQTAYCUUh/
GDl39UfUrRUoWMdY4ZuOdc84aG9P0i2ruXH9XJWJVdL6O8VUgK4TQOHJkFG3jYUyxxJQklP2+yMAMhh2T5qwlSsoJSjYoMafcp5ByLFkUs9D+EHO6jUH2+z4MI1LpGnbeAOg49tZaIqwanqWUmAoRDSlB9eNuTBHNKhXBFjmBQlPKxkiplH435pzHPvxX//h/9b/93/3v/0//1//L//n/8f/S3x6clmPHRd8b3ZvUkPVYAmLJWljA3C+Ad+axHgilnjQQTo/mZOBXv/Rh4onHWbG6EnLOlZF4/zOZEdGyA4BxDKUUg8Y5t1wujTFvQnj06NFyucxZjLOI2M1nLz744PbmWnMBTabx7J0B25UFWTMcbo0x1jvfdmiTFjHG5FgyDDmWyZ7JcLU3R8PWuxRSydm6BowWBSQ8m3d+2RlnFSWmUIahhDEdhjD0cEFg2MycX8yKoywJST0ZiFEOcdgfyphR1LJzjbfWyQedplyGEA9DGYKkDIhJymy5EAQbBtt4LJKH0KcQYxCAw+EwjqP3/vHjx5er87ZpLlZnzpKA2ob9rBtTf7NZCyYAOJn5wYkwYl3btm3bNk1TFZOPzYJKwiSqgq2AE4NCUVWt9dVnr0jZbodXr69ev359d7cHcD+4rrg+4iPj9wSDHzGCSeeFjlbAhoEIDEKdpM05E03SJxVNiTHOWl8Xatu2CBJCqK8/5/zFF198+umnE4na2rZtq2aTtbYIlJLOV2f7fpjNZtvt9s2bN5988snd3d3jx4+/fbtNKTRNk3Pu+z6e5fl8vrm7LTkPw7BeQ57PFjO/XC6JARETsB6nfdQYRDSGnXPffvvtxx9/vN7czmazSjH92c9+9vr169lFK5pPUu/VbKQuckUiMnXSB46CHtMk8WSVVUGRd6S9H5yn79PKjqSVyWP1vrbW35nrwF/zMvs+OqZSVAqbDMWgWDEMYE0pqkXVAjCIgCoag6SZmRWMgLIAMydSQcCqIAS1GgMAQFJUhXLyukOAabAPYNLgOCrK31fE+iMh8OFhd/oA7vUCfuB6r4l96sHGEBCxAvfDMGgq+/2+7/txHEWkbf1sNvPeVwp7CMEYk1IahsEQkDkpiUDOORVFmmJYSkkFjSFrue6H6kbrvWuahoittRVTUuQYY4oRAMahGLLEnpGLSsklSZISu5lFA8goRVNJ4xDGWDTj+dKnDIKieRxiEOWcdLMdSuaSYbc7HA6ha2E2WylCGPvqDmOtq+eOiMQ0UR8VqTJLxxBTSopgvYtjhuOSl1wsG88uhtR09puvvvqwXQzD8NV3/+5f/pt/OwKA+a35xmVCO0kAS9aM5YSnETIQHxFdKKBF8RT2Hq4KfMA4Z2adussqIknSacr4SGPBUzZ61FmeVMRUNagQnnqNCABMbIxBZAEtRXLOwDRfLq21bme+3a4lZyIypkqcJCV0s3aWFlklp0EtZ0tkyODMe2vinqxFa8k3zjkQNdaXgoNSgV4VhBidN4YKIxk2TTPGlLRY4wiQBFzbzFZnPHPAoKo+R5jNJYbRb/u9M6133ttFw60NVFIpzpC3jksJmx0RBQ4GjfcNO0topHHoQY1XQUmCQOisafz5k8dDGHchmG7EIkWAfNOwzTGmlFT14uLiJz/5ybNHj7u2PV+ujMWUUoFkGs/NxZMXz2MZYs6OjXGuKjzUnLJaH7fd3DeN956IgMykHo7IQqpCk1dh1TKHAujbBjKTcYnoZr356ttv3t7chAR1PTw0Pj0hchVuO8kanNLuKvEDAFRVUqUAlKq1bVF2PQBACMEYu9sdLmazu7u72Wy23+/ruqn2s4YxhDCfz0spaRw///zzP/mTP1mtVsMwMPN8Pk8ptW07jmM/hJzzYrFg6zbb3Wq12q5vW//zwwGXy+Vs1g7D4Lw/7Pf7/X4YhuVy+ebVy/PV0rDGGLfbYmh+kp92zh29Sk6BhFSxgquXl5c3Nzc3NzePHj06Pz9/9epVLhGQx3GMMaQcqmcvpmS
cV2JAPsY8M5nw5ipMVu+YEpESIJlpXA8QgVUz1g6FIgGpHrn6DwawT6jmsS2ByPfWK8fN+/cU1f6Dl+mHmI0tgCpo3SQNZUhAxQmBqmrRaTYaVYhNOnZijDDAsXtHdWbxwTsjrRYN79g1ENFpwr1KD9YVe2J+KrxD+PkNLdAf/OsPfvF7EPM4jsw251yd6iTmzWZzqvbatp3P5zUi1qG6KoIVYyyNc84hgzFGUqmteOc9AMQYsxRnoZ3ZpmmITCkl51glqtu2ZTbVvGYcYiy57/sqzTXsD7XXzmSdc845NsIkN3dXxEKE3nrrOt/NZosGwdxcfRdjFFZV6UMUwSGUu7s9QMNs+1D2PSiBICFTFLWWAaBK7J84HdXluSiyaGXZxFTIGu+bHAVgGkgosVBrLBvMgFkvzh49e/oipvL/+1f/5i++fYkMaAnib7fgpBZ8NXQhiEhKKYQwE8GjlAYAFZ1krb//E07pzomxWdnV9WibnBEfqNMdVbimbXmKeTXspYJH199qCwCIysylaNM0hkwIgckaYyxzznkpZewPr74bgMzZxbmqphSGMbpZ6/IoYwmkg2ZFK41RBjdrwdgMKKAFgdmQI9fllBLlknNWQ2CZvBMQqCImzpeYYoiWuG1nfrnkxQxQMxRRBWJyWA1MlcbCWCyxISVSBmQURjWkgOQb181SQSmaibKoas5jsWRRShINIM77djWfLZbt08e43dF+4+KcQE3TmtajkLx5U4/1JxePnj17djZbVLMJ7x0ZE7KWUgzy8mxFdiUieuhPTCUAcG5qY3vvpybfpCBAD+kzKKpFoIhIFZiZpKPIupjyN69effntd7tenIXx3e1OD7hTpz4TPiDuTvygI5+FCBQmniEAEBmFXIv+pmmGYbDdMsa4Wq1O50+Mcda01tqc83K5PBwOw34vIjc3Nx9++OGbl7+uZgjjONb9O4ZUtZ+ePXl8dXV1fvEo5/z27dvzR4/Hcby4uHj16pX3M2QcQ9jtNuerF9ZadtYbrJZeux1aa1XEWuserNsaxUMIKaXZbHZ1dfXhRx+s1+ur12+Wy+Xt7e3jx4+H7c5YF8IhlzKOIzczQvXek7UFDABW52AE5io1fuK9AhDWDQXEApoRGCBP0ktaz21SzffjeoLIWP23jp0sfEBJ+1uwWf07uswYQKWyTSUrlSIiVBiIQFUE7msyRRAQglz9GRWBax5dVKHKeFc9HzAC+MCQEO69FxCAVLTyXCsyCogw3bU6wfcfDngP+6U/Vu29J935EO0spRBVyWDOOY99X2lslT9WSSs0uW9PAwyV8B1j9N5ba13jRaMxzvrWujakeBgSKllrmYphBtAcUw1shKgiIYecJeYsAiGEcYx1Ke8Ph1IEBIkq9aFtWnYGHj1+Mo77YRj2/SCHKGkbxhJCBs2IaBsLALtxLBkOQ3p7J2Hs2wZSgkMGzlDQkPdWso7pJEasAMTsnAshWGtRIJV6FuacwVnDzGycMQkdikKM0TjfsGOA/T7/8R/+cQjhn/2zf/Z2vWGEsfxNRktL9dQRAEAiqpr94ziWUowp9wi2Uu3OnVR+TmD46bkfUQQqUk68GED73mqB6bzD08851Xwi0ljnnDONBUWIAKBAqAiVIOBdw8aUasVsTNN1H58tb27uNtutYCqLxXEcV4z3tmsyFrUcSIEVAAWpWS5VMCmMucjkt8DkvGlaHlIBEMPgLBirWARJjPGLxZjzentLRM3yzM46JUTvCYqWBEpMgNa4nMeYMTA6D86hN9aTY0YCEmCHbL0gjVn6fiwA3jtjXHPmLdk8DkMolJNpuu7ifLY8A++o6/xy2SA3hvOY2ZgUckmZAC9WZy+ePlsulwZZRcYUS5+7rjtfnAshOnSNR6NFBQHGcYxjKirGeWtt23XGWtt4thbJAkD1UlAFAZWSRYrkAqWASJ2CBMBQgNAUxNd3d7/8q8+//ua7rGAJMd/v94ftoWr1pcrMjEf64eTIUYdpJg9RpAd
9pXp6lKw5l+Xc1tnKChvUTgQS1U4HgjJzhUBzzs+ePfvLv/zLP/3TP/3uq1/uthtj7DiOi7arzJcQQtPNU0rL5dIwzefzYRgeAQyHw8X58tXVKwXx3oOU3W5Xo+x+v6POE03+JH0/zrv24vxy3fc1zqlqiqWUgqTMPG+bu7u7Z8+ePb58uttsb25uZrPZixcvvrq5FoIx9IpmGA6OrIjMu3lSZqwHM4NWtBMRsTH4DqdsUp+ehvkAufprqSpohfWmEHhfUCCr1Lndqpg06RSqKCAoPfQPeOfo/h1eJiZQ0eqTbLNkS6UUaxBASsFSvWSMURXRXASAsyUtik7EGFZVBVtNKyrawEZPsi3wAJM8nj8IoCCoNHkSEZ2gzjoU8Y449Y8lCH+dG/feN56+pSJai8VCYqnZnIj0fT+OoeIldeXtdrvNZlPh75iTIR5jtONYofwYshKzNWS4JE0pAVWOaJzQtjQlfSGEFHMqOcYcUimlxJhrEWmtzUDABhAFOGQoh2GMQFxeXr9EKMzU+cbZThWzSEqpHwbvPTpXVIZRQpR+KPsBUgEJE0+SDZBr2HsjhTGWUqY6ktkY07Z4GAdK2RmTVFU1awU/ak/FWmsNcogaxhHbzjnHiE/O58bYw3Z3e33zxVdfDQrtstkNo/ktDaRigZhLLLmoJSIESSn1h11KoZZfD2p9BKVTIxaPs6EPo9dEvdMMR8pSHVp40CM8xcf7dmDtttZfZ1zjnKu9KBRFLIgEIMbQKXMqklQQEWezmWlbq9g4G1I2pYz7XTlbeutIofbw0BhlUjaKUlD9bB5jzKAJQES5zvMaQ85h4wwh+waMVSapctLZWGvtGMRsKgserSugbFCVCwgoEAKyqmXTuJDFWAZnwDnwTJYAVHIBYGCDgxtUNyE0DTvv/XxBDVm2qkrWovembe1shl0TUxJn3GrZEjfWxsN4u7673azX67WInJ2dnZ+fG2NUwbvGMLvGdF3nO6sIGTMikiHLrs4mVlDBWn8s9dqmaZh5suGlCX6EI00aRECQJvFxIDLWtGDcEOHr169/9esv7vYJqjvAD10TkF0zkLpwVEXvOb08Ze9HSK5qtCKmVBoDKeWcp7VRTU5ijNXNzjamHg6LeVffVNd1KeXVavXy5ctaxW6328vHT0SkWoy1rd/v92eP3Ndfffv8+YtXr14R0Ww2227X9S3P53NUsdZqwVTyerudLxfb3V0uzhEymyyl73tjqGva+XxujKnIU8o552wsWWv7vmfmb7755mc/+1kY+88///zi4iKEwIylpGHobTtLYUQzpBCX575EQSBGAnLVzA8REbjA/YD1FNtAtABUgnId41MDWLt9qMKKOPmy1vG+qkmmqCBa/eoUHiap3z/GT02H39VlYoKSJRdMsRgLjeOc0VssotGgL1QKZluSgBVxWdWkYtQAipAR8AoARQVFVBStsioLAwNaRZVKOgCtjE5QAKlGMSQGKoFZj67bk43Yj842/FZv7GET6KQDCVOpR6o6m80247pingCQUhIBZ0xVyKyOCn3fs3FSCRdsRCSktACYz+cxS0rlFN5CisqUpDRQSKVoKaWAKIDGGFOcLEsri7yIpFwQRRQSTOo1UnLJGUDZqGFFKq0n6xyQ6UPoD7vdNsQBumXDvjPNrKSYpA9Fk4ICOA8KrFKMhXaxsG1HhsjYWdOEENr5rFvM6whgOULyzlpX02NGQ46ZxxStbYyzaGzIIYCYnB23hvnZsxfDMHz2wUf/zf/mv/nv/t//41f/5J/I+flufPXbrt6YIMQ8hpRbB0womlKsMmDWTqProlVNEsrxWHwwGztdDwOYaD6p2JyiZoU9iQjz9B2nOb+TEQ8iqq/GD4iI4CwK13zXWFuKgKoxRr1PKSGAtZYnou5sGON+HML2cLjb2LbJ3paSVQW52uhI0ZJyceSKRBAtxzkhKgIqxlrn28zWNh6tUWJBQSZqUBVM1ywvzhGRGgdMSByLFJQEopKzCCt
kgEzU59Fway1nRim5ggtYBAAsmkyUiQqzWAOtx65NOoJCQRRitJabRq0V4lgi+wa9S4eBAcDYKHK734dhdMacr85roWOIZ8vFrGnbRZNzzhKAECc4gZxzcYhsnG8AESuBy3cz37VsDJGpDNsauItiFmA9za4Ao0EiNghk2DWCbr1Zf/71t198910C8FN5bACAHvq0oIACIquIqky1XDlRpKoy/nQYSykASkSMyszDUM5X7W43lAI5S9M0KQ2r1WoCDA+7WdvVzPXJ40fDMFS5lhCgzp7v9/vz8/O+7501lo0ltsS2tSVlg1TnQAAAVOpERyllu9s8urzYbDaSMgCzwfV6vfjg+Ww20yI5F8PMTCmW7XYfY24XS2s9s3UuHmDb970KSgEhQIQ6QWGtn80WKZWbm7vZvN3t+77fL5su5wwxxmGsPQshJLbMRtHgRJinpKSggioTL0wQTeUnao1NtSwHIyqgSEiV/IiT32p9dveslloI6jRCcw/d6VE67vfhMjGDQShSMoERVBWo/s4IUiYSSqnqD8rCQqBFxZFMY1qIigWAFRVyFQFWVgYopFzj3H3EeqDeCVAlAACgehW9f/1QgvAD12/+d32gvH4iWAJAndurH9dqzznHQF0zOz8/X61WVTHQGMPGxgqLGa5lBxE55y7I7HaHMaRyxAem0/l4hiIqktZvCSHEnABIkYmMaskZclbEFAgRoAjmICmDJWg7aBvOCYzRjowiVFyFmXwHbD37xth2TBKShCipUBZhgZxLUpgx2LZrupY9Ug6L+byCM7PZDEoN/zGmVADIsFVL1U/AuIKQhtD4GXPVIJ2iTpVJPIyH7WH/6y+/+MM//MX/+r/8L/7b/+l/+PW3r2DmoP/rGHzeX0kg5BRzKVK5YaUyWkvKJ90QBRUVQVBhfeeC98JeJW2yMBwrudO08n3Ym2CWd/z5auELAOVBvGSDJCRSBZhUtBBg23UtNP1hSDEiYoNo2JA1M+MNAqim7S71I3c+19dbxBosgAlySikj5pw1k0xc1VIQSsneOLYGCNlZMjaDEpIQsrMlJjJ2dXGOyMbZgsTeIhZCYIECAJxRgJx1xWc4oDWua4vBIY2iQgiGqShZZtfNZqszMbZr581ySd45R411mXiYBzRs2waNzahoHVtTkPowKluHTGwFwRDN2q4yLPbDHh1672fLhfO8O+yLStN0TeeUMJWxDgGb40QjEZF1dVCnGm9OO0WldlJLKZPADCIDKykzo3HENgoESVebzVevXr2+HQWArcvDYN6db6oiQfWDozYQkEJ515vzyG2pvRUlBTYVJ4Cmae7Wgxw5wCWXrutev35trd3EWJfKOIa2bff7fc6567qUYLfbnZ+f393dLZfL3W5njLHOMDOiWucql/LibPnm1cvZYrnf7/eH3c8++/l333232Ww+/fTTw+EwpMCs3jeH/bqUwtYcxj1IXsy6tvE551LSOI7Kpmmaruu6rmOEankWQlhdXtzc3JydLb/88su2aZ49e1alNi4/erw/DCGEGvIllxgGyQWkdpOQ0MBUdiPANGM77SZQREZURCMlAoAAcM0kJvZ+FZ5mqLAn4NQPUETEqSem932lhz2m35OuXr34yfOPi2oqmkWKCgCJQhFR0SKqoiKggJOLpRJB0kl/saK5dMQV8NSfqzxzmtTeKrtqsgojZGJiNlMVPE3y3wu44btMzncx0vf//TfcytPheDofa5s9pWSIAZCRUPGwO8QxfPf1dzc3N87YMMY/+qN/oKqLxfKLL77o+x6R9/t9KeVnn36KACIyjOOz589zLrvdvh9Ga71xFpGtc918vrASxqGUxMhFpzlrmAZjIBcpokrIlpFEBMTYGEoWtdYxaxFlgqYx84VdLReGKYSUYgpjUYHFYjmm8sknP025fPvyJZuGjN9tRwWICdqmzTmTwsef/ARBLh+fjWGAlBbLZdc2XdcVlWEckTiE0HYdIvXDsO9HZG67uSKEEAnx5z/
7g1cv33y77R1QJNXGdKvF3d3NLz77+X/+v/jHgPj/+Zf/87/+5S93OZuzOfTp/cdR1Q5LVtH7NKfeBBGrYh113liDUJIhcNYionHG26ZtO0MEooRgiBCInD05w53YmPUJw31y88Axh/3pNx57eDXO0YnmMAlb1y+oPAvCSVW0rj6ikjIiIWEpKqUAAhEDoh5GA8iE3tiZbz1bSTmHIFn+6pe/evnNN6vF4tmTxwwYxoEEht3BGQsKOWdjLCNJLt5YAzT2AyE6awsAGkbDKSXPWBWztIi1znctO6cIQKCIBUrOWUoiBVJFBMPWzRpljZJjiVrN7USdcSVnQ8YYK6rsfTOfkfMIMvZjLmLQMJumadHawzhm1X4MQz+qqAHOMV29evP1F1+1Mc2alpBU9Wx19vz589XZyjdNypGNca0z1hKjImBlwormUoqCca6dzdquc96zcYIgqlXPRxFiSjGlIlL3pEhhZsuW2SIZReK2++bq7RcvX//T/++/+vZ67SyNMXXtrORYjUQAAEEAlWoSw+ScVREUNYZUpWQxBnMWIgUVKYqo1hoGzDkjgRZpHIcQLIG3REQI9PRiUfsaKSVnTcn5/OL8sNvvd9sXL1589903z549y2mfc64du5//9CfXV1fL1appmlLyOIZ93xMRIM3n8+u3N/NZh8Q5Z5Hy+PElob69uV4uFs65cejHYVTJOaXlatH4hgljiClF51w3mzdtdzj0XdtZZ1NKCLRYLK01IQQpUrlZbdtJlhiS+/8z96c/tmXZfSC21tp7n+HOEfHiTflyqpHFIiVKIpsaKLYaUqMbQs8yYMCNbsNDu/+J9hcD/Q/4gwF/MNyAYbcMGGjZkmxZVEtUSSSLKrKKNSczK/NlvnxTvBdx487nnD2s5Q/rnBMn4r3MKtIiqYOXFzdv3Lhxzrl7r/G3fr/cxRjL0lrjau8vLzfzoxNAt9sd7t5/0xgXAi+OT+om+BCOj06SCIsYS1rdNMaSsSyAAERIhggIUAQFCYFQB9V0GyEAKu8RESEpZwl0nFx92WXYyevLnq0XpKHp/iN3rIavv/b4rC6Yvm6TQodRECgx+CgAHCOLmJREhESMgIAAkvJCccYdKKVTFBUlqIYWVtXC39tIAAA6gck2MFMEbevthAmoJZUYYjg//5p/avjw2ivX2I26v6WdIenm89CBzt+EEM7PzxXcdbnaVlWV5/l6sxmPCuUYe/78ubWZdQ5Ns6sOxtpyPAbCuq5x2veWYkohJpZE3icfEhprMxJGn1IMKoYpIMSAzBKJgUEYEgCD+CZmx5mxWFVNjNFaQpsBwPHRLULrvU9JLF2p9VrbQpMnk5FzriiNSHKZocB96K0zVUi2KArPrMqfiMgi3FEWsaQ61C63Ews14qZpimY/Hy1+7W/++v/uv/1vz95/+A//h//nv/itfwkAxaSol2sjnRROf5MBRMS0Uc5VqVnXdgRIDCxXm4ElGnFKBZ5SYDYEIgwpClC0faJ3HZd7BQIcLHRmpld2wvWOTgut6t2eXIM+oTBq/YyZUNFqElDaBBERgQKCQErAYMkgocU8gTQCE5uxb5rlZvX8pcszhGStzWwWfcyKvMgKROSYKEnwtY+JfcjzPLOOMkd5EalN6xHRGaOdL0gMKTIYRmFiSSwcqV/MSMZROybLqY/wQCRxMEDGmKzI81FJxhmXJebMEiJKTL3UERFZJHVBlqiwWb3bL89erC+WxFBmeZZlRTEaj8fHi+PFYpHneRJGInV1RCTUzm9BavOGFnnYTlWa/g4zt+JTfdbOwgkFuEUKIhoBTGKa2tuifPjp47OLJSFEYUvucNjrRwIAgXKzCqriAyq0v5uAGujqte8n0am9FrUIN5eH4jlVI1dLQXmZq1JmWZZKeDsej7fb7fHx8XvvvX///n0tFHVJJIpI5ux6vT49PV1eXjK7xdHMex9TnEymKo+nPbliVFJNxWhkCXxdNTE0dQBJ+iMU1lE8ALDW7na7uqm
KoiiKQrvvo9GoPhzm8zmiLJfLFKIKXOu4Idksxkjk1uvt/Hjkk6BI5rKQArA4S5gopQTA1trAcRizYsvMqSkJY9uC0V2hQl4Ge3knNfYCAgzSj6ALiEob3zTR/+bkfFbBVwCYQFhQQopRDDIzBAshmZgwT8QMLOgSInMyHCXlFhK383xt1KWAZDbAACIe2SJRYp2xIQIiMKZNCNBdVXu1kUMGRQCHRdFXFCuGz38Wz3fjf7tcvs0bWp4URB2+ybLszp072vJ5/PjxdruNMdbBZ2Xx1oM3iWhU5t7748WRiDQh5nnu6maz27gsO5oeAeF6vbVUWMKIlDjpHmNhZg4BHImzNkXhEEJiRLDWcRQGEYEUFcHfBgdFUWRZljiE0BhjymIUm1Qf9g/e/hJL3O92EhNaJSwGFHDGKoL/6HhuDBZFFqO3RNaStZasQUMgrHIEpGIZ1zthxpgsz4VjYm8LYwpsmlQDGCfFyfQAaXnYfvP3f/fR00+993Vdc0ZUZqJMLddWt37dLTz3xpEAQko+RZYigSiHdJJU13VT1U3TOGPJkiBGYY5AA+pbhbR0kwlXk3naNbnh9nrFjJby25nBqsA+84tauUBQUmdAElXnE2RmCczMIGCMIYPGGFfmnEQEIwuAWEBnjBjKQe7Pj6Z5noNJm4MpEhoQipvtdr3dzE+O77s3ITEBWDKh9ovJ9HJfp6oK1lFKmBLk1jKn6A0YS5RIDJG6DhGOKTKDJEYWArAohpAJnSWDIikCsjWoyowtP45SZrAYY0yeWed8isANAqcUUgzWWgSGGEASANf7XdM0VtCGmHZ7PlQFwHg0yvN8Mp7N5/Ojk5N2OLqq0BrSjrw1aAQAWl1W48gIIBtrTZaTc2CoVaYCYUiplaMQDbakpZqzhMaajKxLYAPIrq7Xh/qH739wdrGxGQraUTlLl2vB2MmBCnTqbgPjkAAApP2uDUgiUNk00Z2FLMLQF8k1YNA5h8iNeGZ3OBxOTk4uLy/bQJC5KIrV5V5n7C4vLx/cu/ed77yvTeL6cBgVRWjqMs8EURl9T49PXr58aUtzNJs/ffo8iYxvFRcXFymlBCkzVrnHycB4NLaE1eGw2+0QuMxdWZYoOukvADiZTJTanqeSWUdERTFyLn9eVYo2F0HnXFVVTUO3bt0SjCJYluPz5XrsQ7Xfu2Ja7Q+Lozs+CHB0zgFyTF5z3+CpTUGIsGvLqZQjgOZ1rLmd1vU0gelNqDb/Oms8eNRdBDfBLMMAFK4Hr3+a8E4bJZEgIrbDhwDEkkBEQrQYk6TEKRlmiAxZQuKYLDCgsEkKR2npEbgVociS8rFoXcuoXhNKr97QVj71r3Y3RNlb+uALB2M3w5tyw+H9LHeqL3VeJb9dod8YowAzxZ4BgI6jEtGLFy/2+72IrFbb2Wy8WCystWfPny6Xy/l0dnp6er68FIuubowxPsUQgsszFXfFdjbfGGMcQ0JiBm4iAAgjIwCaVleGOSUQFYhGtkTWYZYZa81isWDmw3YjiUejaUbFut41dSyKfLVaH3Y7kGSJAkQSCCyOoPbeABzNppYgczaGrSUu3EiRinrtKSVBDJyufIDpmluGiqJgrhDFOGpYDgncDKZ3jqd3j6PD//Pf/b988NvfmrnRm28+eLxch4rHo6zCBNdXMw2HRq5/F4gYAUKSkDgKd3dAwQVVVe+rqspd4YwhsgQ6yNX2gYbbo6d0GXq+FhPYva1niL3WG3zlp3BVSGcE07KKIqrAIrJ0bYwuOMgzZI4xYRQRFoCWF8vZ24vjEziKwiAggWMTmhjPHn/y4uXFvTcfnMwXNnPO5mWZAUvcHS4eP4vCD96iMpv7Q+IAZJzEwBxD46NPZZ5jXrTOAhkJUBKhGBFDZFI3hCWiptyQOHQCiZmdRRCOoWmqmHzI8tIaFDFp7y1iSCwxERkJ0XsGhBSDYTCB96vN8vmLy6dnzvOdyWJkYDweTyfz2WzWDvakxCDOWQYgQ9iSiouSOkm
nmaCrrm+mtrOVwszcaz+ICGjUwGSQnMvIZokhxlTH9N0f/ujR02eBAUAKpwziFAdZ+1WXpc/Wh3PD7StXFqYlLo8q+wjq9oZaMSklgDEzK9+6XpX3XluDq9Xqi1946+LiIsuyk5Niv9/fvn37/Pz87t276+1W/3pKqSgK5sjM40lZL5sYPQMpFsZae3R0pOQYVVO3qluEZVnu97vkg/6tIsu1LK+JpkIQmqbZbDZlWeqE1Xw+18mfk5OT+XTy0UcfXV5enpycjEaT4NPx8cnjpy+zLK8rP5q65XL59he+akwTY8xGuSCE4I2zZK4sLbVur5VQIGXjVDVixDbEQQQJbajRbnTpKEZ0poGG0BXpdGZfdX6vWu4/XiL4x3OWNnIgMFqfRSQQrV+Aj0mn8YFb5RcWiQmMiE2ShCFnEDSABgUFA4gway0TrGArvKRhp+J+2uyCGRAxRUEEauGdbdUCELnL9l6tZcEfMdXrPejQ2CkwQgsaxmS6ORXSooWF8/PzPM+Vo4GIJpNyPB6Ts7dPb3/88EMRScJHJ8e7QxUDj0ajycSv97vNZjOdz6bzucRLLeHpSrIWEUkEsywCQIwxta/bkKIKs4i0tEDO2dzZojBZhonD4RBCCEWZ5db6upaUFrOyqfbb9crXB6vyCAKGEJLYjABkWhaT0dhSdIScmFBUzLq3PoioY3zCLYSuT4zQGEMUm0oguMImAhzB/S++NX3rnp2PosBvfft3pyk9/fg9SFA4GBM22x2Ru3mTAXX/t6i961VHAYgMTSd8Iy19gc4y1vWhavJRkelQkRHBXvWpN3adnWoxRDQ4huXQQV+hFSqCq77gVUY49M7qifVTSLd75rCDk2pv2FgjgGJQAEUQGYFBUgwplWWJ1vgQfPTAyInBx4mxW6RMxIYIzBFjiCnsDx8/fvzBjz8oiuL24mh+fISSfODc5ZHQ+1Dvtk3laTyejMZgiIMHi908FFoBw4CcIAaJAazRYBIFkRMCAycJ3jChGALVrQVgSdFDDMZaiIF9Q0Zp5YNxNh4OBqDZbH7yox99+oc/MT49uHXn3ultkDidTifTuc6xee9Z1UisIWoVSZGYmSmRiCQjKAbRmCy3zhERoGFBQUnCkZVCO7EwIzGKYEJDxEaLEAAUvN8eqmcvXvyP/+w3l6u1y82hShnBbrURYGqh9wMSRB1ekHadkPKHdCkIdQamlzHRHQesnCSEKLra1CZoBNwOF8WoILjcurIsq6rS4aWqqt59991PP/307bffXp09/drXvrpcrVSSJcaoQFAiGo1Gj588y7Ks9nG3250cLXJnp5Nb+/2+rg+IoMQRKcVRXsSYH3yoqooEeNxqycYkTRNms8V4PF6v13Xd6GZNKZ0cn+52O++9tXYymU0ms+Xycrlcnt7+QpHbxsfZdNFUfnY8jz5sLjfATIAckyWTDDfB28xEiW1RRMi07FkqmyyACAgkFpCRUFWkEXsSSumLmSpo1fYrAK9kx1i/pps78d+EUqcVSYwgwoAWgU3n+YA5MiGyeEwCzJxE8ghOonVJtAqUddrybJg5b3VoTcvwJiSWwKR+gSplW6eygKj0pAB9MawNurtl3RNtfJbng585Rhhme0ioDTwLV1pC9+/fj01AxN1uV9e1iGiG9ODBAyVwKcaj8XRSluV0Om1diKGyLGcz3jf1YV9lo+Ikz1OTUgrMjKJLhAxBIiqKLAZoUgIxSGiMiUlSiurzkFpfmGVZZpCALy+W49IWeZ4ZG5pw2O+sye/eube6vNht1iiQOQsiIOwICcUSIsDJ8aLMM2OZMGXOaLeg9wqtvqvN8jw/NE2MKQkTkXEWrTHGgFDDTQg+z105hpgVt9++b27NPCdI3GD8ha98Yfn42WG/d8bMR6PzzVbo2neBigPpY45X4joiSAKRU+TEICwJwBJBShxCqOvaV3UoMna5saQOZ+DqrtyeDvb2gw2a7fVn0j8naslf+1f6aLRbOdcY7KAriiIaxJb
rSiICgM7Eb0NERANKXY2GLIogYwiJY0IRCRGFDdk8K5Jx89GDWTHKJiMT2FeN934vUh8aqHyqq0NTnz95ZozB3NlJiVmWGp+axh/2h+3BCM+nM2cJWCQEFERCUhnMGJJvQt3E4E3mKHMEWtGLCGCEY12RKckYApTEEoP3vqqqMYv4uF9tqsMht84aAyEZNMTy/MnTH33/B5+8/xMT+N3b9984Pp3lIxrZ8XicFyMRCSkJIhqyNjPKMKxyTqSaCaaPQowCJY3BgR6bhg5DeSNEDMAGDRBKggDCyW922+Vq85OHD//wg4/XNbjSGkRnssbXBbgah9y/12wodfoa1tqUEofU1dygb80ys/YRemp7DX+0Th5C2G63ZVkeDoeyLPfbtYxEd40iNuu6XiwWmud98MEH2ojVoVjvfVmWBFIU+ZOnz8qy1Cudz+f+4nK/3x8dHQHAdDq11kKKeZ5DK8bbJObRaIQshz0fmlogjcuR1mb2+/39+/en06mIbDcbRKzr2nt//87d4+NjgbTf71duNZ/PT06OV6vVbre/e/f+br+6e/f+4yfP3/7iV56/XNm8CiGQAQYxFg0gIKPFEBrEDFvJWdL701lnudrQyN2wvyAYRERQXU/Bjn5EuMPjy82E70ae91nW+/Mt+edDVP7obo8EhUXUVREAE6heNolIiiLIiVmSEQkxJiPRRZeiaJlaUlsdYIdgjAghS3vlQiIIEkVEhyKMKpwiAqSWEbU1QG2E3Y7XdNdzI1372VM9eF2xuA/w1c+1bo/bDzTGZCOnTiKEoN3jGOOhrnTJXl5eZlkGGWgB5FBVWVbkeV4UGpp1geFVYqMddAJDlNg5ByJBIifoTa9SnhOpedV0iwAkpRBCNJPcOSMpiSTnnLN5Zs1uvUqhyZ1BQ943HMEa44hV5/BkMQdJhJKitw5SSC1sR0vyr9yfXncNjUUiTpDnzvvaFJkri7ws3CjzlhLAfHH0xvHi/vgUf+Uvvvz0+fd+8N5utUVRAfarHIuopfrL85yZpatnXf1daySklIQTMHMyCAAqa4Us2jWsq2LkAoIBTmiuXF3vTbV40md7HYWuAYDA/Mq+wv4ThiOAvRvW81IHKH3lEhihzVyVskjlaffgCVuKNQfIglaUvRcBgGMCAEfGGGsMOWMdUX5yK6CkumJOqWmiD5jgztERfvHL3vvY1E8ffeLGpZmU/FQOYUeC1f5Q76pquytMvhARQ0KJwZJFYhZOHKOv6uawr1MosDSOdB9DFAQU7QJC5MSH3W6z3SWRERmOiUD2u92Tx582+8NsNBplGaYkIaSmOT87u3z5cjEaf+HOG+/cfWOWFRCSmRRZliGZkCIqT7chIWzFeo328xARRBAMCQCSxY5KQJSCFUQnGdXzKVOYoKAhBu2HtJMqIfDhUC9X6+OT07/0y3/pt7797cMhTsdTYLBgpuWkSX64u6/8HwsikkFjKDM2YQiQAIAImYUIrTUGjSRmSiqX2n+CMaQEmIi428l8nu12u+Pj49XyXHveRGTzXEtBb7755rKule/icDgcHx+vVqv5dLzdbnXMoAlB3Ztye45Go/PlKqW0Wq2I6I49KYosBA3rISuyGH0IweTZeDwmlM1mU1fekFNDFGNcLpfMbIybTCaaj4rAfr+fTqfT6VTpgmez2e3bd2Pk589flMW0LMuq2SIaXzWOTGi8rytrW4VkC2IMWkt17W2WIxGCVj6ISJgTYMsyqXVN3S6tIIEirV4xt134iNKCRa75pNc4v+vFzz/l/M8iIgOTUBJWejVGMerUGSMmEiYGDwmQmW0DgZXBU4t3wIgOmAiMEUE0BoXQIKKhCACEV9LkalyQANF2hhKGRhM+I4f7Izm8G8fwzmKH3kRE1pqVtGp2n3zyydFsMZ8u1B+Px2Ml5NRhnfl8nlLSx7t3767X68PhYG2WZbbX4dNAbK70H8ggaIwBbG0EARgDlHRMRDoZCkRGIgQWLWIRqM52HI3KIsuZQ2w
aa7L5fMQe1+u1bxqQZF2OSFUKkJAIrIGUEgOURRGjt5a9T5mDJvnelapXDiEgg7YQ9MIFgZDIGKVNn06ny9VOOBBRRK6CF3ZuXDx5+vTn7z749OmT3FqNWGOdMgeHQedsWFjWonEKQStF/eEsMSYR0N5OH3AbQwCgDHAKHycU4GTdcHTv2kaSTmAWhxXs9GoPT91eGkZC/ZOu7El9N777adevEAGthlmlRgRgAUZOEHUAvcVCt3zH1hg0ACxNbEQEKTrnOLUq3q40KUuFcdWuevv+/awot4f9+Xbd1PVmtfz02VMv1agoJYmv6nE5McYwQjYqaJw5IiEQAVRRWt/4pgkpKsM1EAonAWEgErFkmNnXzfL84mK7jUBuXBIZCbzf7s6ePqur6q03HhzN5pxSFNlttoXLvvalr5yMpvNiZJvkhCbTcZ0BEcXUIj/JGDbIKTlyiKhVCsZr7XMYFFeuvqpusFW/x6t3EhlnLVhkJiaRqKSAf+1v/Dvzuw8+enr2k48eZ1m2u9xRV44e/gnpxEz5yoeZbuKl7+t3hD7t7jciEv1Vt5g6EUZEjFG0x5HnuY4hQlu/Y+fccrn84he/qIDJk5MT7/3xg9vL5fLo6Oji4gIAyrJcb1+WeVGW+dnZ2entu2VZ6qleXFwURaFTvymlw/5gLB5NZlmW1YeqaZrpaFyOR1VVNaEadqMvLy9DCEdHR0XusOPn3Gw2KaXpbHx8fOzrBgAmk8mdO3cePvxwVL78hT/3iz/54OHR0dHZ2dnte29uKx9jzLKSgI0hkpbeKKVku5vQB4iICGAQk7KJac2jy1V0xswARGzfo0ara/C/LsnrLcOfdWnz6jDH09sUgBIYBiNgAEgYQdEpDICCRtAkoZBMHSAaCWIiZUmySJbBRXBRiMklMQrqjK2EESZkIgvoWhwn6ZSkCCWBKCiArIwLDIKqNskWMSPKAZwIAVhAUjepbABICMgt5gjEkMHPOIabpN91Kcl8Nn/27Pl8MgPBUMf1av386dnhUC1mJ2VRfvELX3758mK9XjuXx8irevWlr3zxjXfubQ6re2/dP4TqL//6XwFnvv/e98GZyXwqhhrv94fDF7/4hdXyMiMISYCMIEYAYx0YqusgIN4HQmuN9V5QrDWOIzaeNytOUY6Pyum0DH7f1AcUKIrC1ynPyhRxtdxOp9PF0ez5i6cbj+gyJusV12E4UBAD2youZvDmFxab7XmWg7UOwI7HJw4os25Wjh2aFEJT1VV1yKzJ88Ii7Xe7al+VRTYqCgAmhDUHsK7ah/lkvnm58svNn3/n3erR0+M63TeZO9/AvtpXhw+fP71IwDMDjYrnqFiXoAJ0ONV1FVMAZds3pCCPyDyLbAWcwMiV49FkXM5zmyNSZm3mTEZAmIzDvChskYF1wKnV51KkH1kBSUkpE6Hj3CdDltAoAE27bYbAWZs5C4TMLEyq62NtbmxOJhOwgDaKgBCI6SiudKESGgJCQRJEIX2LiAFicmQztIQIwswhSkoSwQo5IUtgtAsijsgZAsxFDFHuKLfgHGYZ5QYzMA7IsqA1bpyPSnRTyu9PT25lxYkbzUw+NRnFdLi4XD5+fPHJI1d5s63MvubVLq0rv96vzi/Onjz75MNHY1eeTI9ysSZhgZkRk3wSlt1m++zZ05dPn6xenvnLC3PYT1Kwz+r4cndEozfnd+ZU5h5HDZpdtNtwYsan2WyMeQYuz0f5eEpFWeUYCIOBSBgcJovsAHODGYgFJASEGEPyIdYx+eRFbJZlZWmyjBFTi3kANMggLAwg1pKxWheQaTjcmk4vlxeBaAd0KfSt93/yk+cX/+5/9Hd+91vf/a3f+ebJ0fHZ2fPptEDiQ9wjmOADCJd5kVkXQ+QUraXMEqdIBNE31lFeKPEskLFFUfq62W2bO3du7fc7a0xTB+sgy40j4OQFPBkRCIljQZBZR4iTsgze545GuckdFgXuLs+PFuNUH955cPf5k0dfeffd9fI8y91
uf3jw5luHqrZZnhdFCKFuagT0dT2ejJ8+fXL79ERS8PWBo791fGzJNFXNKY3zXERSiIYopYgAWg1TdCGSsS7Lc8oyC8BNUwlIURaj8cg664p8Xx1q7yez2WQ+PzTNartNAOV0drnZuKKsg698c3LrJBu5qt4vTo7yUaFA0DrEopytN4eTk3v+UEMSg5hZl1lLIKCMBwr+RovoEBxChlKgFM4ERNK+rhZDTIffU+BLV88zCIRgEJWVgtrKAFktKjNzO0ALys+lnC/wr4u07LOKnwaQAO3nvGkYHfeBVUrJIxpvCBDJGQUDivHGo0HqiKVN2700sQMot1yxLd+0ctu0qE4ddWxBBy3as43hALpMuwNl/fGihmGOqIR7iCggWZapHMHp6el4NNZi/fn5uYhsNpvD4eBjUBn0qqpWq5W1Ns/zJ0+eKHflZrM5ObkTA8coy+USOsJPPf8EEmMrFHCoDjGyyzI0Wc4UAocQqqoJCayFsgTjbIwxhISIeZ43jbcAIWTM7JzRAHO9ZrHtVKR0k3ZO2EMajeDuvflhu5vNZiJRWfOVb1o6erY+JUopZcZBiz2+egMRzcpJVdUVe2FGUaCNWywW2xcvd+vN6smT+nIXgSxAYYETwytKysMFo/dTb75yYELQwBAiJwWJMBkNyvvugve+bg5kjTGusFf8ZMoVrq3111YFtEvRZ2wd58tVztF/CGpr4mps9jWI6uHr/aGfL73IAyJyT7EAAkCILYMRCLAAEiJyC7IXhc6DiHLftMpwhBkhGnLMZSYi4mPw3tcq+gMsAB+894ej0cjltq8Y76vdZrN58eJ8v1w9e/R4PJ1o+9llmYgs1ytIHLw3ScbkbIL6cnPRxMLnzOzyLMsysqYJnlUIAhGNsd3S1ak7vqYag72aijEGkfVC+p/qj4pOUV1JcPoSt/KGUCeA19/q0XTy+MmT2fGtg5fJePLhex/9k3/yT55frH/1r/767/zO77x8/iIIOARjTNPUzIzmagrweg2NhiEvERkDxlBMKYTALFnWDTUCKK75xtetADTxV/3d0WhkDHS01O1K1sE+FcwbjUaHw2E8HiuT52azWSwWIYTRaFSUpU7dTKdTAHDO3bp1q2ma8/Nz/ZCmaUhYqaCKPG8at12tU/Tj8bhc5Lvdrrtj2h8VldALISieNs/zk5OTlJK+czQaGWMuLy8Xi4Xu8dlsttrsyrLcbrfL5eXLly+Pb9+Tri+gvBYqQHaFqu3wropmv7Gzrh1qwRGRBCEhWoCktapB0fImLKO33n+8hty/xsP25uNnORURCSExAzJyUt50BknoHILWRQkEQZDAArA4dEZSO4etzNXq1REccBQAMAaZozEGgYSu9PaIWj17RaBoixUYkVpStNeVmK+d6uBGXxkvY0zTNOPxGBhijIXJ1+u18mOt/Mo3zZsP6sPhcPfu3ZSEiJo6iMh+v1dZSOdcCOHhw4d60y4uLk5O7iwWi+328PHHHx8tTtqJUSJrrU8xhEbRMVeVwO5MYko+QhDISphMxtba0NQhhMyhtXa335TO6RodjUZ5nh/2+80GJkfc1ycByTkXhQnSeATz+fzy4nw0GtVVXZaltZZEjDU4aOAbY0ig8j7Ly345th0XEGNsbqyPFSUxgIXLGEkvp8jzsixTOYKdbwKPbZYFv92K7TisXy0+D/eSloKJCFOlLyuCIMbIFkWo33vq9qqqIuPyXJis7sOhSXv1Lw6LloPMPiEiWoOI1M1v9MP70Nl3gHbqnweaITfc3vDzU0paMm0hGzFxTCmBABCxABLZFrFlMLVNwysH2kprqc6D0twTGYPkjIhMUq7nycyRk/e+DnWMMUeTZZky1ak00iQrZtn4lpsDIa+r/SHUdru253rmeVlYMqW102IO+UyDXPIozhRZmee5AphDjAlZLELqSW1aO5teA0ciY5RlyVzp/QzaBwBg81xpvqnTGtO7p9zrWmBUYIv+YlU3WVmOp7P12fLi+fNvfvO
b3vvTk1v/2//mv1nvqqLIMwZXuPVuyyzH89luV/WBVBtvDYaU+vMxxhgDzrkYfPSBGSaTUS8+rCxiACCQBJL2mSwZcFmoG1DEdUqz2Sw0B51eb7xXT1BVlfd+Mpk0TTObzZ49f3T79u2macqyfHb2QuXxFosFoDKUHsZl2cRkjJnNZi9fvlyv1xqPKjI2hMBIztrRaNQcqr2vm6YxeVEUhQovgATVthWGpvYpcpN5Y4xZ2DwryJrgIwIVeWmNq6vmcrkcjUbKAP7ifLndbpsUy7J88uTJV7/+5/QScpft6qAn0MJ/+IoZte8EwfWwADuYIWg3FhWsCYQDb3fFt4VdaXQwh92hNzqniP+6crs/6mFvhOr9/6r5uKqed5edUhJGZJ+SSSmhCApAYhQHDgEZ26qjahaJJ7GdUqhymAKCiDLlc0wIgmQQgRLoTrhi2R9wm14nthnMRX6Wt8bPgLQYQ76qR6Nxtd03TWMzc35+rsFatW2U2pxaMTA4HA63795WZPPR0ZEClHe73W63U+5BALi4uDg+uqVbZTKZZBnVdd3nLiIJkVxGWZYn9lEYvPc+quylMWABVHsahGKMIiBAMXKMQIWx1hpAZ0EL8RplkXACZI5obG+gZ7OJ3rftbi0pcgocgsvz3opJJ++OiBziwCGpP4hCZK2NVRPrxoI4Y8tRsRN5cXHOnCbWjsvRrXfesfdkdbmtAqz8WeNZ8KZjePUYfk3GGOEInRBaTClB69hSSmzYGMOsSk8BIO9xNz3HprGIV3hOGn5+n85i5+R02Sr9WO/j+9/tP6EfzgOADssGr16X3kNFu/VtJJbWrCcWSMkgEkpL50omDjd3Fwn3/9vrwGGnPGKRQcQZo0MtwuxjE0I4OTkhwiSsCbR1rTQ2VqmqKp+izRwQJWGdmTPOarJuyWTWOmN1QOjgXB/RpxBEiAwRUWy8XhIQyVWu3O190LmXHqpiWBIo/yK3KaBY03LBdXFJD90cuqU+vtHTqOt4++69n3z0yGTj3aH66KOPOKYPPnrvP/xP/ydnF8vf/OffiJyyLIspAcre19dSxoGcTfdNXbX9+nZdSowI4/F42ITuK0x9/KQ7peFG93vTNPP5rW2omaP+uVaXuGkOh8NsNtuud7du3frwo4MuIfX0OsbgnKvq5vj4+Pz8HAAmk4naBGPMer1Whu4sy5CTcmwG7xeL+fHxsbN0dnZ22O6Oj49Vmb1pagAoikJ3bn8DFVZadofegdlstt6sWu3oulYe0dFknOX5+fl5SqkYZ4iY5fm+iVnmdB3qdenC1i2jHvGG8YRhcUU6oIsilrDtOXfj2oJ0lSDqB3zWnvpTPvRP21dPYvi/NwIrfYURojAzSAKUgNA1SrWGC7phgYyAoKGQxAoaIRWhJSAQYEwtO2JCgCTYlo+UUbavYGCPNeeWe6ld7oioFuPzL+/VLEStoXOu6Uytjit47/f7/Z3bt5fL5Xg89t4b40IIRDSZTN5+++3NZqMFCgU3a6X01q1bL1+ej8oJM9+6devk5MSFtTWYhFmibhUwtqMQM0iGxSSJIcYokATKUa4VV5X0I0Jm3u/3KUKHMkMi8d6HFEYTaNFxXTiQWMU84datW7vd7uTkZHl5vphNe3YJiMlimzcbIteF4dLRs1lroStrWEvJJxIwRMrugZDWm42g2KI8e/nilLO3Fnec5Ivi2VE5QWjOu0HyV+95v3iGBU8gg5SUHvqKSJPIKq7AABEZQIkphhBDw5QbY1rsE7MwAwuwcExEBCTAojP/fW25d37MwswkJJ+BhOrNXweMgN6MDschhkFhO+fU0xFSK5qaZZZTikHdUmKJAoUYEUv9Rmr1yK6NT0A719Ov84QskgQI0ZJBY60ldJmIIAkJG+f6jYBkp0XejBsRsXkmAE0Mzrl8PE7Bc0zMbAB1EQIRCKiUdEwhcQIS49palhtl3dnolarMj9F5akRjAMgSasfHEKR2Jkt/q3dF2BUS4TpRgGYPvSP
UX3HOhZBRVh58nJX2/Z98+OTJ02Jy9G//9V//z/7j/+Thp49/859/Iyuz3X7fxJDn2Xbvx7YViruyA52T7kOf4XftnEWMzFwUefAeREQSM2cmQ+mAMDpdDKoc1FLEVVXVX5oKKWRZFqMHaHmoAcBa6wyGphqPx4g4n06a6lCWZQjBN/X9+/eXy2XTNIvFwo9Hy/XGOVfXtapLhhAKZ621kliYt9vtraNj5WTZrtabzWY8HmdZJhakG5lXpjS9nwq66a9XnSIifu1rX9vtdgS422zfevDg6fMXRVG8vFg1ddhvtvPjW4OVTz4lYB5yNMYY1SsrNRq8xkVR/91qR7//wMF/yiKglrxfD9I28FjfBH9WqR70Rc5Xr3Do7fp1hogiBgCjoGEUYsRoADkZIkoORTSzswgCKMkKIbMkIFQXC8BCBkAoCQChzvYb0yV5hloF2n4jqeKodAYIBlHe5xG1DJf+0AqHELFrNWld2zlXFIUKrBPRxcWFZnUiWJbl2dnZF7/4xXffffcHP/jBarWaTqfn5+fqC5l5Mpk8fvx8s9kYY46Pj40x4jVu0noaFKMS0Oyrpm4aBlvkuaCt6lhJjIFTgpFz1too3DRNZLHGCqfdgZ2FGDmEYIoMyBwOh7quRyNIQDHGKGzIAGCMkYydz+fOOQ6+KBfpRSRs5/GJKHKQThkEBl8lSDIAzpkiyzxLjIwCxpgyz5s8JI6VrwAgH5VkDWaGMnf28uXqsuKThiPs1jvxjPyart5rvwLonFwyDoAYWEDplNuvsigKQjDGWEeGDHNSQaiskwoaZA+knltdO/OVBnuf6vWj659TD4Dr+QFc2e5B+khXFC0wYIQhpcVSl0lIRDbLUoiAMXpIKUmMkWuLZMqcsFc47TxzL2c2uEv6JBhiRIYEIEEiISnntM1yESFI2LnwxBEtAeV57pRJqm7qkGIElhqJCAwCESAxAEoCH0TEkwUWQXDOoSHp8L15nvfOQ0SwpzdJQERtruyuISSvbixhS6apwOSOOqANETr/lwbTLH0qZvLR2fnlrTt3X5yvvvX73zl7sTrB7L/4n/+XlW80hS0n40+ePBcEIA9XIQT2/QJUBa/BEF7fw5NOlFyTsP1up+/QnKbP/LClbdIpdV2ooBJ3IpLneVVVeWFcUSwvd5lzSluo/Y7FYlFV1YzZWqvw79FotNlsmqYBAGPMbrcriiLP8xgjM5+enk6nU+/94XAwIGVZmqJAgN1ue3l5OSrz09NTR+by8lLlZ5XwumkaXfytLIwxMUbtGp6fnys0Qc9zOptobxUAptOpf/jI3DLWWufg5cuXb37xSzFG8d4iJeYQQmZsX3weLsUbM9NXC1UZO1v6ke6fMKJR9EU73oegfJ6ICYSwZV2GwcQ6tGJ+f7rO7yrbe63zGPY5rr3e6S8oCh+FKkkpCWGMjjiBCCZIghjBZFYMSmrpW6KgFVRaPETUm4X9vUNlQjT9wIOIpG4MmaSD+ogAti0R+fzLe629izFaY7SXi4hKc0BEp6encESjslxerKqqFpH9fjkajZhZPWJVVS3zwuPHdV2/+eabq9UWHB0dHXnvF4vjpmn2+/04eSI0SCKSZVlejmKS7b5CREPGWhsZA6eQIiAZB0mi1i289ywxK3JhiZGdBR1iK8vcWltVe+99nmPVcAiJEYrCChCHUJTu+Pi4OexV8bIoipYzczKJofG+NgatyTQ9Z26JMNuv31rnXIoJIoPSRQJba63DWNWY5bOjBc9GLncZEky8OcB6t12+uHx29nyPWItgkb26qj5rUSFiZAZOSJBAYjfN1VauoK3EmpY9JDZN3TjXG6+uMKsGri3LMLMMoO19FgsAIlfd37arN+BwaQf44Aoi0dnKwVANX+EvusthY1DIKBsqMwOITl+QNQ7BGIyRU4hJUhODqRMBoiGDRNYYJDREgMCpN9Ot0xUAgNpaRINoQTtP7eA8RiNXg97CIpKEQaTar/q+KVukSYmGkjFNjAS
IhBE71VUjoMoxhEpBDYgpMYEYBEmx6yfIMHnS26KlS9E+Ligsor1X0HGDmRaRdC107pERvWo5dLAv/VIwK54/O4tJvvO9H+72FSO8/+jF//X/9nf/y//l/+qHP/rRrjpsDnsgmE5HIXqyDHyzBCXQDqTr1Dl2/Nr6BkKyzgUABKiqPRE6chwikggzpKT5t0iChABYFLreqG0wA5Rlud2sy9E0d857PzuarC+XdV0XeVlV1e3bt/f7fX3Yzad3tlsQkSLPlr5xzq3X61FZ7HfbR48efe1rXzuaTV+8eDE7Oh6Px/v9frvdavnXGmONKcsy1E1dy2QyOT4+Zuaqquq6LnIngkQWkVIS5qD/S2RTElVtCiFtt/sQUkrpyaePdbZdWTX025xOp1kpy+Uys64KHBrvsjJ4LzG5ouzbB/3wokbMMMjXr/YIGERB6dwbAQoocawWSrTCCdCCjBTGCV18qaRd160Cge5T/NPzf1Y+NxYexnRdfNSOMmqDAyABIDMYE5i1qxeBkBAEJUnKMhEQshgSAgFF1PtpDGISRADDmvwaJFTSym5ZE13NKb9azvysytVPv2ZrW6Ev5rqqpBPhmxQzjaq890dHRyGkBw8eXPr15eXl2dkZM9+9e3c+n3//+9+PMR4fH69W25TSvXv3zl8ub926dXZ2ttnvcoy6sXXLjUajqq4jJ5tngA4ImyZUdR2C5BmVZeZjUPsVOEECERQygBEJQhIfIpF1eY47TCyZswCQIiCBMSiAIYBSSDx9+ng+nz9/9uT09OSw33IIzlILlbyal+rMmag9JUNkLZmUWgorxMo3SRJaEktulI3m0zDO0JKvmnt3bi9myJfV40/PtgAM2GMEXv1q+vysXdp9o8VHATCpnVrpiFyEmYWTQQMAxqJwO1+vAUoPRQGAlDDGaAxe9eE7IsFh/D7s80GHE8bOmuufBAALQ4rqq3Wu3yDgzazxqkeFDMq/iSCIPgSLhNY4Y4yDaEITfBJJjWe1/sYAMxpDTAIYOwdPZITQdIX9WBhnDDlLgEmiMEOLx0maMOkW7G+syZ0hw4ScWvxX4FQHn2UZIyILd601cjYzNoO2GplCVOBGAkHEoEWtAcRJvazN2uoZWaOBPg8CSkGQjhVTCA0Sx9Avg6Hv7G9gzxmkl3O5343nRw8//vg73//+4xdnyxqOj8v3P3n4v/8//h/+6W9+IxsVTQjY+Mo3sUnTxZjX9bCI2v8tRFTyI1TutEHC1xdglYbCOaepGPC1ohF3Uppq9FOK3vsyt3meL0NQkFqMMcsyBZtMRtPtdnt662S/3x8OBy0MjkYjjcaOj0/2+/1sNjscDk+ePhuPx3mer9fry8tLAFA4KAkyM1mnuGtvrG+qw+EwLsq7d+9eXl5eXFwYAuyIYPQSYoya8GmMu1gsAGC/39d1PZ/PEYSIttvtZDIZj8e3b99Ggbqu58enSq7WpBATl1lWh4CI1lqtbSoQSX1ejFErqDfqJYhIcAVpQRIAJrIA2vEiTey02Qdt7d5cs9LSsqR3xGZ/NoeZzI5fiWevjNeN5yKSQG0mUMd+p6i8xMKcmFtidNCyh5BwLQyAKAIszJIAtdyLCu7WVolyWgEYlKQzrEp1qjsOWhartvLSmjBtA8GgvjEo67/2FSJCpMNuPx5PfN1YsrnLv/W7/2pSTuq6Xi3Xvmmczbbbrdoj51wV68VicXS0WC6XuuYePXoEgLdv3/Y+7vcHY9x+f0A0zjljbI5109Quc6PRGJCQyHt/vlydX2yn02mWj7a7qqoa47K8HFmbRQ66Y2MIvpHMmqIoOTXz2XS/90Vub5/eEkn73TalCMgp2cMhWQfjyTiEGIKfzqbz+Uz3Uoyhqg6jsiiyzBCOx+PovXMuz11KMfg6yxwgbDbb2odbp7dr73f7Q0wcYnJZjgBbvx1PZvvG7xs/vX1aHC8agpjir/3lv/zv/Y2/9eiDD5dn5x/85CNJsAWphQNcs2V9dD80eb1ZYWY
jZA0JiyM4nk+m43HmMLfGGMqszTOnn4AEzhpDaKBVV9LBF4X8C0sv5SHMIEyIhsgQobWDv9gK8TG3MiBdBa7FZRhjIigm0/Zjzpo86cmnxC1fMPREMO0cGiKiQWvIGEuqIKWTqVrf0SzDOYyREFrcalP7ug5V3VSVpJgaLzFCjMAsIYS69lXlEbjxvqm5aVKMFFlSxJSQBVlIGFPCyBgZY8LEpCTwiiwT0N4nCkhMwKJJmCFDSMycQiQfQt0k7yGxIbJEAMIxhqZJIaQQoveJkzAjgiGiPFNmlm6bEhpEIkHVmgFCbfZRpzTUYkG7GKUVuSSiw+GgJZC+pBxjlGzyhx98+N3v/+AHP37/+x+8GE3ATcbbuo4Izy7Om+Abn0QgL/Msd/vtIUdjrW2aGhEX83mM8VAHS1DkGSGmFLljPLCGYowIIMJ3797ZbjfVrpqOx8aiCEuKRZGLSKjZWckzRwads7HxRVFYS7ttmM2KIncIStIY59OZDw2IEMJ4PJ6Mp9baMsenTx7fOjkt8oI5gnAMkZPSUnCIsa5rZ+3hsP+5r361riogc35+rjliDD6lZI2JIRhjijwnwrquQWQ6nc5ms9FoBMKHw0Fxdj0IoGkaLTJ575VPP89zpfEsikzD+s1mk+fF2YuX293u7bfeMTa7WK6++rWfB7IhsbFZEwIwGGtxQOLRl+GH+5cG/VoDSbdhO3XX1r9bIKNpYWKIrdgqEmm1k7CLQ9rhWhRokRzSVWVagNLQaH9OVvYzvuHGoa72WpGzz+0Gnbybc3sAwLq1e6k3QAOUWEIik7gJDCaiMWCTgHFGCIUiGKOeCk1S7StGxEBCpMTNgihg2tdbxv3u2gGMshl1RZVrbBrDisfPcuU9pqO3ziGE2Wy2X59Vh8N4NNXWcZ5nDx48uKgvT05OZrOZAj5TSlVVxcCbzSaEoEVCGqiuW27R2wDMkpCjxs7eA4P4FHeH/aHhydRa5+q6AQAlg2AQRoisUG/rfVC+gJgSoURRFjG72fgsgyyn5INwnEwmeZ4ryiZFH6NHgcyYPMsAoKoqO+ivqO1VkCQh1NWeOTlDIQRLmOcOACh3dWqSRDAYo89EMmuij9/4xjfsthmNRl/+6le/+wfvbfzB5Fmeu8N2r1+Huo3e5PUt8RuHFXKETILGIFll8dJWhEhiNEQwoHNsRw7UenafwcYYY6/KkojUTyxQ96IxBoBSSowtedWr5TtdADSQ3JPBCOOwTfhZa4mxLU5CH8G20ExjBFGgHAMz62ycelBJLCL1dq+LVS/WIjECstSHetCiaucsWo401XwRAAILihzHQE1q5c3UmosSgBOhIIEhVscGyJKEgSxBggQJwRiLAsQSEwNwJDRCgmSAhNACMoLh3g5eWYrORBCgoLpdIn16RfZ9I8/TFaIhRZuzioQQXlwsN+v9H3z/R0/OXgSA0cniV3/t1//hb/zGv/WLP7+D+P4ffgAOUdBmbrvcjka5jVcOVeXuEMAYYmZLhlliCAA2z3NC0lKHSqw0TYMIKaXG+9B4awkASJSc4KqDq10VkeQcOOf2+z1CPFrMY6p7JRNrbbvjUqqq7WQy+fTTT9966y3dy7PZrKoqsjalpEAzBc09e/bs3r17dXzinNPpiP1GmLmu61FZNk0TpLEGdZLh8ePH0+n0+Pg4RY+IWvBUBdCiKABguVzO53NE1PHi09PTsiybpkmJzs7OiqJU+EyR56PRaLvd/uJf+OU6ivd+XE6a6FOIu/UGjSvHoyH0Yfikt/zXnkgrrYPUMlSAKJITdAQWsWfsVNPdbS5hQG0BQidXpO+hzx9F+5M47Gt7eJ/lQkSk1VECI+0ZkwgIQkwCoP/FqMJ5ZDiRIQZhskxEjIBByCQCNICgOHIBsK0PFWEkRcEm6DgS9cYRgTBJJ04LV+npNcf8s1yzJvIyAByGEObz+XQ63TBr/UHj/aqq1pv1aDSq61rt49HR0fHxcfDp/PzcmCzLCh1m15+Ox2O
MJlpDg6kXabEYoHu1rn1Tw3iMSaSqfV5S0zTMSt3bYhuttb4JhsDarAdJ6YyqJF84NMaGEABhNh2PirI6VKe3T/Y7jDEapCzLxuOxMSgpkHHASdGPHVoBjDEsst/vU+Isyw51oxInOtFRHbwiAKv9wewPYEdc++35xT/4+3/fPzr/0r23Dr6ZjLODy1+utu3w/EDxR4/Pqn8iGyAE5aFmZhA0RDZDCf0d63QZWxwEDgYVdNzWGIN0NQ3Zby0RsR2Joi5YRFRdhdQJGPUo837+b1jJUfII6My38gsO58+66+hpmUAHb1pK7lZ5s5N0Ryy7BmSMMYUYQkghMnMKPoYYY4TUgmzVrlDq3AwitXytHToDARENEhpAMmQNAdYQUcnQhAgYGFESMKIBBMNGEAwjJwGRJIIby8DIkDTTQ+tUKbooMkQhg4YMGEA0oHoI1qBCFAZGADqKiVZErANLSX/fuqOnGpCOXaEPOhUt+eLs8NHDTx9+8uTJRTi+W56+/db/4x/9g//g7/ydk7v332W/rKvzDz8tphNmBkW3DhJxdXumE1u31ookn8BaybKMUJqmKfO8LEsClhSK0hmL1cHr6iABMmAstBkzCFrQvczMeW7yPL/YXCLEB2/cX17udUIp1E2e5+rJEDExn5ycPHr0naLMDEFofJ7nRZFVTUgpjYrcGVIH+fz585//+Z9Xp5hSmkwmzWEfQtjtdggwm01j470POn+i+dZut8uyoizHeV5dXFx4H5wT50hEynJc115ErM0A4PJyHUKaTCajPLt48TIzDo2tD1We55Pp9HK7L7KcBF6evZhMjwhA/8RkMrlRHoNXBACGZhNaZrKhbxNdfurYWjCL4jZIurZv/+s6xgefg8D/0zl6w/Ea5os+4RtWCzXYI0kJDUJqlR4F2wY8JhFVCAcwlhNmNgEg+oiIrFDoiEQqyd4GsxAZhBTSYqzOjkjfzxtiZJm1acR9RELXx2aHZ/5Z16w4KOwC7SzL6qa+vLycTqcg8uLsPKU0nU5Xq82jR4+O37y1WCzG47E2oq21TdPsd1WM8ejoVp7n3dwMAkBZln6rsq6k+kp9AmQcAAASuTxH0+yrQx2jT3Fsi6qqETlzhWQADAzgXB5CKMuiKAprrcsoz/OUojGmKLrxUk4my7W40TRN+2JiIDbG5LlzBvs8SePr9iu3tiyKQ+ObpkHTTvIZQ0WW1XVtkISjsyaL6eVq7Q2V6aQ+HA7LtQGbav/sydPLOgEmPJ1MjmfNZqdLKCVOiYliXyp57c1PDCDAITSClQ9N42NOzGysQRA9216nrTegPfSOmXW2ncxQ9uFKNgxiO/mhzUSRTulZ07iUeiAMda0muF6S7RMtgFYVud8anxNX0VD2CIEBkFAAIwgSIhlnDTnrUqYJX5ZlKYS6rkPjY4wptijHcWq1uFgkKdxbPx8wAhvEAEAEgSyiIJo9RQMGUQiMITAt4kCnYAlRGEmQWVAkJcAdeOgHz511LhpniUgfyVpjjLZulDK9HTzQbXWddwOVtwVAa1Uqv0Z4TZ5QrlFxShcCJgDw3u92uxTN//gbv7k/+GIEX/ulX/qn3/ydv/rv/a3i1uJgePHm3b+Q/+pvPPo0pAg+TiZFta+L0aTvHGsBwNrWRjnniCA0TTufCqxojjzPQ2x0nsdaKwLOOYktdM6Ya7hflY4Qidy1k/spw/V6fXxy9OLZc03XdNJpXJbW2ul0qm2/EBsFc569/DTP8/1+T0RvvPHGarXy3r948UIVPVXDFgCyLNuuN5vNBhGmozEC1XXtyMzn86ZpXrx4cevkxBijAns6/Kdp3/3795WrU7uJWogSkdnoRBlbqqoim/mYirIsiuLx48eHw+Hy8vIN770PY6Isy6bTaRN8b+GHFQ7maxnYVS6ouCZgVPwGaXWDNE7r+NU0HlLImHYXuHOB6vmMtM19assXf7p4Ttvzd7zW+Q28XV9LZATDCC0cFZJmflrpSmrVIpGPZIIw5i4
AEBmlMSWDYFuOQ0qAMTJiIouITImU5RARRQx3xGRC7dA6MytjCwBTK0sLLFdjyz/LoTs5hJiRdc6xTXmeV5tqvV5nR4aAbiUAAIAASURBVIUu8bIsJ5NJXXvlFppOp9o6VgBxXdf7/V4LoTr8MBqNmiZqjhhVk8ZaVU+11qKNiGgthBCcKWaz2aG52GzZhFr1XFICwZS5Fr/KSoHOnLnC2gzR5HmW5/l+v2lio8zujY95brM8FxFALkd5T/4kwk3TNE2DmXXOKeO2bvvkfdK1RkREapPIoLU2SQsoJgEHVNqcyafNalP5uvKR0+7s4itf/prcLS7PzjOAjUCzWU9u34n76gpqIZCSKL39Z4YdgpYhCZggddM0IQ/JppRskRMnVZ+IAQ0ButwYgwlJwcOJtaCiMyzJA7JYJEEVgmYgBkAdserW6s1aTW92e0Pc69H0gAtrzdUke6f/0A+5i+Z23fRS65JAtWwEWIfZGUSAhQCRkpYoCZCUp50JWMpxkWIsfBkar72o2PgYI+5TB/BhZk4cNB20ZACZRHeiwjsFgGqTQIjU7aEYtIhiwHA7OyyAhpEFSZCFMThpBxKssZlzRe7y3FrryoJIlTdJ1GMwd1Gm9CN6qqECAGB09rJ1e7q1egMy9HDDiToA0FcAoKqqi4uL7377aXWol1v/pa+/+8nzZ//Zf/E/m9w7rSR6Y2ZHp7fu3vv4w48++K1vG2tyl0WsFWvTOuOkulpGP1OzSU0o9e+LSJZbpJYXRhJLaqGkLAlJiNEodICvuDI05Grq1DSNtdYYWq1WxydHu8v1fD5//uQpgIxGo+VyOZvNMiO+ad68/8bmcqVrabve3L59u9pvbx0vdrudMWY+nWxWl9baF8+fvfnFrxwdHW2326D9vKKox+PDfv/pp5++9caDk+OFiIS6UY1r1fyrqgoR8zzXdkbTNIi4Wq20yKk4c618np2dEfvRaAQATdPMFmZ7uaqb5u79N1+8eNHCtlPSifuUUvLhRt7VZzjDMG6Ik2jFe1tyso5Ss024+WpuD6EDs7SkLZ2qg/7sVSf3p4rn7JUQrrX0hk6iv+Crbh8mYQICEpNaeLRoRkuAyJKShMgUvIjUdWxhxiiABgnIEFK7aVrVREyIGAGlU2Pv04W23IOGGRClo424Yu6gV7j1Pv+Cdci4rhugNqVQgNbx8TEhee+zLMuyXAPJ+/fvny+XRVFMpiPoBmy12KiH+o+yLGM8aBRGRC4zxjnEaCK204EIxoCPbJjzsiiL8Xa31dkp7w8pAVALCxJmChzYq/eMKdXez+0oLwoAiDGOy/Li4jJGGI9tbp1eVFEUGldaR6Hmpj7s94WkTEQystxJpGpRiBFiDARiDFYhGoy5syFBSsEYQyzjvDApC0ZGxiy3h21MxtnTYkJ1uHj+8vnLfYEgWbZpfPP0aWFce1fpmnrGZyVGKkWJ3epiZuGu503GEGu6kFIytjXQwwonAKQkAADhCgrfNsBaBQbbo0ZhMBc47Dn1+Qdc9aKumDZhEPNprZJoMOnVKjMMw6yrCbyELfyYu+EGstTSL2h9A1BUxpoMWcoy68qiYE4+hBBSjJJ3YYSicGOWUkBWeghJ3SbrRoJJh3uSgHBKLNiprVprsS0+JjQEBoUsIk4mpZo/p0yveWbzzDknAEDY0Yq3OofSZ+2dzxsYQNHMrt1vLP13OnR4fcKnVTWFsWiP+eXLlx9++OG3f++99Xr75XcenC0vT776zrtf++qPnnx8fP8OuSwrR7APf/5X/tIHv/1ta+1uvTkaT/dVnVLKMktEIUZteLeQ1JS0UcfMTdNY0w5xq3KsIiERMc+t0QhhUMRKSfR39aOsNSKw2+1Q2Bi3XC7ffueNcKj1xlb73Xg8jimWZen9qqqq6XS6Xq9nR4vxeLzd7N544w11afuq0dpSVVXz+TyEcDgcjo6ORqPRbrfTRG02mynuZrPZgKTFYlHOcpV/GY/HnNLhcNCV33O1WGtVF3A
ymcznc23vqXV6/vz5nTt3ynKknQ5+/mLbbO+IhBBcPlJhWyKqq6r9suD1ZYxh5ax/5aoQ2tENIQghgqota7rXYsI7MqKr0aCuFgoIyMKqUKYN8X8DentDD//qLyidUiumi0LdTWMWJBKBpD+JiE3kBDUFESHDiIJiSZjQkIhBi8IGDWl7TxglAkCiK7fX3vAWoUDMQ+LgXuft89z2qwcRWbwKZHRb6hweJnO5XMYYAbCua1WKMb5dtTpwWtf1drtNUUajkQqI6MC7MUYHfUZtSTNDRBtTr6eT5846l0B0E5alSwLee0uanbScQ4SGmZs6zCelMc77WNe1Nt6yLFPmFyKwFszg0Etr8QIUByW7lmoEWUg6z2HIGONDzHMbq5ohZFnGPjFznuehPjiXidDU5ndmR8HLclebEoqy3J1fjoybE6wZgvd5bmA84c1u6CfgcyuBoP1aQkzQd636oMo6m2dkoCM4loTA7QTb9eAMEZNv1wkRYSd8o/j13tS28amelfLCXKcOUQuNiMrQ1f8VTdx1i7aTDG2NnZMoivgqz9PQlQUB0ajSMiF0p5FM1/Brm+FoEEEgAiMKEhpnLSKWWdsCnJTSsVamlDgm5ggs3nsafA4iIokBwxJByWiSKFpaXVKLLVKQpbVkDTiDiLYsrbUmc1rMJCJjrTEmcGIQaUf8hI0Kk+MQbdDnAV3zEftatqqsDZt5N2qbvT3RAaHNZvPxxx//wR/8wfLlgcBcnF/+1b/9N7/96fsPn356dOf00/Oz45NTJqxWq4ePH0GGIiIJLBkZTMXEJM61rGDq7boCXUopFXkxn8/bXVmUo6JsmgqAc+sgMV1nk2EGQjBEjFLX9dHRbDwuvPfAYTYtm7o5HA7z+byqquPj42fVYb/fj8sJMwfv8zz3vtHO/GKxWK1WMfnFYrHZbLIs016Dyhg557bb7Ww2G4/HMUZw9tmzZ7nLyrKcTMa79UbbKMfzxWg00jEJTmDIcQrVoRFG51yRZwCQZ+V6tQ0+nZycuOP85cuXIYRiNKr84eWLF0fHJ7PZrB1BsbaqqvFkvq9Dtd8fdvvJ8cm+Ci7L8jwPTdXfBLk+sX5jL3deTzEp3EdfV87i6jm0oM3W20Hb0uuTvz/W4Nm/xuPKpnxOb6+/7N4udGguUPgWI2BXLFWHFIUpAQCEEIggeONM9EackRDForgMKUFKlCgl7dVrxkMJETW4bu8h62ZDAKNdve40rhSehm3Iz79go3CIgdvTfbjf74+mJ8aY7XbrXHb79m0lrQYARWCPRiPnXJ7nRVFYkyk/bIrcnSooWmyWWWa2zrWty86MlmVp87JJ0jR14JTnee3DYV9Nx2AMGNtRhpJBlhTB5pkxJsQmRu4xyokp1MEY41T7Cjm3rm1VdsIeqpmZZVmeuyzLJFyhkLUqS84y825/ICKOiSEWZWkStM2PhjLnoo8Z2ZP5Yrdv1lXtBJv94dmZv1/gbDJudnVNaAwd1msH18xHZ/hAkXKvHu0au/6KHmodLFIMoV9peP1t/R5jabuAKSVj+gUD7nXblYikU+aD656PU+rtZt+O6sto1F1a3/a4PkA6HEiC/txMx7fLzElS6mHTRKg5BZKkBCgsgoCGDDmyetrWiAgxG2ZRjkROIjLWP5VYa8lw5UfbSTpiUaSY1lTbSUdDWsw01mJmiYgBrbXkruQRVGiZTUdPA5C6oStEhFcAucOSVwswaxv8V3HkDbc3/EaU7nKz2Tx58uSDD54DTxOkr3z5S//sn/2z/+i/+s/zu7feO/tUcvv05dmsHKft4fd/4x/TeOQv90eTUol72oCu83769dmOtMwYo4D4LMsmk8nZk8e73c4Zi4hKqg6Jm8ZPpuX1ddt+y2RRu3R5Zvf7rfctg/aLFy++/M4XDofD6a1blxfnq9VqPl0cDgcHMJ/PHz16dHJyIiKTyQQAmqaZTqcff/zxvXv3dCZP53oVHKecOHmec8CLi4v5dFYWRUrp+Pg4+Hq
z2VS7/dHRkSpWMrOC5lRUvdf11MJSXdfn5+fHx8fvvvvuarV6+vTprcX0o48+FcA8z5fLZV3Xk/lst9s9eOeL20dPdrvd4XA4uXd/udrlA9jdNdvebcZXvSB29cnWtSHrWHonv5DaYXZMPbBz+OsAgGgAFJL9ZwDg7A+LlG681BmMAQtDV1VBRIgZaLmDEDVT1PwMCSRJEkAUISGIAinJzrqAWbJOsGTKDGVoDCQgQHFkBB0YBgaMgAKSaiArlCEJGTEkYIBJQHWg0IJBMNSPTQExJtTGIiOpTLAGlTpe0aHLRcNFgJCkyDK0tN1vR3mBJKenJy+enCHCrdPF8uIlIBuH86P5tt6v9qvRSTmeFav1ejKdhsifPHqaIt46Pj0+useJOIXJeL6sLiXx0ew4ND6/fVrXtYeYMLMoTR2Xu533dDS7/eEHTz1DUUy4jqo4ur5ovCuY2aBNBMzJZJDnmc2Ccb6OS2MBjM1Hzm/rW7dPHz58aNJhNjPLVTpaZAfvjRVJsalait7CFRgZ0SQfPIklY+KB0HEaGWOy3MaQIkpRTiIsyRblaFJVdZYVWQb7fVUas5YyisNpjkUIh8P0KHvDjQ97v6uCncJzEcQg0wwaHtexSLjNIXGSCABgjCND1hi1bgAgGLUCrnBfEMgN+BAzAleYhKEJTYDcS/BswLogEFMio2m0cbZM0PSmkxANGSDTJiUxpFqisEihLOcxMu4adA5KgyJk2/AImZ1BZgmcUkzMAZGtRbRoxEEUllbG3QBFxc5o+Z0Momo3CUcrIhRT7x2vd8GvMUNq7wwtgvdX2167+MARuPMfJERxAAJSX0KdAeqVO7jjCO08TbuwXbrWkuDOluhMSD9MiR1IJ5mU+vOhdpSYu+8LAASE+mEPEa8gWWpLW4hodFITrLAAqFawBdVMBmjEcvANH0JoIAWCkNuEiGQ9i1tXTRC3jO4f/e6P/8k3flTR6DIe3rj/xvknT/6Dv/hX/oI7/uu/8jf/+3/yD55engOhWW9/5Su/+Ik9fvbs4sG92z959mJ2PNnt6tHURjT+EAUMJxcDnswXodmNC9lsVs6BKwjkMBmfxFA1dZhO5mggeI8G0IBxZltB5evpdJoazoq8rnzjeT6bNw0ae/AMh2o9Ho+rupqNS7KmXjfr1XY8nj579uzu6V3FkR2q3XZX3H9wvxKqEtYRwn4/n88d0SjPQ12XzoAkZ7L9YUtEZJ2PaTGmnA9NSndPpt/81r/6+a9/7dMnTz789JO33npn23hEU06OQtNcnG/G4/Hx0dEmPhcJWWZiTFmehVjtq8Mbb7yx3e83m43LC8zNk+VZWZWz2ez0rXubyxf5LD/E5tGTT09OTh7cvxN9oJSq5cu0ufRF8TTGNx/cLSgWlqv9MjOjFhnkyOgkEqCxmffetI5KSQraAW1QZ0ltgwrRAKau0+sQIiJCKz+rG0GFxhgAQCJDArCAEcEqvAUggljVdgUA4OFM3WDW6FrP78pf0meUlj6/5vR5SJBXI7VXfzrsiLz2YObIKcbYxOB9DCFEVVpLElNKiX2MKUlKEjildBUnXtV5htUSvoY4H0YocL0U9llnrrUItTKazGVZ1gSvNcxeD0W5X1VVCwAWi4UaCA24VIRILYt2p7XmruGkJoV6uCIfjUbT6VQnb7S7JiIxRmEsCoBBla/v/w9HvxGv6Eh68H1RtG0t6ZR6dKgoyzLVlOm7WZrhackLBgmZCj333coeYt4jPwEgMzYv3KQcTadT0xIFdOEhXaU+0mn+6remo4H9DUed8lFr3c7kgTEwHBunVs2nPYfh2HtfyB107GB4IZqL6zn3y6Zv9vTrREe4ek+gr3jvXy3jaLb06vLuk4n+fPqzeu3RlwRfi2vtU6L+hPWw3eEGR5Zl/dDbjSfZ4MgHh35C/7b+VOHVSayfzVjc6HrgK0e/40VEZxOl656ioczl++ogDI337/34/Q8/fFgHX/tw++4d4+xf/xv/9v/6v/7
ffOmrP7etDn/xl//SR598/OTp01/65b/0e9/+/b/2139tMSmXy+W9Oye7za5rNUpvXvQe6pVOJrn3kGXZbDbTrEhTa+j6Jh0dAaSk8OBeIBP6BWM7iUfnKKXk60ZzLB0z3+022pPTvawD45PJZLfbzWaz/X5flqWynCgUTqEDujhDCIrnnM1m3vuvf/3ry+V5URTz+Xy5PM+y7L33Pnrw4EFZllG4rusnT548ePDgwYMHVVUtFgtmfvr06XQ6ffjwoSLp9CjLkogUqHJ8fDyZTKy1dV0vl8uLiwtmWBwf6Zrf7/fM/OzJEzUmQ1WKG8tguIyHT4Zf/WetEKXTu/Gjz1pyfyaHGU1mP/VN184VzeB1xs5vdr3wvrbbKe90vPPKpEkG1AIbQiRGBDJAOheEAu20bzv4D9IzeQMQ6F1X3jfoZCU7d983caCrz+ir165DQ5QYgg7ZxpSKrDjs9w8//DhzWeayw746HKpyNJrN5tXhkGfZ6rBKKVnrmDmEOB5PhNmQZZbj41v7/T6lJABkzKGqQwynxxNEdLkzxiTmxND4uN/5y/VueVEJwmg0EaAYAxm0DkMQAHDW6CiXMZBljlCsIUMgkmbT6enpCTMjcAih2h1GoxEZzPOyCdHZDMns94fJZBJj5BQJwVhjDVlLxhiL7JyzLgOAxAKCZI01LqSEaLz3RMblRdM03oeiKJrgqQNcdekMGWsb75MwKyCFDDOEGJNIdNSX9AkIlBtFGE3XsgaVegPQZkAEYUEBi5I7HGfZuMwKZzNjRkXhnLHGOKfwB5NlmR0omNBgv0mX/UhHTd59yR1FBHZztQiAqFJPOFBp0F83zgGA0qt05JUgSi8BOp6DXVuesJ2lo355v+rkbv5oMB46XIs8OH8Z9CNffbwRCd2wRIaMCkEYozGEziCYnvS6N1BtVX848DPY4MIM3fAsdi8itBnh1SRyJ/PbXjIMrk4AAFLiFEMMDccGILblF6QEeHG5YTRnF5e/+Y3f/oPv/WhX1Yc6nW+3x4vF3/7b//5/+J/8x8+XL/+H/+8/vP+1Lz69ePnv/Lt/69NPPvkbf+Wvv3P/ze//wfd2u93FejefTzx7QxYEUmARMQDGkEEpC2sMF0V2fl4fH49ms3lKvNlshdkYY62KqaYsd85SjI0kcMZo2CdJvA/WECJkhlESETrnnDXCLJJGZZlStETvvvvO5fJiMh6vVquU4mg0MtYuFgsQOX/58gtfeGd1eZk5p+Gj9944Z8j6FJ3LEosxJjTV4VDNFosY0+md2w8ffoxkJpPparXa77ZvvfXgww9+MirGt2/f4cir1Xq7Xt6//8YXv/zl733/B4j4cz/3c48ePZnPFtv9HpGms1me53XTeO+dc+PxGDhqu1GH9yUxCM7ni7wsX54vgUwxGh+aZjKZuywry8JHuIpOrve5bqzqbhMq5aZiA4fjQwCgllzBna0NJ2yJl/WdcIV2kXZivcWzILxmlwz39etf/+N5UVOOpz/jWztPMnB7r/x5bFspVzerHXSgtpdpiFxbdBFEMARIoM/bOFtDM6R+N3U3lwCFlNi7BbLpn+TB6b3S28NXX8Gmrp3NSCCGeDQ/kpQ+/PBhaPyoKENML1+8RKLZbA4AWZatqzWCqes6z0tEfPfdL9RV09TeGDubz1aXa2NslrmUeLfb5lkxHRkiyvIcDdbB+yZst9Xlavvi5Xq9SUg4mUyZpaoOiZO1pvGMiNaYlGLwbEjyPDMEIEwoLOnk+Pj01jERGQJjzHZ1OR5PjDWcpGoCCOZFWdeNgpuFkzUEICjiLCEicVC3JyKJGZFsnjmbNcHHyE3TGGPR2KqqUuI8z7FVntYqvqQYE6e2ZwTIzIYcICbmEFMS4MyiABIYNNRKLSq7WB/f6bdAoHJUqaWndQRlbqZlORnluTO5taOiyJzJnMuyXD/HOWcGZEU6gQfXKw2K9e+Ncmtlde11dpoQRZsN6u0G6Axo+69Xi5gFoB0pbZ0otPX
9Vlzz1Sznsx5B6YU+NzS+2spaiB8UOYaPr47/t7eCbzYy4XqFYNhpSylxF8y8Wiy58cn6OTK8A+1JX4lJ0PAKBEQkhRRDk0IjHAmU44UYcbevfITL3eH7P/zgd3//e58+Pa+CeID79+5crlc//P73isn4P/3P/6c/evTw2Wb5408+vH3/7q/92q//47//j0rjjsaz3/5Xv3///p3zi0tTWATkxBwFtSJHQignx/MQKmfN+Xl9+/Ykz4vVah1CBNauNomkxNE5p0z0wkwk1hpDJsUYQkRkZ7M8a+ksijw/ns9Zom/q0ahsqsN0Mrl7+6SqqulkslqtQBX4QO7evZtiWF5c3Lt3Z7vZ2O6bSin5EDKXhxTH44kAMrOv93me77a7o5MTACAy2/326Og4hLherb70hS9983cezmf21q1bxtnJaHyo1o+fPN1s1n/uz/1Slrlnz87u3L1rjNlXB+k0VVhEUW+TycTXh7ppijx3zgmLb5qYUlmOTm/fefHyYnF8y7iMEcezaTEeEZKg4+v84DcWZ7+iOrfXBz29ZZaujN4KdQxDLlJqIZ1sAO66ZS0tFw4cZ/ceubakbjiYV17/47m9zyxyyusO6FrWIqmTX7563p3Q9V2ktoKRmUNKPnITk48hJAmRY4KYwCf0CYJAYNEmTWKISYt+wIIMnd60cOwiYG63/PDAntr4qijK174/7ZpoHanN9KeTyWTiYwicrLW7fX15uVLyoaIoMpvfOjkZleV0PEEBZ+xqtdpsNqPRSEFi+lHKvDBfTJOgKC+hMbp/mqapqupwaHSVap229n63a1S6fRDaX9H/a71F9U2kA8IVRTGdTrUsqVXTfr6HmZVKRkuvw/qe/q5avf7F/vI7jQtQ1dwsy4gIJYFCDRF1LqIcFWWZl2We5y7LbOZsnpnCITBjRwEDLSjAkG13vlIV90uNhJxzzlnnWnxNnudanh3OeA2zsRsOBrq6Vr9KW7bLptHiKofIoS179nxg6jYMEgkgtzXZ9jMTK07kasH2YS+LMmEqx82rTuuznNnwR0L42n9ozfAfGOr/9a8MH3XlR+HAqX+MwsDxtf9QEkrq/1dS6P6xJOaYOKbhk1ZzjkUpPVGAYKgV+lOOq2WcGogNJw+pRS5EhhC5CWJd+dHHn/7W737r2cvzhOAFCOCTZ2euKNZ1/T/+i3++rvb76L/34R/efuctGBVf/vrP/y/+6/8qn4z+T//df3d7sTh7djYuisy0YBzqEgRJKaUwGhfWmhCaPIfM2NA02+0+c4VIYo4iiYgMEgoDMhlQgSGD1OJgCVJi60gLw8IgkkbjQmfgNHXL8/zFixdlWSLiZDxWqRMtGwJAURTL5VIJqRFRWSx0xk46OmztmJRleTjU+plvv/nWfDKVlOaT6f3797///e//4i/eDiG8//77281+Oj+6dXrn+OT0/Q8evfeHH7zx4K3F0cmT52cJ8PT0jnP56nKzutw4cqN8hGhSkul0VhQlAMbQthuGc8a3b9/yvjEI9aGySPZ68eNG8PR6h9H+ys2VMcgITTuQhIbIdh3hm5Hin+3xUxQYYDAV0MaG7cAAE/XdRG07tXH0jQ/UoSpBSIKJOcboAxJZZwkJXWRjNNBAiAKAzIIoKemEPxC1XAlIkqIQAWJr8/TWD6v8HQa9PZ/X3t/h3Wfm2jcppXI0Or9czueL4/nJ0dFM14oW5bfb7Ze//OWyGGdZttsdHj9+8umnnwqjtXa9Xh+qnTEUY6yqPREtFguMO513TpxEJAmHGL33NqPpFMmW7XogE6PGCthb/P5uppSsUbQ36GmoV9MpnxCSzlR4L029uy2S5zkippSKLC+KInGE1EqHKMhN70wIgVDYYIqQZVkIwiJEJog45xCvolRtlgAZZymBzcj4JubOcpn7iCEyM7BkWQbbqmFhSCCQGBnIalms/160WyfSzg6wMIKwMCP2hb6UEve0xUjOaZbDKSUYtBP60O5aOtJNielAkqGIWlU
gUuSLPlqNZD9jx/VLwhjDnaobXAV/7Z8drqgrx3ad3kE6DGf7I7z25v7O0HWChcHtuoamhusB6I03wyvJZf+GG5+Ag8v5rDswfMPVHrke7MPrbOIwqUyx4RggRV3bkTklaRJHwfPl5Xe+98Pvfu9HWw8JbYJoXZFCnRCcs9/54Q//7t/7ey+2q4YkQfrW977z4O693/y///3f+O//Xl6Ux8fHz1er3GViUgrMzKSE9YIAjMIEspjNl5cv5nNgjjHGpglHi6w+tJdPBm1qAxoSIGu9jy0zJ6m6rI4AqeZa23x1xmbO6sADEVxeXk5GYwQ4Pj5++fKFsiNtt9vMmvF4vFwu7929e9huNQwVkcPhcOukpTvQRuzu0jd1GI1GFy9entw+5SIeHR09efJE4/WTkyNmiJF3601V++1+9+d/4cuPHz/5lV/5ix9++OE//ae/+cu//MtojdLiU6vTG1arVUppMp/dunVrt36JiK5D6jaIh3394uL85fm5DgJWT56jc3SxXBydzCaz4dd6I8N7dakP3twWz7tuXzuroCa5a230FUCj8ZRq8klHpwdI0DYh+ifwuUCRf22H/awfDPfYNSsGCgTTWkmrGQQAqoXXpY/968hIdhC8R05NJEIJziCKIzQWKAISQDQA4Kwk4UwUFGsAmSi1pSVodYgigFFTqlDSrpfYbVocnLAAdCw5ihxi7keajLMxxpDSdD5LKR0Oh/t33ji9e2e/2W42G45pOp1KAgIzGY+fP38+nsyePHlS1/V0Mm+aZrVaH6oqcqrrQ+2b6XRajgtoWv284FPwV+lLWZa5cyFRCCElNsagTWAI2uxVQ0LQoe+YojVWKwnq0qDjl5lMJhcXlyEE74UZ6gpSSllWqGNTHqNDtU9dUUu64ESdCiFIJEOU5VmWCSDGmFKMii8JIWTOojAKEwESWbaZIBiT5ZYB0RB5SIcKJYEkQpNZigEAOAqwdKKJeB131SpmAABEjkok7pOooHxKyXtWesC+7AbXqyt6aVe8otLO23W/cgUP0YCaiCK0GZuIsLOiAam5ilXVWwpGACBMgiS6zkgJTrS3N0C3Y+vDh06ipW7pwvnhlmlRnT0SrQUctyQnN9xe/7vpOikUq6ZJOwXVf/jA8xFDTxuGV/xh7QfitSEf1UV5rTmDjuq2e1RjdGXmrgzfwD7gIBBpizAxSIrCERBAnICEmJoQzpfb3/pX3/6973xvvYdAAEYAjWcRhIZjOZlMFvPf/f1v7XKZPzjBcfFg/vY3fvu3vvX7v7fZ707z6R9+9NHt4+PtdktHIxGvhRMLRIqOBdlsV++8/eBydTafTuq6ZiaJwDEaAiWfsdZKimTAgLTZadIKBzmboomhScCJ0BKicxRj3Gw2BklHlcbj8W63G5f5drselaenp7cuXj53Bq1zy+Xyi+++s7683O1WWovO83y322k+p2FrVVXMMhqNVmiZeTqdLJdLIfTeHy1m2/VmuVz5unnw4MEPf/jjyWQ2Pz66vFx/+uQZgv+FX/hzn3zy8OT0trX2d7/1e2+/+847737h8ePHRZEba5umUZo3AEg+bNa74FM+yq3FsiTnchYUhA8++MCVZUuyuN0lpo8fPjw5PjbTa1TUw+X9Wi94w+11v9i93s+99C2wzplhO9umhhxfcW/YCtjCzcmCP4njj4PkvBF79i+++s5267ZenxOIyo77kAKnyBJYYoKQJCZIDDEhMzBDSpKiaAqUGFSeQEQfb8atwz/92nMbHq22AwARZVkGhsiayXx25+5dIPIxqMbC+fkqxnjr1q27d+8qOcKzp2fj8biqqiIfTSaTw+FwOOwURrjbbXW8Rgt3iCbGVAfvY0hREI2gEUYwNkRWda4QQkrtyfRFzj564itmZ8iyTLF81lpFTit8kRkQVbWGhyA9faLFvSHKsU8o+6JEX+FUCQgRaZrGGaOtfkvGkSEUBCaRMnOjIi+LPHeIICE0IfgQqtLleWatAQNACNSVRl9
dGCLQ8ZQOY5W2BqK14h4h2R/DEsoQ1jG8kOE97C9cj/byQ4ze9/zj/U3u3zMsisYYJaW+vnfjQl6t7bx6bkPsyc3NPcCp3igQ9cDa/uhRnZ8DGdUasrTzEaID9cNHka4XMJgsvvFHP6vudMPefWaedxWKsLBXUgXd+CwYRUKEp2fn3/zd33v4ycoVaJ1LguiyJiUgMNYWk/Gv/rW/+sbbb03ni8Wt0wdvvbk4OqpD/OCDD2NM2+12khe+o4Pvh+g1UNGzWq1Wk8nIIDjntOPADEpiiS2AE60j22PwWJjBGLSWrCVjibv+gib9EtNus22aylprLI7LYrdZFZnbbrdEdDSba0XEOactD+gaQIqzzbJM7Ybq863X6/1+H2OcTufT6VxfPzs7izHOJtN33n0LUe7dv/uNb3z7nXfeIaLEMF8cF6PyR++d//CHP773xgNmOF9e3r1/73A4PH/+/Otf//rJyYkGeaenp7du3arr+sMPP9QvRf/ibrez1p6cnNy/f//Js6cKGX3jjTcQUTiePX1S7favLT++ujyG+264HrrfaqmIrjtOGhIYgSC0M744cD3U6tD+6R6fB2l59Y6I4txE4TcyCPU6mgzqAwE0Fo0lbEMt45yaMaWcZmsMgugabtcuIQI4ElBhNRVYBAEQhpY1qVVdU5IbAkCUlPozHMgQ6bgJdk+gG3YSFMmzjFlC4wlJGKqmJrQfvP9+UZTzyYzQBO8/+WRjyN+7d+/2nbu7zc6STTECwNHiyLpsv99vdjvrbHWoHzx406cYYnjnC+/OFvN6txfgwJE5IVGKvD80IUJdp7qJdR1aZTRCZf8gsjFG3wgzFAUWeUaEhmA+n+5326Pj+VtvPlivllmWOWvquvZVVRSlD369qg81jEpAorIcKR0DgqQYVQ6LCCaTybhwk8kksdR1rXJoDGLI+hhHo4kA1HUNHUHXaDQqcuesIYImNCF4QBKQkOJ4Ot3udta5qq5ZoMjz3bZyFvdN6EhoITK0dVsEEf3OEHqEkw7eIRICgViEIsNZWU4nxaQsCucMojE6dN+KF4YQsnaUnIwxhq4IpbTB2Vo0IiLSOqezTiFPzBxSFBFjrXNO+Q5EQFR5lbkryLcYGGhlb69gAiBAZNqdqlcyMA29O+/96NAZDHe7QEvgLt1QO3TA5XawA7HVJ+u7/IrO7P6RMaANY2uNtWQMqYIEESESGkEiJCEiNGisTjdePTfGkNGfAhpq75mhThQNAYW1TEKm+5Fqqkn7RwbJNwgAKNeXUlm2JXQf6ro20pyfnxvnppP5oW6CCIt5/uLi7/39/8/Dx892dRPRJIHAkAAQiEZ2t9rWdfWTD39y7+0HnNtsPn7y4qyuGmri8w8+CZtdrFJM8e7t2+vNDnOjfQyIElMCkTKjybhIKdy7exp8/fzpklAym+/3fjadAgRrLUefObffbSeTkQBLSlWVjAEEdsYhUggxMwIiMR5msylw9MGPR0UIoanrWyfHmXNNXTlrvfez2fTkeLFcLrfrDTm33+/ffuvNGMLhsJtMJpxSCOH27dvvvffe3fv3N+utK3IEAqTtdns0P/IhhhCKIi+yfLO+bKrq+PhkNp08ffzk6GTygx989NWvfTnGtN5si6I8PR396IdnL84//dV/61dNln366NHR0XHTeGaJMR0dHWVZvtvtmFld2tnz5zGmk6OTpvbGGR98npe7/f7o1q3Hnz4+Oj7KyxEaWl6uJ6MJIZjRTON1HkiX9DLrQ5/XTkwBd+uuXUlds08AEBV7j735RcTUkflouYKxxfZzJ+yhtH3cbY5hteOnQ1r+eFXR/7/c7OckVVeBITKStPhM6KE7KEMqB1ZSQUyArboAa7YnkSEl4QRJIMUe5yY35/kGielnpaE38lFGUMg2WZOVxXQxDymWk3HklEBu37bT6fTNN9/UAsVqtVouL1++PH/27Nnl5eXhcNBA8u7du7PZRCS1SBBETc76FCQJhMQp8Wq7BUERvFj
u17vDaDSeTubawtQzUr4xPU+lOksJlCkGEfvQUlehc64owFpICZQdDboy5hD+fiNkuVFC7Ee7bgT1+obcuiLPc2vyzI6KPAVvkBEYQSyxcybPAEQy6whFYmIWrSYhYhLm64lBX/R8tTjWLsRXkqQrjMzgGPKxvTZHGU589mlfn87CK2iUG++/saJe7avdqPy89hxeTZJejwV/3SGf8agDgzcerbVoSCz97I/GWWstWUMqittHry0I7+Zjf8LDS9YF2d+r4QBiXdeL46PpdHpoah9iYjo7v/zBex+8/9HHL843VYTGp4OPPkUYBM4hBGb5nd/65nw8/eSjh812X6233/v97754+MiKOV0sEODicq3+NYUoUYjIEhgCRDEE41EGHIOvU2o3BQBozV+3FaJYq9hgtexgHRApIaTyj7ZDoppq63U7ayyBr5vjk4XWRUZlEX3tvT89uVUURV3Xs9lsuVzqWlVZ88PhkOf5YrFQRJvOBytVU490QzRFkQHA4XCo97s8d++8805d12++efrjH//YZPn86JgRmOHnfv7+fi//r3/4/wagk9M7P3rv/el0enFxgYiLxUIZPokohLDZbO7evTsejxWaoBoLRBRjXK/XIcV9VVljpuPJyWJuUHLnFAsm12eFX+0y3Fj2ry7ya09Ua0Cf3Nwa5upFod6bfVa94U/o+Clu70ZsC9rbu3LIojTnAEqT29XQSAA7RsQu17Wk4mFCRGT6OVPRfx3CGmKS9h+zer6QOCWJsRtzTylyiiwxcUzcoTdRpANtMw6m2rHnI75qQ3anri5COVvzojjUtcsz/ZDJZCIimXPT8XQymuR5MZlMCpfFyIioOiBVVW0P20dPHr+8OJ/OJ8bRvtqZzIFpMfFDO6vpSF3XdVKuA2MzZ53FDqKjzMB6GzpQJSmxng7I9yPn6lxHo9JaCAF0xpk6jSG97J6TfvgN3qhHqfs0HUdR+w2EWB8qX1f6hbJEkWQdSQqZVdZ/JoO5w3FpCSB3xgAqBtUA9PKwQ6PW1TEI5WZW1B80GF2H6xHVjThmuA9ffWz9nPfB+9j42HhfN01V+7rx3g+n2nWdta8kXWRJYsvTL4O4CjoJgtfWz/sw7rXbR1kxew3y4ZMe7zb8XyDVYibBa8+Ns2Rt/2ics5kjq6XoP8I/46xCbYd/VxnSbpxh+687bjjsG26vB836mMqyZIDdvkpIMclPPvzkX/zL33nydHUI0GvjGU3oCVLTmNw1dYCYnnzyyG+reTbmfXj/uz/64Xe+Cw1gYmdM6bLdYUfWcOS2O45oyRgAYynP3Ww2YU7ee+h6qAYhc67MnTOoZscZaw3pGi4yk3UBH4qQKJ6FJEXg2O8IdYGqJaTwaX2squr09GQ0Kph5NpupvkEv+Ky34vj4mDvG41YJiIhZrLUEKInzLDNE+912dXk5yos333rj1q1b9+7dOz+vd7tdOR4d6iYmcXn+pS+/vd7A73/7D8pyfPv23ecvzpPghw8ffve7393tdvP5YjyeTCeTxXxeVU2elzbPjm6dhCRVE3yKo+lEAJj54cOHFxcvyyK7d/du9KGpDlVVtWq9nx3i3Hg+tCqdEyEQQjD9CFvXq/ts1zisfwIAUF+f+1M4fkrs+aqReq0leq0tgLZowgaTJVZWMWvQIhgk4CjMkgaurHV+kJIwAydgFk7ADIETJ5UsEWa4EWbeIAB8Nf+D6zZreCGMYKwtRiW2Olsdx3SIdV0/evRov9+rLoxzjsg2hyp43zTNYrGYTCaLxQKArbX3798/Pj7WEr8wMkMSlo4e2mZuOp1WTb3dpwyhLJ02okejEXcKtJojquvS4H42m52cnCghp0avvdW21hZFYQx4Dwon6yTdr25+f5nD73F43xTk2TuqG4whkhhZOCZJCYWtwXKUW0KEiMJksCgyIiBo8fZ0NRPNiHg1t3CdXWiY7Q2/lNemempeX5uN3TDH/WNPE+O7o65rtVO+bqJK/Aw6f8NMZbiKUhu
R3VxdP3VHvbqJXtvzGz6/8Z6OBOzm8/7RGKOPZA06C9aCNcNHdG74fPgehk48SEeLDalnffVRur87PM9X42AYUKUwMxiqmrDe7gIzGXd2cfkHP/jhH/zgJ56BCFyRk3UIkDtnCVNKIFCWJTCk2s/y0e/9y985Hc3OHj769m99M724GI3LVMfLi2XhCkQSQzHGHvQgwoDgnBmNi9GoSCnE6J2DjmO1/Tp0nbe+JqW+/dZ1eBNz1GieiJoGVLuOI3CKWhEJoal2+/l0EoNXoZ/og6aS0+lURLbbrXOuV/wpimK1Wo3HYyLSSQZr7W63q6pqv9/neVGWY71jqvy+XF5sd5tQN1/50pcZ5O792XqzYYS8LGPis7OzO3fu/tIvffn8vPrk0eP5YrHbHfI8dy7f7Q6r1aqqKl2fOlulX42yemZZVpblbDbTYfbNdnVxcXE4HDJrg68JUY3bUB+KOoLT4Zd+Y5e96iZelw7eWDOEA1d3/XVz43f/pA/7x/gdabFkytiBorTIeurIGuJ25RkxKP1IEiFYAjJgiEBSt2EwJYgRkzVEJLbN24iIAZKwERTGlBISJGYUJNFfbMdyBzdRwUIiIiBGRBsr0AURDAAWEBJfmzkGtNaOx+P50aLxfjSdjMfjZ/5pWZbPnz+/ffeNFFIldWiiCDZNg9YAwOPHj+/cufPgrbdEpPYNETCyyQzUFjFwayKptwsdtKSDOhndcxBjFAEybXUxxhaXmBKM5rPxeIzCzjnNVPTRuZZdwlpV1wuHw0FXturOAIDqhGnvql9n0Po8YWbR2UJE55xzERFV0CezlDsjYMhgy6tyNenoGh9dZiMHjsE6YwyExAapcE4g1lGYk1gy1iaWQYlDywNEAi2N8tXJDPVdr+nTDmOs1gteAQvxWvolAyscBISjQit1As8jK8E+QJu+iMb/iIjIIomZOKXU1+8QkXVQBxOJqFIBsmhI87Psz6vN384IDQdssV+3AxqY9nlPkdLdlBacmbp39p8iCtE0Sn+UQAiQQQhJQAgHzwG5f0+KUVplT2px4yDY38IOjqfT/SLtiQjIZ1wj9j6vrc/brPYhCRbl9Pxy87vf+s73vv+jOgARCFCK7GNUdDVLFI6QudDEnCg1Hmr//X/1+/PTox//wbfh8QUILOaTy8sqgWhhFo2NTa2NEp2hsRZy65xzzKmu65QgU2EJARFo6oCFR8TMmsw6EfG+5hSsQT1hFIHU8eAAIjCRDgWKtVdl9sB8dnZ25/RWXR2Wy+Wd0xMAPhwO0/E4m5jLy0ud1VP1zbOzs9u3b2+32zt37hARIKFB30QR8d43vlHZ6uWyqveHzNjCZfvN9vnTZ+PJ7s69N0No3nrrnUefPl1dbh689fbFs2fWZj9+7/27d+9+/Re+8MnHHzdNc/fu7ffee+/tt98eFUcpyWp5aa2dz472+/1sOnfWbbbr4+M0P1qUZala8IHTeDbNsqyu62q3L7OSBBzhoXN4vRvGgYQ9vJLwKcJZxfOu07J/Bu+dCnmglgPVyvUG+aqlgq/Bdv4JHn/kbO+n5nnDXYE9hAHBoFgig2SJnEFDQCCAjCwqsNK2ppLmedAlfKzpXZcIJq2FDot1n5XwwSuVPemGMdTo6HecQJLwZDbVqXP1uMYYHUjXRaCT6fq/0+l0PBoBwHQ61UxCJ8S11HODADNF8TE0dTh7sRbBLIMKoPG+LMbOZZvNJqVkDDjXmkIAYAbNV4qi0KtohZu7oExPSceA8hxEYLvd9nSO+gZVbOgnu/VihwjGPgrulXL7LkirERhT9EEJ/htfiyRHaA1OylHubEyeCFwGyGKJnCVnLVE3uDkoZnI73nK1YPrFpTSe0tEgwKBgeKM+PEyJhoyUQ27MjlzxarRABiN9Q3jnjSnJfvJBQ5Mh9jUNjMKNEvGN9Tbc7TgoCg2f3Hjxj7RRP7PEShaN/dkfAVrZvL6Bpxlk/3rf6oNhmfqVM7n6fge5MgAIYRLJ8sKH9N3
v/+Ab/+JfPnpyXpaWjEkivp1VhT4ds841h8O4LInp/Nl5LvSb/+gfb5+/hMxCDYfVJgMo0CSQyMAAKbHCWhMzIpRl7jLj67qpDk19wJYgrYWSVlWl3W4iss6oS9MNklJiSSI6twKqZi8i47HrcJg5AHCIIokA9vutjg9JisAppbTfbufz+Xg8bpqmh0MXRXF5edlTQCBiT93Zy6AfDofeqhhD40kZYqPneTgcFouFc248mz5/ccbMt05uv/3uF9fr9Ycffphl2RsPHszn86pqnMsfP378ox+9r592eXmZUlLBB93I+/1eV/Lx8fF8Ph+NRjHG3W73/PlzIjg6mhdl/vjx48+y5ze23ucneTf8xaurffAidUkRtqXR1735T/r4I/f2XsPD0pW2rnp7eAUsJmzH0a6AaebKCiOirjbdPJCGBqdTHNOeQUta3fX2rqySfDYK4Vqds20vdVaViEzbWuMQQlmW28OeEUKMRKTVRWOMAljW6/Vqtdputyr5sdvt3nnnnXfffbdpmsjp1p3bs6PFfD5XjePuprTfqF5aVemOhRzBuVxEmhAODTCDc6jDecwteY/6MO0vAkA/O9835PQoy3IyIWPgcDhUVSUdl7SIaGmlN1I4wM33Vr43VT1dtXSaOypmFKNXnRytGWp1aDqdjkYjjgkFLKk4NUBXG7G2bbD1a6W/891SGS6bKxv6ao26X343/MrQBd7weUPPBx14Vy2dFjyb7gghpBCvauwhphBjCMH75EOrx/tKOAW9WM+gzP56b/TTNvBr48WfpYg6/BOIiMYCGiAzfESyw+fD95C1aAwao3tSELnbuq/9B6+k3cNt1YcLPWgoMYTE2/3hvQ/e/5e//c0PP3qSGJIAA7EIAFhrrQGQZEAyA8kHEAAWTGwAZnkZ18EyOkZgaPaNA9JqBxAmYUXaMrMAFHl2NF9Yay8vL7vdAYiQEhBRZggSE4ikyJK6AonOcUfRuqYGfy3CHBWbJp0GFgDostcw6+LioiiKyWR8OBx2u11V7SeTkY70Scd1rgzv3vsWV+ycIiRjjLPZTPeIlhl1yxPiqChbsXjm3W537969YlSqj/zxe+8RWWuzr/zczxuXf/LJp/P5/O7du/v9/pd+6ZfKsjw/B+Vmuri4OD8/3+12++0WRcqyVF3PEMJoMnF5fnR0pG2U5XKZUsrzfDGb77ab3gLoNfb24UbQdvWcpKWhf20Apx0+uenPXrMpXveGP7Xjj13kVJn1n17qaZ2fAAATWMRWEkxLNkaHZFlEBDkxK2UJkoGUEICRiBlTEkBiBmZSIAxROwQmhDe24k2s6+tUA0VU9Yy0upWi5GVRVdVkMtnHXVmWt2/ffv78uRKZa9tsAPGiuq7X6/VkMilGZZZlNs90HTMzdxITQ2dDRGUJTQ1J4OioLMpxVVW7akcEItBBNDUKBiJglvF4PJ1OEZEImzroztEN6ZyDhJrtjUYjPtQhBBXCBYCUkhWTZZlrieTTcO1iJ/UgMcYYWbUgpMWgp5TUqzJK0zQIUBSFSdIEXzdNUURELPOy8g0AaC/EGMMpcUwoZB0xUwgxxQTGfv7CeHVRDY1+/7yPmkUU6Cy9t+sdDxJxx9FDRKwAGwAAUACD1peYWRlbaNAjBNuO0zJzi7jB8P/j7T+fJEmyPEHsvafEzJx7kOSkqrq7umZ7Zm529yCHnQVZOcGXE/yz+AQRQO4AESydmx0+NdNdvCozKzO4U2Oq+h4+PDMLj8ismp49CFxCQjw9PZyq6mM/4gDImg/2NvhA8WT4POFDe7u/2wcG4YePAAeDzMPlevhfcld15U4yKoQIIEZAr2M3Qbl/vbuPnvuHn+29j/39Hs/hFXnPs/B9v4u6rs+vVn//+T/87ne/ixGKwl3ugyUKAAaAiDhA0wTDUQAkxslkEquGOeSIm5uVcxCqCAkMgRdg4aatGa0rsiDcv3IhgCzLJpNR22zXG14uNHvDoN+qtVmWqXHs0CPRsNc2oG4
cSpsmIiAhgykK9BJFBr0G1ygxA+Ocy3N3cX7+8ccvJpPJfr/fbrdF5rz3zOVyudxt1vph6ghNB2aDL4puzDzPN5uNloPjUfHg+Ki0FgCstc7Y7XbbhjhbmNNHT04irjb7J8+e/uf/9NWzk+uiKF6+fImIr77/9vXrH53Fhw8f/vDDD48fP5UUELGqqpOTk4cPHyqnULMQRHz88Im1dj6fv3792jl3eno6mUw+//vf3dzcXF5ezmazoSodwt5QbQxg6Q+Gt38yYiEiHDb2EUU+YLCHSnkA6Fr0//+6mHw0eb9LA+8lnsP7N4lJADVL7GtVBFL1CSR0hMaAJ3SGPcnIe29cZqw31llwBp1BlUc0hqwhYywRkXFoLJB1NqLpKUpkCR2AAfAIBsQBGgGDYgEMokF0nchNH0wBEYiQjMryd7+x0xkUFIgiAoIKViYQDrGNod5sbiRyud8ToPXeWl829Xqzt1RYm5W7uqlCnk9GxfT66jo0cXm0XK1XbdsW07Egnzw4bTgIAbarNuxvVteT8SwEefvj9b4Mr1+dXa9a52A+H+dZVjUlgGQuRxEZczHJ/cgIhBAbAMgsZBZOlhNLkFszyvOT5UlsU7lvYhCfYd1Udd2IJAEWiQgBha2FUZ5PJmMB3JUtGO+LRQTveB9jJKDcZ2BMiK0IGkIBWS6mPvchNgBiDSKBMZBPxnVoyRKDlPt95v1sNibgFENoW0NCBCKRU1htN1Ut2SgPsYkSvTfGiLRBomQonpLjRBKJE4kgdGojImTQCSdhsQZGhcszi5KcwczbLPfOOUNatJGI5MYQokpEGk0+rLX9nuzqVxHWyZghMcikVBEh1EUhICwhSIwUEsXkBTygB7QsKSUjYhCsysaLMDCnqJqv6q6kqqMJOUnUhBhRYBj/af8j9VKfOuAWBQxK6nJfEgRR8r+gGiAJkqiUAZKgYTAMvVdF/4NoDn86ZdMeugxAEbonho7GzQycJLECwmTASidFgwk5VlVx0Sm5kvcISUEyB90qfRtQA0bozM+SSOoeNobxqGiaarNeYYoptG29z7x5c0VgF3/z+df/9//5P766WCef7wHB5ZsQhKwYm4AMWkLDSYhRrIkptgjBUIMUAIGBBCUKITJCixANgjeASVIgFE5CCLOimE1nwNhWpcRmsZgU3u32O0JwDuqqLkbZar2bTghBMu8IAQF2212KkmVFTFFA2sCAYL0NKTLwZDZRcHg2yvLMt02d2jjKcFLksW2R5WhxBIIhcJ6Ps8lsujgmajfra4SAkGKsnzx68N13X+a5HRX5erVaLGbO2JTi+dlZCDGGQNYwcNtWAjCejkiwadvJaHL547tZMbII1Wo9z+3D2Xj19s363Xa9qqajWWwb7yUvCLGZLbKmLeeLCRKOp5M2hu9evUHHy9PFrtpQtT579/oXv/oVkL1a7x49e3m92hqy6+urUe7P3709PTmmzOfzeYl0vt2dPHx5cnJaFCNOAoBGYb7GkkprdrRO6kxNgKwhYJAkKJ3pR5elkSh4k0HrlJiEk7AxveqYKsn3TgvKmAVQOXEUIAADYBEEwahcPOmERCICkwSVmUVJxEAiSosTlf1976cTyr77Q0gxJGH5Z1d7XRL6oTjfbRjUqk4655cPYdsQkYwZiqFDKavD8Yn0k39mZhRtXyBSAiHTVX4MnFK609pibaJ2slX6oHdeIWgnJA2AxizLppP5crmUKLGK+21Z7ar9fl/X9cizyj0rTEv9nW3mY4x1Xfs8F5E8y2OMqpFhUmqaRlHO221VVdV6vW7bdrnMUkQB2O/327L1nqaT+Wg0oikiSwwRRZwzFnnk3cibZ8+fjvKCgJumQQFtzQ2qyohGRLTPmVKKoSnL0ppMZwmafd/rTTHefn1AalhzK+xyiAJ1ziGBlrbYi2xlWRaTCKH06WHuHXOomva2S4lknHGSkgCrTmfvG4W9L5TaERERgYrLdI1+xs6e3iC5zHnvAUhnkHB3sd1rgd5
vwtwZPtxfosOoD3tYORS5Do2ZWSKjIWQma5BMRETTRkQCI0YMgJaVt099B599KxZD/Xj/oM1wMJmTTkbu8IXdK+/ev/1nGkH3SrTfp1P6z+umalPm7tMR0Xa/Uyl2RIAQEsN+X1k7+9u//dt//+//vc6NiCi2TdnWSGb48+HJ8acB60o7gBS1XB6qBxXPLAo/n88B4OrqCrmaFE4/XmMQWFK61YFTj/J75FQ4qMXhwGFHv5cU4gDoD9xJKbVta4n0/Y5Go+vr6+XJ8Xq9ns0z7RluNpuHj07VhrNpmiIfI2JZlsIUQhiPx01IIYSiGLdtG5pqu90KhMKq8jscHR3t93tGnIztZrOZz5cvXrx4+/bt+bVsNpv19vzjT55+9NGLz//hb3/44ceHD09XN+v5fHl0dDKfL3e7jSpyO+d00nBzc3O93mfj6bt374wxoamfPHnSNpUCRz0zEVX7ChHzPB+Wor7l9wfVH+5n/uxCwg9Sejpc1eHX3jmzqz7Ee/ekA62y39eN/ed3il75Z4e9joZ8R4Va25XQf0a3JykRGAQCIQPUTRM6yz3b3Qc6ehIKCgMnlsgCait9eMQkYABBIkXaUQL1BEaj4TB1+2gAk3ZfVRf2hjOFOktVHkANhix5AABrLRKmukPAazhEgLqqsjwviqJuWxXZ07BXlmU+GgHAeDyOIZAxKSWJXJa1Ma4qK3WgVcTzfs/W2GJUEPmWkwio66xHhyjOUGacgFiiyWT04GiZ5zmihLZBREsoksgiWZtSWxSFCKaUnENrnYjUVayapjQlWqO9mgE4AJGEMQpTHyr0Qx/oeohorQFCCjQEA+ypuymltmmYOcsyDClK50SgwGgi2u5VzzPFGBOSMdZ7akKMg30bGl0vCmEdvhtjbnXFQEgE2hTrtnHGipchtBPhvaNfetIh3C45PNycQzuut2gEgM4/T+e4Q2vOGOOsAiTSAOhH44iNIBgQ/W3FEhtgB8bEQ1E0TCIdVq07O7R1iirp0hkVgbJaFVjclU2Cen+5XZwgjIgfxEzCexqet5tce63QHeHYz95/avPTQdg5PJhQbs8U0tK8A0GbYUDblZX9H1dlxQyGXFnt6rZlxNi055fnf/Znf/Y3f/M3TSvZuEh4+7LvNW+xh8sC3LHO1tet/beUQLSxY4wqGzYBRrlTFna521dVlTu2NpOeZpdCTKkDZBoDamAy0HuG8HbYptOeuQ7nvLVt3cQYqSistQ20uthSSs6Z3W53cnKCSFdXV8oBPzp+jIg+y26urr1/ut/v1Zs6xkhEu91uPJo1TTObzS6ubgZfa+AYQqhryEbWecsxnZycfPXVV4GZ0GJTz+fLZ8+eXV9fv/tP34a2BAzn797FuDk5Wo6L/IcfzmIEQ+8ePXr40UcfffTiYzIwG8/P9uVkNFksT7PpTMjPjh4Q2tGoMOPR44enVbkLIbx+8xZdNplMVvsGEcfjsYpdDNnMId/3tvQ/2H2H1w8+zMMp0u3X/U8GlH6LEOL9IMqKKD5YvB9a0O9F4vfW9p3/RYCfCXs/tW0+EML1gzCAKARMZDXC6bnWBzxScIQ1aIyxiMqbISIERrwt+IZSb4hL2EFjUPM+VlZcFKOc9w5BDUoFxVtfbP1WhFTd+QAZKHJL30EVi0ua8ZmUovbliSjPR+LYSOdhrVLr1rvxZMYgkQMy+zzzxk5G4+1+R8Yws8TYNE0xmgyxs6qqEHi9hvEoFiPIc5+HPISYUmiaVG/SuLDj+ajwtqlTio1zbj6fVlVljSCL99Z5I5C64xLAWjtw8/O8YObdtlYriRijug/oh2atxXCLCkFFEmkjmCgyUIwMZJw1xsTYfeTWWpYkiQEghCCQEMAiJUqSMAk7Mpn3hc8gsTGAiIHTvmVGznIkazEBApBgklsP2O41QD+TQwKgmFKIHDglZlGminBgtYDobK/vbUL98+EUG06ue2mpaPfk4P4iwokPcZsa9m4N6A0xM4kw22HTiggkJmd
VYpJBhuL4sMfQvcEuUetmpYenxnCkHwyhYZiT3Z4pB0X5z+St72/Ae//8yW0Pd2KeUhcOo869o0WnGPceVkSisIgY65u6XG92KICG1tv9f/yPf/H555+3rSwW01agLBvp1eEPlsE/cchAP2PT65qBaVwWAXUtX13eVPtyNhqNc6yq/WyeaVXHsfNMZ1HDljsiqIfBT1fToJMw5HzDcMs5p/4w6m6vQC3v/X5fFkWx2+2Wy6UiroGjMSbUzaChWtd1UYybps2yLK1uZrPF2cWVSj4VRZF7W+1L5ltNpVExyvN8vdshmOl8FmPMAJ4+ffryZbve3EwmWdPu//Ivz/7tv/31n/zJn7x69X+bTt35WXj16mw2W8zn06LIQgjv3p6/ul7/6tOPLzdbm41vbm7akMjA6fHRy6ePT05OYoy//d2XTerw4Sml8XisdIX3U6XDgHf49R381/3p2PC/9wbe710I0QDK+/f/0LL9uTrvp/ocP3X9g4v8n75obabKDqpVdfgKbnuYRETkDBGIQRi46r0TNJrBa8wSEXS95A8wDm4Npu9dUWxLf+lGHaqABh9CBg4nMDOnyCBoyFrjFOZ3cnIynU6NMQpG6XFNpJQ+BQQrbmU6nSouQAmqeZ5bq+8M4eA1D6P+EEJVwXgMxkBd15vNpixLAJhOp6cP5qPCa0KaUrJERZZr8qUSVERgLCVJdVuX1b6qSzVgE8Q2xhACAPgsG03GPs+Fer8bBAW7yt21S3dVnlUhnjv7aW+9s94hgfPW9GatwpFjcs6lFIalaY3JnffWEeCoyFWYjRlCgCaExCCklGrlqRhBYsAkkEQBO1GlCZSJ2LZtCEnUIsFZBKNwgBjbnuSDqkjZyfxo/B8kLvGOguXw7gYF52FBDlFzeOq6rve7TVXu2qZKKTBHEeGYOLaxDaFpY9uEtglNG5s2hSaFJsY2xvaQbthvraRW1kn60RowA+OdS5eHDaXboHl0235BsAgGZPjRW5DTB3/gJ36wEyt6/6fz1UMBBJYDwXsCoAGPLd1PEhx+9AESgwimJAmwDrGsGgFyWb5ab//m7/7+P/yH/3B2dmkzJGurtimbdoheh+aLt4eG3JZ6d66zaGtVpWygM1bkycQRUVmW+33J0On51XV3hfoudL+JaACX6cMOpJc+C7wl9hx2C4b8z3szwFJ0u2kNd3Jyst1uvff7/X4ymaSUZrPZer3WhxqNRlrYEZGiu6lHXSnfQA8NNTjTurBt2+VyCQDr9VpPm3fv3nnv/9W//u+Kwidul8u59/D23Zu6Kf/oj3795PGzoyMnDJcX1xcXVzHybLZ48ODRfJ7PZrO2ibPZPM/zo6OjplF2hFjbqWOXZfnDDz+cn5/nee67mfzd3PRuv/0Q/HGYzN0bN3zwSv+t/0Sv4r37C9CHaTMHi0c6OKneMtC1QX/35/ztdf3pMf8g8tNhD3/icner929bW+EAygS2CIbA6Yxcqz0DegQ5QqV/dYYgBp0hpwKBBPcaOfeCn9YIrMN6hhQ/QJ/q7w0HbL87/ztsg6Hs00tRjLMs08PROVcUY12aIuKcizHuqtJ6N5pMrO8mXsV4rPS4zkwyMXBnDKswybqut9utMIYA06nLsg4eBgDWGtVQUNXNm6vVer221p6cnMzn874SFUBJKai+FnPby090NIyhlzKZTAbLPY24WrPKHUMc1fXw1jiyhqxn5iZoTn3Lh9MjwznnnClyr0ORIss5JWRBYAIwCM6SM2gQs6xrEzoLANC23LZtYsUlUK+5fruiNK3WlCVGrkNsQoyJGUSIjOk+24E8N+yrQ62Qwwc8/N/3DRyGvzq8/ZDIUdd1R2lo29gGjq2kwDHFtu4iX9W0dRWrpq2btm4GkZcDckM8XGmHS1dv0R2pLj5q7GmRRBJ10rvd/w73xMNRaDdEBFaVgfd+i/RAaH1m9fv92d93jjANb3xLtDyMPQBw6Ns8vMfUr7H1er2vG5eNqjZ9/o9f/L//P//
x8vJS/1DljbSjc1gWM0jqsUA/U+118kamm9L1BEo4PT2tqur6+sYYKrKsruvttiQCpQrgoLrgvQYbDbr3GJ/Ygwa0OBuqjSEcQg9zVfCn1pHaLG36nn9PTKomo1EKcTKZbLdbg6R3AwClAehTh6ZFEaUhDSYhyuHTSbM2QlXYTCcj5+fnbdtaBx99/BRR2lA9fzHdbHZ//dd/PZlMTk9Pj45ORODs7OLmel0Uxenpg9ls9stf/pKI1NraOTedTququri4GMr0k5MT59w333y32WyePn162DwfPufDs/d9MvS9FAHgA73Qezfq+XPot4DYqZcNKp2395dB24wAgP+plsdPlUnvv4thn/7zCQyEunYPnpXvSIHA7UI3CBbRIhoAzVs7uRYCADZIBjsbUXUZ1KaEHgaA3Cm/AEFnGISICh5Qjx4AAIuCIESqFKMvQzV0oe8pYYfzMwAAMTCisRZlALULauzRZap7wxnnnIs2NbsWAHb7PQAcHx+Px+MQQmJuQhiNRrptdutNnuehbTnGsqyt8fuq0i2x2WzG4/FksnHOESJZa61PSIi43+/2+31CyHLnM194Mx6Pl8vldOwgtTG1VhGyJIwcJYIFlzuEzt3NWtuE2IaQG6PRN8Y6MZPpas1uAxsrAIKqvkjGAAsIgs8yIkqh7XDnCMIAgm3bjsdja8l7j8Bt23JoiTq7bT1ryRpLxhnrDBkU4GgR8pwicNVCG6KxmJA67KwAoFExLADwzjUcWYQFAkMIqY0cGYAsC8aUEim0l5hZEkt+2y0carXD7TRcv6ecJCKq5nO4CRFQh0bDRwQpcoCIUEmy1hrrnXNkOTEQM3BkZykatsly1GaYGEYBAgFW/2gASCIKmtCXSsPrJIgdeq2XWTEoCCQsAInAoABAQkBCQAERQlVWERC8/Q0i927pbkfR/r9ORghAiR4gSAAscO/30C+STq9WK06AvtWpiTINBBKNzQDMfQBkFhEN/3XVpJSaOn759Vd/9r/+5Vdfv2aBYpy1KVZtS0Qk0Ah7X7QxDjkfdFP22/B6p8vaw0xExJCSTRgRc+e96YQjYoLMkTEmxYgA1oL3vg6liKQEOtIDaLpTqJMi63Kd4XAfGpLD8U29aSUihhA4Jf0rPRw0YdrtduPxWLXHbm5uHj6YalyUnrrHzKFpizxPISZhBcE1TSMizpimCU3TOIMdsYGhbVsCtEiIOJ1O97vq5ubG+9y6rKqqOjQPT5fr1aiqd8vlFFLcrTfv4MePXv7q+ZPnmOjq6iq2bVu1q6vVN19+/X/50//+zY9vHz9+DMY8fPCYAUfFBEDevXu3mH+qDaAYeb/fP3j6/OHDxyxRwKhhgEBiFgBA8sOIqf+NQ4v+XsMT+ubk3V7lrfojdAf8sLqGh0SR4cpdtV68M+Jm7DzNDzW3NFimvtH6fmB+/3YtZeH/Z0ZHPecVQZGsrB1LROxuRyEU6mjsoi1NREFKhEKYtNRDvJ87DHtBpVtS4hQ7K75OuvMnLocfwUGEVmoaaQ9E21zM7FwWYwRAaz31RuRNHRR1UlXVriqNs4ujI59nddNwL8iir3O/3wtzipFTKsuSiGKMIhhjats4m82Oj2dakAFAjG0IjSZ6ux0AwsPTB3/w6199/PHH8/lUAaj6yo2hLMuKorCOWGLbtk1bDUe8Mgj1nSrzb8iphyChmWx/4htEg2TQEJH13qOxIpIEVAxVT2oVJISBww6oqs1ERCgoAJyAxQA6S9YYSJFjQJDM29yhACSGkORQuQNR7YTIkjHGWCTFc4qaKQowCKBhhhgT92KhBKhUxcMI9359cNiH+PkWBXY2BnerQBRgiW1oq7qt6tDWKbaSWGKS0KbYchO4Ddw2salTEyRECZH79ccpqMAQSJL7bU/hXnVW0r2yrE9hVbFJbo9+1LIssYhgx2f9ud/U1WeiGHDg7paf+n0YXUhuJ3zDf9GBjOrhqdHdWbqX17YtdOWUnJ1f/uVf//0//O6LyBC
CQqK6/L2JHBOgMQP0oAudh7jiD10GoHVi0FA0Go3m8/n5+SUzWwNaFeV5Php5RB14y4AIGGZ196r8oVPCfF+6CHrcUCfe1Au7M7P2CfX2sizVAGEymdzc3CCifq0pBIMUY0SBqqryPNcBR1PVJKAfl0ZTvT4ajWazmd6tLEstBEejUVEU6/W6rus8z9u2HU+ypt3nhXvw4CQvsulsslgsdtvyL//irwHg008/ffHiI0JTVZVWjdvt9vr6+smTjrHXNI22br788ks91i4vL6+vr40xp6cPhxSwiy6DsOqH9K3uBZUPx4Hfl3iOQ4X38/cT7IdW9+7Yh8l7se2wtoP3Yt5wy63f3nBwHD7c4e16UeM7QKM8DO1BIIi1xhlyzmSWnCXvjPfGWeMtO2e9s84Za423xhpjDDpPlsgQWkNGqSLGGKLODUTb+n2LTGO0ki9Axbqpt1nDqPk1KgOp0z+l/lf/KXdCmNYhxRibEBApy/Isy5m5Lmsiurq43qw3j04fX19d79bb/a60xjdVbYwxziZOz54/L6syxBatmS1mz54/z/P8+vq6yLPZeBLa9vzsrMiy6XT2zTff7XZllhXPn30UWQxldRvKfb3ebKsyAEGW+cVi/uTJsTHQtOXbN2884cuXTx1JaKunTx7V1Q4JDWFKsaqrEKMAhBi99XlRING+3FdNa51z3gtQPiqaJjCz95kgsrCxzjrnUmWMcS7z3quFoQAgqmwmN21LxuZFAYJROMvzPLNXV5eLxWy/323Wq/ls2rZtnmUgXLdt5jLvs7ZprfNFXoSm2ZYxphhiDCEmIRFJDEnYWBdjhxsCSW1oDMJkVLRlJSIg7L3JMsOJraMHJ0eTyUhiFEhFkc/G49xnRGIAnPfGWue9saan4Akg+CxLzAJinTPWsAgSWuew02tlIvL21lZJ3di7xXGg8OIsEgKnGGJgTiCAAixsiUTF8FJMnIQTCkvihjtXJTVfR1F+kOHEiKB8DA15oAZjfBtDOoIcEAJySprTCicRViIjp0QGoPPnArw16RO4y0saSEkCbLvuhjAnAgEASyjCOilHYTUlYU4oDGAQFLUKoNBSjp2mgQjoeEBlTllAJAAaQ4QIzCAJmWNo66rcbtb73R6Q3r599z//L/+vv/irv6rqJqZUjHMAjMyBpY6MhMaZJCg9KA973oJOwhVxpiEHRTglALHWhhC9tYCSWArnTo6OEfH6+rpq2VkCEGDIvDeIKdYI8PjxcVuXTVs1tRCBcz5G4STzWaEITxUtUhuExWJxdXUznU5ijGrTUxTFfr8HgNC2IKCyv0WeZc45SyAyHo/WN5vT05M8z3/88e3Lly+//f67Fy9eNOX26dOniLDbbsejom3b6XQS6ub46Pibb7+bzmYAtC/LzGeItF6tivH48vLy+Gi5224B+GixPD8/m01n65uVc85nGQgmTswSYmqaZjQZO2fbUF1dXTjvjo6OyrJ+++OVIbNZ76uqyfP85OR4NMoTN6NRsRgXl1dX08UxkB1P519+9dWzZ8/buhFO0/Hk7dsfzy8udvu2qtvPfvOHDx89dvlU9cyGmnXgsN9LK/ujuAuKuub0xpTSHYrOYS8QWbSL0TXxWUAEBbWVpUtNnVJ1qyDrNhdUlDJAv1al1wcWoGHAD3jX9uHu9Q+N90DkPSSn/DT0+fCDOMxrsUeviiQAAjCaRBMIohgLZISMOENWEZ5WDCoZUsf7qhos8CHotvQYWQERJiF9J9j5rd8aDH0oL+l6x93jaP1ugUIIbYpqYjDgC87PL4loPp+rzEEMHCO3sZxOp+/evVucHP/q15+uVqs6ttkoQ0PL5VxEqqoCFki8XW/2+716N+t0+vp6U5bx888/35YhBqgji+BiMRuNxuh8ua9Xq9X5+dX8uIhN++L544enxxcXZ/NxvlzM3r37cTYdNW0VQ1JaxYC26tWVuuRUs1dObGwnIlpVFVmn04K6rqe+W6+qVUyEjjJBJOtCSJEFUky
Ryd36mFu6dVdHRNMPPCwCIKOgNeQIwZg8y3JXtQ3UwAigOEYCMNSz4gC0DHKGvLUog2axSqkQCKSUmpAADRhEMtChk8AYQz0EKYRg+8Emfggndjc9ugNe7eOcERE4aHt29AlIIgkRjCAkTrENKCmlDgVKKGRRQNQYBAMgJgYmw0jKPxdAkiiAmFAgApq+ldI/xL2i83Z1c8fx6PJQVWTl9xPQD/Yt+jcuwF1DU6s9JAANxgKE3XVg6ZqoHWNEIS1wq9rX14Jy//GxG8OmQAIgiVNIoXWE27b+7vtXf/U3f//11183datgbmbWd6yvui8P1bqpf3wEFOB+ojmcOf2XpYuQYowC7B2pUYkO1RA7nV5vyHtPzG3ThdBeD1oDtx7frerZ3oO6AICugkGHVkslRMyLoi6rGOM4y4hIOBHRZDzZbNZqlF3XtYjoVH673eYuXV5eemvLspxNxk1dp8xrv8QSSeIY43w+L8tacS6ZdYvFQvtD1npEnM/n6/V6Np4M2ZiI6JBmv9+vVjfWUZZlKnhWVdVisZjPd99+s3twCkdHJ5vNxliYzQtr7cOHD6/PXtd1u1wumySLxUL9ZE5PT/eb6/Pz87Ozs48//rhsvrne7Far1WQyCQcivXKgjntI4Lu3yw5O5jtL5Z+6oKoF6ZMA6PmsmiK3/DxElE5UqMvqDtuVCYDuGMndURq6V5IOt/AdcUGAQwKDyB1KzU+9dm2lCIqSsA7+PAESAhOK1Tm2AYPiCC2hN2QMGouW0BKSAWOJQIjQEhziwKPcnhFdzOOuxOvfxvBmoIe39BPLBERDnatoDhUa0IY+MLMB0LGXSvEBQNtGHSxb45M0N9ercl/r6VzXtUP78OHD0WzqnNvstg8ePaTM7Kvq4ZPHAJDaoHDkpqpTjNPROEq8uLj68cd3r354K2C+/vpmeZJNJ8upy66vNjfr1Xq9IZ/p5zOe+BSq2Xx6fLIwJFVonB+PJ0UKVZZlZbWr67IoCusdAwIZQ+S9R8TAzAg6nBCEJOyMXywWZV2Vq01uHRHVoc1CK54ADSOklATQWuudV/okJRGRFBMzixp4Ixkkbd4OdHXNwkH5mSwMrFBAg5A7P8sziSGEEBO3AKRzXhFJUTmFKTEKe2ecBeCYON0uGyEGYMEQYoxsUJihjaFpmsyQs5oTYRSOkaOw5qSkTSrRF4Qq5CDUdUIs3l8/XW8TSUQk3mJksCPnquMAJtXbDKGNUahBYEJL1ljLkAiIYrcCUfVIiXXkxkYcEDF2jURtUyYRldwlSdBTWYWN9FaUBCiiWmkCjNLjzxgNESFRn0reOga/H/MAVLMFQBDUD14nt6pYJF2AkQPAi7ZV9TsEYB2qDPFIOmfBhMOziEhMMQQOrUUg4NQorHh3c3H+13/5V//5v/yX1bbOirExZIxLKdEtM70rVJkZTNcoOmy0DtN77HUNsHPoRCJqYzQAo1HuvW/bttqXKSTnQBIwg8ud9z5UVUxgDCgYMi+s97jfi0amPVTqyaW4ZWZWeXdFx4Bq7/VStDp61PxS/0q6KSAURXF9faWQt6qqsEdmrVarB8fj1Wr18vlTpfnGEEQkz/MQgvLWjc2W06P1enu8OIoxVqvV6elp27bO2JTSbrebTCb77U7vnBmjQVqTwxBCVZbe24cPT/Msk8Qc+eHjB9tVVe3bqqyvL6/ywgPjqx++m85G/+bf/A+7sx9ns5mIIJjvv381KsbOuV/94pM//8//6d278/Oziz/8oz/5h999y8xV2Rhjgyjrig/SLGAGY27BYgezvfvUvd//cmfy94G4QkPZM5RVQiiCKIBIiVnPjaRiT9zB/hPc5rj3mpkfDHvCBqBzRYF7Ae9nAjh+iIR4+JY0WVMwiwpPd0hOYoNERshgR2lAGACc2JtjcdIY2AEQpFdaEQERElZlHNVQSIiYKA1Pqn/OBIhd4qyT0sNur5LZ8yzPsowI+xlbeXJysr7cvLt5d/b6bLfbUUIiUl3aJ0+
f7uvq4uJisVigNfuqUjQUM4NhS8Z7JzFF6qSrVbQ6y7IsH7940aIt6qrZ3KzLsg5BrMVDwOryaPby+YvQVBfX68ePTqbjcVXtT06P6rI8lP+PgVPiLHODkTpoPdRpaXJKaTZb7sr9drvvsGo9HLRvRDBLRGMJ0ZBlMsYIoWEDYO4M9o0xkpTVYBFAR5Wgg5yYGBiFUwwpJYM0HXlOWRsCSwyS6tjlbTFFTxaQOQUCMGRIJIaAgGRUUY7QGosG0STmqqnJmZhQB66ZIUKvGCD1T4C+Ehoy90MQ2sECpmFB6qWb4elaMrdpLPaqMYgE1iKmlDo2JMTYVrUxxiZLnIkx3G/yyBUmtoBWB/EswEo3IZEAAtL5PYnS1fggDB/+1uqzz9FuB7o6oz+McEPP9nBjDldIgAlQOrAPAKBgQiEFAQjoAS8AqjQwTFtJALA7/jWPGR73zgmi6WSKCuLlFJt6X223Zz/++Ob1m+++/frqqs7HtiiKqmmdcyEEtZHVfpZmOHxXmaMr+N4/+jocUDdkIgBnScu4zkZDgDR2YsdY0JWZ52CtTambCscYmDkvMs0RlZSpoBJFYNZ17Zzy2Q301HhFaZpOiICMMaFtPGEIoSzL0WgEPekzz3NVM2dm5qLa7Zl5Pp0Js2pATyaT3W5XFMXVzc10ulDZF01S3707K4piMsqVzFBFXizni8UipbTZbMYAo2KSZdl6vSXj9ImMQURcLBZluVcGgnPuwemj6+vV9c3lA/fAGHt5cZYXT0KsjDGPHz+uqjqK+eKb7//Fb/6Qmefz+X6/L/c7RTaonrAxpixLO14OUX9opcgBcIwPpCH0W3p/ECYiAB+Ogv3G7MOE3tjJj3USLQdSLId/pb2Jjr0i/XOIDhCgW9h88BoO1+1BeXfHJkWLKDv8ze9T6oGy3BG7Zu0tPj2J3EY+Q92JbA0qb49ASFUfkfQ6GSAAItB509Cb6pVy6HZrdPuxg4ERAoukqF+SaNgzxvSgbmbuUJsHb1g9/JKIWKCuSBKp61CWtQ6rh/2w2WxijLEKeT46OjpxSKvVipw1zo0m49999eXy9OijTz5++/bNeDTSYbXtmMq83+7qulU7ZmY4O7vIfL6tmnfvNvsGiMBnOJlMTeb1wzTGfPTRsTG4WW3J8GRaWAd13dQ1Xd9ctm1rjCOyKUqMLETkvPFumEKjIUEQ5pTSdr+bL44nk8l4PE6CMSVEY61lbnQhqW8qWUfKWDC2k+uNoaPx6cNKJAPM0Ri0znBMxpjYtNZasq5JgZOQdm8kGWMMRE9SeBOEAnKTmjqBJEigc5nUNdCYmVNolUqoIUM78wSIgWVfNQ7zcab8vxBjZLbQCR10MPTD3O2DYQ8OKrzD/yIijgkRjbWHu0JErCpBiCFAg5xS1yJLMUjqWG7G3Vr4pgajcIOCkhSx4nMG55CtMYYHZXTFOyMCGOloW0KkE0YF0PcngE7URHRqj5SAWeA2Y4NuJsLQb3sYDhIAABThbuitnwziUCzeuWcXVrVbw52sUZdSdtlAR6sYoCsACRKykFayHGNTV9vN6uby8vzs4uxdudsUGUynYzGYUvLWrVmcsi8IulfcF5E/dSzeptrcRWeNeaPx2Bhsmia2QYkEMbYpgSHUSKCKENbAdDq21g6KBfryVUXMOdGmxZC6abAcjYoBfjkQ11arRlK5nM8Vcl3td5PpJDT7zWbz6NHD3XqjEgca1ZqmmUwmTVlZpKurq+l0evbux9lstlnvxqPRfrsbT2fffv/9y5f25ubGIKU2jPMiy7LVajXKHzB2JLLNZvPw9EG129d1bbNsNl0URXFxcZUYFovF9XrlnLm+vp5Op9vtZjabVVUTY/z66x8+/vhFjIk5NW0YjYuPP3l+fX1VluXy+GQ8mZxfrReLhVZ+Z2cXRVGsbq4Wi0WMMSXJ85EyKKaT2zLm8PAfMssBGdR/X3Bw5zvB78PxQlGTCD1D9KDUw171p5O1hf7kZx3G6VOlXsP
hIKTdYjKh73keMPYAdHbeea/q7pH+nwJa7X2w1Pt9Jnz6HgSZAQ61r4jI9iotHYBKryOpOBkRqfKv6Wg9t+eUJT2YEAD6BtbwnlGEOSEiIHbuGBr2ellOPV+gz1OIk2pm38PadvBrtThHxPF4fHF+qUwX59xkYjbN1lpbFEVbVuvN5hef/qpJ8ccffyyr6g+fPlWe6WQ8BsJQN2UMxNLWjXKVMl9Y62+ur778snn4YE++AIDZDFJCAWjbxhKOx2P1wQK8ef3Dt0VRvHj2XGGu48no/N3ZZn2jtARCmzgSGbLW0O2UAg5KARHZlx2bfjabbXZl27bWmpRS4KA5r4hwB6wHkZhluXPO5xnXd0Q9eJBiImOtbWPyxrZcE6KCzYTRONe0kVPKsoxjLRwIJDOUWcmdyTOWCCmISDKIxoBVuFu6ld1jFgAFbUICjEnquubco1oN91tPc6l7iLthH76PwUspSQ9/BwA6cBPuttRdQz7u9TNBcCAUa9iDEAA6lqfern+bMLYalUJKgSUmZJGMrWUxhhxgIkTsSGmIUVW4ukdm6nVn+qLzMHG+0645hBXcOUfuN3PgMEk/PLneLw01jAH0Zr9yqybIKQFAlxseZMcpBUOgSpehrfa79frmenV9Fdq62pcGYVQ4Zo6h6b8uAFR/XxQQwoOTUr9UvIMUvXOeIIJ0mATnfZZlovb2MWnFZi0FZu/NUOqJiHVa6iW9kZmN6TDbeZ5nGQ8gz0OKy2g0KsvyMOwhYl1DkUFRFAJBxTy997EtRaQoinK7GxzBrq6u1FdIAZ83V9cvX74MIaQ2pBi1P+GM5RhFZL/fe58rxvvhw4dXV1fb7XYyGueFtw7Laq+uYQq6CSHoxLEsS6XxpSSbzebZ8yfz+Xy5XN5cb51zNzdwdLSeTCZPnz7abK/LKlhrQmg0P8jz/Obmu1/9wR8i4tOnT8/P3j179uy7b79+/vyTlNJyuUREhaEO1f9hkYcHVBNdqEPNhx9iKfx8mOjuD0Ygqvwm3Bkrp4N/Duc8CqAw90ou98q4g+B3sMLvFXlwN1Hub1eExz+n1Bvexj1YAaJaGyAAI0mnOoaIKFY1vclYMtaSMcYRdmLvJKbDZPZdF4BInd6KdFieXsCbb2ebLKpPjSklNrfMdBEF9GvDp/fwA0A6jOWUkjDfGopyAkY+Pz+H2DUPHz58OB8tNBtqqmqxWBBRva9TSr/5zW9OT0/Pz8+VW5pSaqSs6jo1bbUvOcT1ej2bLh4/fmxNVjevvSuy8Wy7qYKI974YjWazhSsKPUarqqrba2Po9PTo+HheVVVd1yypLHcqLOuzgkWiMFlrjIvxNoQjKvBNG6HIzGVZThfz6WLehNS2rU47Kmx8kdsDRBb1Bj26QzQJUMEzY1zqdb+UwJBCBEeEklIi2/keGGMQIrJ4Yw0BARMnYYbEIEwiBFB4G5iJ0PuRQ4ltExkMACAZVVjo1OM6oHlV1Wk67bN1YWYlESnW4FBcY1h1h2HvMEvtBxJ3jtd7WJghamJqAYB0wQAxAAOLgHGemQMrSvX2BTAiJG4TpzbEyJIYATxz8kzGOGa0BtFA/xRREgAQWrzlFHZEad1Pty++M165E8Duhe3DSCZ9f1Krvds9j0N6e+dE6D4fc+efiGgIYeg+HXjq6hQwxVaIjHDb1PVuu7q+Wd9cbVbXsW04hSLzjqrdvjTZKM99WTcahkGtHEAQkfWg+4mLvmbSaaQxAOAMOufUVzk0NbBoVBuGHTqrk8AI6Jwjim3bqvqJvvgsAwCo67rIx3neaUQg3mbhyg3QOKQLTKvDlCDPPRG1IQ3gfkScTqfKOtBLlmWac4Mq+TGrYIXCQXWwp0OQPM/VuQwRq6qqqgqcm06n+90mcz7LTQhxMpmUZemNPT4+3lXVZrPJR8XR0VH55u1ms8l81hFgGR+dPsxsRrIfZaOPXxY/vlp/+uloMZsJV96l1fXlaJTPZrPxeHx2dqYxm0XG4/FoNLKQqqp
6/vzl9c366Oho34SqqoUxpdhvDR5GJ8YYLS+YpQMk38qg0Psx4meanP1JdScEfvBe9zTwpLfPkds51xD0bsNehDt0i+ERIt/vD3VpXCQ4MP/8PdE4v+9lkFI8lM+4c73vXx3efsCtuW1V3dvzB33Ln7scfhaHm3wYmOmxDgBVVV1fX6sH6W632+/3y+Xy448/XiwW6/V6NBotl8sffvghhPDrX//6s88+u7m5AYDZbKaTP4VX3dzcqFX6eDze7Xa73e7HH3/UqcB0Op3P59pwmM1mi8ViuVyqwplG1j/4gz948vQRABSjbLPZfPXVFzHGLMu0jy8inER1mXU+cfg5DzomOgMjoslkovoyesswGDvc9ofXFSc5CC8NMaOT+ELSTavGDkrIhb5aAoDceWcsYqcZE4KEADGCQt2stSN9I8YggjEwaIYNX/qgYThYlXIv7aapyWGxBXfFWX5iB94v8uCgzht6pPcEzO5tVC0v9BNWrpV+SvriQtuGtm2rOtRN27ZqtKqvmENMSuljVozcIFN3z7T93tTh8AUfMlA/GCfu/fP9BS8HnYDDJxpu/5n/PbyupMMYQ6irUsfg5b6p6rouRZJqA7Uhee9VyeH976X72N87IgTvvJ2uS2Stdly892VZVm3oA1Kqm5AS61RvyHuGLVzX9VCyONdxzHWiBn0y1E1erFVwCvTaZkN4gz6kKVyTe+Hy6XSqgoLU89l16eq30y3UplUfBp0RKiGvKIq2qtW8RU+Dm5ubyWSidWrTNJvNZjqd6hPpvG2z2RDR0dFRlmVq+MDMCEbdQHWbAMDDhw/LGqy179692+93v/71r50zq1VnIfvdd989evRIyX+bzUZEdAGfnp7e3Nzol6UhPx0IPA5L9N6xKR9qG/wzw8F9HvZP3fO9gxxFuuH3B+/DP325t9cObzT5aHL4yobXd+/K7etmr4IbBIAkCIBCxGQoI/FkKM+cy9H5ZIvaFcGTJ0PWsjFsnBjL1kbrxFghw8aKsULYD/lIiMmAmA7hyYQMlJASUgKMERJDEgImYENJcYZkGYnBMJKgIUJA430OaASYRYBV8JEQKREmBAY01hnrSahpYls2283+9Xc/bNe7ly+ez2ezstmtNzc2IzOdUeHz2ej44cnTJ4/rcg8cnzx4QDE6xMu3b9qq3G5W293mwZMTP/JHjx5/+fU3ZVtP5osf363KhkdTszieVdV2PLbL43FRYAzrFLcCZYrbP/7VL72h5XwuzF989cW23I+ms/V+f/r48Xxx1ITUVnE6mWUmr7cVRFksx03TMEvbtrnPnXXrzZrIGmtSkuVi4YzLi9Hbt29T4izLfDFuxcyOjvdtDDEcnZwggXXG544MuMyxSB2a0XiUT8ZlXWK5S21rEPMs8863Me72FRq72myK8cg6V9b7LHNk5Pr63Dnc7DY+901I602bBJzBGCFEaNqQIiODiKnasG9iYGCyJYeohgtJXExTZwtnpKldZgu1XHLGOpdnGVmDSM576k31uuHcbXkqJGCUcR4TJiEBTEyABgnv5XoAKoGC1oDpRWVBGIHJ6A8gCXU/QKjQsaEN2B2pEii1hpPhSBgRgoSmbfbGgHALwgyROQoHICYS0zQUA4aGUms5WonIEVMb68pIskYcdTo8yJF0tJYSpAQpQkqk1bMIhxZSQmZkJmFkvUM0Rt9HAkmKo8YO9RWHK8Jx+F8TA6VoOFGK/U+CGAwKxMAxSkosLNylHSk2BCyxLjfb7fVFuV63ZUVJYghN2dxs1mVTicWIXEvrCh9upCnD4wdPL86vVDMTBdoQM28JxQI6ACPkGJxYx0agJWULchRk64gyR85cXl0BCgsU3rdtmxIfTaccU5FRasPY+WpfjvOiqfZZTqHhJ8+W5+9uHj1ctE3tkIABQzqazVKzNyCA4J0t28pYms7nVzfXdQpNCovF0eX19fHxSV6MLs6vZjOHZCILs6QE8+NljCLGJJYoHEOoQ/XJRy+vb86Y27b
c5g7nk+n52fl4Opsvjler0rq8yEYQU4ZsONxcnANzMZpEcVUym4rf/fCuyPwnH//yd7/7KrPZeDLfbsvZ/Kiqm5v19uToVAS2m3WRZ9w2EsPb691sMVsen+TFTMTPZsdf/Pbr3a40CN6Vs0n28MHi66++evf29R9+9tnx4vhNhWY8P7u5+Rd/9Bskaery5ctn6/XNq1dvjC2ms+UXX37z/PlHf/c3f/s//Kt//fLxo3y6iG1o60YSZ87lPiNjhHkwhoNOM0Vnxt1AslNdUd42AiCyIqZ61lynSIGI0AIwgIAwQFKmKgiIJCWtQhfYNJUW4Qid+isJY4oobDkZFmKxKWFiip1ULKRO0xZZQH/0RhZIkUWQhRKDJEwqVcuo7p+/L29Pfo/+p4gIH+riD0n9LcMM+77/nQvc5ubdGffTWQAcHGfQKwuoz0hKKqCutmiMSqW/m3jiARdQxav00dq29T73NrPGda155xBNIhtCmM0m0+l0v98z83Q6DSEAp+vr6+12u9/v9/sdAHjvHzx4sC7bJ0+efPXN16ub9bNnyzwfA9mU+NmzZ6PRaL3dXF1dFUVurc2z4uOPPx47c3Jy8u787IsvvtiXVVVVV9fXx8eno9EI0fT5OKCwangOWc+9rl2IAQH2+/1icWQtzufzum6H/LRDavW5rd4CAILdg2hSrxnxINOsWAAdcugYY5Cu6DshrDWliDgHIQA4l7uwajpBPWMtESW+nz92G4RuGxoHFZ5JxgROGRu11zgsyESE+m4hERHS7TcrOqM6hGLfecZ7GWu/5G7Hfod2WqDNXmd10vl+ccadVwPHxBiM87llD2gIxBCw6fgSpp8vDi13st5aqyAjiFE1UaEvx+Uu8Fjkdr4Idzsf3SX+nDg93kW0Qt+rgrtgVwCQXlhLRPhABpOZA3Nbl2VZqliJdLg6aVNUeT9rLfosEYXYaQIwM1ojMQkzvOcs2OFDBxMzASQwhqxzyjSIMRLd+tCor4KItClNjGdOhyNeRNTtD6BgKO66CZacc21TduczoTMWAJqmKcuybOr5fK6VJQAMtIROfak35WhTAkxt245H+a6uj08W+/1+NBrdXF9D3wkYIDPe+4ajrtgQgrNdhyaEAMYCgDGmqmC/379+/frTTz89O3vrvffeaU+ImVU8U+Wclsvl9fW1CCiSZT5f6tiibdvr6+tHD04nk4nqSjNDSmm92rLEX3/2v1utVsfHp9ZaSWyzjmh4dnY2nhT6RCE2i8Xi9PRYO7H39shwuvYMwjsqX7/nOOz9xwQAEBqkzw8rSDnoZMJ7Xbr+Guq6Y2Y9LrrXefdPuu4ID3dAkW5ALF2Tk+H34e39s97qUJDq0WSMM2qEc6fDCV3l1QfAnmulmB8DAEb5QtqRF+G7Fa4ueoVIJGQESiSIYhEkJTaYUq8DC+qJeih1mpBUKZLu9ZHm87kzdpyPiMgaXxTjGNiOJrpWiqLY3KwUilKW5Wa7+fHHH7eb1X6/t9bM5/OmDk0dLi8v67rerjfCUBTF+fnFbLr4n/6n/+s//u6Lb775po3BOVfX9fPnz1+8fFbX9cTn33//6quvvw4hWOd3u5XL8KOPPhkVeWpT23SS1ihgyRbFKIQqhNDPLznGFGP0TpiZUMqyPD4+jZyWy+XFxRX3Jr3qfGupc+MMIdgQRARtp6+mzQ2DZK0dJNn0WNGwp1txOAq1iarBfrdfAacspyqyMViMMrOrBQAMGYOE0ktzdqKUXZjpSijWV659nqZpWoNB2y/GGGM1/zAHR+fQrToMeyJC+g0bPgStwN12xWHM63/3w2ld34wmdXmAmrmLFaO9yhCZmZhFIGmcZgZOFBMassa71vvIPsvEJzCU0A7NUiIKKamMjnPRe2+M5ZRSuh3gGWOU/3f4OhVpch/ffbDPf2psNmyug34QChGlA8ISDkNxUdMlPjhr9LsOEiWmcr/bb7ZNVQ19Wp2N7ff7fR3QkxGMgeumBQPGUOBkrZWYOAk
YMXT7soe5DUBSagUCWCLjnFHsSeisRYaBEJGK9XaztJRkoFcbYwCYCIwxOtLT4Z8kJGv1Q7DWMgAZo1ARxaOt1rsXL16EJuih1DSNdrzLcq9Gkt57IgwhWNfx+aqqOjr65Ors/NGjk7quDYKIaB8SAPb7vfdZU5dt244zW+03bjpxzkXuOuptW7dtSwZW6+2+Kl+8eHF+/u7y+vrZkyfb7c5b572/2V8XRYECTVXNZrPJZHJdrVer3fFxnWUZCtV1HWN7drafTcYKVSvLMssIEZoYiGg0mnz11TfPHj+xxpM3WZaFpr25un7z5s2f/MmfXF1dnZycbHbbxWJxdHKMBoaxxZBGKyIeDrp9cmD+Ze4mMffW5IeGDn1uo4EN77VMhyyoC2GdXhActt9ZA1mvtNJpj/Th8DBK3uaLMTIKKEKrX+QoIikC/JO8vZ+6/YPvGfqRcqdco3HujiC+od6/VGfefTV4G/bwYLbJhynGQeQHuNXa6N9V99SczEBj7xU+P5DyI6JW2cxMAgDkXW4dIWJgARBrfbTcNvV4PEbEEMJutzPG5HmuA5v1en1zc5Ni2zTNZHL04MGD1Wr13XffjZdHH3384ubm5quvvlksl7/5zW8MuYuLizc/vvbeX69u5vP5v/k3//vJZHJxeeace/v27T98/lvjaLk8/vHsR+/dp7/+1enJw836Zr8ry7LGxMxskEzeiUQ0TYNoUkqcQEdOnDERWWP1vGDm8Xh8fb1S0HaMsayq6WRivRnSt05IQsnR0oW9IaIMBgjDcs+yTETUhEVxszpi0QklkcmdSbEWiZkbF7YuIyhOgQ90sTXskTCKKIi3WzCkKzW2bRucCc7GlGJKjsyhccywGGhQJYCDGTB0qRa8V+cNAeD9mdOAzB4enBG1RIMDwoA26xRYL6K6XSIimDhJhIT77dY2Po+ReeJTMtYmCjqRVX2ZLoqEoMeu95lmV7dgGUqIKJYMoBAaQEZAlgSiv0lAbzm8Xex9cWr9bZw1SECIAkkYBRhBEqd0ayKa4FY6Fc3wkcIQ9lJKLYfUNmVZNu1tzAsptjHElEKKKYGEkKhtErdtQOOM6SCFWDcqhkakqbq+ChRIijfSL8Mass4REjPHvp3QBW/9XhITgDM264nwMUaDpPtCJBEBAU4mhXPOG/Tet3WSAwHekBJB5yQeBYwxdQ1Zll28uxgEgKy1bdvGmIiIe+HsEEKWF8Mi0Ym13hlS0hQ2y3I143xwOga1qPSmaRqcTUejUVlH7z0bU5ZlXdfTqdntUpbTt99/V4xHm82mCaFt28vLS0WjWGur/VbngicnJ+fb/WoVQwjeW45SlrX3vqogxpjlhTGmDe1oNNrtdyGEk5OT7779vqnb46MTREOE3rm6rv/qr/5qtboGgPOLd3/4mz8+uzjLCu+c0Tc47ILDWgoOk0XNwJjlwOdSOmWDO0XR4e+D2AYDJkW1FIbFoOJbvTMIwy0Cs9NTUl0K6BQX+grvoPWS5BZqcBD2SNsD0qmaRA2WIhLag2rvf3OpRx0arpsbGmZFLx9WemSRbM9YUF91i50xAPYU9UO6kevUJgwiJtXlUNGHPtMdftSwQUlT2uVl5q6yYKtsf1HbCEk9HMKw9ELJxoxGo3pXrlfb2KbpeDyfLqz1xnJdl86Z7XZLRM8ePwGAV69eVVV1eXHBzM5lbbtumtY5n+dFWZafffbZ559/vlwu//RP/3S3K7//7pUAMcPx8fG7d+/+8Df/4le/+pX2zYui+Pzzz9fnN5PJdDqfXl/fVFXzyce/ePrk+fn5xW672W92KaXcOASxljrdvJqH/oMIa7KmtDA9Duq6jklUhDCEwNBp6S4XC+dsSClzt/hvMm4gxqmHEXWE3zhEgpRS0zRDzad9rUGQXkRx4TYBpAQhpcLDbJrVN41Bpb9JjOm2u62amr3CQLeIQRCt5hNDDzalFC2bHoJ0ry/ebQ96LxvTykIrCsUDEwGRHMDJ1Oyj17Xs26S
d2JHoR+uoBzukhCoTaxDJILcAYoQJKYGIqrIQNOU+hQjMwIJFAS4jawRC6nzg7IC/iE1omyY2gYhUK7WTEQcjIpQZQUJDAAiEwsLCam/LLAyi11FAfysxiuH+b+Vk6HmmOTHre2cZcJssXWeZmTn0H2P/kWqQa9qKY0ohqu0wHLSjRcRabyimvg7LkKqK0VDV1C6zZg+JOzxt94UMJ4mwKuyqirQuyD7q6GEdqP8mYwgG0Vo7n0xrrkUkxZQ5G2N0CDHG3IKyj3QvO+eqfWsAUxvU4JBDEBFP3hijM+/pCAzg1VWnveKt29XtarUdj7PRaLRdN8zMEuu6ns5G+jKKoqjrejwexxiLogh1ned5tdvrMt7v9+ZR56A29GBVIA16Eeosy3bWGZ8Y6NUPF5/++sXDh49Xq9W4KJqmWa/XL188CyGUIt779fqmKIrpZFLuV946g8SQ2raeTsfjMRiL1pLPfeDGObfbQlXV1rpvv/3+k08+WS6Pt9u1cAKAqqp+9w//6Jzbrm5Wq2skadt2eTRtYuCeBnbYFeCehXzb3uzD3mEww7uQ/rsh46Ckg1uiTicGJOr02NdqClPtW62DmXBX2HVF/xD5sK9qeqJeP+rqI5w+rgJzlC2s6xx6/BHDz/D2frLUux/7DmMkqP9kX34NibYgChIgikY4QtRyz/RIk9tBy620dneFMWEymKIest0zHfSgEbHnMCBzV+0JI5D687H+7yH8HRFJOsE3Pehn02ldtlXVNFVrjJtNaTQejUezq83VsJ3yPN9ut69evWrbdnV9NR6Po0hVVWdnYbE4Ojk5WiyOXr9+PZ1Of/Wr6TfffPftt9/mWfHk2XNhmI6mv/zlL4d21ldfffX1N19mWeZ9VhTFq1ev66r9l//yXz158uzHd2/fvXtXl1VoqjzPC+sRMc9zdbvt56NEpFWTKNceQhtDSind3NwU46kS+KqqAm6BMKRorO0wZr6DL+q5RoDOmMgiMbEFLeD0LXPP59WMWLPdLMsUNTeQfL33SUyUlFmILUCK01Gx3TVM2IqkEEWEjMGeHYV9/WS0gR2TOERE9U9MggDELCGxjdFRp4V/zypv6N8S3UeF3dZ/2Jk0HV45vByu8z5qIoggCxqEfkzIiJj6EjNE6al4PY4aUYATB6k168LEkkXjLIJh7xHRWui6nQISmWNqQ0CBFDiFmLy3ZLrSUCihDgIsdVhrFkHQBJmjCDLHntVLwh3x/N5vx5J6xsXtJERXTt+k1TwjcpdYpIPcgplDihxT3dSEgpKGjrRe9lVVh7bVUYlyWsgAyR4bIinr/XS+sBZTEjIASYATiqamhALUu3BYR0QELDFFDokF+mDdKXkMOqKOjPG4K3cKq3DOxbYVQk5AHjebzWzs1XiaEGOM1PsHESCKJOZulp/YWntycszMTcPjwkhi1ZcpS3j0aDoej7frGw17bXtbgkyn0+12e7JYxlgTURcvfSc9ob6yxhhDPoRGEZiK3myaRpKp63oymZyf8WiU7/e1EKy3+198/PLzvzsbj8fj6WS32eqYXM2rLy7O9PrRURhPRiLCHJummUxHR8e03a6JYDIZM7O6X+n0fb+rTo4fWOvrsilynzv/dr3J87wYZTerK+fMrtwmicvTI0Ega1Ls+I7SIwDuIWyHfXRYvX2omflztw+xo6Pw3VYs2r3U6o01q+qfXYMZCAzzO2ZFWyXlp0UAGoimBzQ2Ufeb/vC/Y8vaBkH5Wd7eh2/HD/c5EYwuD1bR3tQpxpKwAdNRmW7loBD7WxDREGCv96KcPP2wrrY9DgAAgABJREFUAQCAHAADByQWZm3og1FVbhKQKCLCCAmFgBlTMt13h52aDSEjMkICEEBKhER4y2Tr0BkieZ4vl8swit7nAEhkinzcSptSmk6nzrmrq6uLi4vdbte2rbbElRsEANdXq+VySWifP3/8D//w25ub9enR8Z/+6Z+myOPxdDSaVHW73W5t5pum+eKL356fn1t
L1T4sJ6dXVzflvv7lLz/9gz/4zWazu7y4buqw25XIqcgNAGkdZgy1bXN4yg9lkHPOZf7metM0zcXFxfPRBADG43HbtmCdjVEkKQ+haZpxkSGiN53olzHOGBM5MjNFANMh+4fm5HBm6vS+8yN02ssKxlCe51UbbDLzmY+bNsXWjfLppGgTxKbzZCcC1Sk2BCwA1Dv9srYMux516omJChYwSLmzGvbwPZsY6eUboJ8aYl/HaFowNPpRlcd62sZhy1QfaIiC+uF2GuZoDBhkw8wSu2Zg1os6Do6+6vhlyUROqanrGCAGCbm1lsiCM9ZaYEZJCQwCGEKTeWbmmEJTc9twk2nBBwCpb38QGv2NBCCIBJyEJXES6dS+BAQZ4wf3IwFLosFSdTjFtOBLcgvvDn0bWc87TQ66sBcip6DTB+kB7tx3umLHywbrjbU+CHAKQMDAddPOLTpnEiYAZO4KUOo58kRgtHnubUophpBSQgSvfdkYHQERJhYkMF2QTylGSYnIEqUsy0LT6IMrFCXLptRrU3CIaHNrbV0n3eDCbIzRvq5Bms1mTdN4C6PRSFsXBnCUdYv/sLVurUVOxph8NCqrLSyWOgtcTKeaDjZNa4PXiTgze2t22yojPCTklPv99fX18amfL4+aqm5W9XhEm83m6vJmMpmllCjzRJ3ai7M2pTQuRnVdZ9aeHh0vpjNJkWNo63I2m87ns1evVj6vjEWlLc0Wbjpb3Kw2pw8f5flIv6k8nxlj/v7v//7keDmfT5sQnzx5oiz1x48fG++i3CGrdBFOpW4PY95PxDO812i5WwX2W+xAS6VjYg/6d311CUMUlP639jl7o0lVRILeiyrdhjFM98NeFAahGJN0ZiwQU69AJRIioLzX5PzAe/vA7QzA8P4wXeWUbotQ1PPkTnQU1rbSEPP6SzemAeC+gmQkod5VFQ7IWLfdqr705tuqTjgBUxeG+0RD0xnq9XQIAAgxCRlAAjTk9uXGGHN0dARAoYlNG6AsEcxsNru8vMzzHBG/+ebbi4sL60gDobW2SuXjx48VT9XUoW2ugjRHR4vj4+OUpAmp3Dd13ZZlySIhhB/evF6tVvv9NoQAYE5OTt5898Z7/2//7f/xl7/85dvzs9evfjTG6VdJ2KElYwyJY4xxv98bd9vdZu5gfkQ0mU3Xq11Kqa63ug60V5kXNjjLgdGQoOhWxD7rBxa0SESm84GRoZ14uBOo9yDU5qfpcW51XTtnvXNNE4ylyWRUtnFXRhAe5xkFrtqgzUODJACBGQlIFb9ADN4ZauvhzIBJMDKExK7XJDvsc8KB4sZhYUc9KvXw9nvbckAUD0loZ9Z6sLJl8DQ3RERWgJkDhigMDL7IIXEUtrFrCwMnEUEQUORLTEHACIBzRCHsjfc5cOKYgAXR5M56n7ONLUtqmjYx24Qpt5Y6QA6iKMmRCA1rc5iEtIMKByAsRIzpw2GPBdEY7WxKf0YAQOQkiSN3DC1m1vGBc467U4+AUBJziilEEiEDLAAcU+hIXWhoNBm7bI1ExqQsy3yWcYi9Eh6ECJbIOuOEI4h+49i9ZkEEozrdgN7YOsQQEgA4Z0ilhUJyzmg71wCiMQQgiRWUSISGKHOuhC73tcbkzoxGI06tJWVbgkYs/e29NylZayHGPu5SWzfe+3ExUk0i59xiMR7gyrparIUsyyAGY4wjitsYY8xzV5b1g+PjzWbj8qJpGk1nFekK1ldVZXOnkE6dTdTb/cXFBsi9/OjFt99+az20gQHxH3737b/+k39xfXWx2+2J6Ga9Go/HCaAsy+PlQkRSEjXhk875r0aceO8RIc/9eDzWZTwajaz133//6o//7X+vA3hrrfeuLqs/+y//9eXLhw8ePDg/P3/y5MlX33wrhMujIzImxKG06k4VIuqHTf80Re+D0UE+JFp2Wyx2vW0S1iGfVnjQB7xBYAxFuJ/nqcaIpuBa7SmeTEQY0/DiQaNbEhBJMYqIpK4MA9XSAIY2CQk
Yn48++JbutYBuu7pd70GdkACREJCAmMWSsYacBeehyExRZN7ZwqBzLs985r1z2gshi2AsAgp2PulKjSZjCBEQhoZndyACoCGDAJ39GXcankQUqhoBrTHWWGNN51Im7L1jSZBEnaeNsZasIcvcEJoUuW0iIjpjy311cXH17u276XQ2LsbqwzcejY0xDGwsGENN0242G44py7I8L+azxenJ6WQynU1nZVldXV3HmLzPlssjtNw2bYwpxiRJYkir9eb8/OLzzz//8ssvtpv18dEy81Y4scjZ2dnx4vSjjz767LM/SCl9/vlvz87OjDXGuBTTeFSIcGiq0bhYzKeGMMbgPNR1zSzOud12v9vt86IgtMaaqqzbtn3z5vKjj1+GELIsR8Qs95vNZjIaTSaTGFtDZA1p3MrzXHQyZSiEWDWNdw5CzQcUaZ0LVlWlYDZFu6iim/deQQ9VVYcYRKhpmroOg3t73YQmBEtordP46bPMWhBJBqFw3iJKbBEhyx0gZJlz3hGAN2ac55mzKSaSpH3XW94xdJ6ieECRISI40M+V3o6rm+Rp9toX91paafmiH4f+IBlCg0hIxlhn0ICgWnwIICq3HjpXAUBAQ0DqIq/VEHOIkpL+QIwcwj60HCJyQhZUflpoY9s6YwiQUJAhtm1T121Tt20jmJgTIhAhISCoekbUKyIMivTiJJw4xRCaFAOnyCkKJxAmBEKwhoRTiiGGNrTN7U8MHEKIoW2bpqlD27Aq7HRNE0EQliQpsiQCYUlt26AkEWmbBpABqW7b1+9+3Oz2+7JhpGwyyfJJG+JuV4YQU4pt4Jjahw8f1nWz2YXlvAghWt3eAoZMkWXOWgKpyn0bEwH4zBBCCDElsQYy70Ibi8J5azm046IYjQoQXpWNJSiKAgV3+91kNGrq1ns5PppNJkW533nrUuK6bHKfz6az8ShDwu1uCwDW+6IojHXXV1cKYHn9evX82UPnHAqMx5PP/+7t6cNJnufnZ2ePHj4kA21VN22dez+bTTlGllRkWVFkKYbtej2dTjfrtYicPDi9uLg8PjrJ8yy2TWiqzJo8y1Lim9XGZDmQO7u8YIF8NMry3HnPHJs6ZJ44xfl8VlcliMzm05vrmzzzRJDnXoT3VfP06VPv3Xp1U9cVImz3m+Vy+e7svMhtnueJ+d3Zhc/yPC9iTJ989t89ffLk+upSUpt7+9VXX/z5n/3DZIyrm5snT58Z5+vQ/uEf/0tyzucFgAH0Qxo9JIKH5VqHWO71Kw7D2OEM71CrUw6KyK7gVWodCAvHFHXtsnDSxgMnVs6eiHBKrBDgpHzbmDjGqKZ9in5hlpi6rA1Y1CQuxhQZEkOISsbBGFmvJ5YUsU2cItdBYpT/rbw9EUExIIS3UHK1wTtATkNSPUgidM5lzlpLSKLpXqcGYr02ZOq6BtPtQVTVJELhju2QVE230+MGZn727Bkzx9iqxqa1lrwzxhjjUJIOLECQe72MxXIpIk3TtE0VmxAj73blfr9/8ODBfD5HxrYtBSIZ763zLnvz43e6JA6R9ESU5/lqtUIB1eK7uryx1ubZKJ9iDGmz2SC63bb87rsfzi4u1+tt28ZhuLhcLrfbbbXdnp6efvaLTx8/flpV+++/f9U0TVEUVdWod5e3FGMLws4Z542xgpT6hmdHuFHJiV78Wn3poCzLyWSCiFmWhVRPplNrqaoqhERECQRT0qM/CUK4ZdJo+t+XmHGINyLStu2hwIoWf03TeN+pfrNE7G3eQI/gFDu8CqV+RiXMyRJ5QpAk3FXwKSWbOQQDADFxSDGyABlvOpe9oVYbJnxt20pvO9Lh0FInvjMUc4cziXsjvWFEOswpre5qRDJGldYZARAJbqmBQoKCwoiMmPpnYQH1yrFglY3HgpxCE1EkSqQUW4nQRv0A0Vo0wgggQsKE6tqXmAE4NbtojGHn2Lnu/oiI2NTtUODquL/r9aRwcAyh9J4PbMJQ0R6C3KrYDgmNthQIwBClGHX
2zoh68HULzKiLinCMZACTtpO73ReFRQikg0SFEADQWmtMjKENbe2tMQDcNhZAaeNCZHTCL8AsWeahaQNDaDrMjPNYZHld196gI+OdyyeWEKuyrKqqcKA9Z/JEgKoZNp9MUcCSsUh1W0lkQsiyzHsfQ4sCztgsyzLnAUCRtMV4tNvtDAIixqZtmia2cTwGVWtSbxbnMYTgs+5cwg7FY3e7nQKUlEerC9g5F2OkIkNj8jyfjPPQNvv9XkSyLLOUA8DZ2X52XM4m08XCN02z29aIXIe43ZXHJw9CU+33W0Rcra6n02lVVVmWLZdZ0zQKSW2a6vr6CgiLojAGJtNR01YA0Lb8+PHj6+vrTz75JMuys7MzZ8QYc3l5+b/8P/8fyzkcLZZZkT948ODs6tp7P58vWLCNLMiO7iD5byuzAxk/IDyUSYK7PU95D95yGA77+3SlnvYz4ba2Q2aQAy1YOECsACieUwYcpggkHhJLEREO2vaUyAKASSTFjlcaEqQEnJA7erekJG0EEPpv4O3dMgfgA8Vs14PpXZqVttXdjURdF8gYk2ILKEIkQigAnJAMIY6LvJ8icIiQUuSYtERNSZAVxq2bnxBwu90ZY5yzee5JsTMoKaVQN9oO9tYYArRWC8eby2tF/6ck1roi93TkrMk4ymg0YoYkaAYqm6QnT57sdrubm5u6LkloNBoph6+p2vGTaWdnA7CYH4fQ7nb7fVM75zfr9WS8YObValXudk1ZXVzHTz99PB6PrTGTyeTBgwejUf7ZZ58VfgbAX3751atXb2aLIzVwGY/HTo3FAKwzee7z3BtEYyGEZK0lMtpU0chERFVdxxjLsjQG1+v1dDoFgDzPy+3+6OiobarVapVndjabAUDgO0wwRLRkLCYSUKcxffuK4lGqwyD1NMx+9Jgbj3PnnIsJmZwDRySJmybmJghHiWoAJ1q5MLNwct45QomcEntrLGFKEREHDKEWlFDkNsvVFVBPVQXawIHwZueO1Jua3s783mtd0IEMMfbUBWttGaSTj+zvh0SoGrCigUSQkKTD9xsWbakLscitJaxQJDRCwBElpdg2QeXcUhTbSN0E57z33mfgPTrXBoNoBAEZSDhJksQJoG0TEbVGEUh2iHzaytbPH3qVhpQSQzoMe8NbTj3H454WWhXaw/4wGp0cAjIzgkDnh9K1jQAIDfTw6v5YTDElQIzMsYOsQIypqUPbRgL0ufcWmhrqssqdn2b72LLVxhbq0BK7qQMIsDgiRO6a7swioEYHmXUIAImzLA9KJE2cj/P9vuZeX1DzvNlsVu6viY67sVxijUnUkzKdcxoF2xjqut7v98enJ7vdbjRCa20bIhE1TXN6Oo0xKhWvqqpiNFPoyqHCmff+7dszcyCVZ62LMeZ5XpbleJSjiD5aiiHLsunUNE1TxYCIMcLl9Woynk2ns7Isd+tNDKksSw7t40cPCHizvh4Xo/32piiK3S5YazOXAydLLrZxvb6p63IymaTQPnw4XS6XSWS93RuPxrvtvjw6ObXWtHVjiaw1//j3v/vu+80f/Po4z9xsOsu9Pzs7++gXn2bFuInCQoB2gIbhAObU63fGCrcs2EPe3mFo6Jl5HRl3WIqDMUBXzvVcBbW86sEsveeCAPTokIOkraMSajWYEqfEMfUTvpa7vcB9DyRKFBbBmCQl5gRRuglfSkpgoJ/k7f3k7Xo+EMP9iDjAb1BnThqle3yKQhdiSpISkuFhWjfwEbR7m+UGROs/tiw+ev3866bp6BaCrPhPMIhYbm+yLBuPR0VRuK4yYJGEwikFEDTGGnLDYW3IIyKIRWBCQiRjbJ7nyicMoUaSPMsQsao4xtYap3EupYSMWilut1tJUBRFVVUXF5cxRhUkvLy8DLJ5/vx5XdeTMY/G+Ww2c85NJtVo2gkxXF1dTWbjX/7ylzG2R0dH795cvXnz5vtXr5zNnDd11Rpjlsvl1cW5QUFKPvPFKMvzjFOD1OlTpJT2+31ZlkU
xGnwyY4y7XeMcqayf1m0xxvFsKpu03m1Z8sViYQwp3VihX8wMhNZamzqXAOyjPh6gOjX4aSwcTsCBjiYiBtE7NyqyJlbSgHAETqgmIqww9i44WWsdYmxqFjbGGSNtlP4BLRFFhtCzBoep3jBQ1HV2SIzpJ5F34NT3aj7Tiy0Nc2KtGpWXqRc7dCz6ChJ7hfTbzoUwJZSko37kbm4MkaPpzBwxCQ8dImiq1GJTUyQXvItZFvLMGk9WexxOzxXEhOqKwACcYkzchkCk4o+6TA1RNMYo+0c6+q4gq6eiQVTqQlKeDwsyhxhTCG2MKYSoo+/MAXcsDQAgkSSgsz2DqIQQPc6JUESU/4QIfZjlwEnlmzUsGmOIbBtj0zQhgEEhIm+pBA5VOZovZ6Nis6mSglmACa0BRE6SAJOYzA7zUf1awZBF8t47a8vtLqbGGwuJIXFRFLtQg2JY2qAqdHlO1tr9fo8sxhhkYRHnvDOdSm1Hy9GZX4ixbbXxU5dpMsklphjjqCg2q81oNNq3lZZuIQSFT49Go86MhVkAjDG73e705KTa7VRmWrv9WZaVZVlVRWYwpdSEwCkCdnSgppE8z73frVabq+mNL/LZfFlV1c3lRdNGAD67OJ/k2Wg0MmSstbENgLLZbFyeTk5O5vN5We3W67XP3HQ2iTE8eHBKBMb4uq6n0+l6tXn27Fmej6pqP5/Odpvrcrf+x3/8/OXzosh9VVW/+NUvt9ut9/nx8WnbRp9NhA2DhZQG/Fe33XpomO507LfNvVrotojrbrmdNyv2eSiE+gDWB4guuEgHWhHUNqr+D/Agrw+9LEtKiQVur8cYUuKYAjNzJA2ESVgYo0CKElmEIbCEKDoQT4kDc0qpqQUg/Tfy9vp/dqTF/tLRsFIiZkhRtMuldZ4BJAVpEBmkosigl3Ab6H2IGGOr4EpvfWEckQUkRBNCALGCivlUsKYBAP/4kWagujmdN4V3zpumqVNKwhGAu0YRCwqmGDXZNYYIUBKo03qe+7Zt9/ttjJxlLvOZSUZIUugMTkMIHDilpOIUo3x8fX3dVI1GoFev3iDiZDJZb26ePHkyCHpNx5MYY57Lg9NHX375ZRPDeDy2hEeL+fXq5ttvv13f7F6/fm2tPTo6qqqq3Ncuy8uybNu2yEyeZ7PZeDIZGyshNG1ba7u1bWNVdfszyzJCJ6WEEOoajGEVUZPehFP7mU3TWFJbFic96dsSCQiSYWtV12DIeIaDfigytMKTngWstaB+423ThFiBWIM4HeV5YdZlayxZy9RCZOiPWrUUFmFIiYnAkjFG1PuC+2OFiFKS0KYY2FEX5oeSBQ4Sstin8wpMZ2bj+gbGsC47KBkiUedQiAO5HT0Y6DEs3cLmnuWipHXsHkO6sXvQJSOJkRENgVKI2iScEqumZUQUa8mAjTEypygs2HAwXPu2tGiNzzLnMpdn1mU6uDadwQUoZEZijCCRRanrxjsS0OsaGMCQAWQjgPq+CNWDVZhZmtAqdCWFGFKUxEkYWNCCIA3AabV4ERZDrlNMGfY4CCCkxJ1+BLBIUiBb4KREWTRkvSeiWIe2jZy6FndmjQdOLSPLOMuTj00MzCBJSJKCAlDEEjhnYpQYUwwiBFlmfZ4559q2ZY7MEQCYY0oBgL01zT4hgPe+LhsiyrzN867ViZ0gUSsCNsu0DaADD+I0dBFEJHMuhSiiamFVXdez6TTG2NZ1Pp9otqdZo2JGmqbJsmNuGlE2ukhRFGuAtm2HWtxaG9oUQpjk41hjnueb9er6ZpWPZovpAr2bz8uzi92uhbdvz6z1Tx89fPDgQVPu97udMebNm7PHp4sHp0er6xudWYwno7aqhVxKyTrKsowQQlPH2Cr6mpnX6+vJZLLbVwzymz/64ywfVRIFQtOWf/d3f3Nzdfni+eO2qefz+dHR0RdffvPy5csHpw9v1vuHz0/KJlICC7do565ZAggAqoWrMwU0t52S2+wEOlecPgp+OOyp7UrvE8v
MSWu+3hVROjLCIMKiLpbpdjqYehJCH/ZSTKpOlZgZIqTEKUEU4CTa5Aws2gsNMTFDFOYEIaWUpG4Ffoa395O3w8E/EaFr/uCA3lG2xKBBH6M4p3p06ifidJjdFcsdj+P28YcmlkibUkI0qnoNQJy0hu14eEkQAEbOHrRehYj2loxB6wgRVAokJSESNRPYbisi2wXCfqBFRJvNRkv4oijm8ykAVNW+beuj6fF+v7+6unrz5s1+s++1KMHSyhhrkGKMbRtFRHnHRVHoiHG3281m1jq6ubnxPvvtb3/7/fflH/3Jkz/6oz9aLGa73e6bb75Zr9ehYSRZLOZIstmssqzIc395eekzO5mMl0ez5XRUFC7GtqrKqioJs7Is67pVO7HxeJxlWYqoo03tKqiiZlGMmXkwSTHGVG3Ttu14UhBTr/CJnHS+ygCdopv0vDot74a3Nqiz63xR+2/T6YgZyrLc79qmjQzASAkNIhTehRRa5jZ2zQ4A9N4AS2wDC5huTi6G1BENRESbZoFTHdq8abznYb6lIVbN6Yd267Bp8aCBebAD71zpGy+dpYOKXGAP0e4Gfod53B0/PNDcSde80m66hs6hJgV3GkXeWCFjJSUh6rdEgNjUIAh5Udgsz2Lu8mgckXHGIqG1aJiAIicAiYlFJEoCwBCGQnbo7hIRIw95iZ5cetntdsPBMQwy0VDTtGyMFavH2XC03cni7160QjaglplAlBBx0Emw1mlgCCHoyaWlz2QUQgtNVeb5aDIuTEVt28ak3N4o2JkY3Kx2eWbyPB+PjdoVhRT3+70qxFprM++JKDCjQBfeALx1ldTeOWuNMdBUtbJaYxvqGgwCZJ0/gyFxxsRIiMgxDj6x1to8R2+stdZZq+2m3a4cLWfGGElWEocQvPf7cmNBNNpZZ/f7ve6dHkjFIQRvDVGnXjuZTNpyW9eViofpZ9i2bZZlDx8uw+V6s43nFxfHx8f5qMhGRdPWzrntOpRlGcK0reosN23TzKcTtNY5t9vtViuf5/l8Pv3hh0s05y9fvkRDo2Lyw5vXL1/84vLqy48/PpnNZm0T54vJdrtpQ/3tt1+Px8V+u5lNRh+9eHZx9u7q6uqjT3/DSCExggUAQp/ifuj/d81HwOEo6IDffdgbipOuJ3QHzP9TYe9WnEyDVzdTNjDAZRKL6qwCACfpm4V9V5MjM6cURESJpjGphlRgZgk6sYvMkoQjQ0oSk8QEgUUrP2YOSbsV0kb8b+Lt3YWoHh4nfWdY+FYJVGmqXZPqUFN4WMGKaUBEAwYRvSdhSZCYOSksrlMaMyliG0NTt+rx0hE2YvDe53nuvXXOEqkQDGTeOmd9ZpXXTUTWknNuOp6pXE2nANm2LBERUwpZlo3GRZZlztsQgrE0GheqZqmOr3vYa8ipqkoSHx+fjPJis9ntdjvnnDV+ty2PT48uzy+Ms5kfTadz59xqtXr+/MVuV3788eSP//iP5/O5SFqtVu/e/uic8z63dlbXdVWtVKFRnzHLssl0tFzOZ+NMoC33VVXvQ2y89VVVVVVjjClGI7Ulq8qOFes96Ghkt9sdHZ0YY0ZZZweTZdluX1dVtZBZ31zvuuzUe2gxs+kDiSa86tunc5FhcjYoInbsOmPG4zEnxKquGgk1N4lNlnnvC7C7sJcInIRI0EjmfWxDahIyGK/Esp5Z1Q+ixFA33E3ctu1wfOjTiTGD4snAAQUAMgQA6WCucC/avZ/VDVHzUAOmb1wc7uhbri4aZb+AKmYJCRoCEbJGBKwkAOmmFQKQJDOURBAogTBITJxSDCnGGExVVUXm6sJ6432u7DfivNtFiTvsmggAhHjg6UOdFBEiJohElPqaY/CO0RwI+hGmBkljTIhMvURDhybzblAeSIdhT0etiQfe3u0LAGhiaHsyZQJp286QzhoASXmWGcDr6+1+Vzvjx6MRMEpijoFZ9c/Zkct8dl2V1lodZuvj7Pf7bRULBynBdJQXRdFUdQgBAdq2nY6
zsmx0fRZFEZoqBE4es7yb+3oPzpDisGKMNsMu/hmjHmzee518awNjOp0aIv24iGC/389mMyJyzjdNU3i/2aYiz8qytCJ5XqyvrhXJojlliI0SGJjDqJg0TaMN0ljXpyfHu32JxiFiWe6Z+eTkZBMhhJuyLFer1XI+7ro1hHkOTdNeXV3l3otEEamqajqdgkhs29VqNR6Px+OxtZd13RKR834ymWiSPZlMtFdkyI0nxeXF2Zs3bwDZO0sIJycn1tpvvvthMpl67/e7ajE/CpEFiAWlF+Qcphuq9yS9fiEzQ7r1KdMaulsGeIvzGOjEIjLoQWrHBHrVFZVNkW6gxyJJo50GNmalYMehtaM1Yu8eFG/DXgwa/Jg5tpy0qxElCnCCkDixxMghSWQJWhOmTnWoDkDw38jb+7nb713u/Zc26LR01g10CDRA7PoVmvJrKxKNgJjUDUwOaLMMIpJ7DwB1XbcteW/G43ExHhdFEdrSWkeEzNzUVYxRy01L2eHuzfM8y12WZW1bI2K53223W23ZTyaj5XK5vtjrVtQ/KYpiOp0z826z2+/3sU3j8VhELi8vU5QXL16cnPjXr34EwtkUrCVrbVU1eZ7/u3/3f3r48CEY+q//9b9OJqPj42OtMl++eLJeb7/48suqak8ePKqqal/eLBYLRNEt4Zyrm1rtskS6RFvxHerGV5blerWvqiqE4D3ECClJVVW62623KSYGyLJsvUmKTGHuKFAhpBCCpVse2wA3HbqaA7BiSFmGQWyM8fXrS+9z7/3R0dE4xM2uWq/rWLZIZJxrIToiAWYAFCY01tq2bkICeyCXrLWLBjYlgDN0hUubWn0lA30eRAars0HKjoh0g2EvO/5+5BtW4+H873Ab492J4OFSP6y0WHuDRNpdECIgUZ/eThAhoUhsY0wpZUhAyTIRQQBG3asxNiGQtya0LmvIuyxrfJ451zq8X18OGYl+XMMXoRsqchzOrNRbAyr/+nDfDe9ISV2m/06N6URz+wbXwZ3pdsRLg4ME3aY7A61TRwAxAUBnt+CMdSN7c7MtA0xDyJxLLkXTthgU4KYQbufcsycPiCimtN1umxj01ToEa20MkYgMUtM0gSE3JCLz2ayqLkTEW5dlWbXdIDKA10PNWjuZFJnLEF2MkbnKVPvUWmcsgxhEsLYoiuvVqqoSpDLLstC2Nzc3OjLYbrej0Yh6fo4feeec2sz6LMvz/LxpsszofhyNRkPhG2McFVCWpU49jqZjNcxzGU0mk10j22arIaqqqti22+3WmE5rglOb5zY18eam/PUnz/fl2lq73W5ns1lV15PJJISwXq+JaLkcRw7MMQRs23a5XL5+9ePLjz6ZTqcioggDZv7qqy8mk8n++uqj58/m89m7H98y82ef/noymWwrfvjw4cVq1yYSqDNze1D3/TzQdXKL3iQcugIDu0lEkORgT90ml4czdYE7IQCkE+TsRaI17On8Iol03bwh7DFzSpGZYzoMe50nFjO3LaeUYpKUJApzojbFlCBwShGbFFOCxJyShMgppbJOAGBcXvQv9/b33YPjzu1sWYhBQZld88QgGu70tdla9B6K3PkcrMXCYDEajUcjBE4xGEy5Q2vEIBMyEllrVLBQDAka5TT0J6yofRISNLECFDLgOhpg7jPvXGYdjiaj+WI+m89G43GW5z7LjLPGO19440yCgE7yqc9GCDaNJj5KHbiKUrdhv6/WkSsyzNJOZ4XPzcXVux++/6Zpy+VyslxOJ+Pxf/4v//7ho6Nvvv0dUPj0Dz7ZrM+N5ZD2r15/f3l11cbaWDx98GC5mK1WKyRbVu0337z59NNfP336/PPPPx+N8l/+8hcvP3r+5Ze//d2XvyUDIilxnM6n+3JvoHj747sY4nKxAAm7zc044wfHk2ePl48fHi1mRdtUq+vL7WbHia3x1zfrENk6h+iYDVHO4qoqvn13nRins3nd1LsSjk/GDx8fzReTuNvm1pSbLTC3ZVtVzfH
RA+8LBnJZsa/3gaNgMA58BlfX71KyxmZEJgYuiqzel7PJuC7Ltq45RhBIgduUrM0Y3XZfO2AEs9m1Fzf7fUOj2fHy5MHx0dF6de64ySjlJkmAlMAALKYkVXCAJCwMxnExzsiRxOAsOACuS8N8cjS/vjxfHs9cbg1iNhr5LFfhVZdlPsvRWAZEY53zaGwSCImFCIxh5akexIyhgQN9jqWBXLflyBckmGICFjCGjAVroXfdIzKI1NkzMQCgYYQEkgAigAAyCKMIGeuQHJMT44RcNC6RS8aJgWCoRagkNco2MGS9c94bMijCIXLTxqrmuk1VHWMbmjKFKoVaUiOpldRIalACQUSJwEFSk0KdQhXbygigGhg1TVtXoa5DaIVTCA1zZBJAQYNkyXhjvWUEY431zmfeOouEUTimqANKnSf0hEEBkNrlNs9DrGOqiwxDU253azLm2x/eBcnercrF6fPXb69v1ntvHCE+FHnxMPeZu1jvNm2KaCIhozxYTtryGlopDABJYC6Wi8cvPwq78vztWV3u66oVTrPJhCGp1cdo7JeLZYjherM3AEUxbtporPXWZZlZzkbvXr8pfJLEp8t8XOTL6awp94v5Yr1ZscRi4o2TpS92m/XR6SKf5Nt6t2v36+3NLz55efb2x7asZ9NssViyNW+vby7rfYVoE8fYcgqL+ezFi2dvXr+aTIqmKutq//D05GZ1aRCz3Hnnyv3WGHq3amdHJyEwSpKmmuXWcHSIALjZtfn4mM2oDPbyurJ2+ub1edXGk6Pj1dWNMeHkaJZibSwCI4Bb3TTWZuPJkshvy5KsI+eMCd5TTCnP/fX1Ks8L43KfFVk+2ZfVqzdvTx+cFuPJk2dPJrNJkpA3F7/76/91e3NBnEyWzx88mp48+btv3zx8+WscH2Wz5XR5VJZlinVmUm459iRRYwgMQmdO1VU1qHxWJNR5FoOhnhjbiWvqBAqjAAtqBcloGCiJeqC2iUNMIXJgpZpyDCki2cQQtZknhplSopSIY4iRY+QUJUaJUdrAIUoTIEQMkWIybTJNoLKSspJtaqsodUp1gipBFWAfoGyw4qxs7b6xZe12jSmbbN+YqslqLpqU/7fz9oZW54cKvkMlTCsibYzQlZaGmZFvle/7CwPct7Q47DpZa4WNjjpTUn5i5ATOiI61tI5BRGeNtZTlznvjLAkEIgEgxXNq+dKLArfav16v1y9fvGjb1jn7m9/85tmTp2/fvj0/P2dmB8Vnn3321VdfMPOTJ09SSr/5zW9++OGHr7/+OiV4/Pj0l7/8FNFcXly/efPm8vJ6vbtKKS2Xs8vLy6qq/vRP//SHV69CCH/+539elqUaOGy326ZpRqPRuBi9efOGmefz+Wg0atoqpTAdF7PZbLlc6mS7LEvN4EJ/YWZnM+ecMDZNU9bNxcUVESBS27Z5nkeuq6pi7qqiIf1XIem6roloNpsVRZG4DSGgav8LOOekjxpDC7RDe3bkPNCWV4xR0BhjJEmKMYTY1CEhAxnnXArx+bMXyFLFljb7XdykPYAD7/3F9d5bIATvIctUz16rKAu9Bq4icQaM6L16ZagwUkoRukblUIbST5R3H1q9tz1P6p1smRkOPPygt2KAYSUejBK152kAWMCgyqaDYN/6UWhM6OLu0EjUi34jh+9oUPy6HcUd9F2HHtS9Vm1TB+rl87t5Ooiiq8gYcgdVnTXGGBUiN3cno/Ch+livWGMPP0ndQbtdS0RV1a3n3W5HgM6bukrjCcQY61Qiymw2a6Kk2IamRZKiKJD3xsA0GwfISuA3r197SIhCxqQUU4SUkiUyRVGW1SjP9elM/4kpctgQFEWx3W7zHEKA3AMAOKUXEulRUBRFB0ImMQZ1F2hp5b1vmqaua1ViYebQtRjIOpsbcM4xR2OMVn4hNJPR6Opyl+f52XmZkVV0jBpwOufqqkWJ49xJaLUhEUKYzWb1pozcthEyz+fn58vjR0+fPv3bL79omurRo2OW9t2
7d4vZxDlHTHVqve8c2KeTLMsyTm2M0Ropy9JlhTEmz/12u3eZn81mDHR5eVkUxX6/f/HRJ0dHR2oW5kzzu9/97unLj/7+828++fQXv/71r//Tf/7zFy9ezBaLYtShvpnv7Iv3D/yhl3BYCCoILKXbGZ7g7TphkIO/vV1FKqMyiATx3YtWcsOuZ2ZRZeEevxaTJA4pyqAiq8gxFQsCIEHDnOo2qVF8TJCYImPTRAYDYIEQkwkplPu6aZoIJCL/bN7e4W8ABkThe2DOlBKnpNB2Z4YREZAhMxymH/aVRz40DlUnLRASQWstJ9JPI8ao4z1OsC9La603dmg9G0IRbZYaYSLDAMjMkQARqHfP6bpG1sSmVufGJ0+eEOHr16+3681isSCC6+vrm4vdRx99hIjPnz9//vz5999/PxqNPvnkk81m97Y4kySvXr26urq5ud5Op5PHTx+9+uH7J08e/eG/+M3V1dX333732WefWWPWqxUhphibpsmy7Hix9N7vdruzH9+WFR4dHS2WM2OMwTTOT5ZH86Ojo+VyGULY77c6dbDWtq2eiYZI9ZbyFCW0sSzLzabNCvTeKJo5clDbTGudiKhmkrV2NBqpwpmWOyrUYq1N3AyYz5AGTTy5JYf1Hp6AZK0RkRBatZgBIBHSrleTgnotxNCE0EyKUT7Kj13ux9N5HfdtrKM4s3fO2N6BLcYISeNcNqw3xcdrHyk3SrJM7wnUSUop8C2TAe8aCd2LfMP2vtd713DSaZB2TmO33VccAKGI2kuEg2YOImkfBw0QsDIo4GD4T0RIhgCRBYnJANxyJHAgHR4eQKkHoUivnX0Y6g5f+fBf2oZCAAKDiGgNGipGI2stOWuMAbqNZC7Ph5j6gTh3cAiq2ayxlrmb4bEYAEpJttutburJeFnttuU+5BkBiLf04IGP6JuGDRIINE0TQ+Nye35+9vTkaPHs0Xq7ff32Yl3ucTwqRhkJW0fG2cQxRAihQUPWGG/NZDIBwbZtCQARQmg0ChaTUZ7n7968mY5s2Ub9RJ1zBklHYnUts5nTCAfAzhtEKas9ADtn8txXVbXf1+pKjWhSaGPTgogll1kjXQcbNpvNOMsvLq6X04m2ZNu2HY0zzbrG4/Fut3Mu2262uaej2bQNbQghCddtOxYJMQogAHnv3727EfSf/OJXy+Xyxx/PHj1YstB6dYXzaYxRD8Y8z3fberVaTcYPsiwr91VMLaIvy3LhvaTkvd9u3y39kfe+jVyW5enDR1dXVzr20/HQ73775Xy+/Nu//eb4ZPr48dPdthSgyXh6fHycFxPNGxSS0Z3DRtXCtAOpq+sg5iVmFetDRBQQEj4Ib3i7eBLcuf120faupodptLLs+MBx5SDsxX6GxczKOogpMfdk7kFONgZOSd6trgnU0cCJYIy8r+um5RigCdy0HGISQUGDaMh5bloR+Wfz9voS+CAo9rcPaWlK3CZk9sxirWXBkKJh49EAIeO9iHcHPHYvBxcR1VghJOVY9/WHfl7YqURCl1BnWVYUufc2cTAGhyFIjFEXc+FyPWs0rZ5MJjQeNU2zXq2+/vrrosin0+nx8qhpmu12rXzYv/iLv3j+7MlyuXzy5EnbthcXF8+ePfsf/8d/98UX33zx2y/evT3f7/cAEEK73W7/z/+Hf1tVFbJMipHCLC2ZH374YblcqrB6ludN02w2G9VSsc6MRiNdjkVRaAj03oUQqqqqqkrfoL7xGCOA6YaceouGmwQhyHye73a1cpVEVRaZVTZXv0Fl1FZVNZ/PVWBTRKy1HNoYI3bHfUev0ZU+LFlNtwU6/kMIwQA559B4EeXbWUgphCSSYpte36zn8+1kObfZ2Fo/mRRSh1g20+nIGCOcVAi7baPzHTuQQMCoAoUbj8eHz6th+/3VyB2OEodRlqFb/MUH19W9NFY/IoVvIsuw+W+DSv/3Gs8kdW5w3HuFaLQIqctH5UDyfag+h0YrHvyTel3sOy3Z2zh7OHOEPk+
/hZt3ADxvVUeju1hjNNSpK4I1h8eQflCHSa3O7e58THfesb429t4TxCp0PPGmDlXVeO+NtW/eXhgAb/+/xP1Xs2zZlR6KjTHdsmm2PabqVBVMk0B3s0lR4r03rkL604pQhF74cPXAlxtBqYlmox1QKHfMdumWmW4MPYy11s5zCkA3IVLKqDjY2GZl5so553CfUX7015fbulZjBp0yYvZhPPWjBVhdNrcXLUB8enoYBm+Nqiv2mbrj0/V27bTRxgStlBDgY0wmFUVRFIUfQkrJGC0nYGVMJmqaBpBTghiTYLisVkJCFSwPItR1LUsiRl/WldYY4shGMeSycn4YhgGUgqqqnHPcjTHmnJiIy7IchmG9XiOi1kbSLyK6uLgYx1FCqdbgjBWDlLFTx2MPjTHGRETxw2DIgqS1zuYE1tqug/v7+4vL65///Oddd7y/v1+tq+12KweR5H9FURwP4/E4eO+1mbodOPMKuu4IygCQUmocelS6aRrnjCToy6h1v9+vNuu+f/iiaX7+85//3/7v/4+//Lf/PjI2TaO0XbQeJTvMOeNczUtQUkq83Z6zXn42tUQ+84sGmMIhTAjnZzLDeRk0ERB+FPYkxRQSgghsJkqUhJM3PwgWGEcmSolCCCEtP+ac81df/eJ06p+enp6ejt04UlaAGrSJzIDWWAYDKdLgwzgMIQRrHfwJfnvICDBnC0pA6QQiMD03QqZWCyGDIlSQGVMmqxHV5Mz3+7qjy76TYeaUTU8zlVlLiQUhP3uNIm82G0QUmrPRBhEzU8yJKQsd0qCYychRbjJRyhnn885aa3BCux2Px9PphIgvXrxo1yttVbrPn3/++X/8j//x8mp7eX1VFPYnP/nS++Hu8eHf/9t/9/793eF0eH+3f/3y+s///C8vtlcppcP9vdVm9/jknHu6f/gn4rqu67K6vb75+uuvP7x7//kXbz777LNV23777bfB+/WmRWTfd865i5uLFy9vnDHDMJyGkyB1gViUI8auj6MPNIU9pRRRHIah78dxBOPECRZOp96Vuqoqrawfo0QLOTSttSJFOKtIW+Io/ik8ozeTem5iLCthUQYhhqX4m39NMyeRhrJKZVZ5ErPgGOnxYZfxAK4gV0WwlGf3H4HbKJImpzZCIkSjjLSnKlfE5GUDLdtv6b0sKRHCRyc4nFV7nwBYlvdyHvZwti6a7aOfoT2fhkyckuIEYnc3lcQLC3fpCy9/LgRBQiAEVsIEV7McMxo0yzNOSYZSWmuxHF8e56gfPgMcPYdEpZcGKavnaMoArFBeITEoJT1OvbxxACB8vg7B82Un0rrkHBQ1gqtKhflw4JiZAUNOXdc1mxenkO4fxrZCo3BIcH11EdIHHyAEUspWVdWGrICLwtZNOTydQn8EiZEpA2XnrEIGYAWkEY0CpTEGSGPebivJrgDAGBWj3BeyzlZVNYwnY6Dv4ebK+iEaY4xV3g8pBmAqS2jbGjhTpuiHZlUbq3KOStscYtk0feiAoHDQ1qu6qJiPKVII2VgqyzLGeHFxgQzb7er4tDPGxBhvb66enp6Eup4pWGvfv/1+u936XRhGcCZNn06a9lEIoSgapU32vu97RDgc8zfffv9V3X7xxVd//+v/GpP/4s1LAKjrmiNLAqoNxBH6vneFJEnAmQrrmPl0OtXt+vLyUgzFtLHb9fp0OH755U+qoghjfzweHx4e2vXm8WH32efVv/mrf/dffvWrX/z5Lw+Hw5//1b9TE0eIEFGrKdkiIssGiAlTJtZgmZhFioT07PejQA5jrZifUz1E/LhgObP+Pjveacb2z2GPowisTHornDMRZ8oQcuIkxd4U6kRmJSUWulqM7D35kFPKE6gl59/957+rqmq1Wb9+8yIx7HeHu4fH/e6ojB18GofgY2JmEPxIWUrt9N/O25tOn8Wr8tOCj5gzAzESolImJgJiVopBZeaUsyEwxhAo/cltmw0yz5s5iAiYl2/K/tfEbBRiJgKtn3tfAEA
EMaSc0Q+90uQmRWxE1Moaa23yAQDMLP7EzD6nHCMifvHFF93p+Nvf/vbu7u7LL7+0djojfv7zn0vN9P79eyL6sz/7s7/9278dxu4Xv/hXzpX/5a9/NXTj4bD73e9+93D/ZIi++OILWabff/99VVX/0//yP//8Jz/94Ycf2rbdbDZt0+6fdu8+TJf63XfvQ4iFsdvt9ubmpqoqPwzDMMiblVdIRMK1WIJTVdVVVYUQco6ZABGKUuhcEAI0K7ter8dxzDkXio0xMjYQTVRBpQp/AxVJwqu1RgTn3HAaaF6/nxYiiDRD+GRclHNOIYSQUyKYWsg0a3zlnAGBjTOJ1XAcTmmIzIWWMCyoEmBKwXsK3lqbFDmjgjWIbJtWCFJbt5pY6h8HrUkclZ9ptp8ISXwSDvFsjnW+wOaz4Ll+IoTz2LCs8I+zgZnRLz8HVkoBA2idAZTRyysUPZfzvuInZdzyYmYWx7PB2/kzfvIufnwpVqhm+CUxK+GzqunXpgzPmDGmH/+tXP050Tm/jQxyIrO2c4cAvQ8ps1LK910EKEqbQlzVcLluqlyNcUwpaVds6y0qvXt6/PDhbluAZlq1tdZ6d/ScozV6s25j9MARGTWyVmCVJs2kuWmasR/6vhedaCEwMPNqtULk5Edj4BTBGEM2aq2RYfSj5HbaKuecGM/GNApzhChbU8ToEesQgtZQFKosS2m0xJhynoDEiCgKZwAgebBkin3fby9aIAghYIuChR4HDwCUISXiWQ9daEVl5Y69994/nd4aA5Fgv9//1//6X//8L36xWtX7Q7/b7VZN9eLFC8x4OvaIaIzKioZhYNCS3OecnXOTxxPFFy9eFFV5PHYxRqLsvb++vlytVnI6ffvtt68v6sT05U9+ur26/N//y6/+1//zz373/bvVanXqA/+o6/DJ+gfOCyJ6WoHTenuOcsvfqtmrYYqgcLa5zkQrCfOStk59y5xyYoKcM6cUEk/OCTER5RzTREXPiTLD0swkxBjIe/I+h5hjpBhzzjmy7Z7G797ticCVRVO364vbm5df/u6b7yxHnwEy0GzkggxKtAXgv6XUAwAkBTjZZ4nwPMwZ69KeAUF5ggZlRh8VyoSAiSAlykbNd+T3zPeYs9TFc7l9zmSXTiYahiV/6PoREbVSIuCrmUwGVJAoI2QVMbucjeLMKqPhyV4VAEpr9RxUQKuxH+7u7uqm+te//EV3PL17967vT8aYu3f7L7/8Ejlbawc/fvjw4Ze/+Nfb7fbh6enNmy8/++zVN9988+7du/3+6Fz55svPcQjH/V5Ga+u23W63fhirstyuNz6Gx8fHsetfvHjx8vbF4+Pj4/2DM0qjubjYvv7s5app+q7rh5OxirOJFKNAw31IPjCjMU5hIUpVC1alqrS5yavtOlM0Fhhgs7lo2/XT075tW1WgEO9kpC+ihZL0pZSUFriiNcaIT/qESj+zxKIzq+gQU4yRmJwrUZthGNinGGKKTLNgABMjog8AAM3arS8uPJjjh8fjsc8E2FQAYBSqWmlnUYuZVs45U05AOGqVUmirelHgPV8Ay+aRjacBl7bJHIk/inafxL8ldXvuczLknHESxJuY8ksv8RnQPT8kmBGAmvUFz/IznI8VQq0UKSICZRAIGRU8M0MkVOOslLZEWQZUGpc8cqkyUS0ExU8BO6xwcpAwk0g6KiW8b2X0RDWXKlBr1Irz8wWnZ8dnDYql/lvet7UqRc+LmDuxj/l07IuiFHm8EoBSTIF//tWLwnBVroaEqk9jTCrGlNI40sBQN6U/DN3x5Jy73G7Wl9f3x/5p/7RaNdYoZ3UIwAkoZa115VxdNvcfHsZxXDUtUUIEjUCcLzZrUQ+nCAaAYmgbK6Pe4EelwBibMjOllELb1uHEOccQRqbknBHP0mHonANjDIJOkZJPORISKNCiAvH09LRdb8RZUxqbfd9XVaWUQp7cu9q29d6HkMpSgUbvg1FAqELy1piUMyJ670Hj+x/ulQG
xo318PL794f3Nza1zj6fTATJtN/H64nK1Wo2nXinlHKUcc4aqsgKLUtoOQ69R5RDrspzUwwD6vn/58qUwDe7u7u7ev1VAH+4f/Bj+r//hf/7P/6+//nf/9t//7d/+3f/0v/5fDoeTtgXMPHRpz08OICKak4EYWDODQtKIyEaUnpFnrT4lkplnki5CY5fVkj8Oe8u+SPwc9vIU9nLOHCnmxCnHlKfOZ8iJUw4x5JxDormNSTHRpNmUyIcYQhLaVQw5pQR1GzP4lLyP1HX3Dx3je2B1/eJ2U29uX5firX3/8PThw4en3X7btvAn8/bOfgqyc8QEFxHMJFBtAK1CG0KnlLJWZ+bMFIndJEn9yZn0jOQ8H73MJ0+WkAcATKzoGS533l+aXy0h8fZixZyNQaUUZcg5irTSpm6lDILZQVEIcEVRaMCuP+12u4vN9i//8i9/+OG7X/3qV/t9H0JYNZUxZrtdPzw8/O6bb6qyfNo9brdbY9QXX3zunHv/7oP38eLi4u0/fvf119/WdSEk64eHh34c/8N/+A+Hw0FywMPhsFqtNptNURT7/b6qG6X09fXV1dVljmm/32eKN1fX+/1+HMe+P/l+6PrTOI5Tg8KYnPMwDETDfr8PIZR1td3WtiiOx5NSqixxtVo55w6Hg7XWoZI3uNQcS5AgIjkFzsuOaSIFiDOqZfkscNbqZIXGGFA6Z3EThhmjNJVBitV65bwPioEZU4zDMIQEWsHuOBCAAdjkXF5etO1qvV4byOM4UI4KSKEGQNGFEZvDJQh99IlPg7dpJ9MZ6/Y87D2na2fV3o+XsXiQqsnOfGIlwpR24TLwg7MmKgEpVJCnET8xLx3P87B0HrZ51oTDWediUX1bYrmePeiXP18AOz+uEZl5IphPnWMFiBLzBG4gZKsF0rKQ/9QccWHm6k1XPous8hqstSl6MWHPk69nPvVdWa7e3u9jzOsGUmCK8Or2inP0HEAbULrvhnHIo08AUCnY7XbXdWWbYjh1p+MeXSpssV21zqpY2qJwwzAkAB2zLtA554wdhiEFMhuTkny4wAxNW58Oe2tUTOA0+JAvL1sQLaoctdbaYMogTbCiKJJSmQT/nER2FJCl2lNKMSxMR0AFWmvnXF3Xb7//9sXNrSit1LXbbDbd6XC93aY8Ik6V0NXV1d3dHWXlbImQQyJjFTOnlJw1OeeYQ8qxLNfee2OAGFGZdWl/+9tv/sP/6c+329Xf//2vui7c39+/vH7ZNM1w7ADAWg2QUU3iVvIQvZgQQkrBx8w5aWNDCD/92VfMPAzd+/fvT6fTF1988V/+8//++edfxMzrzeZpv/vln//l02734tXniZgJaSbdIqI0vSMv3uVzlx4zsD4/hHlGfi1/u/Q8lh4GzbUgnKE6mTlNVPRpgkcEKVPOOcSUiFLKkmXmxKKz6dP0tYBaUqSQKSfoujFFCimnmOP0X8w598NBtk5mzTxRjRPF49ffEgFBdq5crZrN5uIv/uLfFEXx3ddfM7M2RfmHjonf+32t9NzeBUCahisMwKgAEVlrcM5UpStKqzQq7q2xRVVYa6xSzprCWq3E5ERPmm/Cw0VttBJghdzM6dgF1tpMbFgGQJzohzHlnGKS6xhAlTMxgdLKWEM5ArDSUqagdaYonNE25aStKcoStQKSrnHOKWmti7Js6rooCmLuh34Ye1TKKDuOQ+Z0cXWx2qyqovBhAFDB+2EYr2+vrXOnwxGVur29/n/+b/9bbevT6cAsfg5krb25vY0xvnz5MsYYU5LsLMboY4gxXlxsX96+vL6+zDnt97sQvdVC3I7J+6Hrh2FIKSpABKSc9yf/+vVnT0+7vu+11t0pVnWxXm9OXUfEp9PYts2bN1+8f/9+tdoww3ZVpZScK8ZxTCkXRSFNGJn5jWNflmVR2L7vibK1NgYCAGu0QhzGrqlr5ywAjOMY5Y6nnJmsdSA84m4IMWljEc0wRmYQb2g/xroutDForLbFvusPnpxBjWA
1KIYx5O54Cr4nypTCer3abtYphnHoV+uVuLIhqst1Y4zJKQvvYrValWUpithT12JeRnLiWP2R0tKUK8w02+XEf25jCKJysmFiZhZhbhGjUogKcZYXklAx+fwhIjDxs3Q8I4ASv3gxnlXKWIvIjEDMomCpjFZGo1LGWdQqUZ7bOYBaoVY8bSYAhaiV6IiCeOgpBIU0P5nSWlvjylLJrymljUGjlVKMoIyR8g4VTthzrbQxmc7qXZzUMZVSrNA4qyQZYp4GjQBZqbZtYgi+H7rT6Wl3OHbD/ePelu23P+ytwxABM9xc65/95HOFWRsk0L/74cPjkatV+7DrX95uQu8LHS9XtcK8ahof04e7Y7Nu66ZxBjjHru+IyAdQCsqy0FoPo6ecU4p1XZ8O+7qqmHNROKPRWnN398FoLktQAE1j27r2YfTj+OazV9GPF5cX79+/++LN5wh8uHv7+tXL3XG3Wm8S5dOpjyEed8f+RNdX288/+0Ip/e33P9w99GigbuqffvHy17/+9fXVJQJst2sgcs5sVqtxHICorNxw6tab1TiMZeE+fPjgqd3vDm1TUYoX29X9/f2Xb149PT4WhavrdnfoMivrmm+/P8UMtnCorXPmu29/ePnq2jltFAPA3fu71Wq1blfej4fD4BxoA8B5ta4NZ2QOKQkq+Pr6uh/GGKN1Dhg26/Wbz9/s9/vvvv3mizdf/t2vf9207Zsvvjoeu/V6U7fr7cV1SBmVTpny3K6cLEYyTa4mojpEAjQBhai10soAM8nKFWwnkbhTnbU3YNlls3vGBOuS+Zy0B3KmKEKaRDHmIYQQY6IcQx5jDCmFmELMIaaY0hizj3kYwzDGfgz9mMYxjT6dhuBjGsbYD6H3cfQ5JMqEhyHFxGlio4vAIYLSAAqVBq0ywTCOT7vDu/cfvv/h7Xq1QqU+5e390cJuKfsUQD77/pItUCZEooA0eLQDKqWqhiNNIT0xZKZI2RIwmh9n33yGEf0kPRfdxDM6vfhgw6JcJTc9A2NmRJ7f2jMA9XxweF5OLkkuETGCwMqZeRx7gbq8fv0akH7729/+5jf/tF6vS2cBoKyL/dMuRL/bPTLnui4Ph/31zdV3//jdMLAx+f7+EQA+++xV27ai16Ctadu267pxHCPlpmkuLy8vri6dc3DmeADEIYT945Owi2KM4motNc12eylsoZTSfn9yBbRtm3PUBrtuZIbLy0vvvTFuv99vt9upKUfknPNjWNLGJdlXH7faPhmALeWFLGIZ88iSBpqmPhBizpkYUYHoPCKT1WCMAcQcfWalUSkxSiTQGhABGZghhHA6sUcgoqvLC2utgkak4LwnGSYtsnaifwGCPiWxqpkT0vmVLxHxk0V7XrAuC2ypF6cE6xlxgpOVGk9FEgq8SoIYAAkcRiGj1MuwxD9GYDWhJ5e8+BP2BSwo5XmI8jw9PbOAOd8ac9v/OQ2Ve0LLJAAgAytmUDgbNX/0fmnWAjy/7HkN+fEoYcr3c84xgvd+CN7HkHMGQG1cN/gQYN+z0/Dqxv78J2/Wdbn3fUR39/Ru30FVu1M/GoOD96ttyTyi0ZxiCAGIU4LD/qmo86rWF9u1tfbD/c4qVgqMgsK6rj8xs3NW5moxBufcer1+fHxctbUxSinWioEk70Gmyb1CRnFKKZHTk3p6QcwubbaLC2eMHsde6bKsXFVB1qg0iPUmAJdleTqdHu7uvvrqcxmLLgkTMy99BSadEoxjuFhvmJkyCLsDEcUIpR/H43G4uiruHnzO2RW674e2LZ+e9i9fXL8d+qqq3n33/vb2tqmry8vLHD3lMUYqHFprUxgXKqcr7DiOgKSUOp1OSitjzDj2Dw8PKaW/+Zu/CSF89dVXx1Nf183+2H35s5fdOFRtkxnE4vDHrTu5+LJCli2DWiFo4o8W7bIX5v97trrOjEpo5tgxc5rJdkScxJQxxZiIiEKmkJKIp0TKYtI1pJhzjkHUxSD4LIYZ/eCl1RljDpM2MAM
oQkQGAn6mYUyvTU4AnAPWtJu+++47WJqc/5KANx2CrABIyL3SqmSevC1hxpSnCCGkcdRKjW1JNlGKFJOorYkqL/BZHT3rSsnVaIl5s/jNMoOVsHfOxJrsKKYRXabpkBHw58KRQkWTLzWqs6hH0hrCSYpp2vZKaWPkiE8pSZKeKSfKVVGs160Pw8OHu6ZpvvzJF3d3769vruq6/uu//tVf/NVftJv1/3v8Lx8+fBjHsaqq1Wp1cXWlrSHgU9/ZsmjWq0j59PioOF9cXb767HVdl0IhjzECZaMwpBhCEJUjJiEriA4WIvKq3Tw8PFR1kXPuOnj1qqmqYhgmJcDVqri5uTmeemvt09NT26zV5GuRiqKIIQmTt+s64TlY98wwg3Ms1myvtYysZxNR0Foj5RCCMlNdbpRKOS8jcC1ECIXGqMw0ep8yW6crBz5ABlAESoFCQISceRxDIGDOTV1VhQat5YgvimL21NWlmzzPvPeFc0JABAErnkkt0+wRsbyjT8Lej8PJec8wMyg1hT45NDlN0IYF2AMKxV4IzmlzOA9Dl6GgQsXIzKA0atCo1MfsIGUMi03XWXqBiJjTJ/nH8uKfn25OVrTWkQgVzoLuU6EoTU5QKPWcjAQQgXESbzt/+5PfJbOEdETU1sgIPFFOkDHxGEMIIfjkUyZQxrjhcGhr8Adoa7i+unj54joMJ9/v7MWL+4d9TNBcrd69e3DOHU7j5asLGkbnHHGMozdWoYKnJ9/yoSna7Waltd7tDuuWxyAmIXocR6OdK8rggzGm78eysG1TPT7drdraGMMxi/SP1eicoxwE36QNpjAWVtelG4ahLGqtbeEqrXUGQkSKGZCIU12Xq6bqR/JDnyI0rXvz+lXXPa7X67fff3t9eTX6SERVVU3L/nl2y9banCMApEQxwDCGqm5TGsvSDMFbZ5XRXTfWdRMpxnx89dnrw+m3Kebc+3GMCvLT09PFtr64uKrrcv+wv7u709coWvan45gTAIDWmBElVbXaNHV93B+U0UygUV1dXxdFcTgcjvu9Rvzhhw//6l99td5cvH37VjvXri4KVxEna4p+9AY1AAq1VJ1nRep5UcEEZkk48bsyggZE1s/rc7GGZuZzyCbh8xabbQiIWUSiZ83NnFOiMYaYKDHFmZAQc45ZkJnU+5xzTiTFIvkx9D4En0IQbCelRDlnyjJNVKStAiBYaiRkKclmMM4SU+Qhx8gfDHvn2fF55GPRpXn+vsgTo9EaQClxdcBEjJQhZ0wpRXKJZqzqNMT4SAWAZ0DcJ8k4z5x/gNnn6EeJ6vmrlbLAWhF4/KSUPI93PNUKZ8cNEZFSGqbKZhzHcRxTSr/4xS9yzo9PJ2ttUVg5+q9ub77+p9+UZfn48FDXtdJ4Oh2cMz//+U/vf/sokp43NzeuLPbHwxj81e1NYqqtdc4RcMwJEWeS6ShVXc4ZGYR45PvhnDB3Xi7MWsOm6wZrYbPZEJHWmHMyVr1583lRFA+PO60tghbwp1IqhLGeNRqcc/v9fhiGsiy1KZbr4wyJXMLektmdn8I4k/msttbaLAhOBcBMlBQqpdEVNgWvAIkhBJ9TtrZc1WWIo7j5SAhXE+ReuN6UUkoKgx8BqHJFWbpzJREpwSc16o9XzvIiEXFy6ABYjqrzi/y4wfC8h+cZhuJnwQRUSuxSmDlTliyJFShQWVJ+JFDiRTvHnMWrj2a8+Dw1PCs46JOab9lrOH/nk8h33ps9D+fnyfjy+5+gRper8B8Y1UutR7N573K4p5SwsAzEnAk4MaWYpWQ/HruLixXx8epy1dalQ9x3e4uMujgOtLooMmIgMAw+gx9jpbRxBXDOIVZlUdfd0z0UMYbxRKk1WlWla139w9v7MHpcMWdixdbafjhVdXs6jcw5Rl/XVVUV0XddYCaoHIhCBSo2RqGavGerqhLvgqIolDLOOUaDnJVSmXNh3dAFYSOcTvch+rqG9bqtm+J+5733T09DeB2qqqpevVJKSYP
9TLU5F1Xdn7zWeujH+ejSfowXFxfOYWFNjFH0carKMLPWerVa3T8emYO1ZndIxo7/8A//8G/+4s+1Vi9f3b57+6Hv+7IsjFFaAzKg4nEcUwjeiy6uquv63eHdar3VGqtm9fnrz0IMD49PIte8XpdffPFF3/faOQb16rPXx767uLzpx5AiacPLp39ewxk2Z2fpcqLSVGIKRHme4S2NsXnXPC+hDM+Ldgl7EuqIWNCbibKP2cfoY06URWjexxRSTJF8ijnnPk57POYUQw5BoOIkdFnBv4hzNdOkXplZARKA4rMe5I9TxvNv/jPVHv8IzCmHAlECvfwCAAsYDAAQBA8xNT2FeyEdTvHbQmKc1MvUc/BDRFGmm0uuKX/lCT0kd3lysUDkJdstdLGoWElP0hmlNeYUAMQqXS0niMwzJkLVzL7KDAoxhQAA2loiijGO45iBXVWKPHzdlH3f9/1xfzgg0Ha7bZrqV7/61U9/+tPffP3bVbN++fLlX//1X//VX/1Vu169+fKLcRydc904nIaeFZZ1FXMiYGX0Zrut6lrCHjGfjnsiAs4KOaY4+nHsT+M4xuRzTALShUx6Po/2+6NSpjsNfQ83N1Vdl4+7p7ZZH5+6qqpevXr1+LgPIRAF55z3IcY4ScKnJPtQokIQ7++kz8ujJRsAfkYzwqwLpZQSG7gY40wD04EiABitEyECAZPWaIz1Y59yYAAijjm6qq0bve9GldEYo5Ugyp6LKOlNWYXiaDgMjXNOTJQkhZMYJvhV4Tacx7zlNM85nceMT356vgE+CoSLaPzcxpkkmBcMy5yFGKfP443cLgXIrAXaiUpc74CAGFCBPn8xPFN/eK4gP9mTqKa+K569C2aeDGmXwDmpItLUfNUKAEgk/hQu40BpbM41HwB+ZKUkdR6e7fTJjHAWkco5a1OIbLQo8sQsLq2UEiBqEe+0WlmjKqvLdvvN6CPD7YuXX799AESZBR263tYYUqqtNca4sl6tWvV4Iubg/fG4L4u2Ktz6Yvv+7j6EKc+IPlinEVFMgowxXXcsnZWEnQGMhboGa7XgLOq6lqNHa9W2DTOlFAtlKbECHRNnIsVKKAGUuCrsMPSHw269bteXl130v/nNPzqWWQMwc13XTunT6VQ7i/MYBRFTzkVRHPdZKTWGqK2xRnvvh+50e/va6YicvB/KevV02DPWKaX7+/v1xXZMNAZAlQubhoF2T/Dw8qFta+dc27YSkoqiyHUNGKzFvu91SkwgQVSk5wUcUFVVXdfvvv7t999+f/PituuOr168sFrf7/ZN06zWW4Wm70/rTTbGWPtpujN3uYBSVkrJ1A8UMoNiNbkwILJiYKWyYWJUigFYn4W9M6JCnscNNOmHTQeI8NFTTilSzMmH5GPwMfuYYspjDCHmGKMXM6GUPBnhVo0xiDLnfEExKkcGZsnk9IR5ZJCThM+rusUd+nl5AwBAzAR/vNr7cakHsIQ3AGKl5tp2yoUBQG4ZpYwxJ5N0zjmmFDNTBqHmT/uWn8uv8zPoubn8Edxc8GZqsueDqZsBoKUgWWq4nHOghMjWnNOWn+Or0s9whnkyNFV4aqaiyC6qqqooig8fPrx586ZtW6VUVbm2bfe7x8fHxzdv3sj5O479ZrU2VmdKv/nNbz774s3Nyxc//PDDu3fvmPnq6goUPjw8SN8GlWrb9uLiYtY2PC3edTI8E9NLmX8s20wsH+UN9n3vnOu6TisQX0AAUBpSSqvVhpmfnp7G0QdPl5eXaTYYY+a+76uyFlG0paUpMDZpKwm3YQp7Z9hCnv0WtNbEYtadqqqQARtRIsqEjGi0Rpk3CNgtpWCcNQZ9ZERwzhgLhMYYAyzqD2JrAkDAjOPgS2usLYiCLP0YY2rt4kkkkFSt9WLyTvhcEE95nMiM5Qk2LW/nvKw/L/eXhXdOe2d+vvNyOMy366N6UWJe1qwBANmAycKrUaBnzz6eCeYfhb05uC7XOS/CnrtPS3Bi5tkdHuYh3/k
2EbQNz3BTSQdFmROmau8jSbPz/HfZC/JXE+3vTBxEKHHyOymlcQzjEENI280qpeQ9hBDqsrAaq9IZjffvd2DA2GLwQVtHgNroTJkIh8Gv1pUpnHG2adeFOwmpyY9D06yUnnyP27rUqJw2hzGqcSzLMufYtm1Z6OPxmHPmHAHYKGgrbNvGORejd9pUqyrn5L23hWuaRujM4lBNCmKKPkmqNy2epmmUMsaqAorIcDjs7x/ji42qqmq7bQX2ddjtve/qq0tBPMF8PsiGBQBghUhFUYyDDyFqrYl8jmHpyYdE1urH3e4nP3/livo3X/+w33fXV8XDo1+v4P7+vihe2ULL1mYuNLI2WBR1VekQxlVVMvMwekFy5ZylhXN5fRvC2HWdD6N8dpvNRuDiRVlf3764e3xwRXX38PTy9WcMphuG52YAICIaQfzOC0YOv+W0nNC/skORnrspE3cdYe5tyGUT07J3lrBHRIk45xxiTimNKYcQ+hBjzL3I1oUUYw45xTD1OQcoog9j8MISkZdMs2M2oazzsx4JZFHEXKAb89r+NM2d0sc/3uT8vZHv+SrzG1aANJN8cObtk3j9RQo6xZRtopRSzClmTCTjPRCZmzmwEeK8k2Vygs9NofP9v8Q8wdWBhj4m0QCFZ/w6MWfryuWNZ2Z1NrH/5EyRfrdIeY3BA4DgA4uiQMTSNLZwp9Pp7u6uacrbyWnhMcb4+Zdf/O2v/qZpVjGn777//vPPPvvNb37T/vS2bpu6bRhBPrzRe631F198wQCJKQODVozoU+z7vjJypOYYfRyHHDxPYoyT2I0Sw1Jm0SozpgoheE+bTSntl9VqJSFhvV6/e/dOooVwb611ErGUUl3XNXVrrf3+++9Xq5V8/Oc+NQJfnOPrR6WGbDacsS0LUmAYBqUxE4ScWSlrrSsKY1TwEQBypkLrwro++ZyzGOIlAmZUqLUm5KyVDMFIBA9jdMV8YMnOkcGGZLiI6JwrnJNqjJk1PEs2y4cr72LhXZxjEM7jCs89PQKSQ1CuIQsC5hnAUgJqpbTA0ynKbgOeQw4gIAMyEItE7iRINvXEDBHNeSguvHjhoj1nYM8ZXvqkb/lJY/Y82imxiTjDa+ACSKHM0pIFkOxYnR0BGZ63MM/pgjEGtVoSU1Yo2mY5TeaUsgJ9pJiz1vpwOhQFNE213a5TCkSp78dj56u62Z86QB1S0lqnlLeVA6R+HPTVquQalKrruqrUEImziIQV/Q8fUnogoqvry5i4rutu3HOmtm72h8fLy0vn1OPjDp4FmwAAnBZHISpLe3GxfXp46LrusrRlYWP0iFwUFYJSiEQhZ2KGnMkVTiDBRKAUpjA+Hk/HUyxLkDbMYecRsSiKv//hh5ubC2Y2xnKM8rlIqipSt4hl8KO2DrSqqkqSDzk99se+qqrTQ7+9urzfvTseu6vrF+v16X5/DCEYA1WFcp2mrMZx1AzWakbu+x7A1XVjjBImT98NztgcfGGsRhUpVVW13+9l459Op9vbW2MMUW7X13Vdy92pqurU+67rrCuXFBYAJkeFaZCUAeH32QAwACNrBGDMIL5hiInOyhJ8Xp+Jz+THzsKeaIlJ2BtiCiH0PsSYj30fpPib6jwKKeZEPaecUogxTwxcJcuVzgjcgLRg2RQgMAPSs1jo3L+Rk5yZkaeIiLPP6+/Tg/448v0Lvj8dE+cEo+fIn6cR5XQjMixTq4+6TD96/JHG1Pm2X742xhRFUZal4AD/eCA/D3vyEDhlCEHCwPJ0wle9vr5+/fp127YpJQDabrei6fXVV18VRTGO/XrTWmtvbm7evn1rjPmzP/uzX/7yl2VZvnv3bhiGr776ar1et21b17VAT6fFwVPmLqgWKbzU7KG6vEgi8t7v9/u7uyDGkjmD6GqO47her1NKdV03TfPhwwe5J1ISlWW53B/vvcyZjscjzxoiy6hsyfjOh2HLTZDww7Oa1/Ln3nvxmg8BMkVh2ol6i2A1lVLWaQCgPGk
Gzjk4ai1iwYVzzlpTVZU8y7JmJMgNw0CzmZxE3CUS5+dZMS2H9fLi89ljGVKexxL+fazE5f0u11mgPcYY6aw+hyL1PAkDrWaOw0fOCT/usi7fWX7zk41z/mvLj84Hvcv3z3EWZzMXVEoR/DMP/vgheY8U08vvLHV2CMGP0Xvvx0nK9cOH3dMjrVbV7dX1drvNKTJzyoEIqqraH47OFYlyzpwBlHHSS7C2KMpSa12WpatKAMgZnDZFUZxO/sPdQ4ywbluF2LatVaiUKstyGPJUW+cpPAOA9zAMk2q5MMrrus45j2OUD0uCU+EqY5zwVmGeelprv/zyS0T88OHdfr9nzgoIAIoCBFSy23UyIDwcuqZppLw7R34xswwOlFJjYFmrq9VKXqfwTcUblpnX6zUifP/9933fv379elO745HXa9X3XFfN09OTPFfTNG3bWmuHgfd733XHcRwPh4P3/ngUcg1vNpuLy8315ZW1VsIeIt7d3V1eXi4NqqZZ7ff7m5sXxpj1en3/8JBzLj5+yDkpBPzzhxwCP4aDnbfT0qwWvaSny9dnbnhJ7k+YH6Iz5X2UryWDkubWOIZxHP0o3/ejjylNECtmTkznZMHnts0k+Ddp6QI+x/U/ErAmzpwtqx8PP/7YblFESIAIqAA0glZgFCBwBk7MkUCsl5BAJ9I14qq9XNVbjQYTFsYWrlCsyrJEUKAYlZI2swDAFWiFWmCXShlrnEJLmYFAISpWwBlyRAqKs8akdYuzX0CKcej76D0wKGTKxClTTixDDyLKPAG5mVJM0twrrFnUeDmT6LEaY5ghhAgqgsqMlCiOMY4xiG5RVa+1LYwtjStOve9Hn4mub2+tq0BD4jSEoRtOjFQ1JSOVha2qYtM2pbOUQvQj5FQYnVMIwff9cDqd+tGHmFJmYlVUzdOuJ1Cbi5vB8z/+9t3g4c1Xnz8ddlVdKc3XNzfbi+uUgQj7zv/0Jz/75nffGK2NxtNhpzBtN3UMp83abtarQ7dj4D6ORVUPId4/7bUrQk6gVFEX69VKa9UPJ84ZQ05+PJ16Zi6bjbLlofeHbkysiHXvhzD6wuraGsWDyZ7SKsXISIiAJtoKQdEphHK97rMaSY+RmWhVu8Zl8KH3oJGrQhdWE3mlstUcYioKXm9b5+wYRmNM1TRaFymz06moStQKja7KEpXKTK4sLrYXSqlMOeZEzJlp4sDFgEpZ55TRBCzmxFprINai2gogHl9G68I6gimPBGBk1ghWKaeUItLMGkEr1MJzYiLOhWKjwCAoIKA8OYhJVoeglQKFGTgSJSJGpJSJpYk75Z4TUJwyAANkopRj4JwQSCMYrZmYmJYULTOlnJOMvrUCrcgoNoq0Iq2igQA5IqHTRV26qgCNkZICBiagDJQVkQaY/kNQwDJdzEwMCMZp60xZJoaYE8Fkh5jjmFNY1dl3x8eHnR/pm9/dhVG9e3c/nPrDLv/sC+NU+NlPXm5a97R7rJtNyPgP37+7ffnZN++fHjrKtgW7yilHP77Y1Hw6/B9/+Qb6R6uCU3HonrojKwPtaktaW2vH5BHh5I/MebNtgHNp9dP9/Vevb8bjcTycGme1NZv1ard7SB7aCjYrfX1Z5/F4eVnn0L/74XdNpS7X6xx87VwYeteYkH1Z1WGIvg/hGI6H/dXFFWhu1803b7/94e3D9avbu/c7h6pUzU0Nx4eHi+36i69+9vC0H3zwKTln6tLeXK7ffvO7uii6flhtrh8P4W43/Pp+wAJuXmwuVk0aj+F02rSbsljd3Z9YNftD3F6+7E6npioe3nUq3//rX7zxw4PRwQ98dekOh269XocQy8qdjruLixWlMfp+25ScuXWtqc1pGDJmMHi/P9TrzRgTK2uLArVhNMYVV7cvM0NRtq5sqptXpqra7YWyBlER57KwlINTSiNjJk4p+jH6MQYfvY+QABkVE+SQUkxRhBcyy4IHBqBJkgiJQfGInIAjcCSKOflEKeeYiVLKPqeYcyQ
KKQ0hDT6kmEPIQ4xjhCHywdNjF5+6vB/55OHo1Wmg48idhzFgyHpMlEmea/oPWCFoYMWEwMgETIigFCsNGjIBAzIiC9b/46xuIrkyLP8BA/K/lLf3L6mcflyf8Yxb01pXZVmYjKil90RECCACHzRrKy+91/P25vkkn+m8YcXM3B97caFbnlE9u0KDMoIbF5MXYeA+gxWX1B5m1IZku1P7SKB685+XZam11VoXxmptBVWhUSGqvh/6vgeAGOMwREQUQ5/VajWOIwDUdV3X9WTyN5cskj8mpGEYhMzHjOKiRwS/+aevr69vyrJ8fHzcPR2228a5chgGKThubm6mHqNSKaXPPvtsmHv3y8Wl6lrGVOcua/IyjHFLYaTwGSm6FLvn9z+EMDsETSmzFAHjoIjIGGTkSBBj1EaJFG8IgTg6V4orW0rUNAB7EVNPIjloUGmNRkOM0WrDWtOZ25E8y1KTnRegolAlpfN5xaMoL3n9MldLKVmBsp3N2HgWQ1iu+UlTfXn753djucJz11TDWSn2LHso2GFJQnGaSUx6tjA3UQU7kmeBbyKSLgLA+TASMvOyLJccdX7XqFGhVkbp8/4n5N9f781vVlQFUWmtrZVSjJkFsyeLgY0BgP1+NwxRKRXjmFLq+uOUMiPknC+vVnIfZBIm+3feUxoygZ5q5RDCZVsOg3dFwQqHkF6+fPmw/14bpbWuirKzY+WKcewgZz92ifJ6vYG5t79arWIIp9Mpg7iYodZsDEiF6larYRgKa8uyKIpCLE3kbhhjJhF1wW1NdFKq61pkLZ0DrXXTNCHkcQjMk3gpc5bPiM5k0KWTdHl5Ldr3wxCJ4GK77roOL9fH47G9XAunlplTot1uz2jbdg1wYgZjzN3d3e3t7el0ury0ALBa1d9+u7u8MJvNZ03TPD4+rqpivV757tS0JQBID5MIjHbOcUrJzhvq6empG/zNzc3949OrV6+aemWM0WUpB9E4jimkZ3SuRQBYmm3Lelg2149quzMUN57vgonPMyFJCDNkZsyciCAtTc7EUvNREsRmCjGPMY9jFIz8setzhmnWSswzR/afDTp/KNz8Nz3+m8PesgLOb9OPX8FylAw+dKdhXIV1U4FWPgbsGcFy484knz76w3P2tHxTKaWVzTkTy6TKTK47OQPo8xMKFqg3ZZE3lDY1fvosODtFUE4KABhyURSuKOUjX17M8dT1/Sg4IKm1o3ZTT9WVulTWumEYjoeuP3XHQzf0aVlYq9UEOSmKYtOujDFy+ErDUE66t2/fSRe7KIqcOcY4DsH7KM2WnPPDw0PfxaurLSI/PDxur6/EsUgWt1JKpPnu7++X80XGErL9Fu8CRFwcixAxxlgUFgBy4hijVhNQ8JOY91EjbM5grLVKqRiy9ymloLW2hSOgFHzOWWnU2vSHLvgcEihl1us65UCUNpuNeb8PSaIjznZRytqpiYeKNS1BiLVWy6BuyZ9gDntFUThrF2L11KbB5+mXwFs4U4xRwbMI77JciYjPhKeXf89D4Pk6BIAsKi1nrUs6o7Qj/GgKPpkZESIKdBjPPoVJe1NP4E9ERC1QTpaYBgDAyiBqa3Dm2MEZQkcjWq1BK4MKGJAmJE6CSVsVUTozKOw8wokLJNxK6WwaY3yU2e1zH1he5LEb/Ji11l3XhRCenp6YEDkXBSgFr169VGoSQwDE0fvFAIgZgIX1iIg4DENxuRlj2NQlUdY53764eX93/3SI0n6XZJH3HQD0fRr8abu5pJybpmEm2QvEDJxjDEpB4cA5kO5lUVgfhiUszZNgVZalSNUTJRH7BQBZvXVd931PRHVdImLbtl037p76nF1ZNWVVSaM7pZCNlaxRcj7vfVU3fd+jVoMHYGjbOvtjjPFwiD/7vDbG9X0vfGrv/cPDQ7Nab7fb9frbEOJvv/7Nz3/+cxmXyCvJeQKVtG37/Xff1O6qaZpuf5zptJgCAYFWqrROXpU4iB2PxzGksq4O33z7RVGUTa2
UypLKZ05RfOoIWSkFI8vnq2gq4QAAARVzJEriQKfnmRcRCT8PZoofLzRoga4gMHMGpgwZmRljZiKOTEQg/LyQ4tLt7H0eYxxCPvb+2PthTKeuSwSZpZl5TkP65wMQnE3o/1vjlzz+lGrvxzHvPF/+5MTMOR+7fn88rNvKWssMMeYQJiEnhI9mEs/Hx0eiw9Ow6hxSv5QpxjwTGM7PR2c0KlAzMk0MWolI5IaXBxFlyAAg1szzMGMiimmtUa0XW9qccww5cS/KUEqpwjkE3dSr1eoURnF4R/G3E9jhZrMRoT+njVJKo5IJloA2RaJTNmrOHEI39D7GjIhv3ry5v394eHjKObdtNRVPBDLClB0uKqsS+UTMYmYXqLIsF94CACCqnEOKo7WlDCpijKLkKP13ZzWCFvGd53pl9roTzEtKWT6I8wmQtRaQTOEyU+AIsyjM4ZCtBSIYx3GzWRVFkYmrqqoqn49jJpC8RCErZKuVUspY5bThMzkJqWU/iUZqVq6TT+SsKppmVMs6VJMAKS1w/GkedkahA/2pP98n075PttbM3vvUTmEahwB/vIeZp6HM9IJhgv4tpWvmuRCRoQMBg8JnmsHshzfFe/U8c5Vn0YAalWhwyFv9xLR9uZk4SwYrRKW1uD3h/MvzBzr798aYYxQWh0Qx4Xp2XVeWZUqhLKEs3fX19Tj2AnESYLOIDFBKiAoSsUryefUexpC0sdqY4TRqZ7XCFzfXP7z/Pueni+srBDZWGYNWm5iiVdD3/fFwuLi40MDzwBirumIijQiGi8JZp0Pw2qrCOhnawZyeArAES2NsTLzglheEVD+cZB4WYzRW9gJIwS16CDHTMAxVacvCtnWZw5BzPoTTTdOejl1Z1mUJWkP04bJpTscDZ9hsNiC6tQh+DM6V3TDudrs3b958/vnt119/6Dt+eHh48eLFN998U1XVbre7ubFKYUqpskZkArUqncOUkiqrMIycQBtIKWnjnDbOOaL0+PhY1FXRaEJYr9e2cKAY9dQB0soIFoaIOJ+nbh8t5ikje2boErAiSgAG+Nx74VlsWlSJCJgZMzMRJCAmjDlThsCZMgjZ3McgbLwQQufjMIY+5K73x2EYfB5DAlAEU1kJzz2Yfz7mfbQr/0Xok/8eYe/8uj+u+c6jIBEpY3Oivh9775vWFs5ajSLnmpAUARFyRgAUyDUxLXNjRJ0zKzHgmIpxPD9otNbM4qAXWdIPZjJK/laauvBRh0xJnYegmQlQJGAQABRqhQZAyTmvlLLWTPUBKGCllQajELK0j7z3zBh9kvnw5eV1jnQ4HLJSKSV52c7YwlWyzcI4ppQF2ylDXSHVvXz5OoSw9DllHxpjuq57+/bd6QQvX66aerXb7QDg6mpbFIWMryXjO51OQofAMxCjc06gJUsjjplDSIOPiBNcc2FMxxhF6hdn8zlBN0wcbdETivE5/qmp22aMKQoDNLV0AEhrTYgppb73/QiXpdUqjgOP43hxuUK0Suntug39mAgUskiwykfpCmOV0npyM2HOErDzDKVZIpmEB1dYeXkSehcNNlAfqWksHfUl8qH40y+L9l+wzs/7nJTzUheqiSvEzzbTGoFZnYW9bAhEzyITAjDRpLOHoilDwq/XWhuttDUpRliejnmxeue5u4kz/VyuL5yDyQtFWrZAzJOyA55lldPrB9ZaWWvRWECcR97PilNTHyJOH7rWNsa+Ow37/T6GYBTKJNxqWLe1cyYmJABlzHDqxxitrboQUwJtEICAJ3YuMRxPfVGtlCYfH9ft9tD1N9dXCr6PHuLorbVjjFVVoTIaBo2QU9gfQtOE9Xpd1E09jsfjsamrcexTIgNQV2VdlNEHA9rZEogk7Ml7GYMviiKH4FwhWgiSOzabVkKdALJ06Y7DyGRCCGJ0oZRqmibk3PfD2Of6lSvLUoKNMQYgO1f6sK9W26pu6rI7HfZ/9uZnjz/8rq6hruux73Kisqh//f592bq6XZ36/nQ6XV9
fPjzcDQd+enp68eLF1fXl48PTOPLVtds9dptNb+rq+vr67u1bprBp237oAKA/DQqgKooYo3VlURTGmMPh8OHDh1/+5V8WZd378c1Xn5d1lXJGrVIS2TzW80NwH3MqoJaxjgzOJtH5nBiQFQo+mSmD0ktQmZSzYKrwprBHkIFz5sTEDCmnnDhyFlp6FI3NmIcYxhi7fhx86H3qxzT4OPrEjATSa5nx/9MWy793G+Ksgfn8nXkm/yc8/gc2OeVgDTGTpmEMp65bt7YuKq2RIcdMhjljVhn0WUKqUZ1fRL5QaGLszqlOME9HlMJFiRinMYOpiiLnOMmxKSONNFG9Pg/YS63JM79qASPJiELCG6IyRhdFaa3TmhQDom7bteihS1fV2bKqKu+jstZ7L/lv6YpFT1L6in3XyU9hria1gr7vu65LkcqiRtTDMOyeDo+PT4hwe9us12vKIG2Z1WqlrL24uDgej33fPzw8HA6Hly9fim3YwjdvmkY8ulJKxjRS0nnvvY9FEcNZxx9AgnRexnVzXJGNgUvA8N6nEIhosWOWzg+gjjFkwZXkzKxCSH2fYGKYaaGfX1xKwRfXbbN3D3lkjWCMVsgaQVut5/JLnP+Y2fshBK3Pqj04E5QRGpMEITXrJi+RexntKKVQXA5Sfq4OFyGVj6siOCuPzn90viCXIhhnQbIf7088I/6z1sBMNEl3KgBiYgXaGEyZSCWxMWJORJqmkwjmqg5n+U06e+/4TCuc2qRCupA3wGfoU/i44Ht+p1oprXkS02YBfyulUGgYMzIWAGTlPD4+DcMQgq+bUmutBiCGq6tLGTYbYzLD6AMxWmtj12UCjXI8ZkRkYqXhOHDItKoLAmW0S2F/sdlcXJZ+DMB5066Pp25VV2NMSgERFEWBALvdTvDPNzc39/f3xqg4ekqgCxA44jgErRTnnFKyzuI88RXTEqOU7GJZ5wBQ13VRFCGKOZ9mVukYFGrvh6JA55zw2bMPKXhA2G63zqjudCi1RsSmaRhxGLxyMaXc1hXFQbLAVW0ZFQFGYltVd3dwCeHFq4uQ0v39/eXl9vPPP3/4u2/7nu/v7z///PP37z5sNmVKKQQYfV8YuL68eNAwDPlqY+RMIyKloCxLA5MDc875w/3daei11cqqNOSLy0ut9e44ssKiWMUYfRiZQMAK1lqlDD4jtNVygCOimi1nc44IBoCQNQABAoJiSogagBaqHFMCUDKMS0yUOQFRhtlOKOfMIrDiY4wxdin4MQ4+9GMYYvYxpkQxT61SaYIsZzIza/P7E1HEj/oc59/8Q3HqD13nTwl78AeanH8o+J2GcVVXoExIeQhxRVVtnFFxaRQpNIhKmHiIkosbxAm9ioha2XlPaq1lJLuIZmBVOgRSMA+HEIuJ1IwKWSnWGo3SWimNSqNmTrMk73QASNYOoIiAM6VIxBP1kCjXdZMzKwXWFlPPLWURtqYM3kc/CqYmhZAUallqiFgUhTPTnCPGGMZRILzPnkfMOefH+11KSStrKyM8vL7v9/sjAFxeXjbNKqU0+t4Vpq7auq7NzFp9fHwcx1HEybz3MCtrCJDaGHM6nfq+L4tXsu1jjBLRUkpdN2w2mznST4SElCY9SFjs1M+GYdMWQiytk46TvB2jZgPJ6d9MhExQGBiGjAqUgnFMfTeWpSWipi6tZmQwCq1RGtkYpbVKKVGOGcjoymgNkj9m7yguRzCdaadN5inGLDWKHD3Zjwuan2fJOmvtkHqe+6L6Y03nT9qAP/7O8nlJwJCCiojylKg9U3cnZrdCBpxgI1oBzTgzIRERMkBmRoXKaAOWiJABFGZmRi1yGJI5olKoDWoNTKgmUJKe+/kAYACZWJJ0rTRNUXBSl0WlFpLW9FCiKMiQsxSKU8YjrgvImaYxLmcCZj/GlPJwGjRgjr4uSu+9WJO//uzl4/7xur1W2qYQT/0QMymtU0oIYDUCJMVa9E2yhpjgu3cfLjc/LYoqeq8AU/A3lxenUw9
A7aqO34aqacZ9EsteocF4z7LaX92+uLy8NoooJ2OhcFhYYzSKF6bWGOIok36cPTcQtcjxDmMXU9Aaxeddaw0JrLUMIL6HRpsYc1XVdV37EETjUBtsSrjcbnIOXTdU27UMF8Yx7I+HhG7wsanXpameHu5uri50HLuuc8bFkAlDuwJGuH982mzW4zgej8f1qnn5stntuqenp9vb25vba0GmWAuyMoWP1B0P3vu2bVMKVdX0fW9NUdf1er3u+7EbTk9PT69eTfv69vbaOVfU1ckPqKG0VgFzTokIQQQUMk/CzQa1QjFzgblhNhfjzIySo0yMtww466EAMKcJo0XExELrTUw50dzkTDlzyJQzx5RCCD6GlPIYow9x8HH0wSeOifLMumSY9JcBgGee6z8bxv7/Ntv7vfHv9/6UmVMkU5RV01hTIGhjXFFYp40xzmgyRoncnMAaAYB55uFKpoNm5imXME/pRKhNdr6xtXB0JBYicwRQSrV1BYKqR5ZjZFad+AgguhwK85/LS1cAIOAO51wMiTj5MROnvhvDOFIGREwp+2EYRy+9IKuV1jYMAQDEwM8oTbMxenc8Cq5JnjXG2HXdMAw5pqKoyrKOMR6Px+40IOjLy8sYkzC4hbjWNM12u22axpTlw8NDzvl0Oq1Wq5/97Gf7/V6oeEvYk+JPyDHOOWaIITOzddbZkoi6rru4uBBk0LyMNMAzbHK5Pwuhp6oqyIQql8Yi0tj14ziOA7d1BiCtkTPwfPwak4qC9x1oBOcgJRiGIaUKAJxB8eXTCqxGjaiNtkblFIiEXc7GKGakmD9BdZ5DOsdxtNba2a+OmYXRtWQ/y66Qnu04l0Hy+2cU9U9j3rI84OMe/hQ1ppX5DPyhOfKdP56bijKaE7zLov7F4GPQqFChtTYxKZ6hWDwLlBGzHE9aKaMpPc//zsbeoPhZNEtGfEKn+EO7ErVihJwzKWRAhVqbWXlMKZxAKLCwJFMiBL3okpQlhDiGAFcXeHt9dXf31pWWWId+OPbd4GOJSJRg4ksQMmmlGDAmTg6+ffvhL37xM2OLYTg5o/xwqqsihZhzKqxL0TuzVQwaQRk4nU6BoTaglPr+++8v1pvb29uuu7MajEHnjJk7NFpjWZbD2C1ZpjFWa1uWJZMXaElKwr0zZVmKu5m1Os62J9Zo5mnbnroBYMKI1nW9atvd/bvCoYBsjbWnkx9DckTKuqLQL66uvv67v/vXX/3V4cPbruvqq+tE3Hf9F19cHbrxn77u6joWVg9DZ4368z//81//+tchpPfv37958+Yf/uEfYnzWfD+eDherFWXfD/3V9fbwtGs3q34YlFJlU7ftenc4dV03DOMvfvELRujFcmHoLy4uBj8CQMrsjIHKwdQWRyJKslxJfBhksqDFaVhryDnPhBlWygCKk0dC1ASIshABCDKwynGa7RFRoulfZk6ZcuZMOSWamXs5xhhj9jGHmEKmRJSm5YqgEfkMDk0MEwLxDzJOPwlyfzz6/EvD3h/a5z/e9nDW7TlvCv3e75RVdTr2nGK8bJk9pXHoilVjtVmhMwgUo9cKhEQpMyPx/zXaoZk0GIlA9OgAlHNQVZxzjiGllPZ9MMasm1bO7gnFDth1R2OVUYqRfIop6baunHWZA2URLlRENI5B8dQWM1YbJSMiCH7yuzkd+xASAFkL0o6yplBO7/d7YKW1cQ5jjMBKKe1cGcfctu1qtVJK9aeu64bTqQ8hpJCLojSl9t4fDgexX2/bdVPV4xAOh4N4xsqijDF5751zVVUys/gnuMIYq7quyznf3d2tVqsvvvhCeO7S+pCuTtM0Wuunp6e+79frtXOFPJfooIqxQIzxu+++u76+jjFqjdYWk+N8WfnQi/RDURQ+0+PjY9d1zDwMAxIbCwaQOclzOQfa4KZeHYf+adcnhHplATRACJ5XlfEhDR7qEnLmENKLFzf7Y7i5uuj6J+TMFFHhdrM97B4LqxHROBdCKIqiqqphGIzVm3Y
jfTZBpS+Ai3W7yjnjzKbAGYMqUU2sZxZop9B15Rw/X9hEpEEv63mJ9FIT4xkNfNkFk6ipAZF+ECpeIlrmhQAzuHMOM6hAg4JMYDQSo2IFOPiRRaqWhas6PZfTRio/aXUqmAxjxe4KGCbriSXopixkeQCYaDzyFqyRoV1KWepd6UZ2EAQXjcYiIjFk5hQjzuIA/alDRVrrIaUYY9O0v/7bf6iq6j4/FdZoVDGkpobPXr345tvfusI65x4eD6dTd3l18/Dw21KSzgIO/VgoBRqBQCvjKbCGbow+8vriwh/h6fEOkdt683B3f33z6rTfXV1sFELb1gD7nGE4hG0zqXbFGN9+eP/VV1+t25uxP8bQpxhfvrz97tvftvVkHikiFeM4ErBxVuQAq1J3Xff09IRgGbBt15eXl3/3d7/e3lzQJFCHAg0jgpvbq99985vPP/9c+pxP9/dtU2lgIGJWTVVKekCZH556Uyc09vJiHUN/e70ZuuN2u/XjeDh2meHu/rGs17asPvs839/vt+vi9sXlMHTDMFxcXDw8POx2j7e3ty9evPj221NbQwihuNi+f/d209Q556ury/1+f3mx6UafAU1R3ty8eHh4uL+/J8DPP3vjnNsfDzcvbouiqNcrRPzyyzfDMAwdIKuYQ86MM5h5DLJmcHI2yBmRp5wWlDaWNYsxUM4ZMiBqJe5dzNKZmIfgGdHknEOSpiYQUSQmIp9ySuSz5PZZNClzzow6xr7rujEQaINKAAdkjMmJaR5hACJDRgbKaZnXSPY26yamaUNNW3H6h+APNkV/7/enAdOfECr/eGn5SRBOmTWqxJAT5ykRMDmb4FNltXEOWTNJXpm0FgyLIgWLRZac18vFl+Aqv7Be1zR9Yjx1PjMRwKtXr7quG4eT0tC267J0yBRCYJME4LdEcav0Al1ZLouIMt/OmRFzSkwUck7j6MMwCgIAgIzWbdNM+LGUUkoXFxeiAR9j7E9dnA+UyhXH4/FuvxfIpeg4MPOH9/dE7P04KyclqdJub2+lpEPE9Xq9Xq+l+DPGjuPYdZ3cZIlPiCg8QuecZA/Ouc1ms9lsmGfrRQBh2g2DHwYKwZ9OJ85JPIwog4Cbu66TDg/MruU8cy2QOGfOKsuqrXWtAZm0oANQQWHRGBOCAMNEfw4VMEzAmYSIhcXCqdKBRrAKjVVGgXPi9CTqfywYpWmoaJ/N8xac6iKozTPi9Mdd/qUulNG3lHeCvpEGqcDH4axtIr9/Luny427nMkR//pGop+LZUBqf94g0FKZZPDHPMBPhXxJpoOeZpUKMgErSK5yoBmoeUk6biz5q+CxKCxOMeUZIAwAoVKwYUWmNWoNSjOisQ0RGjbM2Gp6LU88DxeVW9IeOCIYwVFUVQqjrmgi2F7qtK2NUzGCt0VpnppgZtOr7PqVkNRQaAjOlmAGRWQFkUjFB71OlNSI6o5GjB1BAIrhxdXH5w4cHawqlIDMoBQtWCwAeHx9TSv/+3/ykrZtj8s4YjUoBB+9L50R9vyzLw/60Xq9l3wGAc44NGmPqanU8dTc3Nykl51xKAREBydqCORNRXQsiDJg5xiiGzC9ub1NKRqECFj3oEMKp7wjAp1zVK84+x+AsIoNCZKWOx67vx74fExu0pVKKERITIrZt3XWd4AZEU+n29rauoW6gKCwAVVXh/VCWZQjBGAwhJMqbi22zasfgT32fKNfN6vLmGhQ2TWOU7roTDCoRCSD85fVlCKHr4HQ6JSZgjaiNopQIFAqMHVGJzgJxArQfj35Zmhc0qTwi8+TDJ2dIjAFBT6khUwyJso8xy1Q1Acs5HILoaqaH3otYjBnT/tSdTh0xgDLj0UuDQ6APWgGRAmY846fCPLiRXf8nhKo/9PhTIC1/6Jv4+6AuiYCRU6SYc0jZp+xTDjF2XVc5VdflBJ8AVAwaMBEyICj582luopQSIMbEQoNnJJ7WFgGTfDCZmKaS/OnxUTYMQx5O3dj12qDW2lkDoCbO/5l
vmaxFppRSnmEL8ozkvc85G+1QMYJWaBJTXTdKKaO0iC7GGFNIOedVXUt1Mo5jDIkIlDLM3B1PXTeMQwCAqizrqkYBPaoxi2V5ZkRdVVXbrnPOr1692u12Irl5dXWllHp6enp6enr74UH25IsXL9br9TAMC8q8aRpp9AkyRTSvU0ox5hhjTsQKvffe+xShKo18QHL686xAJnmuEv937xewT0pJAxoULS6ti0Jpa5U+Hf0wDN5nY6CsazSO2TOhUkwZmEEkTrxPE6HQ2qau2gaMAqOxsEYjNHU1ep9SLq0zSFZrp43wDgWbijPoVAZ4UrN+MlTGM6DTeb9hanXOU8AF2rD00s+ztOW4P9foOocACP5NsFBCM2Dkpck5YTtRVIJkgIEySwFGQIWTWoQyonMGGumspQwIqJFhknQ5M0xA6TYBE9ICIBV5RQDIzMRIxKCkb68BNSAoxNkqQystru5CHlTAKouDGCMCcqbzUnjKtYkPh1PO3PeDtY4hi1bMqqmrugRKIYwAkIH7vj/1Q0w5jJ6ZXaFrBBVhTAwMoFCDisxDzN0wXtSt1rqwOgavEay13g+uKG5urr759vuqbqwD3wHMLH5J43LO9/f3CF9eXmxy7qvCECcAGsehKIqHxzux2ySiuq7v7x+IWaGW0lspcM4A8sXFZnc65JxjBBlR19UkZLjdbvu+Fxn2vu+Lokgxvn71QlJnRIzeO+dGH/b7o1Iw+nB9+zqc3iU/rNpabGHQuHfv3p/60I8Q8livrTFO656IQhjLVTvGqLXebren0+l0OpSlu7h0IuMnWy/GuF41vuuMsTHGSPDy9Yu6XR/6oR+9Nm61Wr18+VIo/BKbgajvu+54WK1Wx/tjVVVFVTa3V4xKRid93yEqmByQQSHhjFnlkJUcvQAiuy5UT+l10+RSA8yc57WRco4+EUEGpswZCAB2ux0R+Jxy5kQ5JQF+p6IovPf9MPqUAaEoiphySLRarRa4HLDIvBAQDTHIaXw+bsA/DF2BP6nP+SfO9j5JsZdRyicxj5mBMQNn5pw4xcVsXo9jGIahKExZWGuUtVbIW5k/okAs/0o5OEmc8POhlsI8gSdAkBmhQUQ/9lVVVVWBCDn6nKN12jnHho0xWj+3auUh6P/4LIE6hT2lRKBqwnQ4q7KVXMwY44C47/vD4RBjrgpX1+0yd5lGmykNwxBC6I8n59zt7S3OuEoJ4UJrVco4B8aYqqpkePM3f/M3OefVanV9fS2WC/v9fr/fPzzsr642P/nJT169ehVjFGDLarUSPczFyUGGMYJYkRjsvWelx1GEp6EoipxYKzbGSBohMUC6zYxKnG/nDCCHEJqyquu6LSuAnEPM5IFFRDsygyuUtTZOMcMCJxk5aa0AKEbyPqSUCmubsli3ZYijNcpZw0BFUSQShUM0RgGA0uCcUPvHJUqJH6HctyXsfVKZGaPhk4n3WX9egmJOaUF1Gu3gvHT7A7O954V9LgaNs1om8iR6+3GYBHHkIWHxAACAAElEQVQskT9UiISsANWMqCJmmLyaNSArVIDGOCQmnJxTpgcxIrJilMOInl2dJ1lC4fjPQAU0GrSa8mYriC4lmfyEA39+U6iVBoB0Lg0BFCWJnCvslFIKedW0lELlwBlVFjpSjjGKp/ng4+jjQgcxCNYoRgUKoxTPgCkHn+k0BEkajMaYUrEq1m1z9/BwcXFVuUJUIpuySNETAwDFOLmjyBt/eni8ubpAvgLOcRwUIBArpeLohZawWq1EdVEWMyJ5PwqDTfLCvu9j9LqsAEhaL5IQN035ww/va4NKqa7ryqrIOa7X62/u37WVtVpLWDoO/tCdjDWR8uZi+8PjbzilqnTaKGt1ZjjsTyEBAnRDKldonSnLQivuus5oVW8ud7vdyxevd7tdWZa/+c0Pb7646k/dOI6rqkwpNE2tFFZVAUA5ZzC2ahvQqh8GpfVqu6lXbVmWqc/OObEPq+saGfzog3Uq8Uhh7A+IWjtbluV
206zX7fF4zMApUkgRIGdmpbRWSNLrwkVrlJgz8+RpMJnU8HObjYhJVB8zZ2AiENtX4TuoyahEeueZiGKO3vsQPbIuXFE4PcYEo49+jDGnNNm/OKOctRpN0dTnidfzDPsPVHv830hhmIFgf2rMgz8wVDwPhAAAqAGIGBNDEp/ALBgtE0Pu+xE467o08wmCKFm2IoLFwxYApvjPuHjNSDNGSFDzCEQppYxGpVR7dRVCGLveWF3XZVGsKMeUEk7TGnmWSbFCCIIx5hSJiGUWIs1DhUaGtM+BHIAZ+35k8jmlEEJK5GxR103TtHEcZfgkj0W3vq3rsiydsSGE4/G43+9lEnPaj0IsrcpCpE92/U5AmKvV6vLysq7rruvevXv3+PgYQvhX/+qn6/X65uYm5yzfbNt2vV4LCYlnHKY0x2OMBrX3vuu6oQ+EyppK+Elaa++9Vs89TKXnKganWylFpIxGAKCqqu12u6rqGMf+ePJdF0efcgRg68A5g4gxxJyFMx5lv8xz9UkwqV6ZsrCb1Wq/T4UxpTM5RVcWHeXgB+ccERNF55zWGjEs2Ao1mwBPQtXqmYSgzh7ni5Dn2k3+SoqeqT09dw60sx81JOfx3ifZ2yfrHGe7HwDAM5kJFkzwuVU9TjJgcmzI3WW5w3i2S6RuY4FfgppnGErGK2rW83wm6T6Te2DS1AfFqLVW1og9OhqttDbGqDOJhkSJmQlm0xlUMkFUgp6U9ALmVJJZZu3MvHvaXVxsdo8P7coQ5bJ05EelwVprnEWtQCtlHGrNzCllylkr54yRApkBYkqRcfCBUSOA1kgplkZfXFz85usfmLMP48315b4bm6byMRMr0CrnkRhjkoyZ3/3w3e315fX1dXfaDcMAAEVRcM7W2ratd8fTq1evxnFcprPyRVEUTHxzc3MuNC9LS1JeskgZY/TNRVOWZR/9OI5NVSsGztnqYlaiZkSMIRdF5UFXdcspFtY456zSWutIcQgEqFCD7yDnXGDRNI1WHPp9fzy0V7c5Z+K0Wjd+jIfDRJDvutEY06XUtq0fu01TPz09cI5usyaNYwyEsNpumlXrnIs5CRP/cNgR0c3NjSmcNYpyhMicUsgppaStSXVbNrUxZrOuiZGIxhjGMYzepxRjDMAVM+uZpDetsanHg4qRATLQBPMDGKNHRGu1czoReR/HFL2PZdOkSClxioI6HH0Yg08h47pZ3dzcEMOhH/ohOueKUnTncwijVBo5xRg8ZFJVuZQiUytu4l38AbG9f85N4fc+/sQm5yex7Y80ORGVqLfliS/MiTKRMmYSqIweo43JWCJIKVXNRsLSGZrgTEdjVrGSSzGziCGlDDmnnAMTLqqFMcY4jjzwMHRFUZTOGqtSTACg8fmIlF9eijzJcJlZdFvMrIY1cw+UTH4pg/djTsk5t1lvi6JgomEYTvu9sNFjjCIiJTwhp804jrvHJ1FUkaeTFr/cN7HRWogBP/vZzy4vL6+vr5n57u4u59y2bVEUrmoB4Hg8xhhPp1NZluv1WmiCsp+lz7l0/4go+DSOYwiJUHnvcwKt4eXLl845piR/6H3Uhsdx5JRijMZNXd++7/f7/fHYvX79UoaFTumcg8RUAQIYK7ofOi0mCawlRUNAyqC10jqnlE+n01VbF4VdNfU4nIrSOudOw0kbLIoC8SgKlilFQSjknJPHKV7WtTYWZptANKDOrOyWB53pDU5FITEzC6pWBgnyliWRlImynI9wVvadX/CPF4JTUxSfl9MCtGHmxBkV4pkFEmpYflmfUSamp5tDGz9P6VCCHxKwVjiDNj+BlSmlGACN1tYoCXtKKaOV1udKNBNtGeGjtzCbEOU5ri5Vdc5Zg8qJj/vDzdVlfzpWZUmYlFKoeEECy41KlDUR5xwDBALUkZVCtApVzpyIM2E3+ExARJWx2mDOed02VsM4joDdxcXF7vRD0zTfv92hgqKuqqqsqqrrOlHZPR7j6bD//M2rHNzQjSnE7bpOKRS
FFX5R27bvPryPkWQEXpe1oKmrcrVarQ6HwwwCz0SsNRKJjgwcT3trrdik8IhE9Pr1674/WauLolBAxqhuGIRuC8ZG0tJQXq9bq5Tk5X0/iJkRgPKRcqacc1m5VVM++ENKqe97a61M9f7+7/5xvYZhGDartVJQFM4YUxS27zIiphwg09XlRUiJmcu2Wa3X67aVhXQ8HqW1I0OKUlufh8f3dwa4KCpjlEaFOfmxS3EkwLKuXVmWZbEu6rJ04+j6sYsx++E855uXxKzTBwo4fzQ4L4pC4OUxxgys0FirnXMxs9LglLHWNqsaQAnWorp8lXMeQzp1fTWMMREqo6xzzjEBEWVKIYQw9H3fB++fhnFRxoBFPIFo1rD9cdj7/1W19+Or/JEmJ4FCmUgQpMw5Uc6cMscYC22WPRZjFHG/nBkRlKKzLN4AQAwJQU0SL6Ck7ZZSyimTEONCSCkTkRJsXspVVYl2ZYqjtKoLV3l/VMgk8nJMABCJs1okgzUAx+iFNgAAzpVa65RIpmgCgWHCiWyOWuqSlFJ/Ei+NbtJ2ylkCsDBGT/tD3/cCclFKCeqMma1qvPdddxyGQWtd13XT3F5fgSg1930/DMPpdCqKYrNZ1XX99bfvUDizzDIrbprmcDioyWsQRI1zHKfeoMRgmfYZ6wQdaq27vLxKKQY/yFAhpYQKc87FPOoTF9ynp6cQQl2Xt7e3l5ttWRZxGIdhkLGf1pqZrLWCt5+VICDGmDMrLbIkhKiNBmY4nXpkcs61q+ZwtKUrCmuOx2gQ1qvV8XCw2kTKcp2iLHPOMJiUkmhi6boRZFpKScQC8+wXj7MtKqiP4tZSu0xBSPCZM3Apz75LeAYbOe+xL188/2gyU/4oshLRJNMpXVUp5mSmyM9OfsKQkXQqxwTiBoLPiRoAZJpdxATY8iOgGiulABfnZCJaNgqLdIM1qDUoSR0VKDXPP3hqikojFBUoFA2qqS2fKecMzMTTQAEAulNvjJHlRESrVVPXJnNgzholkE+ksEw8+FjoKAxrLcxXzmidMTYxJOaY86kbYoyWuayKtip3Q9c0ze3tVXc4alNWK6uUKsuSGYIHZWNRVFVVSZcSAEoX7u/vX72+NUZZZ4axf3G7lfRXEMuIGEI4HDrnnPeeqHTOHQ77ly9fC7ZT+iKyEsTuipljTE9PT5v1pfAWSuAQwps3bx7v302CRznJtjKm1NaA0ka5MUar8HJ7gUSInDPvdjtmIGDUGoEQMcboCiu7Eikfj8e6rne7HSIqDau1lv1iLWitBTektQ4hlGVJMV3d3PSnThuzWq2KwhnnpA/88PBARG1dWWujD0lpBRi9v7jaEoEf+64fAaBum2a9ckX19HDnyrpZ12VRo1ZWY1UWzpDV0lbRzBxDjjFGJiICnNJ9QJ5oetIM0AgouXFkhcY5V5XWFgSQMwfRSdBKoZFc6tsPj4+7/bsP97v9IWRiUJmACARrba0tClsUhZEuDPFPfvKTRU9HpjN938tp8/sj0J9EYfgfwtv7MUyAZumHCW5JNAxDaSrEAs8IVUop8f6WqaY8tJ6QuPKvYIgXYcn94bR0LSRn12gQ8XQ6ee/rulyv1xcXF4g4DN3d3V21rZe8e5oXMkhuZa212iw9LrmmjPEk7E2q/8rJhlmtVut2TUS73e5wOORIEs9kE8YY4+wyRURhGGWULTCnEIKANfaH+5xzjF5GEVVVCVL/8mprrRWbPYlbIYxPT08i8FmWpai9iODnMumUUkbSXnneKHWnhMOyko5oWVbDMIzjEMO43tQxRqhKuWytLTPHzOM47vf7w+FgjLm4uLi4uGjrBlX23kvBiojOOaKglCLiGCKzUmgBcoyRALRIdQEBgFYaIAutXmtdFJNTutSUiGi0aBQIPyGklCxPxWvOWW5+5YqFtSaBfCna5lYqGVd8uhSJJUJP3VFEPJNt8ynCGQvifNz7SQL3e7fZ+S/zJFKG57+oJ2OEZ+iNVqCUil6kgRk+Flg
S02Oaf5kQFCDBhIkRXiTAR/klzJptk+efcPvkHFFTM3Oq5FAsAhUqjagFE7q0T3hWgkCeZvCIKMhe770xxodx3bbOgVLOOQcKRCsLZ5lW+fiUUs4BIFACZtYKnHNDiASQEozj6FPURM65qqrefhiA+Obm5rdff7e+nO62Mebyst0fuudCdr7yalUfjvs4eqN0URQhTMmKc+5wOEhIKIoi58PV9fUweDFC8T5fXV0xyOlBVV32YTBWl2UNc//geITXrxqZ6Aul4fJq+7vf/sOL643cZ40qhFAVjTEmEiilJEQ1TdMfd4Ls77qOGJDIFrW1UdsipxBnf1pEDDECgJjEtm379PRUFIXc5JyzKA6WhYsxNE2TQ1yv133f28I1q5ZzJmDRprm6uso5a4Vd1w1dd319fX19rRFjdyyKwq1WVVX5GITpn1LaXlxxjpMCojIoSj1KydnIjDFGhJBzZoFtTxvkGdIi/566Tmu9Xq+LokBhR8QUgi/rNiXfdafj8XTqu2EYhjGklKrtS+fKV69erNeb9w/3T4/HPoxMWFVNSmkYur5nZ7VzrnBGa/2f/tN/EhX+tm0FlLfdbnPOP/zww/83oeqTh9ZlPWlInP3LOJsy/+i/WSp++m/Z+zPXfvrO8oUhZY12RhmNRkNhtXVGaSirCtRkm6K01mCMtYUtd+OARqOxWY4OpRAVEx+PxzT65BOEjJkK1CtXbsp6vWoraw1woVXtbFu60iqnIcXhYtve3ly2TUk5xeC1Um3T0OAdaEMIIRvCxpZNUTllnTIGNOeUfEg+ciKD2irTdR3lTMK9iAGB67rebNeb9VYp5YM/dScfAjHHHH30RSDyIY4h+8iJMDMTgiQ4Pj7uT+/vHu4ed6d+TMoqV8Ycej9k4PXF9vLqsm6bZtVeXF0S8eFwfHx4OhyOu6f97mlPBG2zTiFs1+ucU4phe32pjLrfP+XJS5A2FxuD+O77H9iHy/VGpzxm2u0Pq9W27/vL62ut8Z9+89sXL64O+3ukaAypTE3jLjdrzZRjzAyZuR/6u/sPdx/eMaXPX9/+6z/7KUJOafTdaRh6772QcgjAWek3ajTOFDWoovP5NHhQQJBEJUdhRkCjrQLbrMtmvclMiVhrSDlJxAljp4CS9wZ5XZaQPMbYOpMXORJj7FRhKwSoq0orJTwgPTsMILFxRhYoMxNzyjnmnCgra1jh5FeHQAikEIxGpcSWj4VjpxUDxJxcUYAAmYxGrQQGCQoNKSTkDDkSJUZWWllrnFZWoRYRFCIggkyQCTRnJIJMSKwYlHiDEShURhmjrFFW7MTkrxrtCtTy1gyiFh18hBgjM0XOiXMCysgJmTQa55Sz8p+2ZmKjE6l5iilNK4WiUqQILYBhVpSZpryeEDknD5ytpjh2h+OTRgagh7sPx9GHsd8fH3LqLzZ2tTLO5lVbFmWplB2HdH31uY/47bcfVqvN2x/utFUpBmSoa1eX5dD7EEJbtcPYaWanoCptXeDN5RaRM/Omvtk9PF5eXrz74bur7QooloafHt5fX7b98aiA0uAv1i3HPHQ9ZL4u4822jf74+sXN/ulDdzx98ealUtA2zdu3b19c35au3D8+OW1UBsy0vlh/9913r1/fKKRu3Id4yhSrqnRFMXSxLFo/pjDGb7/+sN6osrRffPXyeNppTS+ut/d3P6zKom0Lyrlqt8c+HkbPbB53O0VEqWss/Oyztu92MYXP3rx5POyO4xhyVFYrxVXJOXQ5+n/9Zz8N3hNBiJyLqq5XRdX8zV//089+9uWmXX/9j29fXa4swsV6ZRS2beOjP3Td+vKyaNvDabdqKmd1TqEsy7apy7Jar9cpU1U3RdVWzaqo1jGjj2yKmqxjW0bWfaAxESlrXemKKqQQUswpGK3KUpeF0Uicg6GEeeA8KvZFAYVFgtEPndZJQWaVcx4TecDkKtuuq6rSAJHzqIAKDQZA5WgI33/33dtvf9jf3UMmDZgGH3zUgKOxd4/3Hz7
cEaXt5eXV1QUDnU4dWMwESStWbiQYEgTlsGxrlTRQppxC2u13T4/7xFy365evPkNl/OhTzEqBQQWUUvBGoeKsOGsZQCIrYDWJbPM8rnzOVSfg23/HEHqeHf++jPgcOYmLxpiYM0Wdbc5ZZaldUkqQibUW/BkQa5Sti0QUfcg+SEdjdXXRVLUxhjOBQhTzzEyff/651UZbI8IonCnmFEYvpvJSCQGwtCiZue9PzMyQl+aqJLxN0/hhHPyoAC8vLzerbdXUzhTH08mP4/5w8IMnYKO0Uqp0RX94WkgRkxJmlGEgvXv37t3dvXPu9sWLqqrun3a/+93vLlZt0zQXFxeXl5cX641UeMfj8fH+QZRWxGFnu91K60OEp4Gy8PeVUpUriOjy9nLoew3YdV2MsbKuqiqD6vHhXlJXaegBwGbTlmUZw2iUdk6L0F9KifJUMZ9Op/1+r7X+xS9+UVUVAgnZKM3Kngu0EgCMNhmQWckYai4+FrDgZKgAMwXndDptNhtjVVEUyHkce/kstIKFb74UXvKE8vXC/JOKX6YaOLPLlwI9zyqdy48+Qax8slyX51rwt/KHdOYEcj7MW7zulrbnUmjC3PM8LxnVGSLzGd55BkCFHw3F4RwRBlPgkjei4cwREEDu/3mdKhgAKZE+uY58bWZiBjOTEJFhQsoRPcvi8EweKIritN+llLSeWK3WTrdadE+Mc0RHefacoVCKmZ2zMedMyAzOOR8GjYqVQuSUkh+jj2FTGFvW3SGFFAXGnXPyQ3JlTZyMMWWpEilg2u2fxBQlRjZGCPtOhHPb1uWctcFpHq+ULI8QJh8SMcKUBRNiWNC8lSt+OH148fI1EXbdB+dAIKDM7Jxr65qZjDGlcTFmozCE8PDwVJTF3Yf7q6ur3/3u+5sXL5qmATgBgMD0vfdFUSD2WmtR2IgxOmdkbC+vUCpLAMgE3fHUNk3bgg9D2zbOuRBGQcAJZ6MoiqhIay169+K9LjdfCABykBrtpGfonDOspLhsmiZGIzBy59zl5WXXdcKXL0orTamcc8KEiIwKEZ2xzaqtN6uba3r7/n1mVKCccyBqUzEfj8eUQlEUFxcXhSmGYdw97rt+ZEZrivUai0yAKuZU1q2rG2vtt4fhcrM1VyZmHv0QM19eXLy8ffU3f/f3p+OBGFfbTdO24xDSOJxSXDmtlMo+JiBbFrZ0Yz/849M/Xl1dtW17e3u73+9Pxx0AWaO11p/YSv6hBuQnj/9uYe/HmLflO+KvKb0oypAzz70p6fMK1CXPdGbHzCmR9AA1KqCJDoVSGoo3ikD7UH14916j0tY4YxUqrZRBQAM+hjSGzIQMqJXVRinFmdCWgtiOFHNOPo3TKCskFr83rRUqxYqIKFM/nKw2m9V2s1pXTc2Z9ofD6fCuqMowBmRcr1bG2jDGu4f73ePTaiKdKO/94XQ8nU7DGHLO3/7w/Waz+eqrr5j5cDy+e/eOCNqqLstyu91eXl7KAEMGEl3X7XY7ZgZiY4y4iMnBenFxsd/vRW8FhcRWFDIvVEoNXX88HnPOtrYwu00aY/aHp8U/6MWLF9JfgkzG6MXACEFEibkois1mM2G7tWZ6hjlIb1Z6hqgRAIxVmIiIkcSAnICyUiAyQ8qgMUaaZpiAUMlwxbraWht9WhwBM7JMBBER9CQqLRFRYHI5RKFqLL3QBYSyMPDmWdcsQn32OCehL/EJZhr7wspf4uUnpPVPwp46o8mfR53zPG/5+pNItrQl4UcwGQAYY/g0SCvhruKE6JyviLOn4PNzIaCeutw0gz3lf5a3IGgaRiAihOemrlKKIC9S7Bomw2GlVN/3Oee6LCWNEOUgpVSM3jgnHErmLGKjWhlmKMvSH08pBw3Q1NXTcSCY0K3e+/3p2PdNrtuqqHa0zzkao5q2CmGMDO1mrTXmHEWmrl2Vu/2hqVfWKSISCXIAEBmjy8vLnDOTlT6hJM1Le6BpmlM4yLhOnnq5J8aYvj9VdXE6dafTqa6
rtqqdK5Cpqoq2bfpTV5S2ruv94alsWhlMuLrsum5zVQFAXddGaVaaFZZ1fey7w+lUNg2rR9BKQiAjVk0zBO+cy8BotPe+Q7VatVUF4zhuN5vtdjMcD0qtBEDedWPKWTwfnHPEMeccY3ZukgUflNgTKlFkVLM59mScwinGiMhVVUjEzTkx83SeAHRdJ1iB1bppmkZbJ4R60VVBFdFoZvjlL3/ZDf546vu+j5mMMduicM6NIWoUUBhlVqs1NhvlbPHw8GRO/aHrUyZdOlNWx+Nxdzit2tU4jqdjh8qUzpXO+Jj6w/5/+Q//h/3u+OH+4fHxcX/3gYiKoiqckWFWSHTqhsQkyMfox/1+z8zrtmnblikOp04AuiK0MW2rH0FM/oeHvU+ejz+C+8sIgXNSsqdE9ZisHJKw8BZzzoxpwiJmYmLKGVAjKmBQDJyJmFNKQKy1RqUpRWUMIGaGqZyZj4MMIBKrlDPNLndqtk1XSiGaJe+u63o6fWaChJyAF00jSyqEcDqdYhTQMzzeP65Wq816fdiffvf1t9Kpf/nyZXd3N47jOPrT6XTsTn3f+5Byzp999tkiFCK5mNbWWnuxXa9Wq6qqhJB7OByenp72+721dr/fG6Wvr6+NMcfjERG3262EKFBYVCUhWGtdUQDA3YcPSJxSQoa2bcu68t53h6OcDrLQZWOs1ttxHJ1zw6mLkSSFZ+ack1KqKMoFNyV8+bapLi4uJIWcQe3iWic+Ekgs8txAREI7VQLkR9AIRiGzijEBg9YCH4jMrJQWhyap1WIQ4gcRkbLKGCNBVGtdF+VSOqcQqSJmtrOLtzzOVcSWkHZeqC3xbCk95e1Y52QUuqxA+Z0l7C0LZr7Oc6g4D3v4MY5m6mkgztO4T9WSPvmr5YsFFvT8C7MO04+D6Cf/Li6AcCY6I6nMsp7lzkhQzJyzWDdn0vrM4ZKZZ+6EmFtphLqu5ZNyZZ0pWmuZe5mGEyU9SaLIi4SiKNSxg0yugLZt754OiVAoEiGlrhsOpz5cVKgrZrZOp5RevLh5f/ehqBrn7GbV7g7d0HPZ0NXVtus660xRWOe4ruvj6SAfWVmVZVkOQ8/Mu92TlERS0BhjrdMAIAWfUirz5LdHoOq6HcfBam2U3u12RNSuSymhlAIJOIe0b1VZluVhD0VR3N3dWWvH3jPzYb+/vr4UVBoAIGhr3cPDU9d1FxfXWmtEDpGtJQEBiN6eFJrxcDrG1Db1zc0VM3OKq9UK88K1VYgogFLZg5eXl8Y4iW1KqRSJgJh5mjXErJQy2skuNsYYO5GXZFPUdS1M1sene2utUiA3IcZorS1chRgFbmOMIUailHyKid7//d+3bVu3K2PWx+50PHZ+t0PEm1efHfaH4/EYfcoMQALiDoD66vbFlvj+4en7D+8ed4dxHFNKx/1RhpEE3J+6kKJSRhn3/e9+1zarTVv5vqAUrS2J4LR7SHVR1+1q1WitRx+HYTC22Gw2jOr+/v502N/c3Gy3F0Q0dEcVo9Db8OOAJ5neHwpS/53D3vl1n7cxsJ4gLZxzjsQ5c0qJaJKxed6cRMzsx1HXtbOlBkw+qMwK2WiMMcWYY/YpxJQSEiulrDFl21DKgw/LxykvQIqYiZ48I2KZOWeW7E8WAcjcQymQoxOQiCkTIlrrjDF2zh8jRSRUjFYZRHSN+f7bH+7u7owxm83m5vJmv9//3X/9u5dXm2N3enradV2XKOMsQ9U0jWjNHA6H3X7vnPvss9vXr1+PQycvpm3bHOLj4+PDw0PXdZvVWsSHpNUpopR1XYcUGcE665yLlIkohuDH8Xg8Vq6QhV7XtVHqsNufhn42zOPVykk/syiKp6cnBBqGwSQUcJAzljJKn+Tt27f7/V546zDrn80ZxRm5DafbJ+IkiAyZUkqZIiAoBUygFVgjIquTUKXQHpRSVVVFLxgtgdVpZswZiMiqrLXOmUIIdVMajcYoykw5xuQzRaP0M/wlolIAikWW+pMyawl151+
chwrJLqWvfl4vLtHuGa6yjK+naHSGYRa7g9nA77zaI0o8h5zzFyaISpgrRUDFRNISXG7y4sEMM/RG8tnzMpbFTl2rRVQvM+U8qczkM7gNAQuwFmByxwaB8ZMooejzWC4fLjPvHu5jjM65snTGWKlILOqiKJTRqHXO2aCyVmegtnZ9SkoBMhhjQgxNVTurtQKfswKNRjFTiMnHlDKC0sqqoi664fTy5e2H+/dNUzmjbm+un/aHGKEGWq+a9bpVCoxi58qyKlKuZFMXRYOI4zgQwX53/Oyzz4qi2O12wzBcXTXGGB+GclXmnGMm0d7z3htX1nX99t27zWYTY7z/cFcWti6rvu9XbaNhsNbE6GXkCUgSsfa7Y9u2oPQwDLWxl9vtZtXs90dAjVpnplPfjSFoY2zhQkhKAyi21tjCdP0QcwCFBFwYMwxDSuHycvtw/6HrutW6WdfV09OjJOXSxmiaRhmjRCEcjVbaaGe0y5qJyPsgGrPjGEIIACMzC0alXdVS7g/DIPVuXVfDMHz+2Rd93/swlGUtVKics/ch5F6cja0QMK1lUETw5suvYk7DGPp+NIW7qkrpOT0cRltWK20Uq0QQBn8a+sH79Xr77Xc/fP/2rXHlqy8+/7Nf/vm7d+9+85uvu/2hPx4PT7vEpJSyrnQOFeXaWQOgnXt5fblt6xDiqevIqtPpwJnW24vtdtv14+54jImapgHKyDQMw36/v9iu1+uNBjydTnLCfzI++KMB6k/V5PxD0e4PNTkzsGLIBIkh55xiToZjzNkoGaZIFzQzaqIYI6WMFgwqRiUVXubUdx1k4gxIs8U2ABHtH59ySqP3MYSUs3CDFeJTeqirqihLrZR8XxvjrGWlFkKSbPqpNMxxaZdpM+VWAiNc+HB13XjvBbd5OvXW2pcvX+acHx92Dw8PEtsOp+PxcDh2p/mwKEUHpO/7p6en47EriuL169fr9Tol+u1vf/uv/uxnIieNiHH0p9NJriPyngrwcDjEGDebjXTzh3Eoy7KoSqUU5yRaKmJHYIwBYjkBx2E4did5d957Y0BM+BBxUh2jlHMuyglSWbgiJ2Tmp6en+/t7mcC1bSunsHR48pmV+dknr5gQQSMwYiZOkDMyWA0JQWtljFqEjRCx64bT6QTwom1bmjIV3/d9MxUTIpiFCDpnPwyhLKwcBLiMdWMqaocz21Li+sKwXKo0mEVspUaRIntZmcvilN9frv/Jej7/fXlo+Iju+XwXfsSUX36yrKvzS/24TDzvzcJElvgoPMOZusoSwlNIMkle8MnT69T6xy9y7nciKunEKKXUZCiTCYhwAYXSRCHt+74sbOGMpESuLKy1qBUoba1NDCGMZVUUhT2cxrqpdo+dqIcU1vkx1nUZwv+Htj9rkjTJrsTAq7t+q21uvoRHZEZuVZWVVQ2gG41mkxj2wiFlOPM2/3FG5o0yQpkXzpBgA0I0uxsAgUKhllxi9/DF9m/XfR7UzMIiMgvSC/k9hGSau9vymaree8899xxFCMEmUrWDB29c0NZbjzyiMpXO+37oLi4uyrKgGDCE2WSUSpGnnRTU6i6RTJsoEWkIIXmeaz14bzGixip0kD2KHzmyheMMnHMuylizgxZrVLajFCuliqJUSnW9m4wTxgjnlFJMEcEodG0rGaeERKyy7/s0TY0xxSizRp9NxtppSlDX1KMs41wOg7ZmP0mSJKLv+yxnGNEkSeKd7/s+hKCUkonwzgTrBON6UD1tp5NiXBYh2ON6ZoLFWCuEGHrNmYyKFkIIQmj0DoxjFRFh3i972KfIMZOLC55zHhWpyrLUWlunY+KLDw4tVCQx1hpjEDFCCEK5g/Ds+XfOB0RwnpVlmRvjFovFarVqBrRvoCKqten73tlACKuaG5Ekn//4R/2gN5vNcrUpRuUf/ZN/ohG+efX6+auXajBx8BchNGgTpQeZ4BDGb968ef32pQ1+Pp9nqOy6rt5tuUx8QAQgooFt20bn+qZpnNXn5+c
xaznu61OQE+B3Djb871/t/WDMO0wMoYigOBusD8YhZRxnWFtvXGDHDl9AGJAxBqDHAaLvhbPWGItdNDkDcqj34/hB17ZWG2V0cJ4wyhnHlGBAo5HAgFzwQ9f3avDWUc4E40k+ssbGIBcPCM4pkSi4YJ0F5DnngqWCcuSD7oe0yDtjql0d5/Yizm6tLbJ8u93ebe/bto1bK89KRvhifeu08QCUc0yp9V7bwVr7/PnL8/OzL774QgihtO77Xsr08vIyYo9a66qqrNKRHIwxHrqeMeaMDSFIKafTKee8ruu6babTaUzWwPkYKb33EYMyVscjvq7rtm2FEARgGIYsy9J0rwJq1X5SrSzLybQcjUaRHRNlqJbLJQBEbcC+74UQhOBhGIZhiNVP1AcIIWAUMMbWG8AkYEcpwSag4BEAxeAwAADFQBGKejsBA8ZUKdO2vVb2iDoCgNauyDFjLGp8Sykxol3XDQNgQN66QDxBOPjQdV2apqmQwTogsPeABgghGGvdAZ49iTr7OHFcmacAQ0wC3vlQHx78oDh7r2uI3j0n/FDr7vi/4WB7dEDU0THQfhD53nsGfBxXDxCiwidE9RY49PP2FNPo2IBPxNhQhJYROrCyD1pEJ+F5b5MNCPYi/LFc3DtoH9V5vFNK9V1HCcrSkXfGGMP5KE1TTAmlVBmLCfHGaq2TRAjJhtWaEWy1K8us7/s0zRHqGGPrzRbAIwyAgnUBWRi0GZTVDkwgeZk1bevB9ao/vzh7e3OXj0ohWJEn81nnMbu7u6U8QRhRhoxVAD7P0/v7XbyHbdMjjI0xlHLnglIGY5plGWMMY5SmycvbGx8QIaRpGqtNmqaU8b5pKSGMUPA24YDBM4pHFxfO6iTBOPi+qS8uLhhjbduWxfj+/j7J0vV6Xdf1dDoty7Jte9X1ghFCOcKwXC4BIy5FQJDm2WK1zVKhlOEJs8Eggns1CCG0NQSBEAwToAxHsfsom1kURdM0iCAuhcwSnkjrvQtQjEbjySSOqyJCUimZ4N5Dr7QLQDmjB++tmDN1Te29jSyYOBQROxfRhCQlqd9L7xqEUJrkwJm1tuuGfetECESY955SbrTWw2CUlkOCESUQijRpjVWDquvW762TgVHBGLu4ujTWa+d8QL7a3d09vHj1kjAm8ikjdDyZbTa7V29u1HfP0jQtyjHGeDwen13Ozs7mT54+ffrZZ5vNxjm3bSrJRdO1zjtCRJ7nxnvv96PrcXFGE5siTcfj8XK5/A8p9fbX//4g5/fjX/xnb7B5GF13h9F155z32Pl35trOOW96DEhQRgh1xg5df352ZrUxZu/RevSrAx8i3dF7HwuXmLbHvOk4NieEKIoiJL7u4iM28kdi5R4l24dhiNN78UGlhq7r3ty+zbJMiiRWQuv1uuuG+N9N02ht92iSc1Hbuu3bIxAfLRTath+G4auvvuScC5FQSpM0lVLmeZmm6XazyrIsy7K+7y3XAKCUapomzusMXR9pnFLKyPFp2/b8/Dx2+OIdi5OFGOMoMRXrG+NclBC02jjn0jTbi55Y2w+aEMIom5Sji4uz0WhECDKDimOhAHsGWsxw5/M5RiGuyNOhwMi9JIQYBR48xuDfmVcAwUAxhAAYAto3+gAOuprW2mEYhl5rrZ2NS3k/c8k5xhhzJmOqpDVQSqMmGUVYOaP7YWg7nWanVRE6oWJGihocRFzDYTrzqFzzQWcugrfxpp2268L3Zk/hEI2+v7V+cL8d3h4+XnBSC7oTS7zjy0VQ4YMXjU9sD+7E+zdx+AiCi2ODMxxvMnqPyRnQ6et6tBfnfFca7pXPYtgDhAIE64yOqrmizIuq3iilCSFSpBhjIFj3BiHkvHfOCplySq0aEEbeh9iWLgoEyFMMbT1EYSvvvbPeAXS9740fTDAelVnig0UINW11dnb29ddfX7gLBF4wMiqLTplv7+z8vBVJLjhVysSv+KjL2vd9kopoqhDbHJzzPM/JwQm9bduI6UVtv/F
4DAhvNhspeQAP4LNMWmsJChdns81mx7Jod67TNEUASilUoqqqOOdd193e3n/11VcIofPzs+12++jyyiNNCNZaY0wjzTK2BjDGzhmM94Ki8XQCgKpu59NxLC6llJKzQfXb9erzzz8N4CgVeZ4neSqE2NUVQkjmubW+bXe7XS0lL4q9O3REbuKJBwBRuyO+SlxLsdCM9J+YvB7GbbGUglJmren7oaujMLcDAJkmQghMeZRtGk9GZVlSxqMsC2U4z7J8ni4fVqvVAjwSQiAPnRq6rtl8s+kHDYzkRZkkQiZ0V2+H3aZbVEWW52WRpPl0BtvttqrrxWaHMUavXv3bv/qr0Wjy+7//+5999pnz8Gd/9mejcVqW49lsah1YH5Rxm221WK/L0WTPailLBH6z2Xhjrq+vY5r+7x/w4kWoTN7fpe/pXv/A9bvN2r/fZoeoQB88jj4+EAC84DRNeCJZCI5iXBZpIrn31hrtnKWcc8bUoJDzo7KEALobUimr7c4o3XdDtds1bRt7eN77LM8IpTJJxpNJlmeAkHU2QDDW9kNvrEmz9OLyYjwZG2tW63XVtUmazOZn5WgkE8E4A4QGpfqh45KXo1FeFIigQQ3aGg8BY1zX9ZubN2/evF4slk3TdF0bpzKdcyECfIQSQrRRVb0L4JI0lUlinXPel6Py6tH14yePCaV5kadJKqWcTiYXFxeCy67tzs/nVVVtt9t4zlVV1XUdpbSpa0JImqQRnAwhRMGCdFRQzgijAWDoe2OMYFxw4Z2nmGRpRjCJo/pcCMpY3bQIISFkkiSY0GEYnA9CiGHop+PJeFwShK3VEIJ31hhTVXXTNFrrqPwppRz6ru/7rusIIfjA9WCMRUt0nmQh4CgtYq0zWiltnQdrIE1RnuUIIa2MNQ4hQB4h6qx1ScLzPEf709s551SvvA/x5JpNZ0mSLZdL78O4YME7gMAIwRj54CLtlzISwBOCCcVxLBsTRPbe4+HIuzm27mI42VM2jsoGh1rsSGI66iTg9+khx8chBB+C3/u2hOjUSShF+OC0gCA6Rx9C1h5Rj+shdgFPI9MhZO2JJ1F1GgjGhGBKYv8vVvyUUsIoZfSY4Ftr42sBikZmGGMc0cBDQNtXfwDgIHgIaZLGT40Aog8oRkgIYYwO3g99iwJgjNbL1cP9PcMkK8rtdhvAo+DHs8lsPmvahnE+DEPbdnlZauO6flBGr9arNJFvFy1CIDknGJ+fn3eD6nRPCFPWAw5RoTQY4MROJ5PpdAa2ts5labper4ssF1zUVX1xfoExef36FRdyt+t7HWbzfHY2Xa23f/Dlx5yzqqrKsozKfG/e3HAu1pv106efRGF0hALnLElk33eYse12a51RShVFbq0tRkVTt0WeW2MBkJTSO3d99bhrm/l8nkry7LtvP/v0k7Isuq5vmkamqdLqzc3bfhgIJn/4h39otHbObjbr8/lcOffs+Qshk6btirJsu+7Tzz/b7jZccMp523fT2bTt+gCwWq+LsuQ0RqwwKsvdbpslMs9zNXRK948ePeKJwIRkeaGURpRcP37sAlqtNsvlcrFYee8pY03TAACh1BiT5Xk/DISQ9WaTF0VZlgSho1RFDHvxCiF0Xd91fZRhN8ZZa7yHgPF0Onv06NF8Pi/yQgrBmRCcT8YjgtDQd03bGq0jxiY5p0k+GY2vrs7PZ2dpIoPzRmtrtAvWeVfXu9V65b3N8yKA36xXSI7Xq/VmV2V5cfnoUZJl/TDsqgYTGgLmMt1sd3/zy1/u6vqjp5/++MuvipS+vXm7Xq+3u+3rmzdCpo8fPwGMlTZHcIhgRCl1Ieyq6vGjR+v1OjJ0tDHHhMDtTdyjHMQppuIR+l619x8aNn/XdUqiCwAegYfgQvAeOxecDcOgqaQIEaUtJkFylGdpniabXeWF995bQEopN+i6rhUmgnGEUWTze9jrpyilmrY9Kp7EaiZC3lrr2A9zzlV1HSfwnnz0EU0EpyIgb5T2EBIh87K
QnI2nE/BBW62GoW6arm171Tnju64xxinVW2uN1V3bt10z9Go8Hrdtv9vtus5gDGVZzGaTshzLgntjB2OiNXOapoyJEMKjR4+01pyKKMLZti0CMhqNNptNTNOqqgrWxXr3FP6KYwPxoBRC5NNx7GTs6YsBACGKCeA90B+FGGLRHB1i43KhlCZZRgipm05rnR1tAp0LYNEh5DjnokpCWZaRXDoMQxwkwhiTg16G9x6jQAjpB+0PakPOOcZoKom1jlFgFAgOwb0bNfPeEwDGSAjIWiu5CCH4YMvSbVbr+B31vVLKJEkmhDCmi6Qeb53FVkqpfYgx4FRq5x1r49RnnJDjCsQHg6EY5o6VzSmBxe9NR/fXESw95cg45/BJYXT8jk4rSDiBCgEgumOe5oKndyPeydM/iWJmcAiNGGN/0NIEgJiHxfd52k08fWl0ylnFJ5qfCI49v9MpQ7+XQwNj9kwxiOL/Ua0DIEtE2xoPJIRgjKNcWO+KUdkMCgAIRcZYAiFNBCBI01RrC95iDEopaxRFoIMnFHy0UwLQAE1vml5rjzKKOOcHDzwbi5W2bhLBsiQ1wWMAygD5YJ1JUyKljFJ8fd+PRqNhGOIRn6YppRSdzDjG7yuWCPQgYhfvHheUEFIUfLerrXVpmjpnnbOU4s3qYTqeRGu91WoVlf8i81kpNTs7i4ywyWh8MT+v68ogiFCTECLLsrdv3242mzzPo+QKpdhaa51mTMSpO4IEhoACUEKKNI2KZcd1KBOJEDqI8wlCiG5V0zR9P8RklDGRZUm0/XPOPXv27PLysm3b6XR6e3urlOqbejQalWV5jHlN01RVNZlMvIcD5iniuBRCyGLYrTfL5dJpgygpy7Isx0KItmn2nEFCYG9SbZ01nCHEEEYcBB/n2cVsarRzzr19WHDOG9W/fvXm9d3bvmuyVP7B7//s3/3q7fx8lqZpIqX3Ls+zs/lcabtYr6zxRKvxeDo7v1itt//Ln/+vjx49enwm/8s//s+X6+2zFy8RpqvF/cPDw3gyG5W5DygeR21TGWPiIKNzbjQaRW2v2C8MIWitEX4vtH0Q1+j3f/CfGPm+/zx7OMUjD8Far4xVxgrDac4p48YY5/qQMM7IQDEXIkkSEIh4IIQ4ABSCUur25q2gDO9NMsF5H49jJgXGmCcSYxwP7hACEDyaToQQiBJCcD4qMcaxv9XqjjDMpBiNCiZFkWY8kRSht/d3Q9ttqk213VZNo/q+132wwVqNEAHwUeKQMFKOi3I06rpuPB09/ug6YuiEMEoxpRwYNFVtO0CMSSmTNE2TXEo5KgoAkCKVUg59H9HXGP9iuq+UIrA/Auq6ttocmahH4F4IwTgHhPxB9Pm4jimljJCmi3M2VkppvWu61nvIsiw6E8U/iYfgdDqNPuwQwHsfmzRxFD0m0WVZRjHGruuGYYj/iw/CYN77AD6EUDUquisopcPeG9aBh4QDZ4wQZI1D3kVhd++9N5CmNOKcRTbBGBmrCBk3VV3X7fl54VyIBB9GeZoEhuMwUxuwiy09Y6xgnBGKYY9m78+4ELz3/FDnHUFFdJCZPrbTTmPP9xgo++v4J8cr4kXvTaljHBDyCACCPyjERwO8vUkxQmA/xD+OxJkjOAmHFiAhxDsHCEGcwTiJrB7eRdaAY1TDOGACe69EiJJlx8+O99pmcDBLwnEnOh+cd/6dnt/xaeOdRAeRhH0v09s0TXe7bXzP1tokS/phkGniIRjviMNKDZTh6bTcbqvJZHZ/fxucx4BU3yrVIwzBOUKQMwEQUAbewK71VaeMJ8YFwrjWVvAk0pW7pu+6bjKZzMaTh/WGU+ASjDFB4dlkmuXJ29s3XFCle0JIXdfGuEh3tMYjHJxz/JBOWadj6DroszvGGAbECMUocMbqXZXmeZom4G0ihORs0dZnZ+dMiL7tYpjs6rpte4TQ5eXlZDRu25ZTwhixNgTnVpsaYRoAI4QJox5CN3QylV3XJVmKKVGDsc4JiSR
NCKUMM2ttAI8QpJmMTuTHNEtKGULoh4FJIdPUONf3qmm6JEkuLx9RSsfjqVL9y5evnz59ijFSSr1+/Xq9XmdZVtVb7/2j8wtKyXa7ffv2bV3XkemmteZcxiy2qTtjNsc8gKdcCiGkFJwzTKzSq4d7Y0x0HEzTNAZIjGk8A6vNklIqmCSEBBcP4UEp9ejirGk6QOwf/f5X/zT7w9Vy81d/+ze/+tWvZuNH6/W6a7ej0Xi9edjtdiLJfvLlF9lN1jTdzdu3q81yPD3bNXWvzdPPPv2rv/x3XCRPP/38q6+++vrZs+HFK+Rx27Z113HOhUyjgEbXdREAqzGOtqPOOUJpVCOx1jJO323wg9L6cQ/ScMrD/k+IfL/7eQCiaWxA3oOz3hhntHNiP/ER9l17MMY0jaUyMdZiIN553Q+66bquU/0wm80EZYSLgEAbo6xhUmRlsdptGcGIUUIpEZw5GV/98uoqyoQnaTKZTLIsAwCt9TibCCYopxiwC05rs37Ydk293m6Grm/7RivlvCecZCIFj3TfRfIXY3HujRMSNQgZRhQh5H2w1ngf1dzQq5s3WZZdX19Pp9MQQlPXRruYzEaLu9VqZY3hnGO0tzKPt45zThEOIbRtu16vsySNRKwYzo/t3L7vkyTBQtRVFc+mVCaplDEbPXaq4jHtvWdMTKfT6exMa73a7rTWQqbT6XQymaRCck7A+uhIEumasaaMubPWOpplR/6Lc84qdRSgiTQqF5A51Igehb7tjAaMQEgQnHoAH5z3Pk75hRDUAOMxitzroxMQxjjP8/W6jWq8Qzt0Xee9L8sSY39UppBSDsPQ9N3RbdmfmHLFCBFpn8cZtWP9d1ylx6hzSic5veLjp0XYsRXnf8h4/TRkfhBN4f1mwbu4dfLTo//DwWb63cjEcVYv/sIxRsIJ4ROHPWc1duyO9JnYzwsnDkXxPWilXWxE7wM3DgEowtoHcB5j7J1rq7ppGgKICqGtY5QqpWK7GiEkk6TtuqZrI8qqrWnbNsuK6WyyWq3y/Gy9IECQlLJuG9U7hIEgIADOAcaAGfPGKAODsn4/u4Jc8FTwoWsno+loZLU2AHgyHq93lRCAMHRdl2D06PpxlJwuiiL2HYdBx08xDENVVVxQ51xRZAf2mWnbriiKY74ipTxmM01T9UN7fn4OgAiGyWTkrUuShHM6KvOb2/vZbLJZ1xFu4Zx//vnnTVU/PDw8ffp06LoXb988urx6eHi+3+mHEuQEUXeM0f1UtXORURknkjEgbyzFpNE94SLSC+J5HQCA4JhMbzabqmrjRmjb9vnz55vNZj6f5Xn+4sULSmmayrdv3wohqqq6vLrEGG8WS+ccQjiad0bpYMbEZrMZj8dpkhPMQghJkuzV8LHfe7rFIWlMKKMME2sMDjC0Xd+0xxVIKWXlDOHAEBeMEU4zQV2eee97NZzPRoTR7a5+9d23Tdf+gy+/+L/8n//F16+3Usrb29s//zf/9tsXzzHlwZtf/+0yLcpxmXg/X2021XaJACPkbm5e//7v//5f/uVfvnj27dNPPzdKGaWtD9oCYrTrOsaHKL4vhIityqqqZrNZ5LnQUzvo3xGPIs/gXUj8P6jU8ydy9d57E53WjfMIlHF9rySDNCGJzKQkAN5BUNYIHGUDTMCICRFn9bz3YK1Squ26wWhMiRBifnURGflRNaccjeORTSlNiizylfeyx4RkuMhy7ozvhratu0H3zlhltB56JighKU8pRojsBbB9cHB5PqeUU4oBcDysleqdC0oprazW2rmAMVDG4vH1h//4HzdN03X9YrGI0MfV1dXl5eXQdV3XPdwvN5uNFGI6nVLKOOdKu3hsee+7po3C05HmFHHIo9B2BCo9hSgkH70RyqIo8wIDYIxVP0QDaA+h73tlNJfibDovy5IQEv3/CCFFUUwmEyEiByy6xYX407hFIzgeY15UtY/9xRgCAWBP2LHaOZekI2c9pSzLUtW32/XGOcgkUEYoxc6
i4HzwgBDg6Ojt7CmRck/AMaYsx5TexzegOrVZb6OJDB52jGJKUBzfZBQHZ9p6NxoVGEdaorfB4oBJCBhhd0DnTjHACB2HQ6l6Gm++fx3XKjoZPDiOAIcfoiufhrojfniINJF/uWdhQoC963oImGBKTw22IABwygDAHV4r9ucChL31RPDHIBf/xDofE5EjqBvVco6DdydkHEAniQLGGCPAAbwP/tANxAQP/bBer/u+l5wnQuqmct5G+PEgDU8opbum4ZwjirteGasIKYs0Cc5GVhTnnFIKwVsHiIDgYnDG+7CHbwl4B8b6QZtCAkXxTpBq0M65siy3q43VJq66ImPVYLrBidRPJhOtlXM24vkIoaZpohJY3JJcUACIIwfU4GHoAgLCmLEeYcwoTZJkGLrIdVytVlEdfr3eMEKLPN0st+NRQTGJoFHM/Pqh55zPz84Z5ZGnNplM3tQ7owdnVK9MQMQFEzDS1hrntLNlme+a3WC0lFIkiQdcVZX3jmBGEUKcYoy1GRAKzhkusiQVk8nEe991XQCIbLthGO4XD+tNv1wuq6Z1zu2qWiTpelsZY97evimK4urqwkEgnKhOPSyXdV1fTmfxzXvv2rZdLBaU8jRNi6LQyiLolVLDoENYx6wFwGdZlmaSE8qkGOVFPipTKYdhwCGe35FrxqWUjLFXdw8Y4y3GGGOKaHycc+6NrlVLKM2y9Oc//dw6NxjtVFtyV+/uxin5v//f/uVqUz1/8co6mM7Pk6JEmCvnvv7mu//tF3/bd4qR8OrF15+eJZPJ5Nnz51Xb/Pin/4Dx5NmLl5NsZCG0bae0rarKmCQe7EKILnIg0jTOdcQNSE4U+D6Mayjsq70fConwHxr8ftfzAIruKuARoID3YwzOMyqMMW3rUYKFlNZaYwNGIVBqveMEonZ1xgUSKXJeKeWt84B8CECwZIlIkiRJaJ4I54WzGFCSpaOiZIJHA01OGeUsjjFY5y0E4nF1u4pZVawY0jwpaeG913rYtzqCPZ6PzrmAoOuabuj1oKx3GAimhGLS9g0jfDQpi6wUifTWtX3Xt91REC/LsjzNomnRarXSw7BcLtumz/M8S1OttVZdCKEf+piW1nW9vH+IA3OXl5fnZ3OMsTP2KKkQp+xFkfR9v1wud7tdkWazyVQI0TWN6odY/FFKB2uilGV0nzDG1E0bU9EkSSLmA3uuY4iiX3H+DyEUR9r3nM++d85xRqWUx5H/eOrFw5cQYjwgQgnet8ecA4KAc8CMUBJFmW0IQAgApt4BiWPshBzpbYnMuq5LZTKZjLXWUqYYkYjPCCG8whjQsQ0ZM5i6riM5k+J3xN1jx/vYKXmHEJ7shL9nAR+rxmNtDd8zZziO7X8QVH7XI/j9KXU4aLigE4onOmGi7tmnh8zgIJoSkiRBBz/3UwjXH8TYjq/ivDtl9H3w3j54JL4iOujvBOf3siyAsiyLQG0UGRGSUUoxIc65JM+2bZ0WuTaG0sgmGAhmQrDd/S6EgGE/N8YwBARMsE47ir0D8AFhDBhAWbfdtWcCWeuZFCSAtXa3qx5dXIaAtNYiSTmhWZZt2q33EE/Ytl3GsbNEZjH5K8tx7ApzLqVIle4YY0r3hAatdcx3438whjnnTVMde59lWTLGCEJScqP0oLpJXlBK1utNmeeL5SravEyn06urq8ViUeQZp0z1bVPVo7x4eHhI01wpo1Sf5XkIqGkambAnT67X63VsQEAIaSrbto3yFzRCOADGGMn3OhXxow3D0PQtpbTIUuvdttq1bbteV1XVRE5DBGmsNbG/M51Ou67L8/zVq5fe++12SynNGMcYxz5FluXDMFDKsyz79ptnQ6/zPE/TPDYs4ipSqk+lxBjatt3c3r1WL733KIAymiBMGZZSpmmacIEZxQFAiBhvMOEWrDeD7huMcTEaMQTe+XanHAZKqZAyS3nf1F98+SkRfLXajVL26ePL+8Xq2xevXr34pm6HbDR1xmE7WNVQBgT5r7/
+ejqdns1mddPd395mxTjLsjdv3xTlOMBezCHaC0vBIgbrnJvNZkqpqm0jC/3DMIRON+Ch2vs/rtT7gd/xKCDsQzDGEIBU4CRJkkRCtPghSEPw1nkRWEC2V8AF0r6qKmvt0PUuQJplWVmIRBLGCKONGlIhs6wQlFHBGecegTOmnIyDdYPRBFDACLwflLZKY6w551RQwgkAIAzGG2ttOSlgnwvbUyejIs2Grg8EOGeE0VSmMk04FV3fB++Ntc74XnVxdhBwoIJjRqPPdRw3ttY2VX1/extCyNJiMplghDabTVM13vvBDJF7UlWVc+78/Pz8/Hw8HhNMjodgnKmIX7mHEC03x+PxKC8i3kgIqXdVbPLF2hchlGfZaDrhnEc5WiFEkhfxFIjfhffeeWu1jryAqJwSGYNhP84fjpXEsc6LZdNxULRpOsYIIbhpmt12CwBpCowTOJkHBwCMEabUQWAI4oeK33iELFarlXPu6upqsVjFNe29L4oRIQxTCgCM0EjkEULkeR7b+8eRuHCQ6DyNfHBomKGDoMsxVH/QzYLvmdMecaqImp7WfLHqOqXSfLDUT4PlYZf9AAvmWJydBkuE0KDUKSh6/IV4/w9dvXdUl9MQGOvC+Dx+P+J38Hw/KMEj58H52IkM6B25Jt4iF0y01ot5W7XdMUy0HWbTMaYkvqVh0NP52cNyEeF3IUTmwna7FYmcn8/Cs1Wa5hBMnB8XAgYP8SDmqhuMd85hwAj5thtu7x+ucxacH09KZD0EtNvtPn78BAVAIUQ/PyklCiA4RC6YMSqqscskbZpmu/WzGQshFMUozh0ZOwDAMAyMkVjVYYyN0UmSUIrjl54kiTFqNBp5G7qmzbKsLMv1epskSbRMjIukbVvKkslkMpufh+hGKYTVJk5PoeCqapfnFwghYzBjTKk+9gjia8XFVle7qK6ZZblzDoKhjILzWg9FlkajsbjMjrMrhJCoyhtBoKurq8lkslwuHx5et237+PH1H/zBH3R9EwFVdBhjKIri/Pz85vmLSGlJ0zSuuPV6e3NzczY7V0p13QCAI7ITU+Qn15fB+aHrhq6nmEzOL6bTaZFlz18998bux5YIdca2basH5SmWUmZZJoXAGBPMYkZebR3nPGCkrXEIkiwNwfRd+NHTq67rbN9fzbILlK82tdUy+fEnX335o24wTGZEiH/4Bz//zdffPH/9pto1rm27rkuSZLurf/vbX3/x45/++MdfzOYXv/7N10AIofg4jB9pfUWa1nX95MmT3W4XEYimaRhjkcD1u+LRhyDnf+L1g8+zj7cn4mkAsN1Wo5ydz/L5fD6epMgp5weCgVJilBZC0ICQ9YQQDz4yD43S1tio3MOlGKzp+356OT8WKAghwlkmZaSKxONA2ehAuxdoRwciYlRp2bvFEhJDBcYIAGIFHeFvq3SSJLP52UHZXbdtV7dV8Pva3xGHDCaEyDQhhKw29WQyKdI8mqNut9vVYr1ZrQHgYj7Ps7yu677rYjCLb6zv+wgrPbq4/NGPfpTnedd1u802Au4x5sVet1Jqt932fZ8W+XwypQhX661zrsgyY0ySJB7CdruN1WE+KsuyRBaklEImeZ4TLo5K7RAcCuCcj4Wv955RGnEMdIDIj+S3+JUxxjClsb8YIyVCSCmb5zNGoNptu64TAqeSI0TUYAggAIcCxO4RwhhIZAzuu3FxBP4oBHN+fr5YrIZhYIhGMQ44UEtibIvLIMuypmniFxoDgIP3OJzH948PDgzHuHhkcp7q1cH7/Tk4ae8dAVJ8onB2jJenbbPwPo3zWMY5o+GHWn2xGjtltcD7rb53VSCE+NUDADoBRdHhS/kAdN1zNU+al/7kM0Y020FgjCEc5Vh9CMF667131jhtEEJpmkZXgfh5y7L0EOLpbMBnWeYRROLDZDKJjnEBwWw2E6IRgvddZYzigjBG+sH5qCfOjLLa+4BC8AGGQa+2m6ZJg/OT6Sh+WarrowQlAhLCfpARIRB
8DzkQhMbjcbQdrut6GPYVQOyCRxDy4B7s8MnwIiHkSFGSUu52u9ls3jVtXddXV1d5nr969ebp06dgG2PM2Xz6+s0dY6ztuqdffJalxf39w3Q6xZSsbu+eXF8453abZXzaPM8pxd67ruuim0rcNYQwhEJVVWmaUsqiR7x3PoTgnYt5W0SDIxl1P6NCCOC9v3TsdAzD8OrVq67rxuMyz/Pb29v1ev306dPFYoFQaJrm6uoqiu7WdR2bglHaN8vys7Oz6fSs7/uuHeJpgxBijCOE433o2y4ed/EGUkqNUg9tW6RZhA0ZY1mSRjqMMSYps9hGvb+/s8pEN4ayLPdhmFHAKCvyPEuV0Zvt5tfrBylSmgglsmIyfXw5H41Gd8vN3cMKBf/tN79uejM9m01GZduPz2fT8/HZp598frda/Po33zTdcLdcv7m5vf74048//ni921V1e4S+grdaa8NY3/efffZZnPKKXaEIDv09cY1wkWBAGBBGJ/9G97IAKED0Zj4+HrPO0yPgmDDG4+lUkwIAPELBew8OI49BYzCSuTRBo5L/w9/76dXjWTfUnWoGq+uhU95TK7EjJBBkQQ/aKqu0sd47HxClJBEsS0SWJ6Mym0yLyTRLGMGIETYqxtPJVPCk7dR2W2dpobVru7brem0H5611xlg9m0+VMcY5kSRZJimjhIFMOeeQZoJLwMRxZrKMpCmi1GqqaQKIh8H1u2bdqg5zUownAQKmHDD2wFwA40ANvu/NZ2ePEsR0069uH26ev3p4c6uajmNyPp3NxhNOSd80bV1brbUe2rZ+uH11++YlCeH3f/7VT7/6EmPYVhujDUtEr5QOIRuV6WTkKem0Ud7Xi7cXs8l8Mg7OWKOSVCCC17uNzBIqWNsP94vFMAyj0Wg2OeOU7/p2MNp644NDOCSc5inNBLWqw96CVXpobT9gCJmUeZqyHFEO2vVNVwdksyLlQhrnXPDaOkylsn69a4FwoMn9cid8mJUFWLe6XyhlR6MxYXK9rYvJ2abulpvOB+CCeB+0VT5oryDhSArx6OJqMGp6NvXBW2/2s4NWS0JM1yKlHs3n0zRtg+NZ1qkhIMylEIxxjvu2SRkThCVCCMw44QkVCU8FEeEwrXFcnPE8jf3I08IrPug8EEIZ45RxEu389hEoIAQEgCCEkcco4OBRcM567xyEgAIQBBgBQQgDEBLikA4CT3D0YfIQ3tWgp+EwUjBO24fv8EaMfAgmeBu89SHaNUaHPkwZphxTBpj5gD3CAREmKKIEUYIZxZQQSjElmJJjAfduVsE575xnPFCCKQ2xHg0QvPXKWN2rtq13W9V0grEsSb2zxujdZpUkCeaUCIlFghiTiQjBTIr09bNvcskYQl03OE+2zZCWc2u369Vr44br6+v71ebhwV1czlRrOKbTrKg39Yj4gsOgYV4ib/rpaCTTkUgy5zHi/Ne//XZ6fka5wDQEsKNxcvP6O479JIOnj8ameRhlE8nE8+fP00TevHmFwJ6dF5xjYI5JIlJqvLHgqrZR3hIhQGkc3NlkvHq4f/r46f3NXS5zHHCRjjaLTSGLJ1fXqus3i+XlfJ5ybkA78M55pfpu6D7+6EmRJ8++/Xo0KmZns6+/+6buOpEVi+2O5ZNaOWQHyRCAWy0eAPzj6+uiGPXdkOfFzc1bbxyE4MxwMZ9xFDLJGEp32yZJUy4SKjlPEpbJdV3Nrq9//d1znpeT+eP7VUPF+Ntnt+uNmV8Vq8160AOXyXg6S7OxMt568uz5zdNPflyMJoP2q81WJkVWFptdjTwjVGoTMBF5Mel6s9lWjEtC2Wx+PpnNRuPxeDJBGAeA0WSclaO665X1NiBlvcwLlmbbtitncywSnhZEpMpDNWgdEJFp0zaDNlwm84vLi0ePsrL0GCtri9G4V7ptO+8CI4wAxh44pqRXtuv9oIgLtmu7qpI4XM/n15fTSS4ltcG02LZn4+RqVmTMffPt87/8i3/dt9UnH12fnZWfPH7
05NF5u16eTYr13VuKbEKoHVoW9m6U/dAKztTQ/+xnXz377lnbNFmaUkKcd3sxojjYum9tA8IMECVU/PC4+gegzfFxjN+xxd6r596naL+j2AHinCZJQjEyqnNGJ4KXefbZZx8LRhaLh7vbW2s1JhAL5q7qh7531oAPEDwjJPgw9L3RxkMglCZpmueFTBO61ylAeVZKKeu6ef365v7+HmE8m81ub2/bth1UH+nCUa0nkl/3/XwCNDZmGaGUpmkqpWCM751vEfHBG2MxF1FEGwAxKhhlIWAz6EQmxri2bYe+x4DTJB2Nyul4opsusodvb2/ruo5iY5PJJDLQVqtVtA1CCHVdt91uteqvr69/+uVXjx498iF0Xee8I4Ra7yhlSZpImQQESqlhUFrr8+kIAGIidkg+SHzyvu+bunXOHUgr0lrb9B3GmDEqhchkIqVklCAAY4wzVqlBKRWcZ4wliRRCeOKjEqN3IZKevQtRCAYh7H04OMMbrY1z7mwyjRP9VbUDDLFx6L1nXHTdMAwWASBMvD8MjYaAEGR5Op1OnbVpnmCMtFZD33HGjNbgg7cWI1QWZfDBIE8IUUOPADFGOaUIQ1M3BOMkSWWaIIT3BH1MQghwElo+IKp8QLk8NM9O5TFP5S4BADC8a4PvpcL8uwUf6yTYG9mQY5SNT/D9xtv3t8yxi3yKc6Kjg3Nsju8RiL2YVXymI0szeHskv3z/hb7/8aMCJxxlC4OLfuLWqL7thqF3zgXvIIAxuus6jFHbdkwmaV5oaxjjo1FptCIY9V3jvffOd53qhiEAYZQjQuq66VojU0kw98F4QItFiymU5cgOtXNBKRAErq8vtFZeNVdXVwBgjJJCrpeLyXhydTVfLh7arsnzrKqq0ahIkiQvcqUUxbwoitV6JYT45pv7x49HIpFVUxd5lhU5QtC2rQ+BEBIAhkEF647tvTTJIvRKKaWUWWsTIaPDZSwUAMAjSzDbbrcA+Or60dCrxXJ1eXk5Go1fvHhZ1110PqKU3t0/GGNGRcEYM9ZqrQmlWZZFsbQ0TaKDCsY42ns5Y621za6dTCY+eKXUFz/63Hu/WC4wxk3XrVbrj58+ZUxUdfPNN988e3bTd81oIr0PWZYxJoQQmODVar1crgDAe9t1Lec8SYTWum6qh4eHr3/1JgQdbUSjo8tutxuGIUmSmADVdR0n/Jxzt7e3eZ5Hh84joy061HddF2UXj8OOUdWMsii/hCghiGBKiEyTsiiV0ZxxRPBuu33x6uXbmxttjRQCjM7zklC8q+qm65gQnEtlDBeyGBXT6SwviiRNKWHWWa30/cOmyMurq0uE0fNnz7999p3qdVZkBNP/7J/+50ab27sHmSSY4Gi9lGZJRG6KotjtqljeWesAIwh4r9XwTqdvL3r0Hy9Ohk4EnNCJ79cH2w9xNnR909SSonGRnU/HV+eT6Si9fXu3W2HnFcEuzWTX6r5XHRtGcuSjfSICRggl1FsHAMWopJTKNC3Ho6Ic8yT1GHnvAdk3t2/X67UQydnsPEmSbVX98ld/d0ilAyGEIXSUlu6HOv53CM54JBhljCCEtDGAA0GYYBYYRTig4EPwhLDgvbfaGocQIoSlguGE7raVUQ58kIxTwsGHervTg9q+XUStasbY2dlZFJWu6zrqJEViZOyTR6hwOjn7/LMvrq+vh2F4WC77vgdEAJyyJs/LdN/PsMgHxikmiBCIhIU44dc0jVImYjtRXSzP88lkkqapMS7qiGZZlqUyts3TNIXg9KCcc1qpvuustZzS6HyEMY7YbNcO0cYFIWStiY0H50LXDsMwAIDW2hibZdlsNl2tVnVdAfg0lZxzTEhZloO2cbgPAQJEgHgMAQEOWCv1rpnUN20xLqWU2xBCCIQQiwxlLPJR275DKTsJJ9h7jzzEpCG265xz3vkjusUpOVIuj6yQ4yo9jQfvhZkTmeaDLPU7idsQIpgR//bUJw+d8kQ+CJnxpU/n/I4PwqF7dGwQwgk
6CseIFw6h+PjnBxI2xpHdEmMifBjbDhjvaXdw3870sdvnACAazB7/Kt5PIQSGEPMa771MssV6I0IQSdrsttp5wNSonhFUluPF/V0I1qi+rWoq076uinJUluXtXffw8CDlKMsSzJJHj3jTD8aqNE0pHZaVIQEAQGm72rZJkjirwTvOeZqm6/X6k4+v0zQdNn18JCJXTdP1vZqMSdVU4/Go61qPQKQJYTTGIcl43bVKmUGbsiyH7bZvh4RSxpjRWnDetm1c5JzzEFCaphEZO4qyeu8DQtpp51ya5kmStE0f78lut3vx4kU5niqlFovlz372s1ev3kghQghd1/VaEULSPJNSEooD4COrkDGGgVvjjdJxejgygN41FxhL03RT1fFb22w2qu83yxVjkBdp3/f7cFXVbdvmeVlVVV0PWcY3mw3C4ezs7OxsnOe587rruv/mv/29qqrevHnT3by9uLjI83wwdvHm5vZh8fTp0zgufHV1NQzDw2I5mUy+e/Ey7uh41V2fpitjzOXl5bZudrtdkiSXl5dnZ2f7ZsRQW22Ms0rbph+8dZgSTtloMqaYJHlxNjs3zu4224fl4u3NL5rF3eefffHJZ5+cn18Y72wIy+WyN3Y8nco8k0l2NT9/fPWo1+bl61du0P/yX/yzv/qrv/rF3/xlIrPLq8cX5+f9YNte/fqbv7u9v//k0y/+6I+m3z77brFY+YBGo5FxJobwpmmm0+nd3Z3x3kOI+yKgaBEDEJ0XAKP/OCnq4zzTB6n0qYXjEQJFCFFglBJCRS5EnkuM8Xq9Xi/unn50NS2z6ayYTgqZMGN7azUhJCHUWssIoYCDGWKQiDkI51ymKefSBq+7VhljjN1UC28DIEIJN87quq7rOqoqwDtZqah3vDfpYIzsvcKt5ZwhxENwWhtrbTSvwRgwRMQ3PNyv8jRL0xwlqO9V37SVbsCD0Q4HEJhiTINzTdMtl8vtdoeVp5ReX18XRRETw9hs22w2cY4wJk0x65zNZj/+4rM0TauqWq/XTdfFoYI4M55lCRciFnYYY8Go976r10mS5HkREXxjTGw7xwGP6BCbZVmUM4jzv2VZpok4tr6t8YNWUcEo0lmjxxDC2FqrvY74Wxz8iGIQhERPah9V2xmLqoNoMpl4b/u+1XpIkiQtcsYYIISp2DbLaIlBcEAACBEEPuZcxoHRTmuDEFLKFACxq2+c9QDWO0SJ5MIDaO8k4uHgGnpo1AEhZGi72M0NYa9X5t/XmIb3gYoPir9jGLD2PXvmk0oxwLHM29dJ3x/1e9dUcyeamaf//iB58tCy+gEx6wPh6F3Yi8Qo56JP3v4dEkIwJRhjRt+bqT9eseb+4KOdRsF3YXh/K7AHoJQyQhEG770lOBZATCYQsHEBMK+bYbXZjfOUYEhk5rSLdg3NdiNz76UDmRRFMRqtdrVBSDmPCA1XVxffvXi5Wq04obPZzPv7KB1FKVWDavqeYZyJBADnozJumWJU9kMXrUW22+14PK6aOhK7Xrx48dlnn/3mN7959Cir6zovizizBABd1wWPet1FnicEvJ/XVipy3MuyjO1kpYyUkhEW/zf22JxzxqmuG8bjKSHs/m5RFOV4Or29vV9vdjE+ffvtt8ZYOCzaePRpoyPRI6ZrsQ/NGAPnOedRXj9utDIpl8tlUWZpmq7XW+/tbDar6xYhFC0F+r4F8NePr6bKJEma5n4YBiGSsxnkeVmW4/qj2mj329/+9uzszDrtnFFKpVlCAjfG/Pmf/3mkp45Go6i1NBqN+r6/vLx89uxZPHNWq5X3/vr6On4cAIgJcbwJEX747rvv5vP5T37yE2vtw8PDN998gxBKkgQFjTElBMUBdkKYEIxzudltR6PJeFxyLq3VlIvr6+uPPnpa31988803/+bf/UU5Hj/99JOLq+vJ7OxiPllsNr1STDRJko3G43Ge4cePcyHervp/8o/+4OMn1zdvbl+8fr1YbsZn8y9/+g8A43/zb//CWv/
lT3/+5Y9/cnGxe/X6dd8Pxu+TcmttmqZxc8XmxTFJBYBY+R23238wyHn682OciysY/9DVqZ4zxjnTQ7vbbIauTYWYjseTSWm1ttYYY6rdrq7bYdBKWcEJBOCMogDOKIqJEJQwUuZ5lqUySTGhg9Z10+6quqpbTFGWZmmeOeeX69V6vXHOJWkey1tMEeNcSEYZtc4qrQj1XHDKqPMuRCUwjH0ASigABBdTe/AOrA3WgUyyEFDbDNW22m13fTs4ZbzxqZQEEFivBzW0neo77Jxk3Bo3mUwmk4nW+ubm5ubmxhgTM9a9Uaq1sX395MmTH//4x+PJuGnbh8WiH4aY2yZplmXZbH4WuVjDMFhnKSUEEwghS3iaphGFiCJknAuM8XK5RAhlWZ5lmfe+aRqtDWNsdnY2KkdJIhFCBGOEkFZD13XWGGcsQBBCJGnKKXPODsMwWG2Ni05dhNBhGLwLnHPGuLWuqmqtNecMISyEmE4nq4fbuqkc+LzM0zwHQDZ4hMhyuWlbozQQDISKEPa2Ns4a5yCVWCYyy1PKiExlksjdduOcc9YZrRHBaZYBgoD36hXWaME55wwFIARZY1U/jEbjLM8RwgEQIQQTihCinB+dpo9UzyMmj05kww5r+F2jGmNE3v1NrKViPyBa0EWey7tNcTJoEI64Kdqrg33oH4veH1c/jjGcFmrHx9+N2h223P718THqkWg2RE+GD39XkDst+NxeUvQwCu+9c9ZbZ63puxZB4Ix5H7z1IQQfoB+UlKnHBAhDhN09LHZVdXl5yRkDb3ebLecMPNzd3XkfJOUKGGMCENJGY0KUdptNZZ1vuq7vjZC8HJWCs/FoNBqVSqlm21NsszQpRyPvLYbAGe6ahnPmnauqHaX07u7u6uoq+DA/O9vVu121vf7o8c3tDaUEIORFAQBlOUII101LCLXaQkB926dJRnEgGMcZ0L4fIk+KUhqtDQNCznvKGGHUeW+9643ClCRpqrXt+z7PC8ro3d291mY0Gg398PzZ4vGjuWC8V70xusizOJeaJDJJE4zxoFXXdRFmRAEYYwRjrTVnLMuyYdDG2rOzWZpljLO+H5z3VVVxKZ48/ogz3rXdq1evldJvXt3cvV0Sot/ePPRtE7xXvbp5ffO3v/jN82c3CHSRZwRHHN5DCAQho/VisU3TbDye5HnBhRRCBkBt16/Xm7wou653Pnz89BPnw2q9ubi4JJRleSGElEkqk1Qbu1gsV6s1oez+/uHlq9dN0+ZFOZ3OnPPL5UprpY3VxvSDUtpEXVof4NH19Wq9ef7i5XqzlUlajsaAUNN2RZJePbr+6Vc/u7q+3my2v/362+VqBQgEY4mUgjPV99vNxgx9nsqri4ssz2fj8SgvJpPxdDJmlCilu6a5uLj85//yX754/qLp2sl0Oih1eXk1DL0ULHjnA+R5DgjXdQ17IOcdDOIBA0KRUkkgoP+Iau+oZH/kuX3w+Onejhs7wH6QaJxn1xfzp4+v5tPxenlXrXd+aYpcppkoyxwLVled7mrO2CjPBKVgrWCMC8EYk4xjRhEmxjqlzNArbZz3fjo/V52q69ZZzyjnTGht67oWgiGEMHkHMeG9O8E79Amh/fQui/biNo4UY4zJ8WyiRFTVdrutnLFSyjLNKCHBAcbgvVducGrQvfZR0pcSKApjzP39fWy2HVSR9rciDppkWRaJy2mabrfr3W7nnMvzPEpNyiQtiiJgFNXCQgixGqOUhoAyDs65ruujDE8IYbPZLhaLOH4QlRdiRBQiiVRmzjkmgA4OapGE6YwNGHEhEikZY/4wy+iQiwQzQljkOsZ5Va1133fOm3i8JomIv7DdrhHCZVkmSUIYC94apbTSfd9rBVFgC2McPABEaTSQHIx22011dX1FCDLGIkTSNK+qLaaExLFozrTWPJVo7/UNsVT12iCE3uXmxiSSRbDxOKx2Wu0dg0GsmD8A/T7AJ6Og2KEYAnTwkj3GLYyxsf7kacMRUIX3MP9
3xeUHOeKxpIvV4ZF380F/LoqKxdnu+DmkFAAHUBJONKwPTg4f9PM+eMIPANUPwqFHYKwFTIL31jvrXcAIUQIAwYPIU6OscQEA39wu+r69vry+nJc0kDwvvLWJCKMi29V9cCoCDyiAVRoRJCTb1art6hBcngshxGazYxhNp1PrwnK5ZgHe3t1fXl4aY1AwEd15+eJbKVmWptutHY/HkaodbcrfvHlzfn4e4Zz1ev3xxx/H6WxCSPDAGI/iZMYYSpmUqe638YYfR3fwwagIY7zHeBiLnpcA4JGfjGdt2w6Dms3O+r5/9eZGSqm07bru5cvX3sOTJ0+OPcI47Rqhkb1WzsEXRUqput4YQxB2zgVKjTEA6Orqyhi93W4/+vjJarXa3u/m8/lmt3v6cRpCKMtyNpuMx9PHjx9LkU7PxM3NDaU8z3NK2Wa9QyhEjighSGstEz4Mfd93T55c/+hHP/ryJ78fjfE2m433vigKjPFqtVqtVoyx+Xzuvf/lL385Go3SNP2Lv/iLCD5F3nvkkC8WC6XC48fnUUR0vV4vl8uzs7PPP//8D//wD7t2F0KIJN54BMWP/Dd/84vJZPLo0TVCaLlcLRbLWGiaTkWBQxt8MZpMZvNeDW9eveacjybj6C1DOfdaN9ttX9dElrmk00+e/OTzTzZV/eT60b/7q7/59TfPbm5us99888/+2X/5Z3/+b//qL/7Nj7/82W9/++unT59qaxaLxXa79d63TcU5B2ebpuWHcu5kof+QJud/6HXatEAIRR1FdGCNH/dhUZZSiCQRZZpMilwQstnsXj1/HpxSbZMXCaeMUhc8CYH0vQHugvdG8Cg3Fo8qa+2irgkhCFNjfa+NsS4QSilfrbeMMCYkYNP3vTEOIUQ5i6s5nndtqzHGjBDGMGckeBs8jupiAIADooRZa4OLSTWlwKy1zloH7s2b11KIyegsz3NBiVG6rqq+6RPBnTF2UF5b5B2y1mhtrU3Ks7quB6UY55PpNJJN4iRDjEx5URRFgRBq2naxXFqnQwhFOS6Kd0N4GOPYCFTWRGHoKI2GMdZdFQF6ANBar9frxWLVNM2TJ08i6TzCfVHuaDQaMULgRFtEDyoOZsX7Izhjh5aegxAwgoAJwceY5x0gBM65YVBR2JpxghASQiCENpuVDa4s80RmIQQXHey0W693WoNx4AIApoTyAA6sCz4gQrIs0XrY7XZpmobg4qR8mudt23J+4O4TYpxLGAsHd8D9yB2y8QiLRjNKqUSmR+EufNBSOQU8j0EF3m9IH356UkTh01GEEEJAfo9yHoPKKVga3p/Jg3cUlQ+H00+RzFOU9YPe3vHVw2kfAe9H1OEw/B5dLKMg+98Drp4+eTgO44eAgo8BNJpJHD8CY8wGb43RWgMChIkLwKRQSgHmMikWu+rt/bqq+1/86hv3o6cXkzzPxouHt+DdfDytd69N35HCKaURQt6HEHQ+LqdTRJnc7GpKudZ6u23SlBdtPwyD0sAZGO0xplXdjMrMeQh+35OmnDBGQnBnZ2fRn/Lly5fKmCTLuqFngpfjEWF0aHvGmDUO48AJXe+amPxJmVrjcYBgXZbnkvE8TSnGmNI4exOP7xACwthoPSjFGONp4gGMc9EMven62Jjfrjdd1w2Dn0733rPGGORD37dCCMYZZfiYGwGAUipJEmNsCKHIciEkJVQNejweu2C31a6qqunZzAdIZFoWIymS2Jvsuo4ztrh/UEoxJhibWe0YgeA8l+z60eVsOs6yIgRnrd1sNgDQ951SKhFpluS/+LtvQwjFaFKMJuv1GlM+m80urq6bppnP52/fvn316tVkMjEuNF2dFaO43YZhEELM5/NIEdjL3FDqnKvruq7ru4fl27sH7/10nAshIj3iMBNp4kjDw2IVs5YkScbjMcIUEHkyv5zEwZLgIx7bNg2nfLNZrxbLarsZj8fz+VxS0m37tm0vn37SDxrSPM2KTLBPrq8e7u6//u1vN+sHwPj/89/
/93/4j/+z28XyV7/8m5999fO/+ptfPH7yJBGcTieX52e/+fpbCIjhHxCmCOhAEPuPo7TEGfhjeDu6tMRTmx2u4+CRDgYBKNXfvr1/o58LQgQBHHxwWhCcpTljYuht1ynGOEZsNMoxxjxJhOAUgBBsjGnqOgK4CHsfCCDME5GkeZJkQ+gOMloq1kaUUnA+NvAopdZq1bUoQJbLjGaEkGh0wDDDlAbrMcaMcE6Fw84j4IR774fOtF3nrJ1OzvIkpZR2TXu32ehB5Uk6Lke6H1Svq22t9cAwIYQkUjrn7h8eYrVkrV0sFrGZhxCaz+cxJcQHp+84w9sPToo0z/Pj1GrcjZGgEX9HCOG9t9YRwuJAjzE2LvrlcimEfPLkSVyLEHDf9/ENxPLR7pWoIIrwxgrpKEeyV0s54VkgZ4+phnf7VRKVo7uuI4RRSgH5GGm22y3nPMsywZO27a1xAMhaX9W93RvdAyGMEBIJkwABU5Kk2aBUrzTnwjnbNq0xVsoUUyo4PVofuOCjnnK8YtiL/u4RwIyfJYaWo8r2acD7gFRyCngew+EHYS9e3vsYmGLYQ9FjDwEAxJ7cYRYCHVVdED7FP97VkfhEI+20y4iPzkEHbDN+F/swtodYCSEEkb2+aKS3YIxjtYejix5+j0r2Adxy7MR/UN0eL3+w4kOUUMZi+9YGzwmNRkVJkqzvH0QpZZo1t8tt3dc7+Obbl9NxWSRpmWRt21MEZZETD6oZrKwo5fOzad+rXdNLxgaqMcaMsa7r29ZoCznGndLGukePLm9f3gkenIdt1Vycz4IZgg+j8bjXKsakqFu0Wq1ms/l2uy3PxsZZ5DDG+LPPPnvz5i0ASCljKEKIRJFGjDGlvKqqTBLvfWQGpClQSinnsUA8GrbEzlAM/EmSVlUlpQwB3d09YIwnk8nNm9vNZoMxPj8fCyFevnz5k5/8BACGYeCCJknCpDjq3kUi9F7nHQAfRPAJoKqqAODFixdJkkT7iOimGYut5XJJOJtMRj//+c+fP3++2+1i7RWXxzAMWhtCyG63Q+hOSnF5eXVxcSGEYIxyzouiaNv2j//4jx8eHiKwFJk+8U8+/fRTKeUXX3wRl98wDNvtNmoCR5bAQX2iWa1WUsrJZAIHSs5oNAoH47PvvvtuMpnM5/PIIQCA6FAR+Z+R4LPZbN68eRPRrG/LidVmPB199smnZVl6D+Vo9Omnnw5Dv9vtHh7ulg+LzXp5NpnOzueTIl++fQOAJSWWYK305dn0v/4X/+V8Pv/u1duqG2wg9w8LTuCjRxdf//ZXv/fVl3/9y19eXl4+urr4+Mnjv/u7v3MBMcYkYwF8AIyCB0QAPAABFI4F3+/s7aHfcb3PDHjXKYm90Fj8xmHJ1Wq1XC43u81qudxs1rofMAoUY0YIJcgaPZuMAXzfd4xRmQjOxWx2BsgILqTkjBAAYJTE3+m7DmPiA2htA0JJmnGRWhcGPWhllNKDMt4FgmkqkyRJOeeMEmtM33UIhTIvpOTeeckpjnLpCHPOEplQzIINWmkUsDG22tZt03HKR6PxZDxhTFhtm7rVw0AJ5YQqpXfbXd/1RluCCGMMA1ZK17t6t6sGH5TWTds2XauNYYJPptOz+bwcjWSSEEoDgAs+IBBSZnme5cXZfJ5mmTamV4ML3hoTbRmyLJtOJlmaIgCtVNe0dbVz1sSR+bZtAWAymZydzfM8j4EzeIhp2ng8FkIOw+Cdo4RyxrxzMWR6Z4/djiRNvN1TQF1wzrsQkBASY2K0jUHXOb/d7g52EIGQyNKG9Wa12a5nk0kIwXpPKPWAdlW73lR9bwABJaANnM0mjIv1ekMFiwKJg9bWOx+AcUY59d5OZxOKUNc2HoW8LHrVdUOXZrkPwQ3ae58mMpEJpQQDss5ghKw2GBMhZZ7lPFrLIhwPtWNsO23jxYTjFHs8RL5T3G9fvgFARBoojj1CjDFG+z95F0QjmBkPjoP/AT7dIzGMxUi59/FBe/OUGKc
j8L4fUj6hg4U9fxOjwzADITSSd6y10UvsoNKCjjv3qNhyfKp3Qp0H1o8yihCMAawxMQEK3lqju65jhGJMooMgANLGeg8h2M22ojJFPHnx+u3zV3fagvfGWl1kaZ4I71zXNMF5mbCmbt9W3Y9+9OO/+/Wv8mLkUXhYLBEhCJPVZrutvPcgOCRJarXdbneEskwa6zzG4eLiXArunFqvF2fTcdtWZZF555umSUSSSPny1WtK2PzRlTXWGDsajY2x1a621k3Gk6Iouq67v384arIzRvUwEOSLPBdcBO/jVxWL8fVmmyRJlhda6+Vm7SFkRU45o1wKmXRdb6wvytF2u3v23fM4Ws6YKPL8fH5+cXHeNHXT1N67osxC8AEQpTRAUGqw3ksp4+ied24ymZTFaLVaW23LcvSLv/vbJE0pF03bTc/O6rZdr9Z9PxRFaa1bLRaM0KaqHl1dRoH92WR6Np0hBNFWLc75zM9mieRaqaaud9tt33VqGHabXbWrMCHBO2v0qCwQhODdxfmcEkwwGvqubWqtBoxACl7k2agsijybTsZXlxfn87M8S8ej8tHV5eXFeZ6laSI5owiCswaCz9JkNp18/vln19fXSZJst9sXL14sFos0TT/++ONIpovYacR+67rebDZFMe7abtfUm82m7dpEJt67ly9fPjzcT6fTn3/104vzs/Vqcfv2xhpNCU64wBiCc9booe921Y4z/tVXX1FC/vRP/9X/7//7Z4yF+fxsGBSAIzgkWXF/dyuF+PyLz599983i4X4+n7dtSzCJFmHR4ZJgggCcswQAwr93tfcBO+6YSB53bzyFj6fMO6accRACQIg80ripo5hy27ZJwrI0GY1Go3LCRRQIJQhI3ykNNpXS4uCMinmxdS6RCZU8ADLadUNlrav6dawnEiGRRHsipg+xq8cIpXlKCMoSGQcVaPAYE0ooDsh0ygQDgFHAIQRvhxCClMl0Os1k1nXder1WJgp5eLoXtkcEOcDOg3cuaKsjbKj6fhg6rS3IJJ4vEZyM3bWosXI8+OIxF2viPC/7vu/76liXRBeCiMNEKZPYqwPkZcI3qzpmwbEKlFIyJjDGSqnFYlFXbSRrFUUB8M5YLloyRW5n8DYmyHuZMb/XK4lfUKTewIn8Xd8PEUyLStZZlnDO46xPVHWBgLlIMKLbanl7e9+0Jkl4VWsf64XgMAEhOKYkBK+dj97NmIAy2vmglKnrej6dCiEcuGMkiC9KEIqM86jWcSBvRIcE508uiPYL75c7p+Dhu8rsUHUdC6BDLAx/3/qH02AZ8UN/XOrOu9Pn+QBojdEOTpgmHwy/HjeXP3F+wId/0PvjFtEDCxGMMfZOvWuif+86xvhjhRrDv/fu9A5Y7wghQDCKFTpGIfYVKeEEcSkQIS74wXrtQTsYHDS9u33YFKmQIk2yot2t1aCThBGPq+0uFfLu7m5+flGWLs1HdTMIwQE0oRA7UhhjD6CUJt6PihIw3W6q2SjDjMY3Wdf1w8MDpyz29na72ppo0MMoZX3TbtZbQkiUbmnbNk4KxfiEEEIBrFFt21K0dzxnjLnw7uDK8zxWMM65o55y3EdSpkmSVVX18uXLZ8+e6SFcX8/jbootdmttnJ/ZC6ViHEWz7AGq8d6XZRmrzLpqb2/u5vN5nqT/6l/92ZOn17vdDmM6Go2Wy2XXdQkXSqnz8/O+79fr5WazIYRoo5bLpff+bDYvyzJJZVxOwzAYo4Rgi8UibpBodam1Jph675OiDM4PQ7ddbzw4ZzxjxCgrU8EIlZIzwhEBo7Qyg9VOJJwgzDmVPMuyBAWMCFDMXLDBgbaqa/q6rfRgAAeK2c3tm+Vy2TQNpfTzzz/P8zwS6Mbj8XK53G63UsrHjx9/8cUXVVWtViuj/LauCEGc85dvXt/c3Dx+/Oijx09kIuq6/h//5E9SyX/yk5+Qn/70xYsXr169whg9fvwEF77abRAV07M5Qe7Vs2//6B/+3k9/+tP/4X/6n/8
f/8//17/9X/70y5/+7PHF9NXLN+nkXBDglLx69u0Xn306nU4320r1LaaGUI4ohQDeOhdMCCF4FDh9B3KehqsPYJDwvrLZaV/keI4jhOIxjd6NLr3bb4cn3FMqsMfgyNXVFVhDCIniRg+LOwB/eXnJuc9karTxwQshCIGuUSEEyll8ciaEsaGp+3bQ3oENDnPGOI+qPPggfy0Eh+AYwZwnlGGCEKGIUmr6CjOKAg4WW+fCXsCeKaVGeZkkiTHu4fbBmYh+cDMY55wxykULiJhWQ1gulwggChkaY5x1gCkTtLc2tr6KohiNRrEtjBB6eHiIwlpHUS5KaZ7nm23VdR1CIc9zSkjXdYMeGGNR4sjo4UhCCSFohCIsFjfqwaIhxNyqaZoQYDQaxel4az1jLDjvnTdG9X0/dL01JkKUEZSI4VA7672PEogEYaUGjLEQ0ns/DCryReM0a1RKc861bW2tzrIkAM6LkhK+Xm/u71Z1bQIAAmIN2LAnThFOZC6dc9q4RgdrrVEgJHTaRC2TGMgppcE4cF5QFpgz3nlvKUJRk5oxFoIL+yIIRxzy/bkFfwRvP1D8etczOwmKR+z0ZK3CMbC9v/7fbYTj8yCEQnDvnvNQ7WGMI8PzKJB9ZNPgg7Dn0TDhdK/hExGZY3w7hj1/dNQ7eRxjbI3/IMLtI9lBzeA0HAIAI7H9aUNwB18/H+dVAMDE7QwEkEeYEsoJVpTiEELbq7YblAEbQDlolXnx9iEV9EdPH40n867rANd5UfJhWC6XUsq3f7eenwNjJElk2w1pmgBoxgAz2qmBYRo86noliOcyQYjUbdOpocgYpgRR3HXdwtvzs/lkMgku9H1PCcnSlBJJKa2q5m6xTJJkOp0yH7puPw7LGfHea6UJIQVLJ6NiXCbj0ShL0xCC6ZVSygUPAHk51lorrREluZRSSkxJCGG5XM+mzGr79u3d69ev6zqUJRVJ2tYNpTQaPfa699ZJLmaTKRbe2eARhOCOqy6CupeXl8+/fT4Mw2effXZ/t/j1L3+dpimXYlgohPFoPP7bv/lllKJFhD08LNu2fri7T9N0fn4WnOWUlOVkuVgxQrM8JRgnqZhNxgEcIeSLzz/FiEZpQ6WM99674JwzzjBCg7dtvcMMW2U3ayeoIDSkIk0kT0Uav14HCQ6YCkoRBQJOu071utfKKuRRMS444YLTZDIajTPkEaKIIvr5F0/rut5utxGxiGBp19ZGD5PJ5OryvK7rN69fdl1XluVsNhtP5k/ax03TLB8WD4u7TCZFkd1RFsAThD/++GPJ+ddff4sxvri4KMrx0G4Xd3f1dndxdc2kXC0eCJPj2cWf/cn/eP3xJ//NP/8//dEf/sFf//KXf/2LX97fLy6mZZDiH/7ezyez2Z/8yZ8ImXKZejNcX86rtgNMAFHjowgt4oxxLvfWbB8EuQ+2/WnAO+yod0n0aVp91AZE7/EFAI5wTfD7lk1AIfi2reeTcZZLa3Xb1RhDkqRC8OXiNkkSjBEOWAgRzDAMA4KQ53lEZkMI2llldAhBCJknyb7LiDAKGAMiFBGEMSDvTABgiFLA1hgUEEZEsJQijBzyPiAP4BEChEIYZ6XRZll3yAdCCAqh7/uhW5sAnFLCCaJ0GIa6rttqPxdICMEYQgjWBQ8IEUYwFt4dY15s18VcbDKZKKXquk7T9OzsDGMcp0qV9pzzWEK5Q5kVqZtN00Svnygx07Zt27ZSCkIIY/yYnPa9iqVzURST8Sxyt7TWkd0TK3KlVBzqxBhTtveO8d5rvX/82Kw96kQfVSuP0FwswmLVGO2n0zRFjmVp0ffD61e3Dw9VIhkmfFu1NkAIwDnE5m+SsKrqtVadghDAAnAEXa8CIkIIKdLYl41lfRQJDEq5ABE/j/3jvQ/q96Sc45sPgD+oePyJ8cJpADgdaCMEn6zb9zgg8e++n/nFXzj28PbV27tZCXyc5zvGOfy+pOcpFBkOo+UnwQ75k4H3WOPu1WEOTUE
AQAcl0tNAfgz+338wPsIYjlXREV2IjxO6J5cGjDx6h5FaowN463TfVlXbaAcGgw6oM0E/LDnx4zK9OhtnRdkPewBg6Jrp2XkI0HWt1rbrOia4tAGhHSKIEKaU8dQxzJzziOCqavI0yVMxKJ0IFMlKMWmIgEQcm4nWx0PAxviu1U09BE+K3PGDd9Whg9i2bSMEK/NreTblBDPG/MFO0jmHCI4avINWHlC0WfXe9+1grVWDqet6u16/ePGibWE+T4uiqOvaGRsJ2DHNjViOECIpWdd1TddZ6613ACAl51yWZfn27dvRaPTxxx8vH1a3t7dR+nkYhpitAkC0GXHOTafTvu/ruu37njIS/RC6vpEJj6WC0sMwDGkqx5MyQixVtU3TPEI+SZJxzgWXGGPtdCazbb3VveYJt8oGHEb5aFtvccDB2abaOXDgAAjggNuhlUwmeZKKtMwzMZ5SQRlm7dASIIgicGC8cdp55JH3xrqiKIoiHwbVto0x9qOPnkiZZFm62Wx3u+10Ov3yy59Y6+7v7+7u7n/xi78WIsnzdHo2mZ5N8iwLwb1687oo81FR/m9/89cY0OdffIoxvrm7LcsyE7LISm3M/cMtJqwcT0VC2mo9GWVdvX5rBpFlX/3o0/lk9Ntvvnv9+vWrh7ZI+Prh7vbV6/mjy48//rhX5vb+QQ0dJoxyJJkIlCprrVHOWEROqr2/D9v5wD/vey6dp50SOKFix9/0JGbLPip7YowJiRWhv76+Yhy/ef1SGvb06VNM4OWr58aYs14Lhr13TdcGMyirE8EoxSHshSe8AwSEEsxkQpFDgMCBCx6hgAhBgVGMvTHeGIuRIQCeeGdxwIHaPMmd8XHbAwAOAN4bb/UwgEexDBJCOGODdQYDY9xb1zdtFFWx1mJMs6xIssx7b50ehkFZ570nlDJMzs6mcURhnz4bY5z1ELgUItlbXLZ9F+OQh5CVBeecAGrbHqGQF0UiOULo4fbOWhu8wxhHBwHwniAUSYycC4RQbJ4rtVcVOj8/n07OYnvVORcCKKWIB6t1/Lx7cJXvBSmUUr3qnXMIIxzHnwmxNkiZIhRilRmjYOQLSCkpJV3XDsPAGI0C2RznWrub28XDau08CJkZ5/sOEIXoH00oQhgoI8r27QC9B0YgBEAU75paGS2iNqZzgvHgLArAKI3tTHSy0o6VXBQJi0Ibxx4eIQQQOdJhTg/68EPjcccVTin6fub3w+sffmC1wzt88t3ij4FqL5h7EtgQ+tCc73gdUZNDRIxvck+QQQhRymPYO9ll++D3wSeCvVgaPb6f03+ts0oPWtvjdo5l6F5C1AMARoh4hD3CiDJkA6XgCdK90dZ4DAiQBdwb71oHblGmglGUyoSnWbNYcsoGGDDG8zk0da0dKOPOr66NC5zvtWkcAHaBUoQQIyzcLtrxuHx0NdfaKoUppV3XAA5lmedFGglrlNKyLNMkX2/qrus2250P4H2o6zZLRFFksT89DH0ITnKapTJJBOcsKKPt4Pb3kDLGIji8WG0QwSJJGWPWu6ZpqqZWSslkcnd3//DwMGjgEpiQwUPfDVeXF9PpVDC+2+2GtkuSpEgz5INzNkImzgXAiBBKOY+q0OPxOBXpzc3N2ze3CCGM6GrZiFKOp1Ot9cPDQzkeZVkBgDmXwdu2bZVSuEX1rtJm0HrAEKQoKSFKDU1Vt3VVV1sAjxBKEpmmGefS2RAlFzAmCKHRpCjzkXG6bwcumbehV50eDTIVzlqjdXCAKaKYYYww4CfXjwiigAMK2Gqlu8EFCx5pqzCQgDwK2AWLAg7IYyAiE71SzhmESJEl0XYbAC8e7sbj6UePr+K4ujHu808/ZkywLPvFL3755s0rDBA57QChnIxGo9HQdR89/YRT9ubmTZZljx8/ttaavkHIAQDywVg9dJ0LPiDsAY1mZwknWvWUi6uzcd+cNZuH0dn1Rx999Obm9r/653/8l3/z19/85leXjx5Ny4wxtq3qpt56QJh
yQjmnlDE+aP+u2jvdNuGEUf3Bzod9xvkeehn/jfMr8D4XBgAwhehchyDqFkb9e/P7/+DnEPxut8myJC+SzXax3W67rrm+/ji+etSbYcgnSZKnUg+KMQEYh0Mq6gGMMVW7ZYQyxjhjUTcS+RDAM0wYF4RgwShlmHJOaKCUtlXvo28cwpQzRigA+OAcgJCccx6sWz4surpBCCVJ0nT9MAxN0wzD4CEIIaK3b6+UcVZrazyiMpEiyYosSdI8oUdKXsTuYmK4V20AiJVZ1GHJ87wb9v5qAJCmaZ7nRg+bzSbWhTJJtNbVdtt1HRwaM/H0PObsEQOMg4DRbjhGwRB83/cM8FHJJZZNCO+V+IdhUFrFLmNUh4ldNMaYcybOHR5pn0mSMMa8dxGyi8bNGOOEZd9+8+z585fBoyJPlDJtN1AKmAFCKE3TELwxKgSrdbAWPIZAUICAGa2bRimFKFZKpZLneUkxxFT6eJQfY54L4L0nGAfA3rqI4h5Pc4wxwoRS6tA7HOI41RDe5zqerlv4IWD/ewnfu33xwe+dVoonGeE7LZWjyQMcxFyOaOf338bxR9+/Ii/GH2g13vuw54v+QKiOSyWcCH4ef2StPtrMHqmk+GBVH0JAB07pvkLlOEkFRhwpHQATigEx470JGFPoNby5ux+P0k8fXyRpTkVlGh1bUEWRLtedDeCxGU2VECLLiAs4SpwAgHeQpjL4wQDEoMUw9t4H72LVOJ1OuRBN1VJKOU8QQsMw7LbtarXa7hohGCW8bTvkw2w2s1ZrNThnsjzJ57M0lUki6roWiHnvA0aMMUyYc04b470njCZJIpIUIdR2XV3XVV0Nw9D14eXLl1qHySSPqAnD5OzsbDabcc5VP0SmdCw9QwiL7d0wDMY6QghlPILP3nvG6WQyefPyzS9+8ctxOcIY3942WbZ3DXzYPFjrLy8vpUyr7U5rzSlumsZaH2MDwiGKrcQuGsYoTSWhyHvb94Nzbrfb5nmeZTljghIuZSqEJITsIts8z63VplYYYz2ohlRNsyc9SSkTkcSWEAB0Tfv9pHCP6SFHotk6SyI/H2OMGLHaaGu8dR6CUcNu6FU/cCn00Nc1zhLHBM/TZK/lwvmPfvT5l1/+OEkSo1Tbtn3fRXXiPM0Wi8W22p1dXqm+//W3X8/GszFD6/VaJsnl5YVH6GGxavpmNJ5yIfu68t5mRenM0LbtuONLJecAAIAASURBVJD/7I//yTe37eL+fvVw93/9b//r/+K/+Ke9Mpgnq83m+au3vdIB4TQrCON1229226HXZtPAaW/v9Gj4XaXeB7v9mHieRrsPM2JCgvcIISnEtCymZZ5Lxgm6urp69fzbutldzOdZJt6+fWOs+vSzTyQfRbaHQ845KwUVnCMUjDGMiRCCGoa2VUoHC0QrH6x34DH2DnmM4kQTYIyFYIJnjCKCHMZACQ7BWG3aaqCEUEoZYQRTFDnoHk0n49XD4ubVa/CBchb5jfe3d72xMW6VZ3PCqDK6rtuH5TLJMoQQYXxUJFmWZFkhU0EJ97qOuMTxgIuXEOLt27cPDw9JklxfX5dlWVXVzc0NoWmWZeWokFISQFXVVLtNXe/KLKcMR+w+Nt5lwtNUMi5i+XW0EeFcxL3knFORmxeCUkopo7UOgJ1zOE7sUooQUkZHgDtiPjEFJoREY13BkyhLGwIKASllCEF7IUGtrTVHl+14qt7c3r9+e7vdqDznnPO6bq11WSYiUMYYG4zCTlOKnQOMwcZ5AADKhekNYOqc6bqOU5xIwRhzxlKEHSAcgKK9vdah6IE4u2l9PKl9zC0iB4di+v0ocsAJ966ncDI/cAyo6B3PJXxvL3y4/k9BVIROZKzfm897r6Tbo4gHrubpS5yyWt4DYPcDDFE55j1ZGXQyLxgOFpof7E104pp0Wgu+e7MEojOKtTY4dwoXE0I8pZgQHFV9nSEERR9hIJgJjgI3xvTKppQD0nVj3t7epZIWicjykmw3cWyOIEQ
IWAeYwGL58PjJZ2VZ1q3qug4D+ADGWSZEu92lBJquf/nqzU9+9BhR0uy6hLP5fF4URcwRsyTnPNKSYVvVq822b4MUjFKuOx1HHQBCJPFPx6UQjHFCCULgI8IZm9bG2F4NymiEUDmeCCGsD5vNZr3ddF03aDUMQ9u3uyqUBc6ywjkDANG7jnNe13XXtIyxcVFGRoxzLir8OR+O2kCIMELYbDb70z/9U4rofD5f3D+EgMoSxYpTa00Fd2DSNG2aznvwHkblhDE2Ho8DWO89wShJku124xRrqxohwARQoMYoZwwhpBiNOOcUYau0cqqrO4Sw98C479uaU6yHrm3bNE2NMV1wcacTQoau2a7frcOLi4vTtQEHSJxz7kOw2g8nGaT33hOUykSmCSM0Sl+OypxPJ0xwDAgRzAhBKHhntBmC80PbSCkpI01bG2Mms8mj9Lqrm6+++qqu6x9/+RNn7GK19NZTTpuqaW5fffzxxwihXVUBIY8urwat7h6Ws/kZ5Vh3XXDu/PLyo+vLu4f7r7/++nL++W696Zrq//3f/Xe39/c+oC++/NmPv/zq6UePn798/e2LV9td7QIwKorxaDKZXF0/hX9/kPMHA95pX+GDqHnyI+9toIyOx+OnHz15+vjRfFJKRtIEz6fFw+Ltw/3b+/v7EMJ4XCaJwECMMUISxgQcRBebtonBwzlXV21V9z5QoCJ4dDYZAQD4AADBukAoQZgTjhBwyihD3jpvnfFOqbZt20LMhBCJkAghbYa+782gnHO//e1vMcaSCyml1SbqUqZpej076/u+btuqqiI8QhgriiLJMsYYT7MsyyKBvlPDMDQpiRN3Li7xSBuJNEtK6fn5eQhhtVo9PDwcca2oBCaE2CxXt7e3EFxZjlNJ67rebbZaa07paDTK8oRzbqwfhkHrPr7Do7PUwUaV5Xne9/12u+17hRBy8dzH+xM2Njn6vo/9DL83BnunLoE4ikVeTPmttYyJOLjTdZ1zdjweR3nDKFTxzTfPdtsaYwghCg9SKTHnwnpwzgUMxpgQTJIkgIAysBqwBxwg1p5SStA+cvpzKY8ndbwzhBAcDrZzIUTrThS8Q/bYCIxXrF1CCHvS//dC1JFIeczMDh/Qf7Cq//71/34geU945WT9f4hkvgM/DmTdYz/1GKWOJ04MbMd6MeqP7QVID+PqH7T0vv/+I9Jw2uqLjx+fFgBstEg5KTHRoXd4TNda1RvjPPIHEjILiFkfqromGQgGnYK7h0qyN1eX8ywRo9HIWguUbau6LGWrDJXJ3aL55FOWJEnbm5j9BI+sd5TSQfvr82nTrF+/Xv/4i2vG2DAMo2w0KcaCs+VyWdd1kZUR0seI9t0w9E5r8B4OsIofhgFjEJwlggkhlG77wUvOpRTBvbsJg7ZGa8JIhC4QQk1Tv337drFaxlJda1AGRiM6jWM51s5ms7Iot9vtbDqJJel0Oj2bnSml1uu1Uorn3FprnIlpLiGES8m5/M1vftN1gxkUQmQ6PVssFvf34fd+79I51/f9aDQyxllrb29vy3zEGNvtdlVVlXnGRSKlFJKNx6Oq2o1nM2PMdrtqu9p7Owy985BlPPa/KWH7JizmCKEQUDkSxpgs2xPR486N4/8RyIlnRZxNklK+fPnyuNdOLYuTJIGTtnSEDZxzIk2apqnr+hSMicsmSZLI4Dul9hjGX716xTmPC+Ph4SEqoI6LMrZaCCGPHz8BAKXUdDrjF9OHh4fNZkMJ8Rjvqo0PaDYZ3dzczGazyfycM7ZeLtq6Gk0n/+Kf/7P/4V//5tHVxePHj//Vv/ozwujd/fLf/Zv/9Re//NUf/tEfEUJ+9PlnmIqm7x4eluttVVUVZisAICLNP4hqHzQhPrhI8HvJs4AcAkDUExYwpUIq6welEGMi4cpo43RWZNh5hsI0k3/wk0//0c+/mMmQhO6T8wz3qzS0VDfMNBej5KefPPloPuNWV9UWeY2cMUqNR1Mu8vW6TbKZEKOAhFJ
eKUMRzhJRJjyXRDe1IGiUJuM8HeVynMpJmYzzJKWYo2iFqVTdmk6nJJkVszIfY8B912832/Vys13vdru6rlvOJecSEzYo0/UKECnL8XR6tqjulVMBOcIx5ZhyzBniHBeZKDIxKZJMEKfaZrvQzZY4ZdtNyjDH3vYdeJMKjkJodpXRigCu6vb1qzeLxQYAc5ZAwBdXF2eTSfD67tWrzfI2E2ySCwq2q6uh7Zy3WVZM52f5+Mxg2ih39/a1sZZxLhMphRBCJEJIwRlCBMAb1ey21XKp2gZ7y1DwBIzTTJAsT60zy/Wi77skkX3fSilGZUkJ0UpppQhBSSKHRgsuwMN6ta7qpsjLs7PzJMlev3mb5WWS5to4ACKTrGqHb7978eoBAU1ZWlAmrHPOaYwtY6ju2slZAZQNPgxEbgzstNtpLxC2OnACHPtxmRKwk3EmOJmMi6GrGSOc0bbpCCapLPp2IMhiFAjBlBGMkAveuhAQRoTumqYcjfKyYJQwRo1RCLwUjGKgGAgKGDzBwChmBFEMjGJKEMEQXfFQcBg8JQGDQ8GiYAmK+KuF4ASnBAMhKLaiMUYII4QRJZ6QQEjA2CPkQ7DeG+c0BB+8QxDQ3ibBB++8dxjidoJIqUWAMEIQwMXONwIPIcT6N5aMGEeuJ8YEHWwWAIBzBhCCdxB8NPPzVjujED5l8fgQPA4BA3hrUQgY9naA4D14H5yzAQARhGkIKABCODqjwf7wpMRDUNZoaxAlTPDByWYgnqRVo5Xy331707Z9UYxZlirENUkXg9kFatKzpRMVGT0Rod6swdtRKhlGueDtpqIBpkWGA9y9WeAABLAOoUwmfWckHZIEbbbmJ59OvOmnWYGN+eLjp/PZ/OWL1w+LRVaOWZpUgxrAgxDrxWpotOAwLQinLmVQSAJmkBQuz2YBwtCr8ew8ScarqgOaSkm1852znXYak2J6Icppq8nDpnn++uHl6+WuNkqhfoAQRJqOPEJSCOedN0OZivkoSzjKOBDft9sVp/Do/Fwmcret6r7nImWCts1wdvFoOr3a7jSlBSLZ3/3ds2cvHxarZjSep9n42YvXzvsvfnSprDkbX0qWGaWX9/dCMDU0QkI5SlbbB0SCMlqmKZNJ1QyLZTOaXHjvkiwjIiknZ7tKcVkCy7LiolMY0ynQ0kJmvDRBtIZVfcDIW4cpT1+8els3fdMq73HbKueDD9gZ37XD0GvvAAPxDjy2AQUPPkoWK62MNdZbY00/9G3Xdn03qMGDxxRzwbmhNBAKlBOeJmmeF0RQHzwVJMJnhGHGMRdUSCYkwygVTCDrVNs7ZbAHr7Ru+/X9fV/Xpu/b3W5zf1uvV2A0R2AoySaT+aNH4/k8LQomJZWUcBrANW2lVVcUiWSkrbdgdZnKxx893q7umnrx1VdfzGaz+4eHdb1lMlvXTT45+5/+53+9aQYqi7OrJ+dXHy03dT801hnCk+z7IAl8r8N3TCcxBAAcECBAkTsHgACBUooQJJNEcIoxTiQnhHR1hbxjBP/jf/h7/9W/+OePL88ICmD69eqBoBCcscZ47xjnhJC6ru/u7rRHETqP3A3JhXOOItz3fWRq0QM3LyYgHz1+IhhHCDFK0zTJs4xR4r2nBLdNu9tslFJS8CRJEKBhGO7uF7vdrmma2Iw8dsWEEJHxSCkdjUZFUYQQ6rp2aC+9U1VVdGCPauVRlSaiHHGuLubIeSoIZoRizgQg0vf9drtrmlobs15vHhYPw6C44HlexBnz0XjU1LvVcqGVShOZSBldg4dhQAhzIaRMMcFK667tmq6TbN9+i5PUlFLOGGNs6Hqt9dD3XddppeDACHV7A+59My+8Ew1HMe2KCCchJOruY9ibInnvhZRR9toY0/VdLM7yvBSC90qvVqvdtmoH5qyx1qLgIHjvrNZuGKyQmDLWDmrXdjaAC9APSptAMA4uyASPyzxPZZmnWSowhDSR4B0hOPigVNT14N57CPboC0NOB90AhmHI8zx
JEhp1y0LgnMd5zd+FT3w/vXPunUv7Me2NfJAPutR7WBK9xxOBg3zM6cY5NtW891Hi/JgSH9tm1tljVXi61zAmRxj2B+frT1sJpxDou7wb3lHP0PtgLxzUPo/F6ClDLRxGEmMBHbP1ALDabLUJb++Wgw3G+15bpZ0NXkg5nU0ABaN6Y7UaOggBtg9Miqwsd1U1GDuaTpQxN7em6zaz+flms1UmYEIpZoPqCeBPPpluNxul4dF5XiTi4mwyHZfjMn9787ptW0xQKlOEiXEuWj8Jmgy6Ct5zgRnFhGBGGYIwDH1WFkmaYkoDYGMdIjTLst164QJQxpM8z/ORtna13lVV3fVD2/Tb7Xa73TlrI5EthAAYJVIkUkjBUylSKRACZ81uu8nz4vzigjC+Wq1X6y2htCxHXBIgpOsHo22S5m03/Pbrb54/e6WtwRinewBDT8bjIs+01olMq6oyVhOCfXCcs6LM47ihlNK7EOmRAEgrU9f11dW8H4aiyFer9aPH19rayWTcDcPZfC6kCAFscIzzYlROp9PJZBzAEMbmFxcekEgkY5xy7gEY5xCQMlob40KgjAWEjHMPq4e67uumruu+V4MxPiAIgSitnANMMCEcMLIWejW07fDy2cuqaeqmub2/f3nz5m6xaPvOOZ/kmdJmV1XL1Wqz3jRtp4311gPinPOyyMfjcRR2SdMkTdPZdEL3IqXAuOCMD0O/3VabarsnUngfdc6yPM3zPLrM39/fV1U1Ho2ur6+ttd98840ox5PJRKbpdlspbWZnc5Fkq/VWWQdAitFouVzfPSxu7+4Wi4e266N0+3sDDL+rwnv/+n7L3aMAieDDMGhntPdZIh49vsYEnj17JhDilDy6uvjo+sq0W00wTkS1al3nOCMIQpqmUYK5qTutbB86Rigj1Dm3Wa1tYYZhqIbtxcVFEdW2rO373um9RpoeFCEky5MyL/I8pwhr1Wut728frDbgXZZlUqaMsaFr27YN4J3fk/IJJoD2M8+ccy4oJuCcq5td1GW21tbDLmp9Tc7G8crznHMeI1Pf97HN5q2LJ07b9M51mGEhEudctWt6NcBBQAtjfHY2OTs7K8syzTIpU290Xddd2+ZJKqX01qm+jyGZMU44A4S11oO22jjwPstShE4YjGivqRWh1L7ruq7zxsYOHOfceRuFpOu6jkAHQsgYk+f5kfoRny3Kf4DRTVX1ahBCZFmBMe6GQSklREIpF0kqRdL17e39w9u7+6puvd+f+N57Z7VRNqJYRTGygOquripPEh8wty54BEVRDKQVYj+TEOFHj8E5h/cI27v5tjzP22p4F3yisPihK3YcbDiyJU9xwv0pf5CMwidSsej96fVTmPcYAiOoeJx/OMYzdNSAPlF3i19uOCEzH6FFxN9jb/oT878QQtSIOQY59P4kw+mWPAraHePTXgrZ2+Oz4fBhthph0Pc7Eei0k4dPjCBOUdPjuw2AbPCY0eVmPXg6OZsv6qHvjDaDxyj3uYegnWv7zjosJK+xaY0jSUZ4winXDhEhz85749B0OpXy5q7qJDF5NnKmSSTdbpZqgE8fJxfnU46BEHR+fu6dqus6ETxNU23MMAyI0CRJAHBylrdt3TVVkWep4IwCwchaSwlxNlDKA8LdoK3zAZPdbkcIS5LM+eBcCOAe7hevb+4Dwi4gpXTbdn0PeYoEl4zgpml4JjhlqeAYeUKQtdZ78NaUowkhpOk6takAsXIyCYB3dSMRnYxndw8Pm82OcvPtd/9/yv6rV7YtOxPExph22bDbH3/vzZuWSVukWlIB1dVVDQiSgBYkQPoF/Wf0IP0NAWoJEFBqNPSgQonFKhaLJpNMZibz+mO2Dbfs9FMPM3acOCdJoRXYODjbRMSKiLXmmOMbn/nmy692sxnmeV7XdSa4s7au67IoxnFs23Y5O+37ngsym02GsTs7O2Ecb25u6rpOJDVjDOfi4JgfMAKNb65fA+C2Xc0W9Xa77VRj1xaRJn0YkKg
tWm9CCNPpHBFPLp/Nz66EYBBjss+9u7sFH6zTh4S8GKO1dn55xqhgnBBkITpnQ4guBrROMyq4oIwKJDF4sE57F6sFzmazuq6BoAuWcJYVkku53W4RGctqlgEGDwCD8lr13vWpNxjH0TvLOU9+UkKIg0QKEQXjUnLOZepzlDJJdgwAzhlrbSblqxefXJ5f3d/fv3nzbrPZTaf1fD4fuqYsSxqDsyYXfPHylHDRD+qrNzf3t+9+5/f/cLtp2lETglrrIs+tN5Bme8e8lf9xlQ8gYvyo/GGgBKqqphid0YzAfFqPp8uglOQ0Wr1b37FoKYlMitPlCSdOihTKkQhvEQmjlL5dbYuimE6nVunVdsMJ5UXZ+SCFSJx78IECMik554LxLMuKPM9zCT70fb/a7YahC9YVecbyklKEEJqmMUo7Z2KMRZW88gJjQkpOKQ/BeR+dM0II7+046nHsQwhZJoSYlCHPhCzrqi4rkUmM0Pf9brPtx0EN46hV8n7DCEDQasMpjRGCBYg2faJJZ7PebmfzyVl2Npst6rre51o5td7sKCEn8wVnzJhx6HoIjjFaFAUAsT5aY7S3MaAUgnHO6X7BSg1aKhtOmzSKSPwXCpjK256hEEJir6QakBbWg5VzWkDTRMd7P3Rd3w8+hjzPCXmMZYhQVWWWF5SwTdO+ffv2u9evdzvjHETiJBeC0+hsb6xWIDMoa1FUZTsqa4K2QIlHZhExy9h0OgUIBEM8IqMSwp1zmeCccw94GA8IIQznHwvPcf/tnkHw4Xp93LcdV5rjAgYf6HD2FTEdz4HieKijh2nH/ofwj+TTvhdXHB3nRxvKEEI8KmOUp7F6/P9xl48q7rHTWDxS+8H7FvODQTt8ON386JgPBe94sPfRnwFANw6E8noy7fp+M/p6fgmIMs+CNaM2D6vVOPTRAWNoXOiGsRcxBFfa2CkXMZJg8qK+elr3gzLOzeZTejdIRiGYImPeqmDcyRyfXJ3EYBfzmeA0ePtwcz2pas5p2w+r1dq4sDg5lXkWIOrBCiHkfD6d1IKSGJz31lo7mc2sD33fA1LvA2PCBr9ery9PT4TM7u4ebu83hPNN06422yKvBq0TS7koMM/zuDdAsRUWFBFCDNG7EFQMnJMUmCeyglGm9G7XtdL4ajLN66oo6fX19d/87Bce4NPPflBVVVXtGONFXpZF5a2x1grBuq5rtmvnoOtbykianafSuNmsk8vJZrPp+76qquSuGaKfTRdEUkGlWulk57Q8PW1VV03KXdMld6rosTejWdtxHI0xs+VldF4BEZTVk5IiqSZ1keUoMsFYTvZSesF4gOitIzQeSG3HZ87h2wP3Kl1WtSxDCNZaY60N1nrX9k17v07dapFlgnNn9Nj3fd/rUQlGk1yCMwoxJM1VetiyLFM+jLV2t22UHp31l08vUzqbEGIyqeq6FkIwJgTfO+DPZjMASOk0VVXdPtw5PZ1Mp9979fwfvvr217/4OYjiv/oX/1z8+V9++fUbMwyCI7dYT6qmG6Lf89rYRzXvn5rtv18vErcN3zPIEAIAdE1fl/knz59MquLtm+/M2JNJ9vTitLm/W86nxKtm/fDyyRlKors4v7wQAjjBdLa1w5jS68uy+qSeFEXx5PJ87AeZ8dPlCedcD2NyLYkxUrKPjuScUyTWWvDBqKFr2rZtvbGcU5lVAMAIDcFppYZhIIBVNZmUVaDeueC9DQHS7spa7Vxo251zIQTHuSzLPM9LSjFGPK2W4IOLQQ+q74dgnbImWOdisEr7GARlSCj4YJ23wSvvpZQBYbvdJagwQEzF5urqanl6QgjxzmirE5ZoHZSzWSHFOAxd10UfyjxLKlRjjDLaWhsgMCooY5wxEv3B5iPt18yoEqvT6L17b0onT38wqr3cMBWJNHhPD560fQmm9j4YY40xY9dBCFIKRqi11hgHAHmecyF5lg/j+PU333399TdNB4QApcApDcFp7aJPyCrkOZF55gIkkAMQkAA
QmvEsK0qX+D4UCKOEkUjQxxgRXAh7fQWidGEYhqSIJ5wF56J/VCkghr2eIcI+SSqFYkMEiPgBzerQ7e3xzw/DgPYVMcZIaCQhxgghWW1EArivfxEihkN3GGMMETHRZlK1iODtvs9DQgAPTFGMj5uS49WEHPlix5gsA98buxNCwj+x7/xHq2yMkaQHjxHix+r1A7YZUyz7fodLYowIJL1bBCE+Bjv4GCAiRCRIKWHeewQSgRAmgGA3jrtGiWoJnGKgWca1spGgj8E7GFSMAQgOr308P58RWX33m2+AxPOzy0lerO7uPMC33357efmsbTptwu2qn9d5045/8gfPxn5Q/c718fd/+CkFv364adrdyWKutX54eLi5uSOMVZOpdM4Yq5VNToRZngdvvQ8+hggYAa3zoAxQCoiSMYIsk8Xd/Wbu4O5h++13r2VeROQxoHPee6+VpYhFURKCbdtiiFmWlZlkhGEECHt1SBojeG+7YQRCUYiSZSHEXiti3dffXbdtV9Sl4MXd7f1218+W05PlaSLjGDXG4GIUu+1m6GCxILv1arlc9r1xxkwmFYmwXa8xIIkgGS+yLJeSAkbngzXR225ou25YnCzv71YnZ6evr9/meR4BXnzyIkZ0zo2D3rWNHW2AQDlttembFr/5Vg8jYwQj1NNJkeXPXzyrijLPc0qRRz+VRVnXuZRSkNR1pU0AfZQwHSRYCcY4ABsP290wDF3X+Riqqqpn00VZ1vMTIRghBCNE7wkKBMllCT541VBkhCFjIkFKyXPqm2+/u727B4DZbHZxcXFydi4ECwH+w3/6D5zzuq6XyyUhLAQYutFaW1WFcy5da1VehxCCc/e3D/PTpbXKabGYzD55cqn6oRkt9faf//EfXV48ubnfXJyfrH/95W67ChEJF0j4vux91Of9jyl+H91IjNO6NGp8uLvh5Gw5rfNMXF6cTapyU+d1mXMC3XYVTieFoCzndSExpt2GM8ZE51M+VllWoigZY4JxA5gJyQmNzgvOUwgDISSXGQH01vVtBz6MQ6eUSi2OlLKsasoIhLjZbBgSypBTNp/OCCEU0BjTm+4QO5esmVMBOAjpEsXRWg3AKaV6GGPEGD0iJQQ4l4yxmKFzRlNhjAoBnDapR/Q+ItKyDBGxaXdd13nvfXTOuU8//bSu66LI+77vuuZxcBOLIsfgu67ruyb6WJZlXeYJlkwxDIjICUNCwDvjvCzkwbnYOWeVTi/BGpN0DikDOrkFWmtTAnvaWqZ2Iakpkvl6qo4JVRiGQSmDMUopRJ5Ths57HyNhnHFhQzRt//Dw8N2bdw9rYALKkgouY+Bd046j5RSyjAnBKKUuwDDqbhiVAUKACk4oByqyLLu7vlPjOJnmeZ7Xpcjk3qY5bTAjQRJJYtmleF4hROpjDrXh2PEreXWmon6MbcIjhnFgq6bfHqvi0gnvj4QNh13tcV2ED6eD6SQ8NF6H0vKRzcrht96Hw+O/n9cRYtPG8zEL81CS4z/RNR7DsIeKHkJgjH7Y8IWP/uaA3O7fkOM5/aNl2jFKfCD1pbuIogy74Xa1bjtlAxjvmOBa+VFpSnlZTQjBdrtxHhAAhTCjvt309cmoIpGM3642zaBWq1Vd10M3fvLJZyeny7dv3iHAtBSLSQlBc+K5YBcni4vzk3fffds2u+VyOfRd1w1d2xPALCsY4c55Z0Oe50iiZAwRjbPpimNCameLvAoYzWiAoPeYHG22Te8C9oMOQLWNAE4p0/emqArvATCGEIzz42jKTMxmszKTBGPEQFAyRqTkWSa4EMbq+9VaOz9dLGeLxaDM7d3DdrtdzOvm+o4xMV9MmLE+kF07/M3ffPHy5Zn30TlPCTjnjAFEyPMcrM+yjFAgBKTkfd8nU/jdbpcSJNImNV3jMfp2VCEE7UM1n3bDuN5uzcM9AJks50CYKHJZVsW0ng5KW4WRmJjv8nIyna4P1kuCB4rbXWOdH4xyzkUf6ro+PV1Op/Ock8SiKMR
73+BRmeSGeCBwHgybLk9PsqIspnXfjdqq7W6Xchm7+5YhIYAUEGIkgJQiI7QuK621VlqFEek+ilwI8Xu/93vJtLPvxzdv3q22u7qusyz7n/zJ/3y9Xl/fvP3lL39NCFnO5hcXZ4vZ/Pbm4ez8tCrq7Xad1q60uAVwMhl1AV6cLpbL5W++ffsPX35H8/rF5RkAXD172rbtd2+vgTDOpPLxH+n2fhtg+egWD9QWQMAAiCRGAMhlprrd62++Vu3mydXZ5dmzy9Pl5eWlWdZD3wZrOIkP97fLaV3l1GolBYPgKYGqLBGpyDPKeFFU3dAi4ti349BxzpzV4zgWWZ7neQiUUppLAQB7V2UfMAZGMBM8eb9oNSqlzKjOzk/Io+hKa93tmhS0YaJOAyF4tMZIojRCSJKgpkFrQtiyLAsu7YYjAPHeam2HoTPGGaOGQfV9m/5vrU/A6dXzFwAQvGdUCJ554vNC1nV5eXkppXBWB28RAgJwxqTIQ2RjcmcgJB2J9dEF3/XjYVOPhDyuQpQCkggYorV2HIah71O1Sw1fKmlZWRDGHj1cdLLZTSUhvXbnHAChNGG8QSnTtYPS2rtYZFIIQRlL80/GWAAchgEoW292b69v297IDERGGc8IY7rTAIFz4BQTVwYZZVyaEJSxSgNyYFz6QIzWQKjWOumVH6EVcrzEhxCi90j3piGEUYEihGBiREYJZ4xQ5xxSEk2klFLOmRCImBpZcuQHi0dGZQmWCYdwpaPb3vfnEQg9/H36P3xIJCGEuEe1dTzy2DwmlRwXoQNY9FHtPC5FH5XJ4wM7prQc17z0EvYVHRFSsU9ZDYj+faYSeIgR4qNB4F51n5rUGGJEAIIAqVOG9C0iwUgoZ0k+hJQFxIf1VgdwEXZ9x0TOIYzepiARLmU5rSGYGGxR1mUxefv2vlztIs/r5fL23fVgutFG6TxScnt7G4LjFD59WlOKi/lsfffF+en8888+OVnO+3YzjK0QgnO6GlSyADw/P+d5RZAGD0JkjAOxxENE771L/Sk6H4duAOTD0I2jLqqKEDUMAyItq0nbDWp0RT4xzqetWIyAow4BKAHvIiWkKPJpXVVVRcCQCEipkFwIxhiJgNr5+/VaZHlZlB7w69dvml2HlExm87uH26qezudLyuTN7cPb653zQPk+dptzHoP13gsBRECM8eRkjiQul/O0am82m3TiKaWEyCjdW38QQoqimM2mNhOB+W3Tvnz58vXr17P56c393WJxcr/a+QCc85TwPOGZtjr6eL8dhGBllY9KJh+PPM+cc9vdJkTv7J6UsF4/rNa3RV4Nu3Uyz5zP50m0cJiGHM7YQ+ULIfzmqy/qelIUOaEALlpnwAEA6KEHLvJknosEH8fMSinG2Hw+Z4xFJMaYYVTemLdv39Z1/ezlqxhj0zRtN+yaruvHzbbL8/z84unF+dNx7Ieu+/bb11999c33Pv3s4X5FEAFilVdSyu12a0bz85/99fc++/7LTyaCYN9sbMB5mf/os5d/8bNfkKxi6KtM/v5Pf2StvV+tjOqQ1fCRXP3/j9neRwgMwO3t9aTIeZHF4DabTSHE+dmirj7l2entO1Mtp3WeDc3GqR5keXP7rpAiee4xJox3etARtbeBMCQRQvScsuV8med527bL+bzv++gDIjpjtdZj12tnOaFVLrO8jNFrbZVS0flcyFlVe2tdSgFuu7ZtEwmzKAoOLDkOpBdLKU2cmqqqErgfYyyKgnOeqI8EeJqpplvClJ1zq9UqNYuJG1mW5Ww2m0wmsqw3m03Xtcmm3TlX1cXZ2UVVlVrrYTQEIiEQvDUmSCGcjSmhdzqdCsa7rut7lSgzhzWKICRSUyakHsYYgvVeq8TR6Q8KrXT9pAYIAJIFaCKGCCG6rnPOpS5wHHWWZWm+OPRqt2vGcYSUYC45ISyEYHxIPNFhNNu2y4vq+vb+5q6lBKpJjkCtDy64vu+zTEzrCiCM49irseLTsq5t17sAOgCPgMC
tt20/9IPJpXR+hEf40ZMYI9svygSdc9F5QvaZdoyxiOxAG6GU4qO6KEA8cFnh0XvzUCo+0obv7TYePW7IkUPm4Y6HwpAu8oPLyeECOZS3Yyzx0BgdLxPHjVeevxcjHvg1AED2XdoHNOkQAhJ6XPMOle/wjIffHvwQ/qkKmqrbcRuHiHCUxn5cUD8AbxIQESMiGu0gEuM8oWAM6KZZnE9KwVmRNbtuNBqDz/Jc8EqPXQDiuZRVEXmxud/OT8jy/CoGt/nNtw/rflaJ77775uLsZD6rPnn58i//8q9IMM/OqydXZ5cXJ3oYv/z6dZUX9bS8vr61zvoQs6zI8jIi09ZRDFUl2m4zDAMhmEzFMIDxVvVDO/R9p28fts7BZ58JKfOm6YwxP718dXuzaro+K8oAwRqf56Isy7dvN0IA55wyzIQsimxSVowx13eIKCkXlHMuAniljfVuMl16iO0wbnfNMFpEyjnT1mplOSd39yvnojJBCsIBq3LSdgMlJHrjrUXis0xyQrz302ndtu3JycI5d319c33dPXlaG2OWy2XaxxRFNo7jbtcQArPZy+82uzdv3gkhvvjNN8uz09vb+6ur53VdD6N6eHjYbNfbXZcE9em6OFnOIJKrJxcZI4clvRlbABAUJnVOSJlWMMAYzPjDzz9Pp4fqe9X3+JgPkzwH0o0Tgo9cASFTG5rCK2gg+0tmMalTqTNDP1oXvcc0QEFNgLZ9l7ZUgsu8LObz+cXFhbV2NNZam+Ul43LfUAJnIkPKrdL9YIbRAlAh+JdffwPBz+fzJ1cXyFhSHv/whz+8vpUPd3dd1/3gBz9cnp1vdr0Ze8nE//RP/ujLb16HTWvG3Y+//2nXdb/4e7trewOeRELzahKP2NiHK817nyr3ofKnJeYRLKIA4KMPIaQ9ImOEUxqjD95mmbw8P1+ezJ21OYa6qopMUgxlJjHYZrseuw4gpgQpY11EkDIv8lxySTnJOGecCcactVop71yz3XLGovdGq77r1DgSgCRXsGoYh2EcuuA9SZEv3nljdtvt6v7h7ua2bRoAEJwjgHNO5OIRQNgvW8n32VqbolmLoogxJosgpZSzQAiNEXa75ubm9uFh1XW9UhoA87xYLk8uL6+urp6cnZ2XZUUp6wc9jmpUo1KaEDKfz548uTo/P4cYjNZaKxKjzETGBWAM1g6DIYhSSIKYKqv3Ps0e0nlAKa2rejqZMkqMUpwy8KHt2u1qrcaRUUqRWGO6oaeUppwjAGjbNiUnyDyLMSKSlDSUAujTBNvZ0LX9ZrPb7RrvQ54XdT2xThtnKWNlXceIq822aVqk9Pr6ZrXeag1agzZuMq8nk8lu155M6hh80/Rta4CGejrJisI4P2j37rrTEUKEyWzSD8pF6JUlGIKPQpDpbDKfTSRnhETOKOOUIhCCSEjwQWsdI3DOATzjnAsBj84jgEgQm6aZzqZ5lkHKH4qRMkYeLYFSyUwv83jRT0ZuCUFFRCGEtzaGkMZl6b7s0SM0Sbnft4zee+8pAEZIs5/DV0yib0CCSABjCNEHjIAA1u3x2wOWuK9S+xyrDypQKtmpiTxMVtKRHGskDuoIRIzOpQMIzhtnrbWHNKb4SNwEgkgee0EgSCgSGgCcDy4EQEI59zH6ECnjgGRQyoWAlGpj190wanv7sP7qmxsboJ5NV5uGiyxApJwDEiSIaYQZQBnFRdH2Q1XXm23Ttt2TZ0/vbm6RBAI+OF8X8uryYj6pfvF3fxsD/PjHn/708xfTqlqt7rumqetSqeHd2xtANNZleSlEZkNESmVeIJKm7ZtmVddVXddd11nrvnvzhguptKaMhxh9cEIQREwagCzLR+1DQFkUUuY+hEFp4ywAZRzns+lyPs+LDKK3zhBEwRgHV+TZdDbr+z7LsxhBlsWoNcsy44LS9vZuNZ/PN7vm7uEeCXPW1vVM5lXbDat1E2IUWUEJu7vbWDPmmZBCaK2Wi/nQ98+ePikzfnF+fn93983XX0O
MmcR2pyD4+XwmOOeMeue883kmpWRjP6x7B8CUslrZi8snVTH95uvXi/npOOrpZC5kURRVWdY3N3fX17d1PX24eZ1xRjFISoIzwWpGoM7l8ydXGH3XbrrNutmsrBqmVf7s8vJsMS0yXuaikFxywilQCBBsLphgiMGZsY/eZIJmnJLox7ElGBgBzpCSSDBAsBhdsM4q1TW7sW0pQCElQzBKvb29tsZRzrKsyLKcUKK17rphtd22XZ/YD0VeFnkxKnV/f7/eDuv11lqblyUTvO37vutDgK7vEfHbb7778osvKWFZkVnvATHPOABQQu4fHuqyLMtiNp0N46CUkln2zTffnp1ffPX1V7PZ9A/+4Pd+/rO/7sZQFSVlIsNHk9Y0OElzr9Q0HKZBaTpirU2rRvJsixAppYIxLkSM0VrNKV0s5mWZh+AZoXU9yVgMPjijrVHRG8FIxnmeSc4eo9gpp4QBYPDeOd9sH/SojRqMtiE4QRinyCjTanDGGa2sMsYoq623xhobjXbJsyEEq3XXdJvVervZpLpBCUkBPcXjzQYrRCalkDI//Mu5RATnwjD0bdsPQ2+tT8ngXduvVqu3b9+u1+sUZTefz6uqOj8/PwQoxyPznof1ZrVatV1TluWLFy+ePXta1aVzznsbnKMkCiES5dpqba1lVJJ9jtz+HU6c6XEciqKYzWZlWRJCgnXBuxijHbUxxmpzSGBP8GY9nTym88Rk6aS1DiGITBJCKGUJPHHOEUKzLLPGN0233e4ONtMxxr4frDcyy2WeW+t3Tdf2g7HeWH99e7/bAWVwfj6Zz6chRmMc5zyYQRvtPBAO1aSs6ilSZpzvBr3rNWHABc2rSdsNo7EuQsYwxJjnfLmYTetSciY4yQSXUnDBCCZ6BQnehxAZY5y978DwcQLnQ/DWFUWR5RkCHHvi4GO/le4S3icPcPhQKrAfuf0T3pgHQcJxX0gpRYjHzeLxNvFw9+MBW3wUDBxOkgNCEhMx9PGh9od0hJR+1MMdY7bvq/Hja/TeO/8++2b/ROmOx85nhB4f5zG8fGhYk5oqtbC9Aq3tart7c3MNlGRlNbog8wIJiRGShWbwPrjggg8uZmXd9UOW5/0wUMa8czEERnH9oD95efrsyaUzQ7NbU+KeXM6fP7kSaAmis8Y5CyH03ai1FkIGIELmXOYxwqBMPyrvE5fVa62bprm5u00qwPOLUyRs17Qhgg+REMqFhLiHtd9cPxjrQ4jGeh9BCEEIM8bkeY4YfXAEY5aJMpOSc0Bvh6GqqqqufYyjGkejCaeUi3YYlLGbbbNeb7V19/cdoUAJe/XJp6NSq4et83E6nZdFrYxdb3YX5yd9NwyjDU5DBE7he599hghPrs5+85sv7u/vsyzDfUJZGULAvU0/iTESCmlka4y2pMpEkRe5Vq5vh0xmjIu7u3sEREIgQC7zIivUqIy1k2qymAhKoun7sW8huEywjCOBEL1mGGd1dXlx9urls09evXj59OnZyUJ1rdVKjb3VKngbvXNGW636rtlt1ve3N7c371YPd33bWKO8M6kY990uhpgJFmK8u7v97quvrdHBe0lpJkQmBackpdc//fRTKoQ2DgjJioIRtmma69ubyWRykOMiAYAoBZ/UlQ6SMuFCNNqFmKxlcRwHxvnNzU1V1oDws5//PMT49Nnz2+tbH810No8Afd83TTObzfM8p4y2bXN2euqD//KLL374wx/8xV/8xbt3b/+rf/kv31yvu7ZhKXs+nSJpMpSk4lVVJazskFDadV3f92oYabJ3ZAQCCSGoYIkxhALlsp5NZieLjBGC0SM1zu8aLQXLGGEQ0QcSPAUXrc4kJ4AANGIIIRjrjHfeR8mBYsS0RgA6pMBJDNh3DYkUSSSAjCIhhBIgEDAGZ3Ry33HOJa8pwSgAZEI+ctyTZUdKGuCUpvw2mSZ2yVgrJLcMIIRgjOh9TI3RdtNqrbU2lLIsLyjjSpuu67759ru01B5MmRMd//r2Xaq
OV1dX5+dneS5DdN77sR8IIZwzwXkIzhnrbYgRmEDvIYTgjbXBG+Os9wjAZSbzIs9LxOi0sc54b6MPutN7epUPwfmUH0YplY9CnKQjTDBd2soQQmIM3u99h9OpNgxN4sIgEsZ4imIZB33ydClkoYx9eFh1XYeEAYCyWus4mWCWF5PJhDGxWq+bXSuEiNp4B5SCzHkSI/Zaj9qutzvjIKso4xkgdfuJElgfgt/PDx7RhY+jHAmhSGkkwQZfUJGGCiEEpBRwT9Ys6ipZTMUYIYa9iSUhjKQ8uXCgyaR3IG3p0ql+eCLnnDwKbYDHCno8h8MPFREfjcCPaZPhw+yRY1B0/wdHI7294OGxsr4vP0eigmMdxYERevwUMUbqH+toiJi+Hr3aI8G9mP2I4Er+MeX78TsAR7lIAOBccM6Po/YeAPfIqtaaZ3LfdLqAMWIIMQJG9DEwIZSxXEpGcL3bXS0XmagyRuq67trdZnU7KfPvffri7HRZl3kcNtEDBowu9uM4jsoDRKSEYYjoAYiQ1AetdHBWUmKtHoYBCGWMjcoySYGybbO2PnIpcpFxSossD8FBJJRS47qiFIC0GwdEzIuK+NBtjJAMIgEIFiMlUTCBxCPS2XIuMrFtmhjjoDUX4n611j5sdtsQcbPZjToiNQDw6sUrSvh212oTnI/tMECvKOV9P7QdZHzrPSyXZZ1nXbu9vx/+6A8XTdNA8G2zK4u8ruuvvnorJV5eXu6222YbCSAjOSNUioIxkrS2IUbnLJM8E9nNuxsC5Pzy4s23r998+zqvSiHEcrlklGZSLmbz+Wy2fvfLtBDFGKsiF/M5ZSUhpBQsNQB1UZRlzhgzqr/bPFR5Yazq2zbtdZJN4DiOaRMphJjVRep/GAaGQQ99349937oIeS6Lsj6Zz85PTsdx5JQRAGestz7GmGw9BuvLalJPZtbathucsYTyq8uno+q99xSilJJCDHZ/mYiijFp3zbbt+ul0Mp/NjPW393dFkc1Pz27fXTurn7588fb65qv/63/3vU8/u7ich0idcxcXV03TbFbrtm1PTs9/98c//LP/9J+fX1189eU3u/Xd/+6/+V/9H/9P/+dpmb16/mw3nbDDhZ1W8Pl8fnJyUtf140m/V1AmLJUxpkd1NH4nnHPKBKW0KPLFbDKpyuQIVZdlXmTbdpguc0AKKTwn+ACEI6MCjdaJtRBj9BGcc86HEEAyCtGHEIPzzkanDQBE52WRs8Qbp+gjiT7EEL037W5rRjUajSECJYIzKfI8z8dxTKzLEIK1Pi12aZRIkAEQiAQJxIDee2ddlgvvIgDEgD6A0apt+64djPe5kHlVgg/Gu3G16tVoRlVOagpIOBNCACXW2F3Xjl2fF9lycfLk6nI+n1OK4zhap5N+ExEpsrSU7CmanMcYU05HiOBi8D4CUsrocrlME6kQAkaHEbx1ehzNaLXW1rnDjoQJnoRHMUbnXUJK4ZEzAgdJ+COhI8aoRtP3IyKWZZXANKNdmgLKorbONcPQa+ORECRKqd225RwvL69CjG3bMsYSltypURBgDGRGi7rMcxkQtNb9OBgPSIGLzEcyGq20iQiMJJ3c++KRMgvTa4mZIEdqoeOigkcU/9TRVFWVlUWi3aYCk5CJAywfwnvNAx6llicXko9+flzk0n+O0dFD+wgAJH6QZxSPFOjHxQ8/JLkcQ454zMw8ouAcHuG41Xs/pfswJvcjus3hLUp9JCKG9MiUIGLCao8P7PhdPS7bh28Pz+VdcC60bWsVBOGNMRCCtRYZdS45WQeKkSBlSADAGJNkZ7PJZOzbLMum0ykjoRBss74bmmZSkovLk5PlTHJi7SgptcYbk8jIxgVgVAChQmTGOjNqwrgLcVDKGEcpneSEMZYVZVnVo71mVISIu7YbhuBClFJ6xhFVcN5qJYQ4O70oy9IGPxptrE/1QAgwxlRFnuUCvVdqIOCySV2WOQM03u22TVbko1KBsXXTWhcoEySi9ZExjBG
cA8Gztm3bfphO5kUN6207jraqSCbzIutns9n124d37/qzxXhxflI9y1IheXh4yDKxWMy8j95DopspFbXWjPUAgTGWU55lBaVEayUcHcfRaj2fz8e+223W56cn58tlVRbp9CMh6LHzxpDorFIUYTKbFkXBGMskn0wm0+k0z/NpVVJKvXPjOI5DF2Psum673RJAp42yhiHhmcy4EIJxUhjvKKCUfD6pgZK+ae+bLYY4P73glM3nSxu81rrrOgBSTmSW5d57pfTY9VY7iiTP81xKTUk7DtZaRmiWibzOMfpEVR3HEbzLsqwqCwBomqZvu5EwZUyvnTFWaJdr4yIgF1xmp8tFUZQ//+u//tU//OazT15lRfEf/vw//vN/8T9rR50JnuclEtYNPVGEUmqDf3p12fTqX//Lf/4Xf/N36/vrf/0v/vlf/tXfXH3+Jy+eXrCmacqyPDs7S/4jUspEau/7/timSEqZTMdX4+id09YFiFzmk8lkvlxUVXVyuhCMxxj02EfvbIBm0N4PNTUZF4xEEqygYVbki7rIZIYxRB+CcyHZDgEwQoGA5CKJtzDEgEABY4zO2CIvIETvfXDO7GMXlbV22G4oITLLirIglHnvgzNjHzjnFCOJABEYEkqRCi6EUNEjkhjA+0Rlg7QiBQ9am3EcjLHeO2udVjaESCj3EY0yahiNs5yyLC/rapqXRQrgUMaREAlglpecycXppCzrsioiBmO8NqMZB2PM5dUFhIgYnQ1Ou2ADIYQhU1pZbUdtQ4RIkBAmBE+6PQDYN0beB+eTainYaJXWzu4ZEACSi6ootTWEEAr7nNgYo3UuxChptu+CQ0gTWaWU0S4ZSSMS773RDpHmuczz/H69SVWZycwH1bR90zTjGGezAgmx46iGUQgRvMcIlACnkBe0rCYizwFgHMemG7p+zHIRWSCMq9GaQdkQCQIhNMtYcJZzThJmIDijFKPX1sRYkMde7bAW+xiAIOUMKWGEpn7Ih8Afb4d6FjFlLr7nnhzKQxLsH8g+qewlhkt0/gjJTL6bBBEhAu4dTT4oP7if68VkvQkI6YsQkqTnBDACICBFQpHAkTzguMLtkyUQfqu2fVD/Dv9+1IAefhiDh31S8se0lHTs72t5gjUIe1TyRSSYyl0EiEBCxBThF1MMZQzOR4aMALXKJYmndzHFe+2fBWiMgBCRAALBCOPYzy4uNttVli3bzfpsNqWUajW2q1vw5vJq+erpxaTKfLDaGMEYJbwbmn4f0wpcZpRLD1RQFl0YtBq3u+12u2t7xmhd188uLzKVuRCNdX1vyor6iExkrhtiQIgk7uWISW8nA8kiQUrZZDbt+lFrjRjn85mzWmZcCBqtj0CFYFwwQvHt2+uqqgLEgDAorUIIEafzxZs3bwKQYQAhYpmxjBuMceg6WdR3q4f7h13fQyYBkGhjjIGbmwchYDIV52fLq6vzaV1+8cUX5yeno+oRcbPZhACnp9VyueRMzmdV23WJBEdpkmJJROCC5pzkedl1XZbh1cXi+vp66DfnZ7PrW2WtjRAZcRCNEBGREmJfvHhZ1/WkqjnnjFEpZZFljLHtdk0I0WpYr9dpbffWjeNYVCWEGJFEQgOgB2SMM5ER74auV03rXBCZDD5QLigSF2KAwBkv8lxmpbY2EGZs2G633vvggSClmUBAA8RrN1JM7q/aOdUaBMAYKEaI3hjjtCIIDKOUkhEscznKaSC0iFjiJJcyUjZZLE5OTxnF7frBh/Dp59+7uXn3i1/+Skr5ySefffH12xcvn8ms+Ou//dtJVT57cnX15GK73SIhi7NzF/y6Wf/B7/34z//ir549OSvKP/m//D/+fDabsel0Op1OT09PZ7NZmuQniGwcx0OsRrpyUpjOyclJoshXZZ1VdVEUeVkIIbTWm81G6UEwhgDbpiUUqqq6eVgtprPFtMqkYBh88G3XqS7O69J7s6fzBUDENDzZU5IipJUaABKBc+h6eCzDCcpLZS/PskR
DiDE6a6xxPgZCbKItxKABYG9pkpK7pfA+Ome9B8YIISxGCAFubm5DgBSBGUJImhvngscwujFlfycrzsSV//rrr1OLnGVZQQuRZ2m7UJQ8MSf7vm92G+dcIcV0Oi2y3BhjjEppf84FgOB91ONgjPPBIxUMKRMiy7I8z5VSRVFIkVPAthlUPzg7hhCsdcc79+SrCQBd13HOgZJDJ5GicNz+6Zz3HsDFiOM49t3ofbTWeh8QqBAZ5ywReda6T53N2Kvtdq2HmGXk5HRS5uUwDFqpvJAQSd92wUGWQZljWeaykEiZS0nQWhsTgXlnIzKrrGFcEoCiKBljEBQA2HAIDJL7+pRCUGPAGAlJpJJ46AgTRAlhXz+O3eIPXc6hrzr85HjmmlDfg1zhUOp+u6KkWyKRHyfPPWKJ+4T3j71jjgrbMWb7QR06gj0fjxk+avLgw4D4Q1sWf4tifQA/8bHshfi+pzy6d1IpfPDqjl81Pjq0HbDNA/smnTlpo0ARAKixjggWAQgyRnwkEElECBggBB9jtGittRQwRBfBE0KMVaof6rquC/H8YjGf5hBsznmWEWutavRu1yqjOZPIGJc5oWzUxrRdRKqNe1iv3t2oEODqSp5dXCT/DqvHBN4i0r4ftTZ5TkSei+RkIQVY7ykwxlplrLUik3meG+ubpgnBVXUhBIsxaDMyiHkuizJ3ztzfN0hIPwweIBCkgpsQXYjjoK9vlZTgHDAKdT3lXAKAYBwIubvb3e2gliAEb5peaxASjINXr56dniwEiW3bdtvNb7548+Lps4f7znnjbEh8gru7uzwrnzx5slrfI2KMjlBAROcs4yTLMq/ifD7PGHFqyKWYTys9tFLQ7frWe18URSbr+aTMsizLhRS5pAQAhlHjOFLOWD/ce2+1SthPjB4ioYQjBJ7LPCuzqsAIPobogwveWK+dxwh5WVAmvLPWBRZRZkXOKEWy7ZSUkmcMkBlrjY2Ci6ycYqeMUcYYQqhkyAglSAIh7agmVZVVdd829/f3euzLIp/XxTj0DAEoaXdb3TfT6XSxWNTTyTdbtWs7pZRgdBw1BLdYzJ5cXty8ebNZby/Pz4pMuuATTf3f/ekvnn+6pFxwJo2N2ro3766llJwRRunD/W1Zz7788muZZf/1v/ov/99/+mdVJv/rf/1fNtuWffLJJ4nKGEJIhPujze+eOZ0A/VSHjDMJI55Op1RmIYTVamO945y3Xeu9hwxC8DoETjkSFiJSIavJtMoEDwatRqeCN4Mao/XBmRACAmUpBI8x1XcJKhnH0RoDj964FB6n7o9hY9776D2j3HtnrDEjcTHEGDkTXDI9Kmut85EQImUuhAgqmKgwlNZ45w0CFZIxKiJ472LXDcmDDoHG6I1246jVaBwNAEDY3kmIc973/Xa7LapSCJHED6nt2BN/M+CChuhG1fd9zzkvy3KxWFhjlNIJs0WgBKjWenTK2jFGJIxKKYWUSS5Eudibgtqg+iG9FcnxWWuLiJLxEAIyeqh5fd/nec6kAADyyKoPIRir0udordXaeB+11mo0KfckhMiZpJRba8dRG2N0zhIAsrnfGQuTip0slmVZWm2MMYgoKBvHkSLMJkwIURaEc0kICTGkPNOIJCBo7Y0HECFG5FxkeS6yLEY0qgkuEqqTnalzkhGMMSadk3OOIRG5pJy5EJPQPv0qUVTSopy6YcYYUBL8e9ua1Jx9VBvSmZya4IPT5kGnL450dXikjfsIbHwPQh49+DEeCB+qIH4bkDxcUIer6dDtHd8L8T1SejwjPFTZj550fy+A+FgI90fx6AITU2NKyN7IG8lvF+z0/h8khgf1AgA4FxAoRSoljUJobSkhHt6n6iIiAoUQYwwYgEuy3a6rqvDGLmZzozWr8rLMz5YTtCaNnwUNMpOUhs1qNzyMm82OUConJRBKCI0RlTLWe6RsUHpQBilMJjBbzLMiV939fFYqZzjj83nNeb7b7e4fnBAAsdeMcEaV4kFbpUd
GCFaLEIL1jjGm9WittgHoODKCJM8kp4ISxmgITo39dt1cXV5uNjsfovEurydd243arLfNdE7LokKAoVd5nnsX727ujdIE2WSaZ7nLsgKQMqEXVNTV9Ox0OXTNr371K6f0dCILwS8upjHGpmmklCcnJzHiu3fvmgZOljCbzebzuTEmBMs4YSxhHiAEsy4EN0wq0XWGEHeyrNu2RdB/9Ae/k+RJeVVyTgnx3o29VZrme6NBo10MFNB7b83eFpgxyghJawshRDA+ek8RgRDBGGGcMbTeR+9dgKwoBUBwblAmfSLBOZHPGCcReSB8UOO727tOKSHEk2fPB+cHpaMDQgiJJAEq+aS6X23atsXocynqurZGX19fl0U2n9SMVl2zc0YZPW7WD2vn/+wXq65pnXNSsBB8cGZWleenJxADp/jmzXcvnz/9gz/4Az2qX//6+sWraVHP71ZbQHJ1fkpIuL+/d0Z/8urFr3/9yx//5Kdq6P/kn/3Rv/+LvzDG/fD73/tPf/6f8+KZlZylpTwtAYfEzrRKwofz/LSyPL/6hHPOZEaQWqet8SaEEEJadvfTLDNSJiglTT/EMFC6clpJAgLtLBPzST7J87FtMASAQBEZY1LKZLM59mMIIVmoeOcOe+3dduecC+59OHXaxW/7bq9py7JIqPc+eO2933czAfI83+e6ATjn1ts2cSAppZnLON9rMyf1LLFYh6FNK3JaZBN59WDQnroZ59zZ2dlhgxxjFELM5/O6rhlzTdPc39+3bZvn+XK5rPLMGNO1rVIqOiel5IwFxh7XfUMI41JIKWWWIZWEM0Qsy3Lo+s32XncDwcAJTc9urU/JyDHGNFnUxqRWj3NOGXPOxcfUbOdcID5p1b33wzA88tvjMAycCyklJTyZeWptnXNDyBhj1jhkcD6fPLm6QsTNw6pvuxhC6mL71pyeTs/OzpRSCAqAWG8DsHSOMMYIAZmLaIMQwgVH962kNsYUNBlp7Vd2SiljhAQQjOzhR8rSAStlktY1ObAcZnLpdE27jX0bFCH5blNKg7GHknOYoh3OlkepKEvETmstFfKjRupQBj5yY9l32DEcmshj9BKO5oKHxzkibsLxH6TXkmZ7h7Po+O7xQweyjzDPj2rtXowYo4sBANK8EJJn2/4nJCEThJBHtf37Pi/9/6DqgccB8GMRJUKItOcLVBgYqZQWSJrDYYgIkNJwASlAYEI0m+35p5/aUZ2dnqxub8qyzHndrO+sajN+cnYywTDuNpthbB7ub8ko27arUvoakhjQR6+1Zllure3HwYVQVZDS2t68efPZ1byqqnbsASAR09ZNay1QCs45JBQtUIpW664DjIFjnzb0iOjSpy9RCNE0Q13ls9mMY3DOWm20HkOA+/t7pVw9r22IqNT9w0ZkQggxNh2WWOaVVlaKPORxo1RZlm9Wq8lkVhSxbUYmyNXlU85Y1w5///d/b/XoHSwmrKqqZr36yasf9n1vLQgRyyr3DoUQFxd8Us+ur68vLs+ccwCB0n1eSgTgnFZlsdlsTs/P5otZ8tAgNBZF8ezZ09vb213TDG0zqt45FxGdc4xMZ7NZVRfW2r7vvfeSUSFEJmXa5iJnkjAEgogyy2zwxrkQrGM+WeGO4zgMQ5Zlk8kkyzLnPADkZbkoS0qpduz169c///tfEkrzqgoxjs51w1jP+hCgKKdSSvCQjDK0Upu2tdYarbyxG7CCoOBMkHh9fY3OlVWW3v2hb2/evv3mm/Xb/MwYQxGyLEPwzpiu3a3uH548vWjXWx/0uzdv/v2//9Pf+clP/tv/9v8wjuPb+3a3Xd89rBgl0Q7zSamUWq/X55dnv/jFL374kx+H6J9cXL65uX3y4uWLl8/+489vZ7MZw7xUzjltnXMEELjc4xvoY4zKWuticD5xe05OTkaIgaCLwTuvnA8BYgDvPSC1xmuvnRq9s4wSKghDwt2gnWn7bkAyjI0bx7oqT+eT89NlITkj4INHCplg1qr1+j4
B37vdbhxHilgUhR7V27dvx1FPJpNcSO89RZoI98MwnCyWPoRWxcF6pDGEoO2otW667vT09Pzyoqxr433b7kLwiMhEWRX5gUpHKCUA6P2gVaq1qeCFGHwM3vvlYumcCyYYbzz1hJBM5NV57a3jWVbWuZSS0kgpIlUR6bjuVNvapqHWTieTeZFbax8eHsZxrKqqmEystQ+r1W632y/BZSmzrCjLsigYY9EHP/babjWA1UYYywlBIN55r4E76mur/UApz+SEUWkM9MoqKwpZEJACOWN2sFttBkTPOSlx2u/GtmuHwQQfvcPRemWDKBYOqUbiPdoYAjDDs8hi7LUlsSjmFyfPspwHbXfNQ7drhKCZlNaNYM2kgkK4qFcseH5y6ZxTShOkSOkwbHut8roIyGyvd20fkA7bxjpw0VDKN85IDgDQW+spAiUBIqeUQvBaSck5wjg0ZVlO6lyNLXGBIVXKoPVlUVBKY0BKKaE8RIxAkEnkGBB1oBAgI8Ja65wnhKSN/F5+ThlGQB9JUjykqBfOjXfHQzdESKWiLMpUe/y+iX1sy2IEBKQkUcw/LnUAyRXl8dsIkPaR1HvrvY0xhogxxgCpXhKk4MkjnxjRgd/TuhnFEJIVibU+TSUPae+phYwxepHt66h33kdCiBBSSmmc89GHGCijQooksdDGAqFAGFJMZdwH73z0PkTrI9IEkyLlECCil3nphrcR6Hr3sOk7Ui4mF0/HEGZ5eX39mhGI6CFgCA6pRF4wyq0bJgvhrP3pDz813frsxaLbvvaM3L57+/zZ08XpglK+Xt9mMbz5zXfnJ/WvGjUqeP6Dl7u2vbu7/t73vvebX/3q8vLyu2/fvHj+8s36XSEqFEz1cbtuPvnkk+1uO52eXpw/+c1vvp5OTrfb1lvGGRgN06vz6+vb2SwHhySr/bCraxEEx0CU9n3njEZCckKgBT8qKKTNc11IdEZ5rTGyacWNGjOWOG/wcHNfcyK4aNvucjY5WS7ub+/O6kKEzrjuyUIioqDlZJJXk2mMsW36zeZ+c79To0cNv/P9l998+y0AdL0R5ZwXZw+rlXGQhwxR7ta3wRhG4sO7t3XN1tdfTybTejIJQLu+H5ShXChlJpN1XRNvt8mykZH49HyeZVkww8XJbF4X2+12GGhqFTprp5UcmhWY8eTkpKB8vV5TZKfLCx+Dd5EQenNzU8+mASln8s3tKp8IRAKEWSAqUIjMSUZ43fkQIAfDxtH14xjvB4irEEIpQQiBZb7ZbldDO5/P5/M5Y/zliydvXt9st00HLHjCmJRiESPedtdNo5RSUkrOc6ONaU2MEaAYdN3cNy9fvmxt+/r+9bfXZKc4iKkHD86RgXCBlFOIrnH+tH76qy8eJnXtMH/+8uqG1M3rbVVVl+fT+aLMrunmYRXUQD0+e3rRrPtu++1PfvKT119ef/JZcTJZUGROqc+ePv3Vr95ef/m3zD/ejunX70GYdOFZm0xDvPcx0v1y4ENwPsAeJHHWIo0IkVIqBa8yWRZCUOY3I0FEyoTgnM/JZDqtitPlHL25vr71djxZzpd8ulqtmu1mHEek7Msvv9RaX5ydEyEe1lsMcbE8pZQppWKIWZF5H9tuiATrsnbBJ8LCoMaUJ0c5F0L8sz/5Y0R0MbRDn6Tos+Wiruvtpj80i2lt2kf2JGfLRxXzoY27ubkpyzLZhiWioMwzIUSWiYRlA4TJpCrLUkgWY7y7uxuGgRByeXmZ5/lut0sD5LquQwibzSZ5uwBAnudCCJA8DfOEEBhBOaOVsimXAFLIKMb0KTwqlxFJjKi1VsEaE4y2IQTGSAhOaeucNdYED4RiDNgl9bwyo1ImeTMgEYIRQoFQJJREjCE4Fxh4FzAvs1k9qaYThjgOndIDRTKfzyC4rusIwMXVSZHJxBhlyIZh0Na74LNCCC4AQBnro7UBjQNCOCDxPvpgH9MZIUawFsZxHAZljOGEQuLZH43H4IjKmDqVQ6OG8N5C4n2vQwg
CBQA8yos//CrGmCA++igGP4z3ftulBT/UruFHUj9/VHiORAvHpP/fLoe//avU0aZniEdgI2HscHiHho88eop+jLjGaF08cFb3rnuI1trwOARNjX56QMaYCx8cxuEBEznocQacNgYmOR4Yv0eYrdYwjoP3IUCe5zRCCC64BMaSNKjMsoyTUGbZ3d0dC6pb33FKZtN6Pl9keZ5lxfXNdSnEZn1flvn9qkWUl5dniFjXtX1cZ9brrTE2xphlBWU8kr29QIxRZpIxRpnIsowKCdAiYp6DUmCM8R601mUxCSEUBWWMDc7FQJxLl0+MkcQICF5KSPtmpz1GRx/9bhK/gSL6GDBCAooOXAdKaQgOEauqit41TaO0CyF0Qx8jpv2WEIKSuFxmiUUihJhO67KsOaevX7/OEIDgu3c3CGEyqRmBosjGoWeMhhC01lSIPM8pl9774Ly1Pg3dCSGcy4RaIWKIpq7rhNUlG8+USPDd2wcAWO+2N/d35+fnJ2dnq8363/3p/+fk7Oz09Lwoylefffrd69cARGn7/OWLTXNvtLNBUcq5REqY99EYM5nO+3FgIlsul5U296sHzuTFxUWzvk4y3+RLdX5+TgjZbrfvrm9d8CenZ5TKh/vNw/3KmGCtvek33viAwbtIhY0uDnoMNthgtbKbZmONJ5zstq1xLoak3nbOGfCBGsxzWZZ5VouiqC4vL9tuZ4zZ7trgwVU5Y+KmW1dFeXp2fr48Gdvm9ddftbvtn/yzP3z79u2vfvMPT5+9uL6+np0sq7LcNLvNavXTn/zoxbMn7KB69t4ntdyeSxLeY4mJSHKgA3jvI6Bz3jmfImdDDGVZBuvGodd9xxkVBCd1npWF16UxBhmfLc8W08pbM7bbu4c1CTbjbDo7EYxeX19v1itGcDab/fqLb96+W+c5C0g9EKUdY6wo66qapKDXBFpJxkUmp/UEo48xGmOo1lWMs9ns6unl2dnZ3d3darXatU1RFBdX53Vdj+N4fX9bislHpO3kXZleY6LzHGYeIYSyLJM+PYHAnPNMcCF4cs/DCILxqizzPB/Hvuu63W6X/PSklI+4ok243DiOqYtFxKIoyrKUUqayV2QZIcQonQqws9Z7LxjPuAAA61xS4znnMAAlFCIYpcZReR8ZFZmQk2lhrdVaOWdiREIoAg0hbne9Uqrt+25Q1ruIlAqJjIOzHpyPaF0wIcSIQJAQ8uTiCcTojbHeJjhXKWWNLjKR53mZZ1WRaa37ruec19PqoRmUUhGBy4JQ9C7ECITR9sEjByaQEBITuRQiQEjSO28hsXGttVHuIwvw0W8akSYf1IPBNDIKbo8EMs6zIn+0OiGUUoKMEJLAPWLsb/t1EUIQIyEEyD9ijPKPlhP/6OT70c/JP2Hg95GW4PBvhCPWJe4pp8d0kqNySFNlgg+FEIlWk8rw8ZHvy/MjcTcht4jognfO4aP16GELu8/n8+9zAQ8V/aMqCI+iRmst5cKp0dngPWilHOvHEJwLhIQICBAeuWiCCEkJK3Icu7Zpmp1qv//yqo9gtWm37fL0tCiq9aZ5WO2UJGMz5FxsNiOv8erizHmTpuBa6xix73sbwEPMijwGdDGEEBihKdXSGMMACTIAHAedvP1khtoYSkEpK6VsmiaVK+9j8CHZ3TgXKSUhRIhOSLDWjmPw1HMKe/6X91WRHxRsMUKavzjnuMwAIG2PCYTZbJYa6LLKgwdnbEL4iUxWVrEblFLm+YtnRrvtdmuMkZJbo8pKCp5t1g9lnlHJrPeXJ8sNIkJ0zjVNwxgXeUaZQAIuBKddCMFDRMRE8nLO9X2fZ6XkAh8ntcl+qK7ry1ffW6/X19fXza5t9cjGppxOfvJ7v3t7f9cO/XboB2dWuyZt7BygD4oxjpQpbTbbnTFplksZl33XjXrdbLeL+fLs5LRt2y9+8+t+tzk7Ozu/vJjlc2PM1998BwCMidOLc71pNk3bdw+v31zf36+s8SHAzg2cCiboMBgumWA
yAPgI6+1uC50L9t3tXTUpbx9W2lrCRTt2QoiyrgTjlKJgJMuEFHzU9uLqye7XHSDrB43IXIgIfLasjA3eOXB2vlgu2u7r3/zDarOTRf7VV19NplNtzfxkKQRjFE0MRSYlX+5tl/fmDkddBSMfBGwemwr6JKtLqaTgIxII8eH2LpM8E6JcLDijgtG2G3a73Y+eX3nv80zMJhXE0Gw293ersd1++vKZ4EQr04xbiL6eTLUa37x5s952y9OTxWJBmByU5nklhOiUXe1uQgiJuyGlXC6X3obdzV2zvUuG3C9evPj888/zPH/97vW/+w//Xghxenp6/uSyKIpRq9Wb13unZuL23PEQtNbJeDPFzqW2L1lc4iPj7vPPPy+KAhFTD8cYAQClh+ahLcp8Oq1ns5kQQim1Wq0eHh5KzlPw+mq1ijFmWZaicXe73cFbXUpZlmVSW/Oq4JQCgFF6GAbVD/ukjwjp2nPOjalBTF7+kRGgLsRR9eOoGeNZVVRVKTMSIxoTEZEg894bp621g/OD9t1oO6V9AKQRAgBa7UMEEpCkYQ4XosiqLMsKSYdhGLougb3eGmu01noxm0wmE0LIdrPu+77IMpnnNsRRK6U0ACilbEBlHRVkMlnere4Yvk8CSghiCIFQYJTEGAASTePgeIIYPyBhpiHTgcYCj6ZclKUNWRJxUkopQZroIYcW8CPeByJ675LF12F9/8jo+cMK9EFtO2jDY4ycfEz7/EfL3uHnHuL7g/mQKYqHLPWj3vS4xz2WHv52mTzm0RzazcNhHOz34oc99EeHdyh7h3UgPubu7ilClBqXkiPBBe+N8REMGEYwYiQYU5MdwVPvIeLQaRKDGvsMouSiKuvddhVjrOtpVky+/vrLnLOvX1/PSkEplyXwLGOMpV1dUvEaY5iQlBgAwhgzep+vK8TeuEApJQAjQYhx0Eopk7z3YoxFIfreEMLatptN8nFUNC9ieD8WDSEgxhRWlHGfaMKJ/ZOk9+99UJOIJsTU+NbT2WNaCyQps7dmGIaz+dRaD8AyWYhMhgBKKWs8k2L1sOn7NkZknJyenrx797br9NlyFgmlTIzajKrPODcuzGYza41Wqh+GYei1VrIosiwrcp7IOIIQoIQxQQhJ69WkDgBQeP/4ovZogbGqmk5+craklG92zWq18STMFosvX3/77u6ecvHNm7dPnz5VoyFCfvXdtyeLiTFtjL4s69PT8yzLlDVDr25vb/M8xwhfffHlz/u/XSwWT58+PT85lVdXMUZKeQhktdpc392eLM9+8KMXTdv96tdffPXNm64brQucy0wWAWJZ7wUC4zgOxuR5ZIz5EIBwyvnJ/IIQQjlvhiFJqs6evcqkTI7BACE4H6Kzzr+7vnn+7JmPKIoaKbGBxNEBjmNNnLWS0Zu3b8nTJ9/74Y+apvnFr3/9X/zJH3/z7bfffPPNsxfPd7tNlpe5FJmQX3x3Rwhjh1F/CAGPCN8pV++Af+5FzcYgj4CEUjhyWgoxxvOLU6N03zZ67Is8uzg9uTi/qKqq395SSqNx29fXtzfvHm7eUvDTMts2neaEYXDGMfTgoeu61WZbT+anp6d1Xa/X6+2uq6pKZtw6u+0651zXdVrrkpDc2sHaruvKsvjx97//5MkT7/1Xr18ni/eiKGaLWT2fUc53bfuwuldKVVU1n88h7pfFZIKQSlEqgQeQM3FYEtRJCElNj3OOc0ooKJ1OTT2f1fPpjHPaNbu+78d+YIQmL7GkGT+k4imlktV6YmQkceSjNxuNPiitx3EcxzFYlxAnoDHdcRzHcRi895xQzrk2Fghoa/SogneyyKoyq8qs63bWOmWtD9G5oFVKjh5B1oMLnXHjY3KANcZ6QAJMCikzKSUVItEWpJTb9YoQIhj1Fo0yCHGxWJRlmdyxm6ZzPsyXJ1VVjeN4c/dgYoiEEmTOox21VjYGZIwVBQAyj+j9XiYPEUMIBJFwBtTgEVPfe49MPo7BMCZ9d0BKeMCY0og
IIUlbtl+vGUVESjhBEgkeElzjEafRQ8DHNHYgeNAG+BBIjPSxFP2Tfdvj16EKIu7TlX+74Xv8Pm2Y4qE67UsmAsSIkSIhQBH2YCmmGCCClBCapOXOhseSA4AhxBB8BIiUcNhnaSNECI9L+XH27H7DRAnnnHKegByAxJOm8JiadMBLD4XzANgeG2o/vh4yKG29p0wQh4A0wcWccwIBog8uOO+DCVEbQIK2f3pxloWizshmtXJaBedOnzyfzeajUl2vsCqbXnvvs/NysjwnhCg1pMoXEZgUTe8XGaZQWSDEeJdo0nmeO28pFemwOeeUyb1VKUl8qJBlGVXGe9/3UJfeOcgZiyEQkiCc9GJ90iwhJreECLAnyjFB4PHNOWwjjDFtOzDGmqZhjC0Wk0lVQIjbtu06yNvGOU8p5ZwTFJTSKDljLJL4/MWTtm0BkBBW1dkvf7U5v6g9wMNqLQTvhpEBVJVcr7eL2YQxTgrCOd0z3bRGISRjlMkkvAHEfdmjeziUMUb5XuMbHx2aLMZx1NP55MWLV/ViFiA2/eARXnzy6t27GySUMfHk2dP7u9WoFZXy7375q+fPXl5cnjW77t/96Z/d3+u6hvPzi6qqxl4VRfHqxctu6Pu+D94tF3NAstlsbu8fGGOEyfnidNePf/Yf/+LFy0+23bhte+MC5QK4HH0YhmHcblLkmXOOc14QJmQOAKdnF0qpLMuUUvPFyU9/9/eFEHVd36vgjBnHcVC99z4FbcbgrLWnFy5SThkHRB+oD4Am/uIfvqrK4ic//qHc7G7uNy9fvnz68tUvfvY3m922KKpvv/22qqoQwtMXzy8uzketXz2/DB7ez/Zi3FuCpdM9FYAErKUKp7Xu+x55kWCoD/jTMb59+7Yui8VyVhdP8kySGDa77bt374jXSqngDKeEYMzrGccgJNs0ncnE6XxyOp+P7fb25p3TanlympVnMca27btB9aP2EZFyJExmhW4amWUXl5eT2TRd7RdXl9///FXf97d3d/f3913XCSFOTi9ms9np6XK73nz77bfjOMpMJKuCEALj79MJdrtd6vMAIE3gUs0ryzKFP6Ust/TmcE4ZJ8boruv6vn3y5MliNuGCDm3zsFlH5/M8n0+mdhzbtgWA5PHWtm3TNPsG+vF2sMsSQlhjjTHDMFilk4kopZQRmvhsQ9uN45gYrRETK8EYRGOUt05IVlaScbCu7/omRnQenAU12n5UfWe0DoPphq5vR2NtckiJIYL3cH61zPOyKEvGmA0hKUbGcczBpC0/o2S5mKduVQhxc3MzDCOlZH5yzjnf7nab9Vprg3khOUWkIYKyVlvTjwG3O5lnEZl14LwL4BEJAsQIzsUoIkTwPliTZJjMBg9SxoAABOBx9rbXku37GM45IH2cYO2Du9KsLhI8LNz7bma/lEOMe2QxzR33rvDH8bOMwT82ivstjPRx8gf7qVs82iPCP33bPx3s+118HGGm2V48enw4AlePYfb0+Idx5rHEIsbIHnP4Dn+ZrLTxMZgQHz31HxMb3ntyHtfvRG09zPhTEbXWWuf6cTDeUVEiREIpEhqB7NULEAHC8Yuti5ITOlgbONu2zSQTUuQJfb2+vqU8u71beWR3K3d5kQWKs0pYa/NcZllmXDDGaQNKmSzLAgDnMsYeEQkBKeU4jiEwH4OLARGp4JxzIAmQRGst5xQpGGtDAKScMfNRZ7yf1KAXYu976b2P4BmlUkrBmFUjSRMChoIDY4RAcC4JlyHLWJnne2MNKedzsNYqtec9DEMHQAilnEvr9NnZGWNEKaOM7ocmBLi6Onvz5t36QU1n1jmoJqKsJtuHO+99VWeFlIl2nrLPnNXjEItpefhoEmOcZzLLMu9ijLHruuSflYpfURRv+s0Pf/yDtm3/+//+30RC/+SP/4vzq8tf/P0/zGYL9/rt1198+fn3f7hab+vpBFp6dnZxOp87F8ZBz2aLf/Wv/hWlvOu6BKql1JrZbPa9730PEft
x2G63o4mEEK38fbfN81wW+cPq9mc//+X//ulLKspquTQ6aGuV8xGJQyYKybPsoPI6sNBZVuY8m81mlNLFYiGK+mc/+9nNzc9Onr1MPcM+0wvAOedsKMuyaUfGc+0c4p41HqkwAVfbvuk0Lye7+5u71W62PH352ffeXt8u59O7h9u7u7t+6M4vzgiAGYZAuXOOHWcpkcdTBBHD0ZwvXZZpzMOdJD6QGODRYAKj99Ev5zNGaHS+bduh70gEY5RSqsjYpu2tGhfz2cXZaZlxHi0GyyDovllvdwSgzOuz8yd93wZvqyLfbDZ933trikxkmcDojTZD3yKEelLNF7OyqpJ9aFVVX3zxVVospMylzIuimM8mk8nkqy++poCMkCLLYgzeWJ6XVV567Ydh2G636/U6UUvwUZuYgJREYEmfU1oOEJERoJQqNbRt44Mtc7mcTwWneuj7rvPGCs4ZpRBjUoCkj3YYht1u13VdjDF5nKZEx9TqpcHV0HYp0CiEwDmnjxpzpdTQdiktiDNGKXUxGOOC9THG4IJgZDKpJlXpo9tutyHGAMwH7Ee12w67drQmxEBXUXvrTUQbI0QQDIs8F1wuT87SJto557Q2SqVV8nRZ9n1vnRUin09nUspuHO/u7vthFCKrqoIwfr9aX19fe++rquJ5gUCUUu3YWReMC0bD1vWyzJkQhKLzEK2JMQBSxJi8mUME5+DRdztEgh8t6MFDKgoxGWYQJHSvjKRcpFaPIANKkFJKWIwxxEhiJP7RADMlp8eY+rwU7JfObZIqzWPu62+Dh8eY4QH3eyx+e3J/Inz+9u34VQAA7l8aIALSCDEwoDEhzQApHCFNHAlQAKAcMc0gYR+AlwoVUnI4V49vAO9xl3Su0kcMP0ELHzV28Ei9OWwR0reJNPTbFV073w3KWCCCIMYIBDF1G4YRoCRgfCQWUYaEFYJ2TeuUHryqM5Hn+Xw+o5Tttu3dwyorqna00cW8zj0REaAui7bt+75/9uzZru0fVpusBOMc9T4AyjyjfP9yOMHBWaUJF56w4FzgkVDKAUCpWM9kImPHCFprzoExxoQ0xhjtjbHeAzzyVg5vQwiBMCI5yxhjjBFE7y2ieHSiEGVZUi6KbCul5ByKTCg1qMHPZrO6KssiN8FITmOMjAkfg7VGCMHzfD4/a3ebXbNzzr148cK7eHFe3ly/UdozAUb7XDImMmUsk9I6b41XUUMUjNG6KtLSr9XABKWAlFLwwUcXRaCEAaEi59ZanWY01vgYuBRlkX//6uSLL/6h67rvffpZP6o/+9M/zWT+2ec/evLkGafCW393e/v3f/fLuq6HQTHGXr16BREpI6PSPgaEhFS5+WKZ57l3brVa39/fLxaL6XSaCVnNZ0KI7XbbKb1t+hrpycXVH1ezZ68+i6QoFktrQmr48rKklHW75rCXSmZmwzB4783rN8MwJOb8+fn5Z599JrP8k08/++tf/0OqjrnMKBcRMACJBJnMHlabPK92ux2jIgRCKY+Rl9OlGseb9W5W5CKfvLm7v1wuzq+uroNTxnEu7+7urvjl+uFeMJrn+bTMAYA9jut8siw6Bv0PY5WEBqQV6vAa9hthgBABI1RFGZwZx1EphRA55xQiAGybwTqwkby7e3j99k3G6LPzk2dXZw+rh7PFrJDsYbt923dFJmfzaS6F6c2eqoCknNSz2UxpfXt7gySenZ3N53NEhGjLoi7LAjFyLvu2G9SYSzGfz4uiCM7cvrv22iilE+C5WMzKvPDeb+8eMlknXd1ut4sxpt1HeqUJSKmqKrV6iJiUfGlRCcG1bdN2u7ouLy/OELzWynsfnSulFEJaY5vVRmRiOp3GGNfrdWr7UnRf6h0P+7L09lprUzMXYuSMSSkJoNbaWJtg/jSuz6RkjKWoP0AWY0BEIbKqqvKyaNtm1zZFVYcYjbG7tr9dbZpdsqHhthBECCkEsTaEkOdysVwuprNk0WLtmJSIhJCU2kXCMJ1OE08khLD
abruuG0Z9fnlR5JWy5vr6+v5hHYHxLNcO5nlljOkGs22aiMRHAALGAzjHMuRcEOtgBO89MiCEJR+viBAceJ/cxOiBBbkvGAH3s8332Qv7zR+lfM+ZjO8tWvbKvH2RI0eP84Eby6EjOUj3jq1eDgdwGJUdcL/j/k8KcahqhwFh/FBadzBXO/5hjPERZIsJRyGEERLTcPJAOk2rPDwObH7bJubQFz42uPGwN03KxYgQQvBhD5YCgH8UEFJKw1Ef+1H5PG4ZD++t9l5bpz1kSEO0EAIixBAEZSTN1yCGEIP3IQJC2PQKnLk6naHphBBWm+fPnmZ58e7mDiLRxlf19Obm7oc/+ZxQyihmmVBKXd/ePHvxwgW/3u1m08mgtLYujcY55xg/GF/t39hE8eWcEOJ9KIqi6zqjDKVgvacMkDJE1FpbE60F54CSg28AOhed84n3XZaZICTtNdNADxEZoUKIIs+5yKqqHIaBU6yqyhmrnWWUOkSlVDEpUvJJlmXWx5RnMplMm67t+/b0dNm2rZT8u9s3J6ez29t7ADg5mfdtk5WF8/HuYf3s8gyil4IGZ7UxlMqiyMuyVH2XCsMhCSfEaK213hljgDApZbpUrXdpeBZCWNT5559+ut3urq9vSQi/9+PfCYCb1cP1m7c/+clP/7f/zf/mf/gf/l8EMJPF7/7O864bun6oqqqqqrQLJ4QsFouiKJL6TWtdTyYhhJvbu3/4zRcxxsXVi9RvXD153vf9ertzYMt6vmuGm/v7b9+87Qc1KBMAQozGmFwWatA+OsmziMFq56MTTHZDO61n02Ux9mrXD6tN++3r15yKuq7TJamti8YyKrIsy4ssRNhs2+ezBWGaCgEBPYKPdLUbLs7O86qqJzUiXr/91hn96vmTJ09f/OLnf22D75vdk6dXb1+/6dvuj/7oj6weKaX09OmLwyl+PN/fc+gJAYChH5LwoqoqIgWlLPpojA3OIAKlyDnjjA5dp9QoBS+KghBivXchRMB+HJkQFFk/9MvF4vnz54TQrutevHiR50UIoagnIcRBqXIykwCTuiYIZZFfnp8ZrTbbzcly8eL5s6oqEDyjWOYZp2S9uX/93Tc37x7a7S6EWOWF5NKMut+1aujRBwqYZ3JSlIKy6DwGnzP27vbh5uYmDUX6vk+WH33fr9frly9fnpycIGLq9oZh6LrOjKrIch/c9fW7zfqhLLLT03me57PZRI+DVqNgglE6jsPQDwhwen7Wtu1ms0mmt0lhOp/PhRApvT0lN3nv+77f7XbeuTIvsjyjSIIP1trtdnt3fbNerxOd+iDWTqBo1/RaqaIsnjy5krlYb9bamvlyqbRfbdq3Nw+rTdMOXmlQBrQNhsEwjtrYoqzOLy5OTk6yLHcuBAiDGvuu897nWVZVlRQsBl8WeYjgfNDWNl3X9SNSOp0uqBDG+u9ev12vt1lROR+VtYuTU1nU765vN9udzIvJdH5922cFbceIGISQKQqeEKqdsy7ITKRYeYpxWuVqbE9ms1xyDIERIhjnjBMksFfORERSlTkmBQzjhDHKqBCCEk44Q0KQMMCDcgARkT7qyQGAMkIoxeQnIAWSBLTGEAOhhAshpGScMc4oo4QSQPTBG2vTl/POh5D6U0IpoZRQYo2LcR+cEAFCiElXgoQ8jhERkSAhlDJCaeo1H4OLGKUMCEFCnAupE0VklDBKGDKKlMD+iSihDJAgIYAEkFDGnQ/O+whIKCOURgAfAnms4o/CviQW3FMT9wy14A90FeffN6nxSHd/TOBMdTTN8reD+9WvvzARtAMPlGel8z74IAWjBAlGQggSCkgCgAuRB18VeS7Yq+dPSPAE4vnFOUBsBvXt23cPm0Zm+fLk5PzyyX/4j//xd3//D6dMOesY59PZ/PrmZtd0xrm2U/WkOjk9J5RtN+txGM7OThgl/dDNF9Wg1JNnz5wLq/W2KOubm/tehcvLk6FXq41ZLoqHVX96UudF1vU9YTSE6H36YCg
AAYiEgJTgbRQifPrJC61GybnW2mhNwRNKAYkQUmS5c67regSUkmeZDCF8+83DZCKl4M45pw0wTOBNjFFwVle1kNIY8+b166ZpdrvNbDohCLvN9t3NQ5axrrPWWil4iF6NozXBmJES2g99VVXz6YQStEYDwLSuT04X1gbvvHeOUZYs4Bnjk7rebLYhYQiw1zklcE4rvb5/KLP8+599fnpyGnyUTFydX3LKx27ACH/w09/9/LPvCcK8sWeLBSsKxhnjfDqbTaYzQDKMY9O2TdsiIRHAWGesG8bRh1hW9Zffvm3bzvlw97AyPhImtfUvX326aVug3EeinZd5Pp3Os7LkUpZFVRSVyCSlnDDKuaScIdLJbApAgKDWth8HADKdz9q2H43y3qc9vZQyhGi8CwAueCmkcUHwLCB6HzxEiJBLNg6jNkZr9fLlK4So1Lhrts+ePlXDUBT5uzc3AG4+myOCYCwvCp4CCv5RiOajvdXht95H7yN5v28iKelUD4N7HE3Bo+7HWa+MLspqHIe+6+fzZTVbvLm5b9b3i0n1i1/9Zre+D86+eHLBCG6327t18+nZHACEEE8uzsqy9BAvBX3y5AkXdL1eA4TpdKq1fv369Xa7kVKen5wmZLIoCkrQaWMJiaFgJAjGCQEIgRLCKbNO902bLum7uztEXC6XaWAQY/zRj340m80SH8w5t91uEyfFRdP1zTj2Xdd6bynDLBOTKlNDp8cBABhBY8zQ9ZSy5Wx+f3+fePmpz0u8GGvtdDpNjKbU1z/GJLmMC+ccen8ohInMWZblAblilKZAEGOMc2Y2m52cnYgsC+AZz7UaN9tu2wybXbvaDV0LykLwQDgyJkz0Ms/TtFJK6SNYpbz3Ro9pwpTneVWm+UEIISittNZa25DGIyIjnDnA3bZ59+7dqExRVMq5gGx+uqxmi+/e3ezaQRkoJ8KFSDlEIACeCxkArXHWu0iQcxofwTRKabLYBg8H/63fOgk/IFgej9mSXOHQaZEPjafJgY2ybwkACEL4iGl5OJkPeMaewpCKR9pqHGCP4yfiGf+IwJnKhhAiPiZdfDD2hvddWuJtRkREKmXyTWXv/UV/S/zwkVzv2DPsn7rFx6XwYLkCACG8Pyof8fj4Dy/c+/elEY5mfoRRE73W2qHwkZMQAQhj6L0HiBQhYIhAwn6iii74iDCO42q1qgW9uLqYlMXrd9fr9QOltCwll1nCyW2EblB0TkP0k0kFBLdNY6w1zjPJqZDGO+OdDwAkhhCs1dbqNIwYx9HFEBF88EyKGG3bpGRwcDEwBgFi4qZymTkbEcMB2o3Rx+DLKsOg0kNZayPn6fMVXCBicEaFAITGhKGzrB+HEEKZF0UBnFHnXLAuxpgQnQP3zVobgHjvX7x4EULoxzGZFRMKZ2fT+XzO+WCU7rrOjr7M2GxWUwL9OFSFbLph6HeC0qrMy1JEglrZ05OTNNUDROfCqFUin5+cnCT4J4SQFXkaB2qtrffRxevxHSP86dPnbvQ3d3cQwrQsbIjtbrvbbE9OTn//p79z93D/xT/8ptHGGNM0TUpWOTk5O704n8xnwzgi0ru7u+12e3Z29vKz719fX3/55ZffvLn/9NPno3W//vVvIpAf/c7vTGeLbhhdgO2m/c2XX27Wu6IqlTXamNPTUxNcen+4EDkr6NEoOnHFixKc913fIyHLk5Pd0KQVeL1eJ4VuVhRSSmcfad6UkBgDiQQxIg7a5FkGyKp63mklivL25m3zcM0gFoIuz04XpzMf4ma3jXH67u0NlaIoKvZRwNhHEM0xvJN+tfenP9xCjD5ECKPqMQLngjEWAZ0PabdsfZxw2bYtIs4XJ4Dk7v7B6fH58+eCgKwcOO2IcMGTrKonUxt13/dPnjxRzr/9zZfL5fLVp5/c398jJYvlKSXJPt9eXV09ffpUUGahzPOcU6q1Hro2LeUEMXqd5zmBMPZDCA4BTD+u7u5Xo8VHX8Q0z0DEV69ePXnyJGnJ67p
O9K0Er2VZttmuVqt75/ViXp+cLCdVwRjp2l3KfR0671yggBRQj+rh4eHg95juXhRFIikldDtBl6ndFEJIyrXWznvnXLdr1vcPIYSEOaTrllLqEUel2rbVWs9ms+XpcracGWMGrV0gxsbVpl1t282u23agNfgIBIEwjlIwossqn0yqLJMEwXsbgwOIp6enjLEsuTM/usFprTmn3nvrAiEkk5Jzoaxpm/7Nu+u7O1VPSFVz7exsPl8sl23bXt/cJR83xnOlFOfMOccYJLNQrbX1jgkphIhokESaUhTCB2XvuMLFGEPEg6NWACRI9lMuZKnPI5wB0n0YOaXhcTobYkhFKRBMYQb7/ovuC0ZEhH3S+COrE2KESPejPqSUAN3n/R7OfiAkIsYUUB4/iKY7VNNjRnQ8khYgvsdQYjoAIDFGmWcQSdLpI2ERk5voxzr945pHP7QP3VfH36KiHnDa9ygOvB8BHl/Z8R+7fYigBpmXATBFnFPK088lZ8G5CC4mpzmIESAQjBACkE3TnFTy5PzsbJIHPW42m4TnF0U+OhjGMQJZb97mRUUYL3O8D+Hs/MRG3O1a7+OoXVVP8qJAID44zjmjkVIKwXFGjXGZLPp+9B4QqTUuy7IQ+l3bCCEYA2utEHt7xfQ5WmudgxiBM8aYjNEHbyilQkjGoO97q7UT4hFSfuyVE5GG0NRMpYs3E1wIknZFlGFeyEk+T0okpVTbdwnjQUSl1KtXr15OJm3bbpvddrvd7Xa31+/Ozj+xSlPcm3NOJhPnTdvuiIKqyEWWUwLWu67va6yklBkXKJBSGgk6Gzjn1rsAoJQqy3JaFM65tu+cc1zKsq71oDfbrcj42+/e3t/cv3r16Q8+//533313e3NTlPViNqOU73abh9vb04vz//X/8n/xV199/d1337noi7pK1+x2t+uHoagnk7p68qLI6812t7v54gsp89/9oz+uTr621hpns6q8vX349T988aOf/M6r7/3gv/u//d+fPXvx/NnL0xMNBN+9e6eUooSs7ldpE5bGCgfacJKjHDxyt9stpfTZs2f3v1pJKaXkyYmJUmqt2603Mi8BIADGRHmDgIARI+EiAmn7cdBa346vnlw8ffGqrcvl2eJsMeXgirIeh67ZtYKJX6+/yGf11EUWjhzr4fFcT/y3o5nEe1tCRggQikgiOO+9hRADIRGiD4zz9MKsS67BBDFWVdUNAyKeLE+Bku12S7h49eyZSznIeek0e9gNY98yxrJy9m6z44zdbRrcts6ZMsT7bXO7Wi+XS0Zc27Z3N+/GcZxO68V8LoSIVAxKd12zXW+sHgXjuRQMoZAiOE8xRh8Iht7otm3VMA6jQcTUh6VPYrFYpJrXNE1d14jYdV3Kl09EnqZpuq6bz+rLy8vz0yUhodltoneMwDCMzaCFyKpiqrV+9/pNEJg4S33fp6ZtNptNJpOktkn8qNRVpEUt/VwppZRqmiYFBaR6nHqR5L+QHk1KOT+dZpUIIfSj2naddbHpxm2jHzZdP4JzEBlQAkAYMIacLaZ5meU8EwwBMeZSMFYkDvohhccakyImGGODsZxJUTBKKaFUWfuwae7v769v7XQK9XzugBRltTy/0Fr/+osvtfE+0TMoG7QBJIN2IQIQGhHTphvInoHJGLPaEEjnWBr+kiO2/74ahZhawD2X4bDEH/RkdD/m38+9APEwgYMjkV+MPgkD4TH0IN0YwXQwJC1yH+7t0gmfqEnHtS0dSYgfkFYOFWjPzXm8+6GD9PF9XX9UUAQAQKBI8ANR/W/dDo/2/go9CiH6oGv7rVtiYqfbwbMmhECRHvqetPk7eLh8sPN4NKlAnkcggMiliER4T2PcR1ciRIweca8QSeklTHBB2PxkT5weVG8IlmUBEKSUgx2GQWVlsW2bk5OF0rqqpoSQ6XT6sG2V0jzLjQPOBSHUxxBi5JzHgBgjEiyKQmudyUJrTZhMp4eUMuWIEUIRQGtf1zkhe3ThqK/d2/3
EiAGD1jqvBWM4DEP0RimVLlujukQ6kzKPlHiHyhprXZZlfduFEGT6NWVcUESkMjt04eLo1nR7S/2kNDg5OZnP5wCweegZxbrKvXfe+81m44M1LgyDRox5NueU+GC1dUwZgsyhPTxmAFIUhQkuxuiRJGpeMramlO7adr1eM8yevXh5d3d3fX179fTp3cPDru2eP3/+wx/95N/+23/781/8/LPPPv/kk0+GYbi9vb69vb743g8Qn/GMr1dba21EHLXdtF3lQtsrnsliMs3q2e3Dfdv062783T/8g/v7+81mJ/Pi9Pzs6skL48K/+Tf/z9lsYYzZ7Npts5vNZp988slJs7i5uZnNZsfXxYEqlXxnkpGCc04r1TSNtfb58+eJBoiIMitSQSnLUlsPAECsf0RHCAVCSC4LNQwxhNV663SXC3q+nC+mNfhR5iVDvzg7X9/BOIwhohrN7f06ImMfsM6OJvAEyfGuFh831C545j1FiHudX/QxRgiIyAklJCXT2mSNipQWmbjerJISruu6EOD07CKvqofbm7bZdc2WImSSp6X/9bt75rqnVxdbY55cXD779OXNu+tffPn1Z598erPavfnu281qRQgQwNvV9uLMPH/+fFRDs9vd39+OXZ9JPp1UYIEEzwQdu1FSUpclAjRt2/RNJIExtlqt8jxPabqJvbJerzebTZ7nk8nEGHN7extCWC6XjLFvvvraWDWp6rOzs7quAMKoejUMeZYZpb33gnNGqBmVGscYQllOUquXPq26roUQ3vu2bVOfB4/GVGkf6gflvU+Rj8n2WnBuUmmklNC92573fjKZLBaLel5Yb3RnlHbGhm0zrLZN06lhRGWi8RApMMa5yESeZVl2siw5ZUAQQkycHZqUgkpZpQZrlVJG63TRSim9KJOc31q3bYbNZnN7e79uIWdwdn6JjGplF5OpdeHrb767XbvJhPvoGKfBR21sAOI87DfdWZYaFOdsKqKMsb4dSEqqAyAEHgmTSWVAU/xv2If7eEKICw4RU1oK4QwZJYQRwkjSq1EaD65mQBEQSMDHcd+h8ULEEANApKn74ZQwRjijjB3K4WHFPy4tH1U4AOBMpMONj0nl+2rhfbpiDpDXPhTXqkO0LkEaY4yPGQuICLjPrttDhIgRSUQChB7sPSMAEhIAIiIQhMeU9ogUCJL4wawu4aofNdCHmpdkMB/1hcfYKTnieaa54HZs+lHZEBiljIiAQIJHoCH6gzwxEdCQYgQiOX1yeVqx+Pe//GVB/WcvnkwndUQQkkfCoQ/WacZYjD4Ed3N/w393wRhBAlqPQIBzrtVovOvVCIRxii6GYK1BEIIKIUbdBIgB9qaIieWEFBLIHBG834sxGGOEciGEs1EbY0PamijvbfCWcfAFReRaa04heScJxsKjZ0KSzoTH7T9nNIRIKU2KXkaAEGK16ZX9qMu3xmlmPvv0U0Tcbrdd1+31uwhSyquL0lqry3wymYzKKKVizCaTyXr9MGhzd/8wqatpnVPOjfVa7/LFHCiDsD+vhBAYaQhh2/XT6fS8qowx22anteZSXl1dGY3ffPd6eXry4tNPb25u3t3cnJ6ff/Xdty9evPj+j74/PZnd36/+7u//lhByd3//+vXr+Jd/9Yd/+Ifn56dKqXfXO5kXXAo/jP/5r/6qqCptIyD9/Ac//OzzzxnjXde9u3l9enr+k5/+rjbhr/7yr7/57p0QYypdZVl/9tlnb9++/bu/+7vypnrx4tnl+cWuGQ5bq2NoPVWExKcLITDOnXP39/cvXrwau5FEkslMZllyaOOcV0UWAkREwShhNCEahELyB5jPauO81vZnf/vzz14+nVbyV7/4+Q8+efnTH//gxctPMZJvv/7SR5jM5re3W0JydthXJsDycFgfYTiHPaAxZs/tjBEBKBLBEOM+rtN7b4xTSoUQI+xV4ens1EYZ407mM5ln33z7Oga3a9q+GzlnEQkgc5Hsmq4ScNN0P3n+Msjiz3/29wT
gdHn2+ubh+u3rZruxxkzK8uz0dLGYnS6WeTnRVtsmOgK8KooyFzKL4L2NHsH6gCR6BGfttm2atgGAvveU0svLy/Pz83EcU6O92+2ePHmSvFR2u13yJDPG3N3dPTw8XFyevXj2dLGcMuLHcYzBSSG8t+PYM8Jn05kaze3tPUV2eXnZQQJVXJZlSeitlLq/v0fEFJWeuswUeqCU4gEOUEyqPcnGbL+NjTGJ6DnfByUHUMMwGhsDMOPCrukf1t0wOANofbQRKBBgnOWymtRlXRMcECmAj4CEAMagRz0MiiE5mJF65w6ciKyaKKW6ZmjbdrVZbzbdoAEAzp8sqczGcSSMRiDffPPNd9895BxigBBiWVTWBxcgBEcIZEWmH8sePrZi9JEzmfoe8jjmOj4D8UhUlwR8BBwiCg6HVi8tzXtu2wczqvRADnGPFr5PxkH8SIh93EIdl73jDukj6O/4vse9UbodGCXHf5Ma6KP+jSJi/LBBOxzJvuz9Vg0+rr4f3chRu/q+VOP7lnePVT7qEVKneVwOD2EL4cM0osMBXF9fb3Y7bQO1NjKHhCcyrTUewQNGoJCQg8gIIGWU7ZpmCMr0Q7XMA4Zduy2K6urqajDufrdr27jZrHzcT1WTX27yPcmyDCk1BowxXGSICI+yE0KAMQYxotl3eFo7Yy1jPALkOaTIT0r3eyllTJ5njDGllLUuBPAeILp0fqXPzXtvLRgF2YSmHC7JEx2PxeC11hExYtq+0H4YnAP+/yXsT55kza77QPDc+Zt9jOnNOQCZQAIECJASq40yiVatVa9715te1R8ms65tdZmsTVRVl9SqpjiIBAkCSGQipzfGi8nHb7zj6cV19/D3gLJ2C3sZGeHh4e7x3Xvu+Z3fwDli8FYb7yHgarVCLqOYKkruvPfWu4NR5bjXOgAAgABJREFUQJqmp6ens9kMAAajKaW2MdGnt6p4ADpoG+H0siyNGYLREHzTDVprJXkqZeyBbPBSa6ESLiXnLFD6g08+3dTbddMg4nQ6pZR2w9B13eX19uLxk27ov3nx/OTkJB9Vq3ojhNDf2qdPnz58+qQz+ubmRiUyqZJ8kr94c/v5b3/9gfloNpvyRL25vF6tVy7QRx88EzJZb9rr28Uvf/Ob7968nZ+ezGenjMA///pXv/3idz/5yc8fPH70+vLm7u4OkKk0WS7Xj7w/Pz83xiwWi5vr667rzi4eHwRyx4TheM44yOdiuNhyuby4eJgkycnJSVEUVPC2bbu2R8RoERczRgK64AHBe+8JYd57SrgLNs2zZbsZjBuzPAB+/e130/FoXlVFWZWjibaIznRBp3nL36FfH209780MyGG2B3viOKGMkN0wHgF4oAwA4Z4YhoBIYlwqo8Q5V5alEGq92q7X6xCCD6jyArzbtgNjjFBOuRydT0/m82J29t3LV7/65ec//uwHZ2n5z7/6DcWQZpVKgDJCeJoWE+3J189fN8Te3dzU9TZRwqHdtk0iWZlkliBLFRNEe9f1XaP7Zuid8wHV48ePz8/Pyd6fjDEWySzr9TpC0mdnZ0VR3N3dLRaL+Xx+fn4+n8+5IM60+z3CG6Mj87zv264bIKBIBGNMd0283KNcPaZ4RMg0btaxtY81zzmnCLfGgg+pVCRix87F7SBuVNHdYDweZ1lmrdWu7bXudTAaV9thtW6axmoLgXEgNEmlSBOZCJFIKgVhYPuBSCAEvUdAj8EZY9q6jTnOgu/8aJRSSighxJvW3NzcLRaLYRj63gwaUgWTcTmezKy1EXa4vVu+en1tA+QFb7ULCOPprGsHRNJ1jnJI03S9bSJSfjT98s7ZJEkUZ96Ew1wq1rP7socQAniPiBYRKQZGMRAAtuufYrg5YxGoiPgaBcTYqBBPSPRS2VtfRpxzN0snEGCn0fT7oL4DIPlee3SYbx2XnGOvzvdq0vGDHHBRymP/RKNIDwhhhMcNfV9e2Z6cwwkhntwfio8r3/Fh+biyUkKP74m4z38
4Siw69KAAEPw7WovD7T2o83DI2GwbawMGsNZ7r4Xa5TgyxgADgRAwYAjofaAIBAjSm5u3VQLPTmcXF6O2rZe3Nx9++GGWFUyR8Xh8dV1bp/NMMU5G4zLaJ8VfXZblYINDCIjxHEZIbIsxsiWHvk2SDBGTJNluF103jKoMEauqREK6YYhjoRBC03STUcUY2zSdd7sDECJSSoSQnIFKANF0nXYa1InSrovvQJ7nIYALu7eDchpJWIPWSUKikqFtWwBQggMEgjSR6XQ2m8/nSZKEEAIgYwwgOOdW683bt1cAv4tDfULIo9MLDAQJcKmYUIyTvnfbtpGSc849BN2Zrt6gh6pSYjar69p7b7yTUiaZzwhRnDHGtNZVVY1mM2tt3TYRpJ3NZo2XvfdEyG6z/vK77549ezY7O3358mXnjX9DqqqqZmOaCmOGDIv5+cn3f8i/+urrL7/88tmzD5OiDCH0RnOR1k3jQ4dMPv7ggyTLV+vt5c3tttWVGj763vcQyWK9GHo7m81ms5PtptXWnJ2Np5PJeDzOU/XihbRWP3jw42awx5DJId5OKWWMie9tHP1EkDm4UBWVtbbe1IMZlFKjcSWEaLvOORvLHkUSAgYIgQRFVGwtKAkqUw+fPE7zJE3zz37443/+h7978+ZNu1mPi/z8/KLZbDfbpvNYtx3//XPlbt368N4Ci/fhnHMmGYt70d6rMKCIExeknMTZO4vf8CGkaWqNjjz+9Xp9e3tbVuN6u0YmCATriQcihKQYfOiAy8/++OdvXr766rsXP/rZzzKV/NXf/h1DWhVZWzeKs9PTB+PZtO1M8JZz8fXXXyyXK2dhUiXOmVTIajS/ePiwa9ZpmilBbDcYZ4FSwqjV/vzi7OHDh8aYzWYzmUzyPEfEoii+/vrreGCP/8YTWVEUT588rUaF937QAwUrFbXGNfWWcxgXI63128u36GE2PYvSyI76qqqigrBpmtVqRSkty3K9XuNeBG2MaZomMkWNN03TxCsAAPq+DwQOenYfQvzxiLi2bWvo4H0Yhn6xaBervm7Be0Ag3gemkqys8qpkCUVEj64b+pJSxggihGCdA8aYoEwpwVgppUyTPCoUrbV60HVdX9f+zdur5bLnHDiHJCPzyfTi4sJ7SwiZTqcxWX7QUGRgrYtRZ2Ux6jsNAH7f+hxqxqGd8histUVWCUq0NzFPdQ/vUTzMwPa7/J6acc+wP+5C9oez3Zq67738O2TI48boUD+iZD6WqJ271buWmIdpxKHs3Vc+fIdBerhDzO08rqPxR7TT9+sobohkZ6gN+/kl2T0aJYQE8k6c+nvcmfc+J4TQY+O0I5DT71XnsRCyvTN1AH84ceNB4XAUwHJ4N/aTMBSCRy9C6y0TAYCGgFJKDABhZ+HkgnUBgDAbLOU8TUVMy2N7xvLq5qaczC4uLox1wHiE7uMEsSgKj8A5L4rCbtvY2MVnGLzTWjN0hBAfbN/31bgEoFLKiFXE00mWZYOxWrsyz7IsM7rtut1FIgQwSggFAsSa2PYSYLSqqr5ZaA1OA+fc0t3YSQiBSGjwwRNPgFIOnFNKo2I4Xv+6b6WUs9k0TVPCc6VUVuSMsWjJFDO3v/3266qqHIbI84pNP2Ps6y9/d3FxMT09Y0x4AgRZIND3/dXVqmuM1pBwSBOQHLpOX7nrZ9NJBKiNMUioEIIwCpQUFUFERmlZlkVVdl232mzatn32wQdXV1fb7XY6O+lt/8VXv2OMPH36dH46XywW17c3VVU9eHShlLq8fP3y5cvp+OnHH3/UDRqAJknygx/8IH395vPfffP06dNiPGta/d2rN1e3d8aHQdvbu+U891laUEqLYtw0/eeff+49ZmnZDb21/vPPP0+S5F/+yz/N8/yf/unLy8tL4Co6t+3iMPfIVhw2xS45ppNmWSaECJ599NFHVVXVdR3nf9HBXyVJCAEBo4icc4LRL8XAeDxeL5cqFUjIaDS6vXq9WdycjIvVptbaNtDlSk6
qUbzCb15cbuuWnZ6dobfBmmBN8BaCp4CUoNG9dyYEF4Lz3iJ6SoExkgihOFWcM0qd89YHwiRPsqQcDw423dAaGwgNhHjEAJh5pyidz0+qvFyt6751IJJuCMBSbSAAL/MxZaLZdkDIo4eP/2//1//L86++XF2//fDJwyrLN6uV4uL09NQ7R4DmRc4o11anSUqAvHjx8ldvujSbnk5Gk0SdSvZslJ2nJDO9DDYBQgPpO7vadNtae5QqLU8vJtYb4yyXsiiKqhpHcqnWgzFmGLo8zx6cz9NESIknJ2Ol73LuUuEFGO+M0xoc4UQYS5vWbFvrCacq9Yx2zmr0heLz6bjMUmcG03cMkKDv29rpQTBC0A9t0zc1OkshkODX7WvglgmCNLiAASgJIngqeOodSiHn8+lonAIYbbYI/dv65HaNNwu73NhVY9sBgEKeq9lslCdcsKBYSDnPBU+ZUEDBa90PRltOuVKZ4BK5oFzlk4lIiwHIutOrflj3+u168+r27rub5WKruYLJrEpSGZxNUjmfT7777nlVjU3Ab15cXt3VgTFQBfJcwsApZJXabNa36w4BHj56sFhtAmUqzQhlzgfJGKeMIjIg6B06lyUykZITfPTgnBFQnGdpwhlLlKQBunYLzilOwFvBUQoqJZNCciW4EBZQO8vzlEjOlSCcEeqRBs5RKkYDQ6AQCIboYcQZUEAiuKCEM6QUGCdMcZWJNBWJBuBCUSZ8AOtDQOICGueBRHIuI5QjofHrziNKgZyB4IEzR4kBNN5r7ykXwDkRAjhnjBPGKOeE8UA4YZJySbmgTBDKQ6R0UhpleZQx4BQoCRACBEHFzrUFgUbEjlAKNLgQeagUKKOMEcYIo4R6QI/xIw7/aMA4CSKExHGqYJQRoBgAAwLnUQgIhASP3se+BgXl1ljBhZLKaWsG44w1g/5//+atp7I1vtcDoGPEC044B+cNoZRwSahAJBQop1JSMRibSGmNSRI5m53VzdC6QFX1u+9ezU4f3623ZTnN8uR0OilSOUplmfqimsisuL3d3txu8rS8ulqkiuQZp9QN3VolglHCVVIU8+cvr8ZzRTmViq9W62GondP15lawwGl4/GA+HmXbzd3dXTeqgElqneEaWADugSNIQEUxoVRR5rreaY8OlQAXfJIXKks8IU43WZEVeeGDM8NAASUFhtjUmzRRzgcmOFHqerEoTk5vNhvBsTedccOmXjXd1rjhRz/6QT+0aapUIgmELE1m04kU3BrtndVZOQB5cX357YvnDsyTJw/OzqaJ5InkVVmKhGvrF7W/W8NtC8uODXll0pGTRRdgCB4YcGrBaUF1GOpge+INIyCkLLJ8NBr1q9txKiZVzgHB2TzNqqIkhAmenszORtno+vIGLKQkUSSZ5bNVW1vrsiLnXMokHZVjztXPfvrz5d3q5Xevb26uP372LASfCq44F5T84JOfvn590w7+6mbx0cefTmbzTdtYHyhnQCnhQjvXDnY0nVez0007GK2VSoSQxljnPOOCygQJ9UCFSoHyXtvBE6QCmOQqQ4LAKFcyyTMkpG5bbYxUKmZoAAKnjBIKASEgRRIE6fUQfAO+ezRNP30y//ThbKZQ+v7nP/y4rlcqkb2Ht+u6PH/YAL8g2+3tK3Zy8fD4IHl8oD58/QDOAoBUaTzPHgwsDmbtcQp1oIbGx2HBBSRAiPWu6wfrXEDwwTPOogmVNdo7U5b59z768JNPPrm9fjH0RiYyS3JrbV3X27pu6ubq+ooQqMqiKsssS4P3b6+uvnv+3KmCU2RoJQmjREzKJFWCIGhrgTAXsOt103behyRJyrIEcFJKzgUhILhkjNd1fXNzE4M/lJLjSSU5b9pt33eU0nEiGGPOO2OMjcw3F7Q1QGjTNIM20Uonnq8ZY48enEfxXyRxxeFZZLJEz5dDbns8+DjfU8opYd6jtcG7wBiTQjIWTaslF4xRCgSd89roqzVE07yu63odvAfOQUrhormaUkIIBDD
GtLtgP6uSrCgKwsQwDL02jLM0z/tea+Pavts2TV03q/X67u7u7k5vel9VMk/VMPQY/MnJSVnkbdMQQgnndd3cLVbG+iRVUiWIYNrh2dMLruT11Y1HTBIeEKL5VuTyaK3Jvm/bNUNRYU0JRf/4wUWqRKqkYFQwmqaKU2atCT5QGh22gFImhVRJqlQipCSMU0qVVIRRSgkiBu9CCBGiF8Dj8BCicfNergA7/uSu69qjqQE5PcwL32vjDq3VO83fe4giQCTUCC7u+8WjtowLeWCcHjeUbD/zo5TCPmOdMfbe6O/wYL//CLsnQN9fuceYzR9ofI/y4mM0QbwD22sEYx92uHT/7psrrbXbQ6whBAASexdEjD7ju34dSAjBhcApYRBGeZInqutq9Hh+fv7Ndy+effjRar1ZLJddW+dpMhmP8ix9NE+zrDDWXV5eN+1QFOVidS2lKMpMSkEAKWVRo82oWCwW42lCCM3Somlaay3ncrPZeB+yLI8q5LZthyFkGanKUgrhtWWMM7o3wqU8duNaD5Qi48AYEBooRSm5lJIFE0Lwzse/aYTfY1QfZezk9OTL330Vgv/+J59AbESWi8ViwzldrVYPHjwoy/I//+e/attaCHF+fn5xcVEURdx5Li4uPvroo+rkPHjvjAaE9Wr13bffdU0zn88+/PAjPdjgcTqbjccjY1tjvFT8+m7dtatEirLKBIWhazhl0/nUOs8E51w6RGMdEpqkWZEXnPOTkxMh+HK53NSN4CIAbLbb15evl8vV97/3vT/6oz+yzlWj6ubqOs+yi6dPbm5uF8tlkZd/83f/LU2yu+Xq7dX1YrGcn55IlfzlX/7nJ88eSym5kH/0R3/05tWboihuF3dv377t++HJkyeD1oxyrTWlzFintWaM53me5llRFJLz6PsRafPWWudD1KEfqozzIYTAOZdSzmfT9Xrd9/1kMhmPxzH0kVKaZdnB3JHvMykRkQouhZCMMhIyQas8yZWQjAjOGCXr9SpN86KqpFTjydh5n7nWWcfxXRoL7jW8R3P4+yp4ECcdQmjJQf/k/R/cKYAwY61vG6BsGAwQBowi+qHtAzpExzlNRFoVGWOk3qzRt0VRcU7rTXNzc7u6W7Rta43J87TIK8qEtp5wNrTdYrP1SMajilhNYJCM5Xma57kU4AeTJAmhTHuMa9h7H98vRO2cy/M0yzJORd1sVquNtZYAs9ZmSS4oW61W23odFWzVeOqc09o6F+KA3XqntQ5gjTGE8eg0ZowRUo1Go/l83rZt27bRxTv+2eJ8LiIVEQ2LZm/GGESGgQQCzgZjDAbGOWeSAqBMhJQ8IBrnCcGuN9ttu1rZeBn1PRoHsQVnjAmlkiSJLBLrwwFdDAStRx5AqESlmfPBeLfe1HXdauubrm3aTmutXRgGGBykKVRVRTG0TUclRk+ZxWo1mczatr29W9SdJwBRhm/MQAg8fPjw8va67oJKYTwe3y3XSZp5CIeJ0c4qGkh0bNkpyAIRlEoplZJCiMgvZ4wJRhljFgIEQnZZcSbKKGHPwtgVDAQSkOzXT5RbKyYopSEgpdQj7pR6hLj4ttNo5okAEAA9hgMX8RgLjZO/w2V8jIiayKg8qjRM7Cg2EI1bADwgAWAIJIZpHE3RYE8ce68sHUraMbXkGL08iPbee7TDHY7L3oGtc1zwdtPKIz7OAclkjJGwSzc8eNNHK+qY6hDnLnFAFbSOGpsQgg87VQAhJASMQazxHeecxzEYBZByF5XQNM12u6XEc8DZ6HtZmhrTxQizGPjlfaiqqqpKgJ2iKwSIGHIbWu89Ixx9sMY4YyEgo5QzFjyURRGvt0SpMreSC2+N1poLSgknhAVPjIlBlt55ax1wQZTiAN650LZGSCallCLBAD6EuJwQse+1c0N8A5MkOT8/XW/rL7/8UmTJZ599ZurWGPP1118XRQlAu264uDiNe/TNzV3TNAAQqW2IhFI9Hs3n01kif8oIberNzdurxd3Nd99
9d3e3/OOf/tw6+C9/9Veb7fbHP/5R2w+/+MXneQFt4757/qJvq9PpuMykA1Z3zup2FCMZCCOEgXdts6032ydPnrx9exUg/PSnP71bb/75V7/Z1pvxZDpY8823X/3X//qLP/3ZZ9/73ief//q3f/EXf/Ef/8N/ePLp9xBxPp//6le/+ulPf6oHl+fp5uoOwf/jP/5jNZn+D//D//2fP/+8rusf/uiPvv32277vP/zww8EaSunz58+n0+lyuXzy+BljTMrEeazrWmt7fX2dlUWsUjF59ICoA4NjUP09XN05F6UvNzc30+n07OwsTdNINYgT5UNDFa/wrtcsI4JS8LTTpmn7eZVN5zPUmR66qsy5IKZrVnVLMBhnT6uxNZ7Nzy+OBwbvrM9w7+q7t2+nh27vcKg8SKkOmoz4CuNNEDDWuoAegw8hjjCcs2miIPg8Tc7PTh4/OBtXFaLtmvrs/JRx3jTdmzdv3lxetm1HKFFSzecn85M5Z1wb473f1M1ms6Wcs6yg4DOK4zyZj7IykQS9tTYvihCg7XXTaW0MAInTsiTliFgU5enpKSBcX1/XdVMURSTNp4lyzi6XS8bobDZNkmSSSe93R4EA6GzoYxjQoKWUZVXFIwwijseTs7Mzir6u67Zt4984Vtx4Zgz79Or4xsY7MEZDAD04rbVzntB4xqSEQJomXEnnvbHOWL/eNDe3y7va9X3f92gMAAEpIUmVUipJU0KIj3IrHxhjKknSNEVGuZRCJkolwMSgzd1ifXV7u942m+12s63rzjmPnDGViiKTSVkF6/TQi+i0JHhAHwP8NpvtemsRQEmihPLe9YM+nYiTs/M3b96um0FwUo3G682Wc4GERFeaEILg7ECgZ4xBCABI0HMKz548LlKVCC557PYSRrBvm2EYokjAo0dEQqlKMpUkjIvofBK7pdj8xGqzS1ogAgAieRF3NC0ExBB8HPFQShkl8WgppQTO4iZ+PJmDPfnowHY5GIAxKRlj0Rx8J8LdSwAPvdheHchoNO56r3WLM7Y9zSRKO+4XFLxTvf6PPrkvZuyeH3tc+Q6Hg/eAHNx7jYYQDp6llFKyN2qJIqpIPei67u+/uzHGICGH453z4aghvv+lsVwTxggGEtx8XNqh79raOjudzTqtp7PTV2/eEAAKZHm3HFf5qColNonKGE/Wm2azae+Wy37oz8/PQ3BCsqjXIEAYYc4Fa91kmjnnGOOr1dp7zPNcD1YIIYXUwxAwJFKlmUpV4rzdbnrBgHEi5I4g4yNsY2N0EUjJKYvOU0Coo5RknKRpGusWo0wIwRgPIcxOThglX33zzXQ2m85mX339OngdELttTYAyzn78oz9arpbO+vOL8/VqwzizxvngGeVd116+edt1bVWOnl9e1ZvN3c3NzfU1Ondxfv7BB8+ePHlSltX1zZU29tPvfzKbT1++fFFvt59+8nG9vc1TrgRjjFVVMT85A0Kvbm65TBwSHzAgEYJLKZ01TV17Z9MkLcqCUsI4H4/HVVUCIdPJJMuLNJGjyfibr7/54WefNW1TFCMqufdhvd4+efL0g2cfbuv29u7uk09/+Or167PzB23bffX1N6enZz//k5//b//pP+d5/uEHH4YQvvr667Ozs1E1rqrqq6+/7to+rh4h4mnYWmulUpPJZHl3V5ZlURQRPxBCUMaHYQC4p3Ea6+ImGeUr0bU/horHYADGWDz4HnNB4/IZrOWCM0KIN+Cs4jRPeCp5niilpOI0SZKm3t7dXEPwktPTqgiIbHZ28V61O9S8w8HzgGdSSmWSvldyd/w6xENE0TFXlTNqnCOUxiFLBGadsxTQe6MEHVV5nitCMJF0XJU8SW4Xi1dv3izuFlobRnle5pPplDBWFiUTMvqzrVfru8VSG+sAiHe54tNCjVLBIaBzIXguRDPoTdNpbYEQKVUiBaM0Ubwqy/FoQglZrzfr9Rq954zGKauSwhhtjJmfzE7ncwLgTW+d8yE4j4MxTdt2Xae
1JZTmeS5VEnXNVVWNRyPG6Ha9bppWaxPdGo2xiCCEdM5H6//Ieov7cQhIidCD67rOWscYk1JQwYGgTGSSZYxzY73WtuvNatksFrqxznuwAYCAEJCmPElTLoVxbtC6G3pjLVDGd1l+DDlXSUa4XG+b568uX7x6s1hvrPNNOwzG+wCMcyEE54oKKbho+2G7bULw4/F4Mh5zyoGAVOr65qZpjA0gGahEYQBjLQb/0QdPmqZ5c3nNOCCAShJtLAIQxg7Wd0IpIDQ4t9t5ARED+MAIfPD0aZ4mSopEck5JJiVgqLfboesi0ZwzFhAZ52lWJGnKhYgjMbJrv2KBiQlEhFIqCCeE7FL3dts9UNjHLUDcrJFSuqt2UkmphJD7YAYWrTUZi4o6Ev+NhY8xzqRijBPKCGWMMEoZpYwyToHGnIh9iRSMC8r5YVHAMRMHwPn7xHMarbRDiC5If7DOuX2K+jGeucNwj6raMSj6Hlgab+4oMhr3CxwRCe6ibaJ9Qfwklj1rrUc8UBJ2CWg7p2x+oLxCdI4DwODB27P5xA49+OCDL4sREDabz9ebTVPXGHzXuPk0L4v8ZKy4lITwvjddb169ftP1ZjabCMFiBKH3XjChpPQhKCETid65RCVDP1CgeZq1TYserDab1do6V6RZXuRZIhGCNz36wClKwSWnjMbAwsBoiBoXRj2nlFGMqk8K4G1fjUZFXlhrrbFJkiRJyjnvhn4+n9dt57wnFGRCEcirl7ch2JevbsaTqu2HbV1/75NP31y+Wa23WZ5lRZlmqUxSlSguFKHUI/TaeOuUSvIstUZfX10tl4tgXVUW89lcqaTrO6mSp8+eFHl2e3s1rorpeKyUiN5JhAmZZkkxAso9gLHOOhcwcEazLBuPKq0HlSRAoGlaF9x4VI3G4zzL3r699j785Mc/+fqrr6YnJ23TCSEfP3769vbm5ORkOptfXr796quvp/PZpz/49Hax6Po+y4usyAOS5Wr9N3/9N//iT/9F17Y3N7cPHz5Ms2y1Wq3Xm+l0KqS8vb3LsizmejnnIkYupMyyzFurlIqKhag8AUqHYWB71pL3/gByCiGk4LGLiPKttm0RcTKZ8KPDJRzzk1WqYpuAHoIlwVLwaHQiGCMhS5LZZJymSjAo8nRUZKlkPjg2P7/AI0fawwp5R+p7VPa4VLshxLsQ6HEy3+EIyRhDEjwGypncUdcoIeCNbZttkSUns1GWSDP0VndK8tGoutt0V2+v7xYrRFBJKgRjXAghi6oQUiKA0aZu2vW2HoxFQp2zSrBZlc/HRZUwQYFTIqWiXBjr+sFaZ+O2FbuHLFMXFxdJot6+vbq5uSaEAsJqtbLWSk4ZIyF4lYiqqhilXddRtN6hC8F5F6cdzgXGWBF16AG995FpSQhZLpd920ZoKGKYcdgJANGcBY9sG3cUOIt9NwyDBYAkUXmeCyUII2VVcqkCYt+bbavX23a1aeom6MiEp8A5KMXSLONS+GgdQoBzLpMsyzKlFBJw1hlgq/X21eXbl6/f3C5qbRwwzoTSLhBKhVQyyQhlgzFN17bN0GsnBC3LvKqqNEmQQN/2q9V6s+kjppqmglAw1gAhWZY/upi/evVqW7u8kIRQhwSBhABcikPG2w6L926njAZCgQTvGIQPnj4usyRVu7KXSEkB2nrbtm2cBSaJgoCM86KoyrLgUlLGGed4lKXAeezMCAAoKu8FCXsVOGWMMxpHfhhCnNBFsTlPkoPDPXlXmQBH6ovDt/DoDhTuy89+PHcvLoy3uIbf6/YAAOEoxPxILMEZ//0OjxzZ0LxT836v7NFjceS7E/r4FXesakC4X/gBY2W1kZ4LECfrf/fNlY+m4IcDLpBYFCmljPH7OWiE7r2niATc2clc912aSACSF/mm6c4fPKKUffP1cz24soCLs9M8z84nSfDQ97ZuO2BitVxxwcbjMefUe+us1X0vOBNMDL1J0wxDj0jKcmS0AyBCqMVi0XdDnud922EIaSI
ZI+gDhkAJ8c4wjrsrhFHGmFQySSUhLiB6D4wh55Qx5AKEEOhcWeZJmsawqCzNyrJK82y5WAgpz88v6rpp2/bJkyeb7VpIenfb/+yPf1wUxTdff/f8+erVy2988D/+0U/qulYy7bpuuVinWfr40dM0S5u6a7qu3myff/f8818/f/7tZd+vwfu+71erldZmMEPf9ZRinudpqtJEnc5G1gxDN1DOCOVNp5vBeKRAGBCepGmSpgRRGy0ozfM0BByGPnif5bkQvO06bU0s3icnJ7/+9a/PLx4QxrKi6OqOUnrx6MFqtYo43MXDB3/5H/9j1w0PHzwsy/HNzfWDR4/TJN02Tdu233zz7enp6c9+9vPPP//8iy+/HI1G52cXzjkuhHfBGIMIjMdEPe6ca/tus9k8e/IkEtq993sOBDLGOBeHnsr5EIF0pRQGvzuvI0aoPHp9JEkSobLDkS6ufYuAIVAIjKKgIClITiWn3uqhaQnBMk8no6oqck4QvE2kQAz8sP+GI+9dcmQKdVj/x5jnYS0dIM3f1xXtpw4UCItW9AECCUAoEuqn4+KDZ0/ms3Hf1F1rsyQvstwZs1xtu8F6BEY454IRlFJyLsejqdbaOdv0w91qPRiblmNCSDBdrmSeqjxVUhKGhlMupXQBo38VIcRZ572nSkgmyqoIIURZuhm0EMIY2/d9madxc0mUSlNl9XDXbRljmVTW2l7raEhvreecqyTJsoIxFpOs8zwHgLZt67rmgLgPzIt/pAO9hez57nF72jFcWrTOBAQhOBNKJEqlklKaZKm2ruvspmmXq2azaevWGg+BAWNUcAoAUkomBCIYa9I8I0wIIShnQEivjbbGGbvUfr1er7eWUhiNsjzPAxJjDBPK++ACOOOttW2rtQMKUBQ0pi9xzrU2XdetVqumMZRCAGD7zsO5MBrl52cX2+12GAYpAADKslzVHQIhnHMhEKh1wfvAmA8hBEK5kNbaiOh5h875iHzGkxxHL4SIUTzonUfw9J1I2MPOLhg31hEKBIAeoE70uAvSDRQpIu4SjTAeCQhjDAkJhJDgIyBMvGcYAuy8+AghhNEYjr7TOcCONXMoG9FsLIKl8WeiYDwGfNO98TTuY48OAon7pQQAe6ee3TDP38829qvmXU3Cu6AlOZI3HHvkvvfd98ree48cJ6CHFU3xvswflna85hljEZKKe1Oes5jS7L0nzh1s9sL+fCw4A8IIIVrrtCqZCC7Azd2y6brT83PKCVpM0zR4mwhurE9TFVxomsZ6JoQqR5VSinG03RDHBKlKKKUhuFSKXgPAPgVXG8tMcJ4gKCFTpQgnkvGh6+2gqWSpFFIoAAjeW6cBKaVcKcF5mufper1sWxsQEAOlJJLIsoxqrbt2KPM8ri+PIVHJj370o+cvX97d3aSZEgmPhItU69n4jHP5N3/9D87B9z4+r+uWUfGXf/lfTk6Kp0+z8WhKQOjBXF/fFkX2+PHTqR7MMFxeXnr7xlsIHl6/vV6tIMtgPi/m8znnHAGy7O10Oj2ZTe6u3ggC43GVFWNg/Ha1eXt9+/V3r3/42adodV/ljx+cKiVN3y6XSxL8ZrN59PRZWZbbujboi3JkfLi9vbl4+OR3X33zb//tv/3yd1/nCMvl+hf/9I/m7/7bX/z3//rP//zP//4Xvzg/P7+6uf3e9773+W9/q9Lksx//8Wgy/p/+/f9rVM1Go9HDh4+++PKrX/7yV4SwR48enZyfffvtt3//93//x3/8x5988snlm6thGLS21oXRaBStrzZN3XVdbPKiImVn+Mc45xzeReAP16pzLssyQkhd14yxPM+994vFIm47sA+VPKwF5wISpAhC0ECocdb44APt2qH1vbNDcGYyrhIli1RSdIlkFCSbnV0cM1AOy++dpe599B1ARCGTQ7cH9/62eDhiHxbS7jEp+BAoo4DgrEX0FEPwdj6bnJ+eQrBvL1/rvjs5OclStVmvX92sjbbaOOscAciLYjadTKZ
T71zTNCEE68N6s7UBuFTGhUrSNBHjPKlSoWgAbyNvsG5aY72xXhtrrWOEFkU+qkZn5yer1XJxtwBAHsmN/SAom82nSilGSFHkZVk0Td117Wg0EpQ1bb/ZrNu2jWPVeCpJklQpNZ6MY0BwnOcBQHAOEXfuJ/upntY6+lDEUV9sL2LYXtfY4JFyIqVMsyTN0yxLkzRlUvbabrbtatMt1+1qrQcNQCBwYJxHII0KQZlwiIM2XCrCOFKijVlt6+V6s9nWddt9+6ZGCKNxOTud52WFhGpjBusZF8bYftDdoPvBWQ+cQZqqokijIlsp5axbLBZta5QinNPYtxAaXyJMp6NHjx+8ff3SeWSSWxem85P1ZuuclyqRMkHEGEZxuLyj/6zgghEIzhISPvrw2ajMCqUEI4KSPEs5habeNHUTMFAEoZS1llBalKMszyNpi3EeSSuMx9aKEAQfnPeeAYv0FojeJLiD8gIGemhZOBX7mHvcPcL9DO+4YaK/dzPxvHl05/if3Xlx3/8djpIRG3wHGgEAAJUkhwkZknsPGgj3Bez4dniSO6B4f6NH8bnHIOdxwTsmvPhjzifeP0+y//wQehxPPH/zu8voQBmxHCEE44IQEsnJiHAgxQQE731ATBNBIUxH5c3NVZYkzjmhkpdvLsfj8cOHD99eXgoWKEFG8Onjx4XC09NzxuXLl5erVX17e1eUeZKo0agMzqI3VuuqqIo0t8aOxxNtmhCAM7lcroa+l1LpwSqlvLXGmCxN8jx31vRDJwUryzLLOaXEea+1NsZiBNMkn0zH3jtj+vgqEIBzopQaV1nT9s670Wic5bnZp1FKKWfTadt348nk6vpqvV6dPziXUilZfPnllx9++JRzcM5nWSaE5By0Nk3TIML5+fl0OlutVldX11rrPM+cc6enpz/+8Y9n0+r29rau/WRC00zGaAXv7aD79WbVNp0xelzm4/FESHm3XF8vl4TJ+enZydn5MOiXr1/d3NwKRssyl4wJRqSQeZ61dd1rneU5Y2y1XgLiw0eP1uvNaDR6/fr12fk5IC2q6sMPPvy7v/tvy9UiTVPG+Xq18j48fPjgJz/9adt2ddv+5je/ffbsw/n8VKnk89/+DoA+fPhwNBr97ne/W282P/rRjzab7XK55EIIIZ89e8a5aLu+bdu4NhHAObdeLieTyWQyiZnblFKhlPeeEBpne9baONuLc4csTbquM8bE8Wo8XU0mk0OVOWSxxeZP5SVnNGZ8W63N0FP0gtJECYJIAfTQe2+zRBVFcTqbG91TAqyczMIfMqI9buZwb18Ur+A4FImL6sAoPVbv3hu1AHS6V1IKwSkQIXiSqETxTIo/+vGP+r75p3/8hTH6pz/5CQB88eVvnXPX6wGQYPDeWSXEaFSliTLGWGMIIcb5ruusx4DgAbhMRhI4wUfnc+oN8eZ0Oo5CJK1dALKu234wRVEoqZJEXZyfIvqu7aw1gGCNi67q0+kkOlUKwZWS6BwBzNNUCWGdu7tbrjcbICTL8zTLs7zIi5JQmuXZznl9sx36IfgAiASJNc4aRwilhBlju7Yfem2tGwaNAbI0F0IOg95stl0blFTG+NGkPD8/k4nM8qwcjTzAetNe3y5vbtd1q+tObxsMAGlWsJRJlQqpYpBh2/bGeyblaDo1xi/Xm+VqvambpjFd77suzB/Oq9FIynTQ5m6xvL1drjd9N+ht3TvvAqK24BEQIC/ScjRyuvPOQyDBhabrmqbzHhijIaB1oBIgFLyDi/PJZDx69fJFUzdpmmZ5zhi/ub3tekcoUM45F9YFBBJDOTgXlBJrLaEcIThrRkXJKfno2ZPzkxkHkIIWWYrOUAJNvb25uSbBM0q4kF3Xj6eT0WiSZlk1Gke+pJBRGBAndhHNBE4o5zIWhwD7qFpEAGCMsvvGCAnbR/6kWUTRuZCMC4jzPMp8QMo4Y5xSFgexcWhnPe5mhZTFOxBCcedJtM8jwd0sUAgJeC/YP9StCLgdui7
C6KGSESSHmcchDOh4ZP4eaMkEe69AHqrXcX98KIR0zyyF/ewT95G8kWzcdV1VVTEC+/Ly8pevVxEIimROiLHShMRuNc759hNKTin13lIC6C0nyChNEkUoBcrrtjfGPnjwcDQevfzuWwjuwfnpbDpR3FXV+Orm7u3bm2o0+e0Xr6oqyfMMwHLOGIWqrLwLbd2UZUGB1H0DhKRJent7O53NBq3bupVSBu+zLJuOx6vVgjHqrC2Lwnurh5YASVRa5kWR54Jz58zQ9dPJRHBW5FmeJQAOgy/ybD6bD7obj0acc+9cmqZSKc5Ynud938/nc8pZN/SPnzwBgNVy9eEHz375q6+yvBi0efL02Wg0fv3msu+Hqhp98+0qSrq++ebbruvPzi8QSN201ujF3cJ517Ztkmaf/ejHT589mk6nSGnwyBh33td1w5mQgi0Wd2VW+IBpVkznZzLJ67bfbLbamB/88Ada68XdbbPdCEryPGUE6roRSnIh45UzmVRN0+Z59t23352czBOpNtvGOXtycuqdZ4T963/1r1arddd28/k8SRMu+Nurq0EPCCiUGo3GDx8//l/+1//VufDn/6c/J4TqwVSj6quvvtrUWyHEjz778TAMXd+vluvLy8uqGl1cPOCcN02rtZaJGo1GzhpjzAcffHB5eamUquu6KKsQgrUuMjO9910/CCGKogAADD5mcUfsLV5ah1bvAJDE/+WcEyqE4BTQGsMoJEp2Tffi5fMnjx977521GIKztm26yMYa5SrPUjY/f/DeSOP4kHiMcMa3koldkYMj8mdcZu9NB+O3ZCL1MBijBWeUIAl+PCqePXuyWa/eXl5SgPOzU0JZU9faOmtdowmnPOJFaSLLokiUZIR670II1rleW+OsCyEgQQDS3JWZenAyKRI+ypNMya5tQgjah2EwvTZAqJQyTZIyT7MsW29WMe7Oe2+sAY8xc44CkZKnWSIEA/AhuLilXl6+9c5TzgkSZx0Axr9KJOZGilHfdYcZHiDWdV3Xu2C/OOGLU5MIVcc+r23brvPWQlWklEJe5nmecSXTPCOMN93w+vJ6s+3azgw6WEc8AJNSqNQxaLp+uW6bTgNlKs1UmnEur+4W6+227gfrA1AmlJCJVKloLFjje6P73gx6cA6BAiUkBAgBPMZDrsjzVAgJALpvIlUBEQetjfGIQAj2GiajhAva9346yebzWd93t7fLk/mMCmFd6LXR1lNGVZanWeHDPSsqNkWxz/EBGeUUkFEIzjx99OhkMuEEBSdKcIKBk9DU29XdghJgjAFl1tjxaFRVkzzP0zwPCEhi2k1kItA9sSVexkdEfwIUSJTvMUaBEroPHopIKWOMSHVgaR4apgPv9FCNDle1C/efx7HcrggxyvazPb7nNpO9avCdOUJcHft2jhAC9J0Z4XtjguNnddzw7SeF7yOZx8/2va4xQpH3dz4ecOxzJ7TWnLG+740xl5eXv7rc0Ciav38oQvbCjEhG2NPh9hmHGAiEJBF26POiJJRpa1utKRPz07nVw931WynYdFSWeZ4nkKZFQPr27S2hbLW6HY2qsiwZw+A0YzRVggQkSAmSvh888ZwJj9jWjUrz4D0XMk2Sb79bSeHnJ5M8y4qi6Pu2abfX131RMELiH4Uzxhmj8T1GDN4HqXiWpZzzuMOFEDbrtVSqHJVlUUghyD44Kr7z0+lMSnlzeyuVms7Gb68uv//9n/zVX/1tnqeU0q7rPv744+g2wrlLkoRz3jTd7W1zdfXae/fhhx86Z1++fNl2rVJKKAWEAKMnZ2ej0Xg0mlSjMRLYbraIoaxGVTW6vr4qyyrN8m4whNCT05Oyqpzzb968nk6qk9m0b9vl8jZ6DzZtp5SSKsnybBgGxIDBp1mCiHoYrLPT2XQ8ni6Xd5zy07PT5XL55NkzzrkPgQChnFECvelvbm+bpm3bLng8O3tweXn14sUrQlmaZo8eP2yahlD64sWL1Wr97NmzJ0+fXl1dJ0myXK4
Wi6VSaj4/SZLEOmeMGVeVc+76+no6nXZdlyRJ3bZJkvh9r+WcG3TMgZKIGDW1hyVzDGMc8BiyT5RkjCGjjFAMGLxnhHLGAvpoi7zd1H0/IAJhMV4YrA9O911v2Pz8AT3KNDle5McL6VD2uFCRcnN85DyQ8g9fOTyUDx4AlJJ5kiRKJkrmiUqUevXyxc3NzfnZ6YMHD+/uFuvNlgvVtN1gGSUEADmlWaryLJOMQfDeG+u8NlYba7zziAEgICS2fnB2cjYbJZyNsoSg3242nEvrcVN3znvOheSiLMvxqBSMLZY3IXgSwBnrnWeMJkmaqoRSyLIsVQq9t9ZEPYP3/s3bG+eccd45RyhVSmV5rlSSpWlkKvZ9r4fOORsHOla79Xpd13VkAxpjtTYhBO8D5zzK4Pq+b1vjHDAGZZkmiSrKPEkSLqWUST/om5vVy9dXbW/bwfUm2EAYk1woIKzzBihL0mQ0HpXjqUxS433b99ojEkoYY0IwmVIuCeNA2arRejDtMPRa+4BACeOMcu7RBwBCIEmSPC+kVNEpNAwuUTwO4frBeI9IAAF8gOms8s6G4M/PToRgq8Wds35+emqM2dZN0w4+BKmUSlIupHX+SPQW918CQHwAxikF9Np6o58+enBxOpcUpKBKcBo8Z6Rp6tXyLobTBaAh4HQ2rUaTLM/TvAgYY444PZAvGeU0MlSAkp3fHtl7d1FKGKXxEBMbvl2PyBjljIpkR7xknFIWf+LA3oyf7FsjAkAcASC7wHQW70kIwR2phe+InbEcEiCEUXLQ8xwv40NOAo3hDAd7B/JOqN4fLHvv8FaOyt5h6cFRstV7FdQdrXE4aivpPmIles3H8NWXL19+cdMRQvCIvBMfKeI93u+wJkSkLB4dwDvLKEmkbJvtbDqViVqutgAMkVRVFbzdrhZK8vlklCWyKpUPiEC//fYFAcq54Jxxzgjxm/USABklZjAx5rDd1iJPhRDbbT1oTRnve1NkOSW0LMV4NEpTNQyDs9Z7l2VpnlOCSAnljCcqy7MsS7NEJWmS6EEzRrM0KfJcCrE7RgXsjaaEpknKGffOcy4SJbXWo9Goruuiyucnc2N34JMehpevrx89unh7dUkp4YKtVssHDy+0GabTiQ9Om0FrTRkEhLYbEM2PfvyZsWa53vzuq7vr28s0SynjXTfkRZlm+WQ2q6oxY5wSEgIOgy6rsuv07WK53mz7rnPWSiGn07ESVHctOjeqSsnY3WLRtm2Wl947qZLxdFJvNoPulVKCc6mkMYPgMkkUo2SxWBljxuMxZ+zXv/2iKArG2Xa7vVncnJ2dnp6flVXZtN35xaP/8r//1Ww+T9OCID0/P//qy69EIqWUo/F4vV4TQqOdyM/++OfPnz9HBC4kpbTr+u1264LPsoxTioibzWY6ncZOwDovhAj7i/BQ9mJC/SEL83h8driSj6GLWPl8CAAYgg3eEwDKKAbwAe/uFutN3Xa9cSF49EgCoQEZBW8sssnJGR7djhfJcfU6nAqFTKJj5OHgGe9waAHfO11SRsdllSjpnJWMjUcFCeHtm8vtdiOlfPTgkVLJ25ubwVjGVNP21lMMAYMTnJV5mqcSvB90BwDeWeOs8c4HDEA9YEC4yMKzJ4+qVII3iSBmGLq2TbLMurDZNggkSVLB+XhUjqvSGNMPLSHEO2+MIYRmWZanOedcSiGlDN50XWdsH2t513X9YFbrerMxjIfJdDqdTIsiT5IkTZMQwjAMfd9jdOYFcM5t1tuIR0dH9jjSA4CYoiCljDiSMcg5ZBlXSpRVUZQFFRwAjMHbxebN5W3dGuuJdjBo9AGBcaTU+tBjyMtqMjspqpENsKnbxXqzbVoTwLigrTfWt9rUbb+ph02tPYB1zrm4Fd5r1AghEdGNJ5hd/q12AqAoEi7kMAz94AgBxiEEGI9zAILBVVWhpNiu18YMs9kYCG/btq5750ElMk0Lwpj3wTm/Q9V21WZP1gBCCSWIfddAgCcPTp8
+epAIpgRLBGeAktGhbder5c5MmnFC6PzkpChHSZrkWR6ZkFKpQ/PDjuZZjIp7GBBiOC1QRoEgiwUm+iXvMkUpcHnPqHyXPPn7rRIhJBy1W+ywIMm+yh7p2HYvn7xj8kKiuAKRcUb2mld6NLc77vb+j8re4VcQQvZI7jtMTthTZt6DbQDAHjtrI0RAIoTACI2fxMs1TqO/+eabb9YWjhT6ABBjV3cA7D59kFLKeLy0iLOGUVCCt8329OyMC/nm6orLJGDI8pwg1pu1YuTB+XxUlkXGm7onhP3ud19bi6NqYp313krJFne3AIjeGz0omVAkzjo1KjgTN7e31getXdu20aJdKsk4td72fTvoDtHFKIau1dbEszfZSVBYTH+kQvDDKDfuoVLKssoDYvDeWKv1kGd5VRbxZKCUQkDO2GQ8RoC2a5Mk+ea7y8ePHz979uybb765vLwVgq7X69FoFPn38V0+OZnO55Ptdnt11Vq7VVn6+MkjF7q7he50vanr5XLTaT0MVkg5m85PTk+qasI4J0CkTHbGvHmZ5RnBELwhwWVKVkVGKem7BgidTWcqTZfrTVQ6jkbjru+NtWmitO6zNEEIeZ4tF6uAYTwe3d3cLhZ3s8nkg+99+vz589dv3nz88cdpmizXq6u3b0fjsZRJ3baPHj0etH395mo2O9HGKJlYb//hH/7h6QfPPvnkk816u1wunfez6ZwQkqbZoM1qtbLWZVmGBFarlR56pdTZ2dlmsxmPx1dXV+PJtO/7GKkWN0Zj3aHsRQ0eHLnyHgRykQxIjpQFhBDvAyD64DBgzPYKQADIpm60dcbYXpteO+sCFalMMkqIRcIPcZrHawP+EAfsvY7wPbYYOQJPDs+VUprnqfe+a2szDAktwAetddM0jIonjx5xmVzd3PlAuEg6bR0Svs8eVEKkUnLKjB1010XHL9iT6BghgYAnkKdJleeUEhZiokK/e9fihksE57FMC0Q0Q88pNcYMunPOp2kWOzwAiLP3pu+7rqPEU0mM1fV2bZxvewCA0WhycnKSpqmSMstywGC03aW/Ch49Kbquu76+RsQsy6Ide9QwHAA0vxutBwAQgiiluAhlmaZ5Mhg3aNu07dX14m7RUilDINZZYzFQlMwTAiEACtEMuunvnHO9HnScYRLoBkt3VovgEbyHECAgUO9DAEKA8/tpcAhBUKaUklJEeVbfm0jUpJGXj2it9QgMgQADGmSS6L4fj6o8z5Z3t7rvqipPlFjXTQx8kYlM81LIdHBWa4NAPRDCOIGoJyeExFetrBkCBOtBcjDOUkql5IwFzikjTHIW/2DWBgjIgBDO2V7ZtiNPURpxyD27GCBO/BD8npZ5uGh3x0OI3ixAIkC6b5ICIAX0gA4Dg0NDgzGQPYR4/oyVGhGR090bSAhBSpDumZy7IhQPq8dAiMMjnxdEDIghBEnVoZIBfSdFgfwen+Wo03qnpYuvLuKxh1d6XP9+f22+t8aPi+tBDhwxeQDouu6+rXxXdxh5T9GsJ0amxU4xkZRgiAnyQKJNARjtmEQEYowRikcKTZ7no9GoH1ba2sk0FVxd3dxwmQRExcV+KEBxR20XQzsoKYRU1pjBGkDa9x06MM46a4tRQQH6trHWGjOkmWKCSyk5SeP0UXDFmDjK+E6dcz7YgC5goAxUIgghxbisN1uC4Jzr23qdpJNRWRTFmzdvPvzwQ8bY5eXlZDb58NnT0aj6+uuvf/jjHw7DMJ1Mn3zw5K//+gvVbs+KsxevX1xcXMhUnhanKlPxTRvPyvEMTs5P7u6WjPNnH34wOWmXq/XbqztrYLlt8yS9W6wePnhwcTYbjWcRUP3Vr341nZ/ZYbi+vgYSzk9PpJTrzdIgTqbT+biUlGzbBhmjlCa5GOxws9xUV7dKMgCqte66RknBGENr0OtMjseT8V21uHx7/fXvvhgge3B2/sEHHzDBrm4usyyRiRiG/vz8jMpNlo0fgtjW+quvvvqz/+5f1Ulzt7m
bzWa/+c1vsiz77Ic//v73v/9f/+ZvootKkmQnJydCCK13qQuMsa7rot/0arUajUYRyfTecyFh74hyuAJxb9Z/PJY+lLrfrzsA0X4XaJwUUOYJo5QRSVEWQIXxxmnT674zIfDE83QTDCGEVdP5exXuDy6P+xkA3Z2L8ciQ7PDvAX49UF0Qw2qxtGY4nU8nVdG37Xa9RsQyz8/Pz/theHt9TSgnTLZd7zwwyjmliZJlkeepYhDs0O+s1r0PEHyAABAAgVLK+Jl0Z/Op4jQRRFCwumeU+oDaRsodE0LmWZalCQbfd52xOiYBcc6KvMjzQu19N4ahb9vGOccoIGLXN03TLNaaczg/Hz9+8rgqS0SkjOVZNuh+6Id+6OMJBRG32+3d3d3NlZYSiyKPbJe+771HAIynb2NM1w3WRtWd4JyXpayqAghtu6Hrze1yc3u36QdgQvUmDMZZD4FAIBgQEbDxsFrXN+t+22oAL5KUce6BAvExNxUpBwJAKOGUMxr7vOhqH4vHLuCKxc6Pee+HYXABGYEkEQyCEMwH7HrjERgDmSZCCMTAhZjNZt7bu9tFlsrxqFqtVnUzeIeEUyUzJoVD0MYN2gLjZB8jt+9f4jPhfddSIOBMWarz+ezDJ4+yRHAaEsEFJYkSQ9cuFgtvrXMuRrjNZrM0yYWUWVFEc+awuyZjkl9kmFDGmEeCuGuB9jjn/t8opt57csZzWWDiwEU8LhsHGOO4zYotwz2SESHHCE7So44N7rtG7+yhXB3XMyHlPXH0ne6Qvrcej3/8D1TEvW7vD+CfR7vD4THxCCMCvD/FMkqjtNQY4/eq01//+teXAweA4wEL7iT/wDmPjqOxZFrnjTFKcsAAgErw4N1kOuu1vVuuCKVAWJqmqeTr5R2g++jZ49lkcnvzWgj58MGTm+u7b7+74RzyPBuPqn5ogrOjqgD0qUol44u7peCCj4qmrrfbhjLWdi5JmBSCUJIXGQAMQweEOG+qshSCK6UyNUqSTKkk+q0wzjhnnPP43TRNkiRRSsWTFufcBc84T9OUAGmbum0aLvh4NKKUNk3DBZdS9kNfVdV4Mtlut83go3vWkydPqkq8eHFNiL+6qstSWWuzLCvLMsuyuHk+ePCAMfry9cvVZkO5ePz06cef/GA0mkilXr1+a6NZYN00bbfd1M75Iq+yskjTTBu93Sz7tqEQEsFyJfXQD23DGZufnGRF2ffaeSgnU+d6DGidy7IkoEcIQ99RSqbTqdaD4FzKZL1eZVk6G0+ff/vNh5/+0Wq13jabUVXN5jNO2eu3l1/+7qtBD7Pp7Ktvvv31r34zOzm7vVm8vbp5+/btJz/45M2bN/0wEEJubm7zPD89O5vPT375y196H0bjyXw+R4TYBU4mk+h13HVdURTRcuXl69dJkgAh8XQVXVoiYQoRpeDkCM88VvUc0JHDcZZzzihhhFFGOeeECUIZUAZMNF0PhAKlAahxzlhvPGoTtk27bjSbnp4fYybw/4/SQuhOLX8PYR35GbLdZnpwqvX1ZuOMHZfFxdkZo+Tu9rZtm1TJ0WiEiJtt3feaUuWRDNp6HyiQRKmqLMosYcTrYXBmQPQheIQQgGAIJngfAChlXDwr6GQyFhRSybzVZugSpbp+6LVlQgJQxvioKqUUeuitMcb1Wg+E0LIsq6ISghMgnHOjh6ZphqEnhFAKwzBs603b2s7A6dn00YNHWZZ7DASoEJwAtG1jnY3bLmBo2/bm5ub6ujUa8pxkWRZHZd776DaZJIn3vuv0MABjUBQyz3PO+WSaxmTO1bpuW7Nc16uNBwJIVNsNgwbKAChYh8YFhLDqnLaAAJJBlmdCpQ7RuBimR3xAY91ggvNoHHqPZJc2R+/lmNG8nBLE4Iy1xgYfKCDnQClJGCGUOh+cD4RAmqZZURRFGULI88w7u9msAEORZ9boZqNtAMYYl4IQpp3VxjrnAfa/ci+pA4g54RAI3a7XUohEsJP5eDaqHj28SAVjJCj
BBSVK8r5r726vjem9NZ22lLL5yUmWFVKprCgIZRhpNgePIgJ0rx4IR4KgXaId3dmH3AvYyS6egFAKXMaL9j3D6MNRFI40cyTmpL93Gt2HJ+E++NxjOPBICdw7oRwXUcqO1AjkfomxvZnZobU67ln/QLt2jD7uq/Xxbzy+MyLCkSNMLHu7X4EYNQkHswVr7S9/+csrK+OrPuog4bDwKb23VbPOe+8Fp4zSEHwiBaOkKIq6aeumRQDKeJKoPMvWqztw5oOnj4osWy7eci6qyWy9qV+9fJOmyXg8YpT0fS04zbJk6Ls0SSnQm5vb+ewkpMliuTLWMkrr2o6qLDjPGXv58q3RffCuqipCUEi5Wi7u7jZeg9bGWqeN7vuu7/pBD1oPlBLOeZaleZ5nWRrrX5Ik/aAZZQDIKKNAu64PIVRlMZlM6nqDAOPxmAvedR2SMJtNF5tuu92cnMyN0aenJ6NR9nd/96osgRBnrWGMAmCaJkmivHecMy7ZfH7y/e9/kuXFaluv1vXV9e1qtZZJkiRZ9NMjgayWi7dv3i6Wq2oy5ozPpuMnjx/OJyMSXHCaE9ysbsejilC2WK0DkAePniZZeXVzU+Qyy/Km2SqlrOk5p5yR4G2mFKGEUeas65s2S/P5fOqd+/f/8f/DGCtH5fX1ddu3k+no4uEFodQ6j0h+9if/4vPffCFV9tOf/vz/+1d//ZOf/KRu6/l8XreNc05rc3d3Z6z9sz/779brddO0N7d3IYTxeFKWpbF2tVqVeY6IXddF8dLHH3/8+RdfjMdj53bT7ujSchgAcUaPL+bj5g/2jOjjpcQgAI2sNg6EBaSBUCR00MYFcD4gEgT0QJ3zvXYmQGeQzc8vjo+37y25w1cOkmHG5aGwkYO7EqWE7HwO47f2aJ4BH6bj8WQyAudu727qzVoIVpVllmWLu2UzdIInFtEY6xCt9+B8miSjqkyVcHYY2tpZywCsM0AiJwKN9z5EMTr/9KQYlyUlTlHStxszDFmatl3XD0YmqQ9AKRuNK87o0HcYQkAXQpBSjcfjLM0ikMU53242bds6ZznngL5t223daQ1JLh48eDCqqr7vrTVZnisp+mEY+i5urIjorNlut7e3q7s7SBSUpVRK6b3iJ4IqMUI6sjeTBCaTUSx7Zcmdd3d3q9W61tpvtqZuQEiGhDe9cR6YYITCoKG34AO0FigBQgEJaGP7vtfWAQBXijEWHSCRIBAQgksl0cUw1fvTCdldPcQYMwzGOR/PvwG9MTjKFQHwiAhESJ5meVFVZVkKISgl11dvu1bPZ2PwfrlsJQeulJKJUDIE0M5aFxgXSZr5o1k02e2/0QCaDF2rpMgTNZ+NR3n66OJMMRLLHieQSNG1zd319TD01pim00KIk9PTLC9looqiDIQAIpfyHqbbJSoERCRsRwM+HBj3JQsOnVm8fzylIYsSPv6eV8sBVDk+csbD3XvwID1qDYMPB9VBxF45uxck7NZqZKAdNXAB7tGU6NJyPC84Ln5/oPIRPJT/9yYRx83ioaujQuDBceK4siLGwOsYhxYRy1/+8pc3Lok97n0HDEf6/b0mBBHjnI8zIjjzziZSUU6yJN3WzaBNQKSUJYkq82y7XqDVzx4/UELoYet9oEw2TXd7ezMeT4oi327WUtIsUUqJdrvNs0Jyud3WH334UQvh9u4uWqhs13ZUJW3XccYTxZSUTdcF77q2y7JUcFKW2WY5ABDOGaN8Z3fHGWO0rreEECGiMx+LBq1KKcIYIg5DL4UoR1Vw3ns3KkdA/NOnT613m81mMp1477fN9uHDh6PZhbX27u4uz3NKaVmWT59Of/vb14TYqiqllNfX133fx9BpxthkOv7Vr379zYvvfACVZJRyyvnDR09Go1GSps56JdVsNqVIts126Ide68l4VFVF19S6ras8zRJldU8Qm7apm5YxTnnSD0Y7lxWVZGE8qpbrleTUmD5LVTyhrlZ3J7MT6y2nfH5y5qzv9XBxcf7
6tu66LkAwxnBJN5uNTJKirAhlL16+/vrb53/xF//n3/z2d8+fv/rzf/Wvf/vb33LFlVKrzZoxZrTN85xxfne3ePDggVKJNna9XsfZHhPcGAMY4nlisVh47z/99NOvv/02z3O999g8kFyifoYduQ4di+IOl/Qx0EgpJRgIxuw95hC9DxYxABAg1tqhH4L3hFFGCRAaMBAifQA2Ob9AAvFjDwwBkHeGeOBD8N5ZG5xH9IwAp+TwEby1ZpiMKs4IBuedcUY7q9E7QM8on5+cUMbXdTto0/TGeJqPZuV4dreuKTAAEpwngE4PijGGbZGLvEipoJ33tXVdgIHw2qIBRpRMkpRzlOhHmXg0H3888YwYzsGBByYCl53xyETTdsG7RPFxkSpBMbgoaG0QqEqTopQqBc4BqIfgnKOMLpZLj5YJdrdarOtGKCYT+uEH31dCOGdYMEqAYoHDwIlNpTBD32uLKNohXN/1V8uuM3CSwels3nbDatVk5UgluUOSlaPemNW6a3vIUxhVScpFKvmkLGpIn79eLlvraLrY6rsGvQQvsiZQTUVN2MqSTWA14R0VNVIL3hKwAMgJkwI4s8FrG7yziEEQmkiZSZUIwQGI95UIxCH4kDCvGDB0IXgIPhAPBDyARRhCsICeUGT0fATjcZpnQlBbZvziZFwVIjjz+tXzzWYDnBWjCfLEEokqcSLpLNfIBkccUKCCcxmTYgRjBAMETzAQAAwhklrrZsMFGRfZk4fnCYFxln388GJWFJKSSqWm73OVIsDl5XXTRO/7elKVZ2fzs9OTREnGRZGnUkhnHSMkskFiYm1EOrkLxDnwnhPCBGOMekocBgdgEZESIjhSZhEQKOFCEB/lE5ITzqOtAiAGIDSE4BADAFIeCPVIXQBHLVLgglJOKIUYi8cJUAo0BIpBUCIYlQDgnbcGfY/eoDcQHCGeksApYQRpTNWj+zj54MEj+EAJA4RYT+Kri9xQEomhsWfeOWwjxKeHEOmm+1oGBzJq/IiPEb+iiKbBAXpKKDBGgKC3oAffNypY12y5xxAIBPXy7eJXv/l2q8aECQQakASMYxQanUujeCKEEOKBmIBgzAdnneNJHpiwoDRyz4R23mqTJ6KQgKabV+nN5eLP/+zngLhebs9OH6yWq+lkzGlwpqNoE8XR2izJ9GDN4B89fPr26kZINT2ZX79ZZDKnIBfL7uLho0VjNCQDUWo0rbUfnNPGEYZG9wiBECzLsVAMKCIJhFHCWADU1lXVxDr0nqZpmaYVpYoQmSRlQUXQXgk1rsbe+u2mBkAmxHg2G4ydz+feh2EYxuMJEGaNq6rxZDRarBa/+Md/brrh5OJRMppMzyYvLt8KmREh57N5UzecM05hOhmPyxNGeMqzL/75a4FhkhXB2GCcZLzv9Wa7AaHy2YzkJalGk8dP0tBq0w9Dv22aajJFkby+upmeP5qdPxw8DA6ESjmjpm+9MYrButsGQh48fhKYuFnUKp8uG3v+4GMdxPM3N4+ffrxp2vV2TTmWhWzb9U//6Ed//b//b5LTT77/g29fvqGiulppmc5Wrf/w+z9uO5skyWw+8kNtzeJinn33+iZLk08+/v6L7162TVsVo6bui7z8/IuvPv74e2mR3y0XJuhtu10sb9JMWQcuhLbvy9FoU9frzfbP/uzPvvjiC0CklMRmT3AqOaOAFJAfHcuOZw0AkCRJdLePxniEEOccZQxJrFeeBMeIlyQI8IKit9oabZ3zCA6YQaoD64NsHWGj+cl9eTuaGWK4/zx+PT4Vxvnh10fSV5ZlVVUdsJFj5zTG2Hx+opRqmma9XgfvGGNVVZ2cnFhr1us1248r6T2ThSRJIqRCRONcnJzHY3jkiUXCuOIiy7M0Sc5TH49pQggSgamAhJC+66KdiuQCop+Wdd77AS3fmWExRiggOG/1MGitm7oOIRACw6CBhDRNpJTOQQg7N9s0VUmi4vl9vdoYY10A53Czbe8Wq7Z1hMDDWUoI0dY
IwbMYhhK8EGKzWTsbKIUsgbLI0zQVnGEIbzfN3WJZb3XX6W2LJgDjHJnUxg3eGxdcQAeIsKOeUxrP9bCXe0UjMGaNj7OaeEraW8kkfdsnKSkKxbiw1noPlBOhlDZx9MmUEkWeVUWVFWmeZJOM6MH2Q69UMh6PZZp23bBabwISxgWXijFune/7XhvjHUZBzHvjK7KPwjkMzKKy03svU5WlSZGmeaKIt2WaPDo/FQSUZIkQGFyWKOvM7c1N13XWWqO7oijnpyej0YQLwZlkgvuAEXbbyeZ2CQwEETmJBBaIlBPYZxLFuWac/xEgdG8vAiREq8ZDrxx2VHaglPJomBm8d8F5ByFUo5wzhojeOmutdx5CwIDovLfuEEsZiVeUUmeHe+H50Toz1kZPk6O2LAKJ94Dq8Tzv96fvu3V6xN85dHvkSCl4WLk7oQI4RAgICNQjBh8weMAA3hMM1jkA6hB8IIvN5osvfrfh2Xug0zGb9PD54Q7O2xCixgQQozcb6qEniJIzTlEJrgS1uv7Jj3/krO2aZp9bFKzRceaKiIAheG8HfX5+fnV1Ff8WVVXVXQcBtHVpXjZdu902621DGQgundUYPCEYExcpAcr4dHxygK92Gd8xD8Tv7JZwH7VGKfXej/JMCEEYIYQURTGZTDCEq+urNEsJIUpJzrlzdhgGJFAURasHIeTjx4+BkC++/Mo6/8EHz0bjCWOs3m4//83rD56dDX13+eb1+cU5ARh6O5vNnPNVlU+ns5u7xXgy6br+xctXi9XGOpdkWZrnhDIMwXo3TWVsXBhl1XiUqIQylqWJtW4+P8my7O7u7vr6mjE2Gk+SJGmG5vLyknP++PHjxd3dcrk8Pz8jhGZp5pypt5uouE0SZcxwcnKyWLWf/uCHf/vf/l4798d/8i+uru9++8VXk9k8y/L/6f/5P//kJz9x1gDBm5vroe/PTk8Cyy4vLzHAo0ePr69vKGVcyMlkEhAuLy+lUufn56v1smmaoshDCBhIFDdzzmOo0MnJSdM0EVo47tsOsPkxhnm41Ol9vtU75iqU7hDHCDEe6kU08zwElh1uIZAQAitn8+Pydr+ojvtKvLfiFHsYM848kiRJ01QIETmN8XbY8jjn49HEWrtarZqmEZxlWRZnm5vNuq5rsWfHRHQFEctccs4RiPfe7qROO3SKUuqc9dYyTqq8yPOMM36WuIOq31mrtfZRyx4wDqsF4zsnG+cBwFPMkzRRklMajfiMdl3b3d3d9kNPKAMC1tl4YnbO615TSpQUeZ7keaaUxOC01ovFMgSCwAZtb+9Wd4suOMgS9uh03HWd9a4sS5kq753zLgS/WneMglJQZEmRFUWWA0DfD1fbrmmHvoe2g96AUCDT3CFt+kE71N47BBeHmhCQICNHeFcASqgQUqmEs2j8TZz3wfn9wR9HlZhOT0ajMRUCKJWJFEpSylS0YKaEUEYAnLN93/Zdlwusm8E6zPJcJEnXDovlar1phJSUMsJYCDho3fXaWU9I7CHut8X3xlH3fIe9uV1WZEWepVIKSsDZ2ah6+vCCAWapVJwRCEoIH9xysazrrdZ66Pssy07OTsfjKeOcUk75zkLlHv3fx+YBgIxfj786Qrv7ehK31F3ZA8IYE4xjjI3Zm4wAQNyyvXeMMsF57CUJAOM0EcLY3lsTnANASkAwyjnjUaoXmx/vQ/A++KhxdFaHEDwGhP0EgVDcZdNGQ01CCY8FJT5x+D3+y3tl7/5sGvvXP1Qm37N0OXwF0AAQIDwguBC884CBApIQKKBzDgmzCA7pYrP54suvNix7b3Z42I9+v/5RSq2zzjm/c/sExhgB9NYQQEqQE8zTRDBQgv7w00/6ruuaxhiTpqqut0pKpVTw1lrDKGvqOs+y8Xj84vlzpdR0Oo3uHtY7Y11Vjp6/eLmpXWchT9jOkgNQMMYIRQBCGSHi9u11lmVnZ2d5ng+6b5qaEVqVZQihKAohWN93Q98BYPCu71rEkGRpnhc
AxHsnhFCpyoucUhKJXXmeEwJ1XQvGKZAsz+u2kULOZtOuH169emmtu7g4r4rq/Pz8yy9+2/X1fD7r+8F7f/Hg4bffvoizcy5FkqaLxZILMWgznkwYFVxJyoWzDhhQynQ/JDRwqZAAF3Lb1qvVKs0yIRPOedv1lPH5ySmh7O3Vddd1o9EoyZPYYyillEjWy3Wapm9ev3r44IEQYrtZjUYVo0TrYdA947wo53XTJln+5u2VSLOLB4///hf/BJQN2vzs5z/7H//H/8cHz54F9E+fPPnnf/6lkCofzaKS/aOPPr69vbPWDdqMx+MY2zkYLaUsysJ7Pww9YwyQCSH2zq5ACCnLkjG23W7J3m8l2rXsJXDvKAJ+/wCN+8Su/VYDcUIRe5tochbHbTvns3crXwgEEVk1mx++dLyCAI+31yNKC9td8TEyOHI3DqYkB/j18LS8C03TDMNACEmUjKldzrmmqYdhiOKB+FzjzjiuMgCIyQYuBO+9tc5aC4DOub4fzNAzxkZlkaYJIJwqE20PvfdD33dd561DxCxN46CRkT2TO9LfWWyWeYxkc841Tbtar9ebreBcZalg3CN65zs99K0TnKRKFkVW5HmaCBJ81/b1tjHGMSoRoG304m7dNCgFVGU5KcSmrgOGNE0JpQhgnavr2ljIUlLkqZAySRKVJNbY7Xa76L21YbCh12ACqCwhQjWtbuKwE8EDeBpdkeNVEO7PQUd5h3zP0rTWOhuMdVEyyDn1iO1g2q4nhCZJGgJut9uu111nB+28tcFb7yz6AIg8eM5plqdCqLbtl+tN25sAAIQaF5xHF9AHCCGadUWwC35/2z3wniInMDb0UkqhRJYmkjGKgQZ3fjL/6OkjQYmSTDJKCXJKQ/D1drtcLvu+t3rI8/z09Gw6nUspgXLKGCIcSBZkH0IQf7XigtKonOZUcMainIwHRLYb9VEGBAJSQjjnQPAQGLmbJcDuSBeJnxg8Bg8QGAFGiXeWINAAFAgJ6J3Xw6D7vtnWXdcNXR9vbdv2bdd1neA0hHDIi2AsWpqxyC3kQjAWNfIHQiZ7r3U+Lnvv1UJCCB699uMfOfbwPG7RIBhCKaEckbkYUwCEUyAhAKCzFoG6QJyHu+X2t198sSLJ8SDzeOLy3i+N3wrojTEBAwBBgjH6AoMP3hEIgsKozILTozL/4OmTpq6b7SZ2BqvFMs/zVKlh6HU/CM6ttWenp8vlMh72z8/PV6sVEjCDJgSMNS9erQcPeQJZnm63tXeWEuBM7DYzKjgTkkA0zkbEsionkwlnvO/7NE0ZY97fN+ix8+iaBhEjxSUEPwyDkOLk5EQImeVp17VpmgrBrbVKqc1mwyWXQtbbbZKkH3z44e3N7eef/4ZSNp1MHjx4OB7lX3/9DWfs9Ozk7nYxmc58COvVejaba62ralSW1Wa7ztPixatXry/f3Nxtlovl7fLWOe+9u7tbMPRIaNcNSNhqtV6s1lU1Ch7brs2LIkkz72ycKVprX7x4MZqOTk9P27Z9+fLlg4sHwfvNZl1vGyVlUeaEYN/1Dx6eLxcLIfhqtZKqVCqhXDAhX75+g4R++PH3/vEff+khPHr8ZLFYnJ2eNW3963/+1Z/8yZ/e3t40gz89PVmtVoDApTDarjar+fykHzRjrG6atm3LqpxOpyH4uq6lSCKtIV6Ese3Lsmgig33fx9JwcLj8/RDmw/VMyPsVMd7l+Jx3uGfEI6210ZML7uU9nJC9pcXhsY5/zQEhgSPNfIwKjFPKWKsiTV/c5wrtnm78ZUNvovlymqaMkQh27R2KSQwZj95r8alH6xO3R39CgLhvCiG8s1prtCZN1Y5083swC+ecUMY5l1zsnjTZ8X8corWWMsBgvAVrvTFWa73dNJvNJpJruRTeGSDcetQGCUCRqTxPslwJQZxzdug3m029afK8DEjtYNq6NYNnAKlkqVJN3wT0SGDQnQAvhAD0XYtpCnmepyohhABQa31vbNsN6623LgwOBhe
hJzbYsGp6R8ABQQIIBAGBYvR+DB4Y3Zl8BIIheGJMxLt3SHQ4TIEAAG5XQ6J09NRI0zRlXCYszR3XgwbrHBAA9BACMAZCkPnpPEsLwlldt/3QB08EV0ioc6idC2ipkIzLGPtOKMUjd57j2/FlZ62NwIBSKlBkLIZAhVgdhRDMQyyQkpN42SRJEi8hwe6Tzcke6KeUHLSrsZ87sFFCcJRSQgllgNFUi0UKqwshyuoQSAASgBBAT7gAypAwJCzs2EIBAZVUzjlrTfxFlBAMznk7LvMo7Y8rVvdDvJiTvckZY4xStg+rAOMsuadic8Y45ZxSKpQEAEAa/9l1Zu9mKRxauvfW5vHnNJafvYvKjjuEyKNAk+yia2MdCATcYasghFKKlFJECgSpBUqBcgIABJFgAPRHwOoxoeD4K3gk5gMAyZlgnARPgXjnvTMgUiHE0LWMASFUSlk364ens+j2YMzQ9+3Q9dHeb+9mxxDD6ekJIWS1Wk3GVZZlwRo79IER9KbI8zdvb7MEXAeTcWqs7rrIUXWUcMkpI5QIzomaTtO2beuu7Y12zk2n06Iqx9OJ1cMwDBiCUopziuit1VJKQnnddD6E6XQ6mU1Hk/Fms1osFlVVpkk+DMMwGCm5UmmWZX3fv3z+/Kc//eM0Sbq+m1fVn/3pHw/D8OrFd0KIm5ubzz77zFr7i1/8wiPKJP362+/G47HHAHSHPMDexGc8HlsfchcIF8hpNRoxwTG4u3W92DRD15yfn2eJolz1Nqw3q5PTifFgu95oLVQyywtCyDAMb9++ffTo0ccffPzrX/+6rmvOedfhdDp9/fp1lqWADjhlVAiVMM6EhOvrtx997/unjG87ven0crHIx9NHjx/crjb/y3/8D3/6J//y7u5uPKlCCMvl8pNPf/i3//Rl7Ha2221RjdaiHo1GlNLRaPTixSuRCMbYt98+//jjjz/44KPlch1bi7g2GWNa6/V6ned5VVXr9TpuEbFvOeSUkSMS1vHGcihs9N2khDhfo/ugy3hPa+2h1B2ErffVbTQ/ud+23kFVjrK4jrg0AbGqqrIsETHGDoQQojT7+FyJ92lEFDGGB3FnDSLGl2etAYAsSWIfGmWMADAZ5QA7BTBGWoFzsXAGjz44RqiUPI2HZcYe5BhZWPG4zhiTQkgpvdvFu8NeruustdZm+Y54iiF0/VDXddf2LuBkOhVSDsY0ddMNQ9P0AWA8LueTtBoVZZ4TQozuu6ZvmrbvdJJkzrquM/W2btpACKSJzNLMDGuy0yuDjFZkQ6t1GI/TPM+VVPG73mHTtJu6u27RGLAebADCQOWlQbJqBw8QKA2UebrfyyIOhv7Ql0ccCX30l9rVg0MGcIxUtQhJmiZZDoRa54x1gIFzPh5VSskkEUkiKEUCQUoyyvOPPvqeUGoY9LZujLGEiQBk0BYYsy54AEo443L/GuneqIsebe67WyTB70a2SsUTNOVMScEAOAEOeDqbPnlwzjBwBhRRChacIxSMNm/evK7rWlDIs+Lk7Gw2O+FCAFAmOGMc9u0vpTTqyuLKCdYSQiijsFMr7P4NIRAEAoRTCgGD210Y5BCMHl/CHjv03ntrvHOMBMmZ5IxTQjBsVpv1crW8W2xW677tgg+MUsGFUiq+xjhmjvWPcx7nVWRHHOV7TxYeTcUiERPIUaQf/AF33PcQRXKEJCPca+GPiyU5osC9W6IcAMHI8QAgSClBggjeRdoRItXO9xZv75a//s1vO1X+/pmGvKt/f/d5gnOWUEYpcT7Eg7zgTOuOYVCSnU6n29Xig6ePqzK/enul+zaEnfWN5Nx7j95zzjGEs/nJzc0No1Qp+fDhw9ViiYjWdAiolNxuN0WuovXlets7C4iAIQZP8kAppZIwaZu7RKlyVDFKt3Xdt51SMooQvPOEkDRJYvcW0bA0zwXnvRmabc0Ync/nWZZaa/u+S5JESG6tjfBdnmdpmoZg621NCIkKpel0NhqPb29u7+7unA+Msce
PH+d5/stf/jogpln+7TffzOazEDBJEs74y5evxuOxcXYymwKAi+RGDN77vu/1oFebjXPO+mA9Guu6wWhjnA/A6HazCSFMxuMsy60xQMh8NgMGr1+/Pj87n81mXddxIThj3nkuRFWW2+325PQ0AArGN9vNZDINSDfb+uT03PtAOL++vXv+4uWzZx/86le/yYvCGDsajbqu/eFnn/3t3/zts2cfMJktl8skSdMk08a0TRNTgq13caQb2Qxaa8QwnU6buo0n8khoj8BeZIdordu2jfhnRAG9PwSEvHPIiyXN7ake5Oh0SAgc7CMOaOdBehRFybHF2pdAgoj3Ze+Y0hKp2PdmoHj/DFS66/NijE7sz2K8zuFxjucKWZZ772NIkh76iHQdl73oYxIxB0JInqpoCut92Ie1x0OiwBAIQRl1g0AoJVLKp2MWdxwhBAESQojMAmvMDgMkO71a8B4A8pwTIIxRQmnX63rbWh+SNAcg1vnVZtu2zWB13wETcHF+PsponmdJonxwXdv3XWetRyRxgqa16zttdCAE8jRNE2VdzTgNAZNElKPSWtN2XZ7xPM+kVFxwygQCGOPqpm0713huXYhp6VQqkeYWYNN0FiBQCpQEoAH2mXAEGMRBTGwNCCE0epOKWM0pi+KGaEznEYARoNT50GvTdLrvdd30uu/7ruv7jiBIyTmjieDz6fTi/FxItVpvb+8W26Y31g/a6cE4HwjhCIRQJoQilAQPEPkXgO+BafEWr7aI2sfha7yyo90tCUFxJinMxqMnD84FJZwCxZAo4a2ljDjrXr16td1uE8GyLJ+fnkxnJ4xzBMql4ELSff5C7HgoIYxQRiliYJzTSFvY2bfc24bR/Ykn+AAADEjg4t68c3/tQ8C+azE4QoABeOf6rmvqum2aVy9frJaLpt56Z6XgeZYWeZZnqbY6+sQ4bwczDLrXZtBGJzJhjFMuGBdRKU252MGySJHsU/7o7oO8W0sOy+q9sneofL9f9g5L9f7YetyloUMgPpAQfxIIoEfvgw+IQRvjkQzOaxOuFovffP5bk03oMZ78LqXlUH3JPUgL3nvKGCFEG4uIqRJKyOAtwSAYOZvPtuvlJ9//WHH+8vl3FDFRythhPB55a7UehOB5kQOis3azXY2qMsuyVKq2baWU3g0UQtvUQMhoPEqyTPfaWRtVvT6A38FkEpkAKirhtTZtPwghZvN5VY36YXj75q2UkgARgjPO956x8XxJpJKMC21sPwwBIUnT8XTcD31ATJS01lprOOchYJ4XRaFevXw1DF1RFPH9SZNEcLrebE9O5svVum3bH/zws7u7u+Vy9fH3vvf552/mJ6M0yyghlLGu7+cnJ0omhDFE0MY6HzhnzllCYDadvrq8S1NVlFXMCGu6vmm76ezkr//ub8ajSTkab+vGWnd+dpZn2d1ieXZxcnt7G1w4Pz8fBhP1oU1dj8ejRMkI/yOEsqqWq1VAwABSqddv3jz74OMkzf/r3/zN2dn5m8tLY93ibnlyevbs2bOrt9dfffX1z37+J998+93pxcOu6yJteNC6yIvlap2obFvXo9G4abth0LPZSdO0q9Xq8eMnXdtHhPOQHBsv5ijji4OteGbdl6v7S+vQ2x14m4eyd98Ivgs1Ha+XCBwel70Qdpmh/Pchi/i/4SA/2iu94qMXRRFCqOsaAGKxcc61bRvNw8JRUBHZz3jul+geLI3DnmNM9kB/cM55F6x33qPDe1s2zrlnDkEIQgDCMAxCsCxNOZfHiqv4ZIZhSKQSEUCDXXe8m3Zg8N44ACDcez8Y7QPNVNJ0HSFEaxuACq60GISgeVVS3Bx4GcZa4wJQrpRomo4zucuQYcABBKOKMcPisSJIKfM8a9vGOTg5GSEiIZGwxDEQ47W2wTigTAZ0PoAnhCAMzvfOaQAPQAGRsIAhxEAwCEhITC6Ofxe+7+ziBPS+8cLdJAkRRVEMXd8PhjEmlcAQwHsP0A6eAVCimSXBWms9JZg
oMVi4vVtuNjUQZowbXIiQo3GeEs6FYEwExBAc5VwI4az/g91A1D7H6zuSnuLzpIw55yEEIvmO0UEI54wQdxgnR4z0sEIAIJLxISASJCESTHbuIQc8JN52pqOcwZ6WBbG7YTsdLEFAuHcUuwcznA2M7RzMMAB6BpwRYrSOMNdms7GDLqoJInLKlFSJVBRIzFBkUuwC8wCAkHi2ZYwxjK033f0vE2Q3j4zP/33Q8j0g8bh7e6/AvHc73ing3enIcUXEnQYXERhSunsDwj2mFBARSQAMSNw+IPQ9PJMc0cp//zmQXVgvDcHhzkoRpZTWW/SBUyAUsyxBxK7rqixN03RYd2WWL4el1SZLkzzNWIDvvvsuTSRjbD6Z3t7epmkaQkiUHIbh7qaezvNUsizLNsvNfFL6u9o7cAjBA7FIOFAHzBKqFAKnJBAmPJIACJQxKb765tuT2XQyGRFCqOIRqTPGrLY1tDAejy8ePBh0d319vd2uz87OLi4eXF9fxV0rRq9sNhsppTf1syePBuPWy2VeVgAAhH347BlQfnV9W2TJrz//fDQa/dmf/VkgEAhwCZdXb8fTybcvnlfVKFPJ5eVlVY7L8ShCBdum64b+xavXy003nXRKsoDEeewHMx6PlVJt2zKltrX9/Hff1G1zcXqqlFptakZwVI3btv7www/fvHzTti0hpO/7RIqT09Om2TLGHj9++Prli08+/Ugl6Xgye3t5ORlPfdMmQr55/fL80dPPfvDDX/7m821rfvKjz/7nf/8f/vEf/puU8t/8m3/z7/7dvxuGgVJ6fX09nU6+++75er2dTOcfPPvwi6++fvZ03OlhvV4DQJZl0ZAs48mrV6+Kouz7/rC3x9NYvENskaO++ShX6A8sgV1JOjrGHZetg6w24ouHMnmwoTiuo5QSAGDV/ORQco5FtXg0OxHsoOjlTdviPgCe7B1p76XQR6f+/eeMEGKM6ftecBaLvPc+ipcFZ2Gfr42IZVmCd8MwOO+lVITSvtfRM0lKlSgFgN7apqmromyaJk2yB4WfTCZVVTnn1qvV7e2ts3YymXDGpBARP4mW0M5apRSEnguJQNqubzvtPFgkbdevVmvOhTGm63WieDXKqjxPUzWfJNPZ9Pr6tm1bH3C93hDCGRN3i5V1wQy6bQf0oBRNlGCARDjGaFFkSaL00Hlnk4QTINqY2XSGhKzXjbG2bvVy1foA1421FpASoRKmpLahM5ZQEihBxncGJIwDYzuv/RBwD3IiQDiIjgkwzpWQjDEgJCAGDAjQaxsCxISBSGflFNJEeecTRTkj3nvB+WhUZVnqnBsMbLf1th0G6wjZWap7BM452ZUTGgB3XP8QOKNxsnhAKvq+jzQtznlMaDp0HgCQZGnwTlDqjeYQnj588PDsRDIqGMmUdFZzSpNU9V23Wq1Wq5Vu2vFkPBqPsrwcjcbRs1EmCURva9gFD0VFCiUUwBMSbYkIRNsvDAF9pI5Gwh4JgUaCO6LGkErJCOn6xljNGQFArTshxO3N9Vdff/ni+fO2rbNETSfj2XTiPGRZNh6PJ5NJnudpmqokicFsEcnnMUHgfhJJCKWcSyEkEyKas2CIU9uY7H7wEaUIO17se60bvDs1P4q2oBBHdwjBh93DIRKA4ANgBEzjR6SoEkaBUh5NvmMRi56jjILW1gdEIOumBSZ+/Zsv7lbrmii/x9CPt4+IJr33VEMI3g2MMkYZJYzsPQQ4Y0pyQEfRnZ/Mx6PyfDZ7+fzb9fIuT9Rycfe9jz++vb29vr6az2eMselkkqby8vLNqBqlado0mzRL6u3GWjOpsqbthr63xhZ5XpYjIcRms6nKKgQzaHQQZcbeONTGKmiFUnmWMy6s9877+MaNxyOlkgjSCyEEl0wwxhmTibNu0D0QEJIzxnzwbdcNfTcajfI0B4iuWpDIZOh1krCyLFWSWmcAMc/zJE2stfPZ7O72mnPZts1vv/iyGlU/+/mf/Kf/9J9OzycIIKSKcJr
z/mR2cnN9q6QsRyNjDBNsPptuNpvXb9pUufn5w7u7O21dUVYBYbOtfcC8rC4eXDz/9rvHT55sttubm5vZbNq27SeffLpZLySX/z/S/qtHkm1ZE8TMbElXoVKX2vKoe+65gjPsIZqPQ2BAgvy7RL/0A0GQ3WgO5t7uI/fZsnSlDOVyyXlYEZ5RmbVPX4COQlVUZGSEh/taJj777LN9EhJns9nV5VVRlIMx33337d///W/bvl2t14ujIx8DZ3x5c6t0dnJyut5snPNPX3x2e3vz4cO10qqazXyIr9+8nUwmRyenV9c3z1989vrd2+Dj8+cvXr9+c3xyenV5fXx62jRd07Xe71j3yVIFn2wVjFjguCmGYSjL8scffzw/PzfGNE2zWCy22+1sNhuMOUzXRimyMa5NqVQa6oSIe7bizrwcikskPwUAbdsmAxVjROQ7t3efHh42TOw/JkEwsNdy7IZ+dGyPQ9HDaHS/RXcNQwDACO+x1xgZY5lSwzAktkUylKZrYozIOCL6XWayyzIZMWusM8ZaWxV5jGE+m319qiaTCRE1TdN3XWpg8N4nzCjGHYzknEudOoIHJOZ8bDuzqdtt3W+aZlP3eVHM5otMK85BCiEZ6kzNJtMyQwDcrLfOhcH6vjPExDDYpukRyHlvjY8ISkKmBOcEbC9GBX5HqOWYhmBppY21XTcYF+um39bOe+giMy4ioa5KYGzT903fB+IOYkjSxkmAFHa5wSgq8FGUPVrGvfW5j5VkBklHeZfEAHHkLFEIaae8I5iWijjFELtAfd8PzsYISCymfmwAIE5EjIkErMW4yywFZymqStB8kubx3o8ih4eiX0QEhN5ZjkgxSMKL05OLkyMOkRMwiJwhJ4oQvPOr1ery8hKcmc9mi8ViPjvKy5KQRUSpNGeC7t8Zca+WAhDSf2A/x3yX9aQsJgJGwBDBB++cs3ZZb7y13juEyBl5Z1bLu+sPl3/+4x/rus61Pj4+Pl7MsyxjSDFGG0BnOsszLkSE6ENIcSXx+wEO8SDv2c844owluB0h7oY23N+63SkSppbyR8fIix4j3HtUZl/GeBACH0r3Hh4QXYjRp4sRIUYMMSIEBphGWkZkrfEB+Xc//nR5ed2JcjyH8W6mpPYx6QAgSSBEREqtvwmVIkTOSSCgt7/6xdfB9F998dnN9VUMfui6hIFvNhuIcbFYCMbBh7vl7dnJ6WazyZXO8+zm5sZZe3l5Oa1yM/QQgtIyyzIpuJSyqirvfdu0fRcZAOfofOyttTbwOKgsK8oyIvV97wOURTmfzxGRBCEktXcXg0/rIisKIPTBQ4haq7KqGCdrTNe1Sqlca0QM0advi4g+DkQCEb3zIQLbkYHjdDotivLm9lYI8e7d27Ozc2vNxdmFw/jq5asQo86yu9u74P1ivgjBX11eZ0o9e/58vVo1dV1W5fNn83dvr7Oy2G62IXils6qqBuvbrq8mU6VU23V1vZ3P5+vVcjqdTSbV7e3dyfE01SmPj09vb2+llC+ev7i8vDTG+hB6M5yfnTVdt5jPAZFz2W63jLDrus+//Orq+oYY/+rrX9Rt982331XldLVa98aeXZz/+ONPn3/xxWDcfLFYrZeb9ebs7GK5XMUAZTVRKnv3/n1a7CEEH/yYfgnO74tlH3NVpJRJtCzRT5KHM8aOq3dsH0j0kXssYY+IpvQprb2PyNgxHqr/931vjNmvVQ4J5DwsfT/2XmPKmYxawkkT2XRsxYBHRK/xwWCGQ0jkcGemDDSlt0mZJk25VEoB4865VBNmJFJ1MLiPJpYdhgPGmMSI1VoH64ZhEIwHxkZi2wixDtYxjzaAcSEAkpDMBxbc2cWT48WcICxXN323jc4m1QAgtt5ue2uchW3T9sZFdNtN6wAYQRLw5wRKSaU5EUbGvfcuOGt8EgbTOsu15FI759qmH6wzNjbdMFhgbAe3EWdCCB981/eDB8bC3msRAGD0if4wahzfNynHvZ0FiAFdsvZh3xa
GGPeTCna1GcYYAyJKxVsAYMSFkETM2WCMGZD5iJgwQUQPMUHcEQEZMY5xp4odidhu7g9jiDiSNlM4loCgwwFVeCD6tQeu6TCa8z5kSjKIxvZCiKqqiOgj1/Ux0xgA7ps6YD8igHb5ECLCge6lYDzNPg8QUmNM8D44/+T8zFvXNM1qfbder03XpeU9X0xnk+l8PhdCmK7fbDZtVzvnJkdP0niN9JVjCMjEqJSWtiLtG+oAIBKliX9Ae2CT0oJkABAwIiZRTowEESI72HSHO+uxS9t933j/o7HvFgGif+gOd39jCBEwUNytGkjhUIBInKHlGANjbLDBWOsPNM/GO5h+ZYxHx2M3lQIpFZ73oC6L3nWtzdWsKIooMM/z9Y0jhLquQwjO2ePjI2sG72ye54wxiGGzXbVt++zJU2N6iEEIQYB1W1vr1suNtVZJKYQIZtguXTmZPjs9JoC6rrfbuvcQIKbuSyKQmeYqQ65jCMCdBxh8bAeHETmRj96HEJzzIfTOM6IjqaRSSGSN6Y3LM5JSO+m3Xd80jWRMCJGpHBGN6du2ZQTWOy6FynRIOpPOciGHvp1Wxa+++uKHn376P/zTP15fX1prv/76ay/Zf/tvv99ut2cnp03TBO+NMWVZbjfNdrut1qvzs7PrmxsPMXfZP/zul/UQt7lervtmsyzLMin9ElHTDSdn5z989+1XX3213tY//vTy9PR0eXMb/PZXv/rVX/7y1+Vy+dlnn/Wduby6mUzny+UyAN7cLieTydHJyV+++fYf//mfhr7t+y4Bp7dXl8dH821nNMv/h3/6RxOikFleTt5eXvd9/81f/zo/OYVIR8dTzvl6tZVSCcG8xxDCZDLfrcwYQgjjFGi/1+o7zNXS39vt9uzs7Pvvv7+4uKiqarVaTSaTw/Y+POAKHC6zwy0QQgCEMRSjg6kjtJ9VJIRIuGvqndglhWE/Z/IwRTjEVcdPShXIw3jzQUR56MnvqTEHgAwelPdG55+qmumkk+yLEIpz7mxII3tSs8RBY6OQUjISMe6GQScVweTPE70lKQI8sBdpr7a9sSEk1WbrY4iRcZkk2H0MNngIMVjHGWVaSg7Wu/W2ti60xt6t1pu227T9tumJC2SCOCMOTILKudCMsbiTCKekPgCModwffd+3betcGJy3PiAB4xhCUAqk5NYNbd+lruZUFDm8npQcYHiId8eP8764J9AeBB8e0uybNI+YI0Xy3vf9YH0USudlIaQ23tVdV9eNtTZA5JwzKZDv0qlEItpnVRF28sohra2khpAAekSUUhZFcTjY73AhjZj7oez9iLGPQQwR5XlORCHsLDiN89ABorvfS3BQ9B7hejgggKX3v0cdfcAQCVEwLqVs1ss3r37865//8PL777fLpRTi7Pzk66+++OqLL9NgzOVyebdehRDKyezi/GkxrVSRcS2ZEiJTKtNCSSb4oZg1HhBAxiB0N2z94z18+OIxeztM4OBR4eDxBvzkvnucAj56T0bEx/NJ+yWE4GIAQuNs2/dm5EPvg+7xeHxu4zclSr02yNhuGm2qcSilzs/PvTOEMdGdnHNJ4ymEIKXM89z0gzPGOceQrq+vnz15whhrtnVZln3fHx3Nuq7zxkrGc6XR+2az7NbL6IbFpHx6cnwylxTBOdCSHc+nR4spU3kzuMvb5XLbBGRcqLof3ny4bIahty5EZFzqrOBKuRi7fri5uUmRt1SqN8PtajkMg86zoiqt8be3d+v1GgASyOa9j8gG6631nHOpVWr5h+DrzXro2+PF7OTo6OuvviCIQ9dsVndZlj159qyoSp1nusgB8fr2lohOTk5ms9mrV68E4Xw2UVzcXd/88z/84xfPn3z95YuTo5whODM40wPAdLa4vLxUWTFbHEXkTOnvf/zp5vbu6PQ0hPAf/sN/+Oyzz/I8v71ZHh8fr1arZBjzrAwBNtum3rbXt7d3d3eAbDqdXl1d5rl++/ZtUeQc4fXr11LxX3z5xWq1Oj4
+/uzz54yJ6XS+vFs/e/bs1atXZZk/eXr+zTffLBaLsizX67WU/OjoKBnwVM5PZiEhQIduYtRYSUzGVBNJCwD3cl3jckpd50kRZZxm/MDjPOhMGDfdGIMyxoqiSKeUno8xsnK+eLBPdsnB4fSHvdCGtXYs8x2O5fy5AxFjuB/0nCSA03KPMUgpOaPtdktEs9kscXa1EFrrALFre+udkpnWOsmeWmusNZJzIih01vVtpvNnk1CWJQB0XZd0WJJCYPA+VZUw7maSeecAIKJXOo/E15v6+m61rtuuN/1ghZD1drvdrPq2oejms8n5yaIsi3W72dZ1iNR2w+3dYAw4Z60HnWdc8Bi8c1YwqMpMSuajAZ7G1YUYPWeQ5TrLtZSSOFuttk3bh0iDdX3nIgAiNTbmpWJCbJtm0xogIIFDiIiUpBkBANNSiYgRAx1E6fvQfvdPGiWXCn64HyLOEGMkBMZ2HMbgfAheKZllmc5yKYXzoW3bruudi1Gq1Fe0HxEOjBMXAgjT+PEIIbXcMb4TjUyNmMaY5PN2rNqPtYXuMwNGCDGTUjDKJX96fnZxciQJCQMD0EpE74ehF1z0ff/27dt+W08m1XQ+WyyOJ9MpceFDIGJJEhohwUpA+9yTxG7yHhES3w1wBYjBuRh8jAFSa8Iw9F1n+u67H/7y7u3rzWo9nVRfffnFl59/PptPORcJNl8ul9vtFoDysiyrSV6UQaqIFJMKCBdMiDSKiIgBIkQcBRs4MoYMk88lwYgjS8VGAsCQRmMAEe56LdLzGPyDKOHQwTxAVvDnd+Bhinb46wwhSacBsSQkHWPAGK2xDGmwxoc4uNB0wx/+9Je79XbgxYP3hI979R64bYqOEJPweIwRdyrbHsEfzSdffPasb2rBcDGbru+uvTOTsizy3DnHOddS9H0HMeZ5PvTtdrs5PjpijC3vbr13EKLgPOWSSuk0VN3ZgQC1FErIoiiZlF2/9T5Uk6KcVIzR7fJ2vd0uV9tNvXXeIbEA4H3gUiZROWRcSaW15kIC0GAH5z1xpqWKEIa+hxiUUoIziNE7m+aaJoyTiHlMoUHgjBFhiDE454NjxITgdV0fHx+/ffNmMim3m81mvbGcTycTiFEKQUjB+eXdKi+KPNdSCmOHzWbLEiMCond2OikmVVWVhc5ynedt0w7WP3v2/C/f/FVrdXR84qytJuXrV++8N59/9sWH198fHx3/y//2r59/9tlsvvjmm29+97t/+Omnl5zz2Wy6XN1dXJz/4c9/fPrs+bv371989vnZtPrLN3+RSi+OFk3baq0B6W65jkBv3r7rrZ3M5tttY4wlxo9PT68u32qdLRZHP3z/g9KZUtnrN6+fPnkupFqt1tu6zrJC6cwY65zXOoMY9vH3/YH7WQV5ntd1fXp6mjiSibV/GL2lY+xLPgzmdqphguPHbezJ7SV0Pbm9lHKk5vIQ9pMc4ONqwfiOY+Y4Ul0Os87HJ/fJnO9BKX6MHFPudcheTd8tMWWS2hTC/Wwz55wxLgmYHeazYxA9Qqbj4NwHsX+6fMgFEA8RBut741LhP316UoojgqLMj+bTSVFQ8H3fWxd8gLYbrAMfYNsBsl29LSVbJEBIxgQAur3ETupYElmWSSkBYByzHhGcTfAi2hC4AK0lY9j3YXCpY5gfoFb7+5KEFP97x+OykGB8zKgO+y+Pj0/SXAXjwrZptk1vXKQ9TyHuF9A+VdpRK8e6cZIAi+DTbU2Lkg5088aFPiYKD9KOT7Y9pCWRoNEUDHG+a75JIPYoYhT9w5zjQS51WNwGH3Z+MURjzGazufpw+eqnn7799tvV8mZWlb/59de//MVX89kkgnfGeDskjRjG2Gy2OD45KasKALZ1HQhdDENwFoInAMIA0Tg3rodx1xyWHA4TwZ87PnlDH2dv/0b3dng1PnlZdugLPMKFIhBjLoDxbkyUH3+Fx77wwZ2N0QNAIjoppfI8D95zYsn6tG273W6HYUjzmVNQnwxiCCE
ZgUlZ3t7elnmRZVnbtk+ePOm6TgoNkezgvPVaiWleSsFs30bXT6v882dnX3/+/OmTWVXq4E1Tr7dtcB5chE0dXr/fvHr3vml7rlTXm6brV5v6brlabrZtN4SIXKqiKKy1q9WqHXohhNTKhdA0TSpX51XJGNtut9fX133fZ1nGhSImAqD1AWBvMJ2H4Pu2LjItOF2cnZweH58cza/fv7u5uSknVTWdGmen81mWZW3fLZdLxtgf//hHIUTTbo0xy5vb09PTv/71r6ubSw7+xfNnv/z6y+dPn6Q5rpeXlwDw5u17pdR6u5lMprPF9LsfXv31u2+Pjo7W67UQ4j/9p//kvf/Nb/7uhx9++OKLL1JX85OLZ9ttw5hIMEZC2n7zq19/8+c/Y4Ttag0ASkrBWd+3X375ZSJndl1Xtz0y9oc//GGxWLx///76+vJXv/rVX/7yl8vLy81ms91uq6pSSh+iF6NhH9fe4UomotVqlZIwa21RFAk2P5x8OWqbpJ04LsIEPY7o45hHHpbARgmUZE+yLBurfYjIZqdn9+jT4UTmPToPAMH5UWkzfsyWOfxKD4LT9CCEOJY34z6SjTFNAybBWXJ1RVGkjGFS5DFG67xzDnbpKiZVTj+JAACAAElEQVR9DWNM37cMETHmOnPOzmezr05EUvRInenDMAx9nzC9TGsighCTPbXGhBC4lsRE39vVtjEu6KzQOkPGpeBm6HPJ59NqUuSzSc4Jlnc3A0Tngg9wt97GCExA20OWCSUF5yyCD97mmi1mpc5E8J6E9t5b03sflBRlWWgpAWG52qzXdYgYkTWdSeiKsV7knEthfWgHazxEBIfBOkCKgCwJZSFQorNggMBickvxozwP9lPfEHDsfh7vTACIsO8GyZQsyzLPc+e8MdYaOwzGugAIgosoyAfng0uylbgT3CfOWYINYgypKT4FSGZwo6peAi3TRzJ2PzfuMApLfXsp28sEe3p+9uT0WDKC4ARREoxRSnLG2rax1q5uLtNgo7KazuZzqTNkxIVEYowY29llurfEdDAwiO/WvXcuxjh0/Wa1vru+ubu5Xa/WfddZa3VGzy6ePn32NNPaORu850xwIX56+QoRq8lsNp8Xk0rpTCgltTapP5BzRhyRYoQkt0lAMaSOCgRAtuu5QMZFwv9oP052ZCDtOu4JEXcabzFGjvDYQcJBfW5Eina7Dz562d/MAAEACGNEjIgBMAJ6H4K33jvrLGPUtq1x3vq4rrs//PnPTTc4WeEB8IM/U/u/99DepIqs9z5EYIwlff2qKIa+TQ2aaXrOt9/8Oc+zXOuu62azmXOua1uttbM2xphpNZ1Mlsvbpq4RMVM6y7LNZjMtJsZYM/QIUQkuBeOcEUISMZBaq7wgztquX67W223vEfKy0HkWoutNtCYg+iRzREhJi8o7l8YBcsaLQscYnXXOOYzAGCei4B0hMCIuGKe0EQJFIMCoheBir4EIgjNGjCBYM4QAJycnq9Xq9Pys6VopZF1vG8YE5xCjGYbpZOKtA0BCmk2n337zVwSYz2aMU11vEeJmvaq3a+9dludcSKUzpbUPIY2tv729e/r06Wa1RoQqLzbrpQ/xd7/6/OXLl9PpbBiGm9u7PC9evHixXm+Rk7H24snFm7dvvvrqy7dv3yHiYnGkMZRl+fbdO0CaTmdcysFaofK3b9+HCHfLdZYVLsJytSnK0kcsC7nZbNu2ffH88zdv3202tQ+hqqZMiK7rB2NihBDDGP2MxRE4oNqlNGYYBkQsy3IYhizL+r4PITB+r7U7/iIeyDA9gBmIcCyiHRItQwhJONN7n2WZMSYJ1CVhMnpAtDuM5vCgIBf3DNTRqX4yDn1cHUyvP6wNjJhs+vWxtX5ETlLIn+L6FAskGZd9zmdSPpfKfinDSzBx0mr5+KLcX7j0ESmQHFzSs+FSahI8xnhzc7NeNwmbFgwxxGHo1uslIiPkxjlro1BMqiwEoKT7ncQwOUnJs1xprXUmxxD
DewDYz5yLsF6v2zY454LfabAJJYkoy7IYow9WShACnINhOJQED2OH2d8+Dr3L4X0cx6clW5moIovFoq7rJLJjnUvfAhFt2GVvIyI/XrcH5eJxYdR1PQL6hwSWT+YluFdSYPdXj6dRW7CXwE/3KP10MpmkXHkYhq7rjDEYgTEmuTismT0uej1GSJbL5eXl5evXr9+8eXN7extjXCwWL168+Kd/+IeLJ2cUIVibZVlVlm3bfvvNX+fz+dHR0Ww245w3Tbter1NqAkRMCK11un0ppiAiF0NK+B67h5/L6j75/M/txMO6WnzUIPs45/sk9HK4PQ+vT7pE6e+0Wpxzdh/pHq6xeEAWf7D8HhiE8fTGZqe7u7vXr19rrcuyXCwWQognT54AQFKrSDm9Uso5t7pbVlUVQog+/Ou/frO6W15cXCTWw2Qyy3UBAN7atFRyraSUDKHrGmeHqipm0wlC6HsTAnQOjAtEXOtcKUCAbdNfXd1EgCzLp/PZdDoXWqUhgpEwxpj6Utq2vb29bdsWERkJZJRIW0RUVVVZlt7729tba3xERkTW+r7vTW99sDtAZbF4//6tc+7yw7vFdKa0+J/+p3+X5/l2u40ISeKxLMtnz57lWt/d3T1//vzDhw8xxu12e3Jy8ubNm9ls1rfNanm3urvt2ibNVfj1r3/99MmT+fERY8x5T0SvXr2ZTqfnT55677///vtf//rXUsrb26Ux5vXr123bl2UppUyckaOjk6IoAKBt26ZpTk5Ovvvuu3//7/99CCFEl/TbhqGbTqc3NzdnZ2fJbYQQfnz5Umu9XC5PT08R0Rjzz//8z4mP473fbDZJbDPdxwT8JP7jYf43Lo+UPa9Wq9lsVtd1wurCTq/OjJDmuMFHC4Z7FCrN07ifeXJglBKImMzRrsWZ8/Sh+2xvdhSdD9ZhiBQhhTApLhVsF5UczjdyfoddjG80wiOPt3GMEVnSPkJiRCSQOAEj4jHETKkYQStZlto7E32faT50bVHkgtPQ94M11hofgpRCShm8d84AouS86/uizE9OjmOo9WSGjIwxQNEYU3dtYLJz6HlGukSeG+/8MGCwHHym0ZrhZt1shoiTueHFh7q53bStdULA0yfHz5+dJVDexlBO5jdr5oNarVprAxDb1H30kGWQ50pyPgwtRjg+Pp1NTkMQzrJ6exe9gwCZUuenT05Pzq2Dd2+vm34AjD5CICCGzjtjjVQx03roW9s6jBAjWA8RIGMZBMmiJGIIFKILEIA54FFQUikPBJERMsI092wEngHifviad95xjgBRSuGctTZMp2VRTW7vluvttpxMsqxq2n7bdEiCCx0icUExRkIuhCQk7xxEJGRaZcFHQp7pnDMZA0RPDEWATkomFeMCkUIEhxSIYgQH6BED5ygV05lQmgtJEUgQMQIBUaD7u198mbGYMdIc3dAzQqFUAEIuIhe98yVXP/zwk5B6PptXVbmYT6wdYrSEnsgjA0yT4MB7CgHDlAmwDp1XxEQE13bNct2sNt/+6c931zdd3QkuZ8fHpxcXR2cX1XwRuTCBIpMks8HFm+Vm23Zc6uPjkzwvUicixMiIpBBKiDD0IngePPOOvGPRcwgMI0aH4AF3fyIFYBFYZEzeV92T+JH3wTtOyBBSI0ewBrxlEAXtKHCPw4UH6OV4cB4RYwQfo4/RR/BJdPTB30SQBiuhwDQlKTgHzvHoJYaMoBK8Wa0IAueqNn7Tmv/yL39gMmsCS+2hcUfV22nhjuFsTPP99oezACgisAjMxwBIOpNlWbabJTl3NMl+++UXJQbpDOsHs9lsejian0br4jBQCM1mK4SYHp2QLLvA/uN//q+nz55Mjo6//eHH588ummZ7JM18IppmeXfXcx0Yl4HT0cmFBQghWtMzPyxyfl7J3K3D0l9GsMb1HiyQZ9wSNt5vhvjyenOzXXsQ+WQymcwIRdf3TdOSb5Xguc4Zkh2Ms4GzTKnCDrEfHJEsygkx0fa9jU4VOQN
SXGKkoTPWOMYVlxqYDEhNP3AhsywrywzBKx4F8yEACx7ckAllnQkRltvV26vLbdP9/T/9k3GmH4a8zNa3d7NZdXt9FeZf3G7d/OS8a4dXP/04VfJ8XpyUsgC7+vDq2elss7y9vb6rZou//4d/evX2arldzc+eRpGt2uGu7llWfvPdyy++/rXOy3rTtF1fat133cXp8Xa5FAzzbNLZYCI3Ub66uv38F78xwYcYzbARFL79yx++/Oz58vZuMpuHyD9c34GPi/lxDGyzqX/1q9+8fv3m9Zs3f/+73/7ww/dPnpzf3FxyQussI2RIhOCcvW9g2+s5pLJIEjlhjGVZlprrGWP4KJBNx89RSYQUxFhkLAAEZAHJE4/EgcgHYJJJLpzro7fedJvVrWLIwLHZ0clh2DiGaaPk2F5acxc/uuBH1hbifYJ5+A4fwTKpeypiAk4T/AgAwTspOOdcCJ4EjZw1ACC5UEo5F5q+M9Y55yLsUgRrzDB0ECMjBIi5VpPJRIZNnmUYvDOGszRDx4WIxqTKBMcIYI21Q3QmBh8pRKTBo43gSfTGrrf10PVlzjPJpoXOleCEUglO1Pfd1bINIRgzWGtDDMYECFDkXGuVKc0IBeOTqtRSJc7PMDQpp8myrMhzAFhv1svV0oUEhiEQi4CJwS6EjEhdPxgDASACWZcQSYaMpX65EFPpPSJFIjKD8z6kPPLx+viZbBtC8IiolNBaO+e7rivLkjFmjdthC/siLrK0yDjtOx8Y40qpFJXHuNf1ds4M1hhDLIwZ2xiCJWJLevCAABWRMQRGwCEKil88f1YorhgTDKP3nNFY2HPed11Xcr5er1ebdYyxKMuqqnSe50UxJJl/BJGIuEISUQwhmj76yDhDxLquX79+/c033/z1r3/tuq4oiqfPnjx58nQym3LOdxVMiomV1zTNhw8f1ut1WZbPnz9PN/EQPEg1RRfCYc14dEuHRfXD8FZw/aAi/hiKiAcUlXGixeG7wQHI+eD+En6UY42/FfZTBQ5fH2MMe6rv7iySE4Pgne26LkT0EevB3iw3//WPf0ZiTUiEnXvQNX3c2Lp3mNt57wVLInkIkEQDgFKJPngC/+Vnz3PJgumPF7N3r1+bofdMQIycADD2Xeu9z8vJdDaTKvvrdz9YZ8qivLu5GfpuUpVFltvt3XQ6d97f3rVcxIuLJ9V0Yowx1nnn3T4KT9pA8zm9byjEELxjjHwIXW9iAJ1La7x33pvBWxucRQhKyFznq9t3bdMjYllWgLRd18YYrXVZFDEGM/TD0AUIUgjGEi/DjIIbaWKD9y4J543kc4DIGBOCK6UG4NfXV3fLVZ7neVFuttvttq6qyfXVZabzGLzg/OhowTnPtO7aDvOp5Gy1vJtOJlora83Q94zx2Xx+9uTi5na5Wm9efPbZdDr901/+kufZ0K2//vrr7baeTqcfPlxKxpumDd7OZ9NM6/V6pbTItHr96vV0PvHOT6vq6Pj4+va2KCcvX7+aTmdFmccYm3qz3da9sUU53Ta9AwrAi2r69vVP2+32yy+/MsYUZXV9fQNIi8Xi6upaSnV8crLd1NZ5Ioo70aX74tehf0kgZ0LscC8zba39JLA1ok2fOnZFHwBIMy5TguWsJUSAACEkIzr0fd/3VVkJIdh0cfx4R41g4Ih33bu91H+63/mHbu/BRt2Z3R0jAxH3E9wT29A7KbjWSkoRgnMu9WejVhkXore2aZpU4Qtxp9PvnBuGLsaYiKFayaqqsF9qrQggOKsYk1z44Afj+sESY4IJxAjOeWswOMAY0TMuA5MeuYm0qdvlemOHKFiYFNnRbKqlYBi1VoCwWW/eXK+BEImFCM46YwJnUBZKSpHrTHAmOS+05oyboRuG3pgOANJUJqV41/XL1aptu7ALmSkiITEkzgSXUvXG9oMzFpCAuPAQnY9+1yqXNKJi2A0bAsZYDJ4ID31J3Ok/fcSVGm+E9xEAQgAhWFJgatsuQXzW2q7tR/Hy3e1
jCVjg41oUQiYNodSSMXJP0lxhrSVnQnDJGGfEGfHxAWEaqUMxgnfBOW+t41JSGr0GUTL44vmzUgnFGCeI3ie9lXTqxtm2bSdaOO+uri45Z6dnZ3meZ0VOiNPphBhFn6QgIkB0znpnw9DGEPqhu7m++vGH71+/etO1Xab1V19+eX52fnR8lBcZI8SInJhgTBe6bdvE2ETEhAAXRUEHnUAflQ0OygyHa/7wmo+vZ4ylbO+wFv4ggXuQ1R26vcMX/G23d/jMGO4cYpv3r0m6FIABMYmVB+9jcLvxJoAu4Haw7y5v//TX75DxJtx3YoxEg9QO9eCdd+Bn8n8Jj9q79eCMYCAQf/Xl56Zt3NBdnJ28f/PWmmFyfDIMPVJkiM12CwCT2Vxmxbpu//LX709PL9qu++67m0nBikxzzo4nWZKfyTTjQngfXPAxgtIZFxz3aFgC84+PjwcxG7p6u+1jdETMBR8QhJQ+eueg7V1Tb43pGFKus7woONq6bZquFVxkWQ5I9XZ7fX07nVSIEGMwpvfeMc4YYUpi0tZQSjHGrTXOWQDIMp3S+xBChIRLR2td52JZFN6HrusZMYYMkcqiurq6Goa+Ksu2acqywAhKSmPt3aaryuLD23dFmXHBl8tVbwYfYbD2yy9/0fZ9UU0nk+lmu/nTH7797d//6vryzXQ6dc7PZrPNcr3dbvM8W69W5+dnBIAIIbhcZ5cf3k1nUyQmGXvy9Onb9x/ycnJ5fdX1w7NnT4UQXVvf3S6F1MaGbnAmYEQ+mS1ub663deOcOz09ffP+/W9/+7vr6+sY4/X1jffh6198fXN9a2zqmEK8l2W493kjTpAQYynlCE6mToHHqOGhv3xwjK6PiHY1O8YRMQbPiAEECHHv9rqu65Qu4qEmJ3xcBhjh1BQ2PujqS88/KKg82KLwEdH5EwXwsYCX9BtTFYf2bYYp500oyj2BkCj4gASjHen73hhXahmJe++JcUYCQxx9Nmc7if3IiEUQkhPnHJiUxH1wzrnB+wBmAKVUVVVEwTibheBCbPoh0czyPI8xDkOHAEwCpNifIQRKA0vhoOOKiJIYP0Lo+iQxjsaOZhEQSXIWkRHx0PSpUIIIQrACuYvGGRejT/MVIiQLEjmXjKGW6vCWj21VY0vK4WVPzpJztDYSUVI9T7wp3Jf9xjwvpfiAu/LeIYaOez29NAQ1NV3thwzAoTXEJLxJlMp1D9b6rjgUQ0CIlCjgO9PonIveC06HhSsi6vq2qiofw2qzvrm54koKLYasqJsNEk8jYFiM4Fyw1lt7d3Vd1/V6vW7b3jmT5fnz46dHi5PpdJoGCSVml84kAMSATdOsVqu6rtMw0tlslgxZqrmOBYZxxeIBVedBmnWY5x2mRw82xWNE5DF6E/cCCw+ev68d7H86ZnUPXn/4/ocH5zxJmIHHGGIIzlobjGG4n6/kIwA0TbNzqx+npPtFRQ8+aPz0nYwZG6UkYvAmuuCDVwz7ofN9r9BvNpuyLBGCVHy9MsZ4kpwEVdU0K8vVevvy7aWQudDZ1U+vageM68H47erq/Osz69ykmk1mRzc3N8vVZrBOaT2RSimltO66XXEo9Xv9j797TqHt6vWqBeBWSWECDCZIVQyx9S7UBuJtEz0QESA7rQrGeN0017dLF2hSlNb6d2/evnz9+mg+nU4rIcnZYbVaaSWkFJPJNM0TTf1hRVE5Z6y1qak8cQDS/ur7fhiGQPri4sKF8P33P1o7eB8Zp6LInj17dnd3p7V+//aduhKKCymO5vP5y8uXA8fZtFitVmVZDj5wEu+v70IIIitPnzz9upx8++1fvbOLudoub7Ms+/Of//y73/1D13UXFxf/8i//cnJ0fHd3t1wuvXefv3i2Wl731pxdPFmtl+enZ4nBnqbMHx8fX9/cbTab09PT6XSKiHmev3l/A6TTSuu67vnz59fX169evVosjpd3a/ErUVUVYyKEUNf17c2yLMumH4wxjPMQQhJXf1Bgjvu
J9ulBMjJElBwhfAxyjMkVfOrYvf4RpUBKyYkB+hg+auC2zkFANpkfPfBYo71L9e17axVCjNHHMPqkMez9GyAn7ZQykvwuIe6n8kDkjIqikFIY0yNinmvGmPMxREgCV8R4iBhjFEJopWKMzg4QohBMCplnelqVtr2tysm0KgVjtu9Tbb7r+sE6BGSMC84QYrAGY2TEilJFoNaE3sXWhtWm3tY9Ijx/MltMq0JLCJYRaKV7Y64ur6+XTgjSuQaEpm28i1wAJ5zPpkoIiIEhFpnmnILz0QfioJQqikxrZa1dr9d93zHGuyEkOSMgpERB5Jwx1vSdtcEHIA5CSuTMh+gjOu9DEhOLERESa5FzoZXEvRTqYSvo4ZU/ZEMQRc55jLv5xV3XhRAnk0miBYcQR5HoXRGY7fT0khdMMWz6ICklY2nwSu+cS+BnjG5MfQ5jo7GxZKSupMUNxDAGhiAJM8l+8cXnk0xpziE4jEGIHbRORBHBWltoVpT51e110zXEiHM+n0+lENYMWuvZtCqLIji7vL398O7th3dvr96/eff2ze3NtZLy+bMnz5+/mE2njGizXnlnuWC5zjOtEaDru6ZtLq+vpZQnJydHR0dZlidjLaVKA7P2X2cnsxdCAOIAuItH9sS0cYbDjvZ68CeB+o9D1Me+ZPQjj575iCbzIF8cf77XNaMH5/PwDyFECqn8GyF4F5yL3iEE55wHMD52Lnz346tXHy4jMcvUg5h4dHuH9LTDjBZ2yATtZht4F+0QhkEyCMZIDlrwoWumkyqGcNNs6+2GcZYplRfZ0fGxceEv3/247c10dnx5c3v5YYUBJoXIsqxtWtveVZPJbLGwLrRdn2WZEHK9Xpuh58R1lgnBGe5YvkRUTmeaM4TQtZu6iz4Gj2QjIhMxwczRew/WWO+d806AyXReVFWM2A+9D6CzbD6brdYr5y1npDOFENu2sc4KIfW+SzXJ8gkhlJKcib7vYT8cgzMhhEQE7302mZrBLJdLJCalurq6uVsup5NpluVd1x0tFleXl0gQQ5iUk8V8ITh//erV+elJ2/W6mHAhSaq3H65VVjRdp1Q2nVSMqMx1rsSP33938uTs9Zu3X37xBRFlOru6ukoMdkbIGGVaaSWstXmR394tlVKzMgfEwbrJdN5bg8SWy7tnz54xguvrmwD4/vJa6gKFNi7eLlfVZCKk8iHe3a2fPn36zTffnJydlmW12qwZF6vV+uzsrGs70w9McOddoiU+2AIhhJQij6jV2P4fPgYdxweHTM7DY1eJ2ClpMBzHRBNxxgAjhEgECGDN0Pc9MAkReNhLTh+u6RSe7DWj7yl24wk9YPodxrOfjDoBHiJC6SNo3+mVHqRrsesDAyK2H5a5c/iRiCIBJyY5Sc4YYybQMFiIKKW0DQzD4FyAEDHCoYQuEGLYxQR7gA4SusMYKEmff/65bTbr9brIRDYpiItu2Ky3vTHQdV1pixijcyEiIELfm2TKA0SWtDNDRESllEryRZlCxNSr51xgLFkHQE4+peKcRaLgE+hvnYdIQAwkskwLBzjUPcMISB52QEG6/WmCXQoLxut/GHbQx03iyTylRZMUeoi4tbaua+fcbszpI8c5uq6UHFhrU+nO+5jcHhFxJg/X5bhykp8bwYAHqY+Dj6K/MRcUlEDc3cAgxpjirCiKHAVj7O/+7u++++47Y8xqdffDDz9wzheLRbZcXr0Tzrn1er1er9u29j7mWmmlzk/PFsdHVVUJhoyj4IoIynKSl0WMcbvdrlfbtu+891rr2Wy2WCwStJBS1URCO1zMj9HFw68wXrRP5m0/9+DQbYxvSI96/OHj9D0e0GvHHz346fj345Q0Vb49YGojJuKMeeI+uJDGaSXkYBgGxpj77838PLwC6ZVJLML7EH1wNkZng+3d0OaSBx+26+VMn5DGu7u7WVkCwDD0nDOtZVFVnPPeuB9evv7Tn9//+ndfmxi/f/meAZQV3S7rk+PFdHbs7fWm7pn
Yaq3n86PtdlvXNWOsrmutdV7lSmVE5F1I2Ymkm2fHlRK/AgjNn17e9uDIMZkNfQ/EBJecBJimdXC3ro31J8WJj3VZlnlWOOfarvMhTCeT6WJabzbvrt77cFKWOecyBNf35vb29vj4eDKZ3N7eLpfLYRiOjxdVVXGRmq98Uo9KW0MIgZw75wRjgiEydXQ0t9ZeXV1V1TSpU45jbTabTVVVf//rX/zx9/9ihq7IdZ7nm7ZfLbd3m4ZUjkK+/XCJiNMiOz89yRU3be0A8jy7u7t78uRJ9PHzzz///tvvFvPp7e3Nycnxy5cvf/nLr7kIfT9MZvO71eb56TxtwCTc9fTp0//y//vPv/vd75Sk6XR6u9qmG62U0p6vX73NtKzKad+9SXe8adrgAQWW5QQRP7y/gv1knsOF8cDzJVSA9nIqh4QX68yhmxh/a5Q+f3A8cEAxBaQAzjmCnVpQsj+JdtAONsbIqun8AYo68m3GeWkjgIaILuxKfYfjfsad9iDTjHuNxH1VL233CABJOmM6nRZ5NgwdInJGdV0zUt6Hbqd0dY/UM4YxRuds8E4KoaTIM12VuWlWBDAtizLL3dBZazCEwZoASMiEkFmmBeOJ+iiVcsGaEG1Aj6x3sNm2QPH46ChTol4vMbj5bFIWhbHm5u725qZpBkACrSUgdl3HGSgl7BCmkzzLtOQ8ldfsYKw1nDMuKMsyKYX3vmm2Xdc6HxJFUyjiQiDjjDEuBGDSQYuJtAIIaRoWEgLhYMwueUjSzxBDGvy755o/uOBxL3T5gOeCEEOIO/W13VLjKV1Li2Tk4u5sGYaEzRwEQDEBgwBgrUsDMbTWWmVEhJj4L4yIHeY6afkd/klJEnGGMRBGgaA4fvH8WSaIAwiOBCAEJyIfQsoNAaDe3PoQirJo2rap6yzL+ra+uvzw/t3bN69e/fTjDy9/+unm+toaU2g1LavFfPbk4vzF82ez+QxiDC5kWXa0mM8WMyXVYPrb25ubu5uu66QUVVWePHmS5Tkg+hgY50rriNAPQ1JOiQhAGMdGSbZTij7cdeN+OQxC718A7HCXxo+5Y+FA9DL9N8HIf2N7P3SfY+PeSE85eH2yMYdAZ8AQA4SkLxd3+vUIwTozGGOsG1x0wP/wzXfXdxsXwJD85Fd7kLmOXja1PXjvvLfeDNYMbujAmnlVsOinZS4IlCA79JxotV6JQkvJizwvq8oH/+rN2+9+fLPt4vR49uFqeb1s80wa47yD50/OlFSTjOqmvV0us7w4OT7p+q7ebqfT6TB0SirBxQ5MEZyAIALatsx1NamYEL0z62Y7eIjEYwQIECAlhojRBw/GueNKDIPxPjDGeWLAx2CMKcvcWTsMvQ9BMMYEAyTnw9D2nAul5D7MHRCBc16WBefcezcMQwipBgHO+mYYtNKCi6EfrLE604hsdbdSQrGkzhhjnhVDP9TbDQCcn8yGYTBmmC6OB+vWdXt1tzYhGBeyPB/6fjatXv7wvZb8aDY9WszWQ392etq1XQgh11oI3nddWRR932dSNU3NGOV5EWJAzuttfTTJhZTExYfLa13kQsi75e1isRCcpFTrbX1zu+qMJ6mRqZev3/gQlc4ur67yolhvNpPJLPEPlssVY0xJba1NRVDvvJLyk6orMca+75P2GOw7yhljxpgQ7h3V4eJ/EMYdLr9UW4px38mctIdCICTEiBFStjf0Xdu229Y6H/gYYz7O9sZQ9LBd73Go+7ePuBcZiTFipBgj7dSTY9y7zzG57LpOzSvrUw9+iODGnDKEgBFi9AiRICRaBCIC8HYwbW+OpsC5FMQcRY6kJXcBBCcppUBEL4GiFLxuakAmku6nMYxRnmWL2fyHH34gZz5/fj6dLRin26v1atN6hIjgPdjg7/vMJO972/bdpCyyTMZIxgym7SAGSqrCe/WdsGP9UQiBWGKQM0BEhoylcDswhoIzwVzwAOgJveBMA0mCQGARXADnU+UsTQNnY34wQouH/cv3Diz
GEAJjZG1IjKn9jca6bpUSu6w9BsF3nXPOOcKolOJc7DBt70eYLgnUJZ9XlqXgKtXrD1fkCNkn/YXHJpIRxQMrn97ThKCFTNJpsC8TMkTGWJrvuFgsrq6urLW/+OprLXnf95vVGhEFMSmlzlSmdOra1HkWYxyGQWu9WCyIyIdY1zUJvlqtrq5v27ZVmT4+Pj4+Oi3LcoAdRL+PAz6a44UHDXC7Cxsfur1xvzxI9Xa/9Skw80EKdZic/Xf20SNuyzif7PGnH77buLs55xYj+JhmEcckY+9398J4ZwMEJtJ9TE8eAphj7+Bhxjkuv9GSMGToY0DghCQYcV5qYbrhaDHr1xtnrBD8dnVbrzdH82fGGB9dCMEYd327NN5//vWTq7v1h8s7nUkSet2YIwVC5U3TBGg558vN2v/08mhx/Pz5Z4LzpH9k7dC02xhjzsssKxjxEIKtb8PQMM6enR1vOrvtXf/muja9yCbWOPDeM5JcIlK0w+DDX767+fzFoprqTd1Q2y1m07IswQfrrc5z5Bisrbt2yirGMUlkbDYbYlCW5Ww2W6/XTdPFGIkB59z7JDh8fwHt0C27LtPF2enJ23fv372/Ntafnp0w4kVRXF1dZVmmta43myRi8v7dm//hn//xf/v9nybT6dVPb6wFF8NkOicu6qZj0RfV5D/+4feZoOOjCUB48eLF8fHx7//rf3316tXF6dlmtT45Pm7bVgv56tWr5y+evnz9tprOq/n86uoqm0z6vmdCLE7P/j//+X/9v/xf/5c3b9+/ePGi67pM8+l0KsQHxth2ufUi16Wy1l5d3wDgr3796++//aHtu23d/uIXvwCgVHp8cvHsxx9/XCwWSiljnNa6M8PPrWTaq4ilPtEEcj7I6j6JWBweqYc7Jool3I/A2zOlQ/LBYd+TuqvQzY9ORjwnySqmXqU0DyjuZWDGT/VxN7I1cdMPLexh/jHaZZ1p7/3QG+89I8Y5T1owjNF8Np1MqqvLD0KwNDYWACII67wP0XlvvANiabQBIXjnkSKEUBa5VvJXv/rl5bt3q7trb/3p8eJoNrF9ixEYo/V6g0jOeqlUURRi3y7Ste1gOqFzUroZXN10LlKIcbVcvn3TvXg6eXpxBsGv1htj3bZutlvjIsQIUnNAGKwRjHNOfWenk/xoPi/zrK2b7XolOcszhSHoQnm/6/ztuzZ5+64NJAARiTEuJRccAAJERHIh9n3f9jEGyHJRVpXSgnHW9V3Txdbs5h1YDwAxy8q+7+KjbpMYIVXFkv0d+z0ZY2bwp6eL6XSayCyc8xjBObu/QUR4L/YTQhCSOecSaExE1tpU/0szC0OIaU56CMH7QERpY493f5R+HmXnRoRzl4lyFpyD6NvN+mQ+eXZ+VmWSA2jJleAAMenPEREQcs6bzUpnuVR6OplyzpZ3d1rpL774siyKyWRSFJnSSgmutcrzrMgzrbIiL6rJRKe8re/X681yufzhpx/7YSgnk7Pzs8XxcVGWTIgQY4QkBYeMOBGLAWKElLnGCGlmWAwQAwQfg4+MiwfFs1TkSwqNqYMjpb/jTx9kS4fIyqEh2OszPHx9es29sO3H74CMAVIS9UkdMnHPGd9puiIi7XtxEW3wowQTEosQIXgIwdkBAHyMLsC67f/03Q/bzjSdCUI/SE8fhFmHNggRPcToQwgegsPo3ND7vjFdfzKbCIRcCslZWejtes05U5k2ETabbZ5lz54///bb77/94cPZ06frdrhZ1pP5yc1yXfdDxuH05GRaFq9fvoy+41JplTEuEKPSqirLSVUpIfqud85NJ9NMZ0M3RIDZbFYKAIAAwKQWOvcRt123bTqpRAQI3sUk25gUMoMHH6WIUmtkZL2z3kKMSBijq6pKKdm1nRmMyrIsz1MYu15vyqIUXGaZ1lqv1ysp1Xq9ns3mWZ5dXV0fHx+PmHBneq2UDz4CZFmxXq9fv33TNr2QcjKpALAqK+99kefrzXYw5unJzPpAXGZl9eb
d5enFs8GHu+VqW9dKK4AI3hZFtphNvvvu2/OzM5DaOX9xdvHq5avppCJEiCGTqus659zzzz7fbLav3rz58uuvqmqyXm+OS2WsnS0WP/z4EjljjLddM51O57NJ3w+buplMF9umv7pbdYPb1K3I8qEfFicnRHR9d7tYnCid3S2Xi/n86vImCSynMsFms+ac9caMmVVStLDWNk0zRm+JAIh7ObHUEPLY7eHBpJcRgxyZd5QaipBijKnC552FGBEjJ0KMZhi2m/V2u40kEZFN50fjAIRRuBP3iuyHIMbOu/qPQM5xuT+G19Kv102NiFpnQojgozEmJXl5ngnOEKFpaiEYIgrO+r7nQse9coTfD32OPlaTQnASRM7aGCzDuJhO1+t1W28xxqP5ZFYWwRqCwBCt2RnxPMtznUX00XsIPnqPGALg4IMNiEyGCKvV+uqqnpfw7OnppCqCDwFC15nNZjt411sQAvJcETFjjVSyqspMs0lVzial4KzvGu9sJlWeZ5ILoUWaHR9CGPrOWhsBiAWVScYYMuJSCCmJk/Pe2qEfAmAk8ozv8IDU4Me48GhDiMgAiDsfAMiHkIZ4j11fuK/tjYw+51Lz0o6CkWU8DXUahiFln96HpKsC8NEEhx2SAAnzvNdiTesqOXJE2ouO7qRk0rCIBynR4RseLg+WtKoABCP0bj4pnl+cpwYGydPgexdCIMaEEImMbvsGgBhLEjzcBz8Mpt5uheSc8bzMF/P58cnx8dFxmjZcFCVjbDDDZrNZrlbL1bppms4Mxycnk9l8MpnoPFdKCaUZ51zwJE37+Oj7/lAYYsz1AT/yPYdf9pDDeeDP/pbAzoPaWIoYH2CJY0jxyZgX6aNzeOCEDp/E3ezGkNz2jpXjY3DeB++s9T5YG4YQGuO++e7Hdd0NxnmuPvllR9wJPs7mrXfODs4O0bvgbDA9g1hIWkwrybDQUkkeY2ibxpjBe985WxTFi2cvXr97v17XWVl+/9O7TTPUrSOZ9cbnOpuWVVloydmrVx9i8GWpp9OplBIR3GBi8EqJk5MjYpikZhBJayW1JiQeB+tCb02IWFRVNZ2HENuuub3dMopKyhiCM0kYiRBIkzPGEYeqrKRWQggueAzBeau1lkL44K0x3nvnrLVWC5EwNkQUkqdI0VqrlCSitmtms9nd3e10Oh05aIKzGCMCCSmJ2Gq5fvX6NRCWZYVI1rmhH+azWTLxVak5F2dPnv6n//K/RuS9dU+fffb2w7vlcmkHo7Usi/Lq6v1iMYcYmrYFLrIsq4qyLIrg3ND1y+WyKsqqqhhj0/msN4PUSuelkJorzU3dtO3ZxZPletv03bNnzz9cvi+Koiiy5d1KKP3+w7WPJLISmdo2bdP1iLzr2vOLp6ll1hhzfX2thE6PcS+PkvQmfbhf4aOiZgrNE8g5pneMMSGEMfbTxYI9cTKOE2EPSwY7gdl9ykWEAJwxxkkwTgSpgaHve+sRAPihqRq3dzgYrf4A0HjshP/2fs6yLJ2ic877wDkvszzLshhcjKFt22EYskxIKQGic84NJoTgAZELyfZUKEyNZREgMkA7DKCVMSYGZ5xHgL43ZnCMiDgnYEVR9MamGiZjzKXJO8SRcwQBwKJHSn0MqTwZ4fi4ms1miNi2bZ7nxm9b44iRjwEIAlDca3NrrWWVZVIQ0ah8LSTLMiWIeQrGmBBdal91zhJnWaYjI5sa3yAAhhjABzMMXV17ISjLJQJL9T2upcxKf71SghgGQJCcIxchMkCGYB6g2w9wM0RMU+rSbU1yQcNgxjkg3ofHd218xpjAOXjyKary3jMGjLH0X9p3R4cQgk+6ix8F/t7v6kVJTHU/5ZfSuCHOOXGGCJIBs2asGd8zA8OuK5Tv1Eei0Jmzfhis1vnpybmU8urDZbPZGmMkF1mmTDZ0mZJSEgEBDsZba9uht9YiI6WUyrNc6qOTU9yfQIB0Q3GX+SBC4hfjQVv3ftBgkm4cG1XHSPQwKIwHkuh
wgEPG+GAw1CcUWB48ePwOo/P7pD/DVMEa7z7gvhB5f5r3T0RAZJDmzadRGmkI9h5b9jHEgMGDT7AjfhTyjk79cO09gKGQkxsChMAZoA8AMZdiUWYUvBIseCtz2W63Mcau77TWwPnx8Ulv7bu3H4TSIopVDUzB9OiEqdyvtsYH630AkEpNZqquh007ZIWrMhWRuqEn8FqJ6aQ8ms0Rcb3adm3LGGNSDaYvc11IBOsMUJbJrNJ1+6RuNvVm3Q7ODk4xFpCsMZ6xTOVVRle3LVtuj45PJXHiHBA7M6A3wzBInud57o1JQaQQQhMJIdbrrfcx9TAsFkc3N9eIuFwuq0lRFMWHD+/u7u6qqiJiRa59AG+NcUYRnp+f3i2ffv/jy++/f6NVdnx86r1fr9epA4oxttk2xWThYpxMJler7TSbCw4n8+lmtULw88k0xvj6zfXp6enJfNoPAzadc6Feb148f35z+eFoNuv7hguaTqeMYzJWKstfvX77hdInJ6fstlmu1zHGi4uLf/3j79PIw/3gckjIHxFdXJz++PqqLMveo/fx9m41XzTHp2cfPnyQXDAm6q49Pj5umiZF2ABwcny83WxGP5ci8sMAfeQzjvHlz/ekPyyEP3ClyDkiRtiNFKX9VJb94O2DHiofd27vMGkY077xA8ZTwQOK5mO8dTynB4fSOg1jI6KyLLMs40gAYKwVbDfQ2Vqb5zlEh4hhD/h8VG6MYIyJzlIMgIGIpmWVWDrBU0Cw1nsftVQsAESfaamlAK601kQYBh/3tSIfCIkrzq0B2/XWWqXF6WmWprv1fV/X9XQxByS303IzXArijHGRcSYli/tM1zk3mGEYeoKwGyDHeGNaY4zzJvXGee8jgpTSQWSMEJIUagSIiJEL5r1nLCjOhRAhRgCSWqhcFrnKe7VtnLWA5KXMI/AIZIZhzIYPc/FD23SosxpjNMa4nW5CmnOWmrcS6vJotuK+FJsooyGE1D4+3vER9E664kj3SP14MvFT3Sw4Uj2RS0Go1FgzHrcBMTGiGQm4UGzStm3qds/L/GhxwplsppvVahWD2zb1ertJQgeJ6CtQMsa4kirLiyLLy0k5qZTOmRRACAA+YkSIEBAQI0nOHuyldKoJeMEDGur+JD/Rt4ePivAHjuETDu8wXsRHNblP+sUHDu/+lD5WdRnP55OGI/1WiBD22V6M453F8fEoL3K49w8/+tANx4+bMhmXIQQCSCrmIQZOPM+UM0Nksu+GKtNJ6JKItNYsnwie/flP31gXgcP3P7wqK8H1FHQZSJgQnTPkTa6Q+Ml0Pof4Ydt25u37s6PZZ08vCk0UXN92H96+m81mp6enRVEsVxszDCT6oig6F7TOC10w54PpzDBUEn/5/CyX/F9//5cPq0Do8yzfuM55B4hZWfFla33Y1C1AYNMyywQY8BE2dQ0QlJBZWYxE9LquZ7OZMaaua8YQAM4vThO3ZRjas/wk6f68e/fuxYsXeZ7nRVbXLcZg+sEM7uSs+OLzz5fL7f/zP/yv79+/r6ppmi369u3bTCkiul3XkV97rn7zm9+Eb7+LENd3V8ezqj6aBUClBUDQuQJkrY11PTAlu67bEDy5uFBaHk3nXbNNU0dCCMvl8m65Knx8f32jy6qaLJ5Mp+8+fFitVlmWWWvv7u6m02kShuacp9EKN+subc/z8/P18M5u67KcvH///ne/+12M0ccwznHVWnddl/qwk8Lq4MNoN5JbSZcG9+LU4xoLu2npP3s8WNs/l3QdhpshBA8xRjsMSaSlR55RRDaZLeKB0HDKOh/U88Y9HxNun8bm7Pv2HnvpeFB174c+xqhVNplMqrICgLZuVqtVDF4KHkKAGABCURQQg7WWq4KxVHWApGtghsEMA2Nohh5CgOA5Yy+ePeWCrVfroTOCsWlVHM0n01wRBO/TEBwmlFY6A4Ch72LwjCgCEoWI5CIOLnbGdYMZrIHoMyUFI2tM27XnFxfbbbPaNs4FoJjpLGl
taa0YgbM2BJdpHaN3w2CHXjE+m1ZlmQnONnW9Wq1C8FmWBe+HYQAMnDNgIIRUOuNKpcIPEzzLtHWec8aIBOeME1AEiEg0nc6AMWts3dreBOuDszAYS3tR4mRyd/0qKXU9wMR2N8t775Po9r2uNyIlVifAQ5ATAJAC5yxxaK116c3GFggAHNWKU5oAeL9IRugVPs5j6GDcKzIiiEpy9L7M5OliXiqRCQHBKcG1kvesSEac84wr3OWLCIDIUCpV5MXRydF0Os+LQiotlORSKqmEVKenF7P54ujk5GhxUk2neVlJpZNmaJJ3AyLGOAnBOGdcchJjHe6w++2w9y49mfwEI4FACLuXpRkKAMgYH58HQIgIMfXNfNqrPXBg439DeAh7jmyIdG0faMd7BNiNZ//oARLF3VlARAgQDzPasG86jD5E7xKzZbDWWGsD1L3947ffb5ohAPNMfBJxGg1CODhijCZ6Zw14zxDQDeCNZphJxjESgBt6zlnb1UJwJsR0NlP5ZL3efri8ImI3y9W7yz6rChDZqw/Xzseu65Xg0dvgei35enU3m1XGuGZrCN18VuVZxiBgjN5ZKfikqsqq4kJ6HyKSEMJF8kCMcal0CN70HUQ3rcr5tIzObtcrM4DzlogQWIhQZTzpuVhnrTN5nk3KIq19Owxm6BFQSilYyofQG1NVE+8dAAxD75ybzaeJz8W5cNZtt/V8Pu+63lrDOc9zYZ0j4s77um0iUjWZLhbHr998t9lslVRnp2dCiOvrazMMSqnLu7vr29vF8elkNs3z/N27t5xxIcQwdATAODHiWZHPFsd3q83dqp5Os0xnVVkwJAw+eI8Qt+vNarkkxuqmXa5W7TBcXd8A40plT6dyuVr5GIXKNk3dtt3FxTkRtW3d98N6Ww/GE1fffP9TVkyzonp3vTLG5UXZD2Y2X1ycXbz86ZXzgREaYyZVldqriKjvusVi0RuDHwupJxbbA8WDEe/xe6DxwZI7dHtjvxYAhOAh5RAxhjiu7V3PM2DEGENwztqmrtu25YwTIKtmCziYh5BGk49tYYcwTtwNntmt78N29TFCfxyxcsGLoqjKCWOs74a6rp2xiOidzTMdQogxeG8nk0mM3jk3OBiHOe8KfN4H75USzhpCIIhE4cWzZ86a1WoVHArOykwuJuW0UOitMwNDEkpxJpFgMLZt6ggoJYMYlWKDdWm6XEDsB7Op6+26XswmELzgLHh3dnZxfbts+m4wxvsoJEcAJGCc+qFrmzo4u5jNCGJ0jiCWWTafTHKlAOD69na73QLGLMswhTOMOOcuBsaI73MOJBRSaJ1FkACABFJJrbVWKsu01rkuC8bV4ELdtG0XbQghoPOe7/ntoy9JR0InDq1SMkTORSKQUiFiwpmJmJQykd9GtzeaXSHZjmztXAhABIgU9qKdye3tI/3Uf3nfnPeAczE+OZ4qESEjjEEKRsFPCn12tCgkz6WE4LSUSt5P9cM0mgp5+m1E9D41dAZALMpSaZmVWVVV1bSaTCbFpCyqclJMirzK8oIrSZwDMkjsWc52IlGExBnjknFBRAw+klMZl+5ho+rH+dbDac6Pt2h8VBF/AFrCp3CR/V37tLbn4/+m1/iPJeDHc3hUYhzPGAAgjF0lMQ3GDSmasc67AJvB/umb7+reRty5vQdfGfbozgOfF2P0CBgjRyTwHIIiKiSXjDIpoxu8MxBjhCiEyPK8qioS+ds376az2fXt8v37enpUtNZ3LqyagRizzkopvDXOQaGha5uyLDnjEE0IQTJiFAVBnslMK9iLgkolGBPWu74fpkdnERnnIs+zTCuMjtBLQozh+Gghlbi6umksIEShsn4YeHRS6Qixa9sQrJJcaTkpC86Y93YwfXQ+DXBOncGKM6WUlIIxNgz9MAyMk1JKa53n+atXLznnSsvj4+Pr6yvnHOORgLTWhKzrhm4wjImynDgfPnz44Kw/PT09Pj6+vr7uu05KebttI5KQ4vz8fLVcKSU
5YVHkt9fXUqv5bGa929RdQCIhJ/MjFntGrOva1d3yaD6TxBmjN6/fKKW0ziNCBGp74wFtCNV0cqpjCrS5UDrP6qZ9+vRJ13WEcbVaC6k/XN48efbZf/x//b8ns6P1tgauI8S2aQGg67p//Od/+sPvf5+klbuum02m49rou+7o6Khuu8OdNWJ4h9WZca2GvVbs33B7h0DgaFtoN6WPIWLS5Aze7Yw2EhEE71NtD5EBRj5CTIdUzAe9qI83LTxCY8b8dAxIkyMsJxUApOaEoTMhBC2kUsoMnRBiGPoQgrVm9Kxt18PHyBhjbCfyQsQZgUNnbKqrMYwkJIA3xnRdZ60m71POyogNJvTWtG3bDX2mNIBIKBqRTSdsrXPWUgStuFIKo59MJozAe9+2PQK5CIxACAGEidNhjAneK5UTEcRABEKpLMsSbcQOJlEhiLEYI+c8LzRQBABvIF0H4ByJCy7SraqqKsbYDx3nXElOTAglRVZ0JmaZms0m5e1m3W6j381HD2EYbeJIExmbTEZg874m5ONOQ3lfhjyE1x8TLlJSGIIPIRKlgR27Kn1qxrgvYkUKIUCkXdIDDIERMgREQEJGSIz2M4aIIzCI95UhRpQIoodJTPpe/LANAxCB7eaQY2SMBQwh+m3bcCTkyIkzRpzHEFwIwZnIOEZijDggIktj1ERM8B4CIkPiwCgAAFCKQtLlSpzNXZckRNr37d33Hu1l2+Bj3O+Tydyhe3i8ez8J0cRHza8PssPHWw8fIcnj8/HjJvdH/hsRAYk8EcVDItK9xvShifmkNXh8ACDnnLxxg5MYyzzPBXF0CGHoB0DfN3U1mybKg1Jq3RgAatv+9rYeLFRc+qFvu74sSq7zbhhS32QA8BCB4bZuy6KQOnddvV6vtYCcT2JUic3nrN1sNllZFEXhIN7eLtf1kGsFxEIIkmOVSwLXdUMucDKZuwA3d+vf//V9HyGEgMC29aB1HsLgHIQQ3l9eOtcXv/paS5HnOSPwvanrOs+yTGZKiYJj13VVVWitY/RN07x//55zfnp6AgApGN1sNkVRVNV0u93eXd8sjk+Z9zGGLMtcZ1arlbHxs88+u7y8qrdt27az2UwIgYwxIY5PTvu+X29r50Lb1lVV1XWdSw5uAKLjo8X1cr1ev8nKyfnZedO1q5tNw+o3r19KYifzqT46YZyVZVkUxWCc1vo8LzcvXy6yxe16jcRvbm6OTk5MiKvVSlfF6elpmq+UZ8Jay2SWmJanp6dXV1frpp9dfCGEjHlMtIx3794lgZUQvBCiaRrOuVaqbVvOabNZjZnc4bjp0WntRkbvhSoZY8F9Guc8zMEOV3uq4aX4OgLF/bP7gt1HOVsIgcABwEcmEj6GLx4/+BuO0BiTsC/Yj1TOsqwoijRgYrVapQm5CV7bbDb7ixXGsRTpDZOW/6h7O9axDmqeIdFBY9yN4U6f3vd9OoHkb9iel5+I+4ceXQghlYIQ08A5KeX5+XlyXZPJpMgr413TtenTy7JM47Wk3o0UKMvy5OTk0MdzQQAwqnaNTiWp++d5nuQBRz0aqXiWqaRlkDZ/qqcOw9B2ddd1wzAwjkWeT6fTsiw5Z+OleNCrPuZY/f4YuxfSrUyElHR63u9A0bEF828Y0CSgPooS7CktdDhv4dD0Pz5or9hy6NLGVfjgp2nGXvqtVCi9VylL7YCRksD3ZDIpikpKSYIjot+NNUcSnCs5mcyS6spisZjMplmWmO4opBQqzQ4STArOeYqAHq/wtD3GNHrEGB/b+sMv/kkXhT+DcD5yPx8p4xxixQ8qZw/O4UGRHz/GIQ/f+bGHhn2ae/hxD0710ZL46LsfWoPxcVonSZQy8foYwxBCdL7vewxxJ0TCeJ7nUsrttlFK/fEPb1YNMAbv3i0hUjfYveIrz7IsyyQBWGvbNjZNu58ICMa7ZMHSNk/LI22lpK+rtf7Dn/784eq67bvtdrteL733ueJKssWkoGAzJb764vMvv3zKkfq
hl1I6gAiwwzkY3d25169v67qOMWZZVlVVAsOMMYyx9BXST6WUVVUppbbbbSp0NU3zxRdfIOIwDJeXlycnJ4i42Wy8sV3XtG3LOc+kGoZhvVyVZfmLX/ziiy++iDG2bZuGb5dlef702WpbF0Xx9u3bi4uLVz+9ZAR92/jgri7fD8OQ5/lscfTss8/zsvj//uf/8uHDh6bprq7M1VX3ww8/vH37dhiGzz77jHOeRmOm4XlS6aS1tFwu0xZ7//79crmcTqdptOR2u6sIpsrlv/t3/65pGufchw8f2rb94osv8jx/+vTp73//+1TPM8YopVIFMZXJtNZ3d3eHqyvVjM2eEJR2etrm4WAq3s8dn9w+6d3Mx8cwDKlaNxJq9sZ5X4GeHZ2MsmlZlimlvPfJh8V43zCPo+hiCMF7jMCJMSSIiRAWEIAQBeOcGCdGgNGH4Py27kMAzgVjHKIPwQcIxEhK7mOIEYz15WSWlxMucxtoMIP3HjBwwThF7030A0TnXD+pqr7vhM4m09nx+ZPO+bu6vbJta/uubXPOn8xmGiKZrtQMhR1svenXTd9GQAKBLmOYSxEF1xRjW9dtvcEYRC5ISk8QhAadR1WocrGqu007WBcnhY4xtkMfAXWRo+DtMKybWmZZP/TWOS3FpJgoKW3XtXWzNrX1NisK4gylRCGMC+V0bnxwPnLGBbHgvGSs1EoSDrLqrAHCrCgiUt0MNgShCq1zF4K1ztqh7Zuh9yE4yZ1BBEZAGCCG4EIa6U1gneGcEbEQAgBxLmjPRZFSxwjDYGKEUfoorULGSQjug/M+MIaTadX3IYZEvOSIzLkwNqXFuMt1duA2AWMUMTLOuOCMExLuOscwSiWI0V4hMo2dIGKk8nxSVVIwJfBoUpweVdMqC7YVnDgnwQVjIiCGyLhQeTZ1EiiTosxYrqPknhFxyXWGQiCXxAQywYQWKte60tkEtAiCO0ILMRBGlgZZABEhICfiiCxGFiMLQUD0FhkyQhYDBBeCC4SMExdcEjIEwn2VjoAYMhAMGKVbEBECwq6MwAjT8+OPIPoY0hhzZBjTMBJM/X0MkJAIgCEyQII05hIIKMS9aLyDGDENq6LIKBJG4sh5YCzQ+IcC4vgnEqXZeknL/PBPQPQABEDEk869C865AcAxQg/Gx+ABTaRlO/y3P/9QWyJVDBFjyohHwc8YAMA7BzFiDPcFxOAxBCFFGIahWeUUTuc6wwHNWtEQQzcpBCnkKhNFOT9/YVC9v6t/2Phv3l5dtr3jBJOS5fnGWOstY5HAlop1zQYJHYT56cWAZNteKMWz3IZQt2Zw0ZMcPJmAyLQnMjYAoFIqUzrXqprM3r998/LlT1yo+eIkAN/2PqAIXKPI1uvl8+dPfvzm92CGeQbcmtZD1/bFNCsm1dVd0zgIDFedP336VTU/bQezbdoizwijHepJpTxSMa2artM6I8E48ePj0w9vPnDGnlw8JeRDbwHJGr9cbSeT+ab2PkpElWeTTGbRedc1fmjQm2kug+/++Id//d3f/6aaFL//8x//7u9/oyhevX2lBXOme/7s2d3tze1qnVdVABZJfvHVL3U+QWTff//jTz+9NMYqUcyPn7x69y4STE/OF6endb2ZlNm80tv1zdXlm8XxUe/Agvjxw3LrSKqwGfwv/v63Hy6vjo4XR/P5u1dvCl0Ila823burFbBssjjtBmfMsFkvlRDN6ta27ZfPX1CI3abt2laQZKgYioj8brlpO5sX08i49VBVRdu2aZKw1tpam/47chEOID3cxdqPEM59BM/GOs4YLrfGcqmEkERMcNJKCoTobK6kIgJnMYTow2q5Xt6uYoyAPAKxyfxodGmpcyK5vU+mfbgvTqba3shzHSP3Q6+eqgUR2f132L8JIhZ5RkTOWueclCIJ3qeeSiJCSkJcDlINPkSdqUxnzlrOeabVYrHo+rZt28YaDqQgzDJ1PptmghF5qYXxbvCut24w1tqAkSSXgkvrtiFE530
32N5aHwGIYpqBByil9i64GN69fb+6W/WDYeAWi4V1FhHPL85D8Lc3N3mm57OpINSCz6rJ0XRaZDpY17VtF0yMsaqqoiiKskjTpLIsgwNYEglTFsg5j7rie4F2ay0RxhC7ts/LKtN5VlVcaB/CYDtnffQQhIB9cCT4rv0/BbwISeBuL/OIEELgu0lgwVoHADvJmn32fHBndw3R1vpPFqIep3QjDnlY1RtXwmP+xZ5qzCRnHCPDUGm5mBS5FAiBA3AiTgwAA0SISIwYceR48OvIGOPpE9I/dN9RxxlPYzFG+swI/44sU/gEOMlgP8hidOqPs5/7gz2sX/5cKWJ36eBQ1vIwD9vJBTzIydLAjf20rvQOCAC0q6On/qT74PfnZF0+WY9ARAgBABNfzHkfg0eIGONgOmucsb73cVm3f/jzt83gkfGkoZoIvvv3OfjMuKvBO+eStpIJHqLD4BTDUjJBUXGoiiw460OIQHlezo9Os7ys2+765vb19Wq73RrrCTESWmP7wYQAjKUxT9H0QwzB2liVWdd1trVK8rLMyzwnAO8MY6xQKi+0GXo7GCk4MXLWAkBZliRzRGzb5u7ubhj6oigm1URIua03TdO8+Pzz3/+33/+f/v3/+aeXPyhF1UQ4YN6nab3ovHEWGIF3dlKW00lZ5hmDgOApBiWYVlLrnDEWfJBcIAInppRq66YfurIsE1WCC56Wl5SyadsYY5ZpzvnQ94jIGNV1baxLwCYgeR+yvKjbzlp7enK63W6TrUhl31ev3lxcPDk+PRusPzo5/XB1va2bD1fX1WR6dX0tGB4fHV3fXBOjMtdVmeeSedMThuCtVHrTdNu66z101jKmzqds6Ifz8/Miy969fS+lIEDkPEZYreu67crJ4vLm9ujk7KefXgml37y/PD4+9i60bXd0dMwZX63XjPEQYuoOGtN0wJjs/KiNMlo5OJAvxwMGABE5d4/8xQPC1Cjy+WBJ0356QzJiO/OKaK1J6izpfbqu69oOYSdUTYdvFA5E/ccGndHPHe7tx3DKA1jG78VgHlqB/ZGa4kfjm3xt4lmM7QHOuVEOJt349ClZlnnvrfEQCSIhshix682mbn1ExqXz0RgDgAmLAx+sGZwzEUJ0HiOkN6yqqixzrbXcQ3nGeQ9orLc+9s6bAHXnSMiIaL3jSqYimRAKAIQQZTlJEKjWGolciCPNNSF1CcAMISSoU2rFpRhxQimlVnIxm5Z5FpyN3uVaCyGaZru8u+m7ThCbTsr5dFppzRC8h+TkIDXxMcGYiAGt8bsuunthcYweoodEKtuDzzuEc3wl7NV90kVOC/QBqvZghYwtB4de7dAX7iYtfFyAvMcJfRhRhLgTP7snEj8ADyMC27kzPm6ncZiDEEIIlWTmkv4L0EOi44ijPnZO4wclDCQhw59E/D65gP9NPu/RNjn0vp+kq3zysn+y5ve3j58759GaJLmlB/f0fmotfhTyPjA38aC/ZVwJqfsYo0+DO5Lo0p4YRdPpVGutVV5UU6n1tm3fvHv/w0+vttttjDHLZNrg+7ciQBYDeh+diyHEuCvGWxthuW3WTU9STY+Os6JyPjb9sK3bTd22fdcNxhjX9abturbr8kx9+cVnX3z2Ijj75vXL5d0NZ1gVpRBiPp9fXV397ne/67rm//7/+L+1bY8xVGWeaS4Zl4xLLjhB8ND37t27d3d3d1LK6XQOQCEClxqJ0z7ASgjZblEqtV6vb29vE4lfa82FSJnKdDrtui6BgaMlzPM8hJAmjP/2t79NZvDi4uL6+vr6+jqlIpzzENxvfvObhKyWZU4Qri7fX75/h0kumPMsU72xLkSpM6myuu1ubpfA+N1q0zb9ZDL57LPPgjWcYQxuUhYYvXfx8vLy9nb54sVnt7e3b9++l1mOiH1vtk2n87Ltu3fv3nkfq2p6fvbk7Ox0tVqFEJRSl5eXMcYXL17A/u6kAhPnZO2QVDtwX9sbBfTh4xExDzbOGLAebuRxg8D
H4XiytOOnW2tHMmYqBo3c0UPh/r/l9n5uw3zScOBBuQgPCOujiz480dEeJdcIe5LFfev3HvCNexW05L3GL5yS5ZSScqZiwMH4bd0tt1sTQmSiH2w/GABQShU6k5IDBO+tcz1jjHHknAvBtBJaa8l3FG3jwmbbGOutR5IqkAjEkaDrewSGSE3TeRezokTEhIznea4yDYDOhsHtlOUOdEx2F3qUG0/XZPTfAEDBIwTOUDAuOCkhikyVRba+W66Xd11bg7OKUaZlLoETeGMxRHbfPR3GjgLvYwiAyA5ZCSnVc86lfA7u2XcjAxhwL6zwwO3Bo9rV4QrBfR3usW7WmF0deqD7nCbed4iOJ5l6GyLt80Vx7+0+eRy6CnxUfAp7wbzxuzzYQuN+G78R7OXB8GNd9X/j8di3/f/jO//GOz9we/BvPg63rb/v19wRxyHS+PUD3MvN/Jzno08diIgQCEAy0lIlAXQiChGFzoTOVKalVF03vH93+e7du7s7n4x+ih2LoiiKIj0QQhBnABBwd+oRWQwYETYG3l3eXS83JPXi9KKoqmYw7z5cOx+RicGa3lgpJQDd3Nwtl0shxOeff/71l18qpa4/XL5//75tts8uztNYAylYptXJYv5//B//abUcvB0Yw6rMJ5Oq0DqBagxgvbp7+/btZrNhgsf9+EnOJQC4EALEdui7rku6UsmC1XW92WwGa+MelPPen56eCiGurq4SzyWVwRaLRVVVXdfVdb1YLE5PT7uuk1L2fb9cLufzed/3m80GAE5OTr786vO+79ttLYR4/fp1Xdc7ctBmOZ1OXQjbuo2IOi+6wX24ugkRgdimqRFZnueLxWIxmwqMpm05eGvtZtvc3i6JiaIob65vY4wQcbnadF2vdb5ebZwLb9++O3/yrCwnX331lfeeGBRFcXt7fXl5KaWczWZZluFe2wv28Anso+pxr415zmE8Cge53WMqwOMFf+ho6KC1FPYEiGRSxD7UGBke6a4hIqtmC/p4sivsWTeHMWncU0vwQGR5/NSRHPh4S0c8wJfgvmdZK9n3vRkGxpjWKqUaxhgkklIikjUOAZTSgnOIyBgLwZth4JyfnZ6kcisAkMicsbbrKfhJpo5mU52pfugGa5ngQihAci7EEAmRESlJyMj7MBg7GGu8Ny4Y6wYbNnWzXtdcaiDWdP3Nau0CTjSPQFmR50VhnSEAnSk79IxgMa0W05mW3Ds79H3ft3awssw4F0VZMcaTmzPecyGJWAiRC7GDoYXkSvkYPXBrBmsMADACIOSMF0XedX2mc51lQiggiiE4O8Rg1n1kDCSXnPEY0Flv7a4tYRQMoF3fGBAyqVgiuaQEdB+AJ37QLpThnKWk0BiTGj0fpybwcZqyS7CI2D46e5y13AObB4cUQnAmOWlOVSZnVV7lWgnGERkhI0aMIxHnkgtBQgjJWRLJJ2LEGOPEiTjDiAgEBLgTZGBpgxw24e1a6AB3fXWfet4YN7rewxr4WEV4eLCfFZI4dFH3Qd5e6CTt2YOrlNoTP9q0iBhC2pm7cQqI6TyB7ar9qUX9AOT5mTlk8ef4NSG5vRgDxpC08ByE4JwFIOtj25ubTfPnb37sfAC6BzkPvOzuchERHjCtnLXJIHKIBKHSfDEtCi0lQ8nJORsDCqmB+HLTvr+6Wdc94yCKYico6mNAQEAfoneRcRZj9CFaYwUjgFhVVdt1PoCP0QTwwyCVnFaVFMLawfSdUFIKTkRCMCkVMQ6Iw9DH4JXkk7JEiJv1qt5urTFaa4xxsVjcXt+cnZ31bferX/7i5vrt1bJ1xkgp8jxHpOhtDN5aQPSp52oxnXJCawbOSErFGBljCNEaG4PPdEaILkHHiMR5lmVccES03rVdN5vNhRCXlx/W6/Xp6YnWum0brXUEDCH0xhDjs/litV4HgPfv32ulfvnLXyyXy7u7WyUVIhVl+fr12xDj2cXTdx8u75brajrjUr17/14KZZKgcfB5Xjhvu6Z9/vR
iWuab5S0RYIRqOiUultvNy1evs1wvqqxtu0k1XcznRZ7f3N5Oq4lS2Xff/yRVNhjvA92um64z50+ebeumN0MS4hh6G0LYbpq26xaLIyEkAPidF4jJDDFGMd67ut06cS6EkDRoHrg9AEgEgjFsHTM2OlAmw31mla5wMkdSSik4IiaQkzFSQgJE79wwDHVdD0NPu+0P90ooh7v3cM8/iPcPPXP8WDNpdMsjyJlcPXxcXdhjuLufJss4NsinX0zJ8qjiQTtNyJD6f6WUbdsmPVOtcwQGyGzEu7q9q9vOBY88ECULLhjPM5VrSQyc78dEIeWRudJKSCGEGYah7TabTW9sa5xH7iKLTCAJa52QOssKZ30klsqQQogsy3SRA+JgjHGWMaaLPM/zotiV9NI9S/5mTPXk/kh2VkvGIQqGVZEVWQbBQ/RaqvmkkpKDd4LFxaQ8Pz46nk8mOdccFHFJCDF6Z/a4OUNkB4aJxvs4ah/s8N49pT75PADgnO0vr3twuw//fhDXj7d1XIXj82MIBR9nYPdRVAQIcfSdhwHdfTyIGOneAT/I1fCgeWNMoB88fpx4PU4ED/O8QyT2v5uN/ZzPe/Aa/BmQ83EWNf73MHp4TH/95K78txyHHx3gwddkAe4F5Qfrh2FItulvgJxJjGPk9wohEl15MZtURSb3t4Mx5rxvexMiMsGJS2PcZrPdbrfegxDMOefs0LdN0267pk2svCTzEXZzN8GnYmSM3nvjkUstpaytf3918/7mprGWqayYzpq2u11vfAQfcb2tB+tmi6OiKPq226zWUsrnz56dn58jhLvb6z/+/r++eP5ss7w7Wszq7XpSFXbo/pf/+X9+8fRcSVZvtm29kYLNppXW0kUwBuq6vrq6Wm+3eVGWk6kPcTA2wWsJ2IQkiuSs1rooijGESrCQ1noYhvV6fXR0tFgsbm5uXr58mX636zrv/fHxcVEU79+/jzGmuklZlt9//6Zt20RgjjG+evnjpCi9dZeXl0qp6XR6e7v9/vvvj4/mkvG2bQlZ3TRaZREwIuucXW1rnU8icDO4uq45wcnx4nQxVwxZdFpneV40TXt5eX12/mQxPx4Gy4Wqt81suri7W5bFZLlc1W13fX3ddV3XdVmmj4+POae2bVPX/pu3r2LcdyDsDU5atGmE6mEJDA7GmzxYww8QpsONP/q8cV+MjmZMw8b07LCdNHkW7z3CvfTSLtuDPRA3lgcfUB5GewE7dU3/uKjz6X1IBwbosMEQwRgDO24hT/R9732SLk0BfKqYx+Bxb0AwRsHFxcXF5eXVer1RSnNZNHUdvcfonR2k4NWkYJxFCIkVlySZU8NDiDFxOnay3VwwzpEIgDbbxvgw2DhbHDuPTW/vlmuhdSWwyCtirOta6y1nxAiU4LNJdbKYF1kWzGD7ngAZ44QsoEdErbX33gZPjCUcHBn5EEYSJhciVTelzqIPzjsIcceRCzGEmJe5sa7reucc50RI1gzODG2QyUtZ42IIAISEO+Vov6NMpovFAAXnPu7Q4NRBkTL9cSWNvictDkRE4p809A9W4b1feWSjxzL1Aw+afksKgQAMY8ZpUuhprjMlBIEkIkJGxLngnHMuuRRMCBIciXZfCpMv50QsQABEgjQxHIgREkMiwe/X2wN7HT+WFNkXOO/BlnFrHTrjh1HdzziawxDh8AEdvP4w24NdFvW4Gy+VQxgRpRaDxCDZza0ERMR48HH/FkrLRz4vxB35GhJTJYQQIHjvbEAcrN80/c1q++fvf7KBEVd2Pzjs4K0DACTdRdpX9YQQqb9H8ADB2b5F7yXHYJ3t2+hdkRdCSEBmXGz7YTDRRRciABPIWKKtRSTGOCABRMZ4CsVCCEnVr6rKpm1t4EAMKQkpRIiABJxTkWcxAmdM6gyRjLXImFJa7SBxCM5JzidVIQQNxlpjpZBSq+B9kefNtnbOHh0d1UPomrqpa+8tQtypIIG1FopceOdjDJOqVEoF74iIMejarip
LRiwEz4m893mW++Dd3tz74FWmE+yZxCK0Vl3X3dxcA0BVlTFG50Oe58Y563wERCLrvRDi3at3Za4xQqa0j8H7cHJ6tlyttm1jrZvPj96+fdX1w/PPPm+7vm5rD9yHeHp2XretjxFDzLQ6PTlyps+0YAzv7u4uLp4A4aZpnLUXT560deusm5TVbD6HiG3bTqfzy6ubxclZ3Q5M6XcfLgFFPxgiLjXfbmshBCKt15ssy4uivL6+QSDOmZAiybAk+dckNkJ77tuY6iFiGkn/wD4k0/LYhoz/HdGjEX0MgGO8HmNwzoVUPkRkRN67JErWNI13XinlfAAAVk7nKbcbz2Z0rfFRz9B49g/4e4fHSJ/bOX84+G6HPAgEa23ywJyzhHCG1CYdY+Ls7cbQBM92PD4kBCHE+fnZhw8fNptNnueRVFs3iVDWNbUQNJtNskwjYYwhOucTmQeS7/OwU/VCwEhEgCz4aFy4XS29B+P8ZH7UGbttu7vVRupsIvlivqjr+ur6khgxBMRwvJiXRTapSi2E7QdrLWMkGI8APloiSs0xESDhilrrCJBC13TDUtkfEYVQ3js7DNbYCMCI+RiG3szm82bb3t3dDcOQhvxBiBB8Y3n0oe964wLEyPnOUiCwNP1rt0QipCDG+sFaT4Ra61R4T+4tVRqIKE16TEOFGCNEdrgc76OXj3ki4633+6r1g+jnwZuMvyWlpBgYBC1ZlatpkWnJOcZMCMaIM85FotwqoSTnYufqdozHvfdF3NntpMKM98Vq9jE75sERPj5ijJzJMU89pCj/XNks0qf93oMQ4f55ONQ5un9lcoGHKtKwy/biodsD2BMnD8KPePCJ4d9W3rs/pZh0qPfN7CHN0PAhRkS0Pmya/nbbffvDaxeJS2XCzj8eXJAIAHmeK6WkuEeG0+i+drvs2qbZ1tEZyRjDyIiKPHMuhAg+RO8jkOBSEDLvIymZ60wrxTgTQgoplZRcyuCdd8EHG0PknBFiVZVd10XSxg7OOy1lUWSMEGLgCEqpsizyPAveO++ZEIS0qWvJaDabKcHX67UxZjqdKC37rs3y4rsfvjs9PT06OvLOCSkIAQmRqbZuts3aGNt1g/cuy3RZTeq6mU6KtmnqzXo2qSaTCUBExjG6tmvTfINUqnDWFXkRgk8GbbBmsENRFFmWGWOkVHVdLxbzLMu+/+67pmmePLkoy9J5770frJ3NF+vNNgL0xpydnS2mVQih67oQwu3dHee8KPJ+sEVRvn777u9++7uuH7rBTmZzY822blxAxtiTp8+vr29CjMRIMDw7OgI/aCk4wTfffPPF118hY13XX199ePb887vVqmvb45MTwViR58vlqionrTGcy7ysrm6WPmDddi6ELMt1rjhnl5eX/dA/e/rM+3h9fXNycrLerKpyUlYVInrvkpkAgPQYEVNaPLZoP2YDJBNhrH1cpE/g3yGkNHYwS63TSt4FNyHQ/rFgPLm9ruuapokxCCF8cnuzxfFY8w978WylVNd14zmNRezkIA9t3LjoiT5ix4yPmVCJ4cMYS6qbKeVP2V7wQUrJODuYAbh7NyFkDHEYjDMWAMuydMZt1usXLz67u7tt2zZtZmNZ8GlsimEcmKAs02VZBmdjiF1TD11f5HmmdYAACNakGI0440JIxtlgbNt2MULbG2SCCYXEN03rgSLSL549WS5XPoYYvNJqMO2zJ+dPzs8Ep0lZKM6NGawZCIgzjkgggBjP8kJI1TRt4iK3bRcQs7zQOgshGuucD1zIspqE4CFCPwxd1yOxsqwiYtv2LFleQudc13Zt24bglBJD0F1TN/VACELxGMGHSEjW+RADI0ZEzvYIoLVihJ3pYtx1zVtrt9smRsgyPQxDQlz3eXZq92Kp/+Ew/jpkVY0x1whhxY9t62M08hC+izEyJCk4QVAMjxfTo2mZcSYJROpuEJyIESMkTozFkR0lOReCGCPAhAHsrjYB7vxDmgifPppGUc1UYU0JSSrvJXX
NVF6VMumjfhRvjiHgg6+z24j4kSM59O6Hu/SeEwRxdKiwl42OMe6qevGj8nkIQSoBADucZg8Pxxj3nQ73I9TjvoHhQZDxAFiGA/Q4xohAIQTnQxKjjzH4YINzUojtpiauOuNevrv85rufeF7WnSHB06VM7885V1JorVMx2BqT8ANrbd91TdM405t+AB84AgbPMWZKKcG10gDoA+zkQIEYMaUkcB689d4TYEw1Rh9Shaep6wiBIzAEa/3J6eL66s5GSp0s3hg79AyDlIIREEKe6b5tVaZ98PW2ns4XXErf9UppIaWxpu9a57yScjqdd32LRIMxRVkprbd1E0KYTSZZOX3/5u18Mbu5vu76yAVyqYJ31pkiz7q2W61C29z93d/9HTFunVtev8+yrKmbPMuctYzIWVdWVQyhKAofQ9/3RVlkWRYQykm1vF2enJxYa2az2c3NNWPs6upyOp3O5vPr62sgmkxnw2C+/+GH3tjZbDY0dZFnCbmvt822aRaLBRKvm3ZTNxHo6bPn3//48tmLz25ub5ttw7kOMVbVxHo3GOO9s0P/4tmTqtQYg3e2Kov37z88efrEh8C5MBZW6+VmvZpOp0+fXCDSpJpwzuuue//+kgn9+z/++eL58763q02dlxPnhzzPN5vt7e1tkZd5Xvb90LbtZDJZrZZt185m06Iotttt2zWpHjSSPGOMI9lynO1wuD5D6q7Zl7oOOwhGDDIdaSkKIez+rfq+d84qpaQQzrmkQZ2m+nRdZ60lwF3CA8Cmi+PxrUe3edjRHD5WgnhAYaeP2aWHSevuAeP3wBHs/LZSyjvrnCMkKSURps2TzMnoX0MCbZNWpOBJa2uxWHRdW9e1tVZKGVEEHyAVxYOLELVWk7LQWnFAO5iQQjlGIQYiMoNJl5tzIQVnRM4F5xxjwjo/GGdd7M3QG4dEUqqMOAIxzlbrpTUDETx/8uTkeE4YBaPovel7by1jTHDBpWB8NzgxFSyTWg0AjPlvjHEwJmlVlGXpnEus/v+dtv9qdiTL0kSxtbZyCTiAoyIiIyMys6q6WvdMT/Pa3KHR+KP5wCc+0Gj3krwztJme7ukulVWZkaGPgHK15eLDBvw4xImqpnALC8NxOFztvfbS37fjKecckDMujPFJkrgQVquls+5ivqiqkgP2Xhpr+662DgAC4wyR+0CMY/AkOI8AdIKhlAJD6KyOFSuMMWutMRYAhOA4ahHd2z3IGAv77NI4znBk6xx5dWMXcNg/DHpMYcZyXKUUhSA4E0Cp4ovZZFZmqWCKo+RccMYj5o0UXComBWc7Z4KLve7Zg0BzIdje/+M4vjeAE01w1jPbP9qT3tt42j+qEHbs0uHTCTwA4GwcJWajF8gHT27s8DH+6BeOS1podIUxqxAgnH3GYeDwEJyJMxEII11UVJsxBAKBtNbARN3Znz5+/v6HtyAz47xMk3jf+7YEiUDe+77vtdZG62h0e+9jD65zOrhAEASDRPBEijRJ0kTFp2TR/pAqTdMsz6dlofJUSRm87bu+aVvnnUrkpJh0fWeN5ggIwAUTDGfVbP2w8iyOe0AggaSUyJRQQsQqYOds13bOuTQvkiw3zoPuhRBSKs55oOCsRcaTRFlnKZBxlgKoJJkUBSBbr9fe0+Ji8etf/SrPss1mu9mSEDSZVl2vV+tmPp8C6c3a/+IXr7MsbZtOgGGMUQhSCGTQ99oYLTjngksh15t13/dxjJI0ZYwFFwBASiGldNbWdW2tAYAsz/u+77SWKimKUhtTt50QggPc3t4aY2azGTJutU7StKrmndY/vXv/7/7+H/6f//m//P0//E+/+9331Wy23daegDxNqkpr2/ddCAGBXtzceKtnk6LermOYgSGbzqqubbetadp6WpaXFxflpJyWEykEIazWm+226bTV1i3Xm9nF1cePn56/+Gq2mNR1HZeRru1jBidC+BIRjjilAcFay0dF3TGzEFeGGFZk7BjhlrGD4rij3N6RuBERchH
jpSEExlAIwQC99wyRIyMK1pgIIgPR8h737R1lCwFgYE4fNNzYmIXDAoEvR1fGq+SQR7HWxocfOhl2URdgnEtEboyzxjHknMv41WOLNxERRROegg/kmeAyy4OU2948bNuHukUmmUiVSAQqb4MzjgEmSeIC9cbG7h7vPRIoTgnHSSpneZor4U3bdw2SV5JLjsY6S2A99drb4ONr6fu+KDPv/bau265znnwAEDLJ8jzPsyxjnEMExFKKGALfDTByJhOFiFrryF8PEABAKRWrYLquM8YJIZumaXodS2cJvLfamp68nU6yy9mkmmaKAwVABMT9KGCIZ0MgzlFyZEiRZihm9Xb5GAbe+11l+d50ijqLRiiOY+Uxnn9Hzg2OYu5HiesjZ3H3lfORxQMA+I4U4NF+GofHYyterO9EJgAj5qeAR8xZxphgTMAeHoXYQQXjWGmNIxZHt3R2G0/yI114rAhPOj3GGvHIVhi/xrErNl4aTrtHxm7coxP4J8jd0Q0P3irGWCry8VDGrHPMHANiRCiN5nmM18UjI+xWhNCLzWpDDx8RAeMEjACByYDME9gQXGDW2kDIkXHOE84zKaZ5sphOMsmrIrtaVBfzyWySlWmaSsFZQLIs9tV7CM5Ljs70AIAUyPvgfAjeedK67/q+1f1626y3G+J8VTf369oSbLveAXrA1Xq7XG0AWFFMlEqMMV2nF4vLm5sbIcSbN29++OEH59x0Oi3yXCAtqvIX374SDF5/faMdUHA7fY/R6Ickg7bXD6sNE3I6q+J7jp1zsVB+u90iYppncfb21jysVwEIOZtMJtFi8N7f3NwkSUJEy+Xy4eEh8rItl0tr7eXlZZqmnz59EkL0fR9Xv/lsen19HULI81QINi3yzerBO5OnCee8bdtZNWEIDMlZLVWEwOWeoO665XqNMrGBrHfI2adPn6bT6ayacMmI6OLiQmv97t07xlFlKiqn3pi63XIhmqaRUmRZwgUmqVytVog4n8+7rovspEmSROAVItpsNr3uijIvJ7nzZizd8FhPx/7o1D0qFhkkYhzqpD3P5VHIZBDeoeDlSF74dLY4ql4ZxHKsjcY+5ljxHt3HqbVLOKIFoEfntO/aWODAOffeDShtgDsOnQj6HFFIvAtpmgrOvbOTyaTv2kFrBkJrDTESqSQK2hrOMBVyVpSFTBLOgQIFb6wlIJlI43dow5xzJSRnjCggYN/3RMwFr10IRIyJgOCcLbOyblqtu+Ds5eV8WhTVJCdvX371TLdtvV2ZricAKUTsS00TFl26GLMFAOtd3/fRf5VKSSljQS0i5nnOGIZAgcADaGPW27Zu2l67Vpu2bUMIZVkmQm63m3qzDcFjMuUMQgid6ayn6PvECCcRMGQcCYJPlUikDMF1zkX4U6211gYRhOBElCTJkKYdnPUQAuBBSnmYDHwPmHkUw9yTOh6zio8RUob12nsvGRecCQZlnlzOJmWmJAPJMZFCcCalFFJxKYRUTMT45Kh3NRYfM0S+61d47Oka7gdoLAYHhuE5moinyjafkkYPBybnUx8eBWGMbzLqBmSMPwYwx32EFFFyYs0VZ4zhztt7TPEd5POe8PZOlffouuD2tkFEuPPBReZLT9hp/9s37396f+tR2EjxGkIsaYkTxkQK3xi+ppHVGz9wDkCcs0QIyRkQMSCJzHsXWZqAyDsXnI256oAkOecyJh1ElmeRzkDrHogi2p1gkGcpAjRNz6QEJA4kgBCBI0iOUrDteuucmVUzY01vDHLRG1tOqwzRGGudj8GVNMsChU53aZqVkykXcrPZbptayqSYlHmeI1LX1JfXNx8+fLTOE/WddpOydM5Ws9nnT0vr4K/+6hfIcLVaPXv2fFZwYw0AaK2r6aRtWs4YAXDGqqrqdY+Ixpm2batZJaVMVVrXdde1eZ4DkNa661pE3GzrxWKRl+Xt3f3t3X05mZbT6WazSaVsmibWAfR9PyknvdHT6cx7arp+tamLybTtDQVcbdf
TadVsNsFTkiYqybZ1wzgi0HwyQaBFNUHyXVMrlWhnFxeXxhiU2XJ1P18slsv77Wb9i5//PEmS1Wr5sNo8LNfGOuMcV1LbUM4qxngAGytx8jz33lFARGZtJOaUgchayzhLkgSQnHPe7Uy6EMKAxjnOmxyJ2MBAAqMoCO1LFk4N1hj9j8sLi7DUsMvwAdGQ27PWAgFjbOftxZOOVeIQih1L+1gvHt3QkRV8tA26d9yrOMhh1BBjrKyx+TnWz8YYrW3TdHXdWuMZCiLqui5VQghGMdqVKlCqNfbzw/JhuXaRYTIrGHCnndY6eJrNL8rpLM0zJaTkKDnmSlZlJiCkkk1SVUqhGAkkAcQpWE/bTdO1WqV5OamQM0Qsy1JKSUR930cPOoQAwAgZIBcyAWBEGN1Wa30IoK0zzhOhlIlKMsalNm69qaPDzRhTMlFpLmRivd+2DQAQMCGTIp/EVlBreqO7rl1y9LOqXMwneYociSAwHl0rYAyIPCAgQiBrTR/7E0II1loi2KmPUfnv2DiikwL6sUo7Cm8e+BBPeyHjkz+qQ8bkPuS7uwe2b9w7BBWL/ly8YuRD2CtpjM4AAUSkyfgvEkFEtgeggz+HzwzF4zFf3E7zZOOHGn84+7xwjppn7N6d/n/Wzzv1Mr/s6sG+X3gY08HzHj0yIwRPFGAPTs9EpA6OUQEiiutMtFMHOxgRYwx/3PXxGAQCBsiRS6FSxhMfwLlgvXtEdQmOvPVW277W7aYqs0xy9JZsn3CcFul8kpdFNptMLhaz6aQocpGlYlZNgHzCQAmecFASs0QVmchSkWdJnueIsF6b1WbLhcry0li/Wm/rpttsG+vIE22bru+NkMmkmhXl5P5haYxdLC6++eab2Wy23W4/ffq0Wm8Fx/lsSt79p//4P7X15h/+/t8Vmdput7FDwzi4uMizLPvNb36zqZtYdj6Er8bdscvlsm3b2NQBALHxPKI5xz50zrnWerFYxKL/9Xr99u3bEEKe5+/fv3///v1sNvuzP/szKZiSXPet7tvoZEdcm8ViEU8SRXu1Wl1dXeV5HrFg+66VgoUQEDhjoul6lWabuq3mC+28B5xMqnfv3jlP06pM01QI5r0zxjDJkkRaa+tmkxdpXW+yLEvStGm2ZVlo2zdNUxSFMWa9XldVJaW8u7uLkJsxCiMkd85tNhtjTFEUNAJ2Hgddvjx78RAB6nQZGf6MeeVxp9w4Zzd0LxzJy8FJYQ9aMRx6FJAZhHCsqJ+yjocTnr2h4eHDIaluHMgoXWzURz88hjGG9jgIsUBGqEht4gmBCa69X222n28fNtsmeBRcSZkwJkIAa31EjohtcwAQnIdAimFVFpM8yROVCBQISFZAyCRr2954J/ZBSO99npdff/0qOnPOuVb3xtq4asdnGZaDGBuJHwawHACIyBQAEGcw45KrRCYqK8pqPismlVKp8xRZCOq6bvou5gittZv1Q981nIWqLCbTQkrOGAjBlBJKKbFjpACE4L01lmKh5r5c6LFBbUSbdzzh8FyEczddTrzAsZkyDtaNT8j2/aRxuRz2DMnFo6tHTOez7tdZt2z87VhOxoGR8fFjzYFPbGd13vhuxx/O6v5hz2n56Gm8ZLji2aemE3CyL3ilg9wd3fkgRONR39/ho+gFZHtI3sewM45KxmHEmjZeNOL0ti5Yj8i4TFKhEmAiEO6MkkAQHATiCApRcFCIznZAVjHMlCjztEgkQjDtNk1kkaaplAJRIOZJ4rQGAPDaO0POATqOxCgABMnw+vqSS1itVkzI+cVlmhdcqrrRt7f3vbGAXFt7v1xvtw1n8uLiKtZV9n2fZsV0OvOEq/V2tVoFZyd5Jhi8eP7sl7/8BQP6y7/8c+dclmUPDw/Pn8+ef/X1jz+9/f7NxlP48PnTer0eRqHpO6UUIca30TRNBBKL8h7LEdq2HXJRzrn5fB6pzKfT6d3d3Xq9jt17P/zww9u3b6fT6WKxiL+NZUR1venqhjGmlIr
WcJIkfd/7YPM0q6pKcc4RbK9jVxIRBaJt00gp275P8yIQOk/VbPHu/cd4QFFkSapms1k5LUIIUkqRCK376bQ0xuR5qk0PDNfrdTz+8vKSiO7v7xFRStn1TcRYiFMo1jo1TRNxrNiYYnp/zBHA5oFC2hVePOIRnq18GWu+MbjKeM6PWyaO1d6pYgt7AtwYdRyvTWdXB/ii9h6vmziKMg36fyCbj2djKLyjAc0r3gzucbCGPvrh521bE3nGwTmnvQPBmVSecLnZrtfbpum8p1QmWZIyJqz1xoWIl8+lgEBW91b33pppmU/StMxVNcnzVLLgg7MModeWc5llhUxSKZM8LyO+QMQR9zs4x8BQCCEGaKLYNC0SFUE9hgB0NCmklGVZKqW01taFPYobQ+BpmpXlNM0K4MwG6nuzBx7bGe99U9ebtelbJcW0yLM04RxjiUr09hhjQoCUXCrOOcRgZrQVoueHiEKIwazhoyglPySqPR3oUy1yZOzjPoh9FIt4XEABcG8jSy4evZBzWi1inOzJUPcaixgQGzKmLHJ+joKxR7c0fkY4yZA9NW+PrMuxQXDW1fuy8jt7/rEhfHTmo3dO+yDP0dB8Qe6edMrh5MOg9xiLJNLGmIC7i8KoWJz2uFMxL37qvwJAQAbImVRSpVwqQhYCWe9j4sdaa6yG4DnDRHApeN80SD5L1KTI8jRJleAIwXsIbk9i4YK3UjBrDBAwIvTgPTgbjPZdG+ptG72x6bTU1vbGdNo2nRYq1c4FYG3bbreN7m3bdw/r1Wq7MS7ML66YUNo4730UWCKy1ndNvVmvnt1c6a7/T//xf/bOvXj2/M9+8bPJZGKMefnypdb69vYBAD59vH3z07v7+/u4TKdpaoxJ0zS+wLwsOr3zxpIkWSwWMYQ2YHR8+vQphFAURSQkiiKzWq0A4Gc/+5mU8re//e3Hjx+FEFVVRb7ymI8HgNXy3tj+9TevrNV5nvVtLYS4u7vLsqzI0yyRBD5TEtnOb+mNtZ4IwAfgUsUlqGkaRN7pdjqdRp4mxtinTx+007FZ3nt/eXUR4UOdM+8/vHXOOmdiIUL0QGJPPQD0fT/EcgACQIg6/sjk+qPeHh0GnAa5GFTg8BXfM1cf5cuPilGO4DZ30sEPET+Ha4+ReWFkGsOhpfllk3OsWofbPVJ74wR+PHjouh9QWnBg3N7HiGP/2eBNE5H1zjmHnCVJotKk7/u6buq6tjY2yWUjBAEWPQ8hOSICeSKSnEnFizSrptOyyDgD5421GpFJkUTA2bIs5/M55/zu7i4CKwy2RvSiYM+dOAxStDiiCzj8iYjRdtsrdY6MGRfDsICMcc6LYpf6nkyqmAav65ox5qxt27ptW0RK0zRJdgrG+V3RQcQEii2xSfKIyTkeCLYHk2OHTXjjtfJorE/9hqMBHY91NOjGHJJuvx05hWIfKzsylc5OqvHcHfudeOiAjhf6sdI9UgDw/8VG52KbR+GXo6scSc1T93b67OMwyenQ/FGhg5Gmf+yvgOOQUcDHu7LeDScZmnoBIKoHRBzq0Y+eLhYwcimkSKRUwFjwoJ01xsWAtjHG9tpaHYKL9EpZmiZSISNjjO67EEKZF9fX10hB8se4ulIKETiHPEuUQslBIHgPJkDbQl23q9UqUj1vNpu3b9++ffvWWGedq6qq7c39/X2ne8Z4r+1yuby/v4+9s0VRTKfTYlIlSUKEXde1dbNcLtM0VUpMJpNXr75erVb/8A//kGXZdDqdTCZv3rxRit9cpW8/tsvlsq7rONtjsX4sUYlF5k3TxPKfGLqMtT/R9iWi9+/fx3u4ubnJ8/z+/l5Kudlslsvlq1evXr9+3TTNx48fV6tVnufe+7qu+76P2vHNmzfe+/l8XpblZrOZTCbb7Xa5XDrnJnmRZ2ksu90ZmnyHHSNl0hktuHQhWjA7YOQ8z2NLTN1s3rx50zRNlmXxzK9evXr37qc0TSNnqvc+UglmeRIVfJ7n8/mciGJn4bC
wRCunrutB64xDAl+QvrGiGsdIBl9iKA4f9oxl/9gIGxWIjTdeTKrxJQdBGkC8hjUl3hBjj0GtQ3UloggjMoaSoWAoEDl5rUSss3AMIU2U4MxZE8XAee+DjycighCICcEEQ8Z8CN47AgLGPFBRlh4gshyoLDc+tNqKJGuD5yoRshCYeMu8xgBKyJIXZUdgucink6TI0FnqNDVdE+ok4VmWAucgJUtyDWLdaVCZCwAcBQbmeuw3yjRF6G9RqhwdtNfPSmtX1UzpvkaErjbrZROCKIqpTJRIpMyQeMhzxiQAeiDnrNFtA9ZlUqKnlEsOglwQKFKZek9d0607TcjKvFRCgjeJAAnBtrUzDafgbIeCq7zYdmbd66ScpyJ4b6zVAIiSB0IXAhDWrWaMWed77YBIioQAKGDbt0BBKpFlERcYjTO97jHyvTEkBE8QAIhxYDx4gIhdyWLBJDvCtARAwAiF+QiIKfb0H+OldqzeHj0wxoHRNBNXs2xe8GmCk0woyQgRhEKZgkpiLQfnIJEQMRLYMQaIgEC4q/ADAAgAFN1BxPgZY+1HvDfGCCAQOB8YFwToAwUCAkDG478ICLnzKQHjtzugIERAFo8B3J0KgRggg11jAe7oBSHuicghu/27A/YZSQCGPOoriAR4QAQYiDzQjq+Ss0AeGBJi5EAAAATknAcfEBBhNypIsb8psFg3GQgDARED5Bj5mwABgo8AEON6bM8QGY8t/0SBAniGKCTv+h6EbI3/r//62/tNY1CgTMD3kW+QKDhnnbOBCPd6OgAEikOAAdADCFF73yQJprnUPpalyc6hU7nlecjmQc0aSDaaNZRYVqLtEVip0nlZzossY4S2Z6b76npeFWkmsdluLq8u3374qF2wBK28RFViVnomLVHAQAwcIcgERDqpLv/l+89fvX7VWfrx/WcT2OSmYkUmitRjsKGV6BJJ3PXTTGQMBXkwTgJLeOp7t7xdqoQLLtq6npX5JOe+39juPujNn//867sPP/3w5ndae8fxYWunF7O395uiLO+2fTm/anqTZAogXF1M1w+fjWmeXV8Yp601IpHaBSaSgEIhz7K8aZo0TZx3Prj5xRQ53N1/Igqmb3XbLabTKs/Nti6lfPvw4eL6Kq2mv/nDm+fPX7adrTcr4anK5DxLtw+3s+mkM/5X3//IJ1UfWKbEpJr+8MPvZ9OiXt6ia8uEo7cXi5nu+zyfTGfz5XJjjUMGs2n19uPGWijysq77333/4+uvf7ZY3ABLf3r/adMakc5+fPf5btNnkwtHUju8uXqudWAs2W7b2fzixzdv5hdXWZE78MAgAtyjEIQsFtjlUlijyzwn7501D7e3i8urpmkYF3HNJwIXMBASMIIdytJgWQ5ry5Gp96hHEXEHd7gz3gCRE2qjGWCRZ3d3d0brRCXeWcG439cCPG5j43HMQDYsW0fRmz/RLg4j6ocj03isik9DPeNb6vs+Rj7H/Abx9sYxqOgLRgvLet8ZvW3qVvcoZFbkeTlFROd2URrvfXAuuk0IAREVF0VRXMwXl5eX00khBaaJ6tp6UU3Bh6qqtpsmy0sf6P5h9bDetF1PyKVKYxiTc76rdeYM+CPTW/TwYB8vit5hURSLxcI5Z7Wx1iLuYkpKicmk4JxnWQbEPnz4cHd3N79YXF8922w20dwLgbTuvLGcoxKCiBL56DMJzoXgHA+GbMjahhPYubEHw85tp17+0Xa0c+yUHO08CgYO2xGQ5kHw7ek6yfE0+0JccRxgGV9i7JzBE9HL/19tp+/tT4mdnN7k+Ksv3O3QI3V0hlOHmEXItNFpMfzxx39qBUDEWIK0/8w4Fyi4MS5yYGtnvSMPMcuIgNxY3xobAGSSqTSPLZ6bzabvu/l8/t1330zK3PVWd0AWdLPpu63rW2c1BOc9eQ+9IWvd/f19QPjZ65t37951XScFN7r//R9+bNsekHXaaONUmnEmV5v1w/2q7XshVF4UeZ5H9yXP8xg6AoBYqja
bVdPpNLqSz54961s3neac8ywVnz6vkiT5fHu33tSbzSYAGW1DABdCXhbW+KZrkSDLMskFBwSArmkBIFa1DEQ8FHCxWMSUnlKpcy6m/JVSsQRxvV5P8qKqqqZpIgl7mqaXl5exGrwoiu++++7q6hIAInkFEf3VX/1VWZavX7+ezWZVVX311VdZll1eXk4mk9ls1nUdYyxmTyL3RXTfQwha6xgVGxw4AIg5kSiekaI9ckfE0Ku1dghlxXMOuQbaQ5JG6FHnHDIW9viZZ5eaI3UwjhUdOYvDgr9bKPaTlkXFxlgMqlVVJbg4Cqs+BjDH14O9e3hU2ELnti+Lx/h6bITcP640G57h9LGH3/Z9H8Ek4xlGD/AIPxqdeiIy3m3bptN9p/tN23bGoJAqL2RRJEKR87bX3linjek1hJCpBAOx4BlSnqqri/nzm+vFrMoSlUtIGVRlOikLIiJkPEm1h493y8/3q21nkEkuEwD0LjozMXDKAFgMNcRIAuxTrDHWAQCRBEQJHpxxpkcKHAFCAIAsyyaTSZqmaZoS4bZuOefzy4u0KKVgirPgoK5122zJWwjeWZ2kkjNACJJBkqghbe79YyXtmPJqP3vYUM0Lo8aDcVz6NPg53s6u46dr+ngdPFWo8bqnGTgaUWThExscWlFna0ZwFCTHUc7grBl39MOzGusL21llNj4zneQLnvrhWevw6FbPCuB4/1738KPXfvSW4qvGQzf9jz7saZyTAecoGBOxdFYIJbgSQmpje+t6bY312gcTyBJYAouw7brltm61czHwJlWSpkTktMnT5GevXz27vJxPk1IBAwDXgO3AtixYhiR2kDXQdT0F2G63lzfXGPwkz0zX9k39+fbh9n612rSddsaG1vjeOU98U2873YfIk8BBSjmZFFcXiyGPXtd127aLxeL5zY3R3Y8//vjs2bPXr59TCE7buKwlUtyv9GZbP6w3gksXGBeKAi7mV5zzpu5iFi2+ZylE2LHC2dlsFtc6a21d13lWXl89i9zU3vsYg82yrG1bIdTd3YNS6VfPnkcr//LyMparWGurqnr79i0A/PIXv7iYzb1zwNjt/X2WZev1ejabcc6tdd77zWYzlH5st9uxuEkpYxIOACLnX2wpXq/Xg6cRH4QxFoOcs9lsQPJqmiaquqGEhEbZsbgsa60jW1ySJDG0O15extupbUcnAdLx3N5N18Nkevw2hKCUmk6nUSUnMhnMaEZPmmyPZZa0xyyO6Mnwb0mQDFZ8jIBHUy5qgrNdgEenHWRv3EE4LF476jVi0ZIMITDBY+nUtqnrXjfabLVutOkBvFSgZJ7nQigiDM5bbbqmtp1mFDgQ+eCtRhdSJRfT6mJezauq4PD8osoFv1hM7+7uqvkiENs05nZVP9R9ZykwCYxbF4zblWj7iBxPxKRI05QrCSP3JZYLRaNSKTWbThhA33feRgIK5611zlXlJIK8XF9fM8Y+3d5r466ur5VSRVGkKQcC0/fOGITAgQQCA+IISvJMJVIw773WZry27ucNDJHq07d9Vq8cKYyxOoQnfLunziZ2QGPytFJrt+2wyHfYoKdz7KzS/YI1djSvTpXKUyLwlGr/wls6e8xT2uLo3Z5+hhNv76lnPDoGRo35RzyF7KTS9WgEwwgR6gsCfnQDI/tdsKj2YvUmE1wqZCIA84FMCNp77b0NoQ+hd84i7whqbVdtv266utfERTGd3NzcFHnKEIo0uaomf/PLX/zim8tKwYRDBp55B9aiD7FIFBFaDdV8tl6vV6vV119//dWLZ86A7nQA8fH24e3Hz70NvcXP96t1Y4qqApH4AE3bbpu67XtEKsv84qJSSkVgh77vI0nszc3N1dXVP/3TPwZy/+E//AfvXd9DW5uXz+eS866HrrN39yttKQAiV9oSkyovJ9bavu8hULAeyKdSpFJpra3Vs9k0y7I0Tfu+j0quKIokycqyFEJsNhtj+iSRUWfEEtBqOivL0hhT13Wr+77vt9t
tmqYf3r1bPTyUZZlIFaH6nHPv3r27u7uLarVt2w8fPrZtG53Iuq5DCDb4NMn6vo+UtnGpiQXn0amItxGTlHG5jjIbl680TZMk0Vozxpqm2W63QzCPRhW/McPHGIvFL1F3Ru/lqal1lIMf652xaBwZbbs9fpcXdMEPlDKxpk8bDZy5sEtai7EcwijZHjV8/PFQNoOIIex8BfjTlN9YnofihSF+dfrbg+UJHgVvHNhk+3JEKSVjwviYcwgUidc441xAz13wW92pbYhF0BdlodIko8C58dYBgDPWah24sxyllBLBW+d0LwUkks+qCQS6C21ZTadVjsY4bZYwnN4AAIAASURBVJRKA0s+3q8faiOBWRAmQG88KopLTUA2+NAMBecBEb33UopoX3nvgYLRPTLgnE8nxephqftWCx4okA/OOa1Nlpfeew+kspR3fdO2QiZpUeaJ5JwbH6zbdDZQcBxYmihtLAQUDLNEScWJvLXaOmCSMeSA7DE4AAyBAfjjMUI+0L+NhyNOiThw55b74xYCOHRZjpQBg6HucrfxUcPp2AyKX40X9CN9cLrijzXK0Yc/fftTvJx/6zYWVxi3JcBjVH+Q3viLo+canvvoDeBhTwWe878BzwhaHIuwH6ZAFBOmuyIyAiT4U0QbD4M0QIwxZMRCiAzGHJgAFphSFLwDBIoZHfLBYwgpCMcTCm6tTe+cEnxWTQpVRuZSImIhCPLffvVsXqQphDcPte5t25Nx4Agw4hJBvHNQMjFd+/xqkQr+8tnk06dt27m20eQhlQoJvON55izINFUeeWssdz7NIMlSwRggn06ndV0b3Qslo0c1mUz+/M9++S+//v7j+3c3L7/92XffdL/+/vOSEIJgGADa3n2+XV7PV4sqtw5s2zsJUqjG1dZ6RFRCegoCmZfcWu39znlQStV17RzftZwiRoynpm5jsDFJ5Of7hyQvjDHB28lkwrFru05rrZT6+PHjpJoXReGMXrb1w93nV99VSqnXr1//669+NZ1OrQuBMEmS1rqqqmLh6N3tx2I6ifrsYblKZjdsD6qMe5rM2G4YS+rKsrzfdjsHjgvb77ao0qSUbW+6rovoP3HnvspPCCGCNdERHFRgXMB9oNEMf5xUjB/gkB2F904twpjKGQghYeD8IxKcx9gsMAQPiBgA4onEWXkYfwi7TmfaQX9aMeT8xkL41DbU5Azr4NB7ByfqNr6goxsYLwrRjhgngXbtd55c8J6CR0/EiUFWFsGZuu+Ds94Z6w09u766vEgoJAn3wlvd7zxo512ndyR2ggfyptdCyUmayzmrMeFSVLPpb37/h/lk0jQNl/nd/bbVrkxzYmmrPTgrmYhFzJxzJgWXYpfPQ4wvUKmUMcaZFELY4J1zEMGgJWeMeWcjxxhQwL2ZM5lM9Gpdty3nfDZbEBebzSZNAhElQhZ5apu+d8Y5RMDgPQOmlEikYEDWOreD3xSIGOfY4KTtX+8hvc7unT7pwcBInw1+3gFgyBPhzfEJd5W5RLEWYzfXAWBflBFCoLC7YxhFOY7MwKdX4YOd4wl2qFcOROhoyp0K2OmRR9uTx5wcfzjziYjG7ETD/qOz7eMzdOqrPf7qBPBs9ycehC6JkO0hsPkO3Y5wB8Yaee3+eJBzEFg24kIDACSBGIC4d+BdYIwFAk/IhPIx4I4QFw4k1oew7JxgDIC3det0nypufax8vprNZsE6Y0yr22meyNkk+cW309v1pt4+LLfrptUeLKHxEPHTP326f/3qpiqL9eqh5+wvfvlz3fzX369slojO+I93a++cuL7sHH68e3h2ueAyIBFgzGdjJM1dLBbe+77vYsmT7rqsKF69evV//D/87/+v/4//7fa//7e//vf/u067pv3hw4f1i5t5ooCA697f3q+r6cQReYJOe07gA1njBOP5pGiaxhrNCS1RLDdt23ZaTdq2nc+rAbJRCFUUhXcxSUZ5nt/98P2rV6W1dr2qLy6uptOp6UWWZV2n+75
XSpV5sVqtiKfO2Pdv38HN/MWLF29++mk2m0XYASHEdFYh8k29tcE756M9wZXSq9UsyyKLXnA+NiG0bcuEKsuSiIwxVVV9fNhECFkhROecc+7+/n7QRhHO3lMYKredj0xAj+m6KH1D1vCcPbdfCg7hN/GwVvwoTjPeE+m4xtKd5BkBxHrdCKjG9kufeGpax/wTG+GHAsDQOReDuQc27B/bcA9wFatOzq4XADu9fbQkxYf03mPY6bl4DwDAgGLTApfCeGe9iwj9TCpnjSVwAMtO2893KBUvJ4r5RCWKBW8dYyKRCgNRcLpvlVJZooyxxmoCzznPUvXNTdEZ4/oOTZ8WE6Pd/f2y6TRgkhaVSHNtvPNNnhTIGBeCIOo27/iu3TBWtEYIcJ5wiHhQISCSYMgky5R0nsA7ay0xjsgFg0BuMinXTdstN8ClEMIjS1QagrbWAYQsyzyKUGvrLCdCAiFRqR15ntWGiBQHtm/bIIr+KNurwL3pQPtA5c73OuZJP6vMhs9EB9ruaLE+0nl71+Tx/OIEi4Eeo8HAAGnfNH3q3p1ebqxljg6LSd8jeRtOe/QIRzrv3+QyHp3qqZ8dqDd6Yv/uPI+d7EfycureHYVPh52DJ80ACRkAcMDAGDFGjIV9YJkdVmgTPUUv+Bg6Hg0rEBE6BgQALASwgTgCAjofJOMRgQ8J4n8BEAC7VldlIZXsu77rrbYmBFiv133fv/7quUCWKJGlSgAFb59fLubzarnefPj8cL/ebnvXaNdo12pnrdUO2ra9mk9109RN9/rF5cuvbrbKO2O17sl3HEEpJRiaPEnSljCP4tAZLaxIlRKSqWxirX148IiolLDWUtuWs9nPfvbtr373h3/97fd9W79+9XWn/b/++ifTt1wqYIIwLNcbAnQeuczItgQMUfZ9r7Upy0mHaI0pikI3PUCImbYY5CSatW0/mVTr9ZaI0jQFwhBC33dZlhTFZL3eZpOaMdbqHnwo8vT62U20jIkoT9XyYXPz6vryQr/7eEteZ3mZZYUxDhFjXDRN5HK5bOvNarUaIpb7thB8eHjoug4CSSmn02nbtsBMWZYRD+Ti+kLr3yX5JBbixcVktVpFHl2ttZBJNPGJKJZRD8Us1tqEsegyRYevbVuueAghosvvlwY8mlHjNSGCsESNcySk0WkZ7+GAKBAArNFZljEg5Dyi8ERM1AHV6fy2x/VA3DewD57WaYPUF1aBIZYVrYOh+f2suJ7mG4YtjAjmB88vJskgUATci1iuUkrGufHOkEcheVYQk8te364295u6084RAyEBJWNCyCTq0b5rKXjJBecsOO/0rvTjclosinxzf1uVhTV9IuSnj7cAqNIsKyZpklsf6rbrrfOAROicC0Cwo0zlQojIv2n2FC0xbjxQTzHG0kxlWRZbl7zRBB4xYoTudIAx5uHhrqubxWwmlIox57Is5vN5NSmyRGVKCgaCIecMGQW3i2sr9cixEEYgZIMngPCo5Mba7qj84cgKOdJkp4v+2RHEfV8m3+FNPm5D8w0ixuy09z64x6Kn03D62X7zUyd1eIdjb29sS35Br///daMntqee60jWzgrdeCdjB4xRwzjG/D9+8Un/RCt2/LqG3B4FBGIUMIQQPMR/jsADOkBPYddzghgQCdEBirRIiolMc5bmINPO+LvV9u2Hjx8/3d4tHxhji2qWJsrpXnH27KK6WUyvZsViWlZlViRCIDBy5CER8Onztt6uq+mEI9zd3X317Nkvf/nLLMs6oz2hNuHT57vbhzUxue1M09tGu6brN/W2bVsinyQyNm5HfSClhOC7vrFa91337etXr79++S///M/VtPzum5c/+/ama7XRzjnnCdab2lrfa0uAITAuEyaU1rau6x0ih/dFmsYiEWNM9ELiUvDw8DCZTGLfoRAi4pkZYzjnX3/99f39/Xa9mU5n1viffnrXdd3FxcXLly+7rru7+3x9fT2bVolS06J8cf2sbdvb21ulVNu2RVH
c3NwgYiDU1njvdW/zrLTGC5l4TzFXFytCY3Xos2fPEDFm76IBnWVZTPgN2aUIqzagM8K+biWKLe3L0yjic+4jqHH/Dluf6GjaDH/SHrRyqAWNKdJx1+9YR4y9Mg44rCdRtQMA5zzL8wGgI/6QV/OLs/7juHhhuIOYkxxV1EAcpyRJIjHmHjL4kas6kEPEaA4URRFCiJU/R9GnQfyiFRCvhUCDf5nE5m5nY5VjdMa99wgiBDLOOR8J5xgXXEphrI7djLFqW6nEEz2sts+qUqlUSAEAUsgkSSi4vm0icFe0PHwgAuJCqTTlIQjBGGf3y9W6rjfbxnoKgRkT/urP/2L1sPzw05tpmU7zJE9lWRRlmXVtZ40lT955yUQIYb1cdV2XZzkCcYazWYVAbVMjUCAAQu+C7s1OPBhTKqZhrRAiTfOu16vlutfa9ObmqkLGHJExrm3attfAWJrlKk2kUMbYvusJMM1SIZNA0PaWMc6lZGxXRsS4jE21jAnaV5wzxgkRAGOsfLycDZN4rALpEenxcfjGTtuOMGi08sYguRLS9F2RihcX82kqJYREcMaQC8lYRDGSnDPOGGfIAXmSDIIx7qwfu25jncHCwbwaJvah1n90aIZ2Q7YHJIoFaeNA+uESf9CTM44lHh2/uwSMFBXtTFtEhF3J2O4xhlfNBq/78VVDNLTH+49qMveys+suJyI3KpOO3Eq70iHvY9rOBwo+PoWlQFIwBOyt+/D5/v/13/9HZwLxxLkg5IFXNyxJR/Nh+J8DF0LGKDXngjEeA6ZKJRQD/hGkj8CGYI1r28aFkOcTIWXTdYyxJE111wNAoCAZ79sukFNS5kVutFaSAUBeTqbTCXLeG+eDz9LkcjHjGKaFmOZpKnkiBATvnbtt/auvX4IPH26XkhMXvG0bzlnbtXlepFmCCFJwxpELXmS5Mz7PsiTiXWw3SZpOqmkgkomqqsV/+2//XaU5ACLj33777X//x98ih7azaSKc0X/7t3/jdNf33aKq6no9LSc//fTjrJoVRS6EQKI0TT0GAppOJ5vtZjG/UCp58+MbKVVZlM75alp9eP+hmk3LsnDetW2TFtWbN2+zvGCMcyF/+5vfpKnK0/TZs5uf3rxZrlbVbKGd9wG2dXd9c9Nbs1qtqvnFcrWezRcPyxVDeHh4uLxYPDzcz+ezr19+9fs/fD8pCmdNXpSbpl+v19+8fvXp48df/vmfrZerFy9eJGnmvHv34WPbmfni4g9v3r78+vXn2zshpNEdAGy329evX9/d3QkhtLF5nhtrAQB3+YrosXHOOQbvnIugVLG7fzqbOedgT/jFGEM2quKmx/aqsQE3WK6DHR8VIQzdfxTbd3c9tVxway0XXHBORIAYWS+AQAwBQzgXOzprCca03B5EOAw68jHK8YSlfGRrP7U552I6KgrpsOQF7+LPB8qeaDvEtYcConOeBQCI1axJknAGjAUCYAjAOQoOkn16WBGTV3ymRIIBwVuVZNMK1quHXmsfAiE6CsBERJ2uN2uVZophmSaN1l2g3hjb61k5aZut7po0SZIkAcaBce8pQjNIzhMmvHON7fq+H+MXxA+IGN3fpu8EMhQ8Iuy1uh8OEwJlwESFSZl3Xb/e1Pe3HwRuBVdJkqWp7F2gpje9IRaSrLTWEBGXQnBFhNpq3RkCDCE8FjXQY2IPDj2ecVrvKdt/mHajI+nsr05/Pgw9BxSMHyyaseGa73tuaOhHfzQk4VxM/8jzAADw4fS2zx45eD9PnfCpaXx2O/uKotLCwxjp+B5YZEoanWF/M08+whdu7PQ+x2/+/NkC7Xp996DZFAKOTnL6QzrsKhnvPHxMPjADInLkjAMPgQPAjsUCgBC5kNrY5XqVSEXANm3bIUihkrJstHP3D9Mit8EHwkmZ50W5Wq1kkmVZLtPcEXeepJS9NnXX56lgFBiClDLNMm+1MWbzcJtJVk7SFzdT2+u7uyZL4bLXSZJ8vHvw3n717CowvlxvvHX
z6dx7t+88w0CuabdCSZWl5OH585tf/NnP/+//238tJtPnL18sV9u//suX//irtwAgGHrOP378eLWYZlJ+urud5klAYEIFQALgjBHDeObou0RfKr7AGPOMxmjsoouFAtbazrnFYqG1bpruq8XldDpdr7bPr66apimK7P7tkgFSCEoIq43vTeyYin0CsVvJBa+Uapo2z0spk7hezmaL1epus9n0QcR0ZlEUWuvJZNK2rVCpUBHed2fYRQvPGJMkSdu2EVhxMpnc3d1JqYYpgSP2Str3TQFADG5F4JgIVTNG/wEaJfBGhvXYVD27khxNvDiJdysS253HA0kp8zzfbrdt00QOAz6p5n+KFA1XtdbEJZsxFjsIY4HpwYIwip0i293WgN8T1cBT0hsIYqXsWPMDQAheCEHeA8BQUxtCINqxbAMAPdLnAgJ574Kj4IN3HgAJGRH3zcYDCimV3LEOMYREiRA8ARhrtbUOiO84Y6Xe1CJRXAgCQC6QCx+CM+768tL0XVevq2leZBLJVWWhlMwyabRhwLiQ3nlnHAXPAI3pI7qyEEJKHoInClKK2ljnA5cyL0rkXGujjQFkUilE9IGCc4iMI2rdbzY6kA6EAFxbu9m2TdsHApmkBKzrtHVeqUwmqbW+6XtnfYiLFzAKEFM4jHNEvlddHPccrZHcYJxsOzLnh06DscOB+Bi9HKuBwVs62hQKBmFRFTcXVZmIVGAqBOMYwVAYj55eZGJAzlhgO4aKo7j60WkfC+ifbsgZn2HY82VVOp75+/0HeuX0tYzfGI7Kvh7lYve22eNpDuD6/OPhu8uP7+fJzMJwnqP7iyd49Ih9iEYFBQhEwfsQHIXgvXU+aOPuVpt/+tff9o5ApD4Aoj+rMh9XuuM0/I4lY/e4+y8Zj89LiAhESBBBQRMhgvdaGwJCZNvtttd6Oq0Sla3W6/Vm60MgIOdCAGBcykQZ53ptGOeLi4ub6+ssyxDBm14yCN5GctpqVk0mVZJm67blSGWezqqJD3Zbd9ZACH2WZt5aIdikLJJE6l4DhTwv2Z7FEShoq02k0UkzYCxJEiHVP/+Pf1mvN8+ePXcuTCZl3Xa6a7NMTYs8TUWRpQzJOzspSwCq602eZ9OykFJaawIRcthuN3leAIAUMoSwXm+7rr+8vJJStW0jhGjahnOeZYm1pm5ckqTL1cb58PzFCx9C02wvLxZZmgCFNz++++rlV8aSVNnn+wfG2Kf1ZjZfXF5ePjw8cCmaug6e8ix1zgLRrJq+eHb9+fOn119/fX9/66y9W25evHjhrBGcE4Sry0vnHBeScf673//B2FDNFz+9/zit5nXTEmCRJTHCGR241WrFhaTHUAbDEQ0yxBwvwKDUt9stRYtfiEGB0SjZH8npxuptnNgaNOLj+YHts997ty9OThZhTmP9qWCct33X1HW8QzFWm/B0zmAcIBrqWWKiEgCcc4yJ0ZFnCqnZvhHwCzoP9i5trHZ1LjxmMoGUUigl7GNTUkpjjI/vAuMlkBgSEhL0umUEHgCC54jeY/DgJWycc3dLAgC4XpS5QuYBwdP84qpptpt6C9Yj48iFC+CtQ55QQEJMVbKY8swFIRQPbFYmnz/fJYyqIrN9o9u+vZkZV0opa+9rY4AIfKCIaF6WXdfQvixo0BDeeyHTruvQuLLgKk2AoTHGGj9XSjCUDBEolYLPK++9N7o2DplFpj2hddExjJDcbacNcikTRSiMb30glBJ2sCzj2rxHjAKA4+WeHfImHqyq++3Q2/sj3g8cxiGJSAqRSqW42FOmY8Qc4IdTL3p+p37SkTYado5LPc+ohNFdjdXSEG8/0lhPeVpHOmAsh0dnOL3DnRIYe5yxgRIP3KZ4yEgen3S+j2T2Uc8d1r+Ms4YIwGhX9D8elOgEnKBgwFPSOqRhxlePy83jnYx81tgyBAhDmwTsxEFxZG3bdgRlWSZZ4YzhKr/bbNtWB6MBOUplgHU+LOv++eUsdnyn2nGZVFX19bP
LIpGl4uv15v2nj/d3267pvPdfv/7m4urqZ4zf398zdOVkwti1UmL1sPz02aTy9sXNZW/DatPmWVoWU8HgYbm6ubgMwUvFp9OpB7/ZbIL33uh8Wn369OH65vJv/vIv/s//l//b97/7zetvfs4x//u//et/Zv+83izTNM+U3G7XK91/9+0rFNxbI5NEW2OcTxIAhn3fp2XqnEOk2O4dvTqtddM0sVVuMpkMsL1CiBD0dDqVt3dt2y6X68vLSwiuaRq4ury4uCiK1Bozr2a1Jsl417QfPnyIDEQRcaY3mgFOp1POsV6vt9ttkmSz2cx6v15vF/Pq/v6nv/iLvwjeOGOJWHRjcE8DHsc0rsZJkhjrI8fCwEynlLJ7qgDY16EfzdJY+RJbJpRSjigGLY9m9a7S+IlC6xPrajfDGY5bAB8vPdAbxHBRmqaTyWS9XrvOEtEBVv0XVorxM0T3ecBMAYBTaNrxQgB7tRdXmS9DkY7zNONtYKMf5HmssEMA78l7CjY2KwbBFWcyds7umdUEETfIlnX308f797f3dW+JKeLcEngEJmSSFeWsmswXMss76+5Wa5mkxkHTNM6YhMOsSK6r4vmiVEHnjKZpknDWNnXTNAGQkBttm6at66Zr+yElAwBJkgAAZ5AowTGGb8lazYQyLmzqpuk6H4AzSQGNMW3bElGayCJNikxN8+SiKq/mU+PABQzApErLsszzMnhYLbdaW0SuZCqEcs53vTEOkMsjHJbHtxdwNEcBAIEQjp2oP76xJ9AWzk4e2Kf9BtSC2GALh84i2y/0SMdMAsNFj8y1U33whcje+IRHKhnOqfzxdlRy8qX44aEcnb6Zs+/qj55wfNj4wxfueXxapNHPw6Og7RzBIaGy+3emlOkLz8UYg9iYsmtPCQABkQCC9xbIRxRXhIAQoo0KzuZKSc5iDddicTGdzR2wu9W292iZXPXuoekbDxsbPm2b//KP/+Pd54fARCC8+/T53Y8/mHZ7PS9/9ur5ty+fffv1Vy+eLQDg7fu7n9592Lb6YpokwpNrnW2LPPn2m1evv/2mKOBuWd89rO6Xq+Vqo3vHeAJMGOeds1rr2L49m0yLIgMIEaaLIxPI/t3f/e2rr29+/MPv16sHydmL51e//LPv0oRv1g/Iwma7evv2DQAAQ+1skuXa2dZoQ54QeqNDcLuBQGyahog45xFAA/eMRUIIztF7CxCEkJzzi/ncOff73/8eEW9ubj7d3RrbA0BVVdvtdjadIFBZZOSdEKLruuV6o63rOh1zGbFbPC8L5Lztey5E3/dN0/iw0zeRAhARP3/+jHtinIGYIkYmYyd3pNaLB8eqmXHx49F8plGLSzwsIpbEWB091gfsPKKjohUc1RYcldeNF5/x0jFMzh2ZHUNPwXpHCMWkvLq53jUOng2YfkFy2B57bUB3pH0Bz1mJPVJ74ZBd73Qb6jPjYUOx33BR2hc3Rl07gHDDiCwwWCeQRXJugQKRQ0DvyFlyKPpAq7p5//n+8/1DozUwgUpt6qazRig5qWbVfKHS3Piw2W5lWjAhooYQHMtMXs3Kr64XGQsX03ySC93XpteRjIMQmqbRe1oWznlkWgCAWJ2FiEPmMloM2gdjfdvZ9bZpew0M4/71eu2MlpJPyrxIEym4ElikyeJylmaF89S2fW8sE1woGWt2ZaJkoqyntus6bV2gADjQXME5b+BUgZ1dr8dz7suq7uir0yOjnCdJclC9CcBjXht3e/g+yjdUbTwu3CdKYmwenSqGL+gMGJXhwIktdXY+j/ccKcjx8UdK9FRbnL2lI7C00/sfn/z0mC+r87MniZoPRiCcQz/lWWtjPBOemhJHg74T/7iE7eocmEIuuciEghCQvGAcA4UQptPZZDrremM8kFSep7V1901fW3Iihbz4uNq+v13er7bGESJa09frh+3D54SF+TT7+Tdf//t/93d/+Ze/vLy+IC57R97WAi0n63XnXa+UvLxcfPuz1yrh6832/fu
Pn++W6029XtddZzlL2rbtui4iAEvJizQTjDtrbz9/XFzMm209m83+03/8nyHQH373fdu23unvXr/85vVLqz0F573tjFlvNx689z7NMutCZ7QPQMhciOgfEBdPYwwROhfiQieEirwrMc1f1zXs+pLZ1eUNZ/L29na5XMo02Ww2q9VqvV6mqYqHZSqZ5IUS8quvvuq67u3bt9vttmma6GBprUOA+XyxWFw+PDxY67U2jkLbtpeXl7GSLhZMRLUXF9vo2MUJEMnznHMRuSaqsahN4Vx85Wi1AQBrbdM0jLGiKMbHHBXKnUrQUzJyNJ+PLoqjOqyoF5RSi8UikjcJ+mN2JR4GfIYVc1/ItqvOoC+2N+EJMdtT2679PIQQAhu1+sXqXtqX0kYVKIQIJIGIkxveDINRWIgQdlD1gcgFB4wRV0mw/e1yxYMFb76+XhSJQCYAEBgPEBkHGHCGQiBTgpNSlnkQiAIgSUSmEm8dF8nHT7cfb+8AwsXFPM/Kpu0ztnvRbd8xoKxK8jxngfoeN4/Lt2cMUpVAoHttA7KA0PYdQKC9AaF129YJ5xy5APLgnUAosnTGi743m82mrmsCVDJXKi1ANq1hXAAwrU1nLDDkwAKh3LeIwr4/nXaQxBwRYQeYwlkk6zspWDi1sE5W8OPV/HTtO1pDB8QsRL9PDZIQgj3iBJ5pX4E9dOd4LuEhRgkACDgOBoxl7+gD7SnlTkN5dBS7e6JgBPFMVGf8565oZdSBdyCTJyUAdK4oDJ/wm+EwrHKkg8+ehChEicDRS4ChoHykrejwh3jQNX+wFBx8ZrvkOiAgxQsF2JcmYfBAxAgIgDOUnAMwZywHTJNEIBNSKsBGG5FmxJgH67lsjP+82rA0vSwW1199s7z9/Kvf/r578ezbl9eFYq5vNs2aJhMuVZqUUuUkVDG7NAEZl9NctrmqO4McCKnvGsbV1dUVR2F0V68e3r//OE2VZHwxKztjmXdSSiGZ1loIkFImSULGPqzX1zc2kEtV8d3rb16/fv3x0+c3P/z+5Tev57PZz7/9pl4tsyy5ve2EiG7cwodQTifLh7sdVKHgAZDtKTAZ24U9Yo1eJDIry/Knn+4vry68t3Vdl5MclhoRqqrKskw7u1ptLq8WRVE0TQPeSS4YmXqznUwWD+s2lSoR+OnTJ2BcKfXw8BA7uziC1jpRIkZW0zSNwSQp5YsXC+dcU68BwDmntY6ldtvlg3Muhj2jksuKlO2BpyPzbZSdGKTFXa3cON2LAEBh1z5kjIlYoHmeL5fLwRyP82807fypkMKojf3I9uXhEeWF2KPMDngxYp9EBIDYncI5/xI97FkpOvJbIxLmEV/aUyeEP0HtDbzn4ysOVUDDzrCnKX+KyC16xXLXKsgQEYkRkQtBqFTIpGn7d+/ff/jwabPdeu8nsypN0wDQG9t1nXVOqSQvCq2ND7veBmeN6bvgrAB/dTF/dnOVKNF1jeQ4r2ZcikjJSETamOVyeXd317ZtJFuIpMPj1sPI+ed94EJKKYOHtu+01hGlIoTQ67au665ptO6Ct0KIyaQwxlGME+47Dq331loXQgxNdDtDNVEjKvMj65v2WLFH27in+2gmnNV5Tx0GX/Ry2CHva9yOHMqj9T2MWLjO+kCn3t4XtlOP7Skn7Ow0Hr8xHJXGHJ38VPXCiQI7dS5Pvzp7/3/0sCNXb3zdoeTg1Ex+6g3gyQYjdXg00HBOTw/9Ht77EGGeCAAgVYm3DhntOm6ZYIx5TyrNCBkyJpPcAS43zWqz1T7MF1eB+Mfb5scff/zxxx8/f/jY922aKGeN1X29WS1X98656XSa5WVn7MsXz+azaSpFniVZkmjdxTa1ly9f/vznP5/PF7e327fvPxCREGqz3g5Zq+gDMcaSJEnTVDJ+f3+f53ns4/7Zt99dX1+/efNTU28+fHg3nRSvX3+d5/lyufGerLWR0TD2/3kKAYExhpxJKdM0jXQrEWs+Rg63223XdbHfLlr22nS
xZw4AIlrY5eVVzC5dXl7uHAPGlFLb7bbMi9hLtl6vu66LNHjW2pg+rKq5DX65XEYUzaqqYjfhYrGoqspaG/VQrC+NjUabzSZWnA6RvAjaGYNweZ5HVzI23T9lY+GeQgERnXMRDjTyjA4huoGTAPEgNEWjNqFheR93bx/9eRTeiOHZsOcngL3KiHjfu749PBcbOTvpI54eEQym4b6nijGGQnDGEDAABmTE2C7tF0k9nHMxSh5rYU4lExGRI1EgCrFaggCAIRNcWwuMMamIc+ODAyDOLRHfd75G/jIEQkAgsNbhnkfNxQQ6Aya5Z2kAzmWm0rQ3frVZO4Isy6ezqppUaZLotum3NQ8+RUyA2go2fmPI3i+XZTkHx1OWNsvmcrJo1xtkrCizJE+rywWmatv3n8PGJsmmow/3DWJ58+xbJaYPdyuv3aQoQtDOm3xauuDvt1vgisgowZuuX250S5OGqqUralzUNHVy4SA3Hom8RK+o476zrGeubzY1I+U0t1ZMJheeEIRHFSx1ve18AMYkYh688KjDLm23BwdmQgjhvAWkceCKITJE4CwSvwHDmIKMv+VCAEbGN2ScI2fx2yB4QAyIHiCWAnGpuFTOhwDR3ofoP8cQ11z6BM3LZ7P5JMkk5KlywTCpgAviEhOFXALDgITIkCN42HHI7Wm14v+Kix25XSAGyPfHCMYREOK9YuwJEvHSuya6fTcdUWybc3GyHf0LwcPu6N2/uJ8OX0sACkSeAu5J3uOri28PGUOmCHkgFoARMkKGTCDjznkaanaIIKLRAAIIhiKCHUQMovi99wHx0TJ4NHgFR853nIQAgSi69geKh2DHHMhYwBAAPHm3V0NIDoEwBOMcqvR3b9797v2nxqMWClQavIEYCWCMAHe8hpG5EABw9xXjggvJhPAeAQXuZxtB5GXkiJwxQYQ+ACAXKmUonKNV1wUuVFakecEZbtfLvqmrIlPgFQQ0vQg+F5J5H4xjHhKO01mFQLereqND7dWWkpDMnJzWFjvvAEMifCJ0SsuU1ip/5jxXSWG1q9frKs8uZpN+u+y29+D6q6tZmrO71ZJn2fT6emPdFK3gXAqJwBBQcJWlaZ5m9w8r3fY311e26yaZ3D7c3X98O8uTH+4bmRabun724iUT+NNPPzQtLObJd69eru7uJ7kK2ti2//b512Q99RYQJuWEAnlnOce6XltvGAcPYX4xa7qmnE6Qy167JJ1oQwF6AE2gvWnIt8+vLiTHLMlEmn+4vb95+XVj9GRSeN85s+bQrOuGeY8cpcrVdB54ttx2aVa8efPj9cXF9aJqm1ql6vPdMjB589U3G2OWm60FBiiWq+bi4tl8cbNedd//+Haz1dOLq093q977zw+rb//sF394+xMHAcizvOy1nUxnD8u1MVaphDPOcG/IRny7EMh7QBYIGBd121az+bZuQghXV1f1dssZ45whAGfAGVBw1vSCcwohhGgqAGOIALulaS+DbF+zzBgKBERiCAABiSIuORAxRCUk58K74J3njCOy4EkpYgJE1N5HQVXYgzkdBYhOjbuzVurwq8EvjO5z2NN8n5qEY0fkyD4d7mrc/TZskU5i4GeAUZCN9tVBg/sYQnCkIXBOXFAAht7Btm4+3t0XeQKzSZEkSVZEDNZol/UhcGTb7er6+rrdttMkX61Wk8nEetdp7YOrqqrkCATLu/uu78pSOmO9c8G69Xr9sFzOsjzNc73VxllCxjiL71wJCURKKeuBCHvjgm8C+t4jY+Ljx0+TTM2nyTTjKDxiUMhUmnFt0xSVNNtOG4NpliWJwjZIKYGRO+AB9s6RSA5idI/BgT209FPje7aH4awhj+fCgLsfwoFDgDvMVb7nXohOp4gB92j6IXBkO46IEILgZxwLPCwgfMrL+aMO3FNeC47iqEcHxCucvrfxFQ++xXAkI2HPYXk0vfcfzkDbxPEa/MtxPbelYwCapxxi2vdREeHZw+KeMCLqC96fHW445+4fzaL
HVze6SMzcx+mxSzs5vQulOGescc5FE7nvWhgxwIQ9aVfdNEpKlaXT6TQE13QtQAAfWHDXV/Pnz64YuNX6PjjLBTnjV6sV7fGEkyQxxrT9/Xq9jnUZaZ5dXFzEGFss0xi/BEQSQgBwxtjrr1/99vvfOWN8IOvCL37xs3/8p38qCpEh1M22ayFN01lVFEWRJiZNU2scEUUyspjGiy5LkuQ4ygrFjFqsdYxxzhAC4G6sY/Vj27YD3EnTNMbZIp8U02lcS4uiQMToeM3nc37bTKrpZH6x7mzf27Kccs7ff3j74sULREzT9PbTxzRTUvHF4sVqtWqdwT0CdQy6FmXZdjoSDA3hHyll3/ecc2/9QLBgjBmifWcDRcPEGHqRh4rIsizbto2Y2kOFS3TRhgVkLCOnsaX9NDvt7TtOE45nbFFNpZQCvhhOORWb02DI+GlplG4Z3mb0UmOl1im02vnF4uTqR4va45Kk0rPHRzSaMefhnooi+BAcEnIUKvHerZv+pw+3nPMQ4KubJElzJrZts4mhdgXgvEu4KMvSa9PqJthwkaW97bU1xpppWSST4nb10G62aZaWKt2ua6eN7vrl+m6a5ovptMqlllxrxzgXSjrnETFTSd8bliQIIRD0xrbW2dAb4onKem2JiAsQPAnBG7JVxiflJOt1nqi2pq7ZIlkpIE2k4iKAD+SRGCJHhH1v+sGrHg/cPi79OAo7BXdYsHCaojuadnDYzzC2eKLaw31gcx8M9JwnsZIT0TLGELmUkoAFIu89kGOSAxye52Q7nSq4TznDOUV+FAMZfcsP38zwpLsXc/LqaPTeHsViL5mDd7sfAqAjARmkevjzMHjLxo8zXDqMOPAOzLtDcTkbqhmW8hDCmFbhcYgYi9qTiMbBZB+8OGc34L7d+PS5cN9pP54JwwGD2h5C/VmWxUXTWtvVdYQ/HFqYkMvgCfdqT2u99DqVKk3T6Xy2Wq20MVFnkOubdpum6vmzK9G1TW+Qc+TJcrmMtHOReq3ve9e2eZo0ba21TpJksVi4qXXWNPW2qiptjFRKOk0YCLkkQSQYY4uL2fR2ent7e3XzLJB/9uzi1euXv/7Vb2fVzXa71c40TTOdZIvFQkme53lnNLEIxoYA0Pd9ROKNTxoL8aI/EMN9pm4GayPqs1g5GVn3IuMBxMZ2Ci9f5SEE3dvtpkmSrGuaJEmM6efz6tWrV7/5/Y8RXawx1DQNYOCcLxYL26yUElZ34F1VTmaLxb/8y78k1azMizGjbJ7nXfe2rutOm1hREUJIkiRm5sj4AYHM7Gv3Tp0WGBmCg6MyrMNd181ms7ZtY70oAAwkQfHDsFzAqOftyLw+1UH7Sx9XqB3YtUzIJBNHi8hZBXaqfvAwk/9U0mX8wyGAeyQM47UY9wUXR789VbTDU0VtCnsLfYjzshHyNw1NS4iCIfjggxWMM5FgEjpvQq3D+08BUCXpYlomadHVjbYamybNpibA7Opa931VVe9+fFNVlfN+2zYOvfO+73uZJmC9Qn5dLRhzG+Ndr/uu+/DhgdPvn91cp1+/4IkKLXIhuJTOGCkSJUTrOsOCccF5Ms43nat150imGai8JHLaU6Nt3/XMtQBFMeGTLEcm57NgDVhXB9eD04liunM2OOccBuJcBMYpcMkYYn+kqKIq2ltnBw43i40Vh0ruaP0dfxvHa7T3Se9qPD2GImbyhog4AiMAwUMI5CPIDsMd5VA4vSjsq7POqvNThXe6/3QRf+p4OgljxPT7U1P0qcc/3T+ew4dz+/y03+mhPaTh2Rs+vduxnO6yQTzWCyFjjFhs7+UEIfrdsAdriTQdtO/DgxPFNl7mTheNA+neWzCMsVhMEBMfsf1uOp/GhT5qtejWRLzKWLYGABAcInrv+74PErqumzFUXAAikypJc2v6aXXx7u2PRCSTVMmMKWOcNcZ75yKnaySqNMbkeZ6m6Y8/vYn6Jq4Mfd/XdV0UxbZrmBQiEZxzosfYktb65VfP//V
Xv3n+/LnMU2P6v/u7v/lf/9f/Jc0WZZn3PctStV4uZ7N5kSeKYaRQh1iLwVhvNMcdQlPTNFVVDcV6SZIgYm9sDIF477VxQ/9ckqSMMdN3SiUIvOm2Tae/+hrWq62UMpILrh4ettttkiRN01xdvfrV7/4QvVWtddP280WV5Lnp20RKb12R5WVRcM7r7Xq1Wr1YXJRludms+l47awKCB7pbPjRtFxEu47othFyvt7A3l4ko1lUM8y2MaJyPPgzaYVB7MWsYdSrsuVSP/MWxtTqe3mOf6qy8DZNw+MnQsYqIsXr2mIHhSAXi0x7YU3b3UwfD3sIdGt7PLhBPeXtnNR8idl0XbcM4jQbI7NhWOIRYaV/EwVlwIThHDJBzTiINxIK37V0NeK/SHDgrVJZPq77eem+ZC5M0z5S8326Y8jJJmBCrZr3abIuiEIkwpq+3jAWapOk0y1bNPWlLvTWtrjfwge7efnh/cVmVOcdEIfPAeIBdIDI4ql3vPBgHwLjHYKxpHTnUMlHeOULGIpyOCVgbqdo8F8FDrvj1Yta19m5Vd+1WCoYUgvfBOQCOyJE4QADGgA5GCjFm+YZZ9UgfM6ixk4X4j3TzDWdjI7q+3RQa/Rm3XSxFSI4IgUIIAYJzTgiJIxojDIQI48L6s/PkVNmM9x/cyRNlJiH2i+8nMg5eY2x9hb3BGKO18Ojdjp+RRo0Qp0qRMXZ680ex+uGG92Rh59MK44XgaM9YzvHpmmrGGAGGQIxhwAj8hrgPN4OHIR7FObfhjI7/guQOZsH4iMiaudMx3h3VLzAuY6zMew9ckA9a6+Vy+d1338k9VXcIwQVwpnfOJanq+nZTb5WQ2rkizbhUjLHJfCbv7j7drf/lN98/u74qssSFsFy3qeRDJC1N0zgWeZ5/9823kQfGGBOcheC9Nc7oumlVmmQ+dSEwCi5ePKAHmkwml1eL9XpZVlMf4MVXN5NJ/ub92xcvXgiOSqk/fP/bV69eTSYTDuSdSfOMCc6EIArWWq52WLh931dVRftIWJIlcT2MRfZSyq7fdF0Xa3wEwyRJmq6XUkYV0jQmEHZdO6lmdV2naS6EMMZcXVy+ffsWM11V1Wyx+P6nD5vNViVZnmb3t594kK9f3Gw2m6LM0jRhjP/6N79LldgPHO86Td4KIfre7Jhvq8oT2kAhAHLsmjYWfsfVNcuySMU+FrrTGcv2ENWDXozr/2azybIsQp3FaGp0H9k5fjE6ASobJj8/oQgdG2rji8LO/wGGXHy5i+7IY4URpO/Rcw5SdyoV48OGlNtRbuPReDzBDRnCI2dFbg97yvhhS2M0G4fTDp44BQOBCNBYjwSIAhk64F3TyU2ffbxjjL1YTIokKzkGo7mnaVEu1yuJ7Pb29vLqarOpm75brbdMySLLnXPOWAGYCBW01ds6BdkRY4GkhKaDj7e3r7dfq3SKSnjvtbMMEYE7GxhA2/Y+oPUBmeACA/M+kPZhu2kYEJFIkoTLlDNmiO62+gUHa0PwPkuTIlebTUDSDBVHwEDkH/1vT0jBy0OMsN1CdLikjt8bwWOKFA49vNM948MYY2e9vfGUGNY7tq/85RCDaVaQQuQRk4whEQUCYodW29h3OZW0L0zgI1PxqRl+dJIjSzNenZ0E904V7ZdVxdGdjF8jPAZXj29pAHsbC/Ppgxy9kPFVHiVod6uEiOHYuN7l0vZT4jG4dFbkzwn7boY9fkuAiNG0N8bEYsLIM8A577pOa+2sBoCI+u+NHtp2GWNDSV4fHCIKpYS1ne77PiILMyEaJeR601w/f7la3v/w4wej/Z/92c+zSV44tO1D0zQBKFVJzA72fd91XVEUaZrGPNMOOYExADDkTQiewHoHjoQVVnDGWJJnxvTffvv6X/7lVzbYy6sb68zf/4d//8//p/8llXdcyb5p7+/vF4vFs+trjhS8z9OU7zh3vHOOpUmWpsgpFoiGPdhsbDC31q5WKwCYTqewJ8BRSgG5Mi/qtuNSEAI
TythGW29cMMb4WE7Fubc2yzJjTLJP8aRpOp+L5WqzWq1M32EuU6Ws7pTkzlgpoe+aqqqapomgBJGyoJrPmrZdb7YBoaqqHXzzHpOTc8EYRvb5sixjRHpg5hlPs93iPKy6o+B5PCa2usdsa9R2UfEPRdGDXTj+8zSuEEbH4Kibahzk2Omd3WEMxdN8e2fn9FjrnhY7PLUNjz1eHZ5yEAnOX/epZYIJDgwDkB8aGRGQM0acEHwsN+OMc4aInoIkEhwDoQP0PuIHSsY5qMLx5G6r3Q/v2k3++ubyalYmacaD5YCm7V3wiCiEMMFv67rtu9VmHYubTWc4Y0WScgIwfl7ObUdKJNNpu976z7e3n+7v8qkUjFvtjTHTNAcAby1HYV3rA7PBE3IuOXIXuPXITLAQQuiIsQ7KpExSB27Tubm0nsCYICQrMnV1OVNlWfeGgvPWORcAGcZiWkLGHlmJR9MRR3bAscUw5PL+xNze2SAnEbHDPY+1/kD7AvrAOAghCHY1CyhEBCzDCM2FgKNenaPJMMzPI2/vaM9ZlTl+kJ3cDPbAvrkOgcEezHKPshVP8lTE8oBxl/Yc4iM45oOo4LjnAUa6MLbSjsV7N89HZuKwBDDGiB2jRQ8LzdHj74If8IgsscsIeA+BhoTbzjBnB97keE04ld/xRfcJ0DPpjCifMc0W+5G89zHBA5HLkwEiGoDI6MY577VxPigplFLOpYhICGme97122hCwpuu1NfNqZpx/9uyZ7Pru7mHd9B/vltPptFrcNGh6Y4lrzqXvNQBwqeq6Xm8/5Hmep1mET6rrOhayKZWA4A6gd957zwATwblg3MRGe8zzVJveOt1p/bd/+9ff/dMP79++T/Ps/uFWSvn58+dvvnmtnRFMqCwPpmeCo98R7iRJYqiPohejfFE/RRLUzWbDOZ/NZlHT7zc3mUxWdU3AQwAhpPHQ9l2SZN9//4fnz5/HdnXOedu2aZYoySPGkxDi6mp+d7+s6zpTKmqpMsu8M13XEdF8Pg8hrO5Xzba5vLyIuc+Lq5v7+/vlZk0Ik2q6blrrHO47p5kQgiDqwphrjNbMUKh45OSNy1KG6iTOeXzA2AIY62hoVPYBe30J58ysI2/PO380CQ8WinFXGxEAeAoEj2xcx/1bT+mwszIGT29sBDwzTu+Nl4Dx0nZqPg8ievbgwUyITYSxXIqIYqY0fo51XHEkeASDThTnMjAeUBCTgac8m7AkNx7e3z18/+b9+/uH3gWRZtVkGlmMN5vNYrFoei0Tpa0LAKvNpunaWBPBOc+yLFVJwtRsWk3yIkmSopggwHLTP6w3Xa+JSEeCQEQA5mzgnCPyEAL4RzsoFgxIlQbkm7r9cPdwu6y3vWsMrVtnnA8+MtT3KmFX14uLxUwJboxxOngDIQAi45xLwYU8GKPxh7HldTRfjzrSjhrsTv08OGf3jJ3I8f8Au+LG+NKEZIIhRxZC4ACCoxg8QQhHuup09MeXe+rDUz95ahIOOvWs4v+yXIzn6vD/0dQdOq6G90OjvsPxtU5/NY7hDw1P4wc88sVPh+PohGNVR6P+yFM9d3S2U5335VGI7kusmCiKCM1st9vt4FwesQOuVquoiuKSOiQynHMqTZMs5SrhMjHObpvOWJJZ7gl77ZlIHLHf/v6n//Hr37XGl9O5JeyNNdYZ66RKqtl8Mpl0nTbGIGdVVVVV5b1/eHh4eHiQacal8oTGud7o3mjjXQhhuVzmef758+fnNzd5mizv7zjSYjH793/z10gUnGu29XQ6XS6Xxpim7hBRcAWwa+SIIxt9oyFyi4hKqSzLYnegMSaCocTC5kFPpGmaKhVXM8a5D6C1LfLy46dWJqrTFrlI07Tr26qq2raeFLkU7P72szP9Yr57uizLtNYxpxjnzeXlZdfqtm3X67WUMsnSyWQSaWa3TQ3IVZq2kTaAMQoopYoaC/dAnQPc0qAvxhbz0VQcJCKOdZIk0WuMzxvV3pA4PBIcfHob5vC
RfXykO0ZzHQCAl9PZkWoZKmeOJGo4FzvEW6F9M3x8/rh/oOhjhy29w8nHU3zsOMY2z7EojhfQM2qV8+jq+RACUOwq41IYa2nX8cEi6i0hIGPk+gDggTMuQYiAzIZgnUsT5a3LinR1f+eMvrqYhxDSNL8s8/V6s6nrajFPi4KI9caoNC2KIs9L05umaaSUVTlFgq7rnTfIBADjMgEUxnaBHGOQJmKS54v5LOHCW59KJZhoto1V0rnAuHQB75fbprcolfXUas04Z0yEEAO/zDvf9TYLLTLOhbDWGmudD03daGN9QBQCuBQiCcA8xfAvCn6MKRWrFfZ7DvYjIvKDYPKpzjhaXsPYkxv5AbvMDRzkohBRuPbZzdX1fFqmMuNMMFBScM6s94HQx5YzBgKRQYDgkSvc1+KyETntELs4WuXZoYwdzd4D7QtARDyRbOD3Y9FRRkAMFGKnG2DU0sQ4F1LQPrTggvcUABE5i42Mu8QgY4zzXRcdkGB8LIHDbRzVv41KsQ5WkLOKBEe+uwuPWZPx8ae6cPcnUiwhCoGCp+C9dyb4ACFY74HL//xf//t93TkUnSMuFAR79ALhqD/h8HID/vQgv4KzWL4IAJyxiEcRQtB9r7UOuOtwDIECESATXEqpjLPaWGcNEcUGd++DMXbbbhbzeZZl3hMy7j1oY5x12piynMwvLlab9ZsP91ki5/P5+w8fJrlkXGhj1putC4ELGQgoBCJYLpcAOJlOkTGVpEqK5XLZ6YZxmRf59fW10brMckRiwIo877teKW6d9xQ8eUQUSj57+fPf/OrXnz6thcRtXb969erj59ubmxtrHWeYJakAXN7fff3Vi9jzaZy+v79/9uxZJGtljLWxb4F2BRdKKcZ413UAIISYFlnX6yRNH5brzvjlatPp7uLyqigLbRqrDYXw7OZaCaGkuPt8i0VV5Nl62715+45x2baN0f3FYlGk6uWLZxz8/d1tludEhEz89ne/t4TOuRcvXoRA3373rXX2+9//vu06Yuzm+Vf3D8sQcFM3aZZzIRjw4G2U7liPGquQoptxqt6GRGBd11G7R7DTiPDZdV3sVxFCDIidTy34cc6MqzSimuCnnCQIEFO2IcRk5DBpOefAZAgk8DBGT1/EGHvKyIUnTG84tNPPmoqnhuSRRA1C+9QNwInPEZ5G6yCM9hd4OFgXuBSJ4qv10oTgtP7tj++q6i8Nk5u2CciKcpqUuVCydw0FJEIpkxAIPJler/3Gm6CUgIC93QbG03xycXHhiG2btq7X5PxmtX7IpIRKQpDIGBMykVVVtdb2Te84Xswmmrj5vHpotbYghAIA4kygYpw5DxAIiH+6W1cVTKe5kAmBbbVutNOajDGpKovp1KFa93pdNywELoXt/NFbGi+FZ7y6Q9fwC5/Hy+zpmNJhScvRaO4vHXBfUC8YQ8Y4MmQkkCESi/3pR3N6lGk7G4DFk3uIH8Z9isM0+4IPh4cO7vgrtsfOHj/RU2f4wrwdy92XheIpGYGnBXYsNWNdFUKI1OdnLdqzN3x0/rOaeDggpmeic7Z3JV2sgAAApACwi5JF6z6cPAUBIEFE7XI20GGsTCnV9kYynyRZlglrQtf1wJlzfrlep2n6zbc/E0p2TXt7vxRCvPt0e3V1xUS6vF/fr2rGk6urjIlUppmQ6WZbv333XgiBQGmaffPNvO6Wt6tVXdeSi0Sw3ti+M8V1Zq1TSnLOiQXphJOBiJqmLufXf/e3f22d/vT5YVpVTdNoa3969+7VixeIHID1RvfaOhvyTKZpury7j/xr0cvp+x7w0YsYfF8i2uMvFikTxrmyLNn9ylGwHojxdd301tzd3QnOrLWpQCVVVVV3fZNkkyIRkqPRHQQ/KcuLxSxPOACsNvXt3QMgzmYLJoT2wRMlSVIURfQv27arm6YzdlrNe2OMs54iCBeGEHCUeh5PnjjuYzEcvoqBjegXDblA2AfqYE+wGv2lL6zbQ653OC0dNhGMnMKDSOF
wzngcUwoAxFmRw1GzxVPb+LGP/ICjw8Zv4QsLzdEN/FFpHJ4/fhgWwXFp+5njGY88YEQECCzCfmDgCHmebe4/ySTz5FabVhPf9u5e18B5lmVSKe2sMw4JU5lwztu273tjjA0BkJm676z1xVSu244lRZqWReFurq4X8ykDJ0E47UyvlUqIoG1bJGTAFEPwhgPLi9xx1RhngJH226Y3PgBAKkUAZlyIyEZNjUz7xDOZKcaATE/kuRQcBUVyKyQOyBEseO+PazIhOlNjF+3Q54Yxcczowzju/eVBHM97OjwVY4wTcBx8fcI9WTnyR+KqEAIyINjnx84N/an3jyc1lkffHokl7vpKz6e3ifxeczwmQQEwZq0448czk4bmA4yFHQjI8IAz8qwQwYkl8Uel40A06HzF9enZBukYmDceX0scBqJA4SlBO33bR6/9yLQ6UqhxrQ/ORghK5xyLq9guH48xqIEAkSQrQl6NKwCjNiXg29UaEWezRZYVMUfobLB91/fvlJLfvHoNGH744QfjjEzFx7t7kWZVORFJVm/Xn+7vmZKzaZVNprm2Hz68e1hviqLIE1WWXsmkmi36rlvd3n78dP/85iKXUhsdAuttnwgpmITglQJUou26tm5IbL57/eoPP/7w45uHiyu1XK9lknz+fPfi2bMABIwZ45xz1jshMiHEcrmMfRRJkkQGH5Vmw9uLUa6o9owxQggiL5Uyzk0mE2TcB/AEUqWbbaOS7P5+M6/quq7lJAcp8jzNO5/nalPXAsn1rZBJWWRK8Nls1jTN3ceP2nrjw6puuAxZMdHLbTEty7KUUvpA3XZbd621NkY7+74PxGPCyHvPGRtKnY9i8gMb3XgCxG1XsJok2+02ssVFJReXlMhwFJ3FWId4dvr5Eb3RuKwyeA8jLYCIkUdmwHZ+nIeRa4I7gMOSllML98jKPiu34wDp0RLzR0XxywJ25MOd3Y4iv3CuOny8BSYBYsQTgDzGHg4M1nSGU5IknLjM0jKV297rjw+stNPpNE9T52i7rZu6Y8iLrHDBb23bNj0RJlnKk1Qb01pNNrTt2hK/vFCc80W1AHRdvSpTyTyQDUwBAHRtTx6klM62GCwEZvsmGKqKDJOs1D58vGO9dRRi+45zjiNXTJXVFYGrtUUpGPMeCDmTKGez7H7VPSw/WxAkZQAKwVnrUp7Bodkxbmk4UgZfWOmO5sPjCc8Fnx/N/3NO3hCuZEiIHmEHjxzQASI6DJxYAGKBAAZle+TWjM3JsWcz2H5wourgCQfl7MZOgLnH1x38j7Fsw6EPN5ie8Cdsf6Law3P1k0eKfK/OGR0qnqPnjWNBjCHnBEhuBxg9NpzpiRf11E4AUEo656w1YQ+HOBBMjhP8R7PrcVhhqDKCYbXFsKsJkFI677S2RNT3JktDURR9WW232xg0u729LSe5YHyxmBnbE1HdwsfbJePJxc0Llea3t7fWvQ8v8PJqcXF1s6nbD+9+ImDW2tuH5Y9v3/37//B3lzdfEVHT902tq7wMgdq6EyyEPESiZiGEEjx24NWb9aQsyjy7usqM7dvWXE6rzWZTt+2kKOdVQogiUfGNdV23Xq9ns9nQpe6cS/dzaSjpjO8tljhqrVMhrbWcR2xlEgKVSj/d3l9d3fz04ydr7cPDQ5lKKbj3PpMs4ej6VmIgTsV0sphVcazfffhw/+nTixcvkrT88OlWKDWdX+jeVVUVa1mtc63uvSdkrKymd/fLXmvkiVDKu1j6/xjbG9JVbN/7CIdGz5Ayi36/lDIqv52XH8JAKsAYi0wOxpinpHJ4YzBa4dkI8+Hxh6OVAfwxc9kOCuDs3B0LzKm7Nl56xttYgP8UUf+jq8B4z1NWgAs+pvBiSIxoV0P3+PPDov1AfPdnfHEAHBCRvOk2XZNnielMVk2fPb952NZd9zB9IWVRlFwE57RxzoVUKsUV55QlearaRuum7XQIXIpkUi6b29VyrS0AkxITiYwFAEvMBuet5hCSJBH
SCR6AuFSsX0/zdNu7T3ef7lvLy/l0MkPpX371vG663lgI5ExM5AXLhZrM+nZb9xp4l6UsIHPeetcX5fW2JSJjrCMiEiAYMiYZsKOhQTxIbj01BEdr02ko+3SkxibIU2fe6TwGHIgN6oEFRIJT9Qnh6Op4QkIEh+YUhYOHHSb9WBfCyB3xX3wDBy8BkQA8BdyVe8Y2NSCEQIQD3ifsaWMJYmL57Hakrf8/c/XGb2C8H+DJYAyL0JoUxwKAcwJOgD7iaNFj3/HpteCcJXFkjoyNm6jzpGCIGJnCrO5pxzAnKEIMI98pWNgzyxMSUdvpWOMXQojeYTyt5IqnyntvtYkIEhyF9z6RXCnhvf/w4cPFxfzFy+fe2Q8fPnCZ9Matm7aoZjxJHeDHu/um06jEt69e/1xK59xms2nafrvdau2L6g9/+ctEJUWmVCBo276ruzLJUsWNcUxaAuBKSqnSJGRZ7kAS8vl8/td/9Vf/5b/9o/cghHA2bDb1fDKN7yGW8IQQVquVECL6eWN5iaZA9If6vg97AjxrrXcOfHDO2RAYY8ZZxrm2drlePX/+/PlXF67vVqvVVzeXEAhCcLoXZZlwuL6oasuqIp3P59779+/fr9dr40JWTFyATmvw4SKbPH/+/ObmRnAVMDRt65xjnKssVUlSNy0RUgCJwoNDopiUiEM/uF9jQTudouO5PSSwxyvJAHU2IJx9eUU6WgrOGk+Ph43qSHbpm+AAQNAT/tzZlesph+9IzMY/H8vM2QzBWZE+WgK+8C7GTh6MNPFT5w+MAxEGikzAjAECSYQk4dt1I4s0CJGkmUjy9ae7ruuXjSnaLi8NemBMpDJFgrZu0zSdFhNk/NP9w4f7W71azheLy5tnk8z1zq2b2v74Yy6LaZrPJ+ViWjHqvdPB2GAdCMmY8OCNtUUikiQBbN9/7jerVQIi4bJvXQjIOEjOfAgciYTgSEQUQHmmgncOOEopAdreNL12VCOKajoX2m511xktFEvzzGs2fieIGBFjwxNgV6fZuKeG4GiYiAhGCJxwMoK7e2AH+xlghN8UUgIK4pGEDzjHCHftD+fV0W0cVXOc1RuDdjkVy6NJe/Tt+HLjGsjxbB+rxqOJ9wU3GkZB2hOh+FIDIpwooafMwSNjdNgzqL0jlw52RjSN40XhTxDYsYuJiNGajm3XMXLlnXHORbUXvGeM7QIYsfCBHxKFRasVoOu6JEnSRA6jujPwIUJ8M63Ner2ONOJFUZi+VUoS+L5vQ5gsLuapkiG43gUO2Lvw7v0nHvEIm+7dh4/FZPrs5sVXL19Za3/zm9+sl8skK5IMfveHH7yn68V88bOfScnaRlvjJJPee9NbxhhKhmLnrBRF8bDxAFTmWZLlnPMk87Etoe/7pu2jxxa5CzhnbdvO53Mp5WQyif0SA0RZhGsBgO12e2lt7K+31g5F/5tOqywPnqRMVqv1er3Zbuvr62fvfvx9XdfGGJ8kiKgEzMr88mIh8urHj/dIEJx11r59+7aqqjzPkfO6bctJxVQWmHh+c3FxcRH9p23bGGOAszwvgWHXdUwK7+I8Px7/oSAZ90mQoxV4EL2wh2RjjMWQ5tH8iaHd2NnS9/0X5HEoXR5EgB2Kw1hYws45P2j+QQQk4Hk5fUpVnJp1p489REVik8DAoTqsNU9VYD4lRbQX5nOr6vn38YXV4fQpPBNAECggBQ6BoZdIEkEJRs4xxuaLRZJny9Vmuamtp8uKIePO2K7pMEAiVTBuu6khAOMiyTIUsjW66XvHkLiYLFLkXHeu3bZ903V1w72vyiLlHMkjkpJKqoSIemu73pQpIKAl6rRtut4F7Ixd1c3nu3vnPBD64JBAcSE5h0AAwgVP6BknIZiUzLuge9e21noGKIFJR94FxwQKxSGIsVLY+VpsKObk7LFvL9pEMLbghhTgeOAO0oH8cXwjVfowbLjDTd/NvLh7ysPVxfxqURWJkCxIhjtNxwUghx1hFPIdIwQEkIdlqE9
Usuw3wfnRzBnfwJnfioP3M/52COMMWoFiN+HJyYfZTichd/bEvD3iWBn98DyF72mIcrf/fJ3RgU88XCWEgAhAu0KiqOQiFAlQ0NZqH/7pX39zt20DV5oYAeN44PwN7+1JeaTYtQlDLbc1OpbVxTcy9GxESzzELCnuHeRh3QweEWPzJmOPEKDoHIUghYRAfdtxFGmScMY2mzURMY6cM2SUpqrIi6LMmt5zLl0IznlgKLgMFKx1wTsEqGbzqqqMdV3fMc6TNF1uNrrTfdulKgnW5EmqBM9VQhCAAiBwLpCjpxCIuOC3txupVN129/f3vXVcqCTNrHFZmikpyiz9/OFjpuT1xaLIsnq7SXIVQri8vIx8tpxzxnnTNNrYqqqWy2XbtlU1izmwvu/LXCRJQshX20am+Zv3H0SS9sa2XRdCeHZ9ffvpo+nspEiAiCGURVqUk+W6ns4XxhFwDpx9/nS7Xi2vr6+F4EKI1XqbZEVWTrhQGceiKLq+N8a8//RJa2O9l1Jlef7u7fsky5wNjAvvKcolgB9P9ZhCi/nIsVKAkUMSYQpiqDN+GCuF4UNUe1rrs9OK7dFexgqViBgc+Dy4zypEeyLAztQb5j8QQQgHQc6npvIXnK3xt0cq9+ip4JzPd7qN1xQcuYxPpetwn8P4E11JoDGAlscQIr6I0Z1grOua1998a4P//R9+QwicSSFnTd/121YSLqZVUk4BmO56a21qbTGfV1X1kgMpsayb2/s7w3iR5kVZ8sBDbVb3y/bhgUz789dfOWMpBGcsEUmRcOmDtt7YzjokUU3KWW/vG1P3fd/pruuZSJQCDjxAEIyTd8babegEC4TGWU3Ep3kWCdmRo9NOu564SpIEFXOgQ3CMJeNXN2gOONdah7jzvU71ylEsblB7pxg/4x9GYKovDHo8dofACRTIQRAYW/YQkB04P2PLjB2O+3BjQwvN0bo/oJyMz/aFqMDZ2z71Vk9V4FgKvqAeTq+7vx/4N21P3X8YweSePsvwZr582J9y9aPjY2vdwAKNiMHbiBTIOUfOhu5DFgflCe82crHuGO+UQsSY6Irn33tUu87rsizLsuj7nnNiDJbL5a9/rb0zL168WCwu7+7umPN5mUvO2rpmnF9dXW+3q+//8MNsNvuLv/iL58+fr1arh/vbpmnSNHM+fPx4B8Y8n88u/+Hvp0WxWq2mVW4NCOWSWHhiPHIulJRSzmazi83mV7/57YsXL9jtg/UBEbkQzrmmbtfrdZUnsa83SRIHZrvdEpExpuu6yWQC+87jOHUjWFfEUQshxFAnlxkRTafTNE1R5KvNZjKpYr8dEVkLq9XKG7OYTctCbtbL208fXk5mFxcXJbFe2/V6HZOFum85k03feRQk0ulsvtk8FEXR9h0ArNdrxhhKkaZJzMIKIQDs0JCAgCE81k8did64XXW8pAzeXvw/OrWDOI/1WVSiZze2x1uGvSwPwn1s4dHj8XHnrnY0CgUgAPDZdMYRY65wdzKgAfNl93ixrC4GYMKZnlYiiknRIes4PPxAL3n0mo6E8LFdlx9kngZRHC9bBwvxrmru9N8+YET7KrvdP4PgGBIyBOQBhEW0nnU22IAvXr5eb+vbz5+lFK7vry8XbFZ9Wna/+vGnzsH1zVcesOnaq6vLbbsWkgOQ9Q4Ycinbtvv08ePmgcDn1ewqm8xrax66bkNuac2KnEnSoBKNXOWpylLrtDW9IRBZ6RlveuNsQGLBWNvb55fXk2ImVOZBakpqxx8MPPSIDEGlzoumA3JpkswYFtZC23a9brW3gZEXzJMkVjA+1W0TPFHMMzGOyIhxQgQmgEWqNk5cEuPAOTEegoc9Nx7j+3+M+xAQGWIsgoXYXweA5D0DEIxHRl/cj+Zu0rMd+dzAP5ex7uKyurqaF5nkPAhGXCDj3FNgSnElkDEPjoLjDITgjLhgu6ZAFuuQ9vZdCMHH20DGOGdSMiEJLTEKGAIGQiQGwBEYIueRdzAgABexlAM
5N4EIMADQjrwPQkz4IUPGkfOBbQ4550IiEQJSCBQIImY2Ms548GHPacf3nH2AAMgVME7ICVkAFgADsACMCRWAecIASMiBCeQiHkaIwHbkfAEwAASIh535hwEYMsGEYIIhw0hmHoAB25MisigCsXKYUCFxBBABkQwPPrpdmrgo5x/X7X/+51+3Di1hIJCcO29pH4MZjJRBNh/jPVELMgZAbM/XGMn/AgByToiewBEEZMAFMe4ATSDBAkPa3TcSQQCM9xrpFMk6Z6xzPr4zxRCAMU+BS5FkCROcSz6pJgHAONd2RtvgA+s1dT11mrTvNtu14Pzh4f6b19/efrrN0nJSzvrOtHXbdT0iKSm4AIQQyDb328W0erhvn11Xn+9vVZaKXLEsCQkngXfbdV7mSon5bGa7PhUy6E6Qh+B/9+vflUXx4tlX6/uHNz++W1ST68W83dwH0y5mybRQVq+VJA08zfM8z9abdZIwztFb3WzXgtNsOvnw4W01nXW6E0KxRDZtr7ddkubzi8X98gElPqyXKMXd/Xrb2cnk0gaZZpVF17sQBFu1zWwymUyqutmw4K4vyk8//X55++FqPvmbv/4r6732/vr5V58fHr759rte97/59b8+f3FlvO2tdRR+fPPW2qCS7KsXr3/3/R+kyLVxAIwAOd9x3lHw0d4duumj6orde2wEOzCs4TGEG7FEooKInh+MOBmiUkySxO2VRVQoIQQMJBgnHxigAMYBGQEEgkCMIDI34ShxyDgbGgFjln0cmJScI6IYq+5xhPDfYPKNrEs4igidGKSD6joCghnfAJ2r2Tt1Wf6NN3hiqO6fkO1KDlFwHtFpI2ZSZCh21t/d3eneujy8//TxejKdpEnv7dXNs67rVpu1A8qqalpObq5dCOHth4fNZiMEqyaTqqqQoKnXwejNZtPVGzOfpi+edZ321gRjQgiJQAQCH4KzFJxkMCkyFCqfzQ2JjXad77xx5FEELxl5H9q2lRgkF4jYtq0UEJMocYAxOPCSIJL47KIQB3G/w86Eo1E4G17Gk9oQGoqpnhiLI+snzm/ODxFCD3+6t7gOME2G7x8tnkNvEs5FCPYHsNMnPRuNOH3ew/OMbvdc5P/f6iHBobH85SPxiznIQTrwXLHP+ePjT+AxHssYjmpg47XO5whO397YBj37mKc/HMcMdmUsJ6Nz6ozGnWWaxhV2CKBF6obYBT+eA23b3t3dlbPMWuu0i03TwFnXdbrry7Ks683d3d0PPyTXN4tEcKGk4Orm5kYIQQQPDw9K8gglo/vmZXYTAjnn2rbNUhXZ+wBgMpkY6xljeZHe3d7++bOvu66TctciKaWcTKeTsmKMdV3Td21+8SxKa9u2WRq9Ya+UavsOEWNzW5ImiBiI8jxv75roqKVpuqprKeX9/bZtW5VNm65nq9U0z4p84nTtnEsUA4BtUy8WCx9Qa51lmXvYKKLPnz8LqX75y18SsMv54uPHj8AwUh5yzut6U5ZlkiRKpbEl3DnnHDjvhgRF3JkmakjjRTcuDgQ7hJAe1oeous4EJw/5LNmeMyB687Rv+EPE4ZTx7HCoLMYzczwbd+shPu6Pplv0/MQgLY96Bf94effpNi59Hgvk6WpyKjnjI8PJdY8kZ3yS4TH+DVvkH4DTp2NZWsSeEmeD7u3z65vZbPb205vttsukCsD+8Oantpr98rtvOOfTchIYtKbvu870Wql0Vkz4NWy2vdb9/e0thLBYzK6uroo8aevN3ccP9doGo6vpxFutGM1nkzTNeWhZ8Ogt915SYIkok/KSJ+vWBOt825ht47VDJhMh0wQZV94ZyXgqBAS9XW8SxaZlIaXkxjHnAkV0GoIQgjPipLCCRqngLyyOR+2Pp7moOGGEPMNdNV6n4jaoPSB2GGemoXpwvyIfOvQ7xgg4vtsYjCXEwe85nFGMsdHCPmBp7lo2B612FEKAQ8V5ZtbxSGi1D7HirrZzpJ9H/++n2vAP9gZl2IVPEAHHYRr
Y06nsdHq8evRyz20YzputT07/aFvsz0YMGTEiQKTIdzisUIPMwjm1R4dVu4OKYufa+OAkTzH8NviAh5bH8NujdWn3mQskiHsdeesDasPbrixLrhIm1QD03Ftn1hsQPoRgOsMYW6/XSqltu9Zdz2ZTIup78/HjR+f7Z1eXaZpPqgCpZ4xVldjU7vlNVrfNaqOKVBJh9FTqui7yFABj+k0p1etGKfXs2bMf3v1jqqTzFoAyJYH+3839aZMsSZIghqmqmfkVEXm+elXV1/TO7EIA8AM/QASf8OsBCoVCCgGuEMBi9pqe7uqqd+TLzLj8MjNVfjB3Cwt3j3hZ1TULmrzKivT0cLdD75OVUtV6XVUVKt1atz8cywcJwfpd11Xl0GrOGCNNzcxFUXSdD8XJOtuXZbm1NkT553l+fHqqqsp+eHaOycnxuHe9rYosLwvvasdunVWoqG3bb7//fn9o6ra73WzW5TYv8v/3v/1f7t59k5fVx8/PzPDhxx/fvX9/s14HrAz6VlVVxuRt34dQGu+DHIxjvZUzlhPthynZT3mejM6FMH8Zu3CkWAaJJS9YdLM8D/GrKWGP/bZwpiClMEZJu4IxfAEn0KXC9XS6E43tZ41oeJ1cTKE5xYFg+UzVz7mbIaWkcy1kgnuTIRfGGcmImR4IiLjZbEQkdIHP8/zm/s57//KyK4vVzeaBAZ5e9v/5hx/+/OGn1vNLXWfV6v1vfnv/+OCtfX3+4up6nRXffvttVVVN0zw9PT0/fWnbVmuzXt9sNpuyBJ2ZY93881/++uHzUx9CkgWUsCEsja4ys8qy2zJ/d7POwefEK5KV4krxWvkV2QqciA8sxDnX1k3fD5VtQxM7rTWhIIgK/juW+Q6npGdyLhOwHqts+Llsged9G+YnEvU8HDXFxeMIUBNIFSIGjjgvTTSX6eKTJ9OYo8SisPVV+IFzpWR+cSIEXGEzc9hLucJkCZNIsTguzXMeEZrWNFmeD57pmlELD+J2tFzhucVFZnrt4v7DTFBIj3K+G5NzjMXndDKi3QwRg68r3R9r7fF4jKGA0YwWPnRdp3WGiFmW7Y+HoihUZoDwy8uzUqpcrzzDy+tuXzfKZHe398xcluXv/vB7nUFVVftjczgcnPd93yudrapN37vjsTkej2E5oQH6ZrP5+7//+7u72+Px+PDwkGtdFEVwcAZjHYxK0+FwiDscOk4Ex2fI01itVmEViFjXddiZrutwDHccelVmar/fW2udl75zzrGznllCVpz3nhnCE6y17969K4vs3/ybf/j+/bf14fjxx792be1c37e1JjocDtbasiy/fPkS0ipCRyEei4hGzA0Tm+jfEzif5H3i2AM1HFmKpJOolqDthWpt6UtT2J4ThAnU4Ri1MKf5EzzSsoSx8jNVPTgvEvNzH3JOki7me6WPTcOsf9Y88ez/QXEOuaJUFEVT113XklaPt7dlsfry8spO7m8enO12h4bybH+s//FPfxalv3m8y1flZr1yzMFswiBGw7uHe/EuVKN4eXlpmqYssiIzt/ePt5ubh/s1OHuoG2NM58HtjlUuiJgbUxZ573xrhftOBN/drh3T7aq6W6/2Tdv1tutsa/tt5wuTCfjDYct9uy7M0M1SQBEZhc4DikdghUOB06lAcF4odrI/aREvTCT3uQQzZ07pFwMZorGsOcZsZfDee/YgguxBSJgZgZQyTBRzLBSIIqTz5uFCyAjBTjp4mJBgjH0M9/izxrkiQaQZDxyjqCMS+6oDLKh6KaYtMuB0yGWj4nVoT58/4V6cNOe6NhSd/NYjTCMiC8OoYnKSUwiI0y6GhCIIhM6zG1vcxXWF50wkm/kq8IJSGH9Nq2bH58ho7IqAhHgWA0Xn5X8BoK5rIgohLdHaFlqqhmZGgX8gYrih77uyRDI6y4rD4RAyo8X5pvaZ0dpQ3zRd53vrnRdhdsIAcHd397kqq/XGdu3+WHu2N5vV7e3tZnPTNcemaV5eXu5vbzOt911
bVmsg9f7x3R9++7sff/jzH//w+y+fnxRhczxQmed5HvLukTSZLJhkFQ6Og5i6QETH43G1WrXt0J+2bds866pV2bR16NAEANZarTNj8qapb29XVVW1trfO1fVxVREq01mvdfbx8yetzH6/t3x4/933L6+7v//j33WW/+nPP/Zd2xz2q6pA4M2qenp68t7f3T38u3/37+4f3lnrv/n22+fnZ++9Ik2EzBCSJo2hLMts7yKXirkoIhKml0oz8eyCLJVWeEiBREYfSrQ0pDtDSdUIOTcl4swqM9A3HNAHYKjMAaNzBAAsWwh5exP+9At43glJxoo1ETcmXHDC7SewDgChRPacDF2SPa/kLS3PMj75/O9FUVj2gOi9L4ri/vHBe7/f79fFDZF6ef3cNvXj4x0g/vSyd//pP/1fi/862xVeBIRXq80qL9h5b12+vnl8fAztE+u67rqurp21BoUfbjZZXrVuZ4rV5uYedH44HmqEHFlrIqUJdd8f60PtBda3jyrL1kWWabNZVW3bH+pjfVRdWWpSrj12RDKCS9M0MXuUHHtmJK/IgBakk29v2E9SESCGjU32ORozJ5RukucXOd+EScQXxTIKdF5Gj2lSzUchAggQkaCCcAWERr6Wkk5JXxF0o1nOHuI0Wio8hOjMuAonQ8KZVTOVDRf3IWEfX0utWfprfOYcbuNmns4l2duLLFCWuealgcGqSsicyIDj60ZdQVCfPe2SKDDfLpiZEya7sbghk82RxHo2EdtTaSDwv/CVkB5XFEXbtrF+IyI677rWIigG8N5b77TWoOj+8cF2fdvWbd8bo72Xfd3U+8M6y1rbI4LOclQmL2H7/PzTT68a8f379w933wHpvu9fvjyviyJfr733rrcOrHNus67+53/7b//73/3h8fExmOyqQldVlWUZ26H+SJZldV0rFCIKwZyhjkmWZdvt9u/++MfjcdB4Av++vbk5NnXk7iHpn4h0DtX6RunMe+fZNW1vlNbaAGoB3G331eZGgPq2JRBm3j2/glJdc1yVRde1RVkpkGpVfPh4VErd3t577w+HQ0DZ3W43bnhAq5NtJtYNiFfmKa0pDKfMbELDJ4QCRh4ZqFDUo2j8oohAErcJ5+ImjnYFhqQn7YztCRIA6LNqZmm32LdImktIHtneZJ0TnJwIhinnX0SY1M42kRd+1kCm8L/wG4XNAaiq9X5/FGaldVWty3J1OOwEoczK7X5ne1F54YCoXGnbfXg93n781Htm5psiL8tS+cw2bd31tm8JZbNel0WR5/nr62td1113YOs0KWtt1xwyk4HOX/f1cX9UtS2KqixLJNUBNtbv9k1r3b62RbUuVmsymUGETDtn2Pm7fNPWtRUps0yImX3TNF4rIKW0zgGdOBBQWqExwujHuMeJsk/qUi3KZS1nbkgcgP7c8RbBPUZzxVePvwYHG9AQKipKQXTfBb4yJHGBiIhHQYRQBJeiNEcYGCPBqY5ogAnCVKJM1NkTz1vodT6Xz+YwNmIbJk8O9wik/PncuRf7s89DPiDxZOPokKAlsSPePx+ctLFMj/gS/BORCA9mjvHTuAPs2FvvGRAgBIQuFMueM7x0xy6h5ISXnyYz+PYw0Ngoi4AMWmzgpSNLFW1yRPQMbEc5jwgQe+uznLTR2uRKd4iIpDwLgmqatigKETDGOOcEpG3bPN+ExpzKaFSq7R0e2sP+uH532/d9SDY4NnVV5q21rzv+8ePH1+1e8LeI5K28vLw83N3mJssU1c0BtSGk9aokkb6tbzdrETm8hlhfaa3zXcvCSmfOue12a9S7WI4r9AcviuLHHz+EHH8RCcVcAKDI8sPh0LbN5mZl/2pDf6I8z29uSESOx2OZmaLI6UBCarW+ca4+HI4AwB5Wq1WWFU3T5FlmshxIf//NO+vleXdkAM9s22a1WjnnmqZ5//7956fn9fomqC5aawQSRK1NXgx10UL+ZWAqqVcvMj8+b90DAEFpgzFoM9oSwivCzbG
eZxAFgiofOzQF2BnoP8sEyCkpwjewPfawJKhFlgSxSstEdJUkgu6NY0IQ4ZyPzm/mC91SABcYocy61Maf/LWS2VdGakEriuL5+TnXSmttRtt6lmXsoK6bYrXOCnM4vCpDq7vHxv70p7/8lGnzzcM96UwJiGPvPXg5bHeDLquNoqEsU9u2tzc31vndbtc1x4e72y/bve/7uq5tpcvSbzaQVyvrdS+mZWot92JZOVZWM8Eg6Lm+72t7eH760naHTVFmxnSNdSLBB660EZUZ3wqD0jrLjJCqWwdR0wq+OmARyZSeyEFhpCVlUz4XI0I5qX0OAKROCl+E3fRMU8MFEcWsGiIiEiIJheUGKixIJAAoggIMkcvMIC2lwil4nfPa8c4hqxWu8Lw5NYdzm2d68fTqr3UsmTBRmMl5E0V50iniq9rbyS063h8tLpfwVAiTCByIGyWnQotnZccvbnuyJ6d7liaMiWEghQ0YGf/iJi/qr4wDMY3NaWMHuIB3xpiQ9AYAfd8jnnLFQl83Zq7bhsFn2pTrVVEUXde1nUPdA+lgIM3LIi+r190OERkpL3DftG3bhmAIz9wemuP+WGR5tVo3/W5VlJQVDw8P375/13fNzbpCxOcnFQL3hR3bvjCU5+Xzbvv6+vru4W61Whk9LDPYOUOzveAMC9H8PIr6Xdfdvf+Gmde3axapVivL/X7fH3YH883jerVpjlttaHWz+fyXl6enL998801nXVEUVaU/P33ZbG6fX75YB1ZEm/Lh7sYxkNZZlt3d3e12u7Ztf/vb3376/KWqquBBJKO9Q0bIsqwoKxE5Hpu2bfNsqEkCSTNYGPPk4kFE8p6ifwT4CUOKeYE09mpQqUV03KIBXM7FrGjonrMGOBfCwuehJucldpWyk0XMmwjIIfY0LgDOxf/5PE7BOYnTm4hiDfgJuE+MPPGeeXpyuOFip1wf7KtaRDxbRNSktNZPT09FURhjcm0A5OnLF+el7zvuMSuK3tnDsQGdg5K277N8xWw/fXldZR/Kv8uLzZqld5a9YJGb/f6AiLdVdXNzo5RiBmPqslp1TWvK1eO7b0D485e9s11RFB933R2VvfL+uGNmBm0pP7ruuD+aoy/LrqiqoZdjVqxN3nBmlPZ8pwW6tmbmTKub29uqqhiwsz5fecvoBQXQAzrXnjJaBnfxqW8OjpzqJAcpnO9z/DAXViTpCxNlQBzLN0jS8Sccd5Zlfd8XRVXkFdij1gjslTLMgAq10kSKCAmEUFBIgtDDHBPpGUFAcLDdA/DJogUAPMAJ4gkIKeDLyZknp1R3AYZEl41oOVnm4l/TbUwDDuf7Nof/ibgWu7FEQhC/NWFpEUnDnJXR4b4RdYkICYZaUAjI3nsZ6kgppXrv8bxWOyOwSO8dEP71w08mz46ts32HpvTeQdLXMJ3DBLvjiEQgzjkErwXjFYzieQxqz0Y4meyJjH6QhBlDSjeDeTMQsjjDPM8DLQquJmOMIhNCEEOAyf1jmefZ09OTCLd959gXWV5UaxGpW2eU3u52t7e3Tdvf3t/lZVEfDtZx76Tbuo9PT9axADVNJ8z7/T5TipQOqsnzp4+/+/67f/XHPwBl+7rebrebdfXNwwMiNk2zLgvHrm/a0K3aWlsUxZ///M9lWQ5RKkY/PDx8+vSpqm4674YmFah3u12e533b1YfjarXa7/c3Nzd//fDifb7f7+8fHrTWu8NxtbldFVitNi/74/03335++tK27b/+13+/WulQDyzLsrZrjk1jMrn/5n252rR9v90dNncbrbX3Yoz5/e9/XxTVy3ZrrTWKEDUiWGudP4gIM+R5TnjS5+BcOYGkLUNw+wWXZNM0Mpqmo/02bFp4Quw6FJQE59y7x8fD4RDS229vN9vtNlRdEThFe01Yg4iEno5BohsSqBIlShK+oHFmh3yLZ29irJhTyUvCZooey7T1Z6qZE7Y3f+Z85qnEGoPHTDagX+cssg8Rvd57gZBBDF6AQvktJaR
z33Hb2+2u/vzymitamay6uTVF/7L7YkLfkK7TOru7fUDKttvt8Xis2857L4y5Ua3jpun2TbcqypWusLqzTfNluw0NGM364bj/QGwPTkzrjamNyUOEWvF4Y4wRX4L4qqrk7i7TlGUZEHkvlnvf+bZtOydyocHpXH7/WSeeHlb6/MXP8+cECjZMSUSEQulhUopBCQQyF4xvgsJoTlUZr2gwl6EuBkyfuG9Ubiaa7lvGZCay5Fqfq4BX0CH9+vXbIDHpxKOUuRn2XByGGf8e8igQOIR0jjeesTQ5ffE6Ek1G+up0YhHf459GOX0hwHURwOBchk5fEX72fR90jjzPcSx+rTKdiiyBtuZl2bf1cAXQC6AQCnmhEB1jTJ5lWbXahETspmnavfv46el1uw9VGLu+sdYeDoesqgbJD9BoullVaAqt9ecPHwpjghkGEfd1w7YH8TG/kJNO2mEtVVWFbhW2tXlWWWsRuDJqu99997vfotZ5nv/pp7+K0Ovr7v33fzQvh6EZBViF/He//4cff/zgRHUOekaP5AGb3oqgiHgnRKRJMXNzrB1j29m6rp9enpxzxgzasLU29BsCNQabkUYK3TmAmTND6TFF6E39qXHDT56pc+44h6L4IYbsRpdhiIiZvHSCF3NSMwLGMnHQUfiaYsvXyMGc8815z5WR2mTPvqh+nk/xOvrNh0INAiIeRELF48ELJcjMzts4GWZhQBDxGKpeIAACaYWYZdI7L4z7uv70+aXQxjw+VtWKsvxWOhGsm+7YtixYrW+yLCNl6qZjwbyosnKNKD3Xu2NnLcP7zbYHd7T7ffPlZe+cK1frsiw70OyY+x7AKYXG9Fmmtc5s/RcAAPYgQ95lqI3yzfv3lrnuXd12r7tD11vUxhgjaFIgi9aGCZ29JLzPT3y+zynkzY0HMKHLHMkrRA/T6dXDBEauyKxo9OmF5NOxyqeks0KIXdFhcDaSpBxdojpLgLHWSyhJtGxRvETuQ0AQzwSsM7KOpz/JGFGJSYhOWMvpsUSTpwEk4sXwlTEyNf4eGdg5EqVlU2BMFgxFT2CkRyejCwgzewYBcOyjJyYVhiZbgUm68QLenat68fMgrZ+3jAEAcT5ZpUQ3HjMjkkhoFkGnl9OQfhEnFYG27azzorU2WWGyAo7H3noBImUAFQt4YesdIt7c3HzuOgYQBvIMJDTUKWXwjutj73m1Wq3X62K92XgvIq8vH//0558+fvz02/cPWZZ1R2Drjv5QbdYmy5mZCJSizWYjpLz3vW0fbx7LKkdApdSXL19E/LqsJoaEGHe62qxXq1Vgk6FxedDSda66ps7zfN+1xuTPz8/l5l1gDCbPuq5XSiFIWWX37775j//4f/QOqOfWsvPiRIFjVKbpbGd7IirL0gk5L75tnRcRDJGlANQ0zTfvv/v8+UvQDr33wsAIzIAUxLhrklyM9E5D2GDG6iCp6o7JiNAbS7kGVU/GIi9zTEw/YOLYhiXtK8JzuLKo7f3s1L1UmFpUfhfQ44K34+d66oLbc85ur7w3zHSCuiGkimWQWWJ0fudtCO8GJkRUQEikCCjjjFAA98f68/MuyzK3WRtSN+tN2IS6bvb7fdN5ARKBrCh1VhqjgNSxaY6tRZ1Xhaaieq37p92hrlvnrNYZW1/bBvMSnLDvvRPL7Jw4BC3ct01w52hC733fNSEmW+13wmg99449S+9Zozc6n2jPATBhJMSR2aSoeOWI020cDn1mr5fRy30FTiCproLnGTwICkkUKAIXmFk6YMqwz2aezD9ZhaS1WuYLxCsCY/quCNiTxcI5fZ9A+FVNdAr8OLbrhMuYPHk+zfF3pnDPNzDKBDKmTgb6EmIWggdLfiYRSGc1WXJ89VwXjMRorgos7lukkpM75RSGysHhFzLnEDgk8AWADJR0vV6HGH0v4r3A0FAHPKJBaJrucGzqpnNeNptNtbkpi+LjTx9ft/DlZfvdN4+khmLKYceKokAUIhLvjaa26+vjvsj
M3c1tbobayrvdLsuy9Vq7ro59doIoEHpVA2HI8AuhHFGKCnmEx+NxezwE05QI/vYPv396OhRF1bV7ICyK8o//8HdtZ3fHQ+8zUkVtwXXWMypF5Wq93+8Ph7pcrfOiUiy9R8/gWIRxvV4jolLae7/ZbF5fd0VR5HlVdy0DsaTG9gU30xxZJtdjrACcp3HHKJhU6GRmNVZ30loH9T2qejIKfYtwApEogUyQNFIqAAhhBBdLf14aKX+eaAwpyqXrXHzOxEsUb2P5eYxvni84mdVkjOwZTrF2gswQnAFAg/IUH+qBcUhSAQACFkXEQkqMVqQVWZHXw4GeVOe5LPM75d2QP673TXuoX/OiKlerR5O/bvf7wzZgJmmzKsqiKLxSbeeatul7Rwq10gLkmUHpoYpisHsj9cLOuywrfNeR1ut1FYrzkjKbm4dj3QsqpVSWF4Wgx5aIdJa7eYjKyBjSK9d53qWdTMU0OOcBV6SQ+K7RKShKGZahKDERkQKNqBBRwAcHD6HQkJ93ZtbDs6fhaMBcgts0VOQEwAlGXAT4+OHKnZcE0km+6dvHhE9MwDu+jmbdE8OvwYMyedSgJcnZcbCIFwHA0NTRCQMpQJSgZ8sybFxaTkpr4g7IbLxl1SmdiUhNtNyMIjpHAysKsS3r9bptGhryz3oM9axtX5alzoxj75133gspYGZBArcuFIuz7KHv1PGAiKvVqszLx28e91++yBiyr3WmlALhuq7X67X3XgCs7YKudjgcHh8eVqtSBIPydKzbLC+t4zzP03pdcW/7vs/z0hhT10cA6Pvee4+g2rZ9fHx8eXnxirKs/Pbbb3/4sH333R/+/MM/3t1/uzvUfeduN6s//OEP//v/+r+QMvudNZlrHdueGRBQ56X+608f+t5Wa8yygp13ru+6tu59XTd5AW3bZtkp9CzLMiLTOQuCeNa9MrjQzlr/pFqaLPme42Nj/juciDDH6/G7gckFX4+MtvG0D9+E9USAOeFpAp8poKSf9QIMvRktzwS0mbZ3Hc/niuCwBv+zIzNT6fg6z4uvhoT4nkW+RUI4Co9egUIhTV6EGJGR2bNjduJBOAeldGP5w8tz7e39ze1qo4ko06YsS3Nsj77p+14XlXVc1/X+2OR5eXN7S6Tr+vC6q22oJ8zACNZ66+ssy4zJrHXDISIAkOdQdIy9dyHGt80yZiCVo9GgTXOsAX2WZSrLtQEi57wn56KhHM4FFEwShONeXNm6iVSVHOIZUV4kbRNQOfsTqGBCCU1ZB/QiJESFhDDKhxdYMi6N+TwhYdITdkJ0YgkTcJqsfcDemRVFzj15Z5t8YdqLE0tnNRGB5w8/PSQ1lo5FMeKJx6iBhE4BxAyG8ylFfzaa7PS6C0h8ke2dG8zPOO45VHAqe84Y3sTBkx7KnDWGJ9CoKIQSg1mWFUXhrEVEEU+kSZxzrm3boiiKqmQBlhZAgRCHNkwwkN3g2wOhY9M5YZuZar1SvtdZ3nW2VBSskUK03+/v7+9ZAJR2vV2VRdM0GkHnede2Xe9CYUxmDhl79+/vQjRHiN3I89wonWVZZ3sRMcYcj0cai6dohR64qqqPLy/3377v0Hz33Xf/6Z8/hXCP4Cas6xoVWcefnp6rQreO903nWUxeoS52h93D3W14UV5UJs+sOBDHgL11ddM4P9i98jzfbrdd17Vty9wRUbDDB7KOJ+ulRKaVCr50bqiPeBHde+ltEQzmaX+xMlRI5CAiYaGxnUIqsMOMywxi1iwobI7Xms97I5zoAlwcc0yeAHd81BXOl9ZlSPcFfuZI6c6l7UiHAgylOgiJVLDxemZWygAAQyhIeIofYw2KFAEKhZZbzJ7BOWTomh5YKVU64K5pmr7rnPtWrVebm8xkqu2NMWUJzst2u62b7th2gEppA6i63r687l5eXlUx1GECAGbv/eh0CT6hYFgY8otZRMCjNhkg1k2b5/n9u3fKKM9inWe2TqAgw8ye2TkHQFVVwCxGDmbEfSI
9fXWk/GNChmDGRVLKzhz6+55xShi5LyQTYGYaGgEjpdNbstdj6DyXdPgTEYSzZs0T/jSZMMxEscXdYD81s4RMwZOV4jx/HPHMw5G+9PTMJU0xZRUcO1pc5uupbhTF5BT5x6JnF0OYgnnt3AyzsBVyNX0eExtPtGKFhK30OXFpk8ZDyepi/apgaBm2NCQwhLI0Zw8kgGAYRGYkD+gBEVCrDEmcG1QN51zdNpVbVVXlHVv2wgqQAEGERTA0NyeTAYAgOMdcd9zZ589P61z1zr3uttlNZYxBYRQYUtmsRQHDvFqtrOe7u8M//fkvTdMIUFVVNzc3ZVne3Nz89NNP3m8AILSMDkbLIOtZawFaRDwejzcP94fDAQC01kVGx+NRQlnqQ7ter6uq+uGHH4iyruuKqmTmzebmhx9+yPP8db8VrPZ1n2t693ifleU//ef/aBSZrGDvneO+c711QKrIjRPqPYsbgnSyLPv06VOIenXOmSIXBhIOV1JYTYEzZkalJx7PN5qd0yD/qB1O0DA8wY8qYIwA994t0vMI8HBOviQNY6EFWgHL2h6eZrMA1ksdLxfx8PqI2m784lx6fcuIjtMrM5zMNrpMMVHDKenPhIPOQQBgxSEKkiIvAo4Z0bM40Vr1XQvAZZU7lGPT1D2wwhdts6wwRKHLxu1tVTfd/vl1bJ+I2+3248ePXddFq3csUFSWJQAwg7U2BqZPpu26PlTeC1Lh4+MjADw9PQWVEYJMRFprLYLqQs/VqYj0BhV5PkaAGz6n5AzPrerpkyc1QkPCu4gQKR69TSLAIiAswHApfftnjkva3lcBLpUM0tVN9I9FoRJmXP/SNqZ8d64cpzAZ558yy8nmpGrfRJsMkUQCJ8F5cjRRpn7jtszXk659UfdN53bl+akUm+zqVDiW2RvHtOUkGwxtKNNjrQ1qVmg+oG3PngAIBBlFBLqus9aD6hEUklbKoIAX/stf699/o+q63qN/XOWZVsEuNTB1z8qzgC/zlV35m83m48efXl62Rbn6zW9+Y4ypqmqz2fzwww/7/b4oijI3oTILEbnehibsznGo7fKgdd8fgz63Xhefv3yhoiCipmlAZff39//7f/ixWN0eDvVqs860uXt8+PHPf3p49/iXH37Mbu/qwz7frG7vHrIs+/z588PdTVEUP/34I6DKi77zIirLV5uKyAm8PL3AmN3x5cuXu7uH1WpVlth7BwIudI5KWh+7heI5A4eLIREAkJZcTsE4jonaF8+OR3CNHHRepQ+TEQHpJBfSybkgM7YXhrq5uYNzliuBl6TMLIFwth5EYtOtIHURonceAbXSeZZppYUZAYzWSp/65J29JZEUzjSDi9i04MycgPuZDH5eefJ0bOCYhEmY2IuzbD0wAwMBEIS2cAwiIbgGMROFjMhAgEgECoWANfbsHIGuymy1yfK1YO68sVaXSokuVXGr83XHtN3Vn1+3h7p3gj2r1nLjoQPlVcamYlMKklAGKvegGQ2jQZ2jzh0TgxJQAoqFPKNnZKE8zzprvcjN7d37777L8mK72395fsmLkrTSSt3f3R7228wo9j2Cz4sNomIgARQgJK2M0cZIjJsNMSUESECEMJZBAURSKmR4oCIGGeIkceiiF/rnIRlBEiAZJCsEQB6dzwokRIAhiCbSinLsb2/Wv/vNd1VhlPiyyFGDUtqCCBISkdKkQCGBYkBklQsqJBWa2YV+W4DIQwfIU/xRCHwOCYsQ1oMooRwlAhKGqQCCoCCFZvDgBb2AYGhoR0BDuztUemy2p8LFoRNe8B4OsaPDz/Ah/AvzGtrxgeAwYQo3wTg3CW0Lx5/hAIhUyFzEocEujnRVxYuJX58Qyds+bHp4f8RKkVMzhyCWD2gSGj4M/TJJQDtUTnQH+uPL4f/1//nfDj040GjKpvOkslwTEcnYEor5pGAn/A9jU3iddKIZ5HfvY8vZFKNHXQdCAGfiHwFENEYjArNn9iEALazNaIobBMwooaMiEgA
7ByH0V9hbSwhlkQM21tVEUFUFoXbOoxChyU1JaEgy8CCOgVmTyrQCpKfXo2OFujjUjkUVqzWiPh5enl797373zfv331FmsnK9bRpTrTX6ruvub9fsHAqD4P033/37//zX//H/8T+3bPK7d7eP7192LyZDhVZhV+oSEdq6frhbP395+v79O/EtivPsNptVSJmyTD/8+Knu2WQrUr0yebFer9d3Rblpavf68lof9tvtocixqrJv378Xgpft/k9//ri5fXc49nlRbW5vH7/9/j/+6c/l7bue9P33fzg6/vi6p3JjqtXnp+fn5+fbzd2mWjlQm9tHk1Uv20Nnoe2c9+A9bzZ3xhid5UPyOCKCIIgHxYAB32koIYhEKkBZpghFurZRiEYp23fb3T4WbAqpYpj0goaZ4JgrYmeB+f037748fWbn1uuNc47GCGok0jpX2gAQC4ghlWUqy4bKF1oxgPV+QOQTcIYQRQqRwRrPjYRRXotR0QNURs50WTpbtFKmwuxbBPav3nMu+p0VLfu1Bp6bwia/ynl1AGa2bENyCTO/7hsg1TNnWdFb23tnra/rOqsqRFFKKQEVYo2YQ2R2JBPhFTx2VJjw8vC63GDo9XV/f18URegufXt7G01hY076yTsCAOpc7VgMUp9rDG/cIjhXbs4fuCzOh2xWLUxELI6ZAU9FHMIepxY2mEk88zmP81mwwF/RKlKRa37xVwGhn7uHv2ykiDZfQnIuE0UqGniGvPLJNPi8F8RXpxpvm8znyiYE5S/CfxSLr2h16QbG5aS6soyuyiwzzOw9O+dEILSXU0q1bescO9/DqVs3MLP1TinNIF1vAeBY15vN+t27e+c8AqCAtf3upb4ti6IoQsChAnTOAQ4vRZb1ZhXkjmC/EUGjc89cFisFqlqVLQ5mqq7r+t6R0kp5GKN1giLocWhEEAohdV1nRYUiJoiYZdTZviiKvCo/PX3e7/da67ZtESkEsoaaL7vdTms8HA4iQ7NAAKjrWkSCgzBYoYKdU4CC05GZnZcQIBdKhQUt2VorqNMTiR+iDgPnit3PBeM0BCatwJLCxhmcwMmamkJUCiQp2Azp6hPokcRLeYLjBJo1TpXTOdCf6bB/KwGZIsnE+vErsr0Jog4cAgFAFExLMaFSwaIYSgP0zgaj5bFuBJRlyauV0ppJgQrZ7iKIoEgDCqFzjlGEJRbrSnXiNAlm4tRR2miT39zcbG5uu67b7g59b6tq3TQNiA9FBkkNFd8Rh77hseAFzESQ03tx1Aa+dgSLBsz43VTYRznzKaYTUEpp0ETEPoTYLLucUp53tg9J8ZdUMhCY+ucmk4x3xt1OH5Kesiz5At8Coun+TL47/+ul2b6RC4bsRx4b3cWBo4d+yO0bXzaG8AzRHzho7wgw9L8mIuDT2idVkxYFjonEMKUeV53uE29QysMWZej0nskc0rpuPLafzfPcmAzAhkoRWZYZkxtj6rq21gePI5GmoeS/751XOmttW3c9EXVd197e3NzdF0UufRco7Mv+5bfvHzZl0dTHTJMI931bZCWyWNt5b283N8YYh5Tn+b4++q6hm7I5HqrCINiiuAf2YauttV3f397eHpq675x00rS9ylgp5bx0Xedz0CYXkbpthPKQz6CUKsvy2NnQ3uHl5WW321Wbm6Zpy/IupL3v93tS6nA8EvJut1uv19vtNhbSDK8OsT/WWus4RBgE7y8LHg6HLMsywNAFCXGwgXCKCOcBSpIY3mUsxJPiWopil8A7RsqEIKDQ9/tEYRYOnVJoibaf6EuekJEwQw0JPVoE3Aidwyd3ERbDh4l/flHqvLLsK9uxeGd84OSNP1fQWLx/TgonZxDCxjCpVNkJSm/9sTVesjxHJFDKVIUbPAFEGpVHZkARBo8zXh72MMQ9D4aghCmGBhGh6s92u93tdnDuKI1l/UQk+PAnexWsXMMDE3oUI0bm2zvZ55RkL5K/ZD1To7QiFYE7zCS+HWayPCavmOzSJDb
1dILnkY1X4CrlbVPeeQEzrwDVpf2Zf+USClzhHJEwIAAAOY1JREFUc5N9XvxWyjyuTC/dxsBySAhZRKRt22BNEs8yqn2LeHdlPnNfTvxKKlvD18aiZplenADGBCADeDjnum6o2xn8lsHMMBpghxDu6LoCABYE0tYxe29y7Ht7rBsRefft+0/dX7quMwr3Xdv3vVqXw6IE+rarihKAvZWuafM8/81vfrOte5MVnz9/ZNtmOe2fX+5vNiXIt99+m+XaC2dZNmjY2jBD3/c9Q9d1awBjTGgDZa0UJYpI3ztT5CKyXt9kWZaDiDLMfDwe2QsABT4amqSLyHa7fffuznvv2LZt++03j6vVihkC9TA6D2xpfX97PB7brg/JgiIS0gfbtgWAIAnhKH9nWWa7/gTnM3ku5XAyq2MHbxPmIt/KsizEQHjvtTanc0+ydQXPynLGHM2Y/wpLnELPafocXlM3GXsnMx4zWeeZ8vszpdevokF6JV3YL+N2V5AtFW+Dty9ksQgO4jMAeBHxPngvSStEbHthFtu1Yjs6ktE5EJIyfV8TUYghBWRARvEoPi37Ozn7CG2pwhesHM6519fX3W4XrEN1Xed5HjMxUvEncOXBvD42pkMcOhiMYtypEsokqWsyFgl6cgoL96fLwdGsP7QzVSdg9UgMZ1GagRzxBSlvjkt4HoE8AforK0o/pIc++fwW0LrO8BbfO71yTju++sZ5RCV+xXZ6Jj0MghAiADRNY60lY0Laauwgv/DNN4f7RlHs0uZMbAAyC/+59K40JgLH0Ib4p5hGVh/b1Urlec4a2A80NFR4EJFQrhSQcfQOCBLD4MHMlEbybdt+eXr59tvvtp8/7Pf7EJTY1Iem0OIdg1VKOesUkTCi+Lo5oqg//t3ff3rdEumX3R7F3tvb1knPaFwvnrMsY3bGGMfgAdu2BUJBDNKgMXlZlgI980BuWQCYyWgvXG02eVE45GKdH4/N/tiVZdla13VdVa5Nnnvh5tDs9/vb25VSyovr+15Qkc7E2izLWAQIO9t7LysgQCXSW2udYyLSBpVSpigD83DOCZzk7ytwKEngFYwpMSkwpyLalWMNI5SqiWFWkJrE5PRAyy6lkDGJZWLnjJ9DzLxehC1EhFQKPof1lCiky0i1vfhrWvPlLcu+RCwmQp8k/shLxpC3YOaV9y4RptN1dv7EYMbTEhFRGZPxgLazzDbLfV4UWmvPLAOu+qGpy/DvrKZoPLyJ2TP+zHKNqDrbH49N72yRlaSxa3oG8V5QMQAJggh6YS2YVkYIbuG0SGsKkZMzmoPEXJWZnawsHnG6KEoalAzLhFCuAkLlsJERE9Hp98WRPvw0Bzmb85VTHlcx7U4++dYbAenS/qSfrzx/ctZXljB973k6PJ4bdeePmq8GRxdsaFyu8zFVjlDkrIjgIpzAZdxJqdVE7UuAZKGPKCaqIZzr3yK8OIf0pZHtiYi1LuQ+B5eecy4U6R41raGStYgAMhF5wd56AELSwqh1Zll+/Pjhv/qH369Wq67r2INz7nA4lEYVmQGCzJi2rZE9AyqkrmktZutN1Qn99OX55XVnNBzaDlXGpLNMmF2WVdaiUpbZE+rDsbGOgZx10jvHIkoZpVz05DnvlfdEuuu6u9vHslyhEY/q8/Out7y62VRVJYx5nocFNsfae3841EQkWgfrUQggqsoVYugBxCKy2+1CyGjf9y6EaYbq3trAGITiebA/MTMqcwKwREQmUhPA4yGH6nQ0KXG4jkoA0HVd6JiRQsXIUE8khd1ZuZLAJtMyN5MxaHsx2Df+QQEynxowi0jyG6gZmznTipKwzMkK37jmSyNN1IeEsUVEwl9J4ZueQehe7VlEQjgshcakSogQHAzHEqpHiIDJJBSV9yyAQgpoKGYtCAweEUEYURQBCDghQHICACiAYymSsR9g0DBl8NGIiM6zuumcOBZEZQTIC5AyDCRADASogLSAhM+kx4LLNFq38RSHF0Ico54UZJw
r4HhV1Tvz7YXoO8AznheNsYhojMkyNMZ45oBaI9sjIglsTyE5OGOuKSmc8JI527uyhDj/eRWVFKImX/8qul5S9eYKHF7w7XHyeXjj9UB/FUOBxheNAi+c+faGR4E/m1WqbIXKIGZEZFQoPMRTwJIcky5top+lJAURQ82tK8uf/JzwVzkbADMyiqM9I5WowsQCJ2jbPs9zY3LmU2QpEWWZJgpVocNzube+6y2gAlK951wbUur5+cX+/jc3dw9aOsu+t9Y67vu+zDNtqCiyptl77wFIae1761Exc1UV7V+7uukzA7vD8XZToclKPVgjiUgQlDGV1q+7rbWONDnHIY0hnn61XhGR63onNnSCvXsPWZbpKv/8FHx1UNe1MfnN3W1oPeG9d+yr1erY1EopUgBEbdsiosrycr0a98orpeq6vru7y4vKup11tdZalPbOKwEiQlLxqJmBGZRKth2mMm4ElcB7Qim4CZxf0XkgSfoMXQ9xdNlCJA1jtBuOPqDIehaowfz53kMsRZ3CoiRtM6MCF2evyFynAhNNa47b18clDE+r2ly689dleHFP4kYOh0pDsLmIoDpVWQuT8yOJ8YCgSVA5ZscsNEXUwZk1FsDnpDfHRKFJF+4cd90RAJBQgemcBQfGGABiBAUEihAVo0VUIYLm2tKuer8Wv7K4yXMdMdKoCS2LGoAxJstIayXOEhGF/PJx3acdkIXpTTjf+XYta0tXrAgpI0xJLczw+Rfvz1fvv7T5eNmkMd8WuWod/apcOInkDFrgRD97I5Ckn6/QILgc0jIpXpi4DKcvkjH6EZMAt/insixCo7jg4QtlaKIiEqiK957ZingAcM6DMGiNilzfFybTWreHY9O1Nzc32B9YwIdKUkikjTZg8kwkFFMFnSkCBvYEuF6vQ/oPEPXOm6I0WaWpDmpTmKfJsyzLdnUDjpE0aQm1OrVmYwyzlGXpWJxzwto519Qte1DKlKvNP//wAQCyLHvdH25v8tv7zefPn7t+8F9uNrdPXz6UZQlMRFS3jXc+y5RSIbRVhvAlgDzPQ9RMiHCJ7jEOpTnG/Qyan+ACq5tgWYCiIF68HRfCSFl+oISBSVvrTriZ3B96VqTZ2xOL+mScNR6StNgMIRH5xKiadsqehLSk7D0CaCgtEz4HW2q6U2H2IQASZoJbmmCYXp9HbC4yeViiXPOLb9l9Gbx3I/opCi5vAPBjatSQ4sbBKESKyLHzA0ZkHjhoY5oUCIEO5bbYs/PsUVFmdF8jswCKyYwJbTBFCBFCv+lphA5aNyI2ACBpkw376TwgkdKv2502GXQ9Ke15EPdprGzJo4QmCIMqFtLvIv+AMwiO2xUE9lj4KvqN2dsImimJDFfUtJT04DskIu992/boCNBhFl4cSn9JEHdJDe/FpHVt+jkeemoJCYbcdAnpbfHXiBu8RNBl1mEg5a9z5oFLbZNTOJ8YJ+bMKT5z0Vd3FVTPSqBFnWieGhWOD/D0ukHMEgbP3vvn5+eqqpqmUUqhMq21pEyif56eHxcyX0X63rSe8kS9jmgbQvWmQDLzpODJbQPB4AZJamDKrQPABNOlMYZ5yI4P5VdC1/JQHiyU4a2qKs9DB/N2COsoCsdApNc3hSJs2h4EP3z49D/89//d04//VLfdw7v3yuRfXrfv3r0T4hAy7UFA2CCGEOX1er3vXQgP6Z3zDIdjjUC3v7kZtGpjeIjtzkiZamV++OnDb3/zu6br67rO85IP9c1mXRTlp6fnvnOgbd02Qvj//d/+VyBDRI+P3/z44dPxeCyKQhB2u51SRtB7YST48voSgjBVZtjDy8vrt99+CyxPT0/f/fY3u+1h/+XLfVYI0Mvr7uHh4e7u7tC0TiATadsWcOjIppQSHByloVR0ogid6v12XZtlGbIPNuSA4N57pfRE7gmfA30IRxObBhMRioTjiI3mx9rcQ/QlJvkJzjldmjRwJur6ESQmP1UQkmJIawLWZwkZE04eq8tMMHMewLm
IrrLk4TzDHPg/Z0yE7tOUJlwZB84nhMhLyQAUckLEC4uIA0ZGH9RzBg+MCJnJxUSpYsFaOJ8eXvDzpx9iiYT4K86MdVdE768K8pe+Oyff8bMkN8Sf0XOZ3jnoeMn9ARIXXzphD2cs8Or5TgA7qKM/A0ouj7QkSuSOk5m/RVX6lx6LCh+OocjBLp1enxQVS5PQYbLzM/icS5w4125n57to+4VzyeMSDZmsDhFFEFERnRGuSBbHKEFdFEXb1nXdBoZq1Fi/iYFEUFHv+Mvrq8kKZfLd7vl2swLKdnWXb8gLAipmQBQ3huyv1+vd55csM1prRqmqClHF1rghrEZELItvun3dhNRVZTIQco6JbCD3gX9Y70CIPSAqZufYWeu6rgNCleW99QJdWa6MUdQLAIWCi6HQmAgykXe99z7PSkTsWmuMub29j7sXTJ2IWJZlsAB7TjjCyXZzcQzS51gZ56ugHutQT4AkglwI5txsNs7xxMOVFiKZiK0RDBaBSkSMUgBAc29cWlFm/rX54ifvi7E3kzsnxsl5FZV5jtp/STIxR5jh8NJ/4c4x7P4UFUIIhMF7p5DU2KEDWYIcHQ6SmT0IKK3zIitKozOEhQo9MMPnM1UDlYSqK+cfgHT41Qt6QQYC0kA6aHiMQ8u5MH8Yu9MNoIMQ/10fcTITkX8OG4swEO6PqmFc72KeIpwLSZcmM9ES3nLE1219PwtO0l8lGZM0pvkGXhrpWbzln/zcf+fLj1uXtjyNf7oSufdV4Sw9lAmEpyO1E2DixkvJwvygJ8F7c9IRj4M9EGqlDJEOvVZwDHNXSgGwcz2iFEUWglyEvUIyKlSbGVUEnfXW/fThU1Zt8mJ1OLZeUGf5y3bvGFmIVO4ZBMY64AAKZbvdEpFWKjdmvV4DQFVVgKw0IihhRFTe+7Zt27bd7Q8mK0yWxTMKm2+9D9ph7ywAhEm2fdd0bdM0hCrUuW7bnkUAMdSnDrvhQWIpMeela63WWhvTdC1pdXt7S0RCyglsD0cyWajM2XY9jBSMQ7hA2E8kwWvwcCpu9zZkTO9PTzA00Aip9Pf39yFvPQJSBI9BEx1/TYXpifSZjvD2M+Vg7t5PASjicywwmtZXnLC9SfrO5LGLFGeCh/8led5knnOUm9588vfhBN8UikJRgDSaKAcjufeIqFVmdK6UAqDgIp6ADsz43KUxuRNGohO57IRxfvWZX92f+Z3zKxNtb/6cCK/pdycWwslkcCYKXNmrK/OHJXb1y3YDroqAb5z8/58MHI2xwfCTXh+K6WhNSQmhaJKKty2u+tJ1OD/olGmlezs5jgmTm3+eAHl6RpgEvAR2ElwwWZZFM1pcaa6VUUQKNQ014gOsMvPT84vJCl2Ude87D2SK7aF2Qh61yksWJGVEQKuMiLbb7Yef/squD84nozQKV1UVzJtZlimjUSsW7JwHJOu4KKrQ91UpZYwpy1JrfTwe275TSgkDEZVVxQJ13fZ933QdAGSmCI6kkJ4fLZM0NNDWREMU6+6w994XReWc6zobLoYKLMzcdd1ms3HjwITNLIqw85OVeRj/5a9w0lwezkluIJVE1DRNVVUxLys900vSG56zvTkwhNUtFMoLKi2fEYiU015DHjjX9mSssC4z/+eVB/2aOP3m8SaSF/Zx3BcYPZEkSZmAMZxQxmgMAkVAXpxSJs8LpZT3tu9c1zvvmYF4lKoYaHC9zFOmRpenJP3VTgroGPLHgL3ziKHO5NBh/LSuJD8vjNDHLuWaX90lSCg+hVylc5vkpZsHeKBT/BUCEhEIiaSVkRExRLQKwjR55m85X5kZ2X7ZY+eSHM7qIc1f/Ytf9+uO+RxkzO882TPftu3pDSlSz8nN5AjOnjCDOFmKep28dC5MXJK9BmKKCgEBBsPsENegNUDe9711nYAH5LwwK1s45zx7JCTSREqc9c5zpvu+b61lQCB1aLrNze2hl86JAzJ5yeyMyRyDzgz2XB/22+32JitWZSm1C+XmNaE
9dqHlnhdGUNbZ3jpSBhHJ6H19bNshriTT3nu/3e69k2q9bndHhmAsxbZtTWmDrQ4AyrJEss6xtbZYlSwcLDoj1pMgFUVxOByapgsOv9DVAUmXWWatraqqruuHx3d//vOfQ60WVGYIahIZlTyJvGARGCJNgOhIvjwkieNLcSeG9fHYkjCknWRZLknxs1MMC57BSQpsi0AVbtCLTAgRYSa2j7LSNKISRyUj3Bq1vRgFs8jwRJYDxNMevr/MDPXLxgSdTks+38R4A3NSBSCdPwsO9e0RAHmk8gAm1AMUEWt93zvnOF3fBIfnBCJ+giWKEGl6LAi0uK4pyTiXiRCv9VeLzsKzhyeRwJc4Smr0E5STkA6olAIeLb2jDk0htZ4Ez/k2nHOvlJ0koPImz+Vpkhco+yXYu2SouM5H0/P6P5f5TWS7KM53XRd9KgTE4w2XfHuXVHmYwUB6RvO149U4gCtfvHIu6W4TnIkjUbkMSlXQq5zvm6bpug4RqzLvOuh6J8AkLJIoMcp8eX59vN2QKZ5e9w8PD85DY53zmOW570Bnhe86Yww5m+e5UsTebjYbx7Zra4VjI3XP1lrrnfe+sy6E0nhAEdnvjiF1PSgl1tqub0K/3B8/Pu12h7y6RUTnXNfaPM+PXdf2XV4WpExdtyIyVPNCAWCkgSCISF5kQR0UESJtbdM2fZ7noRNFWZZ934fQ05DykW6sgMAbqHGgBkEzDi0drp9a1MXjKQfKQmNjHGZumma1Wm232wh+ABB7I4gIqZOfKNX8UkhI6YNGhHkkJ8AgJrwFvKYQPy5l4i+UpVDXSyEJMKPj8F9k0FKxKzmvMpMis0cPOGhnEpDdCwAIuyF9DUQRKESlNBFVVSVjedzeehZE0uFr1/Zz/uHC9UjOOOnMetr5eX7eyPbiE97SbwYT88XIBadq4rmF6nRxCPqgM7JIRAKEgswMFLP1EUOBdwH+OaB4BW4nG3Ja9S8FmEUFbi7hpglCk21cfuyFFVyaJ5/FoAyJepJ8hlEQCMURzBA+l7YiEhmT9mBkezIK+ld2YMLb8HJUy6XvAoCc71g8pgnRSJ4zVbXTzxNpDwAI1anMB2pCItIwtNrph3BB8HVdH48H59z9+lZrrXXXW88CyB7H3PasWv/44eO7uxud5Z8+/PX7b997gabre8d5pgUtaSX9oLvc3d0VRdG2bW6yPNPW2rzKu7YO8NA0jWMfLDd934coUxi67kGw77V1w8yosaqqsiy3261XH7/7bRmW1nRtWW7qvj8ej49VSbmuu77MV23LUaCFAe+CZ0oZYxik7TtEDL3Lg8k6yzL2AoLOuaIo6rrVOuMBZvDtRBgRldbBkDj0K74cYoZLEdHMzAjRqjymFd7v9/szqgIqkmuPnELyhErHEdmNClAxbg1PjJNTrBtH9FRN3HvzO+dAOYdvTBoFvCX+519uTMTwiWluPu1FtoQ4JjPIkHqtxjTtqqpC1ddQZS76EuZ69qJCMDnL+YfJBmLirJ088NKj3r5Ri78uLiTdzBQqUuhMKd0lJfXSAU3O6Ap+Lq76l8Hb5C3RXMPnzQrSxf6Ct/wLjUvnMmkwG2/4qm/vq29JL17f/MmJX8LBr17B2YgHMeIIOdfH0tshn6Ft26Zp8jxfl0VVVUWWxxSaPC+d91rr5+fn3fFIRLvdrul6L2J779gDUNCiYCTo1arMNB2Px/AEYBeyBoON0VobyqYEa2fXdTzKkcGb2Pd90L+11mVZKmP2+/3z83NomEBEXddlRY6IoQdL+NZqtYp1IZQywbcXNie+um1bEcnzfEwiakPwCBHVdW2MCdRpAt5vBK2z1KarRs756YcTDG/HMfkteBxjJGdYWlhs+JmaHyIMTHSqlMWEi9oSAilRBCLo+fRdBhTUQCIip1w9pMHaCwJgQQTF08BmRSMIAgqLF+/CvFr2c+tt2PHl/Rib+WLyE2ZS4WnQRWEkJZ2TyLo0mhlPhQBOx3ySFh0jALJQSKYbbwv
hmqkoxMFAqE14uPeeWYxSq7Ksqmq32znnnGfA0KZqeJIxebiz6yyRzrIMALxnoiFfLZ0nEYnvUrhJs+UGLVMhhP5xAEjoqIhL42AJ53C/AgBkFI7+DwQAnFZpPW0gC4fWmkAIgAzAwhgVCkQOCoYICEhQ18ZwOEbwEJRnzgujSMT3aBCAvbdkcgQFSgFpT4qZGVlxWIqDoaEXw9CC7lT9DyQJLZMQeXuWSAoAqAgRbejRjJh4SUGEc5VJ4rSfEMr5SPXFFBWdO6tVG58zhvueh2wAeJ/4v1MR8AKcC58Z+uJ3zXlw3dgPcPhFRhuViCAoILX33milVEZdA84pZCFkMr2F3mcNlMiZFQYCYI9sY9f4iWCRsswzDkOj32QAWSQ8FU9BHKyao6AtSqtRnk4cNgBKq7hX6QdFBpmHXRr7ErNwKkHK8KYwH+/7HhBu7+63260IffPt7758+ULaOOd655V1RNpklcDhWNd/+vB0f39/c/eYrx0e9nVd933XW84MtV2js+wf/8O//7vf/359/74FJJUdX18e/y//Td/sCq267vjwcPd8OHhCybOH3/z2r4f/+OVY/8N//d/+3/+n/5u5uXv/7nt4/ev6rmqdb8WWZXlo2119xKLo+vbhu9/8h3/+f96uHzo2nc2+bLfrcn3z7nbf8oeXD9nqHnV5aPtdbUFnXdf3TlCb27uHp9ctgvruu++Ox2NZ6eSwAqZkINI5AQBTrMv1XS9/rpt2YLqddd6XZVlUBbPP82y9Lo/H48hRTq3Kxvx1LSJBuBdgAKCBFgmLeBroRYhfZ++RlouEpbRroKhKKaXYutD4VOvMWp9l2Y8fP3zz3befn55Cv0wvnsGDjPZ2N4QsRf2Vkzr+URmIOM6mgFiTM+KqvMG0iIhpP6H5/XOtMzJkWJLU/paRkucrr4ZzfjbRfubSZXygSrp745JiPt+caCwKbUGIKFiQ0silxefMFaZ0XZHPpdNLCc6VzYkPTKPgAKZaFwBcMrr+iiPI10opGmtuKqWYT4ki4T8iIoSUCUzWKDJ0Uh3/tHDQ8DVxdREar3xlosDFc5xoFcmspimwk6NPdaAr713MJcDLvtjry4kIHKQdEfFO+OdgJSYW4+tAeP0hV5555VsTqoIXGhWlaw+FskLoZrQlxviLoGQYY/q+PxwOiJhl2Wq1yrKsPuyb41GR0Vq39TFTKsSAeO/73voMmFkASWtEFWqsE2Xe92VZeOso083hWBR533UaSRCMMZ7UoW+Px9pam+clZaXq8+OxVmT2+71S5un5SSmVFwYRbW+brtVat33vLMNYqbG31pi87RtnWWs6HA6h7vx87SchfsT6yBKC5y+YJeNWjILIKT4jPS/EU5sWWPKlz2H7q/B56c5oWUzfHuqyzin8mfR1KQg/hrRMpNTr0INjsTsACJF4LEzjRTkZlAdhltRZ8Y5LOC9v8Fpf2hc82S5OU01dXCkiXWF7sGTSTJt4vYVt45iDmed5URTGGOdGr3USIB4FE0SVTnsyQ5xFHiYFNc/YWDql+QxTaIgq//xdABBKNC2uazLP6yPlB3DOwgN90VojDvKB1to7AKLwD4O77xxoJ+xtUOaSHNCwGidnIRgAo2D6hvEWCJzj0tySeSZJXJVsLj3z+tzOkOVrHHp6HCwx8VRCr2kvABBs7+evZAgK/DgBmXlG4TJaXVrgomwnSZhSuH4pZRATISONsVq0uw4kiEhEAksoisJ7H6JXgnoBg5/PIWJZlk1nY+vm2/s7rbUm5fpeKUVaO+ckI2vtu2/e9X1/2O7satM7z4DKZETQ2t45p6vK9fvVaqWU8uwPu11uMts2WmFWGM/cOcsCChSg6azY5rjarD98ekJUve3zPH99ff3m3buszJXOul19ONRZWR13NQc1Rw3C9GpzU7d9WGPoDjGJUZjsZyQUMYmIxg5NgUbx2CQvll+Yc76pkHd+rJIU/MIl3978rGFJVI1kPLDkCYBFeI5FjOMzKSn
xeMlxptPFnMHiGKKyCMRD+YyB2w9kKN6Q5mSk70635hIf/rnMb3ETF+cMMxSNlWwmuBe/cgnV4TL1x7FbdNyK6OCNNrG4XRMAujTbdD6EZ2ZrnOUUp5s8WfIksXfxjVcSVNIHvoWRyHm4c7rJwzSAkz1ZzOyGlM0vQMv0fgzGwBR/4KpQNcHnry4tffipHOts5+escfH5kyOGr8H/2y0l83clxxceEto6gQA64WPbhBgERJQxjkhEJjsMS79eWoVcCEtZFBTeKPguYgrMQoeSYxqE8hC6WZZlrH6CYxusvu8DU8zzXFCFuLMgB4TcvtAnaPA7oBaRar16+vTx5eWlfczr3hoEUlpQvHMiGJxSRaYf7m9//PjJdpl4q/MsN/rm/mbX7PdNW6xv1rd3r617/rL/9OX5X/3Dv2maPQBsbm6BUEgYfJaRELZ9vz8cN3f3RcGCClEx90FRC4K1MSbP8/1+H0Jv0uWncqeMWddpUnXs0BRoFIxJBTF5LvK/9KBPZnQYU7kSlSD6uYnIXW0SsMj8JjeEhu+XiEDaKzHeM2f/E4CZdFdfIP0TYB1+vQSfiHC+8kvk9avw/fYx4XxRpZgwucjqFiXWS+wz1Roh4YVXuroHmi4ioep5mm6Z/hxAZEbWL4nPw6vTOQfleyBVMZElKbcPQEubgF+zY18427cyPFgi7jBj4bFxyUDNz4msjKgGS3LAFbFjznLeKBXNZ754uDj6XFNykOaNnDHC89fOzQnwNnSIwu8vsCCdvzEGeMadZM/cNF3vWVD9LLBYnP8Erc7ftQAVv2A5KUONKh0siXoKh4Y4fd9nWZbneawCGo7SORfiRwBAa11VVd/31tqmaV5fX/u+d73FMZ7e5BlpFdpi1W2zPzb7pq37vtLKA+rQfjzx9f72+2//9Od/Fl6J6+43j7lSWVF8en1ue39bVEpnx7rbH9q+k67ltnFEdP/bx9fX16qqLHcm113veuebpinWt6vNxgs79nXbW88yGFSH4pkxMWPxgMIuBY4e/dCBr0+qjuDgdDj5WSf7Pz9fib5bmpkl5YxxpGR2gkHx7ZP3DkbXqzFQ8gbb+PAW7+CKkRPxjJNPmV+yC2fbce7/lMT7Eh8+N0imn38uR4xEXBJT73WyLudeligHTfYRZn2U3kL3Uwzs+36uvKeTJCKRZXlkwvYWz/UKsZ5zmnhbCoWXRJzFfZ5s4Ff538jLztSvVBxRFLp5YQSeS+cLMwobGedkByag9VWgmhDKr2qH85sn70o/zA9orq5NQO7SeycHB2/AlDlyDdgHw24jkDA49tb7ummt8wAKUQlLiANa1NdTGJhfTKXs+VSXuP5SMt/XNn++0tSIN5FTQyg8EWVZRkRt26YGj6BMBDNaiFoN/j/n3G6367qOAEO3VGt9Va29swB4bLqms4K0PdZtZzViCPXUWZ4xEGlEZOdub28JkJ1dV+X7h3uD4IC9MGjFCK/bw/b1mJnVu/d3AkbpXICLav3p81+LVe69FQXb7YEFGGl3qN9/f/uyb/reDlZZnW/3x2CeDT0FJ+L1/PSD8SlWBE1VwwlAzkXMs89wduiRfkLyiqA1XrF8LIITzIgki3jvkSj6GlNKskjHYAn9h+NmhqjtTZAEz4NWFnYwfL58z6mezQzQMTED4tssS1dGjHWU88ps4fkTW+JExJgfQzzvxfpyk4CUxcE8bYMZHhU7UaTHg4iLRrz0CfOTu3IuEyCeAPTi5Cf3XBpvYfmL85k8JA0RCrWfPI/eU0RMfHtESHgxwGY07aYijogI6VPsa7RMvHGBb1ldNIxDIuTOjyZB2+XNSYWbt+xnpFNTWPUXIj8vGDk1KWEfrwqCF/BO2r7zAoCnuDuSs4iFCQTOFz7n6HAZbBYltktwe/bF5EUpRi+uGhEFlfWONAkqVMYL1m2PyoQ8IxCwXnrHnfWISJoRO6015jkROfbOOYUDwbXOrW7Xu+cvnuF1t/NesqI6Nm1nfZ75zvm8MFl
RBG0ryzI+dASwrgrbtd+9//budiPs6r73SB5pX3eHhhnUan1rPbaNE8D1emNM3llXlivr6t51+2NdlFVZrV8P9e/K6vWvT54hK8pDdwj+vBA6G3TZ0P0gPawJP5Mhwpyj9Bkt25NjijkDE2GOBydCUrlitJ0iIqD4cSxKXZcQ4fRrMtsRxwe+mjLRyEeAl0Eohc8UXyjU1VoExEtDLo+zhyQXJ3+9Rgx+0YgHlr4otXik2mequS8uAc9DgKJuGs84VeEXR3zRSNZ15MHxCTwr/50Cx2RpEya6eCdcYGaXROPFeJ+387+/5fhSs3ts7gVJ3uFkpFtxZSYTUji5cmU+bzmIyZ/iSOEqhZxLcD4Xtiaru65tL77iqxt+CTAEQYBYUBidQO8CdqAgeJDgav3qPsAMm+Cce6XvfSOETAByMqI1b+KlvkKaAgnGsd5jcBRhIpLGXOSYkhgKl5RlGSorEREQORZjMuvZMRyODQPkedG1vRfwws57EQmxWjJUPoOu66qi7Nvum8f7oii8t53nXqB1vD3Wbe/K6kZn1ev2+LLbN50vVxulTZaXWVlkRVn31jouq3W1vrG91yZv2w4UrVYrRCSdNW0PY7MCY8wcrxeRJf01UsX5DZeCISenPyenExI3gZPFn1dgfoLOUzC+wIzmvCkMHRNR41whybcd8tIS08HwuDSW9Ny2zrELO6KIBKO51qeG2qO8wMEePbEpT4ISJz8vRXZNfC3xCVGKofP62vGv6a8iElP3ol8XzjWACVWdHH/8kzImLhNGnSZAZNqKTMZ4KvYn+Ft8BSRcXERIh/I8o86Nwz+dmTMoHJ1/Mla9w5k9E8+F6zgxHNXxSNDn+xBhQPg051OX2PNnYpIr2jRNEEuZmUjH7lwatZASJCAiQIUy9N2j4LcYHO8AoLUK9pwZhT3JuamsKqkkdm4QQ0SFA3zGUFucRQnNifIkfniCbKmYMv814gucp11eT++Nb4z9lmEMSeClWvDx+SmIiohnhwIMYow5Ho+o8tXN5j9/ePr89Fytbw6WnR96K+bGFEV+PNYpxEYUC/LcHB1SdDsRlmT34pTGCZ/xV0ziMxf3IQ3qi88XkQAScm6zERHHXFSldW61WlXr1X6/d+wNZba3RVGwtfvj4VAfldEhmztQrYy0UopAvPcsSKQRBcWz4O//7o9fPv6E4u/v7zUpcc+r1arv903Xrlx2bOpQ3SVwaIWYZdn79++///a7f/+P/+5f/6s/Ng7qngXNbndgzkiXL6+7z1+eHx8ftcnuHh4/Pn12Ap8+f/nd779zrl/d3L5s91lRfPv9b45NV1ZryLK7h28+Pu9CZ8Su61SWVZtN7z2MMiUk0kP41Tm3Wq0A4Pn5+ebmpm3b4NiTc+uXJHGYi3SP04ZEMrixArxprYVdbFsYXopKR9djfFoE4/SwMHGrRZREHIoGnwmXY4wSACCfsYBItUK18Ql8MrNCBwA6zeNOXx9Ue0jUlAFYLxCCMzBNID4F8TmRhSVGMkGD65QoSbCf2nMny8arVqw0YGmytF93XJlGXAjOTNWLX8QLOtD5u04XUz4UxySBbxrE/rUdmGDXadpjQMeEoilQsar92apTY1pUGsbKZyG0eoT7U8PJ8+1a9kPDOXBe2rQJw37jenHmpVg81vnZzcnNHFN+wcAlBWvyIgQBYAZCUIDEgr1jDwiogIgEFSDzcozAnJTMF3tpr94+88lzrty2+MUzWQRP96QEPUoPcu7ICMAJEJo9B0rFLGI0CSjHgtY7FmBxDFrj7e0NkISkwACoAqi0CmofEa1Wq75t+t4ZY+q6xVyZ4ubYNJ8+vzivyur20NQAoIxWikQESffWv2wP+WqXGeMcepHd7lBUNx8/PymjW+sFqKiqtnV+1FzDdqU9vecjarox/GpCbOcXJ38CAJEpOME5DKfqI8DlonrnKmb8qS7X1MXErDp5TprNHGnaHOMAILQ81HzeVOKUzzjrnDBMNAVKPHuonM9
miDI6b78J52wyBbiI+RNQviJ6Q+K9S69MeN5E7v7qMczn+bcPnLkVceZQnOuOS/O/5plbJAopz5sQ96modf7Ft6xrokyfljAs9oyyEJHCod3JIvtPJjPyswH2Bs5Hk8rXZ08QPG93HBXf04aP11N2lYpWEyJ46Sgnh7LIVlPtecKkIaERqXP6OrDNOeUlGJtLe6e3C59ChxQhovPctL113okCAEYSCGSRvZsGjl9ne5M1wrl6d31L54h/ZfMXv75Ix0FIGAk1gnKW2YMiA0KKCIS8E2FEUAih4g+aLBSDHZQSYSRyzrnQDcRa661YzxTojDIP93fe26ZphFco7K0L+YAhtQBI3dzcHQSapslMsdsdONdFUbS2/fD5WeuCKeu6Rshnha7WZVYYtGydP9b9bt8+3JVt1wPQ/tjcPX7/f/zTnx6/+b6znfey2dx63je2j6EMGCptnjcdS4E8VhWPkS+LjGERtJIrKDIEdQRtD5MjiEU5rhzWHInOYPsyaMU7JYmfkHP/+hw4J3TJ9RZiBwY45woTII00a0JoJmh8/qrBzKhGo6jMvJewJKnNSVL4ecmdxrPu5BMcw3Or6aWTSBMb4NfmeZOZzDdkKlssWeEXvxgvXgriShnc4gPnzHhOMb8KvktS4eAAnxSGEJaY4BGE8dO0l5JbcYjaOmWhwgJzDScLESEvzXMyJppovBKL510SKS4ylSX/a7xhcvrjFi1X/5pPdb6EOTe5wpZwNErToIgTETGqzvX747Hp+l6ME2JkBgYR57wwIJnF518SZGXJxnsFfy+NS/swEbPgMkanxCRYF4JpPXwOhtwoS8W1EIXW5KCAUWk91k4LFLmzFkUESFCAVJZlq5Xpu6Y5HojeE5FnqwgRBiMbEZVV1XVd3bRKmd3L6/Go3r0r69Yf6v7uYV1UuTJkHLK49aYKZWK6zgqozoJn1fRWkTF5oTJT123RtI7ZelcUhT42IZVQxlKWxhheAhIYSUSs2jUXziZ7vkiZIdogh7SEQfeKX49bilc9BTBq279gpDA/0RlgRvzn3x0yVc58e8l3OFEnTzwPgJ07bRCe3g0xGA9RktTI2M1kIllPoPYXM5hFbgoJvk3+emm7F5kiXrWL/rIxX2+kI3EssiiMRoPUuBfnH+85h4NgJ5yzPU4ami9O8u3S98kkwCcEmOBVuszo52NmUKMvJ92fcHYAocImEQWm6RMR6m/Z/GFWo+ErjROZ7PmEiEPCwybItvg6DqbEc59fOn7WQubK4mRd858wE2viwuOVvnPb7dZ7L6gRQ0F7IUQQ8N5rMikwzHnqXAJYBKSvnkgKJNfFrMkDJwg1xRdCFtFaCWFrey9MRrOIUtT3XdN3lj0jEGHonSmhViohoeBYQggRQ5iC9wLsiEK7G9BZnum+axvrOqNIKwxZc8FJ3HWdZxBBbfLeCbHvnINs1TSy33fOQ14Uq/V6dVOSAWubotBO3P543O4bx9Q0fGx80/Z5rtY3d9Z7ZfLddp+v1n3fC1CYW+qDv+IejrEUqVNjkectQtE0GHDAdBaRJNryzEH+dmA+A4OvIQQmNhsR0epU5SrV+Od0fhRrAC7125tfWZY0z4EVQgiMiB/9nDD260oLYsF5JsAVEg8J0l6UHfxC2Xi4QFCuYOAi1fsVB37NyCnnZvFLbHuyP5MbJmxsrjRfUUrm+7D4isUtXVxUKn+l4BgDXEVC5M04t5mRM759bqwfpzTztMHsnsS7I7OZTyr1zLdusjkTyrKo4qSrHoxCM/Z/iXl8lRFeumECBouAdzY9kFDUu+nal+1eUJFWAJpYBERhKEa9bASbQ90VYHvLoq4vZ/6nS4iQjuHVo7BLREHSDxFVABCiV0LcRIyjGQLxoyaAg1QUehQIO3aiNAFg7zwzK0RxloS1UYiiQAgldLY7HGovan9sjDJO3LFpSZlyfb/b7V72dYj1QIKiyJRCADCZbtvaWuu8AOq+98y66yyqvFib/aHOinx3aFR
Zdtb3tidSIfs+drbz3usL1hdIwoW+CnVzSjhB5xG+Q5UfgCQmdiKcXTJaTspsJu+6aI1L7EYnJQG1ltmA88SM+ZLPfHvn6wTmoTh/+rgEZ8annEnKMObkDXsUjiQ2s8Dz0Mr4zMlf02OY2/0WMWFCyhcQ4KoMMgkNh1+Eq9cHXjByTmY+l6zPyMoFl5jMTKbp3sA5Q4IZmfjqhK+MlPTPFjUVQUKp0jzPleJQk1NEAATPldfJ82MyECIiqgS10ndNlx/Z3pWFxA1flPwmdDzeP2Fvk5fOheh0iy7t+XWou6wsTlH60oFO+FCw5DBD19mhnxkEixkLSGgTMpn/BJUWXzQvlTdRTL+66uti1mQVE9I8ic8aoSJ08EDnfIgKRvQi4Jzve+t9MHvQiCk+yFkQlDZEUEPYoVLKsQsiuLeubdu67TKTK4UmU5ki5KEbQO+5bbr9fk/56vV1+/j4qJTaH5v729uO9YdPz/tDk5cr1KpzHSOjFmPMsT321gMYlZm2967notq0H11W8Ha77XoOfQm6znrvj8djXlVFVXqG0J8okNkQ1TIBQjiPeJDzQPfU1RdZYzRKp/SfmdNedQN2j+LgJBdiuEddCEVMnDJnSMHX8DQYihhPEctMZ0GXEaIW1S0AsDaJ5JzAXPr6uH0Lcu6CgWL8H2IIbY8iQKQXE3EgJZdxv+a044qQO6c4S0LEV4j4V3nArzjwDYaaRXk2ru76blx6Ps6sVXNOHyM/38j1F7W90xvhjD2ISCgQZYxR5DBpajN3SKYRYjzWDAzJDnDOiiBhgQvp1VfZHichV5NFneZ22RV6iWUmf12WutITfyOIpg954+l8ld8zBNXEeuGwF957wNAPQ8K5zJW2+ULiu1LWON/Mt48rotj8mXPBbn5kgTRHA7skjqi03HxwChARSGi25Qh1LODp3fBY733Hruu6LNsYYzKtiYjFhSbdItL3fVO3hS6Ox+Pj46Mi3XTtN9k3n1/3nz5/AXSPj3ehhKb3VoC9p5eXL4dj9/jwHRH1vfPOFUXlvReBl5cXBu2cy/K8bjtmqLu2WK2MyVvV41gF4kofxFREiMwplfUnMvEEBeRUOn9eDuLMQTN56ZXzjXw3TcFkXl7CeaDi6fmTkMb45xCzKmMh8ng9sHOtTuGZAADCwsIRtwNTAhpa6gWREBBQBECUsLCAYwAoSDGzc5a9D2kPpAkRxXYi3otXnJHWqBSgAkQenVGECELAyEN33bMooGGJM5J0UhMZcGxZMygNkYyKiDAmgB5Z94TTp2g2Z7TpDangM/kizKhY+rSAWqkndZAGlEZSSqE4FBEHHP5po2H05J3mAICgFqcdns8MYelju74pY5uDXUpHRIQ5mkEUogrzjZEp8SgSyPOIqEghIohnZvCnXMxwXSMpQINkSK2L5n4Fvt+LyfM8F89aZ16ECQQ9K9bokTQyEGoMgYUiAEykQ9fegOEDKNMgWsnYf0tgdEKf5+QMCDam+2itEZCZVW7G5QggMAuzFxHbD/08iQgkoGXIZJUo5MKIOsERhsOOBwgEQAghO4H5yfAGAM8B7BOmMsboJq7ZybnMT3CA0KSPdloaQ4/GnwlkGl15a20P66pAYO+EAJ+fn8q8aEWYBSjo4N6TqEx7L2cu/PGfBI8GIieZXpHERJthlCpcEmF4TlXP0pNSKjxXNDFWFI0+pzH+YFAFxrSEePoZkc4y7vveuYxIIdqmIRFN1B2P3PcZEYgokWxIq115wKABCgoSOmbX9zrPnHMh3xEEiaTI8jzbqPVv/+mnH0qdfX7p/+Ffff/hhx9u7siUK2utQ2FSerXpTfV8rP+y7fHW/vsfPpqH+y9fviivb6vfPLdtC8p773tnnLndfONMVd5R4QRY/vTTP1V333Yi24MtS31z9/j50xetFDj7cHP7+vQkgHmeE+q+cwFQx0gvmVjvU42HKPrDoHWMiEhaUEC8jAa8oDgiMI/oLMwgUho9RLN6H6hLKI5fFtl
utws9bGN5awDIlEZEhSEojQIMQUiPEUEEEGbPsd8KaoUpQR/nbLQWEdv3boQuJUBI3tlUlAGRUEEmMyfhmPnUEr0oKwDQE90L/gVGjG0ZDmBQe1Mp+OJ35Q3e0X+hgYnG+Yv3Zy43XfrT3z7b9MN1zXVRm/lbJjNAUTKZ8e3TGqQhsDvN21tcxemZCeeekNRfZd+iteeSWBAnc/2Nkz08oZWc3fAvNOZzY54af+awF2hT27bW2rPouMQT+kbYmCtYX71/IiP+Mu1wonfG2UYsCF69uY4eXGJR1YuSqKKTWCkxi+bylPxAUcHyqSiXiGy326ZpyhsJWfB937dtu9/vAzsJGdyhN8vYaZ2MMVVVhSgVpZTSRESu68P94bFKKSB0zqHJEDH05o02s/QUUjrwswApZZOpghjekrYOxbHwzfzgftZ70zv5vPR/fPJibMfc6BKBah6JHQPIITYemnOXuRnh7bRmrjnFBEljjIJTiv7EKjJZagrEl5jHrzXmNqj5e+EN5G/yBFjifIvGor9xpGCRcpr5bBdI86+0gQBTuB9kvMSOsVqtyrIMytbpzoVgkxNUhO/GwivXA6OvTm+651EdmVQIgnO0n6iPi1rI/EWLv8aLv+LRLzKMNC9CksIRgkMeSLhirQ0Fl0WySTBBeiyLs025S8Lmv8K9JngxScScc+g3jrQUUWrXCjnjE5+TiLRt2/d9yvbCLgWZfHKsw5yTPYj4td3v9vURCx0jGBFJkanruuu63lkn3DTNfr+31oY3AkDsPh1qmvR9z+zzPA8TOxwO3nttVNu2XWfzPNdaDy0gjGGQvu/LvICEW8PI4+P2TgjCla1LxIXTyY5qokStPbyltX1q60odWJM6nPMwMThnhxN6G376GdtbpKKwhGipBJAmzOCsVrOePPQ6d7myfanEkSpJE5teUKhPAnu6tnDzuYwZP/wLaaKX4CB+XqzecmUyqXR8RaSFX1v8nwBTYitfiGn+VQTtpa/MHauJgZEoFK0oikJrjV5Ofw5WK1xuSilj3tW/BBikwVYT2XA+DQAIrQlSprIYOnv9oH/FVczPLjLpufQZ8EvGUoIAGPsMiDFncWrJ50XwuKwcT3E/vX+OF4uThLcJN5Pzmmh74UqMGZ6IJl3XxagHOEefS4i5eGqfP38+bPc5VgCglNH6VPSRmZumadu26/nl5SVMJmifIauvaZqu6wiFmeu6LooiMOO6roNFZLfbWSchUaHrOmbWWntha2117nNJjuC0fFgqsnh9P+PTRinzFBETMCXUo4kYDUn0Bs9q908Oca7GXfo8h+cUkOaaUirbxXsm9WJS3kTpxn31jN84JtOSpIJzGuQa7/mqRvJfcswR4BdMZlGimaz3Z9kB3vLG+MC5rJqeyCUW/ou3Ws4HzKA5jmjk/KooOhEh08fK18pXXpnkRA+eYEu6D3ORMz5hvpMTkrooL85x/lcc80OcINoEzWNDNWY+HA7zcO4r9OjK0lJ8WSQsc7y4DpzX35s+IdUtUnibCExxhqHnXKoHLK46hZY4yfRPu+2+aZqgNWqtUY2NMrQSBNs79tA0zfF4jM2Pwp0AEFS9wJgDRwkJ9Yi4Xq9XqxUzR59oWMIkzHIOh5hoe9Hj9VWom59CfGPKQiRxmk7kjLQ4y/yZcJnDzcVNnA24QLhSjpjisowmogn3iQzo/wcwr8OUs0LeIwAAAABJRU5ErkJggg==", + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAIAAADTED8xAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAACAAElEQVR42kz9WbMlW5Iehn2fu68VEXvvM+Rw761b062qnrvR6AEQ0E0QFKwhoQEZZaIJRsr0Jj3pp0lmepBkMhpkFCROMooQSKoJoNlo9Dygpjtk5hn23hHLBz3EqTblQ+bLSTvn7Fix3P3zb+DP/U9/qoCqqqokACooWUKvKgAAyBIRsCiCirElAEq6+/UyMiUyPSkcFS5Q640ssFTVtInSzJp1VbVGterWtFHEzEzVWmu9NxZEVESaqFes6+rbtl7Wh/PlPDxHXS7rdh1mUBMRoWYyqiqVkIJE+NbUjktvXU0BoKois0pGcBtjG+eQzCoiREwhDBYsM0UUZSwzQtRTQsSEzVAiFE6aEu4+MlE+mLGum29rVWl4RaUqp25tMumhjdKMEpTUkggSHSVjBJJSQKKqSI1yEds/56oiCSCGswBRjwIYESwgSZKNKMnMQkQEsjpbpQ5eqkA0k9Zm2Ak2yWGal/vj6aDWaK1VAUlElqa7RxYA602WzlYAVBWzMPH84cPT0/Pzh3j66vr41Xl93lpYh0DUEResAjW3QFW2TAeTYelRGd2gNMmV1CKSSTKraKomkFJJEgWSWpFVJaZZqhRtUgabOU2qk8mxlt570zZTuxYUWSJ0VG7p11zP5WtcnmI85vXpcn5cc5QEkUUVEckYIEOAEhdXVRYAj0pInW4WA1nlBBRAIVkKFuv/7/STRGaKElWAqO2PSsysNdk2z0ypQAGwIodfqwpFFXELUsnRe7Qu5qLKTVZrS+9wC7PIjHRvrYmUmSWILIWMRHjBBV6+jYrM9AiqvRwXkACYSBYCqo1KL5R7gEKrQlaG59jgGVVUqAmTopBKzUJVAXRPsFiRKCUr2JUiBZFwAshaK9KTYwt3j+DY6IOZWZWUMDURUBIoVQMplQJmIgMsJ01gxUIlBCgWIKUsJIqkCCK8igUBQQBAugupZhAACEkgCFRCTEQks3JdWzbARoZzVaiMRhXv8PUS00JVRlqjNQune7XWpmWSqQl4mOab+xvtssErY13Xeep9Xpo9qzJrOEdewpGwikqLjtLIzGCKE0RaFSlgwN2LyQQ0IGw2mRC10kKkxIRUFoogWRRPFFUoFAkUSBGBEAAiX+5fQMFKRCSSiKqI9GLKGJHuompmbapg1UhEJgFCzbwSqCREpJiQMhUmgNTeLJlEGcnKzNR6uYtIxU8qw/4vQAAiAiAiqgoQkTQzkh6XcCEE2IAkpQr7J0Qqihs8oqypmbWmqKiqXobKykhjlbQGCJCRsX/nikowI0ZERQRJsCICAPny46kZUFCYNey1CclilYCZhQRHRAYLBEWVRFWgEkWiKjNIhoeaRQ4CfTJVMc3wyCHliYoq+tAIjjVHSIxKR0FEoSyiRAvw/eQyq1gsIimwTMmqqkBp5MtXAKGieLlqsgokARawf+iiKYSqFVAGaSJSESEigGSxhLGtsAgXZjLLM1dHDcg57BznjvMlb+5O05TzzJzCrDdrfdJ+dzicjq2pmc3zPM0tWNfr+ny92LamkErpNbjGFNh0nMf1uqXT0EBWk6BnBKAKAYgsgISSUpZ746CSCohBJi0CGAJVahUJUASAA5UVFQAaDACE2pspW9c2qViSBYEkEembxxY5uF0zNmRWpReTpKq4l1eSEuUFkgQpQGaIkAKyuqn1SZuYwiksFLIg2F8Evpz73E+YiJDc/87c+x9BsSrVZK8SHjNLwJFIpJBa5RTJFAIizABQkMwaiUBpMfHSEAgAJQnJKKiwGJUR9Nzf9hij1nWQFCGSJYVKKaEppbSZZ5ClqgCEBsmIZCJLMopQIFQNQEVFQSn58loXySqIYGwXVbbW21z
NUgV0JqMqfdUx0mO459hyeCKRiWIqm5iI1X67qIpw/2OVUimRQtGMyCySBdsvNsCzUCgl9/uFVQQoBWtAZpKt1ARkaGhDlJimKgOxl2dopVcw4AEvcfhWvm5w2DPK8PTueX0zjreHw02b5jje1t3dab5ZptNhXpbj7VEmg1RvWklMkEUvF43a3Le+TYfTIjq5e4zqH7brh3VcAo6XI+GWlSRBQIUCJZRIJRPNxCSSKd0oSqaICclAlQikqhqqIsKLomo2qSUyCTGdu4hIIfeWT9jNLEhLlNPLx/AIVGpGIlnF8qrMzNT9LhFSiUqyur0UFhGA5NRuXt2aFIoAKlAle5kFQSKrivKT1pPcO1SyRAxZ6UFSlShUce7ToA93VAKNVNUCRKWBSWYlPQpSXpE05palgOE6WhNlQ1SWFwVKASNybJGgZ7nnGFtViCigJEypzcTILkCWlKhVlYkWiyQrhVWlGYwIAqIgFcjMYqqY1X4pAyxUzlFn0VwOh76gLdmNFQqiilrmgx61Dd+uW4XtzSsgun9GomoUKQEFRGSJQTS8sgSQqiBE1SpZSBEhMmGJQNV4eWBaSCBbbykFKEhVUWMxAZSUmDJSNKVJAeEukmXVFBWg00dpI1vLNcYYKOSG9z/+6nq9Pq+H+djYT6dX2Q79eHfq82SL6aFRoZSqstWqygfmSdelX7epr1PmWVQ29ZNOvdt6GZeH7fp+SJjEfn2OAtWUlcygSCNpQomQlCZqZKUWQRoJI6GZKC8hBFEAqZOKVGZkJPenqdIELiJAAkhUVonpdLCqovp+OUpZePEntbTJT9p4YaBUFeYpIMVAFrRrO8zt0A1ZQu41JzMDxZeysTcwBJiZJKsIe2k8WFDV2jsMpkgRkfAAWFMmUNm0V1UxySIhJqCJoGTvqDKcaRgjkJuVIaKZqLR0B5CZ7r73Wt26nOR8PqPQu82TWSvtEGMQJeIxRPeXF6IaCCUqMwEzrapAvNSMdMrekxQorEoUS7OGiByPergJO7hMjGBtpFCSvkWW+ojt6j60nBQpUETMhFollawmWqzMl5mWezcPUVX3yEyRBFRIFXF3AAQKUNUqeoQ26b2DfzWAiZimeFUAUJUSqqrDIUlj62jVt/CI0JRsqObcUnQKoXegmm6owMOH81Nsd3KzbP2WiK5hyFYwtKmXFCJV1KIGIjMTJa3sUOZlEnmlDofJbNZP2g9T6Xn7/JIBYYhE0UTSlCihoylLqqqEalDNFJZSKsF9UmKwBAgIzQQmIgqke6ZASmJ4JjUqARGKNFTWy1OVQtJqmtUH070iQVdIsAAECvmCKJAswgQwI9USTdSm1qd56cveEiQAIfbHtbei+5i7P6EMgCgBoyjg3rSyAFQKWa0TDC2RMUcgIpg0aYAnEtgbDKoqkHvDk1HUGptrL0TmFfO8ACaSIVXC/buw0Kz3U0/44TDv3X9rOs2iTZIwulfSKsr5UgNXqBSlPF+6SRCQTHpAoVlIAsXKiChqphTkMs3z8d6mm63aWtCpHaJiOJ08Dx+brmuMrYgGoFCqIgpIiUIFSgIoIlGAkKCQARRRJAyIvxoO8idvCblfelmVfW6ttZLKDCGLqMyqYgmKkGIWeM1GEtoEWlGeBK0MqFGIUMnIitxKxdIk1AFGWsEEvZU2QMrTE0EyIsYYbVoyfd18bO5+TURUrj5GPtOGzqKyhUO2Sgs6zfJG7Dl1/SoqUiiNjeKiIQQQaKpCLQooKFYSBJEZhKhI7mOqVEYVSoVgBVAFgTBr28b5kpOUAkVRKRKQBECkipWISPWmsuioDSEjWFuSEOE+UGXmjjWpGooiYl1Npfc+WWtqpqoREREQeQFVCpm+X2/c625lFVgZSVGoyQ6uECZiSrUesqC1CWp+9XUdvuUYgyosyxBEVRVZKFRSlIBHoRLhUJZwYJV9aiwVUVDLTKT6fq2LiCr3ykCpqOGVhBQuWZVZqAggWWwoug+BkKqVXhpMZUpVVohKpkq
mAxBjnyyIYZMeSm6cc8zzHFv1ZKq/T4WgVIYji2osdzFNAnAK1ERMBRDso1MklEVJJgIl2HtMQHWKIojAqEqKogRklItUM6OwKqSEZPDl164qBiuLxkQqnTqVkvRKd2yiKvvRkOQQwjAhJ0oSm8TVMUltPqmc7o+3t7fTYUlIjG1d16YdqZmX7ToycN02v1yv69O6Xq7ndVwu6WezqNDUDBVVYc9RFZereB7ue5O6PkauKQJRsqgAzRzR2Ix7d5Sg7qAAhAVUMUYos5FrFUoqvFStNzQx04IksKZP0rSrqEoxc6AUACI807dQdtLJ/VqJ/czuvboUCCZZEapSG4iiEV3YTCYrk41pYkpSlBFRlSQpEgOlWQFIZTnVyCpElGQgk9baPiiwsk2iXWXuy8H6rIRl9ss1zk8R7sNRXpnIAWzumWXaxaKECRYQiMqoQVICJmqAcNpPr3aS1DIxqjQxCkHyfD6f/XqJcyYFUVmshghlJiqIxRA73h5SESVV4qYVklQaZS1l+e001dw8x2dv3kh/9ywf2mwfvV7Wy/bjd1/d33/y9fXj7z99HzmAiXCwEhWZJsomaiKSRFCtDGBFUbIoyEwWSbAxan8RVEj3FYDKVAhUVhaR3VTMClCNqih2IVgVEYmiGoRV3kiVOdwRPsqlobFDGFNplqqOdC+nqaSVJ2VoiUCLoovgUHKSaloYHsft4mde5zRcZIuxuY7rdn58l3Fxv665eW1mWkSam7YJiMgRwm1jAQ7bQpCjnFoWohUpWdWAMBqJqsHK2qt5iWQ15RbuIkWpqCpDAdiCZNXm6HNvS0MrTDV1MbOpmVDTq2mG1+aMKqx1vcZ6uawbtuuW2/ryfilMELEfZcgOqUUNiVZa64AIjFkQwdRpmdGamM3uuV63MUZWqIjsu6WsYhaz9uNUMiqVYEIo0kobZMrpMNnSlpP2JtOs2mS91vMZ6/O2nmtbz170bbgkXCszUgAhM2vsM8IOgUhZFkAT0oTa2FszMwgFL7M4QGSaSSdTdYvyIIuJDO4IbAMkib0jqkhT3TGoMho1Spzo5DR3Oc1lnHK7e318Oy9XvGr3h+f1x6/v3jytsWJd5CI05IVxBSycBqZRpdSEmkDt+zsgM0NVqRqZIraDTpkECkKhpgelWBIIVr1AIUro3iXs+EdSsyiRVTvGwKqsGAFPV/W9sVBLZDGoBaBaCpru1ZWIzBiQrWSSLZMT5KT9VkMGiSyOsc6m56fL5SJ1jcv1+fky1qeLjwczg7nzSit3jxpoRWVUUZyoaVqKyNwEbWujq/n7zOdEEGKSVJYKCyPhQmFJJsBwoYhVum/BMkgJIqMchWCgZBGIsstyO+lsU1NrJczM9EwhUVQ1X71QHrVtEYMYJaUFlqdk0TR8kACQSArTQ4XpI5OBEmOfe29qFCuMTK3SaWp9mnyM8/m8rQN0SpFKyIjIrFRjCUhP94hJxSbT2aR7m/KwpDU9LG1aWuucpuxLbXM997w8T+vg0CZYsfnYGFkKahPj5JujKE0CGRFiUhmZSIWxAS9gsYhm7pdvRrkzXhqLGplRBc9wbF6DMUynIFBQCpWEYkf9k1QYKGCbzU7WhJNhkGs8nU5f/3e+9+++v/74P/+9Pz28/uh7X/vuH/7p75+vH1xMcBObm0vKgLBR1EAWhaoiYpWoQTYrorDDbJqQl82JCIDKpEKoGVFJIUHsVXefi2rvE6WJKQKidI/MEWuEu5R4VkqlhhilgV60ohVHVgFt6KLMlunMoSIpPTC6NGmtTapNBF4YEbKurAyVed18fVq3p6fz83N4lYS1i5ont1Kn5sZRXWwipMy6qCWvffZClpSTBmArXffVtppWIau8MliKEkCrEpSCRoQSVRKejuoUJiJkZOgsfTIoRFtTnXubu5mp18ZWmwz3fZuSfZ58G3LeJENSIAgkRCg1xvB9TIgKVkFoYOeIRKSSRmwZFpGUFNVPfvpeRFVVBNZknm2aZlEdlV6Rmaw
iKpFEI6hAwfdFTRLt1KZFpy7LwVrXaenLIststujcVRWQqiIoKM3MyH03kTbptMwwtdZUxZlJINPMBFX0UQHA1EQoUo5AFphUT8kgRo2oEbENHyPC4VtdxFK0QC+qqTSxqn3Kp8DAHacstZpvlpySEjdT//TNx29Oh+v43KbpF77xq995/a3t/VfixTDP+el9Pn+5oug1TFSUQFqbhCWEqApVCEpBFNQkKUbRBNRUVKiyw6ncUZ+fbBeFsu89SSGlXgBoLRJMHzlGjLXKS1KYLArjpbTXDj/L3vaqgsUoAUX0ZWRklRRH79Zn08msydQaqaKsqpHx+HR+fDg/f1ifn87b8/XyeF0337anwqiUjJFYSxwMSIau3aY2taHPxW2HOYXmGfTUeNnmKVpVZWaMQgmhO6q4nxgTKBAjt0hUr2xeESG6tPnQptlsnqmwxj6pCkW0qYqhTa2qSBQcTC0WWKkVzMhwz0BlRWaM3IGgApS2l899gyRqbMKmbWrTcZoW06/99BtRUoJCU2lTm+bWZ21mVA0gMhMgtYqVVbI/XEOIY1AxL733bksdlkObrc+tH+c29ePxZjocI2345uFRXok+zfOhtcX6ZNo7lTaJNJQAkEqwpBhVUVlJipRIhAQlqYAUFQlcx7au13V9Oo/rFr7GmjWSwzoFsCprs1IL+55OTBQUF1FFaNHEDjpbE2Vp/uK3Pvupm49ubj96/Orp177+G//wN/5XP/u1z/RJ3n/+8NUXX3799Te//LcPOxjDQpXb3EmqwlQBAUM15IV9pKJGiojY1Ptk3M/A/pPszXCAVQAIIRNIKVCouq8s6RjlGNuoUVraKIRgX2OWJHci0Y7gkmpUq4gkRUukJJNgkalu0vrU0FALrbE1y0JEDa/reX16OD98uLz/6vHp3dN29XTkdqGAqtpaaUKSksIhfVLth75MU9cJIkNFMxAoL5dIA5zuDg3FgHvVjn/vYyiFrNKsLIZFclQQLSpDRgD92I43S+9zaxM1rZFKIUdEInfwJyLBqsjwACcPJAylBkPBtxprlY/M8kigAMhehcNDoCbaRLpYF+2ija1Tv/Yzp4SrSrPWmomKiLRpmqfJmk29m5mIBpBFiDiRrMoiBRUjwlo7HHpb6nBYjgfTg+nUjnZYltNyuhVtLIwR27Y1ldvb4/G0LKe5d6VpmyxZAmpr6ZXhmVkoIl8IIQK2YlcBqZUYI3PEuKzn6/VyXcd1rCO3TIekyr5LVWPbe25Sda8gYEJUm0qqqQlE9ND6ocntcV6or+rm177zG3/nF3/rtR5uXn1889Ev/cLP/dbX7z79nX/2T3Bdg8u6YZIOIDJ0EiHMSlRJVqU2UpSYiqQKyWnq86GrESwKBUIClZXISO7YfxazdF/A/4QXAFTkXmWTqCY7KF0UiWJUwZGVTIUTkKAXSwVUiJUYqIQIBKKpqmbCpmJqaqwiLTy3NS/n7fp4ffzq8vjVkz9XRVFUskyERmslbV/isfdpmu7ubt/eHu7mabJuRGZlZl19VIV648Zy1CaSlZUFsJixM6Oo1AJFLDw8wLJwZGDfeHlqVLZ5nnoTkzb1rEikmoiJSDVT94iIzIoERUnLSEmBi1eMKvesgVw9ghlpkMlaiUApZuxqTfsySVdtqpO1bv3Q9JOfma213idVVVWKQFTEBGI29W5qYq3PU+8z2VAFYhAlTJLhqKo+83BaWpNlEZ05tfkwn9RmlTa3ZpyQUiWtt8PBlsPSp2Zdp6mpCYVIZFZGjHEN30QSQsjO+NoGPGJ1+Fbna11Xv25jva7n4dvmmeXbGCTNlEqqqU0UE1XdX2coRArKKisYtZSmRnDq0+nY527L4Xhox1/57Fd/7mu/sET//I//8PIXf8CxfuMX/tHf+63/TTxczt//N4wVdiNzt9aHX1sryn7LQATsVrAEqYShTzofJptV9sW1kkURyciIRAkqXwpCCgUqiEImMxOFgkRV7oTQQkBLzZFVWZkK4f7xp4A
ojWK2JrCSRqhQVIzGkiIIVqmayaSlW65ZzBFxqffn9fzhLI9Za25DCGksiqVszbI10IZriPbb0+uP3356e3Naej8cTjfLq9mOkcjKbQMG6G1cKy7ktpcFLyAdmqoGkX0pK9qaSHqGZ9AldzylOIYwxGNrJn1u7dBSUk2lo6tNsxLmkSVIVEKjKob7GJJVXiPcURmFKF93xNlURERKUE10mXqT1rR16820yzS3aZmtm7757L71Zi8rATNrapYFYRKVCCpMm5Im2UTmxt7AQgEqQGF4BKIZtYlMnKw3O7Xp2KyZqnDKgujUDkubmnVbltl6t9Z6pzZ41tgiPce2jm1grCMzMjO3a54v43xdHzZ/utbTsz9u2M7jfNku1/XZaxu1eaT11lqnYOoqpqoiQlOliKgIhWUVIKgsqinMfbATGZ/ev25JL/yNb//GT03f+eov//J3/8W/+PM//u//7H/4gz/7g9/5/l/+N9/57l/79d/8xz/zzW/94Z/+tz98fP/p20+m3j88fdFa3wl5qqCxRBwhmhC1yeZlmqZmvSm5L8hU2v6SR1QGQCECRUIKQVZUFTTTUelBeYG99lUyq3KvDdz3qYXITK/QYId1pVU1UkWYJtLEQCmUlKo2CkV6KQpVoTV43bb12eO81Vp0EfSIjRJijRpqUoKy1DbN0/H1zavXd/c3p9f3t2/nw83hcNt0AjUSsQZ8Y2KLsXllaG2oACAMMTNhAK7K6dR1CZ3mdqCa+tD0LGKNTFDNSgmiH0wnzJOZ0IxspiogMuGxFekjI0eOuoxt3VwCNQpZ6Ts3kdIUxmxkE7VsXczYDjbNRiuZqbPAaLP2Q9PbrzVTmomaalM1pagQKpVSkZVZWQ5m1l4PoUKKqDbwBeEdW2aKmYq1w3w69KO1Wa0rVLSbaVF6M+uAlJiKlXXIJEHZthGxpbtfY13X1YdfuW4edvXYPCN0czurbtJG5QgfHheKgEomTPYL3Vqpqs4KK0tQrUmyoNp3dlTtbWE1MFjq5ajxjdcff/2jj1+Pu9/81r/zoz/6w9/7N//dw/qejuVwfC57/v4Xv//P/uO7Cd/5W//LX/qV3/yd/+z/8OMvf3z38Scfnh9KCW3ampioSOlebITCqdvp5kanpkqRLKKKBc+gj+GelRQEUNiJfxk7w3CfgmonBr60Q9x7YVQRidipQ7KT1UOYAjahlbKhFVrthDQUiVBRbaJNhSSiCpFIZwXGOcfFt4vnOqpk5xgIILYkVhEWA4q+nF7fv31zejXdvDoebk/Hu9vjq6nNUWXaJWpLjNxip+lnRiQieirVVQiCLQGcjoflruuxHQ/9eHvgyfrNJJ1eEYQ1scVsatMJot5677OWBKFlzoK7J0tk3zxlJisKUVIWKVlSUYxkUVSgsI7eWjNr3djEZmuLahfpgqZUQmFdpnnS/pGWkFpiKiKonT8nUSAL5ZWeXhnIqMyI8AytF1o0hEJBFkduAjnO8zTPp5s71gtJgCJmXbUlQJVAUKkqfTr0PvuI9br6GGON63Ud2zo28TG0p3UCbN0OJ5xuTBeqlXZVM20iZqCJqomqgWK01nqqUmy2vl8bO6NVMnbWpzBFcowRsOkwTRlXq/Hzbz/7tW//bTxefvAnv18jTtNNpr66ub+ZGm2Ktf3gX//Xdze33/yFv/vzP/XX/tP/6v/yOCqaVJPWRbrYJGxVpJpRyxqWuS+LNSMkgJ2CK1FIxzZGOlRMwKwEUVUQRYGQRCWquK/kWdh/ft9v/wwKFeBOkIJYgckSlqmRXpqU/XoKSJGKgiiBqkQmPdI907ldwtfL9pR5VUahHFICiVEWZq2KLi3N6rAs96/f3n70tddvbl+/uZ+XuU+tJCGCApC+xnCItVltZktPXy+qEOmKrgghptPST9bmNi3z8dXCLm3ufZkOh2NbejtObZa+aOuTNUqHNTObqAiskcPDRwzPUSg1Q6qkVFR5sXZ9U0XUtjmoUjvcKtZVTMRUm0qT3vW
v+oKCtN5aN1HR6UaQgqyqRAnBysr0BCoBB0t2UUlFREXFyLQdb/KxywxSrTISolNvvfVlXvq0IKGgiE66TDaJ7kKIzHRVsWUy1W29nM+P6xbrJS+Xy/V6HRsPi928apE+zXo89uWGy0G0SZtIlLLpzglFy9zValQznWjNWmvNmrZuKipCFYpWie6SqjSqChmsFvj47g3dfvb4jV/77i//8e/+988Pj6fjrZai2eWymWmbl8Px9n2s1z//V/fdvv43/tEvf+/nf/+f/59+bAjNpReasCElCIaQFdOsx+Pcu4mKqVQlIJAeEZer+xiVOzUILwPEjg/KvuOTAD1dwMJOJdv3GcliRILKyqogZf+CfGmKFFIhxWYkKM62LzuZOVTlJ1wuzagcFQO1jXFFbOBOLWOhFNCAW2NvLTJs0tvbu/tXr9+8/ejVq/uPPnp7Op1UhcqsAiojxrZtEVW1tHbTT01nkusY41rpkBo29cPtPJ3YDq0tvTT71I83J2uqDe3A5bZNBxNNbRSjdCsplaACEpt7VCTTY3h4VmWkghEVm2PEtnlElCIDFSVNQIiBjdDSLjaZTa2ZkKRKiYiamKlJ66KHux5bRuzzVUUiYgBZERVZyUqke4RXZlVmVkZF5nB/Wa8iSkNV19yaNRWByLIcunUmVSeTpgpTBgpC97HPKRG4XJ4+PLw7n9dt5LZu4es0692rRTRb5+EopzvtB1gTGlSIAsUICS+UVgpFKM1aNaXqYjZbExNtNvfWd1puEyFSgG5mJnPXtK2b19g+++Szv/3Z3zj/8PMP7398c3yFKpswH7pvo2xqmqp2Oh6fPqzx8Pmbt8dv/uLfP8j1//m7/6SaHeZZVaCVggKc0ZrOy9Sa9t72q4QgRbNkXdftPCKCxUpkReyvL7DjP1UV6aKiprHXLJQIdhkGwAJRYBUrWWQBoFBAsJigWC+mULSxZIhQAO5XVKIclYgRlYVkrDWq9uVDelbsSwy4ZG/CQiJ00pv7+/v716/vXr356KPTze18OBShyoKI2LatT+uzh7NoEGHvtvQ+S+q2pV+uCrSlT4c2H2U+zjK1ZjYtc5ubTW06NZuEUhRYkzZTm4q11qG9RAAmdYfRXmhRmZUeUvBIonx4RBYlc9fGclclShNrKko1tW7WdIfmEkHVHX5WI1l6840pqjKZkZmentsY4Q7ntm0ea6bvgFdEhkc6KmN/B7JGwaFCbfuwHxEBlHBp07EfkBrYaQIjJYss1hZjHRvK13U9Pz8+Pj9sq2/XQfdlstu7uTURwe19P5x0PkInoYTDKbBm+2/rxc1dyBLQVKR3aWaiJqLWtFlr3SbT1pp23eF1agPN+8SmaFbzXG/6/a9++9eeP/+wXc9N2rx068bMyYSgUddtPTSbb99Ua88/+BeHN7c/9bf+ox/83n/xJz/68/bmtTB1l6gyiGiTzlPrRlP5iSgGlRxrbKtvq1dUvUgGWLs4DXxRzxW02+3NzduP3izLvDMUwz0TpOzdEvZdDKSAQlF2sExUFE13asauhaWESok0gWZ6JQGtQBUqmV7D3auaqLVOokaOSoHQKyOpsMnu39wcTof7t28++vqnN3cHnSjK3ptnhucYvl6vl3X19bpD2qqWkeVpIruWyCBSyY5+WrSbmS7HuSaVSZbTbF21N4jSmnTTbtbZFVNvJRJA2dq6mlK43xE/2SzsAjIC2tq0TK3vC0U2FRVRocK69MlaMxGSLztqElSKSEVVBJH6tZ++EbHMcg8fSM9y+jWuV79exzZ8G+4R23AflSGVEuHukRGZQRFtbV/QCuE+2PQ6VkTcnk4JeqbHdR1nr6DI5sPHdlnPvp193S6X8+W8hlesrqz7u5vDcVap080yHbks0idhQ+QVQlVT1czyRMaL3KeYqQIxU+t9r+ZCa9ZMRERFqU3LxFo3bdm6svF26lOzudtf+/rPnsZxOz8dZCqE9oNEVUZvTZAA29TG2DLW17dvS/CDP/5X3/7F3/z
ZX/qb/9l/8X9+aFs7tEowMOBmOpvNU2tmKFSBFCTdc9tivbhv8XIQE+B+DUntZ7nApje3N6/ub9+8fv36zf3t7W0zZda6bfkizAaq9rEYFN813FUsTtaiZZWrFrRKqAqgZGduF4XKkozKispyTwpPy3SaZjXT1syYmeVAsEjnWG7a/avTzf3N64/evnrzqk+m/QUm2bbr8+Xy9PhwvjzGtkqFaqcZhU2lq0QGtswUH464CgGzNvXDMsvc+qHPh2Wemgj7PLXeYPKT1kVEQGNSSkssiTRTch/TX4QSWbmzT3ufeltYbM0Ox2U5zLd3N8eb+XCYe29m2kx3agF20SbAKoFI7WLu0k+/d9eamJFgRoSXCEXkul4vV79e/fLs2+q+ZXi547qtPnzfxu0diAohWXCS1J3Uko+Xx+V0uLu5iXEZ4zp8eIQIfIx1u66Xp8t1jcz1EtfLlutG+GFph5s2tb4cpM/VZ/TG1ltgZK0glW2/8BKSIQrbr9MkIU6FaiOaKK2Z7tujKpEygZm0KVVi6jpP1lrNE24Ptz/95qdfr6fnz79soM1zmUqVj3WeF0dRtaT7iEoMXt8cPsnzdr385bd+7h+JXP/r//Y/wWES6Tk2UMz0ps9Te1naibRKZiFczpdxPY/wF7k1qvBCUqmdfqvNDofl9eu7m5tDbzZP0+1pvr05zIclkdfrNbN2AGQX6OU+Pe9qVbDAkgBZrBfaxa6ZTtlZAGBVxc7RQcHU2tTuTof7+5vbm9vD6TQvfWqWGU/bRq3jfbNj3bw+fPzJx29ff7QcDtZEm0A1w7dtPD89PT5+OF8e3C+QbNYhCE8pmfvcbWop28gYq7IkSyjTcemHZtO0HA/NtApipl2bSZvVTCBClTLdN3pqIoCYklKoXVWyE0DMzGxuMCmVLBFp3VqzZV5at9774XBYlvlwWI6n4zzNZkpJH7mv0irSrJnaSNdv/PwrsaJ6MLS1w818uJmmQ2uTZXpEbtdtW2NsGQ4fft3W2FiObVwQMJFCkiVKQFlkZbpfI2Se3ry+txojR72gloHy8/n56fxwHWPb3EfmqPChwvl0sK69SW/Z5rSuvZMiWQ54wgGByIixjULt4jSvrF1PXvtpEjWqUJA7zJhCmEhrNc3ZLKVBmfevT0vrb/s3fv7tr8UPr+a+rddpmaVhJ+pN3frp5vH5WXJAatb2fHl+ePb7jz85f/nHxz7/2r/z7//L3/m//fEPf6jz7RbPaq2pHDpNhSgVK7zsttY11mtUFrijPLsEb2cyVElJt+PN6dX97ek498mmrofeWnvBMYp1vlzGdXtxUsELdY4FvpjYSAoBL0rBKKCiCJHSnfikun8vERTKrE3dDofD65vTcry5ubl9/fr+7tXN7eGofaqe02SnV215fXz15vU3Pv309es3fenWSIGq+Iinh8f37z88PDw8Pbx/Hl+hRNiqskqQmsPhUJpy6qyxrRmRlQXhLH0yNopIFkM1idZMBOho1l609ajhnjHKsqJKS4QgVFVopn2xucvcRJXatE299WYqJqIk95ZHKK331ro1m6dpPlhrHbUjB4XdlkKp3/zrbxIxMppNd3enm5vDsszLcjje3t3dLofjrGqbj1gdG2obLzw/L8Su2d5pLxqVIklGZgqlzxM1b2+XeVlSCIEaKJEVz4/P56eHaziifA33KkltOM7LZKYTdKrWlOLSQWvXWD1DTEBW6rgWUhMIKVUVmHtpcO8HACq0oAI1ShOIuhlU2E21iWp2yZL89P7Nr73+u+1hGg+fm85mWlAG4E6T5+u6LLfjmtt2OUqy8nDzpp+mh4cfv3n9cX744enbvzgJ/+v/73/uhyUvQ0QPNs+dBEQ7iCobwAjfhoy4bhwmu/JWSgMIpFAmYC315djfvLq/nZaJ6L2rWWv9cFim3sYY18t62a7uCahJq8idJha7D4OwKcL2iRfWmhjYU3upQdv24megWcVWbdJpnm7e3r89vr69v3l1f7g/HOa
b4/E43x0P8+3dbFNC8s3b+zcfffzq7cfH2xl9mBhlQvl6Pj88Xj+8f3j37sdfPX5xva4o7rp9yWTJ8IqAX5Hb5p6Pl22M4FrhVdpL4NhUW5WkDm2AElaFylpRycK6XkDHfrZq32TvpEMqtIsKKoqpQk5N59a6mZl1qa6tKcW6WbOqSo9MT1SjtiatSeua4PAXKEc/++XXQE3TdLw5LIfDvMzzMh8O7XQ63dzen46nZZmsMTI2v3puCqsQVL3A1rLTYUJRgDgpJssyLcdZDW3G21dHNUCiNAJji3G+Xp/Ol9gGKit2BaSrZV+MAp1VBEyKkGZb+GW7jMoSRNIHq4zQKkiYVQeBJJIZuY0VCcJEd2nqLuFX1Uk5QRy8sPHmcKOoy+XyvU9/tn9JPD6INqpCikgV6G69sY7T6dW2ObRFstLvbw+Uev/+cZnm+9tX3/zFX/vTP/jd3//T37+fjstk0jBLiU6U9mLwQdkix3YFUkqKSBZUMguAmkKyQr1KlHc3N6fTjU6td+2Ttd5EOE2tTW3d1ut6Hdv64h4ie62T2g1YSgqNL3pvh6T2HSj2QihEbEKxChRtqkvr8+lw8+r17enV/f396e3t/f2rpU1ouajpQee5H47LfHN8++r+sCzsOingmhHX89Pjhw/P7x4+vPvyfHnYtufLeo7VCaoaCz62zPQYVeXn8XS+XC5XdWW284j0DFTvs6qQbNNkZqoUCqlII3Yp2ItcNF+cN5BFT0clBVUCNmVlQNmEikKp5E46UlOh2ovXmJpily/uMINpaw1SntvwqyP1Z379ralO83w4LctxmabpcFgOx8MyL/M8t957a603m/YFMXJkZkGlNaESQlGoiBBR5pXzbDe30/H2YItoG/OiOqm0grr78HB3v1xW9x3bBrIqHQIx2txEo2pX5mdWPl/P63ap0lHIkOEFWhVYomUZOYYTWgEIPDLcASGkdnsJqqqaKukFz8xRNeks5RPn++17848l6zwiCDYzs0ZTLxfSr353+2Y6HVqfzfrw6/n5wzQdIvLHn/+44+H19/72dz799D/+z/6P7Lg5vEamTgcKyZ3CjaT4GIiN2CUsufeygCb2Mwkt8VKPrRtu7++Px5ummBdtbWpNs3zzVZTWNHIA0WezDmmUBhWaEUhIUJTI3WMuUaUlVtxVOwQQIkpQgN77YTkux9Pb+7dv7u4Pr47TfLg5zKqkGU3Z23Q7f/TRq2U2MT1MvauOjDG2D199/vju4cPD48Pzh+en9+fL03V7Xq/rdb2OkQAyR2SFR3r4iu2S1/OzX0Ow+MZ1O+/I03JzEIOKqgIKUY0RmVGsQnrmtrqHR8a6XTKyihGxT8FCI00EL8BOJZU0K+yyu13Bmy963r3jyRebNxGKJhXWDKqJ0p/9H30iqq233qd5XuZ5OR4Oy7Is82GarfdF1URKzUynXY1f+6KxqfzErowgRCIIsM+8Oc7LzXK8XabFwLJmVAdjXdf1uoXH2Mb1WvtucoxttwnZ+THkTkPfvLYRcblexhYozRSkEhpZmVCYigHwUXTxQO+TiPjwLKkEIvfxkghwKw5kEbMzmGVX+ah//Zdv/2Y9fDjHemhdjGPLXd2FciYiiiIj4vHDVzenRU2vm/tWonI8HR4fPv/49u7tz/zmD//wX//RX/7rw+tXrU2UTWli1RopmcDqHrn5vteKZClLC0wioypMKSkZOcbww83pozf3SzNrrfcGQ5Rvvl3WazLVjCZs1iadFrNZtO19XkqHWnM4IFUsprRsk5i1nRJGyZ0bTIrZPPfjab55/cnb00e3N69OzUqboRlEVPyK0Q/Tq8M8ebzR6ZTydLnYFtvj+Xo9X87X9PH09O7dw5cj13U9b9uIdK+CSBYyZFt9u/rYhm8R1xpXzy0yM0tX94jRD1OfJ9Yg4xpO3YWwYydkRNbm2xYjM1C1W++YiLAJlLtzw7542FERM1DDMz0SyErPoIC6m/u8vA+qUClRJFK0dZuqysQ
UuU+K2q0dj4fD4dhaE5qIZErvXTtVOv2pIkuGXsXdq7irXXajh0oUMrNiE5X5cDgebw/U1OaJ8Ijy2LZtXSs2KQ9hRWKMsW4XADO6avk2YqCwavOq9MDYMkvgzF3MApDarUkyR8ZIRvMRplrJSgo7ioBE1TqCXLOiRLoYSDIbX1+v+b23n3xt/mw8PAevZrZdvC82tz4i3QMFCm/uby7Xy+F0q6394Ec/7N3u7t6w5Hp+UObc7/7s//Nf/vTH3/kP//3/9e/8wf9dLWkL4SJMYbFQpDh1g4q2jFEtLIKBKBQjK1SojiSrwdZ1/Oirzz/+6P70+lVwXP0JAg+HpM3E5jLXQaYJyiwgo3zbrplIN5RFpG59bJWZtKoa7tBm4aDCoITuPiU+Bsg2ifbox7Y002Ybsjo91vO6vlmWG2+v/fC1ab6Pfj2v26y51uV6/ZHjDy/PD9fHL7768YoV7jEq4Cksf9ahpTcSa1a51C55HRJo6iO9gEGPuvD61Q/e397eDo7xnJgm1a1MRYssaBVDm/Tsa60NbSdBKRvRdi+wQkVGVSJ2f7ytqmUiYjg4qRBoU5vnWVjUbFooZkUBO8lWoM3qZpmsBLsIWK2sYZrazc2h9UVEBOruYswqX3M7rCOWLY/Ei6cNALx4hGTV8HH1AnhvNjf0ztaXJlq9FSXW7bz52Lb0VVaPGiODY91iJFWGr2JaYNaguOdWhTFkHZlCr6HVqwopKJaWJHNgbOruJlrFbd32Kx8imW42i2lRipaQyCatgTrLTT/qF+/O3/ruLa8tfWvVxYwZtJpmg3CMHNulqswO8zxPh9O2XWVc/XppNh0Oh8enr8a66aw//K/+r9/9h//47/yt3/p//+t/MX26AHNl7nAzioiLwPWFFAYa3GO3hJMSowSQFTtlCpJfPnzxx9+frXGeoioKkcyi9FkP6FyxjSHSTRQZXiNyBqQC2zrWdXjMvmVErON5kC/+CLpbldm+PvAYw70QTTsSObwtjYqoTdzvq1WcfgGvvqEnEemBoy05FYRrjYue7zY+5oc/ef9HQp6wfFifUIDIGGPEFVLoypGZGJFWbVQObGu6u0paVaZjexzv4qvT6XD36WlgmBcz+jKLABqAC8fUFRG+iSgydv+yoKhKAzQzlZJkeIytqjwqPSM9vPLZQ6HSbJrOh9maoZtGyRjr7kbT2qQmHpuoG/f5A4Bkm6Qv2uc2z936DKaPls9cR7Sp9blP2zTPs3uoKn5ioEuRQqxrdPbWrbUpXYmumLoetFNkZLpvWwUiar3GuIyxbT7ol9XDtWuojAjAsgLpVZFlmUVY5e6kIJmIUeXjmlvTXslxHbudtGdQCUh6SIc13UmwomI6dZtb782OrSZc5dXXXs2nKda5IlEyVp9n86rIrWezss4pxK+r13YOhx7vrs/Ps0X4dX18b8qPX70J1W199+7dD7724fm3fuN/8Tv/5r+xOVnqDpSjZMt1zS0otGRCiREjIaB6VCZIjeEisF5NOffD4Pjw/NUPP7SP7m5VNWvsWgbrdtSDmNgQlE7dVOeqUpl2n9YxxroOH7WuY4z1fK01W5lHXcjazXeBlEpRwHZiDZj9/HR5nJ8oSpePng+vPh+3l6k5Ro0Pj+8vz89cZhYk89Xd3duPP1mO84/j8XdTJ8zXzT9AMpG78STkvF5R7+HctgxHPsu4VgQ8eN1WHSXQgitsffIf/cWX/TTLzKC7SuCiuheBjIGInfIXuycod0btLprWUrHy0VuXfsCE3fobW5Y2um8eVXE+Xx+fx7zY6bjcHA40e5EkvVjXbECamf70r3+klACs2en2Zl6O1vvUWp9Vm9B6Ro2xDs91g/u2bSMzzJQQUZ3miaYQS7Xe5uN8WKbjNB2X5STaRQyqplqhl/NlXZ98eFzL1+15rNdzhiN3xxZkVe4WCFERUWNoJVUMbkyJTZDMLfw8fEspI6oyhIaq/Yil5+7q27vObRZRUW3W5nmel6W3aeo2lW1
pn96+fbPZPC7nS9U6dgwuSdsJWfkiCuzNhLoss5nSpB9n65K+Ssntq1chxGZ36V//7nf+5Me/9/n5Q5+P59qCW+XY4tmLHlo5alRsGKO2DdcNI8srKsMoU5fp0GWuZlzmhYrEamoQjUJmQWiqZkqRaZ447fQi09YPh6X3pk2Xm8P98Waej6qihj7bfDgUi4SZggTUKGQkC7Cm/e4wTTcHPcwQlQ/bJ9+vj//t4Pvr5d2HL5+ent69f356QrO6buP58nB5+vDuKxY/++wzbfL5ux89rI9PHA/xkBVQBYU0CEs8gteRdN82Oa/wrXAObppOVGpWgVGSTDvyeNNVubGSIyvSMTbdbcK2bYtwoShVQGEjVESsmZn03vvcp95b671rn1Rk75VKyNZEOrUXZada6XJcluVwmBYjwz29ICRFf+bXP9pda+Z5OR6Os7FJibU20cyYNbYxho8ttnG9btvYtoisgjXtTc20TY1ddm7Wcjgel2We5953GoaaECN8retlu1yetmvUZtvq63Vsl2vGgGgpdyMd7GyPEqKhGMF1q0zWoAfGNWJjOjMyPU1apoyxDY9KRURalNbUmmgTk9abcudCtWU6zXZsdtOmvvm2SPvW4d7er8USEsNH7DY9jowSLxmq2tuhJJd5Pp1OBSSaQJsoIsa22tQvl+fhT29/7hfeHN/+zr/451vPzX3lqC1Xr3S619iK14ix+cC4MDfIbnQlsC5tEjHR1tAJRrcWw8HIqoqsGoXISoq0qbW5mU3adqGJUqFN+zRbb9b7PE8UtnmSppPVpBpAwRdrQBUSyooi9GBtWe70/uZU/e49vva5fP0HUe8e3/kHBDQL1nWeu/SojKrZpvL44bsv4lrf/dovvI/HP3r/p1+Oy7kC6ASqPBBQ7Bp2MhIpQ5U9Pcbz8AsqtEYVLMpB2lztKPNhkh4piEpUZUX4KqIRta5rhJkuTCB37jelT33u8zIfl+lwPE3zZM2s67RM09yW49Q7wQgJtNRJWlfrDYouebpd+qEndhK9Ib2JWaSUwkx7M91HDEXKGtVqjEzJzEwvDMHublW9yeCujRRVlkjXiY6LX4haloOJwdU3TJPWagO1rpfLZbs+Y7uUb9f1um2r/2SAJksySMB3t8XdUCoZGzIKyAzsfPoazID77qrjGZGZO89jOenuNiwCAcIzNNGkChkC9KnfdJ29yXE+eeLp4bGezzm8qrQ4NpFwlS5oLHMfpaWMKF639XBzJ2vVOI9thXvF1ppw226Op+fzh6ff/aOf/pW/+7Pf/af/7A//+XR/g6t6RgrGyPXsNWKsOTZZrxHO8paJXZYpzUJ2znhSV6FkkpSHh+friKnrMps6hgnrrvdmNO0ibDnKUyKzKNpEKWPEiLSpT4f5iPl6fn4+Pw3BWJF1qQxP7wmrMGGzg5jx3Tr/6Pwdb5+09pjn1eqeN0PQD8fhyOvWhEXpE7lu6HVAfPnl5996eN58fPnhR96N1vyyiVaABCUqcfUW1cgobxU+0pydQamAoDJRAsmcelPGdr1qn8BKS2eZKAWZvvtkV8AzWMXcmaGqynme52kyXXq3TJeWu/fMSQ6ZOXy5Xk7vHz48Xy+bb6xcepumGVmbb0X0Q6upx2VNwRjDnGMnECgLFaCWWqav6yoSGVy36zYu7usW15Fr5RYRfDGD2vXmAKr3aQzftgFymmdVZcp2HQJS7Xz1x4fz5Rqxruu6Xq+embudfDpZTKYRpbXtcswMKS0yS30LCWQhRlXsPSWqPP7KbhHQriKiWSPHEFftTK7DFaQ5YrXtuU3HNs2zTtKsj4x3V+1t27b1Ovq8bPWU6zZ11T5BxdpMcmT6GB/ev/vqi68Oh8Plcp6mab1cjocufVJp07LczK9+8Kd/9DPf/u5v/83f/qM//ldn9Ofi5sNHrteoDXHVbWQM8ZE+IkdBVK0X6ekqQLmMfVZgVolGIS7XM1Ib5wgBzLdzblNWp2RKFWy9DqBUEGa0EpGit65
m4tlynsBqk52f5HwRH+uGUTDFvMy32jW38bUP/qvt+Dan82V78tVEqWbCy/WqOi2HQ0YsMEFu6ad+aBd73J7Pfp6l9d4m4uGyebqRJppehSyVbbhCUVHCSCKV2ltnbOCg5yBpXbRZkVWVnioB3T2ZTamArGMrAkgWACnJpma9995UlYrpQDIrSmAqEFMz80RDm07WbnCzHq7XLYZPvROa7lHJim4dkowKiqfo9/7GRzvLYmq2TL3PE00LvnPZfYzn89Pz5fl8PT89Pzw9P43LcC+AKoKftKM07uTcEau0dry7FbAIrw0lVXK5rg9PHy7ns4/h7h4Yq1eC+xQVQBCFGFERUkQgI4GWgdgSXrFVDI+xE5R2R0FqMctb47S0QmBkJbPCzChIEC9WI+XhQl36sYs2wRzy6kO0hPt4vmygtkXcfaxXoUMyqyIK1m5Pp+Px6OvapikitYmK+4jVx3pxj6oxTq8OPS+ffOe7lw+X3/3zf/O+1hF5fvb1GnHe3HP3dYvIXedFAoqSrMydqEkUaZSJFI+ozBwDDpPetffWBFAxRUNKelwv23YdgSShJUxCBBAhutoWHo7wYcC0TNIOyLisZy15Nb+N28Mc+Zv5+u+8/bm3vHnWfL8+WdZhPqLoLFXr0ltrxZQIZrTZBOGJ8/Z0/+btyPzdH/wP18rzCC8HKyhMk2QJUhUQZsRgrJZX1lq1BkZg7Efaj7ft+Gqxg7QZras1YVOzJrp7IWAbI4YbOwFVsybWupmpCqS1uauBWm2yeTI1qpJGUWlzp5LK1uywHA7HpXURLaO2ZiroqpN1UfWq6xhmJEQSHFGeY/U1t5pNygo5xubX9fm6ni/X83l9uozn7ZomKjZn7hqFrJLYPTwkrNV5fbxc7qY251jZK4b33guBKt82hO/mmlXcLSKLRdWsQkqSFhiZEE3fXd+QUbFrMr0yMzxRJTvDX9EgS2siO9N9D76p9XKFIMaQLdR7IyrtistTPfbb3TF9ntDSB1jB8fwYco23H71aun7xwx/per053bW2xLZe0O5ev755+/HY4jjG5fnd8c3rD+/eny8P2/X8+vbGx1Yh58vjcrn8w9/6j37n3/y3l8cPn4/YRvnQWadrnN03ZFBEpAEshIAVVDF6ZVK7jnVDWQb3HyrDr7nJ0Ll1CQU0N3hdIVMiLufr6ljH9XRziB7NtJ0mSosUYwmq0hEvROh76jjdLRg3yWlpy8a/PX3v3/v6L63FR+IaMfU+VVuzTGyxHrSuLT1MGmwXVtaIbJbT3GrDx3ef3s+3f7l94caezd0TJSa7kQvJ3TCrdjqK7UA0X1rpsp2p0TqWg2mvYjq004Da29+IyPSIYdLI/pKKArg7lTWGjzYsijJps247jJmoAkIHoTNvs21ZoxDuNZIYE3PkNgQ0a631RJ23s37v197ihTZfu/PULjSKcvdxvl6u1+u2bZfL5Xq5jHUd7jvuny/JMTtHrYAKjwp6ZDGtWdTIyM0vo7aRl/Pl6Xy+RGZERo7IjIjC7h/74huNQkDGyBgIRw7kYA2M1cNrtwzZadw7ERqJZqb7S7z7LEiq7UoB+jbSB1G7qXYiI/18vSimnzp9ejrHNi7n6ybCiM3X7fn5w/2ru88+++kvPv/88vS+CefllCWXy2Xz0N4//dY32nyCTL3PLG+my6zLzd3z2U8HG09Pp0+/e7T5X/7e7//5F19K4O50B/byIMT2IJrdcZ5lKkZTSnJXuhGpY9R6vvrlGmtsZ1yfnyO34+mwLAelya7eGrFdt8saj4/Xzz//4vJ88Rjrul6v5zEiM4RBxVjX2hvMrA2RETcyL/1okH/35jv/4Nu/6tneP39AuFzdZCpaE+t9FuouQgMpQt11VSImlkFkqk2ffP2zf/nF7/3eu78ogSp/ouiskmJjIBGBlEyWW21ZVxln8XPIMCHZMN325Y3ZgWoqJm3exw3drfLc04cDxVJAd2PwXRgnRjB39dVOlntJqlERoyiygNo
tkjJliEC6qjYBxMT6LK0BCYaKZJR++1c+hrB1qrH2wCJG5HCPMWJdV/cxfGzb5sMjImKU76zsLAap/Ctha8I9UBmxQjwR21gvvm3jcl2fz+t127Z13bZtDc/MwT1VIrl7luyy1+tIH4i1asAdPhBRdGzuAKr2XBZUFRUUqvClA5YsiZ0CSRqF7u7uw3P1UYWxjXU9P6zr4tMvvvpsqRiXyzoqU7rAoyjyxY++WJblZ37upz48fPnlu6/W8IiRcTWz4/F+3Twzrk9PmaHC0HZ+fJCSnNp2ftC6HO3u07/2a//0v/wnf/LDP/2Zr30v3R/OH2bI1CbV/cnskTI7ZT/J3Tci13UNr+265rjG8PQUojgCW5+X29P9ZA3ch4HNx0Dq0+P1+z/40edffu7DY8Tz09P1cs64ekQFxhoRY01fwy2q02LqN1f924fPfuun/ka7+sPjB81CVFsOoIhJmw/KllJzm0REu2WuXWVqlukkALtenp+u548+/tp/98N/9Qdf/PnSZgLarJjgQAWhYuLY4wcRTjiwEZtyQIdFrnrC6aObfiu64Hg4TK0lsqC66+yF2xjh1bWTfY/2MYqw2tSsNdMCAzkJe+QAvU9mrTWbREB2M4U6LKUZxZAQMRVSG0srk8isER5MWqYrdU+Ky+zunumwNkvfi05GZaaZTYclCZRuHAikRwRVLgWTsqAURslKMqnP57M0YcVu4pg1Nl/HtqVvGVEBoe7WMXv4ko8kNaqhrpXwUS8FFYjdQ4q6p57uoiDuNqWsERtKUdWoaJoSSIfIiNi2EVFqynWtKyZz9qk0jwe+bXNwc9Ara4SwQ7aKujse/uJP/3Csz5999gsZf1adp9tb7T0Df/pnf7Be/WuffBzjGVmHPmXl4ebN49OHj28n97xcMR6/P+Mb//N/7x+9e3yg1+Xp3LQFRM3nmkNAz/BMYSQiK2vNsecc5jwZUVFok1FE5TrxsMa2ruu6XrrM4urckOkZKh1Abmtero/5rs6DxpIvp8N8/+r1/f39bDaQK8EcXRqUh6G/8eqz//FHP7++H++fLwADukwzVKUZklkQ42yLJqiSLLgqhVSBJsuwAYjVFTj2ubXlON9dYhu+TkKarOFBoqBWYsyV0sAlePHVtBr34ITldmkHFc2pWTMBEmx78KJYj6gMUWm7lxhQTUTVao/RKGtNMzOHr5FrPNuUJO/kqNYp2hSQCrFSTWSObWBNr4LFuDIZEbGNiowx1hzWF7HGRFLUa6inqiLqrKWVexwNibaL3dE0FUlfnY2eOUY25aZOajF3t3wpZIxMzxwuexgL/8oelCVUqQCqSb3A3SxBhKCssAUiZEQZkpF7eUkja6hqoviThDlBD3NksFpZK0mUU1UqRqzukq7lQ8F1DTbk0DL/9Pbt3Xz4y/MPM9jZV1w8nbGbafJ0c/PFVw/r+P7Xvv7Z51+9GyFYyS5vXr0G5HpZD9Ph6el9TBMDHy6PrPH+/fs3r14DuT48zZ9/+Q/+wX/w+Z/92f/+//WfLPfLEvZue5rUUthFa01QVVhrwDnGGKutY520z0u3VpVjOvH8/L5ZqfU+zOtp+DUihoyKRFZUDtmK6NpLrzm2D+/fG60sn85bbNmlnUWa6pWp4Wvyl772zV9+9cnPnD46+/O784Mnux6mLpBWVTG8t0XRFKSD0oDKHFQpwN1VGxjX7TrPLSk+6vZ4e3e6a9NUq4io45nwxkqVMgorfUiJEpsXZBLJwRj0+WTTqXEa1ffhs0pVuyYwUCoY5yEhqg1AjiFmVRARbdPUtDXzKiZ9PJ59Bafc+rNkl6tJa9PchSXoXajIiiHGZOQILW99XFcAqi1jLVRT2v2rWxGMMTLTaJUyooAU2UoEgJmoakZVayJSuUbEqIx9m5cIRBQQ8ZOAVUBRVcnIjM0TKIFERZFsxsiEZiazarfOTyJLVEdWhQCSvmUiaZWoClVFBKClKS+RksUahYumRBU1UYNBCoKbi0qE6ECq+FQ
DA2DEtl7aLF1qrE+X54dMFKPgYiqV7l4lqtObNzeX8/rw9Hh7ezpvYzoeT4cjgMvl8vr1zc3drT3c/PgHf/rR6zcKXM/+8OF5sWm+u7tk8Ud/cvvtn/vN3/if/Kf/4v9hx9s/eTgvN8euMdKleDvPRflwXrM8ISyR9LfzAfNMrLc39zYF+EEXq2rWKM9RYUissfq6IVlpEa4NsZ4zr4FCkWUDWXtUxPrYcUjI/Uf37/zLefW/9a1f/3vf/JU71oenx6frtQdFyhpU2h7e1uYutP22UwUZiGjIopAc40rR1tty9wqP71tEbuv7y6VSFFyWggfScg/DxGCWlngaKjKZKTn2uERpB5tfWbtrs8lMBWzLkBaQMFi6+9gyKcUuarBoxkalmMjxuEjvI7wLPa6UG8ppdwAI5/PDtc+LzekwCE1U5AVelybuFKFRmD01cgcggcrUv/nb3zsc53ma90Cln0RBkihVba2ZmYiovrQkWSWiCt29/EQ1gdidUvfxNFF79m9tufOwXsK0SsA9o49VuhcLFHbzmxIKK2IX7lUWMiOKwB48KCBUxUqNQIo4SW2EaFjBSClrQdscgxIsUigUlVZhLCTyvK7bZf17P/Xr37l7/cMvf7ytgoiqQRqrosrEWAQ5LUeUfO3TT9+8eXtdN79u7z68i9zW7fLh+fHrn3z6/v0Xjw8Pb968bq1XxuV6PRxnsz7W58P93e3bjw9/8W8fzk9fWX00HbqZAnd9/vh0s7mf1zi2KcoJReTtcfn06x+vfLKjf/T2xtqYl364ma64lIzGFFqJuK/bBuxiQR/X6/Xp+cFjCKVErPeTHbaxxbZpVveS2N59+YO//51f+ce//Btvw56fzmsOoiTR20RQpSWEZtPhSGpktKZGbmOTyorBPS9R9hQoZfBpe9Su/f71P/3B73zZL8fpuLuGUyVKIgOMykAplOnuI8pbbahV0qM3uXt7Wu6tzdIm4aTs1EkEqk0i3CNUtVszaKPNfTod+rLc3My3piK7x3SEsmRnDmXueprdTI9MUaPuS7QUpRjJiswKiEik7z4dTLAYMfRX//53em/LMs/TDKAQFIClYi+nX/fE1Z8oLSorA1moyErfnQqqaico7CSm/ctYYO0yfwV3bTOrtFIpBmgJC3uy10v1qGJJRrBQnpWJSmRVhkymzShFSVG3LiKWlDTahNYbCWtXsyBMCTVBsaAjEZUGVNaomKX/xrd/6bM3n74/Xx/eP1dQBGNzVfMYrJynlpWFUrUvvvjSIzz8/eMjhV++++LDVz9+fP/V5eHhZ3/+F/74j/7g8enDPE+UOp8fI2KZD7fTaT0/Hr/3c/N6/W/+4F++uX3zVuxoy5vD628f7046Pa5bbPnZR5/EGB/efXj7+v7q5zd3x4/fHs/PP/zOpx+9evVqUmlTbPUEwZ5pT6WvNa61ByWQuI7xdL4wyyhVWdC3x7vz8zU93t7dzdvl6cc//u1f/rv/27/12zeaW67Xy5qbT9OOuquIOYSi1htKq8oUhErknmkKloiCrMiuqoKiPJ7fz/Ny+Pqnv/Pwh/1mmjAPKAUmgsptbJ6VqJCqciYqVUtlIDaycjnY6eZgx7IDZKFNbLtrlDaPTMDUjDq3PvV2XJa743GZp7mfzDpqt+UUZGUxd6Z/JHeP4mooM7HerXUrRMSg7DmeqqoKicRLdkcmArnlGKv+2t//KRU1UTMz1WYaUT6CL9kL2qyp6R6XDWFEIiQLXhm159NFRXhmZsUewL77VzBVbJd/7wED1D25MiEqBdllj7tyi5WeVVovHoysksoKD6Ban9gT6qTQpHeIajXCoi8UHaKiJuSGgpTuFkUsIOYMSmWtmm4i8fVXH/2dv/Y3bzj76hRet2ts21/ZVBGcp+PNq9eHw02fljZPfZ7vXr+iye39q/tXr8Mr1vH+3YeB+ujNqz/8N7+X8MpQ4HhaQsRUZsTyrW/D/MNf/OFc8/0y3fXDT3/8ta9Px4eHc9n0ts8
fn47ivD5dv/X1T6+xXnH59scf9V7a66Ob+9N9u8RDbdtuGOpp1w104cWfQGBjjW2M67ZZawTCq8KX+fZ8Xln16rjo5fLXvvO9/90/+A9Pjq8uD18+PabHPM3bFkSHCm1iazd3t0L45ktvSs2Ipo1NkkmUirBETaapi9rz9ujbdmjHulv+sj6Mgmoray/Z8CKZtUUM+ChvKUgLbzlQV4VLMz3MCxpkrn5oYlQRUaQhyO26aXHSNtMOvR/btKiqSNdpTyAsVmZt6S41aoQP960q1/VCBqUm03lS5aImYEbtFr80ZRPNRL6IED08cmRFDV/t5WS/pJLTrDeNMLBSoQqt2jNqAcCoXWbnNej14qVeEIZkxQAEyt3Scv9fUblzHKilBkFBBTSERCMcimqq8EgwHUitPdxgB6bCM7NNxknLoDrUjJpiLCVb7GHuAiOzpIKNkgQjogIqXYqWVS6eAlGTZia9HVtrlBSo0oIVoIK9L6fD8XQ6SZuTHMMPx5siYotpmh4/PNzf37/9+NPcBqS+/2d/8au/8ktf/8a3/uzP/uTV/WlqdjgcDkeuNZ6fzncfvn93+9FvfPrt3/vqq4TNN8dvL6dGRI7p4fHuk0+/+PDO3nx0b8ui7fbtcm2Ph4Hbt9/cND599c0P24/ubm98I+uxcSx62C6H94/venAwAb+Ma2SdJhFtj5crvcpxPV9m45brjeB/9u/99m/86l+f1usXDx8uY3UfYv0aQygAQgDTeZ63MVR4mHvbT1lWcrfnTRFhAshp6ptfK0q9eltsOfXeyya/uTl4+HPFaMAwzqZDdesVAqFnOmOEbywPEG1qaj0koXtMTA7ZGxFf60pRVhnQmyrYgcr0csJVSrrJrqiuct/Kr8PhnmRt2xYRIha5rdcuGGXoRxbKPZtpVZm102lSbdvzJSPCScuMQdL2Rcbe+qvqS8QkSq13bc2mrAgEiu7OLEbsXoCevhtTQjMi3Xfs2iAo391opZLJrH3RUyKSKJqRYi5SiMwR+RK5QVYhd+b67g5LSGtNGlJGM9Om2rw0XNlmSc0CSylawkwhTbJQ4Vr07GNVjDS2Lco55i7Hw6tPP/7MNh1tZIPpxHwQQQ70rtJMTG2e5tNh27bb29vwej4/Pj089G7v3n31xY9/+OknX/vkk4/+7ffPl6f1z/7iz7/7Mz/34fHheFimuW2Z16enm/mQotvzOn3yyUff+Obj+Tl0WW6XWy8x+6k3r2/mw7qO+ea1d70uR53mt+sIvnp7e/uhztX0m+3Te2/WkfO7V+3VV59/MbejtNs/G4Nz8uk8RrTWJzOTMp3rGk8tTI+3fZ4n+8bbr/3dn/3rv/3rf2e7Pj6vH9ZxrcLU+suMR5bV0k/ae2amh/ROQUSIVOsS20Dk1GSs2xZ1XA7rukYOZKmallfVdHNjT6eF1c9rX61rhyJqZT0BQO4eCXuitQK1C75pyk4zy+JwbxNKZctUQY4RuU0yU4pKVnkOgVKQtRKtIiClGqxNsUGgFttYGaGGdb1G+XVb5eHh9nQ5xLL4ga1aU2iBGrTjYVZtrs1kQj17rOvmvR2sqqpQCVBVLZC7FSNJ1fZCm1at8q7dywWQFM2dXY5wj0IBpTsTYVfiQhRVyMqEF7RS9rWvGcvAiFYFxUpElYhVZAiE4VUkMxIl5L6ETNGkDSqlhzbZVKoHE6CpRWm57iFQQWqVxRaTHBy5bePsWyZu7+bT4djl/tNXn/WmUUVp07KHIAhjD9uAr56JU8b9/X3EyKj7+5vV/bQsh+P0F3/+bx/evxezpnOb+5fv330jvv2tz757eX68vT2FX5/G5Xv3t11dCuRxPr46dcU0HW0a41wje+DTu9vrJdnl7FeZDjd3tz9697h6fPub3/r+0188X9dv4TZO09MPfxzLN8X73enG0m/vPn18fLys6ze//tH3f/T+9e2rU7dYt5vT6xb4gw8/umvz129u7k3+g7/z9/7mN392uz6u53dXbNE6/39
E/emvbt2al4fd3Rhjzvl0q9v925+eAqooqiibzkAAxzEGCaVBchwl8sf0iSP/HcmXKCJKIsVKHAgmIGOBCZ2hCqgOquqcU6dOf97+3Xvt1T3NnHOMu8mHuQ/5vqS1pDWf+Yxx37/fdUmx6cjuJEWkW212kIqEqVsqBSJMNaG46SKfgbDmYeEOcBpnBGWhYFSdwzxJV6OVvqxEMaBvq6mauXALRGZO6s29LQV8MDdzwmDuAgFAJbETNCA0xKaGQODWmluoSKMmgV1ZgUeEgYM2c0DhjOANmnqtWmdQ18mhmjYzm5s+jIAgJHR/vLmMJ1s8W3Vl6Tu5r6hPZlayQNWSupo0Fn24uUyjNsOu944A1AWg48yJMCigBbBVAAwEWY7x1c0RUKiYQDLTcKvh3ha3eRhRkpLBY0lsYpCaMytGWrqcyBQLsCyciCopELgwzkjqBBaBjGA0cw4jCIoQI5Igcw7O3iFjiFNVbpGYGeNNzKC51YCeqNdwksQi83Rcb8rbz5/uhp2eIuphkknpnGoKVnaojYwcpjlQjnV89emHBLHdbnePHm3PNlJ7rf7y5csPPnj/8Qv/+KNPQDW85pSmafrJx5/8gT/4c//yn/3j81331otnh4fT4bQ/3/Y47uH2+zO03fpsNmytMaVpmodu1a2GdEaqremqXA2qh20uRiWlfHX1VddaSrHo4PznPx/vPjt+/JWrF/uHca/xB65ewNxO1i6u8qPNdtWtjk1Tvz4dxtN0vDrv3x3kj371l96/eFbrwzjvpwZ1DCGweqJlDJJYun6aa/Ioq54qmFYRIVhSJACBzGQVWrMQJ4honooYGDsYykO9e8xJy4YItoRTn+RUclp7PZg0YkYVIknss9UwQg22AGBOIkYQRpQCGVwRKZwNNIysEQGOoeEVciGaRTRRZivIHH6a2iHC5lbnWo82tmjhMzh4DZ1tMWdPGgB5SnNQMEfCXYfZiSE5GExtjgjnoGSZ2aggTGYmt/uxlGaRI7tJEkIWQcRwNlsQKBBuRNKWLJqZ+8L2QARJiRTYY2IAMCPilNLiWoKIpgtT2gAXbC57YDQnAggGAk5QKDU1jEiJ69JYAtc33S5mCczGCVRcCuHCkGFYBFlESBxAysjohJFb03DMlLxinS3cH11dPHv86Ops26VBtsNlN/RMEjOhjq1hSTqNRdLxMNc4WXMCZsbj8TjNp+uXpVlsdtvVavX69etutWrNbJ7M2/XxZpVXt599evf8xR//E3/yB9/5zXVXLtfr7/3kBxmfln6N8Yqbg3Srdbm5uRnB53nadWW7GhAxcAVBGctpzpdDf3NzY3XablbI2cxzN2wuzy7u/KpdPn/05BXf/d7HHz6/uOiG/tvf/d47z9/yUc/PH83ot/vx/fPLZPX5s0dfffSVf+tLf+h4+nz/8IBQZhvDYJzHrusgQrqSV5t4o44GD6BUyJA9St+11kBnsKZoFuARTIwBi0FdXdvi4EMKd8lMq0w2QygIBmoko6CudbOr+aQRPcoEFgScJEsSI3c3FHNXgyCjcFBubuEK5izkwRDGeERImcHFECeYskK1aAa1eWutNWuxSDmrhgMimQY4smPVNjZAOiU5iHTMzGzRQseGpthFzpkyWU9ttpQZg/jya+eBCr6AeNQh3kBEA9zB3RdfERFHuJt7eGvqas3UzCPIws2NEJJIylmYSy591xVJb2zEjEiwqDNj6dFgRhRYAmHEyLi4Z3QZsjqqhaMtmjcUA0FMKMlJjJgdPdiAnAiRnNAZAUFc0YwIuwDWiefZhpxfPH787PLRZrVed8Ou7C4250/KWdKoZsdpGoaVZNY6GS2IyX7o1+uz8361Xu/O12eP3KxONTw+/MlH5+fnb7314vNPPy2pUMCk87Mnlw8PNy+eP7tar159/vFQOiJx1cz88OozbDOmxCl3fX9zc3d3f9t33W63a61N8zz0q8NxbO73D3f399eMMXRlbjWXUnKCOiXCQvz8+Vurspb
ml5tVO5426+Ht7VmPtF1vV5JWq7UkOWd5a/3s/WdfSeUwz7fjqVJO6mzz3K/XbuYWqaym2lprhUVSYiQkZslLwVyIVNVtnq2Z6TIX+WmWqi3pQrBglO36vD5dfR9ehauD1cmqNwBzUwBjAmYQIW/VakRjcMTgePOsglE0cCRXb3MdqzVtS+MNI5asILg5A5vb3CaD1rTOXitYC9WwgCAia+HmbhC2RLbDHTAQQ8ygqaaUh9UgIgQ/9dgukp3m0ziaNq3aZuPteyuRYBQGMKsRQYEU6GFuXutsphACCBHWWlv+Pl943rE4HqJ5I8Yky8WZc84d58LSpYyZ3qQdwgGA2IkWv+iyXECLQCYmcDSQRe4tS+7N3Q2VMiIT5mBySYSCCwUfDJFiwf754iR0YO4gRCeFyTPBbr15dnV1eb4b1kPXDXkom7KCMaaHaViv15vNeDiUUhyW9DwRJwvQ1jhJWQ0o6fzi7PLq6vziEbp/55vf7Nbd2dVlPYwXjx5vdv1Z3++6/PH1Z7lkPO1fH/ab9RnApLU+3D3knIZhUPWSystXXxz2h812uz07n+b6+uUX4zTeHx6Oh7v94dZN3ZyY1W29WR3H48vrmxa6PtuV3c4tusR97tj5ydU5IgX4OufNet13Q0FJDRPnFy9eYAmtGI0VvY61ELbWwKPvh3Gqc9PSdbkrROhzizpLSrlfmVZXBVwuYQSBADFO0ziOqhWX2TNhAJU0PH721su1fnf/Uc7irTWdEEyEELC22bQRspAQlzAHVdAAo4iEjIbL9pSQQEHfLJWW6zUgEYjwEh9AD3VvEbNOLZrhIoc2BweHcDcPRAlHr2S6bIzCGxDy4jEC8Fy6nLtwXOD+rTVtOh4nndp0PE3jOM/Gq7d7phAQgGhePWCpzpupalOt7oZBbq7WWpsw6E0exdzN3iA+GJgwiRRJqUvLHjuJ5JQkCRCbofvCs8c32hbi8EXevswLHAgzJ1yWYhgIGAGORgLExAVZIIQIcwSQEzpEEBiipwAMVPMAZFzI+NYccbVeP3tyfn429F1XhjWkyD2CwenV/mx7cXZ+Fk33+5PkMj3s728OD3ev5+lwvH99uH01nk4Pd6/ub2769S73w9l2e7bd3N8/vPvB+0aw3aw2m441NuseEg190TZ6tdN4Avbx+DBVr00vz3bjWG/vbh4e9mW1ktJbYG0zUty8vp6ORwBLpRyPU8lDq1USB8Krly+Pp8Plo6vVqtNWH27vLs7POGFOJQsdxiN6nO92q76fJ3398nbaj8+/9P5uu57up3k6eSg0dNM6uWTklDQgSelKl/uCCOxBsVhlDD2SsGRp2mozJonwcRy1qbkyLfkAYABzTGV19c5bP8DrHx0/FqGms4IRLs3WQAb1qja7ejM1DQgUFgJkFAABWtxuFGHLvhacWISIBGzJYC/cH7No6oowoxlZsC8M5sQkjAxmwW5hCosvzMxcw41Cl/OdTdNcqwpl+Kl1sDWbx1rH6m461+k01qny8GxgYWECcAdzgDfdNvPW6ptrQKC5am2wnImWNPTCm0P0BXoPmCUJMzPllBPnnLqUJAyXCPByWofAJdztTurmAUiAEe4GQAxEBEgGYBGLFDpQkAVAINjdEDSzMaACkrtEQzD28ADDCFdk5uQcCE4oiZ9eXT66uBhWq1yyMAXPNMNQ+0fnj4Nhs+r7fsVE06woYjadDveEkZmmw1F1AvUFY3q7v099v05DyfLO73///uXLq2F1bIeRlA4Hr9PmydNoBiGTWZ1H0GamJdHpdPz008+s6fnVo3BaItggsF1vbNLgfH27N+L1sPLQhYL/cP/QDd1bz18cD8f9w/7u9o7CAaD03TxPNy+/uDi7MIim8/F4evX6/tnzZ++9/z6oPuxvPRScx2kOB+47FJrMUhn6PCTghRFGzZGQZS0sHM3U2lwB3C3m+dTq1NoSpBUk0lYzp5TEVLlfbV48++bhJx/Nn2G4qs6zhqqqOZhGMxgJ3aL
Oba6TWYMwWvbDi0UGKWAx2nmE4pJ0JvDEgkDgIJQRMci5IDIEARIuzAU3xGWW5Bq+oOAiHGEpmRgACJFEgKpZtVYtAgSQwuaq2rTWVue5TtVVp3Ga55mHpz26pCRMjhBhbuaIvtTSalW1CLBwr61GLKxJ9ABdXDBAvuDuIxaBRRIppXSpS1mW+zAvPisE8wBgJkHkZi3cEJwQF5b4cq0OVCCLIA8MWJbeEajIhp7BxJuhYcTSt4jF/UgYiBQO7iFvtBNMzH1HF2fD+flFWXfUEbH4DH1dfeXiPRHqV90babVCpHQ87fcPt8ICKalC7oZUhpQzOIyng6xKbQ3m2ubjxZPzs8dXL7/1ve2T9dgOp+vbh+Phxdvv3e+PYvAwHsPa4/OzcRprnQDi5u7B3YGw1nZ2fmbgEZ4ln06TAc9qpcsCwSSTzo+urvZ397vdWcnD8XT66OMPIQCR1kPPzJ98/Al4lFwOx+OpzgnlyeOn7331AzLY399WNDDSGqc69UPHqXO1Lg/uHOrgCMxIKUsWzlRKLt0bWrjqXKu5Qtg0TfM8qmpOKcAhsCslmIn4bPdo/d47//LT3/7B7Y9QYxqn6XQ0natOVWdzd53CnJgC0Kq1qvPYdAZXAAvX6gAYpBCqjZALsbkJCqEQACGlyAEBpEtJigGEkTkCnSAiwDzMMXRRoxIahfHCpViMLYhIIIziHmYtwpkFI1zNVOs0W2t1rnOdT9OeV89WdbZElgjA1c1C3XRWsKY2z2C+4OTMLQCX7EOYtQWwHsv7H0FNwyOlVEru0iLmoAVyj7IICx0gwsLUXSFCTZ08CGJx8wFF8JKVEw9WV4vqIYgJWBkENQuKqpoCukTz0AWCqEvVlICF0GxOJJkLZoJ02qzy06unwyrNug8UOnJ3TB9cPnGsU53ncbw/HupJp6k+XH+xv7n18JxXKF2/Gdbrs0NrOaXLR5fnm/XTi7NgI/KbDz998ZW3yvnq+sOf9AGBDCJpKFeXj1+//HhuD13ucuoawPE4e7O5WepkfzogR78ehqG/GLbH+/3D8eTM/bpbd6WkfP36evfoqraZEQjTfn+c6vjDH3zv3Xffefzs8TRPr26uD4cDkHTDalJd+jQXVxcNY26tzlMCqbON89yXgUhO44mpMCVCIuSSulwGlgSOyNwlDmLuViywTDgYgrnUVk+nU9dlEZ7mKUkqqXNJjvBk95SePfrH3/pn33/5fQbRycy0tXGuc23VWniry6sYDBFSBM/TFLXGbGFokJEIQtyDlqABBjKxoxsuXXAKZwYidg8BBDQEUgh3IETwCDNXAzVrjibknSuqerizMKIREgYhMEDEm2o4MeE8z21u4NbmWVVrG9VnXj8dzJsFopMrtKamas28YSguSrb4NxcQx+ZBSGbuSxgIYgnomztEpJS7lBdHOy/CpsQiQkxAqOZmoUstUsEUm4ZahBsyICLEkhJij0CK+qb/brQk0z2shQCCkTV0awzohui4aNTeqMAEnC0xdmQdc0p52Gy260StnG6nK7h4b/O21uk43V9/+noe23g83N/tx/lU5+N0PI2nk3u11lqN1dnZl778rmJ8cf3Fbii79apkRrRWjy8//vSdn/3K/rObdn+IIQfQ7RcvH18+vjvi1dBRspcPD+e7iyxye3c7T6N7AOTd2eMlDnJ5dXV7f+NmrcWwXW36fjod7k/71dBrtYuzJ9e3Nyihcx1Ph3fffw9d2qx3d3f7/UMWIcachZG1VrPJ6+zAYH7a76fakAmYZq3i1FASIEkGhEAzQgEWAARwB/QQBGJOXelKNouqNUJFCIjrXAVh1RUiMTImefb0vbbe/LVf/Ruf375c4cqiQqU6m6mqVg8N9AB3h6azWYAzSwKLmMkNjWfBYujuc7/4zPtOgIhIECMMGRbvkKMGOgUgZURzNABABUByjHAl6wv2TOyOaIqYgoCAAAtiWjDrKJRTlySDWasa0eaparUAN1X3MBt5+3xFSNpaaLi
HNm/NwsgdTMHdECUiTG2pI4YuyGloqgHhvmSagRwglrUx5ZIXlJmISJLFH9PUF5KJq2lzV9W5aXMPDFx0igsaaVFVIRi15tFC4s0j7hGhgY7hmYKtqdYAF4CGCAKZlk4wCKAwRielH0A6Hta7nJOpyUhP8pP3r95DjZe31w/7/fHhOI5Ha7W22sZpOh1bM20GHsPQOTWw+b33P3j+/NnnX3y4P1wvgzJGfH33eXt1942f/bnf/fEPURuB337xUi0uHj8fD6+7JHPz0nVqfndze3vzKufEmLdnW6KUU6YU97cPzFkkW+i6L4f7QxmGUnLfr0/j/Or27uLi7NPPPn3y9Mnl46txPB4OD6fTOI6TgSNhM5Ocd9vtNJ4YKUPZ7/f7w55FRHIzZRFwWHKOiOABpmCGKecIj+bIISzqbG5CVHIBiqajVsiCVauHlS4RiXAB5H7VX7z9zvXh9q/+1//Z3bynCnWeW53m1g7TwawBmLam1qZa1UwrWENcOL1Opt60BjC6siCRZ5EskoVlObIQYgAgquviiAREoAZhizecGQGwNUtAhbehZBYLA6C5IyESAsgb5zimvuu6oeeFkB8B7mFe52nJ2c91Vqs8PC3gYLV5VTIwtyXPsEwhEWihT8ZPZQy+jH/cm9vSy7KF2LgEms0hIom8OfwTMS5hO1yWaBERjmGual7dZw8Nd3MwRIbAf3MRbOrhDE7oCIFuBO4YAC5hblbdmusCjkRZEEWIQsCSGDMiSgZOuCppu1pxyVXH4XhxIeeXu11muX99Z+oIdLo/PLx+XavqVLXNqi0szBoLvnjriQh+9MlPrh5fvvP+u599+tHdzct5HFPqEvhnH334cLp/74N3Pvnoo47F23yc91ePtvOkOa8gzE0P+/v96eawv+/yjjtmofOzc49otbV5Pp0OZ7tdncculcPxYbvbch5OUzVtEF5Kev367urxE49Ipbu9u71++UXXDXOdkpTEaXGSW0RgHO5ur+9uzs/PAek0nQIhKLF0gSkc5jqaVYAoXde8NWs0JMzJKLggeJz2x+NxT4Q5szCD4zSdkE2kL2VVvSYqVxcX3Vtv/c4Pfu+v/8P/8niadLTpOB9Ox6nVh9PDaTwsyJup1VnnVltTs8qhPp+m6aBmDYjA0bURESTKCQpCKalwEHlCcY8Ac2yLP8G9BSghuy15TXMDwcxRfOk+m6tBBKEhUBIoy7kcgIRTyiWnlJjJCeBNTmfJfUKAVaut8erpKmLpRXmYI4C9iXN6BCKRWbjF4po1WyLPoI4QvEDZFv2Ug/sCfY1gokVYnUSInJHDQc0QwZq7RmtqLbxFm1SrmmlYENKbYJ2DGyyHrlj2ZxYUiB7gYIoeFL5cPgqQpmXbxpTSG3IMEjMxZgLGVe6HQrmAa3m3/5nLvKmnh/OzTavuZoBx9/r1/e2dV0Bg9WmapwhAdAAoXTo/v+w6/vCjH7nBk8fP6nSo8zTOIxi0aJ9+/OOLy4v1sH59fb17dLE/3iXudhePtE3aqlCu437S4zzWWXm9W7HkzXp9Op1MPbyN+0PuS05pOo3VWtd14XCa5r4UaxUNh9VmfbFRU0n93f1pmufEeBoPfbfCpbboYO5zq3d3r/vtOsz3xwMnLikzSdOYTlML9KbhQCCubdKTtpkZCTnA3BqS55Ka1qlWRg60w2mc57lI3m4vAmB/ut+dbzabVT5/9J0Pf/Sf//2/s9+f6mT3x4fjaZ7GqlaneZrmmYiAUV3B3S0gsgAnkPnQxuNsRsBk5iLYD5zYS8IksUjdChdw1JgM9E0kMypGmPtyf8QgJiHjCCBAi8UWSIJIPzVnBIGpMgsRI5EIY2CY24LM8cDlg1XNzLUqr66GsGWKgwsNg5wW52AERqBZvFEBOEAwOIUtuQMCCyYiQASAFmaxXDdEJOX85nFkDGBA0kUSoNGqa9VoTdXGaZqrxfJ5X8oEBKoKgAC5zQ0W0pUGLHpnC/MgFqRgJkmceyy9i7BIkYQ
kyszIQYSEzJmplK5IlwvMw9fPvv6lx2/X64dpmrCj2pQd7+5v1Zs3qq151JTSPFVhSl3Xr9aYCT0y+mefflTneTucmdvt7UuhRASS5NMvXj55+gw8SteVYYWxSiXf31wjxGoYxnEaD2OiDplW67PVsIYQInLTOjetEwl3pbhZbTXnTtUO48Fq7buuqvabdcpJiE+n8dXLawwbx33uU1e6aZyYGVEgUKe5255z4OHmzt04pyJJmzZr682mDH1YhDkAjodja1pSEfN6PMz7g05K4VrnnJKQMdHpVF/dfFHyqu92SHY67RH53befM0l68uK/+rVf/i/+yf+XqUBFa3Mdl6NFi5gtFAlyt9TNkCllWmVIEizRE9A0zc3MhVms77HrUuEAaoiQMgsKQDhbs8okQMEUgkyJkRzdQAEVwH0hX7FiYBEgAHRyDjA0QCRkIpYkTPzmCrysd5dJv0I4tKZzNQ+QACMIX9icgK4RuFThZfm2kkQR6F6JISUhFgglAgRHRGsCiAu5P9w8KIzcUSNaLM4nB6gL2oUJhSPhcmx3Yl+gKuHsCorhaCQIiGYaPnsYAQIEEYFBOBMgSiAbi6ckktGZUxJmRoSIQEwighKAQaIRMrdp1u08xwpwEGKHYbs+He9rbVmSsne5a8NmgjY9VDfzqpvVGsg55wBobVbzTLgZ0t3NF17nt99+zmKvr19tt9v5cBLpPv7443fefvvlJ5+9/cF7bKVNbWwaAN40LFqzOs3d0GeCeTy1Nvf9KsKO4wGxdr0wpc9vXzrOK+uF16ZxsMOwWW/WAyDPk7Y6Tqe70NNpf0iC2/Vam+ahbC/PTfF4eNiuhob5NJ4IDVgg6HCacumvLs9JVqeTgiShnCiZs9da0pYTgk7Y6jzttU1mXpi3m/5h3t/ta5d3w2pYrYZpOu73+w/eeZclYwxY0ne+9x2bYlivHBp4nxKxo9UZRLnjqZ1o4lKGQBKiwoTBc3gpadWvam23p70zHE/KOYY1AioGUwQRIBgnI/dEbB6oAQkNfFHvIieBwHCRQpEgQFJCSwhu6IQCFG7N3SUVVUUMFtJmvNDhkTCQUMJbBHoLtEBEQUQIxTBEjIW2SeIQcHJITBwYjIiuTgzgAWzC9IY6HeQWyxweFxwRoDZvs4ZGqJsZIgHqUlnInBrq8qWRcxeBfedew5TAwNlDQUdAljD1mGlBexNSCoAIRSRoCMjRlVRKwkzEvowQkJyZkqSUiMWDDAozL683Ok0j2vT6+vqS1scYMcOg6WFqTWtKiaRbbbjZNO5xnKsk3mzW41TRqxm3cTSKgBCOOh+++PzTy0eXZnZ/d7sZuu36/O7u/mXJj5+/ePnJZ1/+8pfnCZyRcnHI01gp0XRsVNP+8FrDk5RS3p6rRljOeZrGLz67Do0QnVs1r2belQIoktPh8JCoI/ZpfBiPD4icc/FGTNKv18ES7tuzzc3167HpbjVsL85Gg6axXm+kX6vGdDzU5oSdpJxL36/PE8LyWxJvFPcahyR51125tanp/f3RHM83T1JyAJtnBZBh3c/Tcf3o2e3dy9/4rV/lDlOHaMA5g1jzxoAUxTTAYppP6sgCm7QCrKpMRMi4IOfJo83VDQ/iZ1sbMgMSIVprLEAJYQ4mADcQRiYgd7XFFkwUhMEIGVYo6iAUGEiCCKHGGIrNNMwI0MyYnYnMLCG1cNOAJX9h1lpDDEbk9eUKdDlPBoQjcEAsnjpCREQ3C38Drgogxrx4+QA5nCPYFokBwJKZQ0QRGUonjAUpfBmnEjiEoTXSiilJJqFAcK3N5moeGG7RWrSkraEDBRMCI4pQRAC5MAcHCkqm3AEll8JJhHjRdHvuqeskF04ZuAALdrIKUUQpBHrirV4+v7joWI7H0TAE8TTOk2mtR21a6wyAm836NO9ZCMIyy2rbtdpqra1NhEGE43j67PPPt5urdbcyMEF66/lbx/FU51a6bn946ZVX26fuBn4yMA9KkllKgD3sbyyilJW
7QSB6TON0ON6thiJpYC6Hw/1Yp/X6LCdq2sbjuFmtT8eHH//gu1q9Hzopq6FfoYgjqQUh3l6/aq3tNmsz6/r1VNt6NayGoWokLOvhbLvZ5DJkzlOt4zi3cR5Ph3F/mE+HaTrprKDqaqdxvrm5AYX17gzJofnxYX9/dyslXT2+UIv1ixd/75f/yf/p7/3N7dV6kESZSIzFSRCZkDMSMZuTq4bFRER96sEJjdnYnU5tOtW5TSHgQJZdVlIMLTBEEqEHGQjGAoSSCApOkgQRDRctIEVKuUAmBAhiEEJAISSHxQposWBEwmlBNcDSTas6naZpmuvcWtMIA0Bi4d2jTQR4NMRgWOSS9lMkkYUtsR8KBwISQkQkDMQMKD9F9FCARwQGMBGipJRKkS6J4+Jy+ik7zikMIxzcUhJGaurTPJmGVUZlVwMXACGWtPhCKByJIQECJ0aCtID/E3ICEYQUGETsuYPSldSl1EVOESkCIyVFTNGkk1RgeCRP15QBnMgP93ttRsJNdZqbh4KDqSXh8/PzXCgVqjZnQWEKb63NhJiQs3AhvL57terXV7vL+VQD4tnTZ7fXd+fnj4L0dKhXj97JQ7q9edmXToQlFVXf7s5vb27A0zTp5ePzruvqMQIaYDiqSLq/26cub9brVcnW7P5w64rrfvji849efvbJbrNZb7bIpZQ+d3m2hkg2T6HzejXUcSSksdZqrfQbNUwlMXdplQ3NXU/jMdDcbZwmbZYIicLfOIbgdJwOp72Fr/ozKaJVT8fDeDpAxPZsu9l0qRuGyyd/7e/9rV/56LfW6+3KhQo6mBAQL/2pQAZOYuggjTpw88K547UbszF6zCc/TdXqFMxgjKpSMGWiQMAqiQgJIILMMMwjQAlj2SsJQZYkIT2tEWAZFwUALFoVojDXCA9bPORCpLWFujcws7nZm4PJkrFkSkmYkfur5GHmhsGIGZECGCG5xZISAEQCRiIAoKXMzgAEiIyA/wblgAzkwIwkxIlkeSsQAXAEuQliAoDA6mHqLiTMubk1C21u1cOCGAk9clDBVFgSAQAEMGIQASESMSdOiEtERJAAICIlTplTV6RPlBAEmHJQVJyEJPk6z/lJfvpMHmcsHu5Wx8P+4XCIiDaruhNynU8AHt5KH/3AQ58RKwIg6ul0yyIIETCbW+l7YpjGE5I/e/HiVOt4PLz/3pccdD2cE7vqhMQJUzvNw2aQbnWa6/pyo9o6FEYo2/NIXe71tD+Zq0iZx9PhcNysd6XvgfLd64fjwzUXJE0PN1+MdX9x/oTL2m1erdfdcH6apq5Lp9NECLVVN5h1nqztzs7DYZ7GeR7V23E8WCWRTBJRW5aCjDpPbj7Z6GbWFAPH8TTPp+1qq5BYEDhOpwO497vN7uJsyGl19ThdPfrb/+gffOuz3xtWkMmDgBc7dBJzo4SUAkUxGWVwEnVHp4QpKkZVbdWa17GNqmiURUZ0B8iUgCTQIgiFnJZUNLpbTjkFESAGIiCBMAtA0gAMRyBYDiOAulhf3uQ6iIExyNvC9dQIqE1hqZlZQwpO9CanM1ysIt6keiIs3BAwYCH2IDiGIzMREhOFh9CiB4bFDoAAHosCDoRQmEkwZeHETCJSALDOGgEiCQDdFrotIjIye2CtzeamVWPJvXVAGbmQpJClmokIGBBAuEQmAImYg3jZPVA4IQIQYArJIMmRlJPklBg8dCl35C2tz61nYQfb395PpxGT1Glq86ym1uacQMi6TABN20LfR2aBwNNpX/KqdMUhcl4hCXLenW3DfH9fHz95NtXDeDpAyo92T8Y6mXnVVueZQp3AAHeXVyJpHsdSinolgOePnh4Otzd3dynlJNzaNI51e/FIcrm5ufvJh99fiTl1q7Oz/d3NPM/dZpdltbs8W+8ubm9ORD7XUaRTnXSeqlV3Xm0uVWN/f68613YUpjJs+/68mbkqGt6/vj/e3Y/HY6uTNrPmoXWurdaYWwBhPwy73Vqt1rDS96WU7Wq
TS9p+5YPr24d//K9+9aW9ZmopCwBFOAtwYuQABhbAhMEBhIbgCGCGHuSsc1VX4giI6hoa4sREro0JKVviFA4OgUzECQk4EbHQ8j8GJkR3ROIIimBQC0cPCEMPaPbGK+wRYYRBYRHL4ugNnwFwEWaHL1+5JMzCAssAPhDCINrynoYwBEBgAEQAbW42BuScxQPDgSwSuHAYLQ8nhIMLARKhMAou4yCNpt6aikBKCYLdUnggNESEEIIqxClR12OzaOHIb9QDgogEHGTmtow/39xKEBCACTzeGKcdQwM5TBsG5pKRgMgIM9MGyE7z4X48fjLHOnd5ldqRvFnKA5JRpvBIDBmkK11t5KaAwzSfaq1EZN4Ipe9Xkmjo16rd0O9SSh7zMGw4/Ob1yx98/1vPn7/lga3ZXE/b7fnDwwOxPNxc77rkzVNHfc7j7JeP353uP3/56raUMo17n2O9Xs/jKXEAihF16xU6fvLj74ZPuftg2FysdltIHUnfr3brfpty/uTTV+FSUktMx/1DbYcsJVPnmKZpMp+YnKmszs4BRYLmaY/IOtfb6/3h4UGI3QLC1ZwDTFXRJPWZJfdDN/A4n47zaXOxW2237XjCMCk7SP0Xx7vuaniyP5/nwV3b/FDnQAbhCAsPfMO3dG7NCN0wKvjME3FCIWIUi5XTWOOkEeY53JFaQPUQtZyzNUuMmYUQkDEAmgZSICCRqLk1dSAKNyNGAEf3cAtHNPXZF2XoGxSnmmIEkRBgYCwKTSRbsnqLXog3jwZEClu2xwwhZhhgsbTbidwNIFgICSE4yH256UKEmQf+lNZPAcBJck4JBZGRYDEBm/niuUFcPoTmFghEga3WVmtEIKFjAC0ouAXOHZTIAcKc/f+vCnyToGYAYI/QJbDqgESYQgrlTliYMFiyAzVDDleNh2nusb8oF0POrY4elomZWHIuOW/Xa0DLWbq+Tyml0okkD2jVUpJuVRCg1pNkYHZAHfeH0nHpu0dXT5CYCS8uLkvaKkJJCKBmgQ77h3ti2G3OSuqCsC+Dt9Onn/zw4skzLuubmxvC0Ho8POx3u8eb7Vli/vTDD/c3Xzx/+90X73+tH1aDoM3T7nLDiVLqbm5vWTKzt/HgdartlHLpVtvpWFXHuR3dqjXry9Yj3T6cEKUvyU2Px0m4O7+4VHewCE6uDUybOqYEoMK4Wm9O88R9d355gQgm2A/p6ury/NnzH3/+8nc++93vfvaj69N9nzkJuDBlTlQiyBwMQjIBO6YIchJxMEB3h4x9JhQKAaIwiGYeFqliYJeRUsqJCyMHvnEFGzMiomsDZoiflmCamwUaRoA1t+bNMAwNfNK51eZvHhGP8DA3DW8eEYQYjhFY64Tkkog5ByKSyxuYYThy4JK/AIjghYqKuNDf3V0oKBZvSsUAaot+b7EEeLAEkjDz8mPuvgx2lgREM3NFL0H85gjkDm4a5ozMLJxD3D3AXCEANQLB0AEAjMEDwxHJAyPMEWwOJQNCd1JVFgwAymQ15tEBRFgg1B1VAzwpehNQr3Ozgx5WfTfP8zSPZ9udpAwRVSdBsRrMCbm2URkT+Nj1KcJLGjYXq5cvP6+jnm2G1WrVut7MoukX95/studDGp48fu7Exxo6XUcEmq/yMOHRImq0zarzadLWJHecUp+LO4gMzKf7w8PxUJ8+zY+vrn7y4Q9fvvp4vTu7uHzx+NHFaDrf71dl5SVU52k+bbdnEHV/+2ocZwAathsgvr+/BZubz25IOAz9RgPG+/vUD7mU2to01aHb7LaXL1/d3N0fXjx/Olp99ZN71Jb7Iffd6fgQzvu7oxTouCBCs7rBIQWUftiL/JMf/KtvvfzedbuGnnXWXkpIqtUNtc3zQq8DBBExUuQAx8wc0FqzSlPGJIICSdR1SM2iRaoNEQwB3cksJCEJL3dKCIJAQgDzJUxvvqR0IjAEoJlqdQ9llAibbAZzhuIU4UYA6LCUDhYaj/tP0WcLxxAAEQ1M3IIYGQP
RkYAE0EMbhCIygUcENAVkl0SIaK6oVt3CCXnZfDvyYuRlQQpzwyACdSIKRDKrrVrDMPPF8QQAod5aVbdAWFj/uLQQkBZyKARAAwhy89Y8XJfXPhFEUGsaFICsgGqO2MKkdFRHYFZCpiQW5pC8mUARBp+NJRODq2vVi4tLDiR/U/Q3BZGU+26aTmqt1dENkKLknlmOx6NwfvvFB7c3d/PkKfl2+9i1blary8vd9fX19WEadpsvf+P33e3r3cs2ja9rm9jp6upqijmY1MfVsKktDg+f1dG0YS7w7OriBx9fT83OL6+aN2Ca2oRJnrz1/tnFpav3fRlvD/1qfapjneL88uL13e315z/Y9P3l5aOQ7uZ2b/XAUMf5GBFnm8cspZrOY12tt2dnFxEWmDarNQVf37z67NVn24v+ra+89eEPf4wBXDpKMo2neVJadTnJZpDjw81h8rOrR+cXT1lr3/ff29/+4P7zl/bgObbRj4ynNksQd9AUMQsaCCKiRyiiuLoIgQdjyiTRamSgnIQUXFLomvLczFyFVBJFWKvBTCJAwuHRzDInohTWINDNVR0ihcESl9RqVnVuEU4BHugE6D4vCbQIxGUGCmG2UGuxNWVmSUTMHuHg4FWEOUIDnEhIkHhRWQkw1lnjp6944UQQQMYsZB7NNRBTZrGAxdnMKSgilsWwhZMDI1kQYRcxtVYXYihzEBGo1VqnVtUUAJaNLyL8NAuHC0g9zLVaxAK+ckAOIvfm4EG0mJfAXcMroNY0jWYITqYFmSA8xLA1M4JtObvcPO27TY+GWk3nlAatLVQ1NBFoVUBD8DAnFE54dnbZ5sndu6uz4173D9PubJ2kO46nNh5XZ4O6P758EZA//uTHH3/+o8357q23vzwdjoe7a0A+zuOOUPLOw7p+kzC3dqy1fvm9rz168tbDfP9wukmprFeXfc+PHl8wJ4cY1punT19cXV1U8CI50ai1glMZ+tM83d5+kcrq6dtfrqoff/KqcOo6cRPp+kRpHtvhdAfsuazneX51/QnKar0627cZDOaTXV2cvf3B2/M817H2w6pqvb+/m/YPqUsFi0eM0zxr67ddGEDXb7cXgfN4uNvrfbBnlmqVSIQ8UFtTYGLgwGV00yAgzIUJiSQXbC0FFKIkzBiZVTmkoEQwGaSgXhw90Ah4bkrCEogYEuFYkZKQWNg8V18AaxGgNmuzFlatNkMnBDZwgkALLgmBa60AgILo4GoAi53EF1xVmAeEZJau4/4iEQqQcHJiZRZEZEZAanpyg4ycO8kdclJiJJyFiio0nQkgUV6EqZiR0+K4YeEMwACQkiQsauCmphrNogJA8tqOdTxNbZ5mqDPAGxZKM9CGERwYZhoNY0JvWJtnUHIGz76U+x1CGS3CFAzRAJQhUoAhIgJjBBq4NrDelaY4iXbvdM8f7Z70wypLQotaa5gJAmeGwAg3tbmeVBsh5lI4yWq1Es4R+fzRo64r8/G4GoZHjx5Lv97tztfbHZBfPH4seXV9fXP76vU41/Uqq+vd7V1oPbY5qMsl9R0SpYfb22Ykq9L3NJ/G03EsnAHg5vbV2fl2KcJeXT7ZXl6y8Gq9mY+n+8PnNzc3gqtguLu/Pbs4u3r0VI0+f/nZdp2JoAIkWWnANM/H8b5f567fltK7YimZhc0ijCAs9Hj55JKG4faL14fXD69f37g38FnIcjc4lWgTWnDJYI0M3v7q7xveewuq//qPf+2bd58KSKta3awaajQzs2UBasiOC08GnZCInEvOQfJGfohdHrpEnTfV0R1caT9O1QMSkrASuQRxAgxIRGELoc3DgzD+DZokEjppddPwuUEQLmFJDPcApJREWJ/2jcMAAIAASURBVNwhkRABE4lIOOTcEyGqLZ2TCOeBuZNVPwiFc/FADHzDGCQkgxmFUurm2UEsZWLBnDkQwknYTsdqxljQrSIiQ5ihahRJboAdEdnC4kNyEYpcIsBqm+roVQUwrM7zrG7EkBJ7mFp
FpWgKJoAJDdUswiKCgdQYyYEagnMJylybm6K2EmDMbObzNHKS2QlVQ9PMVUIgaVn1gz95zJu3Hz0/79dtPoHaNE2AsV53WWhqNQGBJ2PgJBHOBCklAra2GDHV2ry7vHr61ruvX10/zPHo7CKl3IIxmGD46pevzs+2H/7k029++3uU9Btf+kByvpn2WYY+pHSpKbH41KpI7rp1gM9z63J3c/OaWZ49fU9nRJyfPn1mioykrqpKROG5lF3qu2Mdd7vdlruXd/fYb54/e+vm+rP7h+MwrFvU6XTfWjvbXUmi4/GIMOVcxrERVxGzmHOXNhfbeTrxsWvz+OrVF83bfLI6PgxDl0sPEF6rh/S5G8dmduo2K0hPceekAgBTzK1pKDrj0m30aBAkQguWoQEgkmNtXnvtMAkGUDhHCCiGt5gBXSHe/FsDBQnAQ4CFAKt5gAUxKzkjegNTFpYEEcjaIhq4oxsGZPOGJAThqokppSScMJwCI5CZ3M3d+tIhpimC+9UyFcUU0smw6XJOIpmHYWDx1maIjOiUWSA3nXnKiKhROUnpIGVwCHA2bxbOWMKghaWUwMENTUO19QMTG7IAkFlQmKQkqc85W60Pdw8PD+PokNzNKkSYAAG6a3Vrjj4ahCxAaQ9abvyJGAO9NUrKCYcs0vNx1nnycNeKqkYMesIZ3KraFFaFhkjkrc4G8mTYvn/23uPhCq1N497bydWYubW225wJ5+nhgBhLYyGlVEppTYUzBh11zIxgen/YO+Wrp89zFvAmktt4fPXy0x999+HRbvXWu+984+t/sKwv/u4/+Fuff/bhz37jD5QyIBQu7JHn5lkcEDnharM+Hca3n7/9e9/5prb57OLifPu4ttPx8PD40bOH+5OZrfJQp1kgwPthvQ6rpXTrYTOe2ubiSbde333+yXSc+lLq6eF0OnGJbugDsWkM61WdZrVxnkxyx2Krza7vs7v1XT8djtP+mBAmtXracwDnzWyhh7uSUTmHc171z56+K9IHAuScsK/TeKiza63zOLqiO7TWqiNYFgAHZDQ3QLDw5tG1ikwcQJ5ESIgYzN0CCBmIASAMIAM6uSAxRIQZILovqmQPECNgCg9wIQ9XcI0wMHVGIhJXh8AspevycvSHJf8ZCB4AS25AwKzjCAoA4pxR0EhZQFil9DkLp+KlSGupRcVkbkDIUoAmjGBgzx0xozOAUpgAz17VLTORIzgguoIzYORcmJeCDC4woEE4C0dP1TvFmOv9cX86ztURJAEvn7Jm7ARq40wABqREGAhCFIFuQRERLoybde43BGwgvN508ylO99HmarOFSgNwra1R6DFOKJ1sV0NpleaHpvV0mhgV0FQniFjnM8F888XtelhJovu7o7u36n3fM+eUuqXn2ZXBVes8hR6KlMrQjnb7cH93/er+9au7/c1Umx2O69Xq/Ol7f+xP/zt/+S/+9//q/+3/XNtv/cLP/tKpSffo8ni8S5OtOUfEsCkBmohPh2NgnJ+fHw73wl0SWg+rMC8dE6d5spzBTJC7qR7CmqRhrrp9fLXaXpzuX+ciq8324eZ6qveUZbvadN1wPLRUOqsK4IyRCUWS9CtOyTXmOgmlOkedp+aWWCrN6/XVanv5+u41h20uLoCKElxeXDx+cnaaX/XwPAzv9ncPrx8eXKFWbbW25q1hGGggQ50cSYKawhQLIZNkMk3SennTFFwKvoGdgbJgMEQWd2/g7EGmgAQsyGhN1YFZABEDMUDNIUidKQwI7c14RhEJcTG1YzgTp4AAdHcngYgQplwSYyBBlwQ8jCkPnYNP5mbNkSUX5DLljjjJw2mOSRGDSAOIM7C4G0EgEbB4YFiqhCsR1OZhEOzuoUwdOUYk4QgD1AAA9K7riagkWfUZRGoYsZOF2mgM0RwSS3EOtWaSCDUog1dbdNLhEGbMjCQBFRnLWlbnTKVFxOXZtmlAdV6Tlzwep7mqTnOWrpce0aD60K13Zf3e7uzx7qrLm5PWLqr
PjVG6fqCUCamZvXz5eb9aI0Cd55wzc3Ijlpw4ap3MTShT4dbmNh5eT8dXr2/raTzsxx/+4NNPr1+3cKGyW9Hw8ndeXV//+T/37//lv/hX/trf/M83P/rixbtfuTuMHXGW3FpDpPE4XTy7OOr+7v6hX/f1MG93Z6uhS4m32y0RUWYS4Zwwxc1+bgFzm/b7/cVVvrh4vNvtxvmUxBDs7uZVL7EadtL1QIU5dUPKub+5edX1CR0xOQqENa3Zw8BhgtNhf7zb3589ubCppu7sfPf4eJxtPr54+0ULJYaLR4922/L6eN9/8cXwYsT+8jjh3c1rLVxHtUm1za01ImCBcCJMKBI0veGWBGLj6rUWdskA4GABQEQgQZoIghkpEwIKExEEvpGqZGBERgWPAIS2pNAAHNjM3asruBeEJIvcBw1goTEjEJq28JDF6IIBECKSJbXWaGGWFCHBeVYKUHcwln4jJDNlyIVzszoZAjAFsnsTSaEQAIKIEUrojgZcAxyhiwDwoABAJ8wsiGSAoRZAXsrQ91kCS8lp6CTnNeKu73rJxnW/f7h5vQeunBzRyIE0BafeWtTl5bIsIDTIFucTZNo8KWXbKEOfh0Rl/zBLCkZoHOLkEAK0Kt269IbiqIyRUKKl6VgTHI7JEHpQFU4AhIinw7FOo6ve3d0NuSOiUgosNU43Yu67DADaYJqhaTxcHygXyOcffvj627/7o4fR1Fa1Hcd20pvTxUB395/f3/3NP/fv/tk//kt/8re/8723v/S10/4IfX56vr25f1nn9uLRIzs2CMNM2eiorVTvzlM/ZIW4PL86Hvel61EwIvaH+9B2miqhdF0eNutq7XB/PR5ubz7//Gy3ksyJOyKaDKZqpe9as1RyKSlAZju2aSwcrkZECK4aieStt55O1Sc9Pn787n6v43TYna0piSo8urrIOe3ncZsud7IyvaGcuq4zszbadJqjuc82TxNmGUrmLoLAohEJBSvYMspGTtWjerDP5EAoJMkCESPQIdGS5CQBYTYGIARUd00gQKihECCQVJ0ZA4IIAWPJOgASQkJsy+S15MwC5hMSUhAAmjXiRTpkkvLyXhbBkgIxPEVzI4SgEBmIJSW23EOuBAeNYCdgMpbgBNaWeIUDB2BkThiJaFZoIoDI4BjqgYyZjEdKWd0QmDiInFNZDUPOWXJKKTFuSumd/YtXcKwPk8/eiYgImVTVit4rrQgAWgAgIxcDa2YEsTrv85kPO+q6Qg5sMSWWNBskwaCmCct6fd7lklIaKKmg4vHV9Y0ex0fbzcXpZMceL55upBcSDW/HfT2NdZrcVdsphs12vTuepoAGoUNeMngiIiyZMGFZn46n+7v6/Q+//5PPXr3/la89Xw2nh+mwv73evx5nPe2n3/zRFx/e3u3rf/2n/vQfe/Hes0nnZ0+e13a6q+1UbdcPZgZuFFCkONb1sEopqVvpVobgEdvdpYYG8HRqq+Hs5tVnD/vx8eOr3e5yrqZuOXcqw7NnXwqxKSqYoMca0R8e2lzbPK+7PjxYJDGrNq8n7KRZZEwpZ2QKMIr25OnjTz66PRxazrkbZIx2cXl2dX5uPKlKnyA65ry5//x7/+o7vzLV0AZ6MreGChhUhBnY5iZDODmDMCTFBoIBiGKK0CKFMZl5MDAVQBNd4H+JKTE6mqKyFCRjYfBAR3/D4kRYsGmGphMEmaE6EjmhI7lXRYYiaaH+uHtKmYAjAoBNDQBaa00hSyRmRLUGIpKIGWkZM0nKwcxA1Vmo58gQRhzq7lKcBYgTYlhYEvblu8dl2CAnh2ZhAZAIILiSFMkcFIt+rLW5SWbpUkqlF5FUUp9T1+duhrFZPdnd3bEyIZCUjVC1w8FKIx4wrwmR2+hEDMBzM0+Quxg2uFozUZACgQmHFCFOoJ4ASWNzuepKjwQDClNuUEa+GVbd+frJo67PhFODDgO5AbRax3mcIyKiAfrpdOi6rvn4sL9m15kkUfZQZCDMm4tnz9760uX
T9d/5u//wk89f/+E/8m+V1cCEf+Iv/amyvfx///W//eu/8s/KZd1uH3/7W/96//Ctt99958k7zx4O+69+ddvR5UcvP2RC9JmoN4hoMwbc3x3WXXd+sctdZ74wCeY0rAFJrZeutYeb24fDo2dPn794gdzfP4zry13fb4G73A+n/QPVY+p6aHZ4uCMCrT50GRFrw1XH/VBQnK1FRoQkkSiX0qVVP5Dwq9fjrMd+GAjYrD46O39yuVYfKWKVEkhEIoD+o08+fXVzOB7382nEyq6QUlqvNv1qve1LUz3CQ0DjkE541NkjIxb345v6ObEpKRAAFKGanJVNTQAJRZMxGYKnn7a+AiEmwyBADg9EDA1oZh7NEyATOaIuAf5FM+puP7XFE0RzCwCMQHebJhNRIQlRETEPZTYLRHY3ceAX31hLMhYTRkZqszMxOhGSB7uDNy0FVucpD5QIEIiIiD314mEcOXDKneQhdQP3PQASWlQ91HkOyB33XZ+HYVNKVzruenKEAEHiU9sjz0whCMxNugUjRJsd785tvc5UiFJgZ8N5161LXyD3VnrGiOoNoYDkcHJsjhMw5q7r+7xer/u+6zerPDCkmPSEYJthWIv0sircJYRwdQ/VYACoOnoj6ihIa1Wbx3HvHtNxOp1Ojhpgm7Ord7/2i+fPvvLZ7enXf/d3L148PTs7+7Vf++2PP7/7T/7T/93jxxd/9f/6n/3uj370+ublk0fnX/rKB7/73e9zxJfeev/86slmuxnWHYTa8RB1XO96c5qPx+PhtutXfeklp4vdpRpzwvV2havd5H3u+3FsR5ve/dLbzx8/a6p3d/eqcbq/2+9vMKfjaVznnkKQCqeOCIauL1lASnXNvahqAEpyt4BmXemkrIf1MGy3JW9yXq82w/pip6ba5rPz7ZOnj1u06g0jUubd+VvD+x+gH//h3/+7//DX/sVeT0PqqCEEdauyLv2qrNerbjNc9mk7xRzUaujJWyAKSABlkIQ4iCRo6wSrLgmlCLQgMx5rnsIkYcrCREGBRGAqJgICYQARSOpg4RCEQUzIQkAYAOhMJEgmQhggLCUlouoRhEqh5CkCLKoI9p1IXkDMTmgRoOamYcFC3EQKCzMhZhyGoY6mAQFL1ACkQy4A4Ik6FJBFrQwWzfuBJgdSpmwpkxBLhEILFDc+HbVNp17GeZxg13LuKYkhcOa+H9I4DsO6RdfkABZhKEJpl2CjOUseOkrOtatTqjrnHsCHAE3dkcUcAJq7VHexYnMdVWrqC7NB0jLwqvQMaIzVpzX3hcGpGSW3INIMKcLbNNc6g3liGrCYYmC7vb9jZqIhE1ea1A5G66uLR8/f+32rxx8cVL797W+71idPn7bTw/Xd59/61g//wl/472r4t37v25n4s0+++PTjj/9H/8P/8Gs/80c+/PSj29uH7eUFJxaUvuR7tzaeCHAoopS227P1emD1tF7PbSylpFXPwznIrrAweqX917705en29ru/+3tffPbhw+GLNtf5YSbibt332+6DL33j6bN3jZwzswpmysOK9vceLQKaW2KmtLbTLQGHOQ3OOWXqQ5tGONBuWHdvp/3ZPmWe6wER+7w2r+6eh8Ilf/G973/3u998erH+yuWXPrnd//D2e0li062TkLBzcObgnHtfjebCyibq5EyZAABaM2ftc0ZK5uGkgO4QQJgydW82uMiSISzcF9Il4ptYP2AEKCASSSyldQZwQAdOCBSIyZfoGzROCEhCrr5YcsGnpq1No+uGJSoFI6CHYgB4qlOb55Mwp5SZyJeZY7dKrS6yVGYIyeYBVIwQMSBR13XNAs2FhCtPBuaWOHeClIVEBBCNBBDq6XCcbtf9er3ajOO+36YwCRCIRgApQ9+vTjVbQM4MligEkSHRaigeVIau6/KRTh2sWfrwpuZJOMnCYOynqgETZ6YETEyFc8mZJbEkJI8ghCzIltCNJQKahpuHIWvVphNaTHNTgs2wmvxU34DzsKTu1GK
9Xpc8PH/23qO3PujWjwNgf3vz0SefX148effZs3/+T3/54bBPQ/dP/+k/NaTdbrOf7v/Mn/3z/+P/yX/4q//sX+y2xzre//Czj1+8eAYaKCGAJISp88qpb/2QwHk+nkrZUGROnjqPLpX1Y8au+Xw81reev3N//ek/+gf/1fVnR2vlVHkeeWpGjtu7k8jdpx+9fPvLb/2+b3xte/XWvB0EoR6mnLAxnY6TawvmHF3aXZ7miXNKyB0mV6ttBAug1HXdsOZ+uFD1aZpK7ptGVcurlNYl9vcxPXzta19ZX15+74uXx+MxF4mqhYckRAApc8oYiFmoQRaB2XSCEEJEI1cIsCgWC8AvA1bwpVGl5gpvzLmL5oHdEQkJCHyJkDkAECVekLdgQMpiqABKiwAYgFogUEhCycYIXmPBN89tVG3uboFzqyVhECAGU5jPBOjNrLoQkQghhQcSQc6csrXGjBDhRCHCKVFJQFCJexRAB2AohVmEOZhKphW4IYIRpMiub3Tyx7vT7c2h6/KwxrTqV0OnrTFjrUpsORNLztFnIsSEzoxCKfqUKWPpejcCr+aClDwaumVhITY2MHF2ZBWjlCMhY3ZJlgiQGvOAHpDMWAh5gLyVoda9UuYGYeR1qvOxYFqv18Bw2I8aTWsDgKYzsAzDmnN6/Ohi6C7XF5eb8/PTQdvpYZ7Hx289v351e3d7NHfEcvFoHRGhdnu8+9N//t/7j/7KX3l+9fj/8L//PxrK/WnsV0OHzAHeXPU4pBTQlUGPxz0aIbAn2pxforXT6frsaserjTazSqnvbNLf+OVfvbluH73U0WMCEj5Lq+1pf3j5cNrI5iLi09/7sd492M/Ho3fePt6Pp4e7gOZcUvGUsmo7qfV9GRKrTkBxmg8I3Pc9MnFOscQrASJitRlqrRAwrPrHz9+S3RnUdhhv7vcPv/693/3s+jZttk8v3rq/eV2nfT+cp5Q4UekYAHKl2SEWAgq0wgkJXDXcXZVEhFFQBDGS8+Sg4E2NCAkJPEUgs0YQpohYyljuhhji2R08nAUlIbEjG2ZxNUAkSgs0SNg4jBmBHZuC0zw3NUBOEOgarTGTs+BPtQQjU1AkATDmjJwFMUDZQjLjPAEgGoFTztJ1uRRTmKsCOiF1qQTxLCmI0pA7RjCXlAnAAHCZFaMlqLZ/PW63h/t9KasppQnczMIh1BTJk5BAl0AwMnIGoCScMG0Gwky1zZ3lqgRA8wRETuBmwJAUXDKbUoQxA/cByQiJIyCBC4Qb0CwJgKlDgXBJZIbu3rwxgIh48zrNWCAXoUbBjuE4JAQZhoHAEbHbDdPoz1+s6nTKWTjpfjrsZFO1jcdKMSBQ4CgiEPL/+Zt/4z/5X/zHv/DzP/fo8dPf/u1v/oEvvVtWgxCTYc4duj1+dMEid/cPfbedp5OkfrPbOWEikUjinVFuudnptC79N7/9zdfXpx/8ePz+3YOcd48uH+8uLnR+2Dw/h5Db6/HbP/rx164udvPmd7778VcN18NKcjIgNm+mREQuhiAiXZZx1KZR9bgeVpk4sET4IiZpOrvbdPLA4KHf7Xb9+jI4/eQH3/onv/kvvv3pj8b97aasuv6MwMHn+/kBMZiR0ATRGfrSmfvslTEQ2CoJQ6ItYgVTgoSISFZSLxQOVNDmOrVa1RsgRHWDhsJvdL2OpugGiAQpWBDDS8KcEQkIyV0bmPtiH4UkLGTChN48nDhQEYDcgDHcwYM8FNHB0B0RCSlYghgkyUACxA7BZghvlCsWAB4ZCPsVdQNzhvCsjuhIEcKYBwTAlBm9BYRgYWEOnGutWrGFNqRE4jHu2/39cb09gEffr5pNVScFcPfEIrhCCxQKEMQeAQidiIgZdJFlzOrhgQiCGKgKwYIwYHiAs9GQlutytCKYKYXGTOQUwICApBiRYAXbYjlrSDCAOfFCQPXmCz0+5czdDj1EpI73jx5dcaZC9vTpU6BUGC52q8uLJ7/7/Y/+zB/7E++8+/6PPvr
44f6W+FxSHus4DKuPf/id/+n//H/9pUfP9w83t7ev5und3e6cF/QZ17Pd4wgc2yHlDB7H/f7q8VPBzgmFyBBovUFKYtGv+3rdPvnw81/5rd/7/OS/8Of+6Fe+9tXnu6/+tb/5t/+bf/73dDz9O3/0T37j9399/9aT7//Wt/pKP7d5cjpY1xkR2FxtnnJKgZ5SX8DcdZotycq1CRX0PNUmGToqEGhuWTBy8hbCmTLmoZf1xfT69ld+7b/5V9//nbzpX1w+PjWeIJDTdnNp2A7H1yjrNg2nlFBaYu9Sp1NlzAMxB5hXYCeSAiVDIiIiwrBhGKRPrSLl882k++k0zsdpquYqocBOAeFASJ0kU/BIpTCjZonEkFiYourMkOuErQZ5EkGEeeFUMUoRIkpnkU7HmThAWoAiObGaKYKEpYhCzCmLDKttLmNECyf35dgjKfVmcyBSIilUekAKceEgoQirENylFQp4hlbJLRgdSVF91mbq0WhqFROmHr2maR/393sMiggkG0+jgzccI1A4iaBVVYOUilmLKKE9JRMyxNHdwxxSJO4lEQrY5JRSGDLMKJaZcyokbCDQKEyDKhNITi7mFmABjjq3VWICZgkObpqCNSJ0rjZPAHG6r6XHrs/Mvt70HhpAn378WT7bIb2VpHt0dvELP/MH//k//60vPvv8/S+//+lnH55O33l9+9pKMtPLs+0f+P3vlZh/8OGPv/eDH4LqrtuNp9YuYNP7eJiZ8tR0s05CIT6t+r7O2l/ltNno3f328TNYd2bVZ1fnjz7++Nvf/+yHn++/+od/Vuf27/7p/+Dmlf7eDz6/fj1Op8Nf/xt/89+7/2M//0d+8eu/9As//q1vfmXevP/lLw+DHQ5fGIwAgCQEKkDTNAXGaj2YGaCXMhAlCxd3V8u5uJqHI1DOXFIqfZ+HC9isX/74u+O0f36+PYbL+bbdzovnwdLgsNnr3OJQQ1OIYENsWXKWJC1TRJe8BlfdZxx63vYiffKUEgoSwabr5hKY+o3JbPowne7u9uN4mOZDm1WIE4twzpLQJLjkgsKek0JMDDNTCKUZCBKHQiAjKAOHV8qQpGsK4CxdSkAeaggCkxAmwZRwrjMAIpaUEsHAP/PHH4sAcWHKANaq1hqtqptFMHFaDf2wEV7OzhjcIyEhrlNfmAE8EuSFRkdspnUcqzZtzsc9uNF2u0uFgaN0fZLkWtGjjqO2yWM2VJacJQOQKQinrtslIaRgEghq7cFMl2R/Ji+ZMualNemRxjZV9K6kPq9S6ogEI4OBMJVBkCvIFDELpZVsV9Sxh86u4eBxmmszc/V5ns21X2/WqzN3BKSu64mlzZWIGrQPf/Q9bY79xVyn9x9fNo2//y9/+emTi0ePL1f9ENFyF++98+IXf/5n//if+GPC6V//9ne++Tu/++LZkz/7J/7I491mt94kYp9P4zx2JQ9dSeDH+1sWOb86x9U6DatxfyjrHeQNBpZudXx1951vffO/+Ee//qWv/uzji/L3/sE//r/83//WX/9bf/373/8tUxkPh/v761bbarfaXJzn8mSq9etf/7qftIPJ2r26grbQOp+OjNCv+tJ3p9OJBUVkaqPDBOGBDAgsRCgeABDIPjx5Kz3/wEtq93fXh9cPt188POxXq83Qd12ggEuPkV1xBom+y6UEoxF7otTcJ5gZvc8CZB36qgxM1HW07cuqT/1KUsopJymZ85ClpMS5z9thV/JKkoSjayTMXS5Dzqths+6H9VDW6261KixBCZIQAQLM4W4utc6JISUJbIuCS1AEBZ1K6ggTEUPU1FXJRhQLpKd0HZEgJCkdMRm6c8pE/XgkhEbomHmeFECYObFDkKkvhicWypnIQDJjSXUGQQ0P8zr5UZVMOXQRvPg47Vfbp9v1usfMFQx9cm+Is2mHsk1FzZxQUCY7qHcrzkjM1Ajbgv3NqUGAOxExO4IbcdFaEYOwE80pupxWJJQoAEtUT0IMFkJCjNJCp6pzjdS
ZA2NomqYx5qlWF8acGGkTFA18vV0TRCZh9BC+f319ebV9cn754Y8+jLRaDSUNm//eX/yTiu3Xf/03P/iZL7/1pffXl+dD5rOzs5SH7/3o+jd+7Vu/869+5cXz53/5P/hvX56txtNDqxevr291OpZMQ8fqTSMo5xTYpRVzH+ackkLqV1ubHMZ6eHX3L37zWz/+7OYbv7ht7WY+3f/mN3+jdKuz7W7/+tWf//f/0n/6v/2f/W/+V//L737r++fr59uri4eb16/uH56thttPvjjef9iOY/UxEheW0g1qp9bOLndX0zQdjreATiFAwEmIiBeOd9Nm86orabOGsiHwYRi6Bptu3XVddVfJ945r7maPhnFqanjvXgl6oHuKLOiM7i36Doece0DsMwa4B4uTzEJY+pJFmFJIz569SWuYvG8dStenvl+ttnU/RauALeeUBXOSrmdOE3AwnhEzo47H6l4rGoai8+nYIDR3WOtpWHOfh7A8Q4TDsDrLOTfbtfgi46RxEqlTgAP23XayEBER8UQsJNVyW3XjcQ/E7CpUmDkVZ3F3kQV7whoOBtEJM4BQZzyrhkVVr4ANCQD7VpGQSNo0Hi62m0ePLlWVSUgiGGbXMGBPDOJ4UqsRDEwes8OJIoMXxACeJDFGJgJDAvcIcguLIEpmEQ6ClFLpcy/i3pyoUwRiT50gBkY1AkMNnVqjBwNokTyLzW0+UTALh3lrc8bCaTYPlqQ2a/VQD7fTzcP2rfXTq/6T7/7ryWl19ej3/8wf+o/+0p9963L7G9/65t5cuk4VP/zeh9M0ff75XVL8j/8Hf+EXf/EX+rwZb1+/vH613pSr8y2Bi5CZJc6tnkrJQ9cpONgch7lfbWDoyZUI7z99+cNPX/6z3/idy8vLvksvP39ILo8uzlUKSJrnw9e//sG//W//4p/6M/+t/9f/8//xS3/E3Gyufrs//L63nj5cw9Sm8f56aqe8Xo8OLIdhfXmBZZSHeW7T6WDWdtvLROV0vCOMzfo8563kJI0w9xFuoKh6d3+TxvlqWF3buL95tepwtbs42nQYK7P0Xa4qhOpYE60X8HIS77Mn5JwLUhKcBDszI57cAVIYQVckpQFk06G4Q6u5ap7NEWcAKClrPqK1BdlAgLlLnD1lAWyEgZzRWyq1tmxuWqNVrzUinFKQELiVwhQpTAlWl+dPhGie140KpZP67Wm+A9fWqhTcbLdCBF3qSuqYGWdd9V3botlxnBpCYkYiYxImCq9EQCJN4c2RBwWRchZVrXpEIhJOHVozQOPEoXpxcbY726xWXWvm6jlJhQlBhZCREnJIttBwECGHaqZApKYSlQSSdK4tYE6Srbq7IHL40sQHRGbwzJKldGzLBFM4E1HpyGOGpbvJJMjjNB81BsidSNFAmBrA8TBqm9frLSsTB6ipKwuq+fFwEsbjWG9+cPz6V76y6bNotPn+137zlx9fvfsHv/Hog3f/+PX1w83d/n5/4ovtaiibVX+2G853qy+u9/vDuNusyNNcT3NFBjztD5uhV62qmjkCEUqZpmnoOgOkecbxMJ785ub21c3xZIUI1Pzs/Enq18CvOka2lmT3z3/1tz789AtxeP7isYpNp1ksCQXm03vvfTDf39VjW+H67n7PBNt+fTydjtNn2/v95eW5cIeQ2qzuhwqnruspSN1SKUNaSwDUEK02PdTbL1YX3eEByt4uu3KEada72zodvVHuV8xpylZAYyY9y72n1Oox1tYxUUkdyeRKmQUzeFASaDrR3M2rUlYlSY5ITpxMZA46zWHAkuosmgDNI9B0JAzmYEEWMZ+BlRGbOQMyVkL3AHdyg9ZaQHSFABugdXnd5w3BpkgPWK1Vp57ZNttH6Vhw/xpsnOpnZ9uVABqnIikLJXfFruA2E8zXd7RvjSRSZmJEhEJi6oiaZEXYZUk5ZzVE9NKhIZlra5UTS5dyzpVstVu9897b/TCkIgBgxCyeHDsujM6CyMgLRQIQgABMtVJm89q
sJUZc6DBAjBYMYUaQCIFQuiJTI2+VBTAMgJEzODIVpqHL4B7hE0p3+3Cc6mmV1itOyUAE0SkPHWlExGq1kpI92CMBSmBrVUW4dGmapkxop+P3v/v9r3/jZ590fUn8k89ef/LD755uL59cXD3a5HcePRrWq3nGaZqQ/HS8f/X5nqDbFTkerkPH8dBgl6tCmysPa3e/u7ndPX+03p5NwKrzbCpV11nq/c39nR3n2qXy7PLJd774yf3D9VdePH/3vWff+/DH9aB9H4+fPvrtf/3rv/DzP/feW+/87B/8es6AEcfDfLla9086fH3x9lsf5KyH2/u5unBMMyCHYBBBhBGJKcx6IqTcFeKi7uJuzUrmwIiqYMGtXnTlOpehu5znGOP0cHiYYo/pvAMO0o5ljG4fTUpKNPdp2w0XQQenCaOloigOqWujMUZXCrO6TWQcOGDHWViwgzSMrQmcEiByoXHGmBHcGxIosYJRzpkFiBsDxCIEZorGBJJYE8uxeVgguKsDYspifqLkq3IZmtRORQSx1zolWSFMRCCE69VqHPF4fOA/9GeuSteV0iNJRM68JRB/I4jVnHG9zd3AyBaoQSq8hkiJu6GsA2DW2QIEI6Xi4a0FMRKBOULQ87euHj960nVdSh0CizALaAsIyoU4MTBpzOGKgGZeW4tQphLhJMpJAav5ZF4JBYmW1SHiYtZhD4vw1dB1uQRGYDAV4V5o1efImRTV1GmOHF3S1FVqVb0aB5r5rG3WlvuuW23yUKT0QiKckkjTJimzZKtaurxe94fjw3oznMZ56LrHF1tyRSRtOE9VEm4uzp++8+Vutbs/nObjaVVkmg+vXn5+PN09e/Z4Oo03r292211mWQ9J5ykcN9vz3PXj/c00TZv1tmzL/nD46AefBZfd7lyj/pN//C+H9eWXvnrZZzo9+GfXPxmn1mYEP15e5C9//WuPHr24uHz6+vXNz1xe/uGf+/27ty9immAaOQeXohpQx6HfpMIQARDa5jrVVCRllJxK2jBlTstiE0vp6HzD51dtmuonP5yODwYy+dxscggiFuHt5qIUZgoWrKyO1q140/dZzrpy3q97zgThKEokRBIOdW5M6jEmkdx13WqdVlvKQzd0XdeJYGA1aCktcE4gRAxnnhFaRDCuAA3lJMnCW3gQsKtYDVMIlfFkENQPqXQhuZXSS1JOKsIISaS4IwmmRMRMYhb3QEbEGFLrJIvAHZAQc0rMLIyJ4rIftqthnOa7xGOXABCnSTMn5i1oSpyZk7oHAkZCiG7QEGhNzAxKW211t7l4dPGkT4W5IWgpvcdoBuqNEubcKToyoc0RIwN61LneYWwtn8zCjobcOFWPGUAjxJURGIGBDCFaQ22OCAwoTOqNyDMLAxEEUE8RENxay5wzbwi6nvP5buiCpFrL02y1d7em7lTyAIA6qaoBzgt3z8LKegCjafSut/vDw/bskRsU5ot1Tuv07K13ADtOuV+fl25z98PvTYc7gJNZPT5cH/dffO3rX9+U9OMPPz3brnORgDbup2hVU5lrw3qbCKfj4eHuVeqC1KOCQwyP1/+dP/9n7h+u/87f/5Vf3+x+7pe+8Sf/XMI0/uSjTy3sS+//3Fe/9sHlo6vt2fPXL++2E/2hP/Sl9bMBujPfvp752Ev2kjkl2Q0WPh6rSD6NszmcXW6H1RYAAUJSVzJTpgqwLplIAgp4bdc37XAQ7nfrjarSdNwmmfrd0dpYTxPUU8KZvKbIqy4P3SCPSLCUlPNqvd7eldub/bXHjMF9x1B1GmsujRMndcLsmjTxZAZslFOh3tDn46kvwkglcc3QpjSHIjrGrYODH9iByQBHCyJmlJYLT+K5IGHHjMiV5f9H05/1WtNk+X3YmiIih733mZ7pnWquYnWzmzO7mxQpURzUlCEBvrEF2xIMG4Ig+8rfwveCLwwbsD+CYcCAIEG2IYuQLMqSmiJ7rO6uqnd+xjPsvTMzItbgi108n+Cck5kxrPVfvx8
QNoLs1lt/PSQmeolERKyWglwSs0v2AomyjENxMe9m4U4pc8rFGxAk2t/mFmPSpnPwe5IViRISBBML5RvmpH4cigCPtdagrrax+DhRa+AeJdF+zDNTyoHUER3ZhOj8eGx+TsEAE5OwZNVN9dSsOrTAtrU1D2dErtVo7cnVzMwU3VRRBJKwhdMFUYeRiBDUrAIYBIE4IQpxZhKZClloHgAPfJfzzlfNyTCgtQWEGHjIg2dXdKAkQkmKqdx/OFmtGHZ9tZtGeXqswMO4O7x+c7/0+OzjT7ZzTUK0tHeff3777OX903Jc/sU8jL13jk2m4fx0Oq/Lzc2Vbuu7dRHsxKbbORD6uopI0346H/t2zjkDuLf13S+/enh/6gvu8tSW96TL/+bf/Td//MnNf/Zf/skf/HP+7Pt3/6N/+9/66vMvQPrt4eXdi89Ssj/6w5/Rh/W3f/KDmx+W3fe+4wD9fB441rotT49DpjTdPa1nbBjEverp/kmGcd5dMY2mS7fOHZNwSQSMnoeUD7GuaX0w1GgIfRm1suQK5ZHPvcVSTYQkibpN2K1gKfupTJI540SUxgRTPgDl9+8+BwCkXkrWbdtaB+J5l9QEjAkEgGtXCYegQZKJoHSAKDmXhGdVbSfADYEY0RUdILEBqqMRk2Rz9ZRpGNIGHNEvSB5kstAI7bqBHxOJZO61A3awttkjcs1DuArgWIT4r/zrLwFwyAWBKDilWXAMENSeedhN41QyQCA2STqO05T3g+w4ZRIUYWZr9qjR3AwxE+eIDZF3824slCXJSJS4dQ6MZtvD0zu1bT8/Ixo4cU7Jvao/WTQA783cI+cxJQL0gEokFhcdeQZAgAu1AZlYRBwhEafhVxk3AR7KVIYroqEIJeEe9by8W9v7qqfWV4vN174uZ4Jo64boOeWcSx4GckssHvrw+GgG0zAnwnVZHh9O5+VeUhCyaSzL1qsPw/T2zbfv3rz98P6b9+/evv72q/PDN8vpA0JoM2t9WVcFXL1/8/b14/E47qacWCLAjHJmyUMetu1MFEDQe2NksvSnP/tZ3R6f3e1yWns9Dow//M5H3301pK5v35zvTw/7/UGQlnNrT/H1V1/OZr/9w89+87d+8oPf+nXsev/mz/2bz5f7dw+nh3o6QUKRISKbewQKyzgOZq1bU62SgUEcwANK+JSn/PKjOBzs3bd2eupNH9ejrU8EUDgFwarYVLtwK1zZlCNlR6JOtJv3Q7qa8iyJCWUer6c8Lkt9enoH4YQc0B1WcBzSCOO+jBNPBRNBNIhwA7fN4mz2BO7hqF3dkIBUwV0DmmoPgIjNLzb3cIyLDTITZo9GqXLaSJSFkALJ3EK1d+vHp+PWHljUfFE7Cg25ZLrEJ1H4L/+rLxF9GGZm2dpS0m4cb3p/AsZduc6lDNPMnHv0SD7nfcofEZechTlHYLcHi+UiekR0ZiaGiC5USi6ptMziNrW+mp9Pp/OyHklwmg6SMgGzUFDVvrgpkJpSeBKRYcyOFQAAulm7uCsBiQiYOKdEyO4O0Iix0MDCnECEmTNBCQcnA1b182l7/9jebnZsG5omRCySEyOA1tXO69K1erdee9XWthoKJOKoqis4dusX91suOZc8DeP5tAzT+OKjT57O27DfYSrD7gAiDtwNlnP16qe6dO/eZOth1qY0XM030zjv9nsFaY58mTgOiLb0cz09HllS91rXh7mwJNKUbvbXu7s7s/XlIf/0Oy/HdZGtQYWbJDPZR/Pw1376g9/4m3/pO7/2PWimH748ff2nx29/8eH169O6eGKAYTfuKM2SACAICgut25mQ9ruZMVNO+2mmIjxO8+0LvNpXdd6eCDqYRIDaIhFAWAGVzACcU2RGdEGkNDkl7SdAfn77/ZxLkAmXeRxL3o95cm21Lu5G1DHUXEvBYd5T2achAziBI6oBgNVYzfqxra0rqbbokWjIPAiTo9a2uB2HQZl7RKDPiCAyMhYkcGgRgQweKtIRkyvZxXIabAG
InGRgQQQgKjlzEkJUABRkCITu50TZvD2dv2QYhnyrViNwGgdKlPI1pn5cgWjHRAEJwHNOgLasyhiZseHizhBGrCyKVIFH4mJgtX/YWmXGquvW1pSviM855drcXRIfUrrvfu+GuTBS59TMgYIvbuy1q6qzB1OAsTDknAmlteYuIjKM7FG3tjoGkiEJ+YGhWHSPToRCxJBLnnLkWhuwL82SRdcFHJMMtvXWq3oQY+1L71ZNhfxqvrq+efkcnx0fjvcPp5RW6+urZx9/8cUXp9Ppo48++frrr8c5ffzJK9P2cP+u1/N+Hk6Pp/BtW9fweT/tMu9LnrPsZdx3IO8bY5gFMKPa24djXc5hwAOnotM8t7bdUuyvxvV8f/Xxx9//rb/1xb/479vrb/76X/1EW+K8dwrK6eZ2f3197ULH95+/+/yXT6+/iPOC5kE+5ETlkDh1U/UGADlnGdKHD28tNNBTHnZz6aZb1NtyMx2u8eWnuDvwm2+juYBARrVagSKCGDPSLiWmCttZiAvjqZ+c8YbnJ5S35/a0Pr64/hiVJTkXZh9v5pfz9/cfnt598fXvnc6PiXgoXe2+tw/mz+s6ug1QsnonCMDsAraNXc9JUHhs1N0QIhW+TpxTzKfltfYnlkBKxAxBDp6HMIOdDWVIarxVreeaDqn1SkSE3aAiIUaoasm5Q1OrphlFCIGE+G/8w08DFakzMgBrP4fHbnpmGpJzKUysSAbkEA5wsUQzEQR24tbs0WwDCtNqcVEV/apsWXIWSuGOhK1tW/uA1CGCmeZ5n/OAxA4qnJBa93uHSmRAyJyFMgaZuzGYe9ss1BGJORNxSiKcUkoEgkDCDakG9sDK4swlghyUSAHPtR2tKXuRXkpMU55JPSubhWSG1nXb1NXdwPzx8X453bsqBo1lit6HkknILMKQKYhx62pNv/72Cxb74Q9/sJzO63IsgvMg8zAgAOXMlLvBaV1Titu76+evPrm5+0hZugYjZ5KUZsnlfH7o6xKhTmEEhKXwkAjN9O7lS8oC2vKL5zff/RGYOegG1tvbu9vD9fVsvmzL/Zd//ifvPv+zxzdf6oe3EF1HqRdvltrpdH/ShQnLwIJJtQN6GQphFoiUC4gOeTjs7/KLj9rdC0dOT0eqR6+tbjW0YgQERViADykLAAQ6IkCwQ+KMMmDIuR1Xf9xfXSWZLxrwnImiueYx7+bpFkKRn5iRpYMAiuScOGdHY+oE0NVCrW0BhtE00wiatKPZBQuCwiJUMLJbNF0ByI0CutqGKOE0lIEJhZhwuDB5zD2CESUcIiBcESTCzDfmlNPIlJiZf+fvf4IAjBHechLm1PoGGEO6JmJzD1QiN61NN4Om6oAIKbqdFBaL8wWsTpDCIsAAxQMRW07ZLrNq+VpYAJUpBURAzWkYyh4jIwQyEAdgmJ8CAjEJT0xFrQW5B/au2npKxMI5ZWZCREIWEjcxD6KVUrBkAAM0ohIu4QjQVWt0B0u7fH1TXk3plsKEU6LMgNZ7183aGt6Wuh6fllbrBeVbZOxd12VdHt/X09K2bV3Pj08Pp6fz+3f3VTdiHobx+9/70Yvb/fH4er+fX756DuHvjyfO6Xhq5+rTbpfSsL96fn33aab5cH3IJQNmSmMei7N5Eo6o2xNCSsMdAWyndyUDsu6GabjeP71/z8vy+OaXE/WrNGdC9HpaHj68+7Y+fvj6Fz/fzk9MsNsd8v62ajvdv9mOT8S0KxNCCZKUSkrD0/Hpw+NDV23dMIyt17VRwrv9i/H2JdxeYRapWzy8i3qioN57XY91XbRXAMu5dNettiBA5oiIgO6xMW0SgNKhEZxJAmk80GFkljylNJpp107MZSiAgEnBgj0NeYc5IQIHqyXX8BYUBUzbeqqnBT2D79yJuBHZpbfDgO4twLsqXtDPnpgmZgpQDCNIYQWR1Lw3J8pJBqZMQMzJzIki6HQRjSGM4Yn/xt9/gYBEYXDyqIgU4dtWJRVmsOgskDlaX9e+hPW
uF0r75tHUFgPLaXJDQBMuCEyQmQnQAEl4Kkmy7ETGYZhTGtWbxpHES5qYExEjOxMBRLeViAICnYnIyA2CEOTiqifAC2OehCBBhBu7kSQhuTAwECECHELCS3hCtNC2rrW3mhDJsJ43Ao3wAHA3bWvdzsu2tNbWZdnW1lo7HVcPbmbH8wlLzsJrbU+nczDncVyrrkvL43C4fnVz+1wbjLtp3E0a0oxk3Odxv1TT8A/Hs2k6zPPbt1+//urz+9e/QGvjmHMZpAwkDJww7zbzrTbBPmUJsHV9AID94SZxmvbXfHdLeYr7x9d/9scfPny7H6Wv29Pjh0zU60ZIdzd3hHE6Prx++/X94wcmmuYr4eTuqkaoaZhbxZx5dxgCJYnkBAlTGmk37K5ffJKfPwcIWJ/g/Ih9QzPrTohuW90qYFDExdw8YgJCd+PAAgnMK3ZKpXBCYFvPW/0AyaYyj3zgPLgJBNR+6npiwTKVHoHREUI4B2cItubhGKbRzc0Aums9nU5djYgBAdQll4sKt61HdyVkdQf0CMFIiCSJszCRaI2uzcL+JQz90tbJTEKYEZEYkJp5NTeCIQj5t3/3FUmn1ADcIwAt3MO49T4kQXGgDta6rc2qqfVem1YkRXKzHhZMCYMCTGgsMiQsCQtDQZqmcS+YIhyiiBTOEGi1b246lCkPCdE9Asm7V9UzUvNwIgwIdXRDhnB1dEYphFQSljwiJL+QV6gEAFFH1rgQdkMjOGy0ixwcvWu35q6YYDeXGyzZW7PeKULXpS0bGHjr29oCAYFKmVKZgfFw2M/Xh6WrCef5wGUEZM4Fc8oyHG7vPvr04x/+5CeGjGkkmX/++de9w/Xdi+vbjxzjw9Ppi1+8/eKLPwd9f0gW/e27b79+99XXx3dfPb3/8uHta+sdTLtlwFwSjQmHgajINN7kQsSblKtyuHGyQf3h3fuz99b1uKy7+ZBkVIdAOh2ffvlnf7Rtx6vd9d319TjfCedxHIIuplCD8CIJo3VdGbCUJMKMhRMdDtfjzS1e3Zl6Wo7ezuBMAdu2uVcENwtzC9NcpKAQwOodwUtgIAKhAAzN2DgMG56a36s1KTPKCO5mUdvZLDwq8vliL/fwFpeVboxI2BSbhnqY18sksy/a160uTVdmxAggYFnDj9p720AhnLZQJEyAcQkYjGUOJ1NVg9ZVZGAamdM4zEyFIBMJoF/iLh7d3QGB0Pl3/vFLkibJIghjANKuFTx1RQ8thZHATavVrjXCuvVuG3DzaOEaGOAsyMyZQYrMwskMEMckU5biBqq1pCmlpL7Vfg5qxMw4QUCwawc1DWgePbwSJiIhEQwBJ74IkSAhk2AIRUqZqXTrCACREAKjE5qBhYeHAiTwITy5t96amqpuFGVML5lkiGHMEzVHdeEUCOdlpYD9za2kknLK0wgQ0ziot9PxFERZOMDX1k7bZs1ePPvo5Scfvfj4o08//v60m3b73dXN7e3dy+9//4coMB5uFGhtrchwWvub1+9O70+Pr5d5mqYxH+/fPb193c7vc3YEOz18/e7DNzwcDoeDhALC4Xr37O6WhzSl5GTDfJUOd4tDRuGAZV2n3d2rT18hBLg/PH6IaPM87cY74UtOPjVda12QM6eMgK21arWHIaRhGBHImhHC7dX17nAYd3u+vo40sBlsmy2raSfTtp7XbRGmJIyhROhq5F6ICKH1FcATyQjDnHaI4gJPvoQsUZ+6kvXStlOtdWs1oHXd3NeIaq5o/qv914m5IETT1cLDGxCeT0/aFvCmtjV92vQRsQMtCCugujez0K7mJwaKYCZm5lJG4nRBSWMgCSMREw6lTPNMxOEBgAAkicxbwBbh4c6c+e/+41eAxgScCELcKLDH5cXCSClSEnNYtyNAA7TeoHs32OjCNwlMWBInRmESwEzEAa7q6CZEdJkGYkiZPHBrR8lngN47cCoAEKDqm2oldkJB4EASSeDAgAHg6AQoBEIm5ECIlHpvph0tuStGcDI
Pc/NfmWlsYGSzbt7UzmEqMGfcJ0k5ZTBAN2ZGYDQfE2qCp2WtW3N0xw4ArXVkHOZ5LPtAWratdZ33N/PVx5znT7736a//9Nd249W6rvePj93gtGzN/O7Vx9N8vb+5SWm4nq7TMH7+5TdPb8+/93t/sD0dX7w6mC15GvO8P2347ddPtS6nitP1bV9Oz/bz4bAzqxxGBIwy757T7oZePKMXr4bnn2YRW5at33/55S8fPrxJ0m+udnpS6NG8Gu9cQgCG6Sbvb+arq/nw/HD7Ypxv0jjP13fXNy8DUmvNrKmeOdHNdDPmwa2DCPCA22qn16316BXDh1wi3HVLyA4klGgo+zKo9arnAcM1EMQzBcGKa8fwUKfH1pqbb1tvXVvTbVtrVe09omtvGMRB2swxM47hXXELdVfQqL1aPS3WVoAmKcKt+SlAk4QwIaKjMQL0FmEA4AFEOSKllAMtwJhHgAuSl1PKQxmI0V0DVGSIAPdGrB4GEEyToASTICsCE5tqQBCAImaSpTYkEggB7EQBQIYerq49JLvncMHEUqj2hpCzHLquhFVYVHXZzvO4B3SLWpsF8TiOJLdrnFZ9WDuJ7ZgZjN2NsDogIbNEhIuk5s2thzkzMwv/SgfTzZ66mikpPxBlFwAFQGytA5pIiiCNCOeI0A6Oq8iHpQvDrfdtxikn1taXukRvKedEfJWvxqEIjwiyvx6n3fVcrnuvmyEK7w8HABkPU57G88NpzAFYOsF4c6uPp69f30/T7t3PvpBScs4/+tFP97e30xV/xvkv/7W/+vl0Z325HuHd43k3wLOrK5Kb/+o//73Iw1/4td/44Y9/M+3pruhHn73II58edqftJI7D7av06V9EHrVmV8yYx+c/LF++rcejNtbA9+/7vA+7ugv3rGs9b4/3xyzpxcuDjDsDhHBEKfsD66SqLSjtbmZCqAXhFWXaikw3e0wD9g30bNtaO2y1JrAs7IAeJCIIho3QA9wWcHQ88NgimGyzh165M1I0VsuSTfKC9yeXHPVhe02R2QftweLTNObky/rYnIJFe+3ba6ALaErQ0UJCm8YKURNnDCmjRqwYzaJ2W5kzS4XI4pPVioDmFgBgW86AZGArkJeBseWIztmcuzUwM2QMaBDiEImh5Nwrbe3Ef/N3b4hdmBEpgsxAtbsjkTAzov1q/EohoLtBtyByDxPOANIaYBAxmoIbI0HOORw9OmCvm6r27rXb6rgAakrDfvx0zLfmfVsfzXtc5K4BZo3ZEQGDL6AvCAIAJCeCxMBMiAbYI0DVPaK1djHcBziE974GUHhBGMLIbAmP5keHDaGBMToTEJMQDvvrm93V/urm43F+Pu93u3mc8762bfXz49Pp+PDheHp/2tbeWvemoKfl+M03X7358ltQu9pPuaRxmJKUrgHIz5+9lJS/84Pv3756+fS4fliXtVph/LW/+NPa+5/+wb+4HWger+ra1WN/9+Inf/nv/vbf+bdvnr8cZhn28NOffjZdX+N8fVo7ycHoCspOrl7E9BzlgIBuK+aQnOpywsBWq+R0Pj4e3351fP82DKI/9m3RcPXqrZ4ePjw9Pbb15Fqtt7o8RTu5LghxXk5KbXcYd8OdOWIhdY0eyUkQQGjMiQDUDNzAm5kacIRBmLo7mGrDCDA3V1TN3gydAZlayBIIahxRu3ltZ60NAJGMQJgHD6tb7RaE3b25aqhGN1c323o7WmscQGHhxFxEsgggbkDdvImEqoZTIG3diDMiIrGaRqzmzUIhSJgup2dB8ubWagCadSTnFIA95axmtT4KhJgqgjAzBDIrc1I3wN5auBPiqZCoqoe6g0e/IMdqreEXBXHDmoVmAFrW99mGy44BAMJD9zV0gW7EY8lDyTvBw5Chj1hXX+sWHCIumBBHiMYJwj0CARUJPLpZzSVdNN1ABEbWUa212k2lYwNKnEi9Qwh5sghOatEcKwtlhNN5Q5wOu0KQym7/6bMfFp3O52NE6Hk7Pnz7+P716fT
+dGxlGG4/fn5ze+edHh8f/Xh8Oj4s520+XAHLIPPNzVXA06Kn73/8ats2ijTNh8++930Kurp7UXbD9fUL+BH/6S9+Vtfzbn/Vev8Lv/4X3/xrf//3/8n/Iy25n5bMw/nUvvfZ87IbH89/zoV+8NGnGYina7y9vdrfFIXltCENCBBhXAYAWe+f6sOb5eHtn379FfQj1vv3n7/JnKfDtZQhYZSrZ1e3ubmqNkLfT8NpOUePeg6lOEwzKGzrmtNw2O05yX7a764mxcKRrW29nsAszEcZ23Jat9W9QxgHMWWLBoEBoL1dWD3gARDknIUi8ZUEoKtRb4gl7QlO2iIYjBgZ8YJfMW2AkNW9aUtFwSHMzQxBiIv7ol7DEyKHeYBG4LQbmLtjp6LoSDSWSAnByLFTKLPIMO21L70/WWj46OFCyGxmPYwAovceYSTsGOgKkVTZUVlMupIwBw6AIpQCKyGKuGuHCEaptfb24MZA1cJ6uEeYATj0vjEgp8HAPJ0AyKN77SKC6B5+ERZFJEDCGKb8CQaZn5jHnGbmK7WOpAIpECRJgJutSI5I4Qk8MBQ8rCtnIEZiCcBAN3d1RZJLmLV3QzKiMGMhUFsMqrsjcUS8evbZYfzufrzNctXP6e0XD+LvT/UhPCWDhFGmm0bluy/k+e3dw3H7+hdv1uXhm2+/Tnn86W/85l/49e/vr1+WgRnw7bvXMuCPf/zruyH//h/886e3T88/+mT4apzSDJJU2q4c9lc3h+sD5vHdh5O1fn24/d3/yb9DGf/k//tPvNOOrnl4oYA9Hm+flWfPXoRIa/z0xVt4fy7jeDTN034/32EacL6KIATMaTo9rf3+A67Lu/fvrq7m+cWnx/t3uj6FQynz4/Jk3pkxp2nBhBSJRdK42x02V3efpskWfHh8O5Xhez/4i9cvPrOZY0xRaYgSLG1do8yn01OYa9+0nURyyVeIQbYFUjNFiiJJW1iClHMxqM1WtAYdQZOnwW82b5q6oPf+RDgiZmIx8w6W02RhJMXaB6sbM4ODdndfAx5AyYEDL+I6UG3gjXPkIohT+JmTISycc6LAMuQ0bcdoG/kG87Q7bab9aLYyiUVYXy54PFNs0bwHxwwUiZypAGhKGDHzX/67d4jENIoMgiMAh2MEmTojqru6LdtJwbsv1Rd3gMv3HOiGrmQGHtBiUTMMNKuALcLNe4S3rqZKGO42DKWUnXtHbA64rUvXSiiEmYgQMQLUnyK6u6mG28UFDwEhyYio5IGQVdUtulrXTShLskvcBdAIi9BU27b2N/M4vHjx2ctnn+3GGwi/f3dejmqrZ5N92edhTCJZhlYD8vjq4++V3fAnf/onP/vjP3x4/9U0yr/69/6Nv/Lb/9rV7avWjn/4L/7b3//n/93jun36w79wfXvXq37+5VdLXX7tp79OXJpZq7b1Fmhd9enx2Gp/fHza1vX49FSrffv2/uruxdPj47evP7z40Q9/8Jd+4+7jZ4frstsVbJrHbN2++urb1+/evX3zzfnpbCetFbwjJwYG4iBJCVnWZb4ba22tWt5dyXQdMiNnZFYki+CQ6AyY1OJ4PLp5HvI0HUJ7zokEXT0P4/721XC4CUgQyk/v7Xg8nc+hDaxZNEqUy+SewAysNt245CJCSDmlxEIkgWTmrbfACOsIRgAOqVIGJHCv3mvdwAgiEWVgEkRCDoRetfezaQMwIlRf1Z/M1DsFJI+LVhoiolsHB2YSYSDP2VOycJfYcRoL7TEyU7ZuARwk3Ra1k6oChEV1hwDstjXfupMZM6FIMEVgY0bwgf/S33mFUBiLUEJkQkB00w4AW+2m3HytvVY9dm1+0QCGQqi6moU5Vq3qtXcFgG5q0YDNwdRMLVTRlRDdzdS3eb4Bl96bW9feEaTwjoIxzB3MIeBkdlEOoimYIYAjQk7AhImLO/Wm7nAhvAediYzFiC9NMz+e3uci3/3or3zy8ffC28Pju9fvvm7tacxjonHeHcpYuioEhve
tt2l3c/v8+f37t3/6sz/Lkj/79JPf+Kt/9ePPfno8t5/94e/9f/6T/9vP/vQPvvuj7/723/7buZT7h69/9rPf/+//6R/M+8Pf+Jt/nTmP81yGaZx2eRg++vhjkRIBb9+++fDhAxIvT6eHNx/evnmXUqpray0++t5n560JD0AulEeBp/MSrmXi3aFczTvKQ5qmy58zjyWset0iA4/TKCVI8+46zdM4DuM4XF0/nw+3424/39yVsusa3WpdTolpHAaG4IQIQChAmJjneff8+avd9RVQMAHU1Y5Pdl5tWawu2s5b3ay2koZxvoph7EwEKCAo7OZhFghIDKamGyEUgsTi6ApuQE7chFrT1jYHMq11a0S7LBMAIlAYoZMQWDMCl0uNMoIwmXmYa7RamzuycFe1jm7EQqlwTrnkjJC8s1rGyAgDEopI+AQktZ6bLrU1QGBGM2IqRNQ7qv4qxpOSEF38uMbM/Jt/+7tJJsYEECwM4A6rea/NtTnhEOREol7dFSEiCJmsq5rWrq1uFqpWL34o5AD0rrWrmtvFZox4gXFlj2rRmbm3pfcNwhnSkK4I2b2qh1mobu4OqAHmHhYG6ICepRCSX8zg7q5gHZjS9c11ySOLEjUzbX37+OOPXr78zBq/ef3F4/EL03Z9/XzMzy4LQK2ttXNEgCOhzPublPLnn//Cev3pT37zuz/8sSX8sz/92Tef/+z3//l/czyf/9bf/Xv/2t//t0oZ/8k/+c+/+vyrd+/v3797+Nt/5+/98Aff++WXX7Lww8OjcHo8HcdhXJft6fGx1m05na8O1/f3xzB4/vzatFdt33z55TyOZb9/OLfe7Onxvml/PD6pwXnZ1nUtwzzvn427w3zY3bz4NOfROSCLULa19naMZA14nPe78XBBsmrv63IUSaGGgcLk0bVtHlbyMM27cF/Ws0MAQKjN+3G+PlStsGwYCzWNNONQJHEqmRFdN1Dr27nVI4TN02F/9SwPV2aEVJBEDWozCBBhYFT3Y11PrV1GyCNoQ+rgXrm1trYPVU+9gdABgcM0LIQk5UlkTDJDsBsQXbqlsrXe2qa9uRshEYzWCWEIUAgU3pWcmcO8qrFpCM1uRDhSzBgpydhaILC7uTdGyjkDuTbAEAYeypBKQvQAw7AkyH/xtz8FF8RgJklERIDV3U0RfMAL6Fc40d5jQeyc5VdWU/cIAw8zxbi8+gHYzRe1LQC21WurFs0M3QHRAajpav4gTBFGmBGIQgiyB29Nu2q4BawBG5IjB4BFeIBjKFMGQEd3t8sNJedUhmEcx5LJY2MaP3v1V8N2X3z15x+e/rhkmFLJXLIcgDnCgpxTHuUw0K1QGcacaXj7+v3HH7189el3Wt9++cufvX3zDaCty/qDH/3k7/3r/zCo/Df/9X/1//tv/ot5zEMZj0/tf/6/+Pe/+6NP/p//yX/81/7aX33z7v08lvN67q7LaXl6PFetfavI5OHeNM3D/elxW9rp/uGf/df/tJ1Pzz7+7u75c8SutTKntZpkPp4eT8fltOqx+vFhHdK+pNw1elumDMGA5uKKuSAzBsgwKicNqMvZ+7asjxYA5LvdNJRRHVLOTIEIWrWudatPErAb9/PhbvfyVZqe+TDZplByKiMPgwy5nZ4+fPjQzqfWKlAPb9gU3Hpft75SygZACKWMJU0gbMweUqNphDEjgprXLmSDWTOjrfbT0rd67K2P+brI5I6URFIKz+adqYhMAbhpRQhHuAw/RWDApVqXEUZTAKyqFk4izgShohZmGWxIuFfN7sYhEcy0QyCEIDZEyakEAMKEnjMlSoQphiEBNABnFv61v/USLDML86Xw0pG8tUaYDZI6uy+JxsIj4lmhCQ0p4WXG1DTMwMyQUCQcDNADAJDX3k29bbqt3bxBEDgIFCaPaEBJ8Fp4KHnUQHfW7hC8LEvtJ8CNxQKBiCToV+JuV1cCSuESwY7EUkZhoqVIJ+RnNz9+fv39r17/4qs3f0ggiWb0ISc
HN0d3FOKx8CjM1hvGtrUNQwjo40/uzOznf/7n7799OByuhjwQjj/46W9+9J3v/P7v/Ys//Gf/NIN++uknzeLc+T/4D/93kvn//H/6P/5P/53/2R//0Z+7Yu99W5fz8emwv1LtLELCvbV337xe6vr6m2+++uXnT+/fvf72XUIkkd5drd/d3pacAWgcBxSap/H67plTmcZ95rF3b72uy0IZSx59aw5KJTOodxcIyQUScsrT7iDgvZ4OeQbTQN4fDlMqGNSsaTfDdSpjTlKmfJiv99M15QnmKe0nyxPwQGj9fKpP5/V4Xk8PBJaE2dC6GgZ6t7ZoPUN4WAVw9dVAgYmTKDR1SzQwpe6dUBKOKKVVPOqHZnrarG/BkBAySw6QkkfhjGS9b6odMcLN1SMaIpgugeHhiJhoAG9qbhYGOg7F1d0aEXRXVW0LmObehTlTcEpjBEXo5T5CDCCLeSAUvnSXgDEkJZTkgOFuAMF/6e986gBJikV1vCdyADbzAAMcTZEuXyNbQHMARGHO4eTu2rtqIIJIkoSEgoERoN3dQFXXpbVqYYxIzBhIhIEiAMA4SgATCu/MIcARvVntdrIwZCMOBEISIA7HgOyOSEAMWnsAD3kWcskGYM9uPy55+PKbPzot70u+CcsBCLAi10AJCIOGthvSDsMZU6vdFD/9+EdjGd69+erbL792TB89f9VqH/b7V59+dHw6/tnPfnZelrvb2/3d4dzIY/h3/71//3xc/6P/w//+P/jf/od/+Ps/W07Lbp4eHz8cj4/X11fzPCzLKXH66suv/tl//3tbbXXTra59q9rq4TA/++jjMh+O51Ndz9ZrTmk/j2br4+MjJy5lGubd/up6dziUaRymqUyDe2cmTgjWxem81uhba74tj7YcW20qfHjx6Xz9nfv3X9fH9x/uX58e7nVZgCPct20jb5nwcHf73e/95u1nP4qbaxCJh3fL/c/LuvpyOp4ezw/HdjxqPwfYthy1NaJILITILEOZh2HPwsKcEiMABlrvdVusbRzAgO4dKcDBICr0Vbe1H5f2dF5PCkQ0sucAbFYROKUSzu54qaWsa22tBVmAdt/gUmGNIKK4nISthA9judpNV+CpdY/w3tQ0tgUiEtOQ0x4wAYBrRJiDdasWG6KJJMLEXNwRGVMGYuu6AToh8l//+z9wR4bU7YzpQSQQGbx5BNJMOKSEEYpI5hUCiNnNPfyidXWDgB7YEDMipVQSiZpHmKr21rUzhBCTMF56FkRsoOZbTkk4hRESRARgCJG7mjdHTZexB0AEAnQA9Eulx8PDqmp4jCVHyKtXnxD1Nx/+mfOS4Ht1jW4bsmdid1dszSuCiw9oqeRkmxW8+8kPf305n37xiz9C8v3+2c3t84e3j9dXd1d3t1998/XpeJqGcZqnPMyPp226evb3/vV/+8vPv/q//l/+o//Vv/+//PKr13/8B3/04x//cFlO7r4s57u7uy+++KLW9vDw4Y//8A9N7e765vmzu/28S5LmcSx5SOP+6u6ZpMwE5+Px8fHBPZACkRWI0tiaqnYZMoBLYhQch9x6G4aZQIjZQB/WLfry4f4dRpC2h/u3X3zxRR7GH//G3+RyhajL04enpyfrJ0aXPCELRJpTZia53o83L4QPGJ3WM9SWw/p5zZzA9fj4YX168tCcUiJyd0lClx4TEiJFgF8kvAGqXXsDDw+tfTmuT9U2Q93Cjl6f2qZ66qDnuoZF4qJGZpCY3TsCmMblbTCLbavrVg2bgxl0pCAUpAhojAUxCc9ZClOax0Muk/BAmLXbtp29W0DyYEC6fBWq5t4QtcfZooowUyl5FhYiMddcOKAiBSETMv/OP/gBgQC6+SLZWIBICU3dMYYiE0RtvXmQRQ1yBEIi91CtANy7mVWIgEgRwEzCbNrMzUIRJQwBmIVyYmZuql2NGCw2i0AQEoYQASQgsEAOIHBvRJqEL7dqRnSDRICBiAAQVauausPHrz5xjPf3Px+H0W3XtVk
nD3BSpM3sbEAQ7E3Fpix+Oj0VHj/7+LP7d/dff/WL26vDfvosHL76+uu725ci8vnP/6yUoQzFwK5u7tZz3c2H3/iNv/L2/Zv/+D/9v/+b/9Y/VLXf++/++a//2q8/Pj4y49u37z755NMvv/xKNbatdqvzNLx69gzdReT25vbZ82eXrPaQ5epq3u1KyjKOo5q9u/9wPm23z17wMDnykDOAJxaIMGtzHlkSE67rtjs829RSlnmarR7XZXn75sun89Pdfj/p8c/+4L/96svX+7sXu/1QpjLsD9Zb73U3TLvbV7cvXx2ef7K7ep6JsFcL9M1FcpU4L+eZcwA0a2HKmQkcAUUYL1c9QDM1cBZmuTyAS+hKmXEYc0KGgDwUESk8UC7AQ9u02hLATQGDEEU9hSN6Z0lde2tba5uqr8u69eO6HTucUTZiADTAxhKIGmCX13eeZ0Q0tVwA0JiyRW3tCRkDSbu6N0R3g6ZHB+u9YTobLADANAonZAIUCGJhZiZCohJO/Nv/8BMi5ARBhmIkjpepe0hmyd1a62vryAbsDk6CRKzqCAl8qisiAIALZQRABPCLOi8AEJzcIIIIsORhHEcPU6seGoDmm4Uxc6bMiMTWbVW3ALeoEZoSiwBiMBCjuCtjYsoe0Fqcjk8vX90iLa8//HPizZTDBzU0jUBk7mpL1y0AiZLAzOTn0+NeDj/94W+9/ubbZVlvrp4xlG2t796/efX844j48z/7o5LFAVnk5uZ2XSpz+cmPfv30dPwv/+l/8dt/67devfrsP/9//1c//Qs/OZ+3/X7385//+Xe+893Hx6O7MdM8TykXAiKI3TzO+zlAEb3p9vzu+sWL27qekuB+f7i+vhmmUVUvedXpao8IQ87TWERoSGWahmnaOVAuxbSZ9/lwUKPzu68ZAgOsHtvp/ZuvvyXA/W63Pn7z5pufA3QZh2G/313deKRxLM9evry7fnF7+2J68Um6+rirAi02X2kZYn2q9+/v3795fHp3Ph/dsACHtd7qZavWcARMSVLJIokIiQiJJHMuCYlE2AGZk1xkss3UCDCZQyATswZ4UCAJZ+Hc7YxIEWjWa11No/dW+7HWs+GKvEgKZoGwX7WFcGMacs4AmHMK7EyA5ABntdrVERmRwwLMwTnAa5yarj2a4xNxIPIF+otIhIUoESEzAhAiBzr/7d/9fgA4avdqWJGMwJNkRjGn7dRqbwbKCZghIFggvFBMQplx5JBEucihpMyEGIAQzOQeXVut6iaImVByKuM4SObEgBitrRGOhGZRchlyBu7NNzOtfjLTABNB5iyQiZAp/yp+ghUCn+77Jy++l3n89u0v5pkDajgHCMHQtREj4KbePCKnnPgKUNf29VX5yQ+/+6988dX/4K3f7l+15Xh8fGzr8uL2Y639m2++fPHiuSEAwfX1DQaa20effqTW/+TPfvbi5Ytf//Xf+M/+0//XT37yo9779fXhl5///Nmzu3VdT6fTfj+nlAFoW8/b+TRP0zCN3ToTEvjVbuemZZrGaUJVDM9jub07DIUIA8gp4TiU68Nuv58lldu7W0Y8bmcIkDIMQ16Oj0lQhhJhD2++GnZFys1EeTeItp5Ynz1/Nk55e3qigDDdD7tPv/djuXo25XF/feDnL2wqIEnKtaSpguKYOAmeTc716f6N+UbaODSlAQkBADGQMbGMZUglu3lr1VQBCQJUVdXMAQFbq9u6ACiZYQASs1AYb6Bn6xrdobEgJVHdTGuSjEiqql2JAUiJyehIrES/antRAHMggakSUYAxh0iuVYdcAKy2c2uKIgxDkaS9tt4UevPVo1lsDiujmF0y08I4EuzdBUEQhHEgzIDG/8q/+d0AtFC1DbEjOYEziTlBpLbBVjcLJUZmEE5MJLhHGLwjeWYcGG0sOQubmxsQATM3t9Zrb6BNKAbGQSRLQqaUkggXQOoNXRNCMrNcRsloCBZqvrXew5GFEybChAiIGIThFuFPj+tHtz+8mq7fvX1
9uBkzEYJFuMDOwdW6e/OoSB6gCCmnIdx35cWPvv8X/+SL/245P0y8Qw1dTrbqy5vvgsObt+8++fijnDKQSCnCUrf67PnL1tdt60+nh7/4a3/jX/wPf1gK7Pe3HriujwCGGI+PT/M0sZC7927LeqRwIjfvta1a28vnz0XScnycr+Z5Nxchs21/NQb0KadxHm/urqd52u/maZzHcdrtdimlXIowRQBGlDQlSa5LQst330XGxw8/vztcld0tsA8cEC0Q9oe7/bgHwJubu3DM43y4+6jcPKP9deCcJak3ZKzrVuaZpquMJXuAbg5mmw6JuBj4EOjMxEyAzEjE3Ky5O2IQ/ao3jwhEpBaIhB7uqh7gyJJdZG1+//h035cYM6WMocgISHU51daImClHRK1r1wXZWBTSxskuZq7EiuREDNQDVjMvZQAgJHZTBEuclu2x2VHYRZglzBfDBtRapaqPASe8yNtpQmrESFgYd0IZIhDIDAmTx8Z/6x9/jChNt6YLQECPhCEogNkUwmPdNABRNHESSVkO5JM1ImAwCo+ciMgtFhIgZkCwwAjyAIzilhmHnKZ5HMY5lzIjIhohT4SiHREFIhF5Hkg4HAIsWl89IuHEEAhKUQgpoJnZ02m5uf7senr5xec/m698nke0GcEjQAQtVN2YLtCUysCCo0eHTj/+/u98/fWfrE9fPZu+X/yAsFlP0/Bsf331+u23z+6emeL5tGrT/eFmWU7TNKhhEv766y9evfx4OS/39x8++853Hx/vd4fd+XwiotPpnHNBQvfGTACgauC2GzOCeuhuHHNJqh0sANS83dzc1N6JoOSyGw7z1dXVs9vb27vrw+10uDrsDyUnREzznMs4lHHalc22vDugIPaNest3zx3i6asvOPH4/KWHAqya5gQpX92WUvIwzy8+CUPtLY9X44tP3AnUINy0cxI8L/3+vdBEwGiVAFRbD+2dWl85cxaJQJGcpaBwmaZShpRzGcaUS85Z1ZpqTqOhm65Bdrn/ba0eq64OD72dXSGNgEXSkNPsBuZPta0AgZDDyWPbtgUpSLpFc++5OJERXjD+gWABTsSIHEBdl4i+nM/gm5OZniC6ZIZwJGnW1lZbde0VEHO+jA0i8gYQEAwxhDlxmLkpunnrR/6d3/1OhF1CAbX2S0eMkdXJVHu07hGISTyLCE7JiwMgi+BgtiGypMv8QB+GVPIEaD0aBCGgG7QKETKUcR7H/TBe7w9JcoAz8yAlAFrzEZlgkySAkCGEfbO6ViUcCAnZETJglsTrep7H/fXu2ZdffMGJy1CEx5wHQAzSwAhAMCcJChIcxvRsGK7r0j/75Cfe2sP918/2Lz3YYDPQteLN3ScP7++F87atj6fHWuvN7bX1BsgAqW56Pt/33r/73e98+dUvP/30k/v7D4fDrre6rsu6bkSX2hZGYIQTAWFJDGOWbd2K5K3VaRxFsqtSipRKGcZt3Vqrz57f5rJLpUzzVZ7n3dVhnEZJiCIy5GmcOI1BzCOVLBCYxsk0fH3E1qcX343E/f5b5OnwyQ+9OkIM482w2+2vX+bDQcYx0zCnibaGuvHhQOOVbrEdn/h8wu3Jv/ni/bc/R2J2WO6/9djO50foOQ/s5s26Y3BOJImQE/+qzHKh6XdTNQtEi+h1C+8hQCyhfKzrB12dBVAggEsBcowAHlzM+qraulK31i/FcmyqGyKEb4AOYCJJDT0gABwQYQRMampe+7q12rd6WuojEYoY0EZoKY8KDh7bspl2dwJkpDCvHhtzQnB1RWQMEmSMar333npb+W/+o+8G4NbWblvvzUOHPFxKre7QVd2BGAINach5T5AghiwDErmh0MDiIh4YLIgM6tXBAdjdEZIqhWPJCTGYZZznaTgIFzWxpkQAUdxgyDnCJBWiDVEMUL27ucDEOJk7SEO3kuZ5d/ji9S+B+LB7ISjClEsiAkJqzQhKytMFvpJ4P+WXCa6f3373Zn/31c//bM6oGF0X26rA/Pz
Zp7syH98/6La23pB5d31gSWvdbu7u3r//IJLevf/2xz/+8bfffptSGcex1rrb7d+9e2/eEImIcs6qegElEHFOOUm0ugxlrL3VWg/7Q5hv2zLv52EYVTuAm7V5nsZxRJJpng9XV3WrwpIk55QBIOUxJSE2sMycApXSnnmE4s0r6zBe7zJI0s0tdq9+GJIkglnGYS/THvMeHdLNjONYTwv4RtPA82HIWbeVXUqZ1vtffPPH/4wlV/SH+9dZLkV+6L0zcsoJkdwjAIIiSb4ogGqt27ohoVAiRETOqRDCtlUkSiVzGloQInhYB3O2ZluYETijI2XT3NpS29K8pRQIqTdDbuGAmMMREdwrEREMZtw7uIf10AZaL+HgFl6HYcjMDoDozG5u2mHbqroGBAIzpfBMREk4wNyAKZtW88W8dq0Owb/9D76jEFuvvfW6NVWVLEwIQeEQAF2bRQcAAM5lSFIYRowS7utaT+dzxMbsgebRHHqABZKaqXaIFP4rbMuQR8IRg8ZhLjypX9YREJJwNW855SyBABisXjVWiCxxQzghOTIg4tXV7v7hfe0t5zlRKTymlDgBUQrMtZ0Bfcw3CW+YZiIWzkKHFy8/ev/hW8JgnIrRjscDfDrxxy9uv/vtL7/t9YThAVByHsbpw/3Dq1evzsvSu14frolhnndfffX1d77znaenp3mej8ejuzMLXaZ2EMwMEUXEXYcykAATofCybleHA0SER+udU7q7u1uXc4RFRB7K/nBIw5xyIWYASCkxYCoZIpDlV+V4kEvFInCIEE8tsdhRSQSHgwD1vjmk/c0nQTkAmjYnT3mKklwJ5is+vMLTKZ6+jaTRKVSTLu38lOYpoZw/vNde3fogWQQtkBBzzlkyIDIJCwJGuPuvRk49pZQkJWEipAvkHmkcSkk5IYGjGEb0p7o+1WM3Q0oKttUTJsiyYxx6b23zZTUHA9faFo8Ix0CPMEQKMHcHyNrBDF2TKYaBh1/mgHtriEycicFju1z2XEs947qFXzR4NBEOl/AeUTIDN9PYIFTV3QMZ+Lf+4WcWbh6u0LZWW2WOaR6yJKTGHNpdmwJwGDFhykOigVwC2DzqdoLoRAbY1RqQEUMEtW5mHUEYEwQgyDTvE48SidlzTixea3UPDUPo2htioKzgnGVydPUONmQ+CBdCU++7/bTZg+maeRdKiJGHgsHBJkk80MMQKPEV+TzKPObJml3Pz7EPfTnOAyecB9qP6bZtueRkbXv/+ptMoSGENI7T8bSMw7Sfd1999eXt3W2r6zCWh4eHYRj2+/35fL4gWi7JbQBEhEuHKOUUEaVkJiEWFnYLYpIklzqGCElKRFxbZUkaMY5zSoVSGeZZRNRtKAMx11pTyaYhQh4BHOYunN06AEJTDKQioKsnNpgG5rrdQ4Dsb8lWZFxOJ44Ybl9EpNiOlEBuP4b71/r2TVCHtp7efbm8/UVr/fbVqwTt8cNrFiEEAAqCVLIkuQRmiVC7AkTORYT9X/4ggmo3M4r4FbcSgQ3V0SKSelfrII4QBGv0sz2JsNAotMucBIQhbbWdlpN7D7S1KpAICxG21lvrqu7G4bQtDjCAEyBwIotwZxEhSimRu7pbQISzdupb3tbonQEyogA4ElwGDM3C7eKiSESDuTET//Y/+sHF2gth7r1ry6WMQ8mJiBUpwsHU3YCRhRJiEZxKFklFJAE0tJBkw4BIFG4GvVtsW3UHdAlTc289IGI/7gk4vAZ44rJ533q99MnCcKurwZaEheac9iSFMYhIpDAFp2a4BnZm5LjQhzIxQXSklGQEJAIGLwIjhAxlf9i9ZMzovj0dRx4ERSh55G2JDvX5sxfQiD0VHkNwGGYHBI/rm5s3b96o2f6w//qbr6ZpcvfdbnepfvTea61mhhgifHFslVIAkIhLHiwMiQkTpzyMORCQkCUNQyplsHBAyKUM076MO8mFUt62KjmVUrQrIvbePZyFwa33noekreu2EXbvSCmDNyAiQgr
HNECvqcBmRwTEstenczwdt6fF6rncXQMjPL6L5Hb3Ez+/L++/9ixOQwNYv/mz+vBV3h+Wta3HJ2Y0YBYys63WICLhYZiGcUqp+MU3EtFVkQiJWERSiktpiED7VlvfuhkFE5ljJ6qwPG33j/Vk3vbDnvIuuoBpgpy4NO+nZenaHYkQ3ZEwh7N27I3cWNV7t3Bn8pQxMFQ1AACh8BQuzJlJ3ClMVAk8MYg5blWBMMLcnZAI2AzCHSKIEoREXK40mX/nH/3QwGpbPdTRkEiYcsYsxOyXloGZqYY7Co/MI8eUWKQQAjg0M2eOlAECWt88uoFvtZpBRNpq7d17h/PpNOScZXAHRFRTlBRoECGIgFKbq/dhnBPtJe/zkEsxMzdNqSAnW+qxFBEWt2CahIbMlBJ7SDhcVNVojFAy7zxiv79Jab8sZ7NzQJhht1M/v4UtbuePr9LL+/f3Dr2vm1OogjsAU87ldD7nUoAJ6eK/5JyLSEpJHh8fL0tgKQMiMctlKxDhYRhEBDlKGQOglMxMnHgYxqGMYxmkjCx5mveSp/lwLWUCRBGJgLrVknNcLl4AddtYgAi1GwExUj0fySI4eUYBhQpEaM05hznp+UwY9d3byNepJKFtOuyPH97I5mW+9ccv8f1D5CiHcfvwjW0nlwLz8+38+OaXf0xIn33n++fjk7oyYdduqoA4zXPJY0oJidbWa2sByJLKMLKkYZwkZSR2BEI0bapVLRjYzDtG7fbN+d1Xxy/vlwckJiAi5lzASFt3BVOqvW51UWN3FCaiBMAQYhrhbObmFsApE1IDMARwx5xmpkSWTLltkdOOI2lzj4AA62ER3Rwuaw+yO6gBuACgeyOGPAxmv6rk8u/87vc8LpuIeygjY/iYIJcQNoJAQDVRZe3gRomFhYmJGQEdcEMId0/ZIkLNAdFRa93CmaCYkyskEnAM8N1+RyCS2L1qWM4SvRPJPO3DyZwIr+Z5GOeSeJfz6HHuujGKee2xkLSSM/FgkZBERJh3bm6+sQThQDSWtE+yJ0reOaccsJk1COMwqCEIQmXCu1BUWyIgD5fp/h4QwzhdYFCUpDdDoOvra1UdxxEA7u8/+EU0TpTzoGq9t4hARGYaxtK1J0ngMA4jAgpnIkl5LOPIaeRU8jDmYQZMZSgA7BHgUEq5nKyGYfiVxRPZwMI859GdulYB1NohyVoDVEE30A2Y+7ISynpc/Wlzb+v6/vrVp7ZWPZ6vP/kuUqy6Yhm41dAjEAkOx8+/rsuHkQPy2KKtT2/LmGWaTuum2pIUFiZmU7tEfj1MKHHinLNIRqQhlZQHRGitUbhuzdSIWVIBREU4te39+nh2M+CU2QWPfW1VC48Yg3kN16Zt2U5Vt7DLqEokTu5w4eIENYsegSkHoDMjuiNEkSHhQFGsU3heFzUDRo4wjHC18KTu7mgO4EFMgAGggI5AzA7ozJxSBuiIC//tf/xDDEcIEujaKbAwzIWGTIjg7ojSN+pVAqK1ypzSBCklRo5wJHPwiAa4RUAEIrFC61oRMuMAIRQoAEXGrptkvDncUbRAVN8wdJBdREkpjePEUFjKVHb73ZDyIdG41futPqi22lcqR+RtkDHRGMgRjJiFCxIBmUXzAIJB8o54TDKFuelqvvZWQ7ekNiiHixnfzXemjYUZpEOEd+vh7k27RxiAG0DAfrdj4Tdv3rx48aLW+vBwPwyDuxMRgNW6MlNKOSJ2u/n+/gEAkyQz2+12iMychceLAA2ZOQknaVWXdZEshNTadsEYppQuy3/O2ezy5on3DSXlYdrqmSCW86lBL3nfzls/vfdwiXw+Pq3nUxpH1Q6xWa01dvPV1dvPf4kW4+FKMsR+H7tDd/DPf5mR0m54+PrPCsP1Yd5MEeD04UFSBkQiBkYWDPDMOSdhiFIKESIgE4G7sIBHb6u2RgDEMAwTj6ML17WqmXFsW21cSpkR+djOj9tT7X05b4lGxNztXPvJvFVtEJ6yloxCA+Il6FWIimM
l6cQUaMKEhokL4+WNymRiuuudI3LrGoGuFhHhKYIiSI0IBEkAPBdhgZQREYg6kqlXlobY3Rv/zj/4EZInEQjUejaPJCUlZCJ1NxB3UvNaPcBDOrBkygMTXfqUCGpbt5qYk4iZI0vigsYBIVkI0MOSDIUHdGf3w04C1aEPAmrGZZiGK4yym6/KsGeSVPI43EoiIN96X+r9cXlE8cxVQ51gGmfCHGDk5AYgCsjeSXsluLxOY+KBAnvrrmfEJWzrrVkwWAxUxjRua+tu1UybtfW8tQUAEQmIkNjQEWmc53fv3+eUxnH88OFDShIRy7LM81xKbk2naSainAUATqfl+fNXiMychmFAjJQYEVQVkYWZkCGw1uruvel+v+9dtXcS1t5TSrVWImqtCaI7n44fgmMYOAI3w9PDBzyfaRwYCUxbU5CShPu2hathn4adLP3161/w/sVut+v3v8iSeBxtKPh0HqzBLp9Oj0kwmb/7cE+w7lO2poFs3QQCgNxUWFLKampu9KuLb5CgQlcDQgEkYMqp5CQsREkCMDwIiVM6914dCiUQOtn2/nj/0I7mFh5P/YSurdq6ndWrRtv8kcVTlpSQizLzIKOIIxFxIWxCVggzFsSCVCKy9qEbmrJpIGBoa7p2JHVBQgVwV0aQNDsAseWcipQxR2Zz1e6r+gOSQVDtlf/mP/iRQhMhBN16d4oIZQ8PdzWwMLO19mYGyACM4CUnZAFuiIqI5q3pKjkN4wCQASOXnGWgIII08RxODJQTlyQlYWYTpgikJGadYtjtnu131/v5bhrnQWaEHAg5UUSufevxoHZeN3PcSmZ2IHBJhWKnG/TmddMwj94cAEncQ60L5ySjasVUUbTbEtDdacAdqIMTS2ERvAz26KJaXTWAUp5609rqkIf9/up4OkpK7t5ay7nUuhHRfr9HJBEhIjM7HPYfPnyYpvnq6hoASimIMAyFiC5WD3edpt3lsNRaG8cxIsxsv9+vy8rEl06Cd3UIZl5OZ2E6nY8s0tV103Eoy/Gp1QqCZRxZiAFVa+ZgZGTazJ/efH17OPjDl8f7D8Pzu+LNLDznHDlG7B8+ZE88j29/+efQz7shn46t5IRg6iuXACKP8EsdFFHkgjlnulRbUiKSIpkJkiTGIA5khAA3Iw9BRiaFcIQOttX6fn16WD6sdtKuusWq9rAct74p9q37povh4mEIncUNUNIwlpk5mIlYHVYmSJQIOfOesWBINLKWtELT7m7m2ltr3QOJ5fLvlkScOTNBzll4KGVADGIADo/VdPWoQbXpUtvKv/27P8oCAC0wulvrHRVAqYeCOwZpj7W1ap0olZwDGogTk4VHmDC7Qbgie0rZ3EhkLDkRWVfBkoUuAWaikJwHEaGeWZCpA3iAbrq/ut3tbwoNu3EqMhho7cdhGIR2VbfaHxPNblJdhaxA6g4YkWDoXeumrZr3retZQznNzKluhsZI7rg4tN6dwXaShthrjykN14cXW4ckLIim1qu2doLAcdpDkJmzlKvdrgz58elJRJKIqhIRsyDSfr8joog4Hs9XV4cIf/fu3SeffDqOk2pLSZh5GMqlO5ZzvpQRe+9mJsSSZLfbmZmIJEki4u7oceHq5ySmflmJc5lSHr22kmkYB6u9ZJGyezzeD0zWq9YTuW+95v01Ht99ePNt2k345ptl6LFlbtvIaA/vdC487tvPP883+1x2H95+ftjN3WNZjtc3NwF4eZRIKJyGofxLgTwjIjMjsuSBIRG7e7PeERAgHAhQiNPlGSMRIarqw+OHt08fvj5++14/bLqa0uasxuh5tbXqsesJXcG7UOBF6sY85KssO0kSEQ4a3glmpnALMEEvZBNEAgPgZuEB5t7Dw4MgQCQYgzmNhRgICKVcZAWMiO5EAizozWqt3Wv3Tbvz7/zDHwomhB5oFmCthyo6CDEReXjvpuZh4U7EJIndgBgRs7VISIIcoECasjADURJKhNANHIzYNBQQSTgUzC1lzzkZhYY
BodmZU3p2911GFlLmBMxrPSHpkEZwbluPLomzQpidL52yrhaW3ei89Nas96qxOgULM4t2XdfVvVqv6F4kZQ7T6j3v8t1cdixz75EIwKP1BtBYeOC9RQKAYR5yHnpXIF+3mlOapqn3flm/d7udCPduqpZS2u3mDx/uI+Dly5cASITTtCNCZgEIZimlpCQ5l2VZtmXd7XZJUi55nmdVLbkMwwAAjMSCl12lDDnlZB1KljxOicCtYy67obgjUAlb0BoRL6eHpw/v6uMHaHW6/SRsTR5NH+/fvc7jmERH8Loey+sHubleJI5ffXn13Vd63J4eHsapfHj/zfHxEQHq1olS72HWzP5lmh1Ata/rwpeKh3tvBkwimUmAspSJxhE5QS6Yc/TWW23akkiRGTidI87VHur2FCsQFRyZ2BTMGkYkolL4UlAdhyKIAoiILKn1jgLAwBdPkCJBUWUECSA1VauqrtrMlZCZA7ELQSLJiS4uk8CQBIxIQIgmpAC9d2/NmqmbIyT+V/7B95MUhW7RrUFXYKLAgMAIUutdO4SBsakQSUkjYwkXZMcAbxhUWywQDojECdwhhPCASN22qjXAARFJENCsB6uhKvRASlm4YN3as5tPyzC5BTIT8+l8Oi8PJbFAasumtlJgD1ULM0BAU1iru+JS+7JufnFziHMihCCM8HANCp6HkcApaOBxkBm7gVM4123VttXaFMJtcwuMjJLzkCTLpq03HccdC43jmBK3Vs0853xzc2OmZn45/avqtm3DMJRSUsrTNJYy9K4RTsS73e5ScxiG4Xw8EdE4jtNuHoYh53zZWzgJiwxZEkvKKaUEALv5ICzoiilR2FjkXDvUhom72cAgGF07Bq6PH/rTGzu+jd3LQ4aRqXKG+6d4fr3bX8XDIxSw9uhPj/l7L8+vv22vvxKOx/s3hfmjFx8/PjycjkdCIioi1HoFABFhFne/HNvcI8KAgtOMMhNLAORSUJKDmZp2beeln1fCQCYSHsqwm+a97AfaEyZbbauGEV0BILLMiRMYZM45E3IwIQUAhAcCQgCodYuVeROEUHC/bKjq0c1bV9UOgB5hlzkpoShFABAikMHAgBwpEg+JGQLAMRw6WNVaa4NAROS/9Y9+nAubm4c3dd3CQTSag2l473bpZNUOZjTkMVEpskuUELq7Ne0aW4OtawdElEAXwcmdAzElbq7dNnNFIhGMsO4QAZicnAKRU67rypwOh49aw+6Lu671eDo9ONgg8/l8qv0IYV1rdwdM7MVDLMIsmnptysyXVSkJZE4USYIQkKMkSmNJIxdR5kbW1TZKNDIEhUlOCIGuHoyElAhFgNmDpmGaD1fC0lpV7cMwlDLs9/vLDKS75ZyGYbhcakspwzAdDrvLuXnbNoBIKQ3DUMookoahHJ+ORHQ4HFLOpZTLNxMe5lZKudrvmYgQc8kpJWZhiKGkIHLt4D0AtvMTWQXGAGrLKYEyYHR7On/blnfnp7eA0zQU4sIUHGbTTV/bmFrVDksf2jbtdl6DqKVBHt68H+fh1ac/bBoMxpyATVhyLsxsZoickkQAkaQkKac0jJIGJAAwQFDv0Lr31tZFuwWTmiMhEJgiEoHj2rs5OOWl1WM9btaJ05B3QxmHRJIwgLr1ZhvCGI50GUkn3GpFNDdIiATg0QFR1SAIg+3iQEJzMiZixgAnhjyIR+26mAKgCiliiBAhEVymc8Ad0Ar41BryX/9HP0FAIq6q5obOtVrz6qGMGB5u3rp3pfAiNHAw4jAOQ8K8bbrZ2REutCz1xmyZZvLsph7QPQCgtzWoBbqrAiDSYKHMAkiIHdxVodaYhxui1HXdWm3tuCyvz8s7yUVdj8u3hisJWHRmFJoJxwiESK5MlEsp865IRgC/+LETiwQPPA4pZ0HVc+srsKAhhOzGnRDkPIIjYVhv5kQMIJDSUJsjpaGUlFJvbd2Wq6sDIpo5AGzbduHS7/f73vvpdBqGaZ53t7e30zSWUtxDVUvJ0zS
IpGEYc87upr2LyDBcOmPDUIacc5LkZh66n2cmJMKSyziMAFh7C/c0DOBmrQ7D4NZsW4PYu4tuy9Ob9elhvxt5TNZOgvkU49tv/vT27hpwtad7GdnaxsfFiPJUsNn9h8frVwdEgeApyePpzXh1s58O1hYAAqRhGB28toYoklIEAFISGccppWzee9tMGyK6O4aJcAByTsN+x2W4kJ6X9XzS5am2p7r1dq5eGztLskAPoMhSOGci4ZzmnGYh2nqs2xbhBGzOapgywwX/pysLAMHWcFvVLNQUCBDRw4Mh5UA0j96jpoE92rK1CEQyZu29mfWUMhI4WngwjAyTdeyN+C//vc8wiMUjFGEEyqqtLu6uKWF4mPauEZYJi/BENFIumQ6ZJiI496VbTUAXlB0SFRqEBg1em9bmrTW11uzsEZc5N3VijkyFEzNiYmeSeokSDkM4qS1rfbtsXy3t9dP61mnb6octFmIgFOEJQC6QCQYsaWSZCHEYZRhGQgfvRJwgCdLAKQOzIxglZHRzgFF2o5TWtoeHh/P5EaxLylIyoUzTlRoAYxlGbTrvDogUYUjQmhJRTsN+vzPTUtK2rcfj0zCM87zb7/fTNF3OQdu2EXEpebeb53mnavM89962dZvnOeeMhLv9TliGYRAQScJMEI7u67KM08ycUmIA7O6pDENO2hYRyUXujyfziGVd3n8rYu/ffft4/+bV7fem4aZ5uXr+6jDA1tab3f7+7bvFlut5evv1F0o85MFKNqtPb78eMB/ffygDItnT+w/iPO4m5CC41DSj9W4el0mqcLhEvtethXehECkBlJNkZr/8WmVCZHFIGNqaMJGwUUTUp3r6UOv7p6fH06OCEQliF7YswyDXTIVF8vB8Hp4D+nq206OaJUqZmRhTkFftGu7EYalr1KYGARoOUWsAQR7RY4MwZHK/8AgTSAD3i87RFIgAIBCCPEGdVFPrzTX4L/zWzNJKnjBKhJJkpNKb9nYOxAi15hiFhb13hGmcXzLtBDonwJy003JeAS8jyUhBtS/EjJj61qxHbdu2roiVpBFlAGFxJAFsGefEBdxzHrPsEo9TGV09wNft/v3jL1s8LfW12tlxNezMIjRkmVmc2RCTQ5XsKQsFJJFhSiKOCIguhoJEjEOapnxIktU00LFbrxrGy3lJA0+7mfM4lIKUmPm0nQEpp11KZZqGcbc/L+v5fAbwaZpvb2/3h2ldN7MWwWY2TfNud5inPRIg4jhOqn48nqZpOhyucr7EJXi/359O5659t98P43DpYIvIbrcLUCaZpx2S5JK3rTbtwzA+PNxzOJHrVplimMv5vIRaInAg257WD1+dTvdJRI+LLieLk5Kv7bwbJgjfuj2/vf328QOW3WHY1bpp7+F2uL0Rzh8+fIjQ++M9ggy5dOvNXCiNOefE3exSN0Ezd1dXYTJyBxvzMMxXwkOSBJwoJ6YMLKpqtTFYuHZVYZqmcZ8n83iq7dzW07Y9tO20vlv9LFkST0nK9eGm5DnnYTfeTOOrZ4dPrufPslxttbuvKcNcriMAwps264FcyHN4AkaA1Fa80BeGMXEOAwMwuygDMFpvSA3IgC66504AFhie3YsphuPWjf/R//hvrNsjEhAWjSd1QhgI8bSttQYgdGOAYWBKkfpSp+GqlMngTIhCYq0tW9OoSMjMgNyttthQRE2X87FrN1egyikAIqfEJKlIpgyBhAxAETTNhyIThgRVD+xen+ovt/aBmQM6ogP2JFlSYuLMBcFE3APMex6QJTA4cREyht6tkjFgJmYIdtC1PRKBWsfAkoYp71IeUx73N3eHq2eOhBHLsgDzYXc1lHGa5mkcP9zfn5d1GIZpmuf5sNvtlmU5nU7unnO5u7sdhmG/P5Q8HJ9OImma5giKiHEabm5uELHWmnMexxExVFVEcs4550uKTkTGYUJkkTSOQxIuQ2FKiHh8Wk7Hh/P5AUDPx4dWK7ltpw/Wtvnq8PjuK9RTVwv1/VzOy2M
pu3Ger1+8fP/mGwlwgKf7+xeHG9jNhDKTnNbT6eGRAOerQ183dA2wbdsQaBhnYoYIYprG4cITZ8KUxN2RUASFRVIiogDgVESSX7LQNGIghFFYoAZFyilTOS/LYh0I1GmtlyBmXdu5a2XMIgVQUpnH4Woqh914dXd4tp9eHua7Vy8/en73ijiFgVsgCwPWCy8RnVyYMKwhJgIJU0B3i5TpcqYmAuEiPElQdPNuSSS8rbY5kHruiuHglWxTEON/73/97wCmHgtGd7AL9CQa9h7mK4KE58RpSEOhtK1bbTVPsyMDWaARmQauG1hUEWHirqt6M7WutepJu0eEqgEIkucCIgNEHqd9SQXchf3Cqcs5S04UjBgkQ7d1bQ+AwCSAW1xi6BnNNvJEII4r5U1tQYGSBV0ZVchV116bBQGQBIM7sQiKNZ0lH3YfT2U/l4mBx3Eqw6hq6t5byyntrq4R0zCMkuV0Oq/rsr+6FpF5nsZx/v/T9GdNk21Jeh7m0xr2EBHfkJlnqKru6gYagEAMhJoADQNJWNMISqIZdYWfLZmJkCBA3eipTp06JzO/IYY9rMHddRGFv7Atduy13N/3efZ9LWXdtp2Zvv32u2kaRSSGYd+rSDgeT4jk7syUcsw5ulsIUUTcPcZwj1XfS1XMDAC930d4bGZEyIRENIw5hsTsRM7kIuJdy7pA6307f/npx9KtLS/nl98c5zkQASJFUZK6bTHFw+nw429/ezo9aG/t/Tp9+wmI4LbnOW/b/v7yCu7DEIcpsbCgnI6PzpRDZKTWaqnNXN0dARGAmWMIbqCmpsrIQASAzCwxIqLDbl7cOiMCumoHIHRet23TWnqtDbZi13q7tuvuTaGTZyIoe221Hw6neXh6OD0/nj48P/5qnmdVQ+AQgkGv/SIchJJDUHXwFhhIAAkQmAXdgVmI0FpH10ghpDilIVE+TQ+Rs5CQASqqYVd0jgzBGnhHIOAA/L//+z+TkK2DhHsYtYIXcN72brAFziMNgSiwBHSwuq5rsZbT2HVDRIlBKEY5QC9qHdCIm1nT3mqvQPBfCSmInjhAjB7lCJ4BKPFMBA4lRLFu5pjyKEwOFmQ+TB9a68vyZt6RCSG6EwCYWms9hhQitP5u2NxKIkBtap0YALS1VpXcjJEZQpQxSB4oJoj7an1X7wiuHLiUsu3bvYudUkAOiOIEtXXt+vTx0zBmRAghrOtK7Nt2a01//es/yjmr9pyzdmemjx8/qN5Hn0gMx+PxjtWJMfXeQwjMtG0bEd0/Aojo7uM4rstNtSMCmhOjdSVEkWC1lr04eG+l1xvovpy/Wr1aa7UD1EuwXvcWmY055GxkTU0bELqwbMv27Tefat+/vH55PD7pXta6nk6Pw5C77jHIly+fswgRO7irIlJM6f16bqWaOoDXUsyciO5TUXBAQJFIEuBekHHt2gA6gpu5dUVAUwXXrpUDVTNDC0Oubu+3y/tyvu3XvW9mLeXgxstaUsofHr89TqfD6fHh8fHD84eHw3Erl62e97p0uzbd3JRjFhlMDVwphiApYUAAoVBb4aAiId9slwAAgABJREFU6NBDkHGYgnGCIJxSGLIMoNDUDcSdwSn4gJYdPGeex4H/t3//Z3WvtW0d1NEZFaAYlFZu++5TimNOBJxFoKl3VW3X2wZkOQiQiIRDenoYD0JQajEyTm6oxE4A5DFIZoAYAiA5KhiijzEGQXRlQFZ0iSMBtboR0TQ/W8OUwnF8SOFxWc5L/1tkAku9mzsiBqYYYxinEQnMdu0VujKKGzkqMbTetk4OCAwkqfXe+g2d9rWrbjlNQgmIem/v75cgQqR130SYSAB43XcJ/Pj4nPJwvwDGGM18L7fb7fqL7//wdHwqeyEiRIoxPT4+5pzVmrm2th8Ox8Ph0Kq5YWtdhEXI7Pep6TtXOEoo+x5DSCnd89Xgjo5m3rsSM3u9vH9dry9vn3+oyyvUvZe9rzcSnubDcvkSSFKcSy0VHAhGSdP8KDy
sy3lMfBiPe+vT6Wi9X94vrtVNS6nglgIiYM5pvd2G05GH4LXWvaR55hjbvph3bX1dl33fWmuqnYgkxiAxpimliUV6V3MLUVhGkYxOpVXtnYljzByEZGCPW+ubY3G5aXnfb9fr+rrttdwIacgzgO7lMqZxyk9jfjydxo9Pp+f5cUy5+H7d3i+Xt27VvXRrCIBE2tycIqDwnVxBQZwcJSBy66aDTBln70wKA+bk2bzfbGlQxIZgA2JAvD/4EKPzv/v3/9zcu16uy0V9RW8OW7UOagx5iFMIIgECduhbbXuzvpS+tjaP4zQdiGKkIUlOaUQgVTWjEEYHRPckg4RM9/AU5t7NzFQB/L6uiojkzGDEoGpLqZcwhnl4DhyHHAMnh7Du6215B3R3VO0hhDEfSIQJogRB6t3cOcqEzoCKBB0IQEqDtezmHb0hGDjJkA4h9opCyb1v+zWGTECff/7d8XhsrZTSatMhj8+fPmozJ3cnkZBSPJ8vpezDMDycPoQQAbDsJYQ4z0dmYSZTN3WzHmMa8qTqIqGUql3neW6t9d6naSbCVur9GrCuq7vHGO8dAGFW1W3bRKj3kpigl7effqbetBezuu1nIfr47XfWmhoM0yGP2cDBoe0rK44xA3RhEQi9b4D7h6dvnXG7nuuy7vs2D+FyPmsHYh7GcWv1NB+tNQcszRhD10buzKKqpRQRSSmbubkRSUgDS0QOEmJOWVLGIOik2s3VEB2DGtaqpXZwr+Rvpf10ff+yvb7dzufl/dJuqrs2SDwJUSlv6/6epzDGYwgyxDGFSUL0YMXq+XrbzhfD3nFT3ZmJgb1bQCIiRCCmKFE4BWb33bRCS2M6oTMaUhdwAgJwt0bWGTyypJxyShEIWDr/d//u+94rArn51r42fTfbTTuFPE9TkANjiFHY1fqm3dbSb62ZI1r98PwhpSetLcUxpti81LohkKkxIYpRIARC81ZVVbqi+33chxIyubgTuBMqQmt9WfsZID49fj/ECVEQrRUWeKh63vcLeFCzGCXFkYGZLFBEH/dWCIR9RIjIDbmTiOAwDYfIA5sFSJkOh3RgEF2McZjHUy1rDBzTeL2uf/THf4wS3l7PccgPD8+S8743ZlT31g3ALpeLqqnqw8PD8fgQY7her5fLeRxHAByGQfWOEbBxGkWk1hZjQsTWCqAdj0dVvd1up9MpxrBv+z0zl3O+XC6llHEchXjf93uGdFlu6/KuvaYkOY99L+6KiEKyti2GLJymwyMIc8zWe9tWpEi93a6vQMVBrXchf395u+3bPE1JJARa3l7AdZgP5mZmXe00HPdlp8AoASjspSD4EAdCBIRpmuKd0ZLi6XSMId0P4BwC0P2OQCQECIggEiXEIEJBILCISAwwpA7x1nRtbVtW091oI2xmDszAxoLrfltul5glpqH0HREdDJRa6bf9cr1+vWxvQDWAo8MQslBiFu5CLr17q87MiCZRgHpf0aylGFS1NwDCLGH02Wos4EZIFNlxGEJMjA78T/+nD70tvRfE5t5r20pZhnx8mD/k9CwUAwhjdPDrdt21GLKqcHRsHYEOx2dvkFLOYVDbq3aFTtiYiCi6sVlza4gMgObgxqaGQMQYOQEwoHJwclBrRd8QUpTTaf4oPCDavldtNqS5lrbsV/NOCFN8iJgQu3DsaMva0XKSkYUUNhFjAYnHIY6Z8il+GughuEx0TDocH4/TdDJDVR2nyYB+9etfk9Cf/8XfDMM4H45qaCZ5GM+X166ecl7W6/l8UfXT6fDhw3POo3n94YcfWFCEp3G6X3HuRZnj8QAAX1++HA5TrRXQ3N2sE8v1ckH0lNK6ba3W+yColIKISB6E3ay1GiKHSHXf93V7O19YsNWr6g5OkdL0+GFbVzfLOQJzqe7dWtnQ+vH5cS2lLKu1qlZTCOPhuF7O7rDdbtM4HIa0tQ4A1/0WOByengLHulZ1ZwmRowFoL73U3uq6LXvdzZqDiQQCDByZ2RyIAAiZxQj
JENwMgEMiEASgECilINDAi4JhoHFSycu6LOWt6Vr2pauFKDEkJGEO1+VtWa/j+JiGvO83gQYFtaXr8vnn60/n5TNBO4aZOQDCHT9oJqokRGDOzoxOiCzRUFuvte5EQiAJwyA5UEyc7jEk5EghhkBxCDHM/C/+7QO67vXWdHeHtvOYHz59/JOH+QOSMAm4uffd9mq9maq3NNA9uVT6mlMIw9QVjWvtCzMAVNMaMBKRm3lvzIJAxDsYWwcAQwBvHUDTEFgcvCEioG3tZgLEs1g4pBNHvJZl2Xd3yFPet9teXyl0wjjiya0bKJhpY4GILgiBY+YAjBbClCDnNKIF7Y4EA+bT+Nhqvdze67L1bmuvp9Op1/p//H//42Hgj59+OQ4TCwDRtuzIkMekpp8//9x7/fD84dtvfpXTZNbfz1/3vT4+PuU89N7v0MzfK8uJzHzdlhjF/PeRodfXlyEP7r6uSyu1t3b/IEzTpOrmjRzQybVVvS2XV2g2D2Nv6+12JicGL9u7EBLIcJqsW9mWvS+MJJAaELm3voPzMIwhiLauit1szoM5MFKvvdc6HaaUs6TY3ZZtPYikcRw/PNVq2ot5C0RqXlrR5tpt226ILUape1+3fa+3fpehEKQ0IBNhAxDnQMPkxE7ghK5WLpdSFjI3kI56vS1vSzm39/P2ee1vN9+hO7rcUxPiLMnPl/O6bM8ffjHmk9aOoCFCgnHr+8vttWyXcZwPh6eQxRHckXTf2qK/P5gJQFMzZIqRkOy6rg5yGg6RSd2ZQgox5SFwIIopHyNHIY6c+V/82fdE1LS32upev/3wq1//6h8c5g/z8BDjUGtb972Ucl2vwI3EqxdEjdHiMAAPFVqKubfS+tV6c1NANd8DgwC7VoOm955bc4WNuAkmJEM0EpfkAA0cGBwJVGnf/Xh6aqYkPI7PrfXl8uK+h0ApHvay7bUgAGhzs657LRujppjRYu8sHIOEEGIQxWhEvG19aztoe0rTuly/vL8zmSAxx2k+iMhvfvPXTDYfTqp2vrzebhfrlsYx55FCKqVeLudf/eoPv/vu+2EYhjy+vr1cb5echtPpJCLbtuWcEfF4fAghmHlr9Xq9PD09Et07Q1jKjsQp597q+XI21ZTSHYEfc6x7RcdIoZbdrNayvr+9ZLR1Obu3KJHAXXcCzykrkjYjkd7qerlEkTxmJy7abS9tW6Z5QBYHjDF2L4CEhL3X1gsT5pzVIechBIE79ElCDEFrdTAHFkEmWq9Xd88x3E1AvRsjEyOLUAgO3I3uUAIFBAIE876DKaGDdlDvddfenOJe6q3sq9X3/a1CNSitdq3YHIX87gULlETC+fa5m3/7/Ms5PqtayJzzccYHdVrrrdRynD8+nX7xMH1giM2bArj3ZkuHW8jEYuDgiB2qB1i2mmQc8xyYhV1EwpQDp0BDiGNOMTC3WvlP/6dfpTgihd41xekX3//y0/M3x8PjN88fH45Pt7V8eX+53i5lu3RtFgRZiJUEjYOkubbdVQFpaytoBW0GCq7iiKbdugNgTGNKCKywU6RACVFCwhAdoBOhqYEDIrKk1nYReXr8tncNIQZJ1+Va+iqIhOOQTqa83TazRaIiiurVVKE5SyAkIknpFOUpRjLSrWzM0hsc0kO0bB2Px2lI0zQ8pzwSwrKc9/VGRAiwLMvXrz8z8fPztzkNW92WZXl7f//22+8+ffx2GAZE6tre3t5CCNM03bNApZSc8zQd7jm5lNL7+/l3v/vdhw9P4zi6g7vX2og5pdR6bbUSMxOJyL7v45hrbYwYic3vXWplNCjV3az3GEJvFdR7tZyTAW7XGwaZUtZaL7d3sHZ8mEOe1+s5MCDA4XASSV1biDzkIaW47VspxRGfnz611rd1AYeiRkRk0EohATPT1mvdt+Xc6s5IRBhiEAmIICGkNM7zY0gBSZGRRRDZrC/XS1tW2Hcsm9WC6JKCmK/Lcr5er7elaOtMHaw3QPFe277V2pUBHMjczAG
R3cvL6+840tPjN2M6lbZSwpQGwhQ4akP0aZ6ejqePx9O3QziSCbi7r6WsXV1CyJmdkjoYgJrV3mNIQ0gJo3DGFByQAIYx5BhNO5rxf/tvf+nOMRzGOT89fDPmecjDPDw+PX1LELzjXvafXn4L0NDFG6mZmhOPjujegLqDE+WqveuGBrXsjM4EHQsw39kVh3kec0ZBswTgLCABhRHvbVkhwogoRN5pNaMPp2+DpF49Cnf16+1q5hwdIY7hWThtZe3Wc5QYhB1aOxu2GB+FnnI85GkOOat69bWWGi1FDtApUYqIMQ6AwQH25Xa7vDn0+XR4fvzW3GLkT58+xTj89NPPr6+vps4xPj1+yDkTkbuVsrtbSsPpdOy9L8siEmNM8zzfuXGqqqqXyzmlcDqdav1919Hdh5yZ8Hq9EmBK6R79t95M+74tIkbkdzaNMCECU3DVXtZWVkJijqU3xK51cWjklnJW93K7bteLpDCN47aXnAYCHIZ85zuQYJQYJOU89tJba9M8CEIej3kYt20H8A79Ll0jwrZvdd8BwFyXdeldgUCCBImECODWOxgysZp2M0aJLHQnH6WEIWKI4EBhDDFurZ5v58t6fd+WatrcOm7kfSt72SopUUiErG5uGimWtn99/Xkej8fjE1EstTJwDIKRhvEBIArLMEz5MGd8GFJqdlO1FKdutbp2UODIzqQoJN1q63WQYZABOhRoft+KgXpwdet153/zf/lHIgNTRu4hhDGcTvM3D6cnc399ebtcl252vX5l4kFyQCbKIpkAQxhjCA5eq7l762vpq4MTKklXbJuuEifhEbQ4Uog53mN7VAIDYVQzUwLnzHHOxxynlCLjYPWaMuf4TWvYekXU5fpStaWcmCn4kIcjS1rX6r7GQMIRiUt19OOQnplzylOICSiykzeFpq7NwQl6q/u+lm1rtWznty+t78Nhfv7w7XJd12WfpgMh//T5589fPo/TPEyD8PD49JxSBsCU4h0pllJura7rmtKAiPM831+AWquqqfZlucUkOWezOzOC/ytRC0IIX798CSEMw5BS0tqh91q3UhcE1961921bQDClWLel1mLa9n1JKYUk1gHQGKy1jv+1fMzo18vrPB85TkV1msfWyuFwbLW10sFRm+aUDoeDqu5lC4mFhjTmaZrdvXXdarFu2o0lMouIsFBMkUW0N3dPISOSAzAlQHLoTgqAIUQOgQJjCJACCDPLfcokwimnIUZAPm9lKU1JK7SuDV0DJnRG9ERM5OBKEvLA5fb+8+evHKdhGAJz7RoDAkDK03E+1VrRgIWGmM2rmtZWGXvIDoTdcGs7A6OiKSJT7armEkJ3Ne/OhIGbtr01Qyh15//5f//nh/HbIGOru1kPgd1xzMf3r2+v18tW/Ho777evOVIIkZhjIIkokYc0TjmkPCLYtl3I3YCa7SGCozbX7qbWCYVczAmQg0TEBqwpRCI2gG7OGBPkWXKUxJRjCK7m5vPwaLo4aWt7qV8JnGkgCOZsyjFFoqK9IVCKh5ifrY+lQB6mGCaRUTipO8FAQA5uJowUEIu6gx+GKTBFGT9++O50fHh/v1Urp8OjVliuy8vrlzTG08Mjc5im08Ppobae8+AO1+uFma/X27ou8zzf8aCn0+k+y9+2rbV2L1IN4+8rkffMDwAQkWqPMZrb5XwZxzHlYOr3zO2yXIYcrVUW6Fq2fQlEKUvZKrhq38w0hAjKLDHGSZ2WfXOrriUwRuFt7w/PH0lAXTEECpIkEEop1VRrLSFIzLFbr00HCZf1hgCn42mcRkNels3Vr+t6vV3N7c44IaAc0r0bGLKMh2MaD44MQIhJQiK5D7IR3a1UaB29qxarBg6ltWVZt65LL2tbO2wQkoGliDFEAhIKAyZCQCFiDpRj5Pfzl69vP0vEIT0KsxbLMTpBHNKYx/W6QtkNzYmL1tKvpb47eAqI1pzE3UHB3RScYyrFFu09EiE5uCIoem2tlaKu/K//1384xuM0jN1
g3W7q+9vbT5frzTy9vV0IrdSX6/qZkhsoYUSshCB8Ly/fy5xOHAUzc1JTJDcHRSPW0m/qznAAYKAeJAI4OiJAswZACAFUBFgQcooOaIAOfW9nFhAPvWwOa12uXXsQjnJEk96aeSeuMU7CA9NxyB9jnNd1a17zMFkFJrhDjtU8xCCctTdomvIwDkfq4kqHh8dhmrblpk4fP368nM/vb197v1XbWrU8HJmjxICAxBxCeHt7CZFvy/Wnn36UwCHEEORwOA7DQEREdLlcRGSapvf3t2ka7pm5e3vYzFRbCKGVejeduRohUGRCtN5qK4geyBEd0bHbtty01RhiV0dAVyBkxKYOzDHE7IzaWttLqz2nQWJS05xjktDB3ZzV7+1eNVXTtaxIEDj22jHwmIbL+aKqIcYogk4dtJfSu27rptpDEEJMKY55TNMwHz+MD99KjE6dI+U8hzRgCNZar13NSAQcdK/3Qf+tbi/X959fXn/7+vXrcl1LWVstuIJ4YhDBGCKhuDJSVA+mFZ2Ec0x5vZ3fXl+c0hAfY5zUGzP0fU8SDf318vV92zEaRzOvVZujdS3NKrv03pHcsXdoIbCIlFIMCQCAEaD5vcGy171X/rP/679A9BgjMrzfvp6v75/f/ubLyw+1rZfL19fzb7pdqlZH4wCOYI6BiV2JDSl0a2Yagzize59SRgYi6r0ZFKag5oQODqpujoImKN1s74vVFnEGT2qVArCwCGJbq+2b1q4bge/l1mqNHkrdumoQZOTesPembjHNh8NTiE/EUUIE5Je3171c3FrRhVBUfd13RnKqTffYYZLxTtI7HE+U4vvb6+38Mh9PZd9//vEH082pd/WPH3855vn15adpnubj/Pj08PnzZ2Iw67/73Q/btjCHp+fHeT4cDsfee865tXa7XaZpBoD397cYBRHvZ4xt2wAMEcu2D8PAgOuygGurDRkDsQiVtr++fmZ0cqh7jUICuC8XRw1BwB2cABxFEciaqtYYJaU45wFJWrd7E1+Yg7AAgrp1J4GYYspZ0d8v7633eZxN/bKt4ziHEMuylHXbtg0RCEH3LcdwmOdtX9Z9kSD35loaZ8kjcwAC8GZGxAkZACJ0N21uiu6g2lrpqmpk4AbQOxTXrW/XZVuWssmVgyRMLMDixGTsW3etVdEIXTuBMVO63m4/nX+LDvP0hMi1d0Ta9gUiuOr78tb7mgNEjsN0ZKSyF7PG2glUSCmaU++9poBRDJu5EEdOARGpF8XaDYn/l//7PyVEYu9I1+X69va72ta1nd+vv7vW162/VKs557rZtl7dnYh73zuqAqGptuKAYCBeQyDmFigH4SEiaDWLQTDGLkLYxEHMUMgR3LtrB2tgYF219ptD7b4qdu9dqbR+035rrVvrFChEKXW7j357sd5VUgBHIhnGMXMix0wTOb9evuz1uq/rXhVAxEyrumCWNNpoFGMYUore7P3t/f36hdymYbq+X1sr8zy20o+HD998892yLfPD4zefvmtWl8v5+v7+8dM3v/3px9oqAR6Ox9PpIYR8p1m11s7nc0o5RjHT/yoSh/tB/3K5NG05DG9v7yQyzVMrtbVuDlXXOYe6b0kCdXh7e2H0ul62dYlswojuvW4izMJuXtueh0yECArQ0SCl8TAfXVLTIoTgXlujGJ3QXD0yB2EK4zgDeNlube/ffPMNgf785QuRnOZZRPbe97Jp11abOUzHYxiSILCTd6dAIY0BRXVxU8RkzRA6jgOmDCzh955pJBEAdMDebS+rMU55rqXf6t7qulvtWN0xxwi0FDIDBGtt37ujMWnvpni3jBLS7f39d28/A+PD/ETGdwLVXtZdW/BWt8Wqo3vkIBKBOjESAzgwIZO7kzcBVAIMEilIjiFLQvW2b2AeEPhf/d/+BIHMwE1aXc+3L3u5llp6NcTOvOQgH46/+PbDn6yrf33/21t/681q9bWs67ZWrWrVoSDgPIwhC4CHEEOKIBgYY4AQNDEmyYiBgHv
fHbv74Aa1+l6LAmzlupRLtdKhOSEgEJKZV6sGAI6M0dHW7bwt27puwJ0DGXirNTIKRVcH8BAzM19u59fttu8F3VBIHUzllE9jmomiqtay7OutbI1RDtPctN2ub6eHOQ8DUfzuF38IANu25ihvL2/X263s7ePHj137+/u7m6eQjqejdjDze8vx9fV139fHx4c73PN+iWTm//pxKMttE6YQZFlvKSZCXJarBHb1UrdeayAGNK271g7MWnvZbkSACNaVGO89AVe33gE6IQ95NHc1l5DjNBKCIwQeEFFNk4T7IqL3JhLieHj6xfdEvF2u3fo8zinl3uq2rRJDDJJZmE17B2t13yLh8TjP8zGGMUoackQmJ2II9zg0iGDIJBEYMCaI2ZAdPcYMzda+ddRlL7elLrW8X8+17xgJgZ3AxEBAW7fuRTc1rkW1KnTovZupNQGQana93S7XN+SeI63rtbayl91KIQJT751AIhJ1rd53Ao4uaKJGTdWVHLgrFCVAHoYxxSGIeK91rYDetfM/+tefANi0k3PVer6+b3VprVqnyGEapueHb56Pv/7m6e//wS//vsj4tz/+dlvVAdZlXday77X31axMw3Ec8jTNxEQEKc05MeAlZ8pCQpwju6tabWV1R4XQO3XV++xIode2GRiImJv1FmN0tGq3pqqV1F1ha7WW1bd2a31VYwO1jr00h2LWyZ2AMx8UwqVsbW9au5oNwzFBJgjT45BCWLZL2RdrLUpCDNo6hfDx44fj46dS2+HhUTj95of/crl86dUI0d2n+UhB3r6+CBIATIcjswDgPB/MrPcK4ETEcld5o1o3s3EcS9n2feu9A7ianh5O27btpYzThOiXyyXHqNb3dW1lzZmu54t1Y4HTMGzredsWZGbmcRz1vlNhUa1gigwIlPJMJLXVqi3FZN2b1pSzSFjWjQnRXVvXDta7apmGh8PhQ9W97C0Fmae5aFv3rSzrvq1p4CmPwFRa7a062OFwPDw+K4C25oQhT8hBzUkCs3gQdHVUFcHh4NPA4wgUQDhkMYd1Wb+eX7+u7+/rtfTeuxclJeu0mxijeSuAwB5KpX3feuuGUDvUUrV3RYwSSlk/v30uViTIUq4AOhAByzAdJWW1zd1due8GBlpqLe4au9LetKnvrTUl4cBR7q+4qoEzGDRQ/vWf0hBnBAAK7rzt+7betBuqhNC+/fiL7z7+n+fhOaUUZDgNv6jVrssbEYBL175uRc1OUwiDRJlyyuMYHaM2FypIK8uas4TgQi0EUqdW1cG6YTeqvZiBu7EUAPeGaswkKQiQAZkQuKkbF7feFR1rhW2p+1q8gyuo3lrfl/1LEJrjHIm9OxNAw/f3q6MSq1o9zo/CGWkcs3jX9bIRyLbut9t1GKd5OpHkz19+XrdFJLy+fH0/v03T4dM334P77Xo7HI/LvpStpBifHh/VVdXGcRrHqda99/7w8FBrbX0XCczs7veYZ+9tXdfW2t1/CIAhhF61lGpdwQ1MxyG03i5vb2OKh4fjy+uL7gu0LQm13hEDIA3DWGptVYdpkBgMTM2I4zjOIhERet17sxCk9c1dx/k05GlZlyHnGKetbkHU1tK7TqfTMB3NoZdVcjw8PkHX1tpWys8//bjt2zjN4zha17qXptXBHRqGKY0TRsHAEAK4IxGlZB3AgJFUO4cASL0XxCqIiBSJOtq5rud1PW/NfVBmEajNt34LmQNH8OBoiNDatm513dq2bdZabbU1MDNADEFK2XuvMTEH1K4dXCQe5gOg7evqquhe16ZudW+9Wzcs1Xv31sydooQQQwzeWqsKbKTNqjf+o3/eA2WmAZQRqZuu663W4mpqu7D/wTd/+uHhV7313rzVXrflWq6t92mcAXDdW4rjlA9EIi5THCRmQGJU03dOilTVFiYmZAohp5PwtJVWStduQNR7b22V5Mi9Vo0sUUaBZAACjaGIBBVxj73W3tyctnXXBr1Bb9tW3679vWPzXhPEo8wctNe
yrXuxpmgpCgWOY/j0/Inq5N1BWSi+X25d23fffneYHxDw7f3ldz//5TQl4cGBHh8/DcNJ1d8vbzkNdd/2bXf0HEckWMrycDox853s0LuJBNWeUrrrM+4IrbvgFhFKKcuyxpjuEFxCcLd9v7l3tBruUo0ol8sNwQk69Gratm0nCikOQx616zCk3nrvjYRCSIQxxdx7uwewc8r7vjv0+TBve12W2zCOKaZaIaQA1pbLNQ9DJHt/+ULEpVe3VkohksM0xTQQSy/1tm/LtnpX79paRQIEE6Dp+SPFaNCJgCgAIQVxEiSu627rldpGrUAtDEruvTfsJgY55xAHMtkVlSKnEMYjYbrtK7MchiMhO3UG7Qqtw+XyUvYdNJIHrb2VbRzGwzxI9qWtrbbH41Mafq9VZqQgEJnXZa2tIIFqVG1Fb62rAbsGcGTBmHhK6X7tdnPr/Q7s4T/5l6EUiyFbd/BurqU2haIGWrnua87y3adfmxKAKbT315/flp9QLBARmrMSMuOYOU4BwMBAwBWhOHrvlVCRqyq5RUTI+WHKn0Smy+1ctLXeiToLtGqIXRBBmSU4GFp3q2hGQTqDoGIPptLNvRqqhWCC0Mw2uxERIVDrYmjurWpR3LR2aHGQnGnfvgaSx/kX1jQPGdEV2nEYEw5drbT1888/ENLj48fn50+IsK63rs3Nh0N2s23dhjGv6y4cQ5RhGmJMAPj49LAum0iotR6PhxBkWa4hRGautd6Rb6odAEppiDCOo7u7ubadUBl1vd28ahYJkZzg9ecvCVCx5mlyQ+EAgAQowq3veYj0e99JAEQEIAbz36sriNkMJMR5PrjD++vLhw/PhvR2/t3heBSZl+2Wj4fhcLhdv/Zal2XrpS/nt679eHjqTbf9dno8AfK6rWoNAYeUCIkpcYgiotq9dQQEYSAGjoDACKAFW7WygzdHIJl4PgiGttzW9epqCXjMydCrgfIAAXov6DDkwzRPgZMDA/XubV1tWwAxMrv1Ns6n54dPQ4oiHhNrbQjy8OH7ORwRhAF6W5mRUbZ9rbq5WbW9WUcSxkCGQBwCDOPIAYTykMcUHIHMXdH5H/+bb2t3BCInJkOCqsW0Oba97L15LQuJMvPb5a3Ubd0vS/spBmRHxc2hkAEhJ84i5IhNm3ZruqsVQgWoCEYcjvNTSiewMcfjfHhgHr++f9nLdchZkGtTAmVkYDdrwkGQiJ0luhMJCHVwri1t247uQ5DTGIaBhzHHELU7Wxhj3rez1lb7vumytaVaG4YcEUj7XmtIKecEoO5gpsv7pVx2ILrdLkzhD3716198/3e+fHn52x/+c+/bYT4+PpyI6Yff/PDdd99fb9eX97df/+Gv3ZEEm+rjw0PvWsqeU+bfu31Kt55TBMDee4zRzEorEkTVtbd79jVIKLdr2a4piYCcz6+380sv+8NxLtva9r3UbduXw3gaxhyitNZ774AmIikl4uh2RxX2+5KhaUHAex4bwGPMUaJ530uNUVB1ud0Ox8PT4enr+bK3/SGP1rT3nkMIQQx9r9vzw2Ngvl6vzBEIUQIhuXcHkDimEChGksDI5g4EiAxhcARk4pARmYTMtLcOABYycQbzy37d9r1rkUAxJnUpW9v6tZNO6TDwIU/HYTgEiW7afKGg7uCdYjx8fH7+xfe/mqfHECLHOA5DTulyeSvbNuYJzFrRXnTbrs1uLHttu+reejcHwoB+r8kbUouBOPGYD4MIsoE7Q5CU+J/+D98BcYpjDDOzhoRuVtoGSKBTLbqX/nb5adlvL+8/vLz+5aZfwHugFsSZUEKX0NHZkXdrxKitLst+P76BrWBmOiJE4eE4/eEQPxCZBBrkgwNt9WJK1hXIVF2C5wEoYpAh84CEmDgFEVQmagplt66NUQ4yTYI5hxCGIRxjmAljcAHv23Ir3XZfSm0sPATOAhLy1vrebnl67N0EhlKs7wVVS605D3/wB384jvMPv/mbv/mr/9/pMH3z8RNRCnH+L3/
xn2IYH56e//Jv/urv/b0/QeCvL69DyoeH03yYf/rd747HE5PcSfGt7eYeQzBTdxjH0dxVe601xqC1uzsSDWGAbtu6btuWJRwO09vb15eff5gCzPOpmtfthr0ScoghjNGZDJyQAYA4IiIw3JNFrezMTsSgGoQcWq21V2XBlHLZTdVSFgdflm2YhsBYr4vt4ADaGiIic23NrO3bcpxP5rDf1hhSTgNxADBhCndgyjhCmpyIBAgQnFwiSjcHxwCSlCjkg0gCq32/kSSZj+r+fr2e1+vSq5IYBWttraVIPw6fnvK3jNN8nAK7ILe+o/BxGhjaGPP33//y9Pgw5jFPMUqOPKaIHNvl7efz5WsIXsve2q3r2ut+VzwAORMAmJupAyGaq4RK0lmYxJkpBUFEAEY1/mf/+o8SDZFhCDlyTBHvTxuMUhiwn+q+rWu9refb8mWr54575E4CzQpRmQaexznlANq3fSsdiAfv1HsD2l21OeV8BOO9bCzhOH9jls2s9x4k9MaX24UQBAI6SMA09nkcAFibAyd3YvEooXpTU+2m3QxkEhqhRGIJE2GMfHyYfxEpg1lpftsWA2CnIBhHEhFGZPbrctt1Oc4nbMMQSHtRbYcUjsfnvdXbZb+efx6Oh48fv/vpdz9spV0ut7Vcv/vu43Ldpjx9+vDNf/6Lvzg9zPN8Oj0+ff7pcwrhu2+/e3t7vyfkCPl6W+fDwTq01o+nQ9kqKBLBHZ8/xKHt+zwls91V9+s1IA45RJay3y7Xt0D4cDoZ9o48TKN5F2ACigKE97OOBg58b2QhAcI9jddbL6XmNLvjsp5DkKenZ+3aWnOAIQ+B8fb+RubjNKKAoqOE0ntOgxAul6XUvdUSxzQdDnZfJiCGHCWEQVg40DAgCpGokFojA5fsYbLegJ3jZAbYC2AlTwxgpRACQ9gNd8aX9fq+ree6fqlLB5x4jDGN80MMAwGxUBDpdTO75cnSMOfxFIaQh5xTZnGhDI7b/k5oLNBaKbV13S+3l1L37lbaSggOoOZqWNuuauauWBp1RwCsLBJjgN97nV3V+L/9l98/PXyXU1KrEpkZw8BBInQa8/BweHRtt21pzWtlQ2FAMyMGiTykGITMAQCjBKG4llq6InFX3fbSAMCMkXM61GJ72URClIdt1WZFu+5lVe+AHHk8jBOSjSkQIRNFieYEICwMQF2LQzcNa+l73QIxE1GkNJ6CnHI8jnkaUgoYaqtbWXv1KMLEMQcWUi8o2qGJ6pRPHGP3xkJBAqott01VDSoTjePxr/7mL17fXp6evs1xHHPSrrfr7fvvf/n5y5dhTPPhMEynUvd1Xf/JP/4ntfTL9XY8nlIKtVZTnca5tSYi43Tcty0EykO2rtZ6TNysIbqrai+kfVlfwNV7SYG9t95bSBJl4iDkEqMQ3OvgfifP1b2W2kIId9oKk7gjYUgplVaut/M8jyGEZVnQ8XQ6bevaWyt1jSG4w21dzREluAoRifC6bYiQUt62ZSvrbV176zEOrRXEPuRDHmZhdcrhcEBhJHJkdEZkYIDMIvleE0Oo3hT3zbbFTKG3fnkz7U06kRPm3Vop+Hq5IsGUx240juOUp2s9e3cWQvBtfTe8xgmBwNFSTIRRKxJi13WvL01LSmlMc21Q9lpbXdZraQUwAKhp36u2pq1U1YoEgOTdOlYKioTgbA26OqC0zvwP//vvnp+//+7T32Ecm14BIGAcc0qJU5pP0+NxOvZme2m9Y6mVICEIIuUhzVMSQVWo1RHZQUgA3XpTA29OTokRu3ZAYcl72dZ1Fw61luV2U3W3Rl5CYGLmSGngyEPOIswIEuJo0M2K9tbca1mYFaCVvSKgiKtpiqNQTjEHZuwWxNXrtlT1LkxBJESiyGYVEKbhgTo23VKaycfAgtZKLQRGALXvIcTPn39a1ssf/OpPvvn0i9vl68vXr8zh06dv1q0EkdPpuJV9PAza7O/88d8Bpx9/+p1I+PTp075vZV8R6XA4xZg
AnFjADcCH4YBO2ru7phBa61HEVRH99fWn3sttvQzDEFIuvQGQGR7mIUjqvZhqCJGQmcM0zEyh1WZu94AdIpnZtq1Ny2Geay2t6eFwtG7n81sIHGNclgWJ1r0cTw8xpt6qtZKGbKbLtrZeLu/nWkoMSWvby62tW287MaSUnCzlkIYDxyRhhnE07bhXSBkPB/MOgJRnbH5fvxo5CiAjWF+Wde/lfb9ebq/b7QocYxqqw3LrxRViIiPmMAxj29Z934OICHXTps15c145KgD01oW1tovBBXkPkXIWd2UkR2Hmve57NeJQa2v3BmrXbd/dBCAQkAM5AiOjM/Pv/997VSDkf/5v//6cnp8fvxvTg4Pv+2K+Cevp9Bz5lOM0DhlwvK2L4bVDI4eUJAQhInQHYJHEnrypO3dsRCKMHIwkRg8oU++4t6Xaba/7dVlu29l8K+2ybmfTxurMEVkUNAlJYAN2S109SCIMbkZs1rt7BTQUUsPeEcCwt9pbCAkBhsTuvZat176um0LNOaU8kIhECSGZA8cZvKr1OKaY5oGnMSdE2ta6rmcE/Pr1xbT+4hd/+OnjL76+/vib3/7l6fHw8PhhWZfe/HA4AuAwHvI0PB6OxPTl5evlcv7lL39Zyr4sy5AGIpCQRAQAt30nhBjDneQE6K1UYc4puakQ7vt+u1xykt46Y5hPx23rSDEE79qmaSaEUndwiOH3shY3jDkiQu2tqxEiMyF566XsdZ5O7qjdUhqEw+VyuSNvm5qEhMA5JjMz7QomgWttrZQgsi7L7XoLMZt1LesdRt261roS0eHwHIYRRJCR1L1Xgo5Z4PGjtXAHN5kZRSR3VO8kkmYM46a+up7Xfeu0IijHW63v26pOQgLObpCyNK1b2bU7C8hATSt4Z7Km4K051G6r2d61AzozOythRfYYxiC5m7k7gDtQ0Q1V0bxV3HfrZiQIhEwI3khEQmaIfa+q3RT5f/x3fzLnj+hExL336/7F4KrexzgN44TKKU4hjute1vaGoaMZmcYg6NIL9YagHFFcBQ061Y4QKTIRAQkiGFsPvffWb6XXve9bv5a296Zlv2nfkQVAmCRxIAOUEVwAyaB222MM5g2AhymmMDKmJFkwLGXftQFJa4tZVbNmXrWo9a64lc1aHcdDSqecMhEadJOGQUWCG3dHThIo9NL6vgH01vv57S1y/Oab72IYz+f3l5cfP3z8dp4e397Pl9v1OD+cjk8GPabpcHj67W//Wl1fX78Ow3A6Hd7f30/HB0DqratXBnKwbV0ZKYXYeydxb92tpyCmpdWt7HuK8e3r15zoYX7Y1xYjg4GrT2NsTcHZUYXZze/CbQeVFFgYhM3d7iJR7yKUU+rN3UFE9n3btj3nIaWhNYsxWO/buiIAknQFRdlrV/PT8WBVa6kxZiTc2y4xCYuZmyET5DgmyQIUUobH5x6jgyqzoFgzm0+cJ/SOAdrbhdqGblqb3a61lPT0YTycRkUzK5F35ZROzsPaLk13gmjKtS9OhYOVsnYHI1C91fZmoNrYK/Ve3FvvaiaAsavV1gBsHFKMYwiht/sIuCEC3V3Oe4VqURk7ABAKEYJJkQghYIgRzFqtXdt1u/F/8y8Px/nZVYLkrZ1f3n8A2IccDXoek+mIkEkcHLsWAGASVwMjBHJHcyKIosKQgwQIqlZRzQGBoys4ILowSUzJAYgIzM3AuiJUBhVmBwA0CQFMWlcAdVMzV+tESqwGLhiCDCnmlNKYhiABgUBVva7boobd09771lZz32sx6k8fPohEcMsRiLz7RtxjDAh+u65gkHnAJpnjvm/b2jLzmAcwLLXvpUzjLHG4Xc/LuhLSN998L0HWbR2GfD6/E+Pp8eHl5eXh4bQst2+++SaG/PXrSwyB5K64bgg+5XFIedmvDAZmoIbk2lurO5o9TMe3t6+1Ls9Pj6W2urdhnACs9eruDr7v650AVcoWUyZiCoJMKSVCBkRwW9frul2HYch5KqXe1w4Avm0
7AEiUFOPxeGitvb6+Xm/XPORtXeu6aO+APh0PXXXblpiiEJdamQIRAWMMIhJiDMwQY7Zh4ocHYvFqDTsFoS6/t3UCooG9vXnrHEGw7ev2/vK57WfQ5uSNmHFea40SxyHc1tfee7Nlh7cGi8ECvCpsDq21um+tN7cK662sa6lFe6XWqfVmZq2b0JDCyDIigHkh0tqLWQNDBkdA7GpamHDIiQIobRRsGFKOLIiotK71fF325vztP5Dj/MgcWq/d19fzZ8BLiqDutbFwvp4vtRWiFrgKUBAMLGZdrahWRJ/inChHBE6hWDPviK1qJRQoYdM9pzHnKcdjknGIM/W70J0JiEECMkA1qkjd2N2w1lb21nTvtXmnMR845N4bEYkEQYoMMcTEQQK4S+APKT2Yc9dQum5tVwALUVLMeWR3ogbUzDa3zcndQatqU6aUOYHWUppAADN3kBgdcZwO0zBdrq+Xy1cR+uNf/0mM6Xc/fc7jaRhHM/vjX//6/HZhljwM83z4+Pzh5y+fhSUPg7buvW/LMqQ0jxOg3g+s+7oQuhC69bsOSHttWpil9q613sF3IswxCBOzANjtuozD6GpddZ4Prtp6124pJQIA8CHHVmutNcXxHlMjohhTjKHU7Xo9l1K76jRN7v7+9lL3JQki2bYu19tSyjaOiQCW6xUMcs4IIFGIHTFyEkcDAw5IHAnI1xtrhXsHEioqdmY9LxwDJQR3K404h8dHInp7e3lfrte+N+ghHXTXaleVfbm+gC81vBhfEFvRc+lvAJu1tZTetlBvvK5627BsUDaoBddbqXu/3WqS4zg8WwdCQ1Lmilxbr6VuagCEYiCIAOrYIXQIFaQLGVOLRK6+1X7b1rXYPHzPv/qH2W2PnJf1fanvTfdWX4VTTuOyrdt63bey1ws6uLF7D2EgJiC/b30BMQUZAgPLrovZZqoOJAhaSzV3yDmNYx5D4BRzwJyHgcAQegByt04dmMjM1IkQWLR7a7psl77vgUOQMORhiHkrqtgpKQUwVQREiMfD8yl/CwbdXD0Urc1WkVygI7RBEiiYmXprvqurW1VoyLHZXrUEOblKZMGq4H46PUVJKad8SOtyfv38ZRiffvmLPzgen19e31Hk46dPaUp5mLdlqbUfT08p5I8fPvzudz+q6dPz0205awNh1t6GManWXss4HZftFpgIupZdiIi8tWVdLmYlxel2XtwrgWrbAT3FcDw8tdZDYHQEw3GMe9l/327p3bQb3BcOtq0lp0lirqW5G7GN09i7AWDOsbV2PZ9vt4WIjsfTOEzLuoDjOMxEpHW32pfzdbku67Yq9Noa3AscQCych4OEPMQU8kGiOAJ37V35eDQWW6/etxCOOAxwvXEzEPYhGmeSKR7mIaf3y+uyb7XV635tDEi92OvW3jd9W/yl+KJta+Vca9cOvWBbWyvtei2v7/u6t7JrV+3VrJo5l821Y4qJKQAW85tDq2Up/QJAhKwtkylZh8pAySOgdOgFDa1Lb1y7VrOOHOXh+0//gP/eP3u2vpPUYm2vizASBwWTmJj9dlmX29p0V23oPaYpygCMTuTIHEBSFBkEySV2atve72tjdOulmpJCTDHMQ84hMkYiijikMNa+VVtAqmkMcZAwmLlaQ+jAaF5bJYacZDSlO5UJWLTvrV+VOiMIEIVT5mN3dLhvKBeFKwVF1kgVqQGjw77Xt+q1mjqZ0252A9mL73UvI41RMphHl9M8T6cnRGKi3vjLzz9KkO++/T7E4XK5NW3PH5/GaQIIAFa2NY7TfHwY5/z25Yt1/e4X3++Xa9nqaZ5f317n+ZBTeD+/IPE8H1W3yIjm1/ev6C2GQZj2968GcJwftuWmvZBQszpNQytWyz5N47osDw8P3fpt3XLKhE5AXbuqau+MnFOuta63JUq82+TMVEjyMPTet213N0DU3rZtJYJhzDlkdHxbLm2vcxpqq4AeQ9jLzsTu2mtliQElpcxEIeec5xwDjTOKuDDyiDS
RsEMkpq4dchDtsCyASIFZxExpLzKOx8OpNV2aOfcXfd/bbd9fipaX7fPtcvZWtlLeL8u2mLUDaNbuvWvr/bzv+2rgdMfoI0VmCpFNu7uhooMirF1r7Zvb1YmrUwKraqVJ54AhNrKlGvRWCUwGg+DKASamUXgcwyP/o3/2S1BF0jyF1joAjPnR3XZbQ3zozdfl0rqhEzGFcGQ+9g4kSATGRBLHNImFZiWkrJj2ukEAdDTFrtEpklCWiADE0QyYAvhoTqW/qoLIgBBTeEhpROQGbq6MrooOnOOYRKKm1rx39gbey152N4gclAJYZxJERsSOV6ebmgLuFB3dyOJSz7f91a113dR7CAy0OhTgTpa4jwIYIRzG45w/dUO32ns7XxYWzynelr13jTE8Pj4lmW636zxFwdBaH3JOY9jXFR0eP33Q1i/v7w/zuNd1XW5Pzw9IsO/b08PjNB+W9V1bBevL5SzsKWU0ur7/gJHTkLfblVRDGs287DtGKWUNgZmp1sLErVdwDxLNDIlSSuiwrkspZZrGYRy6OjOKBHBat33br8OQGMnUsiQm9N72/ebWp8MMTExyWdaltUjUe/PAmaMbdmvunmPOQ+Ygd4JLSiOHiDkjshlRDk6dJKII5InC5O83Lxe/o+LKBl2hN3Iry9LAG0BptbEv29vn9x8dKnNsFdet3q79cm2vr+deOcZHkeSq6KhQb/XCKAIsgvdpz12pACDm5ozdSrd9rbdWqzkWBzMD1w66gzdGpBagB+E4DnNOmVJiEUlugTEMkZmd/7t/9QcpsHAOSSTGIGMMLAHNmqsC+V57bUg0CiUEiVFEotVG1CUEcxBEQjSlbp5zcrR1PROouzok5AAu2q3U8506D+4GYLD2rog0jiPjg3ZiCkRJeGAiVe/WSn8H0ofpFGBC462Vva5dda+91R3Q3IEZXMNWXtby1g0Ox+nx8SQS9l7ExQ1L8+aO1AC7mhk4UycyIBBO3obMHx6nD1OetPfz+afr8rovN7cK0LdtT3l+OD0dT4co+Xo5jzk+Hj/u21tvdZxnA87MIlK1bpcruqaceitBJMbYe0FvHz98WJfry5efj4exlW1d3g6HKJK3tWzrl9u+Mcm2LndVLiC9n18P0+xutZRpmlSViYc8xJTMlJiPx4Pq3WYXr9fr9XqNMcYofv8FAIiIKexlzylLDHdJ831wva+7de1FyeHx4WBt124I4Xa+dW+UWDrGnB2USUKMd8FrGDLliYXNzFEgToys1EwYgTAFar3XG0RgCQaAyCjsqtf19ecvvzuv7xi4mP98ffv59W8YcEgH11g2//zj8vq2dQ05PQ/8kGIQwsip9bJuZ1UNAZGIBWJmEWGkwBBIAqNBbeWirZhj62KIjlBKJUTANoXwcc7fPz1+/3T67mF+nE5TjoKMxGrc0RS2Us78p//TL4fhUSg4IIkhakwSQyDvpd1UoTZuWoko8EAwppiQqPfVbRdhhlRb67gjRnQ2pZRGQa+lGGg1IwfhwVyX7e22LSwBlEq5qu4ORkTT8HgYPuidt23CADmNiGOtvteLt2bmDkyARFjB1ma91e5r1RVaQ4zbuuzbAgRjPk3D06en7x5PnzKe3LC2JUgmj9frrbYeOLh3AiAMgCZBMx8Tfcox1tLO71/3/cwcCLC2DZw+ffr+8fmDGrr55XI+noaPHz9+/fn98+ff5GGI82HMg+3b2/ur9pZDiJEAgADd++12US3H+XC5vC/LNYV4PKTPP/8EVgA8pbnUzawTMBqZK5CLCCIMQxJARFI1+L28kkiYmCTGfV1rrSGEbd8khOPpVGp9+fp1bYUBg2CrxRyGYRSJiCQcQhREUHdTHMa57j2FgL1vt/fjYYrj3BW96bZdt+Ua3BGN3Ls2U43pXrKPPE4s7BAo3gGdAFPi8ZNxaOsbIYiM6IDCLpl5RNaqbbutL5e3L7evm5bOoTuetxu6xBAIad3aT68v13IpBofpwykdIzIJITE4tFqaqUs
gTshCjEw4hDykaYiHO6SXQFvve28da3dV7dHTINMvnk//6Ntf/oOP3/zhN0/ff5yP0yASgFid99bX0q7r7XJ525ad/9m/+nUOcRgZCNUdMTBDiIzs+9721bQFV+u9pRyGdDQdCQjctSsRO4ZSaq2ViGMY3RiMcjowTbVp8xokOZM5tNrWpSEAUyi11taIPVASpjGNkU97W4yaQDYLzIfj/CFL7rXXfqXYHaF521rprboV7V2VGFLZilltzdxknh+n9Aw6aOspTqfphEraNvC2lq3WIk5qzhQYg/kegqV4UEUGrL20CsfjKcRxW/fA6cPHb2IaX1/fe28pj6fj8cPHD+/vL3/11/9Je/j2V3+c53x9/XJ5/ZqHeJynGNhUJVIQup7PrdXDfAgs18s1Rnl6fti329evX4JEV09xaLUGRlNjCSFKYCbB6/X8eHogROIghMys2vdtM7daNwBjhNvtaqaBAyDGPB5PD0B8ud5u13eEfphPUdK27+7uiExs6mYQ01BaX7bt+ePTMEYl37dy2RcCR1cOGJkRqLWtls21IiGjhJxTTAOlOAZDonjQmEDcGIwD0YgYuFZkUgciNEQKSdEMMUqMks+3y4+XL1fXDoKQx/TL4/AQjFCDg3x5e72Va1VHpG9PzwjQrKurUem9IqbIc+SQIkX2QCHRmDAPlEgoyRzDwQDW/ebehHn09BAffjE+/tHp+MvHh4eHw3QISdy7X+u2btuylrfl9na77Ldr39bSnP/7/+E7JiBKDaw7pxSQgFkcuDUvtZs5ALoCow05BQpjyJFj2ataATNyol57vUaRlAbiCKgxpphGVKxWJAAAaTdTQpd5PCny5fZlb7dpOsYQGTnKw9rflvqaQ4hyDJKYaBpP8/xk3BVa861502bbWm7rufWdMTpBK7s238t1vW1g6TA9gQsDpzCcxoenw1MMtK5ft/1MAOjQq+yLIpaUmGmIYQAiayh8mPMhYOjdjvP04fFD6/b15XOK8fH0+PD4cDg+qPoPP/zlXtY/+ZN/FObxtz/+ELQfxowBh5x7rdYqEZa9rNtyt2Yw8Xa9TkOYhvibv/7L7bY+PnwwMwcve2l9WcoSQ5imad82Znp7f+vWRcK2rUG4tcKEiLCvV0bw1nIIzMHccx4ohLVVdZumaRzmVvfr9eKAKecUUtd+3ZbuykympmrDMH79+vV2vRALx5jG47bst5cvpVzdIQ/DMI4h5ZCjs1CMUx6DRAnCQjGzoSF0SCPPHyHPXr2vV/IGIk4s7N6679X3K0fEOJt2IXo+HNMwdyA3vu1UK378MM95aJsW7V/f367bTpK0tUABQZdyM7BitWgVDkNAIQtSY9RAjK7ujZljHEKYI08xBWGWhgeWE8ePFB9zGLMQE1O3fb+8rS+X6+u1/PT6+npdbktZ161vJSLMY+I//TefiCOAGmJt4NCFA6ALC7mXUtZ9CeIpZXDLIYQYokmIVvray4bYEYl1CdS6vXMKIkMKkRNNeR5Ydl3VvWt3MMZEPqR0EMnrfn6//MQSDuM3BCFGZ8FSbm59yg9RsjsBcBrmcRrVWutn1JaBvcCyXl19ig8SUozkTqq91r6sm5kJp3mcT8NzrRvQFgR7uem27qtfa0ULrbnEPg3DEOc05CAHwhT9lOh++ovotrzfLpfbw8PDhw8fmIQZUh7/5m/+i1n7g1/9XWH+6etPD6eHbz588+Xrz4BAjgRQtW3bdjufp2F0MHez3pbL++PDVMt2fj0z8zDk3krMUnv78vKzg8/j3A3cvZey7ZshWq3q1sp9Xa/DOA7DqGoI1NViTikOyBxidKJa9/V2S4KfPnzK41S1uXmOGQkVrPd6T/C32pblNk9TL21d1nZbiCCgr/vSzUHd3ADVMAzDOD88xukgMQ05pyGnw0TxxJysLiSDD0dglCAAytet9xWtowTPAxGAVlgMvZnV25evDeDDd7+Sji/vZ+BxrxvYLUkA9b3vr+eX99uCxO593zezvuz
vVVvrXlthVpZdqRmQIxtvHWtzNuJ5OpEkr0joYxoPIU9QHxJGh+7QGmz7dlvO19v6u9fyV1/Ov/n68rvl/bwt+7qw+8MQn2Z6PgT+J//jJwdClK6goNbdYZUooJWMBSeySMDCwcmISThrgaVeFd16V9uMLAgFEqXSnSXkEJhjIBw/HB6m0+m22m29qnXhw5QPMQ5I2Frdii3rMk+TQGLyIBQT1r7XfjO3yA8s3H0PRGOeUFO/XEO9Jtgj0hieh/RxGKaUc+u91BUAW8HLednrxbQAgekSya2X8/nrttu2Y9nNPQCASApBUsZxPHA8MQZUhh60921f12Uh89PTcRgm7a6qh8N8vV7B++Fw3Ld+uVxyzH/yd//4z//yPyHR88NT2/fb5brUcr68JAnTmK7n92FI1/N1nkYRvG2rdX94eFRYupaUhpTp/L4ETixB1efDQa3X1pLEthdmRsQQooRoiCEOKY85DxBE3U3VEQGAmbRVUrte37Z9m44P43QspfRazL1U9WL7urVWgQCJVDUwai+9ln29FC9I0vcKZCEKUuCcBZGR5vlRjqccx2E8pvmIRGbG89Hi7CmZdacsMrgMTgZ1pbpRmPvxieYjNW9vXy/vb+fLzz+cX3+6XN5vb6+X7X3Hbd1TDDlMt9v72tdLebuuF0MFMMLe2n7bt3W9oGnX5l6dSu/dzen3mR5256baYZ+HmTg4igCK7UwlEwLO696v1/Xrevntefndef3hdX9Z9lttG7SOHbwPSY6Z52TTgfmf/MtPtdXmoL17Y6POdBGyphEZj+lw4Ec23Pve4WJQGFIDqR1dVSL3uAO2CQnRAb17RVJwDzCN6XmYnj89fBckfz1/WcuWcBYaOVII0ZRrvS37167hOE91WxiRCQDOdX8rZdEOxE6o2inw/Dh9nyLfzj+XyxJgmA5P4/x8mB7TkJf+ftk3LZ2qtKplf7vcLqMEDqnp2nQv/bq1i7aoxVx3EgpIg5ziOKC0QCzUte1dQ9MGrhmG4+mIUfatuLcPH79Bx7fXV1N4ef/aepnG09/9+/+n3/zmN9u2/vrXf+TqX19+eru8EMjjw/M4ZLUaorhbDPzwcKx76a3GmHJO21YIhBDNTIKcTh+3bTEvzCBpWPcGyNNhIufulqaBhFNOpVZiliBxHIlnBUhZtGsvpd6rwyED07rfwH0axt765fze1jOpMiAilb0exymm2LyrOQfspr1pihGZmwLFFIIMEkOKTVutS2bO8zGMQ8wTMNZ9VxZ5+pYxkFfsHWjUFCUJhUmJ8PaOAJ4e4ZCY8/L1d//vz3/ZHP7ib/7qz3/716/L21/9+Nd/+/oXKBpBz7ev13q7XM7n7SsFZzG30ro5MBMBsbm0rm7sqODGEAyidhT2kPauG6cgMZSylnYTKOKbk+3At2LvdXtZy9vuG7KRABBFHwKTdnAg53nCeYQhMv83//xbU8VO4qh+T5VVC5VRxzQO6TjmmMf5srxv9Yyubd96NQPaew2RUuhgHchqp2JguBlQs1ChPZ2eRzliG0Kc1/6+rD+jUsBBREKUIAOAubVtvRCLQyzt5rSXfgYCbf769kNv13F6DDy5o3h4HJ9zPl5v+9uXzxLz/OHT4XAMKRbdlvXSqt4uTZSQgyORYZPrz9e/ei9/C7wBiZsBAAUA3gNhTk8cCHExqG5dMWN3dkuSx/xgqutyCxIAadn31/P7vm2tlzxO4zR9/PBcavvy5cuvfvWH6Nha+fL5s5l9+PBpmkcOzATuEEI4no5RQqu1lDqO47YviO6gx9OhVAtxSjnt+x5DMndDckcJnEJUI4qxW2fh1nt3W5faDaBDtyaB0KE37aW0XhARYzpMxxwHNwXiFAe7m+7au2uFXgnU3CXGGAckZqAYsoQQQkg5xTh00xAI0FPOOQ+glVynYYhpRLJumKZjVyBkGh68me8XFAIJpAqBKH3bsMnrjwRNc+DhMH76fnl9+e3rb0Mefvrp5998+e1r/W3d32p
7b/t2Wc5rub29f73c3kGcTFk6kg1hGNLASOQsgRMnwCgysiRVtI4SO+Y1hB2gmLdl20urGN3YKvFt86XjTfuuCiQxhZCkgxo0EXcopntKMk7iYh4D/+N/8YduLapnlpBBWTfdPTaC18D5MHxzPDyNw4OBXK8vte297rWqORoZoBLsUdyid2/doHl1ogZY2sLuh/y8lV513cq72i1KJ0IHtk7mIIRCZrCu+6KGe7uZNrW27XvrtdVyuX4ptqfxFPmBANFTnj/k6fl6296W6/j0MByGIGHZlsv5a9l9X0y7pjTEey9fnERab7V0EmExSZApm1fmFIeHICMjGtduzU3ERChFnrXq7fI+5gHA13VrvYWYkOneN398fF62xQxExMzmabhe3n766beHw+njxw8AkFLc9v308OiAEiIgvLy9DimYOQLV1pB8mo+qqN3NKhEJyzgdendViyG0XiUPMWWkYAbmmOKQ8mBG75dbt45ErcG2NjBnohDEQ9rXmiTkgbvVUpsgxxjM97JuwjyOsyMUbcgMDqrIRHdLOXOkwIjgShQCUhjzlPMhpIkopRCpd2tKw4mPs3mDOFJ6Aqb99iUQY5x63x1ziJ/awLSf+esZasEkz9NweV0/v5x/u/z44+vfWq/TIIrtvN7Ot+22l+uyNzAkNPQ4DnkcU47ESCDCFFIQdCYchjFLtAZW3LlBXKkbgbnXrqKGCtUZO4S941ZrV+u9a1NzMMDWratJQCYlghAFAyhhA+R/9Wd/ShAzg9sC4pJduRgoWjez58dvDsM3gfOQvlnWy/n8BTEChiBjYEEw1cVRMTCG2m1X6BZAQoCqddubwqVd9/altjfvhg7V1ByhgzsgV6DCqFX3ZSutNu3mTtqg9WaOzeH1+vNW9uPx05BH5KE3VROZnov15ls+TUnCvtzOb1+u5259aBWixxyHGMZEhzEdcpQGa/MVEDmwhd7J8vwhpw+P8u0kc6VVcQdXLwI4gXJdyjQMwrSui1l/eHqUkNyRkB8eTj/++OMvf/HrZb28vL18++03puU//+f/lzB/8+n7cZ5TSrdlzcOU0nC53ESkq7pDENr2nUVUjTmqqgjv+8KBwDHEGGMsZY8x1FIcIMfMHIRjkJTSIHlAiSjRTEhoXfcYBgNs6o6kbs7NzPZtN1dC1tprK+otxjANxxiGNJ5kGIjE1AJ609JbBfDr9WZdY0zo7AoKFoUIgTiGPKaUgohpR2SQSMQcR8uT0cwyRrF++9kghvTB/IpmXSaQYD9/1vef/fZWCRzSbVk/v/30+v7iyCLoDNV8r9CaEM/Hw8d5+HCcnw/D83F+fjh+OA0fxvBhGr57PH47TkdBygCBwLu1rntrtbv15tar7yA4pDGERDyaQ6t1WW51a702VayGACgcRJjIJXJKEdEBPOUA7vxn/+u/BpcxZiQ1aiiMiATaHQDKmI7Pxz9AEubBoJxvn1VVJJFHMC9l630lQgQ0JCBrvjJjDpM32kp9W5Z1vSy3rwhNOPXGW3GSNA4TsQPt3crp9OHh+KFs2qqiG7MQRG1UezcXM9n2t729UeQUDqyx7Nvelig9Mk2nRw6xb+16ud7ed2tkzcraneV0OOQwubUOm4bWSQEcoVa7AfJ4+MUUjyOJxNCtVH9nbGyzawyOSRJz2tfltlzm+SAxb9seghzm429++NvH54fT8eE//H/+H3/06z9097/567/odfvu2+8fTk9BpPW2rus4jr1rCOHe0wgxqakBkAiQxJi1N/Bquud0J/R3IEdwInZ3lsQckPnuj+AYQhqaQjdCkWmY91vpvc+ncdlWjpGIXZvW5mbMpK25KREoWuSU8swxdwdidgdC6GZm3u8fx17W5X25vqEX9B1doWtgAm9gNRCxREoZ5lmGERF7iMIjEwMycjbX/fYlRCbeHQsbUQgwpde3L9fX143hdbue9+u6rUutnAJjJgzIMcpxiM/T9GHKz6f0YYyn0/R0mk5zfs78IYXnYXg+jN8
c5uPTeJiD9FpbK61ZbdaaBXK1FehGHHIYxziJZyt+u71ty3Zb6tasEdbeA8OYJWbi6DFyIARTJhBG08b/7n/7X2qtpTQnvHfHHIDdOnaEbs2n+SHG3PqquC7lrfRLDAEsOsSm1bomyQDRkAGwt5o4z/kJOXbH3g1rDSIx5t4bIBgwIE3jYFa6bQ8Pj9N4nIf5+fh8u12ulxUhCAZG3mtpamM+Bkmt19t20d9vZxjqDfp+fHj+7le/TjTtCp9//nJ5vUYUM1vavu015XCYM7F2K7VqL0qoDfba1LErxtPwgQmr72jv0G4iMdIj+uCmAw+t9rusSUI6n7cYJUZ6+fruSH/3T/74P/wf/89ffvcNOPzmb/82CI3DeBgPLBERz5fznYw7zwciRMR+J8BRRMJhnNxs2/dxHAE7mOZ46F1bb2YuEoSjcEQiY1SkPM4KwJSEY+/uZsM4aOu9tt/+8Jv5OAKTto7ovVHf9hgIAUWktaKq83RC5NJ7SMmg7/uaYxAJtfW611Jul8trWXZTc2/7dt3XHXtHU2u1tgaAzOTmLBLnkzNhGPowIrGX1fsNDTHPkgSXi2OAEBgNsIE8yTB8ffmq3JYK/+Ev/vw//vY/llKZghEG5ma9NxAaY44MoNpJGCgys6nV7iGmkGitq+01YbCua7nWWra9llaQOwfmcIVUKYxEo3qr1cruy/V827frdueSODowQxwoJk8DBSEhZAADM7faKv/P/+7fgNvb7XzdLmAOrECLNiPMCK6wAGgacu+11OW6vtS+BmSwXCu6iTVUo8Cju7eKASN7FpqIU2BmIFITlphj78W9IgMFT7kiNbV6Ok3CAR2mMTmW1hS8l9IMST1oxzyknKcpJfR+u7wsdXFA6rvr/ukXv/746VeoVNz+/G///Pz2HjmpWzcEAO1VuAEUkW7atmVXr+aB4JDioLoS7sBYvZZ+q17Qx3E4ARAiCzF06K0Ipe6OBimIaVvW7dtvv7+cz2o1x/DbH368S2uYxIEQgUVaa/X/z9N/NcmyLfmdmKslQmRmib2PuqovWgEgAcOMkWM0Gh/4dfkZSL6QD4MZ4wxtBmADaHSjrzxnq9pVKUIs4e58yIPJx6gKs6rIFS7//vNah2EMISBiirGrhhBVLeXBzZZ1a11Pjw/uRkwSsgPc1/HGmENMZmCuABjzEOLgjkCCLObOQqWU3vuyrR8+fko5PR6Prvb2dkaiGNjV8jCXbvM4mjkzA6AjgrsQufVSas4DBX55+Xo9nwmBELZ1DyGO42hu3hW9mSuQEIeUUkoBEAhA2E1FwhPEaH2x6wu0jZOhBEzB3cwadqXe0Ip0Lm3788ePX67Ll+XL6/VLX9tNb0CAoKWv2hEM1apa7163ui9LtW5mdds3U5O7qTZLgPt+vW0vZjs4mKvRHqPGoMzGgsRoFVrFrdRay76VVorV7l0JAZgo0pCRBIksEPdWt720Dq2p7O0LYGm2XJZXS8xONIi3CsUoBJ76ZTvT199lft62q0OlAAi76+ZgqMlNWte9VisF4K6U8ltfxvzo7gxgWRCRwMGwNW28hAgGs5MiwuX653fPf82aartOE/72L78D4x9//PK2XA22XWtq+TilyBxlvur5x0//+Bq/vE+nSDHHzE6IzN0UFnn01gqhj4oYOae6rF9ZjhakmTl5b5BocnLmYT7E3m9f93+sq0Pj4TAxx618CThyeOjaGFHB3WoHIceyeu/Gwuvttm3bcUqfP30NKYlQ0/10+tbMzH3ZVgR8fHza993df/3LX+37zsjjOJWyM1Hv7kAxDU27AhJHJ08pEGJXpxj3vRqoCCNEkaRmIQQiaVXByd161XXf19o8xG3Xst/nhFKxPk9TXZcOziLb3oc8GDiaEYAw916RRUv5859+nB+Oh8Ph+vZxuV6fju+/eX+63M4O9Pj0jhyYmZmRhBEFwBGW1ritwWYI1L98xIdJjrG30Za3tpZw+MZowLBT2dUWMKBL+fTx4w31dPi
LP37+d2+3Px/HyCbrbVtvW4Wi2NR6t50jIcS917013ywnyCNbI0DJOU1xysJNovaVfLvPD8bIZqi9m0bs0G0zfQueTMHQETlwJAdXN3YAB9VtW7aYlSQFdC291ta0NiUC/m/+L39V+/p6+XS5fnHdGR3doPVuCqT3CoF7b1ZKW1vba9vQQRC1YzclZiQwNgjqrZddA2ZzUlwJSc3ZmSw5cScofV/K6628OanCXsvtdquqalrcmgjMYX5/+GGYD01bM20G27aehnEaDpE5Dw/tVi4vH8DteHo/P30T0qjd/vz573/8+PdDxCEGGTAdKYgngOnwNBzfCUTt2lvH4o5J8sA0Hg6nnFLZLut2a020JUJVKgoVnVkzKEUkU9a+m6mpbfs6jad12cYxrdsFUY7HAxGDQ8qBKI7joezrOI6t6fV6eXx8PJyeXq8XIwsxGUBzdUJ1U7UUM7ijuzA1tdo1jSNz6L2auyMDCoXAIiEEpFCbOqCZXW9bLdvl7ZXcD/Pw9fUrkyADuqtpyEkBBbFrA8IUo5qBA9jdOylLePny8qff/ZfjFN+/+3UpVfV2mA+Pp8dpGFhSiMM4nPI4gTUh55SJAqeU0yNzUGiotb58Vtfw+GyOdtkwDCxHcCB28M7Ebdk+fvrxzy/nOurab3/48R+Wdr71Zd1u2q97QzPRWrsjS7JOWnfQprq1vtW6KxTHfdvflu0z1cVq6bCikDkYFBftgL3uqj1IBuhWjeHAksGFi3TtpRYGYxIjDMFJCFzvTIF9r2bcetm2XTXz//H/+ou9rKXrsq3sVRgQUDtW64DoAFGYkEyRmR0VTK11MGIeqkEIgwO7K7MxEyKYmbk6NAQgk2aKHoCgQUOmWvdedVu6VQ3h+brVHz/8eD2fmW2ehtPxcZqeTo/TmMfbckHiHCK6TvMh8kQojn1dLoCcp1NOxzSkrd/+9NN/vFz+OAwShhAnnw8QogcKD88/pHACQ0Gvt80N1XCeH5McUVPinMJslrZVS6kGzVAUHQXJA3vMFLG7Qnf3ZV2tQx6HYUyt7722aRpSTL21eTog8sPDw7IuxIwGl+uVWb797nsD3vYqzMzSmiESAJha03Zv9MY4kHBtxiHGNJoTsRAwOGMIIUTiwBxr0VZbCFG73a631tbr9XycZxb+/OWTSBynQy3bvt1ECBF7qUzuYEwUmAF823eRkFJ091bbx59+v12vT0+H+XTM+aSA97GE6XjENFIYKETOWeaZh0MaT+N0DPkIFGur3ZTBcN+xUxgnHtgZKEyAJ0PWbrDeJGZS/8cPf/inD3+3rl8NfIe+7C/eb0X3ujN6JokxzMLxPvXvQA7gPbROhJFIhDkPs2BGRIrshE27gzmxmZS6tlaYo1AinEKYiHMrWPfS2n5f6A3kOVNOMiQMjEMSVevN3F04lE1bBf5X/9370pZlWwAMwcHJiN0JkAFiVzfXnLJIdCNwHBLnFHuvwpJxQCVArFhBlYMIMwV/eDiMOfTWrO+lminFHJjBTQXhGJ+TPwT4PsdvUzgu1/3l7atjiSEdpl88vnuach7j2LXe9mXKp3kYc44hDtq99ELEHFByOh0em21b7XtttVTzPQ0cggvaMMbp+Hw6vs8hiYSmutUlxsgQmkKgjC2SC2pCe6zN962V7oDMbMEC9SRGbGLKW7nVWgBgng7jNCzrpfeWYh6GEZEBsLV+mI85xU+fP6gquIeQpnmWPIHwsm2EzBR6U1NwhxgjuKcYGDmE6MAikUgAkFh6N0cMIZKMBsySzLGrxyBEdFuuvev58uZuMabL7bys23x4lJCul+u2rYd5imHc1+KgZoYokXiYkqnXWg38elmYaBpD36uDUghxOEqMgO7khIRxnKaJI8eQTg/v59NTylOe5lZ7J045CxO7Bqu9XVyET+86m99eSZRoQs7UFlwaY/yyv/z4+ufb7RKFSVOUFOiqDgjZ3SVADDmEgICEbIa9ce9qXogIAUk8yjjw1LopUSXroEr
Q3VtVbebN2Ikgs0xOESC2pn07m7o1tQ4ing4+3PWDkQABOZJk5sSUiARQ+Zd/c/zZ77gxsyHeZ70QIkZsjtdtK/2G96VraNBbzBwimyLDEPiUxnEaI3FXWw9Ten5+nqdTJtTeStnRopOwUBKJkVMIQdNJ3j/mH6yFIRwzTdvyirAfxveZv314OE1DRHI1XMvabT/MxyGOLNys17obdUo+jeN0fED3uhlTZErrsre25oAxUu815Hc5jswQmaC2QJ5CJMzrtte932FsZlwbEAlQdxdwDsAZ00DjIAeCVFsDa4iQcxLkr6+f9307nZ4kxBgPOY/bvseQHk7PX758LHUfhjHEmNIwHo5IYoa32zLkFCRs2waA9ySbGFMMIVDX5gqE1FsPMZqjWidmCQOHyCwxJzNwdxLatqWUbW99ud0QoGvftg2cjseH1ouVAqY5DUOIrezgigrCIcZMRN26ez+/vvbW19vtMM/TPKm6xJBSTPdNjyRMZLWD7TkSEzBLFHZvagVDNANteyD2OCA7mToSDiO40P6C66vvX2kcME+wvb28vpy3y8vlx8vyqspsU+YJxZU2B0QMMUZ3k+jEzMSM5IqGG1JnQQkQY8pxYpbqvncvfe1WCSITWWvQhRDdgWCI4eRO3dys9nLxamBmrYREcYYQHQCYOcQoMsY4xCDjNAZJCMCPv46qtSmCCxIaGCEHnIHJoBp4TKNrLFsXj0mGmI4h5CD53fHd99/95W/+6l/89re//fb5YZySegtMd2EwQe/a+l6GMDQFIOSAzh5iEI1imKdDCKdIYwysYbPQT9P7d48/HA7f5cCl7K2V5fZy25c4TXOanFh9RzAkI/YQYghhzCc32a43V9j3dbm9uYNgDoDdhQmLm2knWw7TMA/PINxBrtd2Ob+ZmzuZU1djDvOYhMVVgsQ5jwEnBEKvrqCmbS+363lbbymPeZwN5OHxm9tyu62377777vPnl/Plkod8fHja9n2c5mk8mqEwt7Yf54MIq2pKqbW27Qsi1NZEJOVhXTcHyMOATNqBY2AJZsSCEhiQ9rKZamvttiyAWGvdS933FdH3dQ9hSIEul5/YLIYYSKxXgs7o3q31SiJ5mK7nV+0bKvTSJFBrfZjGYRiFJQpFxhACM7t2dq3bWtYNkDhIN3O1xCGO74JM2+Wr1xKPz55HUrReEFeKo02/JCjt80+Xjx8YUR0/fnn7009/+unjf/56u1zbYJASh87keFa7hZiZMqBLcOHkQFEkJCA0gw5gIiGGQ5Sho6l7LVV7dTPmgIjmql0xMImQZ/Lobu5dba+1aa9CSliHSfIcmRsxk4CbpTAc5inHOMQcJLZWBJVr6QrK4m7J3buppRYDa7Xv3n/3L/7637x79z4EqWW73W44+mFMDKXW/f27X/zw/W+J5fVyTn+Ia9k//fST1zOhF/F93yNQtHaiuSqt6+oRj2nMIZnupb8O03Rbb80vEZHDMck0Ds+EiSBo36zdWJdedd2305AQIiFM03POx6YXIK/1soUZceQg3ro6Bhx4D3ULkkY+tM5Xpcg9RfQU45AGG/BmnnZu2r9etg4cY5EYCEJvIAE5ZATWJhgCoSEhiDhoqY0AUgpmVvZ2fDxcb18/f/n47ft3f/rpD7fbOk2HkKblVkhijLm1DkAgpK2rKiOEJABmXt31dtuZmSgisTIjirMgiXojl1o6gBNAGJOqWXdVba2llHq3vb6YIUJcbrdPnz799td/WbZ9vW3z4xhzaqa6b4g6hNhrK620to9DqmW7vn0+TQ/WaxAeD5MpSACRaIbdjRz0zngKkRVVATCoIZrxOEl+Z+qUwvj8i+v5T3I9h8ODzpl3s7JTUopi0z9P3z++/v1//2//n//3Hg5A/vHt03W5tVZ6WC+35vnQWcEmDj+F0L033JlpRLTARm0mIh9cfa91ZYiMpNCss5mZNTVXYqyd0IkTR+RAzEw+3GeITa2bIUMaiB3HaaAUKAlxZElNvffOkUl
kHh7m4WHI02F+keP0VO0CYGtZEYLQUHvrbRXiX3x/+Ntf/cu//u5f/vVf/ebp/enr2+2yfDlvX8qycLAdKgfBXoTYLbhJL33Zl13dHVvfcNOHdMiJa9NipfSNKBZooAKt93Y1/HGttwbr3i+Izgyq2ra1pkwojOk4PfHLn6+XVyadczqM7wI9u1q3eW0fHPatf2E+eALtjYMchocBxau33ZrXohuLi9PII1ZUWkOS+ciEEsSu50vdKmANcRJMBt4qxJECsXZ0NgY2TPfOTAihO9W2JQIEu769XrfL+3ffqPrnzy/DlMd5UINm7RCn3sy0pzxeble498LMJMRtW8zAzPa9nh6etPvb2xsJS2aW3Hs389Y6IjCwo6jCvtfWmllXVQBalqXsTbtvtb18fVUHJ78uNzdiFlU9v70chtjrXiSQ9WZNvH7+6Y8SqDc1MxGqtQ7TGIOoNkQlEiQmJAdFhGbKOQ4hIQgzzePUEL7czkF86Ft694tx+B5ezv52g+cA04H6qOXCaWxhpum7b377L//x91/+X//T/6f6h3Cwi112W/bbjj66r0BwWZdKPXOLCYLPYIpQhccg2I0Ng7Tcm4MHBzKzZqCtlb7XboxDQEE0JI55QFREZCJ33Vst2ok4RwyYyWGcBooZhAVJna0WgKbAEqbnd7/85vFXbjHLr/h/93/6rVAijAZa29oVxKN3KH2PzL/87tffPX///PAcUljKpftm2m/bdr6dL2+3shUFP1/Xz1/Ov//Dnz59+NO2r021FOwFk0sW9g6X5bLouoOSBEK2ihmz1Wut54a19BXVy20TjEN6QGtGO6EGGgFlWc7bdnVmRiQ+HoZvJA5EVtprw53EWrXeW6s3wS4sAhIkQCAgBCbEtKuvoRujew1sIWdiDyw5ivWGVJHWQMgiLAO4qvYUco6BKTAMrr3bZgBNCQFiDGp2vZxDCI+n5/N5W9dyOM4IzJRSHE07ALau13U1tDwOrmCmKY2uVmu73dZxmMdhPJ/PgAZOKaaccm/dDJgpxkgsiG7m+77v+84siNSqXi6LOS7L65eXn1rZH06nspdWd/ciStq29fxlCIIAt7evkbzutykPe63jNPVWEf3h9IjMe6kxkjpqaYjauwNgIGEUpChhYggcMaQcwoySwbs2hd48eIoTqyNURPYYKDAaY1BgAHTI4ymnH18+/Kcf/+5Wfty1XPbb29t5021p58vb2+evn/JwOhzfCWMQUlNTDvQtw4TUHVqv7IDIxiIG2Hvdlr3ue1XrKgjuAJtWQCUJIUQkKl23UreyASoxEEx5GlI+SXgSHgWDmWlfHXpKASU9HH6Z80NZt9Z2IU4cg+k1YGpQzBWJSaSbffxy+ft/+I9zmgrUp/NxL8vL+Vq0mrfrUj7+9J8a7qc/PpVGn18uL18/132dpuM0JXSA2gUVwGqxq/ZqRZFhJUqstQfsR/IuobiTSQcni+W6vX791Oq8w/jt89MxZJweTqeny7Z4qxvneSLJCVEMNgDobXMTMGi9dt2ARXutWIQSUiEmjtlZetOy99Wrj0Jg5CqRY8YdOHDG7ga+9xLYmcCsEgY1a9CFzHtXqK17b91bZ7RtW2NUdhhienu77Hudc9yvW8wHJ3bXum1A+Ho5D3n6xXff96q3cp3ykMeh976umzscpnHbNyFiFwDovbfWeu+IaGZEpKq1dhG5N9SYWVWX9YrU3OptXbuWmES73/bLwzxj1yZopaJZ1RpjNuuAgmC9dxbet348PZf1dd8Wotzb9XarTBnUwDtiBQvGgk4yxgGHwzhwpobWe5/inMdwWzdEg69vtioeEw6sW/NlxXnCw+zubisTQtfb56/ocDq+++nlp2q+bLbXGlrYy3q+vD4dv3uX/2bEAOED8623M3FDe+3wjmBCck49wWDeDLU31L1XrZuqmWG7XgrGGImoNHxAwZaMVdvebsW6labYYziy09ScWy1mRthB16Y39Wtpodr6+58c8JJ42KxJq2v
kSILaDXggD4FjFBwptN7/y58+fPjp/0GRZeJhTPP0MI0Pw0ja63lZ9vJ64XNf5fxyu8E6DAOym/dpyHGIvW4AptiDhtYhCTvVZltbAWAfn9LANnFcfLx5hylpbZevf+z9WwmhjyaPNMT0zcP3y+v5S/nJdtbWTYvD0vXWjbRLb0uAiCa12ra9MgeJ0ryrggBEASecKILkbl5qU28xojqadcWKyWIe1bqXpr11uLAQAKx2pabAOKAzAUMzUEJQdUVbl9s8jGiw7mcJaVmWmAciqm1db3WW4eXDl8u6zL+cVXW9rtfbWd6/3/e97PvtdpumqamGEFS1ahckkajqZqDaQwjueL0uAODu95V7ANBaQ8Te7e187r0jsaqt17dxSDlna3qa5uu+tq4AkFKoWWKMTLG1HnMCc+ueUzq/ve3FUhRgwaE26+bCSL32lFIIod+2xXoMLcoBOCCV2q7WMyMYBLcd6icrDxRmEdGMphUkG2ap1f3Wv5x//+MfPy9b2fq+9G3v2w7FYKN9L2uk+Z/94m9P+VtpN5qOu50prBSI+mvfCXAKIYQQGLoC3cVIVVpb1tYWNxQUc7zUQhJjlWTYArlab/u+l9rWzlpsDCkyqbckIiB2azeD5rAArtsGLNtWP7f109PxVyTIf/Gvx24NndBib8YMOY8xpSgp5QFBXl7f/vzpx58+fPjpx4/7vk9TCALs0UOs1i1wjNmgMzIjDIGTSDdDIslJxZ3ZjIlxSEk4iWQkxACSaIwxpcmInME6gJqCgRjnNMR0GMYcR4CwbOvr9TOgOYFDC9IR9+W26B5QgTxYVUYOHMfx8XD8Po+PRhjJp5QyDENIknJ30F73tS5laXst2o1ciBnQCRmwe0MCQFdTd0fiyGMUxtaxaW+FWjEtzQw7sNxXJUCphYVEwjBOW9lbqYb48vpVRIact20rpVxv1xijEJ/P51JKSomIWqtmpqop5Xmee++lFHdPKd1ut1LK/dyv66qq9xfAzC6Xy3K7OXjZt1b2Xus3794nEW3bYZq39cIMKcVxSAhdQgickTRFZiYws07MsZbL9e1T228OlQDBqJZ1vb4hqDBarwiKCAGFQUIe9q5r6YKAgRAK9CpOXs3QZIyeknUhGchBX77+l3/4L//h4+9frp/run19e7leqzUWjoGEKfzmF7/95vExBUkhNV2u+wtgu8MrwEnVAJBZEAMidu2m1mut267NTMG698Ztd92c3E3NVaE3773utTYl9hAJ0TgAcnXeSr9s/exYiAuQAUJre11uaJlgZHZp/VV8Aj9YFwYEa2qNaQAA7TpNU0ppvh2+fPpyeTu/fHz5i1+/j5ZSHErDtrtapRin4zS2m1cI0CPmGnCzXWpCHgkgjpzBAIxIFLyLkTURsDjvDMQtOmkGIAegSl7aeq7LsRzfUZopvz89ff46frq+gHDKPsR34B5FZQoRvu29m7QgOeZjHg+OcW83YbTF3CggkchSN+172ffr+a3WSkRhGA+neQ5kqkS0NaulS+yRYpI5cOjFNq0hcTQ1J1CutatXZGYetHnrZ+IY88zubn25ncven56ezuu56C6G5HZ9e+1uIYQ7mbmUcj/05+vlXnZUtxDCPc4xsxjjvu/X65XvxGyAZVnu2lJE3Pf9drsxM3fsvatqCJJSIjMWL9slZjbN1p0gMEVzBDLiuG5N+tcYY05HRIpp2MrXUs+wVGs9ShOhTnpb3kpdTsOU87Mb194kR+RRCLa2vJ3fhilPw0jxBGoghUh0VxoCy+xl8XVvt7p0fXvd/vxPf6x9zcORtxYBh5yQ+PHx4XA4Irrj/nndXi8/9ngbDjQyOK0cxvtmEOLpDhYkBtTie3dFRmGCqmrduSMrOGpxiwicU+/dDUNKMaFwwegqtruSVuPCyTLFxGNvbNiikPk0pCnnsZfIf/mvBnQWDEzRrdZatSMzBM78c/U6jsNwOJ5CjG4Fvc5DTimJkDYtS2PJMaHzlmJMIGWzYowgZBE
7AQBQIg4MkFNmCtbB1dypgu+6NwcFqYaNnSKNeUIkZz+MYUDqve1929fXRc8Gt2kK8zBlOcX4IDLn9JAPeZpOMc6nx+PDw3PKI0c1retlJYBACtSLbuC27+v5cml7BTD3Fmh7HnhmNtJtK+t56WULOBzy42E8THEUj9hItQMYA4EHQDdswmLNu1pOkZB6t4fHx8v1bRyjq5XlWmt9mI/M8k9/+sM8H6dxctNlWXrvIrHWptZPp9PtdhvH0R2u12sI4Z4M1NZqa2ZWSqm1Xi4XZgYARFyW5Xw+p5wvt9dat259yPnh+Ljvt1auoO308ACILCGG2NSHcZqnATrcrlcRiUEAK7EQRkEjFABmQhYPIaY8ioQoghwxxDQO8/Exz+8wz3HKrtrKJhBCyHFISEhBaJwgTxAHpRMznX//n//zn37fIL18vf3hT//04e0PQJpiIFB3f/fu/bvnR0F0wmW7/enj717f3gyAOQbIBs1wY45mDI7u3ru22vrem3ZtDUsTQAosDsGQAqUQhyiSRFiQqIMiOUYEVgidWJmgWyeWwJRQhMKQxkAJXVxDiOEwH8fhIGChVVXcohAxEqC2vi17lKTNJAR3jxJSHOf5eLs+1uvn27XkbGMc359+Gfj6slxIte6xmB7CBJKFRBgSexyVGa26gSlWtYVIpsR7rGbayBlRUUQimllDJwXzTrYsy9vL13h0iHlnD6fj94dv1nJLaQAsFC4RD9TJfeUg6N0MKIKydOPr7fXl5XNrW3RtHJNQCtb6BrAeT0MpMAwcmHKwGJWhUtsDrmTaam6IMFnynPMskdrOvRNzRVQRWdYSNAQJ/WeAOJiX+fDQekWHGPLb21stxQCZ+cuXL8IxpeEeu3/68vnx9DAM07ZtT+8eiWgaD+Mwv7299d5zzvfl8qp6j39qrf/blWVZcs7btt1joVoKIjKziLSytbJo7xBDCMmNydRMY0zgLHEMIITeG4QwupdSWo4pnt6Ped+bK1QkM0DvDmgSUxyOMQ1pnJCFSGJKe9+Z+TCfWi+9d4boQREZiCEKYCBgwEjj/D//x//4hw9fK602LENhtaJue22Hw+Hx4SgiTfttXT99+LHdSpaD3qwC32YNg3FEZkcyt90hIN49a6GukdgluDsiKamLhxhiHmKkEEHB71tRHQiQIDA56x42VqIh+cDaOgFHoMFyHnM72txbt+v2IQ0qXnVfAUhKqN4dEYYcAwYAEOJ1WXqOmDAlJvfjfJT5aF61uRIIhYfpueywX7+uuzL5PJzycGj95n42WTBo5DGDd8o785flz9BLDocU3HEHJ0YROaIbDpRW2LZ6A+NOqvgTejONaehYwzSF8E3us0QFbnu/an8Bp5xOpQSSvdZ9+xophtr87evtdi2hsBJYAks1UXHuDvFSJExI1qKkFLERI3NCTNHzQOUCr9fbOIXH+TuiUYIDB66IHYkVWht8IDxYdwlqXnvteRwcatnOh+m9K/TWmsN4nGtp5PR4enp9ff3++2/33i6XyzRNAIboUZJwlDFer1cRuRd/SinDMLy9vRFRCGHbthgjIt5/h5m3bXOAfVsCcTeMLGB+PZ/HxIGmNBxUtZbNahnHYRjGbdtSD6otTkNEVrMUDmRVArJnppAPUk1dO+GdCY0o4fnx+eH5u3iYOMZhethK37eVu8kwJozUwZpSHjwwULjvYzH7s6+ax+cffv23/+9/93/7+PIf8sBzHkrT8+W8l/aLX3xPqIjcQb98+bRe1tFjCGnvrV6gaR9NQm84XlIChWiFVNGNiEi835GZ5s7uXdzQUbpTG/Ipk69t66ZQeyePmCMyK5ZCTjgPAmuj0CWn3ny9lR41x3HM2UBUfd+/ipKXxq1uOWPk0QBVeh5TlDiOMwh++fJplevhdBzHPHBQDt6JiFRw3/ecc/J43ohhikEBoxM7Wmub0jmYduyWhHsXhDHGDV66vOaUQDACiAg5OjzV5lLWAGfD2hVWCqWe2+scwxSnYQg
YKA5xwLBwJFD0vX99+ezX1yGLWFyW6163qr11IDsQ5gFOQmmIXKww2zxFigEvdV2jOvUulXEzQGakEGQeh9pZl03fyjqXK8TBWYRJRCJBKQXQp/HU3fbr0ryBtXCHKLinFKPgst5UNVFIILfz5f3T+5dlkRjMbN0XEem911r1XtED2EtxRzNtrc3zfK9+llJijMxsZszcar3dbvM8l1L2fV+2G96JhyQG3k0D8zgNZe+BwrbcyrYys8QgwoSwnL8C+G7w8PRAgGg+RgYgRKLhJMEyQC2diVLOACCM6XDI0zHPDxQHawTWRATcW6/TfOLozdRReBhhTA6ETanU1z99/Mcvrwb+m1/+5u32Yb2da1u99uulvP/uYTwmMyXAclu3ax/4MGdW1QOn12277u4exlmJV2YI9GgCvWxGa0jdVEi1qFVwcDfELuBkjwGp96YdmpICQx2Zg/Ts0IGDD9OQpLuvpcZSm6EoAOiglm/bqixRhFAS/+ZfpL05Anpr7IxMqjrPw5gyh3w4HvZ9vazXqktptQMnSSHAdb8WNe3bupZKe23Ltn4e5nw4vsvxVNv+dv3p7fK2FW3qqUeoobTirgCVhUVSVDAXaiSYG0jRmnsXahx7TWZerWylrkVbrYbmbsCMIgn7OMrD0/zPTse/Wc7X8/mLtqV3ut28rL1rMG0z+qOMhzx2bU7daAWuQUb20Tz11kgxcZQ0dEM1AGZni1OfDxwJtlqAo6AouiOCI/TOYMjUenFvSZIwA5B1zykPIe/rNbAHQEbYt5u6YoSt7t9++12rdXt7Q+jTdIgxv75+zWM+Hh/Kvocg9wmveZ5vt1vvVkqNEs10XRYiMoVtX4kIAF5eXnpr7t5bcXcEU22Px8ccE1JTr2XfokiOaZymUouD3i4XLTWxu26mpdbFFYUiR+YoFHIMMQzJmFhwPBym+TAcHymMBqyqX96+nq+vhIWFGZmQRBJLJBGbB+UIEN0CO5L2/+8//N3/+vd/dyuXyLz2cllvr+e3h/HxN7/8PkUbx3kt+6fX11ZpzIccpKtXtx3rVnYUTTEwIkmn7A5KgA669mqldbdGuDeoqxm7IiYMJ54CoO4NDBxUsKURQ6QRB5QxTVPwHDqx2r7DukJdzEpjjKrYCry+3s7nbb81/vW/nAwpZOJI6oag05TGFJ6fT4giPJ8eTqq3razbWq7Xs+kqQntp5+vr+fr1ulzn+fj09ODIh+n5N7/858f5edvq+frl8+cf3160nKWuu1XAgrXurrTa3KpoCWGfoJA2vNayrRuaEhNqBFTTpg5GaAy1ay+we61VE85DPjwcfzPz979895u/+c3/WeLpp09/PN8+ag3rdXBSImGaZuaBRoFktDauu4IgRg85ppBHBTcCcCxVeycDoOBjwiHGKQ1qrs6RM6FnZwFLFCRILbWVOqSRmdbb0vcaGQPhul5QG6hb63EQtTqMMUYZc3p+et6WZduvOeeH4zdq/nb++v79t+7Ye7uXfQhRe9+2DZFEpGu9H3Qi2rb17jR677XuvTd0VW1uFd3nKQsbARzm+Xp5O83HyCzChFxLJXAEZfTWtjFO7lD2EkJMw0ghAHMKEmKIwpJC7Q1qG2IeDqeUBzUwMFdbbq/bdstpOh2fkRADe4wuAZghJMaM6/ny8Y+fL+efXs7/7p/+w+cvPzboBrq3PkT5zS9/GAeOEaz6x59ebtddIHIglLj22qAgkymACwdndqQi4kJEQO6szWvbew9gDdpa94qFZ8pjiNlJgNi9W0dWCIiRMSVFoRCZkBWomhW11vbbm7GGYUKMvdt1K7dLvXzW5VL5b/6bB46CzDFGQHMo4yDoJWaaDw+9yzxNMaRtrdq32i7nr0utrfX+tn66XF6HcRzTu0N+/NUv/uZp/uZhfhznqZT1erlt+9Us7a96W+t2a7pVJBKA5nG7wX6DbCNhhi5FpXavzYNDig9Cw3pr1o3QGI0x9qbEGEh8t23rKT384rt/9u7hu2+
f/+rXv/pX33/3l+fz8vL1z83WVgmcXSGgJslMsVFbdd97r73HkBMPKU4kBIDadN9r3YE1ZM5zPAYIFAanESwSgHRiwhRTDqmXbq3N4wgA58vXwPL+/VOKYdsX8J4Dr/stRZnnA6EKyzROzw/PkXhZryw9ShjT9Pr6EgeZp/l8uUiWbl1rN7N1XYloyAnclmXpqkPORPTx40cAa6323hChlMJoqs1NpzwGZtV6mGZB1ro/HKavL1+CUK0NkY7HubcdrRJi4HDXV4sIilCIQQTRmYUQEJyRwExSng5Hd9trd8AYk1pbtxtamB8eghAE9uMJY/LakRhJQG//4e/+3f/4H/7X3//0D79/+d1lvSyt7htMQ/jF99M4YJ6Cu376/PrTT5/3wkBiYMakAMV2dUNiROxeACsxETFzA2imgE6tQa8U1cndPcTOI2DMxHQvL3kHpQgUGZg8CMDASAkg1I7dQFUSh5HyHCimvVettl19eXPdQ7fO/+y/mYaIOQszInXkHjMcH0Pprzk9BD61Wphx38uyvmnzbl62bV22favrtZPnb9//IoXpcfr21z/89uHwnbus2/Xzl991veY0nd/2L1/q7dJquY5pOIWjeOxKdV8UyPKA4ZHlhBD6ZhmzoQR5FJr31hxgkFGQwHczH8IxxXnf2svLpzSN73/4zfvHwzzwfHicx++bruflT3U371ZrEcgpUMxBwlyrbWXbizVjIWEeDdD6rr1pE+8hGEQeA43esVTTFgYeJIQkU8iDste9Yu2DyLLezue3eZ4eH08xBWbu1oXF0cdxODwcXQWJAofT4RDQe9vRivVbIkbv27aMYyZ07erurnZnB93NfM55WRY32+uGQLXWWvd1XXrvzAQAXSu6mzlLYEQ3n/KUA3rbrJXz25dal3EY13Wd58NhHrZ1gdanIQuHbsaBgsTeLQgPKd4XOgNgTsOUBgzBWJiEQyC6V3jd1GMQAYlBZIxGiTDhmDEObS2EhWj++unt3/2nf//3H/7LbVvrtmMrx8Pw3btpnjTnZAhfXt5++nR+eV3Khg7IdCdpmoIDGrMKazcFHQgzMwQmZlftraq1jrtp68Q05CkxCWGKFAG9dlBDNhZkRAoBA5GN4i692V4ZTEYOM8QxItPaam29LE3fWAtt2nff+Lf/6uDYHQ2wi0BKItE4V8S+7fsQnlvpZb+6oxuCoUHrtda1aZvA4vVyzVN8fv4m0/Du+YfD6fF6O396+fHL6z8OWQ9j7sDLddfdhkSH6IcxTDIWh7fm67Y5QMiPw/AsMup2rXpWTHdaAnLI+fQwv5/GUcgRRMHdIthwPr99Pv+5tnbID2k4nJfzui9qrrYVPdcduyFTmIcg5CKRYlr2basFlczdgZraVjatvVVEwBAiU1DtrXpvoLUzB44phFCs9d572am3upd93+Z5fnp4ioRkbmoI6OAi4TCfmEVY0D3FfJgm7bvbjr5TXwlo229ECEZdtZROIKVqraX1sqwLsxDitm29964NgVprtZa9rKqdGB2stdZbJ0IAQEYRHwOhbvt6aXXTvhEYIY3ThOQOmofBqgKZROlaVXtO2bq79SDBiViCSCIKgHg4PHAUBCSOHIe97OuymSGSD4linpkTxICM6ghpkCR+eVs+nl/229v5djmXdr7t69d5pPfPeUguEhzH27q8nc+v594q9U1DiDEkkdytAAlTRtpTlBAj+oAYlJuDuaO772VppXnFdd3UDMkQOYQYQ2DvaGbeHRtId9AOqA6gdO8LU+AwICcgNkLcdN1KvV379lb3t46sOK35QKKFnONmXSIl0hQ8RGjaAgTV68vbP4ofazXA9PD4zGLLy+pGtVvduwgh4T/+4z/keBr+8nC+vRJRLev59rlBeRznhPjd+6C38gY1SmJHx33k+mD0xfC2d7by8IAhjhHI4u3z+VMKSqreVVUPcjiFBwoQQIhue4d938Ecgr+9nv+n//nffvrpw9/85d88nJ5LvXlv7x++F7b
f1a/XdZWgte4u015rmE7fPf2VfvkHYp1iru3qHrQBURon7FXdoRHU3vtewA2JVBd3Q20EgToMhM4ecnoeRiIiTCn45fxKBJKkao0pD9MMTrVfsTcENF3cm4AJA3LaSxVWBTi/fZlODxzGVmoFu13PTds0TeM43m6X221NKfXWGOV2u5SyM7Nq6//1Q0wA2HVjiyiAsEYJmEZwvZyXVjYEPh6PpW5MMD9+Iw+8bueuysy91LItEsfe6/nyMuoDEQVJzdQQvWoQnh8eMA23W/1ZhxdlzANaIWCXkRwAK+dRW0XzVuE///i7//zH313OX4JvcYD30+M4BUJFK97C6/r6tr3tvXEWKs5oMVFIkQSxu9be3DEgSiBSTurMZQ+qdcyZcRQcR5EywBGRXFiU3PrWrJtaiikkxu7VSYkDSOzOkkYOIYQwjZiEhjgEyWZwqvVyW9dY7GjwqxAixEkxR5EWsTb0UHondnYnJndxcjS+3T5EvIX4DpBRrZlFFgvzjkW9ecPDMcVIv//j33PEOMj1ujTdr8ur2q6aXOaI9XGa8EGsKOLcal/wMiT5DvEfil8g/Da+e//0PQG+rMtyPZVlq9BrU0ocaNi5zsc58FHMToN3xLdbk/iznOZ/+Y//9t//03//7em7d+8P0zzEYOPh6fiNty+YbEHvADTEoamlcPru3V8v28cYXDgxhUjezAOGQvte1s16jEQOWJuzK7fmFaEHzRNHUAJmQvHmBIRuy7KrWwjxfHnjEE6n0zzPX7582Zfz8/Nzq5v16xAzUSSSFI9Dr8XK63Wp9cY7JfcCvTnst5t6F8Z9z8t16b0DHEopAHBvBpt3d7/LRVU1xaFqzTGeDsdMOiUnhMv5Ukphjs6uvV8vr2Y9IGmpFNNIB+01EhJRaU2iEZG2fru+AXhASsMQODt0wqSq7J7Gwd2vt8telhSGMQ6uigwmrChRTcpmRT/99PKPv//Dv/1f/ofP2z+lsOVjGokVi1k/v/p2ey2Ehd2UI5FmceHjw3ycD1FQm/TmEsN4muKQVFtkUfC9ljHy0+MxholgCj4hUgoZNfa2l/3r+fVr60WNDodpGIZe27pvZkYSRSLFMAzjcZpTZEYMaWSJbmxmZblps9qLEROiIFAGeUiZXHCtlwUrhbqVuFkAKKBJAqEt24soGYyIuO/A4DlGmwOHtRUlsGmYFervfvx7MD0dnoHref1z25eVVxXGygn5IQ89ejVfFlezCT1wT+I4zO+PP/zyu79FhH67Xb67BNGOAABgS0lEQVT+E5S3rrrfFt0ihmAqb5dlPE3T1MwuFDgmWrYtppBGlnD48PLh7z/9j19exl/86vt3D78iHh7mR1LfzgvIXvorQwoZ976H+G4OgPpVJLqVkGIv1ZuGzBYI+9KtUYgBsYt1hPVWtl6ZUk/TAQfBjOzIyMzEuC17EHn7+uoIv/n1Lxzs86efzudzoppj2Jer5HGIJyIKg6iTN9tLMYp7aWa6b0uxrTb1os2txHI+vy7XdRjG8/l8l76ZWe9drREBEbmhu3fdmfH58fn9w4FV0evXt4/Lvnzz7j3a075e2ragtxSE3fq+ME8cYwixt8rR1ay0mkLUqs127AZ1f//DD301fB6Oz0/rbb2+vSkkraX3KkFEMmEGVqhXPnyPcdCy4Nevnz98+fH169flRr1PdEjDQNRduzeoLaxr1RLikMbp4Na1sM9TCvx0fDodpiiBgYVzGsd5nnIeOiBoL3VHoCnlwzSnlERiCCHHlHIwZ93s9fJ6vr0ty5mFpnkOIUKTdV2v25UF53EaUg6Rh3wQlG4G7CyGQOjEHUopi66ttQhDkDlPJt9MgwCOXPpbX9TcfOttJzXAQkuSrI3W8sUwtY6MlAMNh5lG5cV8qxyAsz2MJ07Hpa5ff3x1XBgqIBbdE788y+PeFTxkpN6vYGI03GqP0Z8OPL5/fvfdt+/mZ63bS5pQiUwD4PvDg/LgRjkcO9TWNA5DqRq1HEiLkXd
TKTnQdw/HV6uvX88GfwSgIR+j8DE3WwV7NNzL+mdM33SUbnXOT3urHV7zwN6n1vZqb0FO38xDh1xu5fV6u6KySqTQwPetTzGo8HR8OnLSuo1pDCFeL69VOzFRgF/88BdJ5r//x/8foDPC8fQEhjGmlBIykcQYo7sbQRpiHHKE9PL69WVZ9+prre2mwCoD91styx4Q9roBA0ojYEBV7+jBWm+9IQNzejo9nA6nyJG4rLdL3X2eZ3eY56NDM28EnEnQDRHZLXkCBjN1RyJqffPSjsc5Vu/1usr19bM/Pv2q92pmKQ29mxjV+7pOMyAOORBnCGhxcHqmjA0//Pj69eN5fRjnf/m3//q6vqoWB+UwEIay6/68EZEwe6u9F3WQu6B+HlOIpVQJ6XA4HKZ5HsYghOZIgCDIIUS+d8cBLOccY8w5M3Mf+zD4YeRtG0XC8XgMIQDQXUvbVVNKQ45EVGvt3QAYAO6Tk4zi7rwL71y5EtEwUB4mYdRo29NsG4TrpwYUI4mrAql7uW6FenJQ82qKTqQwCvmQJFDkzEbogb/77i9++5t/4dB/96c//u4P/2ldtxhDNAaXLXYiGQ8HYdbFtO+3a8GIA1uY0zCBYNWit5u+fN3Pb1sm7AQt1dPTI0B0tMN4XPFq2sWjEx6OMsxBDW7tuvaLKU/TgQHbevn65eXdczTpgnEYpnLukQlDX8pHJEUPFh6m/Mttxb28DQMGTaDJveR8jHnWrOpc3m5OHDlGh5CBcMghO0JrLVJQ8Ov5lbodj0dv9eH9Y2D53e//YVuXb755yjHGJGaW05ziJEGQIefcWnfuTpKAjkdsap1Yz1vd+62sQF7WpqoEuO1LqTWNgyspOHESv58GJZQgPAxpnufW2qfzJ2EQkTiMgfB2/hJFhLPEptYVgRkUStRAwZFloAl52vRCiEDdwJ+evvny+imiR0Ltt4ynvl0gTGPApexEg5GA7m3bPA8+jBjFVBk2LM2UptPDQ6U15DFPQxrVyjRNz0/v5/lUS7+9nUutKNxaO1/fVDWIJBFg8q7koGDay15Q2y4k8zCmlJxcBImg96aqd73TOI6qervd7uKo4/E4TVNrnTnEmHvvRJRzFqG7oPDecTeDGGMIQTgQAziaWQhBRGJriJhzFhFZvaOo0BLjYAZ7aRpMQhpg5BBXrWsBM1Q3RATG4ma2JJQgQuwsJU/Hv/5n//u//ct/FbCf5ue3t7c/vt4cJEYBZ115eJzmMMaGOhy/fLXrstlowC7BHNt6e/3Mf/50Pf/uw3/+8PEP748Pcci1L7uc56e89gtVdOxuu1uVEJ6evpnyadvrT68/gbeIumxvOWQ1KPt5ucLxeDDGMSU+ZuwWmLReq73mPPV9Ps3fH8f55fqf9rKinrz7OLVxGMZpfis3chYKBhghHY9Ty9bWHjxoR0cjx2XdUhoPT0evKxK7448//f58/TLPT6fT8yGl18tHbTqNj4gcY0QmVYsxqoI6MUNKaZ7nDl671VpD5GVdL5dLCCml0Lq5IxqDUxoGQ5CUEBHU0IHAx3EMQnW5fn39mOJwOJyQYdmKo3R0IgQnpNAc2AG0dt23AllG4QAA0+FYezTr2K2a/vCL31yWC8RxOjzOp0cP47Z5NVi6RCGJOTEyuDFwTB4HqpvjDTt0nHKGp0O367IUzyFN09Pj4+Pz0/vj8aGU8iZJRCSGrdmyba4K3oBcVdfbsm1ba9VAl8t5W3Z0T8N4PB7HcRjGdG+BxxiPx+PxeMx53LblfvpTyncBee+uqvu+E1GMUuvP8xWltFK2WquIIHrOERG3dSe+k1GEiObDiMAAQETyejU7DdD34tc04vULeh8zKnAMPjymmcq21gKI7uimxWo3NCfLu0h1s8d5+v6HX75/+t7r7XFY3p2++RD+0NumOnYmBgxIQH7z2tp+W7dtb9Rh1TaRt3l5ffv4urRP1w9fyx9popuW3CTkcS8Ay2pha60hU9+uSPXx9BxtOMQppeHz9Us
rVai9e39Sq73Svu2O19LYhBM/5DF4927eq75evmD48MN339ayhCSPj7/6+voTEaIeXRc3qRug876vt+XrMAyCMXKWKCOKhHGUOXZAwHenIQjclusYcm310+cPEuDd8w8ppWkarKs7xjwwM5OEEIEAwBGBQAAdAptZSimUPY8p7iWX1LRqr0QEnlVVFcBxGMZxOnAMAGCtIyIhklvKUVtzLYwEhtu25YSBcHz/Q2Qo61licGsssW51CuzQLucr0gOPMxgDYErZDafH0bxwmn7x/rvI8fj0DPOInUOwvSNA6/sS5mG6q9NhcENHIAHfVDWQjBhacUcrwpDilPOAiLflspe1lNKaniKnFMZDOulkqswYQmhNt21b13Xf91r3Wuu6ruu63geAkNy9qRIzs2CMMcbIjHdb3nsXkfu5N3NV7b0y876buztC711bR8SUooi4+77vjrspBOS7yjCEEFjcvaszs9wWrQBpyBVbOrSpxHJV0hASCyUhHgYtVgDIHcH/K4a+165LHt0tHudv37375jgdb9y+nD9HgtMhvl02V2zcQyQAi0OqGZa+rXItgZMPrfN29Tj11/GCUpfzn9r2JUxIpE3XEI4yTo5A1i7bTzmMu+v58qKdjuHJ2mrsDj2MIAYi5NBT0DQdum7qO9C06J7RDWEvbS9Yy3j9ch3C6+P0bLVDGKbh/Xa7kotrsjXdoPpOiZ8fx+fjcDwlGSQhJo8poGTgQCnlyfvt9etPU0611s9fX8djen76vt5qTE2tlKqARJiG8XBXVWntEmAcQu1uvSO5eUMEIspxGEc7v93cFZEQubbSWhMOIpKGfDgccs6muq4rEsUQ0FStq/rlckkhhzhNhwGhN9t779Bwyo9L+RwE2QkhOZEpmOr17TWyME8sgcgBvHkfYnQmkfj0/j2Ms4KyKbFybVya1rKca35+DodMZtCuRNUkosZyWy/ruW67t2rqMaZ5npnDtu8vLy+qSkQxxhAZC5M2dxeReTid5gMw7Pu+rnvv3V3dvZT29vZWyobIKQmS39P9UvaXl5f7VvC7LBzAW7tjMvT+YebW2p0ZY44ANI4zoiOiuwFg750phEgphRDCkDIiqmqt1R0xRln3dtvq8YHDNCeuT4/96q1ssK0hHrMHlvGIu6Ov4GpOKcRuaoplj13b4ci/+e4vvp1/IMC3df3zy+92+/L49PB6vuzrxnbAKQNGQp5Sqo9Do9MBMhTGtmMaFh3i3hHeynomVhBVZu+q1JEBkYMoWb3t2+60d/nzh5dtKU/HMTHVSGkYkwTCHjEYdCICMFUnOPZ2RA/S+cAyH4fHoW/jhUrWhSJN3EycYp8VwLlOMqkTpDq+V/OYOQkrMlrFpTVoGzHREPdW2u2KGLa1XW5fhykdT/Oy7gQY49wUz9cPrV5Px29yzq3t7r6XdQ6jK7l37R3uwQzYGNnMB4nTPF4ut3UrLFm11bpDGty91rqWHRGtNkHq7rU1b52lb/suMT49ProTOJStEMD29mU+jHl4RBvDwMioe40cJBBCqmXbtx5yS0EYwdFSICJ6eDrGcbqtZRwShaAVFNC9B5SUufRrr2vkI46hobGjrjVYULB1b3UzZg55CCGM4+zut9ttX4sIIVLvpWyblrJvpZky8+n02L75fpyiu/VeWi8p3aetnEXKvm5bMTNwJMLea9nb5fzp69eXcZzGccw59d6XZbmLw+8OAQBU7wBtAqB7iM/MITARmUHv/WfYNbqIIHKt+72+TCSIKF+/3oSCNXrWOCY2Rj7Ap7psm5+O0zAcRPKS6PVtA1AkRG8hCEqsTcr+9d379HB48N7Odfnppw8fv/6hlJ8CTyK4raubOMq3D9+ElMOI3+UQJS2tRAi66w5COXatqIUjxzAjJet98/1r/wKND/iw1627Vey4DSd61NDdGfQphDDEFPMI4G47AHCQGIVIoCFUMg7mOEwTWyqtb/uZx6M10V1jGpMHAcfsi5emIWicYoz5cZiyiFj
xqrVp3Wwt66LdMYy9eauVw7htpaw3hkIctAc3F2GgBNDPt9fHwywpGOg9c+29u4K7r+ta655SQgcCS0wFnall4efnR/vytiwLQicC09Z6bVVvt9tyuRLiHetXSkkscwxMNs7zbbnkmNzRwXLObTk77Ov+GiklnFwAIjCEGIJ7GIZRrbr3XrfpeBQeHLQq7Ht5/u5YGpBHcClWbudbMw/dgTRnQmvUAR4fMYiVgpev17eXt1b22rWDmiE6gLVWAMCsE/1MV1e3fdkcbN0LAKSUzueve13ncULErvV+kXABQOKwbdv5fN73GkMexgRgRHwP692993a70X00dBzHlFJK6R7kpJRSynfY8J2e9HPu+1/15/+VrIH3AeumvWz7PXUuZZNitSnYBvKCj3PqcdutHA68ewHCObxrBBd5W88b9BYTVg4se6LsCoDmhJ/PL398/bM4f/7zn14+fLR4zqGE1LddO657sY+XkMfv3g0PyLGM7e364bYv7EgMU0SOWrcaDNiFQtjbHhV2XdiQIQHLCZ9OCBJikDmO05yOGUYQcEJqTd0cn2JiJoFijJR5CgkULXFMcVIs27btAQAscm7WTEHYAyGiZHAsSuaRMOXAbtw78x2ZCGbGRBQYyAlCHvh2fdOqxMw+MIzo6NaYcxzkxw//BEDj+E47uHsIYd1uYF7rbp3dW+urBEBMRARMwCqCrbV1W1Lm2nbr5g73OTBGIRJwAzAR6W7zNEXCvrcQ6e31SyQ9HcdWVVtxQyC3xkvbeGZ1jS4pDa21pj1OSYTdpJtr11J7OEpOg1Iuzd4+/enwza+MB2pbasvNO7ZqvTrAEA8UUf0C9Ez4nmIvsP50+dOHjx9fthWMmCnGxMzbtgLAVnYgdIR1K04oAZ2AhpDTdEiDlbbdlvW2AICIDMNAKL0XVSMiMyNCRHdoSMwUUjrknFury7LcbjdVnedZREopd2BMjOneLXE35gCE3Xrdt1K2EALiz6Okvddta73fl5F77/1+VytVRMQJuxk2PveuCnFUHphStL65Ve27NUagteyo3Sx1LhLIpLHZu6fHh+F4uXz4w4e/Iw1/+vwPvfu69oW+hpBMem/FXT+9WMJEFnmA7ky7UPGYD2M6HNMhi/i8gZfMiTntqfsBAY0kCk/iEpHFAzpwkGGYpvFwHzNf9qW4JZN5nmOM1+t12+pSa0klpMiMNCLaGqPkHO/qYhahBvu+d0VAAatmvauLoCIty3JvwcYoImIGAPchhJGZCbn2HkKgw6ydJ+YU4r5up4fj48PjH/74T58/vx4OP/9t6ECIl8tlGjIjllKjUJMMLuYWQrhpAegiMkyRXrVthZGqdgEUYkJ3oceH5yhUywZgZjbEvK9ba6vberu9ffvu27Lr7fJlSnlZrsQRSYWE0Ep5pXhC4Bxjc2s/V/3moEaZeu/7esvj+4EBYk5xZhKI7P0AAz2IlkUBViCMh6MksYho4E5AE53ep+my+uvr5dN+23POz09PIUBrjZlFQkrZECR54GkOiQUL9DTkKEFjhUp3KME4jilERARAZr+jfg6HQ7c71Bp7bSmlaZpqrSnle7h/r4qqain1HiUCwJ2dEXMKIdRaS6m9NiLKMczzPM6Hw+GbdSvL7WJmzAiEZnovQy3bIqiCFPdWFN0WeCAZJ+/UgowIvGuJMv3w9P7wr/4PRBQpFEchUOgk/PR4eHyaD1PibXu77K7jafx1KyTc3h0ffzmnunuKMVBIQQaZchi7tcdv3iFADvk4n4Y8hhBcobUmSIgMdyEUtKq9FDCDLCwYuxZVRYfeCrirNtSeOYyHMedcay2t1t7U7e1yzjkPQ8op3QNBRBaRbduwNUJnvqPCupqVuoOjMGuvvffbenP31Ia7CalNa3O1moeo1s01plFZmkUnPS/L82nOafiHf/wP19vrPL4fhyhC4BEwnS+v23ZlalEoyKmZMgckQXPt7go/I/BjSimdL0vrQAwhxTTkcTiOKQ5xyINIgHV
dg2QD6r1fb1/39TKmlPP49nY5jBOAu7aH00kClK3GNJay9VLTNKkbmbdW91qt1jjkYZy5sbr1fQshxjTEYYY42vGEzrTl1oUHF6kckMIMMUMgw0TIDhZippSPD08AcM1vEjmn8Y60IAzMEmMUEWBi8UFCongAMiETwsBD41RbjDHH4b+eYGIGEUkSOAYiaq25OwDcI/gQMIQUQsg5hxBKKdfrFZC113sefMe+030fGyIRioi3XgmN8HA6DMfjsXvbH9d1ZUF3u5u5Xpu2LthBe0tZGA3Jd9+t2Gl4GvLzw+H5af7+ND4+jPNdph0kCRGA7V3VIceQssjA3drTCN8//eLt+Yd1+6s4xEA8SFZA7hgCd9+GYTpMRwBwkMQkZswYkoSQwKW1VrWqNWu91o4oCrZib60JA3l3ImRw6LVrbatZJ6IhD5Jk39fr9brsi4FL4GOe71ie+5DhnS1lZtu2qSqLCLOqruu6lwJM8zC6u2BwRgoCiBikuXnvjo6C6t5MhWOIA5i3ps5h3RdC22v9+PFT12U+HrZrTZ3ccZ6GUspPP/0UEwNxM22mgBxiAAD17uYA4L2Bee8WYxymqV5vYOCueZgen57nw9S1Lsvdd0uMqdzWZb+2pozycDxZNxGmIFrL48NTCrKs5yhDzLOq1m6PMSCA1m6miKja1msFtXE+EJAieCbS4q3dSZKI2YZjDqNbdUdnAQ4GiBAQmnvHsq3ns2kZs6Tnd6dpvn9rrXUJnHJAYDC3rnXdqrdbpBxi37ojz09THoYYs1EgBySy3kspROTA61Y0erCfIQD3cdA7MM/cY4wp5+PpFEO4g5VUVb3fc+JlWa7X6+vrq5mJSM7pdDrFwzSOY8ihu/VtJ6IgcphHdSvlDqHpIclDeJQA1r2Cg0xBskOAx9P3vzj++vn03cPpaR4Pc5pQJISQUoo5pRA5iXdkRSIzMHcovfVeetifhNze033LBnmzXTzkHCVTpBg4Eom6oVvXqqqGwDGIiChll/u+SoXNHYRkgBBYVNW7c5REd1ha27ata53nWUKqtVprvTUzA8IQwmGcmPl++q/XK4uklO5tlNvttpRd9d6J9JBiDtkJjdBZ1BURkURCIhJwZca7LhcACAWAat1ZvLXarYfg1/Wm3nIet7U0KzEeA8fa9rLchiGP4xgiC4dtr+M45pz3fW+tEjGi36PSXmopW4hy/17TECXFZn69rUTdEUtpzGwK3ru7O8BhnpmCmQHA2/n8fDoG5tY6o+SYt3Wv3VIKpdQcE6cIvTNhCPFuI+8JIrKgDFmiJ4Ixa2ltPYd8cI4YBoBEAO5AaHD/7wHfPn/58NNPy7rWVureHEqpuzsw0ziO0zTdQ3kzU2tYHCFuxX789JPX/iv7dX6XICqadTN3b6YYBMxbrcyMRIC4bdvlclnX1czuBdDD08P8eBrm6R4mEVFkcWJKQ2ttGIZ5nu+H8x4O5ZyHYRjmnxPlZVlur5ckAYVFpFu/J+uSIgUREZmOKWNQ6pJxHPPzwzffH3/77eGHbx6exmkCBmA01OZuW1NvKTwBgHkDckysBVophB3dU8g5Juu9bLt7U7UkEdEl4pxHcjDrjJ6TlGZOZO6AqGYB75s9PaWBg4zT5Nb3fe8CNIKqtqp7q621Uja15mZMwRVulwXIg8g0Tc5UWh3HERHvdkJCMP/ZOZq7A/Ter9erqo7zdO+oC7G67bWodyLKw0AkQcYYE9HPG5/umdO9zQ4AMcZWOcbo1m/LBzQ3TWZ2PD5Mh2PvvSzLYRopZ1UFcyJCIO3VId59kaohBgrRtjWItNa2UoU4yDgfH2Mar7e1NJ/nGchfPn8BoKenhynlXoxFa+vb5ofD8e3tNScZUj6/vrHE58dj2+tW95CAQ+putdZ5Pq6+qdr9yTDz3ZBpb0ce4mH2p3cQjpJG62C6kXfz7AgCho6A4OCICB5EwvXt/KfPH1NKgeTTx4/X88t4mE+nRzNY1/U+zp9SGlMmAgamEH7
13S9L03E8IgdVdfuZCRCIJEittbu7+zAM4zjGGO8v9rZt7t7dcs7H45GDvF7Oddu1d3fPOQeUu1Fj5ufn5+fn5/sbYgpqLZEAeFv3223Zeo0sd/Cjuw0pE+Kch/vt8m/+zX+r1Q3AuZjVh/z+2+dfzfNRhEGsE5h1uROh3K3obUG+WW0NkuQ2E4i5jSlljobm2upevBftlljUAAG09X2vDI4ELIRukQMRCBkAiBCimxmim2kKmZnd3QwILcYIALXtcIVtW15fX2svp9NpmqbWWt1biAwACt5aE0YH3Xp1VVYhZhEhortjJaJxmn49TYoaQkhxYOZSynK91d7yMB2Px+PhMOQJkRH/t7sQwBHpji8nYFNww969rtu2bTlEZBaRw/w05MnKyszDMLRSHbS1EqOkYXSFXhURQ4jbWhS7iDAFRooxXtaCAEMeYphYJgBwVEBZlsvL61dtZmb4+DAOoTZar+fjMC23cwj27uGBwHtvvXfBB0rptp3ruqXIyLFzaNqnabpc3mptIQgi1n0jgCApJMHHRxueencIzA/vvRbVS/ANBJQFCBkQAADQEYb56Ye/+Kvp6VHb/vblM6AZyOvr6735lVLKOQNASjHGSErDGPM4sIwpSKnn5fNLN5zykFLqqkjOyvu+q2rwVMumvZZSgsgP33/PzK33Uso4juiwLevXr1/X642ZY4x4nxl2v8vgAODeLAMAB7Xed29EUGu33sQBQdEdeq+lLOe3+2G4R1ny3/6L/447N7OPbx+uy9scxyEkJlPRogUNzawphhQBbd2X3is6uHu0IVjJA4ZJpukYYyxlaVWZEpDXxgQIbmDo7tfbjRBzjhhC3TVQAAZ3c/dScNvKfR0ii4BTV1XVvZR1Xd1dAsUY0jDOalupsG15msfDoe0lBQOwtZU77VOAtr2KCJLsrW61iEhKiZmBKY2DpEjws8wY0Hr32+12N11Dmsd8GIfT3VL2Wksp1vvdaqq2u4KFCFGb1t6qNtMhnxJLTtM4DsfjwbT1Zt379Xp9OJ62zRU6IhKRSCi13p/7MAx763vbkHycwrzN161qh8Ph4MRvb5emVUQGd5Ewj8fb7WbmvVeLUNYyxNHMtu0yDTEKg+vp9PDy8qVqJ2Qk76VYbZ6k6M47Pzw8mM2IyCz3KALACVr3HqcnGL4FxW6KvHBg6qRvZwpXPj05j3f8BDgDAk6nH/76+Av0fn75u3//b0/lgDO//NS/fPrUWnv37pkhbNtW3oq7PuTjkz9d1mXdF2E2s+v1HGP87a/+ZjxNt22t2kII5JgkTNE+fFjWdQOAw+EgQiGMhI5g67L01pCIAccp3wEZ276UHe8J3r0NDOZl2xFBYiSAvW69NQBAogiUQmIiALDeLm/ndV2BcLhLrb779ld979u+bLW06szQbLdi0pE5oAMiOmHblNDd3UvpgGY29ZoQLYsb1H0Db9AbIqaUYoyqQ+8dFDqqK8CyaHfhjIgkAQkcoRbV3pm51Oru83gIIRTd7xCEfa91b9u2EcPp8RiDDCke58M9XGTUNA3etda+9K1pFaIkwQmaVjC/t59qrXcByc8sTtV930opYM4h7LXvtdy9thCnkMZhzinXthu4mQPgvR2zLLfWKjOtt8u+XRmcg0RPSeR0mMHA3dZlQcSAdl9PrWq1tiFnAGkVMFopJQ1ZtasZgAOauzoUDjROMxhO07g2//Tl815u29pzzv/8b/7mm2++6c3WdRFqtfgcaZrGWqsp5JBr0ZzCOA61rNu6S0wp52kQ2xUDVO37tpQckcDU8jiSMAvHIaeQRszaKuAN+BhwgL7orV8/v9a2BPa0run5EYcHAEYABEMkQHUQOb3/9d/+67fbdj7/0xiGX3zzAxENQ4ox9vFwu11LKRZpb8Waln3fEUnYHZdl+fz15T3DttzWdb2b851546Bgd4XPuq632+1eDOUgYC4id1UIupdtiyKltX1dt1IAYJ5n93BPbdUa7QKEZV/P1ze3nyW
ftMA4zCklBRum8S4vvdeOpNfFwAA0h/h8enTvCtW8t0altLtzERGzn8vJpFq1Q1cbhiBpiAmaNm7WooQUUrrbGLNYS1c3cf2ZeW+WUwIndzT1VjZXCyljFHJQVQjsCOu69t6JwKFxhGM6MLMIrcv1/iMWZ7e27h323vuupq0zOJIreCtdWxckF3I1EWmt3TWD9+b53lavHR37tjUHbSZCgOZoElIIgRiggTuo9t47EPTavKtIUFcQdiD3XmsjkGGcat17be5OIOOUxpxrXZv127qAYZIMjKXeBKNpabuKRFUls8zBUHdkBAXUYZolstdWSllK7cUur58fjsd3796XVvdtQa3hMfEUwKjXfu/5I2JX9K3mPNaiijsTjeORkrvDGGZHa92YGd1UPR9y7725f/P4vj8+0eGgDWz9kmICnlHb7376fevLLw9P0BwZhjgZBUQAJ8eOIAgGQKd3v/nV9x/Wl8vKhQgAIIowM4AdhlFVb3vZthszz8dHpiABWquX69tWVtWHh8OJgVqv5r3stUpPIYqEbdve3t4ul9sdFnY6HXrv1+sC9JOZ7fvmrofDQUR6tZTS4XDovX/9+nXbF3eVGIQCorempTYiarVuy66tbPM6TYck4W4KSbW11kxl33cAQIKHhwPzg0Lvvar6uq73ROSuGr2Tye6tBEbqvevtdpoPQjwMAwcJLCFFjgER75psBwVwQAREEQmIDHjdbuuy30ni8/Ewp9CWVWsbhiGEIEj35B0RYQUEzTkjOpK3qq1305+vrGUvpZRSkCmEkEOs5kAoziIYIhMgRkQS894cDKHWvpW1tyJIROjIQUjI3B2JDofD4TilHO5p7n9dWqEhRTPr7oTYuxLwmIey3DrYlGTbL4SIADmkw5DmKZ+vX5dl++bxW9Btq5cHOfbW2rZiTvM4bqUAYcq5lMqmElKyLlIINnc3RfCAIGwcx9HdP335omoiIY8DoFp3SUl7k0SIGNMhp3y5vqUAgN1Ybbc85KZ9HIdeO3FgIqLQexOhprVu9Xg8RuHqLR6/NfkmQGr9y/WnjzKtnYwwbKuepXhOoWhfdzlkd0REdHQEdAIEAHv67hfHP/w+W9v3vfUiSQLhVpsDDOOIMRFzTiHGaI7ujmVNZQjjUFrV+0JZpBjiNIYY872Mc7m8tab3+k8IIcYYY17X/evXr9freV3XEKRs9R5lHA5zzkmtff7y8cOHD7W14/H4q1/+8jiNSdJxmmOMwLSuq3tS1XW97SRDTPN8TGlIw2RmAoSMFKOkNIjI/W1uTS+3a2vtTvS+TxJs2wLgIeQc0050O18+f30Zx1FiCPdMSYM3A7x3p52ZBVHNwF0BWmvXfS+lqGmt+7qtHEhB18s1SQCwrW5jTDEld9y2bV33OyCWCLrWvfb7Q7u/Xa0VZMIooEoAMeeErKrMGAOHIOiOQFU7gKRg5kgk45jrtiNiHGOQVEq5XC6llBDSfBhzTogownciuZnfZ5FcLWe9j+T23sk9xijuWlZJwIBR0rdP76LQut6W29VNBGNt5+PpAITLcu5tSfTe3T3GZd+ZQmDpiijONSYJOcq27Yh13z3HFCWoA5AT/py+EyFAH/LEBq31eZ6IiFNetttWbnk49G1FxBxD2ZacH10dmfa+jGFkzgSo2ogQTLWXFDNxUKioBZDD83eKQ1le+rq9ff26llWnSZDSEJnxLihwAARGb45813fn6WmtbVtec857LV+Xi4gQ8TQdnt99gw6X61trRVVrb0wy5qmVXrZtcRjyRCQphofTUwwB4Wfx8/F4/BkdiQgA7ioip9Pp/fv3y7K0dhex0TzP83FGRED0jjHkPEzX2+evX98O8xxjfH54PJ1OMcZSyuFwqHV/fX39+vWNiFpSdTydHqYp0/+/qPfosS3L0sO2d8deFz6ey8yq7OousruglghoJHDCgQBJ1IASQOj36B9IP0AjzTXhWKCaZFNsVBcrsyrtM/FeRFx3/Nl+a3Bele4oJhG
Be+8+a629PocQkVwso9ifsDS0nOWiKDDGWuvz+bz8V8ZYCEFgKhn3MSSGD81ZG/Pw9FgWhRBiWWNBCFNKEH6OslkeBuf9OI4xxkwpIQSE0DnXtW1smuWeOg5DPwycsbKqQghN08zzjDG2VsfkjdFG+xQjBtAyCuHy3iFCKEWfIAAxLobCAENMMYCRMoExifMcgk8pCq5W5QrC5GIAAECYnI8dhDGCokLr9brIKwBACCHGoLW21lBKsyyjmEwpEYIjSIRgnZLWmiDgrAcQSkZRSHVdAhxn72KCIQTOVQAGYCBYNU/eOo0AcNYSSgEA1lqYfKbyBUANPhKES6WsGeZxmExMKMQUrTUQBO/jOA7e+xj9zS6nFHutpRAUU0Rw1zcpWoRQDHicjLPm/ua2t3YeR5SQBw4Br6MD0NX5BhDqjKvKkjA8+bhmBWA0JWemTy7SbH3LVgSeOgD++PaHP4oY6rIANEGOQUIAAggAgCAaHSnAMAcR8EzdvHn59psxJDhbs98/x+RX9UZlRUqJM17XZYxRa30+t9776Lw3emw7AuusYllZcC7/PGAvdS3LMiklQkhr/XlIiREhtFpV63Xtve/7LiVwdXW1Xm+dc8PQTdOEEMaUMsabpvn46Xl/OGy326urq6qqCCFCKC7yskyMKmPMcj07n8/jOFJK8b/+V//94kBvFx+m5TJlDEJoGUWW5BIA4pI2zCkDAPgYGOeMUARhjHGZKbz/rHVYLAy01sYYp00EKcWojXHOBe+Xi2mI0Wnjg0eMAgSddYs2YhxHa60PFiKAMPQueBcggowSzgXjXAixcLtxgpQQhLEQgmDCKKGMEkpiAnoy1vsYgfXWOw8AJBB775wzgolM8hSTNtpZFyCoytWq3mKMUgII4a7r+raBAFDKlJLWOmud90EbgxBOi4sbgJRyxomitFY5hXCc+hQdih4QWJUb7yZKUgLwsD8SmBhBEOCEIEQwBO+c41ImTCZtXLTWOO9cirAd527s+qlv+2GeBmuNMa7t2qHvnJk3q7ws5KpSKucgBTsMBIOpHwqZgZiGoacI5plCmIzzzCVnjIMACYDB+9navMyLLDM2VNW63t2n4gIqDiCnUEJrm6f3uj9zjBGCv/vH32o7397fCs4RYkioBCBIKYKUzBz0SIRMEAMIpKD68xFESkmQ4DTNep6MnvTcB+czmZd16b1rT80wdJggkMA4DIxSyYUSAqTkgvPRp+i1nkPwjNGU4pKgAyFeBAB5XmaZWti1hGApBeeSMUYIFUKqLM+yvK5X9XoDEZ7N3PbtOE0+OgAJCEgJtV5vVqs1QjSlSCldun0Iniz2G84ZY2eEMSHMubCIFRBIxpjo3cKlXhoFZSylxBEAMXGZYwAhhD5FCKFzRmuNEKnrEgAwDAMAgFJMvKGUOmv7vvfeE0qllIiSoiiMd3Nwbd9lTCxS16HvYwhS8sU3HEK8dENCPrO9Q4ohBAQAAgBj/HnkipFyRilNIRrjCKIARAghSMRFyBhDGHZD571fvi09zwBCyEiMEGMKInAuUIqMMfM4zXpkVCjFlncUQpjnuR96jLHg/OriEqYgc5kLti4EieDcdGK1A95BrzfC29lGhI3W1p4JcSkRRktI2Ww0YwxDGKwzxmAuGSPaBkJRmoCLwTnjnIsBhBBCdCBC5wIAERNYZCpTilKKuYwgTeOppHwaGuCjUmQYBgyByCShlGBgrbFuZoyxbEtRyAQ30bfDuaxfKlHNk6vqgKpNhAVIIDBF2VaM3x+eP3hp19vNF1/86qd3f+z7rl5tXDxVQiJWBRAwRIFnqO0Ca5DcAICKYgMIPTXnGKNgclVfxmStnZ3Xk/HjrGcTykrN8wQJXK1WXLBPT8/N0COCGKPeuxjj7LV1DqS0gPfe+xgDpawsSymzheIWY5xno7VZkOxhGPb7w8KfK+uqKLPtdpsQtNa+fvHy6fh4PO5DCKt1tanWCBAAACFswYmNmRfU4vO5WrhHnwVpICG
EMI4xguB87/ulKlPOlhGIMSaUJIRMw2jHGWOcrDfW6uAgTH9S7rhlXTOOI4SQc7lwxJc7K8Z02U5yggEAGECBKZWiyPLFPUEu9oAJAACkzMqitk637ZlQmWUZpTikCGKilCwxchhjPc7DrINPjEFMiPdRCJHnihCMEJ5GO00TxGAtuDW+77pp6ud5RAhRzjBXLvhJzwCkLMuMsc65vu+D7y4u0J96cWi6xlsnpSQIbDZbySmiKJdKCkoIu/oyz/McpPT0+HD68I9Qu2EyhKuyFGM/AIi5Wtk4W+ud8UqpRcfkAeKEZlnhwwCI9WFGGDDGMMVlXiTgjPMgLg6KblMVnFNBGUxgnmyp6jD3wdl6te66bhhHgpNNWAOKAeSyphilyIz1BiVIU6HKCFXbT7fFRVELHwwODpICAW/sCUOMUQSYdEMrlPz1P/2nNsxD11jvCMJuHjkrEEQAIISpA3D+8FjcSShzgJiQpQ0+ej32A8b05vaiqgoAUtM00zTs9/sYw2az4VyEGGKMSvDtdsOlJIwaa51zhJOMYms9xphgaowex3HZvixMh77vFyp/jB5jzBhfLLUXXnRRFF3TGrdfHhVvraTkxc2t1ppzzikLIabk53lwzo/jiBDCeGFfY845CclzIWHiCwS9gFwAROssBJ8JqM65xZ2CMSa5cMHP8zzPE8OEIYwpkZxCABYGzqJzwxhLKUKIKSUIyZL0lmXZAhZGkKLzISRCIMeEc55xASAM0ReFkpIjhCjleZ7nWbk/PC2DoBBCMg4JhBAstTykSBCgglWMTv3UnFrCsHPOGTvPI+cMY5Ii7ftRm6GuS4IFJgRSkvEyuiV9Ohozp5AwxZRSrWdrbdu2erZZVhKCtZ76qTN2EowLhhBIKbiyqCHA3qTOaZv6KngpRF1s1BdFWa3Gw6d6miGN54ePMEGm0qF5gpRiH0GCzoaUIMEYADgHDyFOCANCI0II48269AESnAghs7M+GK9nDHiRiyLLizz3fYcISgEAiPJqM8wBRuAjJQQyVAi+M9ErIQlmjCMXhtibfhggIOt1LZk0g1VlLqq1RxDAHgORUpjak8Rse3H1dHqe53nq+5SCC+n923eXt3crSgCIEJCYAALQcDB8f8zKHRISQCxFmWUZhpRTOY7zNE1VVUmVH/b9h4e3KTmCVV1vYozn81kJUciMYjSPfcMYQUhrjQPOCsU5R5Awxsuy2m63wzCE4Pu+59xaazCGUqpFHtD3g/c+y4qyrJVSzrn949Onx4+L3ohJ8frF/eXughDWdc25a/thoAwzwmOMIKE/cSKA9x4hQhhVkvOUgvWfn4Foo/cxhARhIIxmRb6E9kCMEgTzOHVD3zYnTigBmDOSZyXGeLAaI4AxijFRylGKzltrdQSIQgwSghgs9COECMTAuxi8TSlhABNMs5mjDzF6JqSgnHDGGAMgnJtna3We51IIjCHAn+Et5xylFMe4EMcjx72e+n2TUmKCW2vPbTOOo/GOcRlCmuex6stVWSUEF2abyFVKKYTkrIYgIrSUf2+9I5DgpOexsTHpoQ0hRZBoLhDjCCWfYtO2DOOIQF6uFM2jB31vm/5DVpBcZfz21bWARs+M58350B6fu7EjYEAY5Kqw3iEErDeIgeSB8wl4nHwKKSKSxYRDHCFRyHlvLMCYqZwCmGWScdzr1rgZTgHlWYSIICZLWZWbFFE7t0RmF3cvmFKYC0xIsAYO2heTKGtGeZg6KDFVNLgYCCYAhkgiBBxJkMXxcM6Lzf2ONe2h1Wejpx7Bh3cfs1wS9jUAJKWEIEzRM5eK7TrigCEGIMY4vbi8TCnEiKLbj6YbBpCp4s3L62CHh4d3CIZJD1V9t15jAEJiJKVAIArOYMaUUiEBijKU4mhn5xyndV5WWVZO0xSjX4ovpVQphRBqmvM4NcPQFfl2t9tBiIZ+djERxiKMhDPGyPPxQIV8/fq1KpR2tu/GeXATdARhznnf90opKRUhLKZEuKDL7JV
l2RJFuHQWLpkzflmBSS5CCM77GEHwQU9znpVVXgTvg7MhecH5rtpxRhYQYJomq2eEkJSZFCqTOQCAS+GTX5g5PoYUgLPBB01QCiHp2RCGEOILuG2D7/seY7SQecqyRIiM43g6naSUIYRp+qwNNcbYo11o4mnJ33WYEEIICcGbacYJMykiZVrrnpA8zxeu0cJsSynFmKbJBp+WXJalLQxjk4Azzs7Or/OSIHQ87evV5mpdM0y0mZHMynKd5aWUUnLBMGv6pu/GwTdDd4zBrKriZntxud7OL984Hd+++yGMDQgaUaIEG/vGzTEADBFJMAYIMaLGaG2GepUrIYO3JRcRAs45CongaJ2PAKSEZJ5BKZOP9eXN5eU9hKQuNofzk1Tq+sV9lmWE0JjSPM8+11QqoThNEBvTjGcMgKQcJJEwQkjDlIdAXDNG67778Q9SleucJ+8Aoce+l5IqIaINkEQAEADg9O57f37cvbiFZQESiDAIBnfrDSFomCdAU2VLijGlgRD+9ddfF2X57R++Oe6PgmerumSEhwQiCZ9rrrWcQ208xnS7rjCj4ziOgw4JMUYgTBFEQUWe55QS59zzfn86nfp+SAkKoYqiWk4shGm9XnNBjTGn8/l5f7QurFarsswXWMm5EYRAM75QW2OMxhkELIieSCmtNiklyujC5rPWQggJw4xQ6x1G2Fs39UOMsdxuaYYzJRYe8mjtZLQNLqJwXUglhY8pJdi2rffuxd1dCKEu66pcjePIBMdsgVGT0ZMxRkoMYQZi8N4SglKCBFPrDGNMW3c+n7NMFUURY7TWLQ+G1to5xxhrmmbh8AxmHoYB+kgQopwb5+IwUM5D8CCmqiiyLIeIMMbGeWr7DmO8BM4tuqSFQ+ucC6HDBGLEnXP9OB/2zfHQSikjjQNAECaqKko5kyoTTKoqLzdFWVPKMcacEmt9Xa+bLrXtyWvTNof+ePQXw/b6sq533mFRqfPzwc4NJhADRvlm6JquPTkAPUwhImujtZYxjggzekIIWOcgCpxCxiWngFBKCMmqVbWqI4ibzebuxQtM+DiOCcLd9jrPFSY8RSRFZlOIEVAhFWcIQlyUmMv80BIYADIxGezmhDIAIURumob19W0P4tvffQN/8cJaP8+zHqfrV6+FUtaMQq0ARHbuTvsPcTJk0JuNSgkgQK8vrz45bXSPcZCcI1VknGlnnDHVqsiKryCkP7376fn5wzSpqtzGGDlnlFJGxGKIYq03xoDosqJgjJl5nsyUYAApWW8Ywev1dolI6/t+no01oapWQogYfQgBwqSUEpJyTodhmrWmlA/D8G/+zb+5urp68+YN5/xwOCIEsXUAAUqQNkPQkRDkrSOMEQCi1QtfGEEICSHaGmstI1RK6X3UZtJmyqQCwQcQMMYgBQhiWeZK8Wkemra180Qpp4JPo+667urquq5WwXshOJes7Zt235SrWuVZroqyKE7753HqIMQgYcJkSUiMIKXEOPUx4hA4Z8sqSWs9zzMhtK5rKWXbtm3bLpeKEIJPkTCqMo4hmo023iUfdPBOm2kYAABt2zIhLi8vMclOR6e1Xha1RVEsm/iUgDFG6ymExKjo+3EYBm1d33ZFUWGByIqXZV7k6vryoqzy5IKSlcoyxkhV1SklM09lmWvr8qoWJOiGEwTN2DndHh4aWbQQ5RZa5yLiO6FICEYgElGcw9g186RHY0zw0Vr9dDzkQ353dQ0A6vtGZcxbmyhDiMtMMiZysUKMKsWLqpYyN9oF79uhizGOs7hTL0peaK0hBiC4GANkggmRqIxIqIs8wWT1AScHmIRQQoB9ikxlU99hABOkv//D908PH+ahjy7G6G0MThuhW4CJOZ9TQIkJbb01mjKZdI9BMtZ++PSMExRZhjPqYnDBBxCyskAJvf4SUkHfvv1x6CfBNQDQe5dCKLJlGRDGcTTOd117fXvz6tUrEFPTHpwLKYF5tu1op2lKCRpj2rY1xuVZWZbF8fj87t1PKYG
qqsqyBCAdDoflYEgp5yl9+PDhdDoN42itnWdTlmUKIQWPGRBCUMIpFpBR/L/863+ppIQIhRBccHrW8zxbp1MAGGIIkTWaQEQpwZ99wXmKcRx6yuh2u6mqinPBKEsAzdYem1PXnRgT69Xau2CNxpSootjudkKKEMI4jSkFQYkx9nj6pM1IuRBCIIxDjLPWC1ywVHfO+TzPznnn/CIzt9Za+/+HhFJK86IoiyKTijOeKAYQKqHKqipUhhDS1kAEi6IoioIzJjiLCWitF2mItdYY0w/D+XwOLnnn+7E/ng6H4/5wPBzOx3GenYsRQCnV7e2L+/s7LgjChBAOUYAIIAgQRCoTRV1KKUOMnFIpKUiRoECAGZpzfzozwoObIAxC5C4CSDBm1KdovdeTG3Q/DLO3YRrHj09PWZZdXVxaF5wznDMEIYSIC1nXmwQRADHLFCVEShlCHLsphoAwVrmKKYUUs6KYjNbOcalUmVORQ8khoTCBhAgAjGAFSQlQAQGKMRKaM0zmw6dPD49YZc/Pp6eHjykmjKhUuVR5JgSK4bR/ev/u7aCnBJwe+v2nt8gZOx7a06lpe0xEkRceeAAjRfj5ee+dl1IlGPM8V0qmhLwPC9YrpciUUpmkhC07cSElItjFcHV5+eb1myyTMYZl+0mISCl1Xde27TzPUqrVqp7m/t27nx8+PhBM7u5ebjZbrfXxeDTGUooIoQjBPC9ijE9PT9OkCSEpWmuMcwYkEBwIHoKIhciIcw7E9GcobqEAAZAoISiBrmkhSquyAlF67+syz/PSWlsUxZJjLoSs69X1xU2WZe04/PT+p8PTE+fi3A+fDs+//OrV9uJi8Wa5ubn58PDu8enDYf9JMMKoiIFYq7mcrUNa6+DTNM1/NglbEDsA4LKyTSn2fbeMZ4tIYHlBHwkhIQbG2CaTWhpOKKNCaw0QhBgJIZTKjDHRByHEss5a1C0LSk0QVSoXjKeUjLNGT0137oZ+ms00u3meJ62VyhhXEOJp6FJKhGEwoXkc+7bbbi9UduGtbbuhbdvdqsScRR9wtM3z4cP7B1Vs797Us+0ZJVxZChTEufeeIM+xpXQAMYEUjNEAo+vra6XUfr/vxy7GKPguq9ZCiLKsAcQEI8Y55xIlwJjAhPlkvHeyVJLxxJKUKsToQ0gJ8iznRQYQTwAAyCGAACQIXAIeAgwiThAiBEACow8PHx9//vGHD49P524euzNnKC+K4/nkv/12GjpZ5NM8DF2XUrrY7DCX89R+evc9BPr9/pSx7P7+PsFweu5Lurm/e0UYP52Op/MBgMgYQ5C9eHHHGDmdTkIIITjFOMuyoesZ5S/uX5aran88/PY//+6Hn38WTGg9D4MGAYSQ5kkPYzcM3fKnQghd1y1XOCl5URSLCcp2u0MInc/nae6aplk2/atVVRSF4IpzEWNYOEiIMp+AEFLk2W63IxiieZycX8wPE0iBIYgxwxhFH6K30zTRAHYX2+riwlr7/PzMOb+6unLOfffDDyHE6+vrdVWDFFfbzXq9bs7HWduP+6fJDC4GSilFaR6Hfmg+fHjX971kPIQQoKeUTtP09u3bLMu22y1CRAjRNGcI4WazWQBjpTIAgLVmIVkopRbV3LJRnabJGTPHCCGMUiqa51J575+enqZp8sEyxrIsQwgv3NoEgTc2QvBnF9UYAaW0ltU4T4zQPM+llNvtxTjMbdsunzVjAsLYNvtpWlkzMyaChy4GYycAJiFUWZZLjHtKybkACaUyqzab/ePP3/7w7r/6r39J1dpnqqCZjTNhHFEVjI0uG8bZRoAQWYYxAFAmMgKR1noa9eJdSQnHiCPIMCQ31zdc0Oj8MEyH/Wmz20aQICYMC4QRhJBgSCjaZpvkA0fETxoKAAjDgACAQEoJxJgSRHGJBgMxARS5Kh7a4f/5+79r2pZnJSHeTSBAP5lmGovn4ycs0c16t15v87Ku1yuCyHq9HvT822/+w/fff/83X//aeu29X5Wbqlw
LwW7urs7N4fnpOM0DhGG7vdhtbzabDUIQQtQ0TXAOAVgU5V/86ldVVXVjL7n48suvunPz7bffci6VEkIxRFmmEmMcIzqMXdd1WmulVJ4rjCklHEI0DIP3HsJkrZ1nbXQQPBdCABhTSlJkkgkps7yuACbTNMx6RAmUqxolgCEi0YdlFzlpHWNMPgTnQUzWWgRgJpXkgmMiuSSYdlPnvZdSLj4tUkqZScYoZaTpuqDnEALFAHJ0s92ussL18/7pYbPapgS7rplnI6jgXGRZBiIEThOCpMwgxNbElEyIRmViMVRbnDBCtEIIqbg1flGdhhCKoqzr2jm7pOp6a12IxjnftIhgbb33nnEiMSuyPMaorcWMYkTbvpu6cbGnhRCDEF0Mfd8DACAGCCQAWJlnWSZtaa6v1yklo522JqWgGHXGRgABpNCDiCAieJ5niNBC/aiLcpoGhBCmSHJpkQJQff1X/+z+q18BICSXNOMZu7DWhmAtBIEQyGmIyIeEEDROa22VzIO3WltKUJ6XhKBZd4uIpaqLlKDTjhKe4ry8ECU4ApqSc44Scj6cCeX8IoMhDl3jncmqGmVV4h4kDCGOACEoFkZnAAgDlADAhPzmn/03bdv942//Lsuyal0DF4ZhcH4WimsbKlJsdtdXV1cARuc8ZFBxsaLldnPLaH55fesxBhiXmRqn5pvvTpQJClmeF2VZ5Xnuvev7Tqns4uK6aU6CMV4UZtb39/e77fY///73H58eb6+uV2VJATTGYExijMMwYYzKKtvttl3Xf/z4mCIS3FJKjbZKZXd3d3VdL35YjBGlcoJFrli5qpepaRmbIYRCKEIpSCkSDCnP87wuau/jME1k7NuFvhadn61JPiAICUSUcs75xcV2tVotMYaHw8EEt95uOaXDMFBK7+7uLi4uy7Lsum7xXNDa7Pf7GGNd10WhJgzGXlv9adE4C0ypoNbaaRh9TN5bgFHOynPbaPMMAVi+aWvtMrcxtnhZu2nUznlCKBO8KEop/TzPhBDKOYaIUkacSwlqraMNpcrU7iKAlIIzRkefIMGLNxuI6eJyjTEGCUkmQVVYa5e6IqVcfi7L0ns/DIMQQggJEmKMIQyM/0z0CMhqBDml3gbBuFTch4ASADFRygmFEAFOM4fM+u6FWF0ASOUqTyilCDnLKJHjPPmhsdZDEFVWhqenczM23RBjGlNIPvgUEVoU4jjGkGVlnuecSQAixMQnD2C0szZmppALJYQkMUGACCowSClaF7T1zkgoQGIYwpRcAsmHESaEIAfQQYAwpAvBOQK4vtj9d//dv/ri7vYffvf3AKRqt95stiklQjDnYru9kJJzroyZj/vnLMtjHiklF1V9f3WjlOr7PqXkXdTatm07DF1d7f4szl6IN2VZSs45Tp9iOJyONzc3EeGHjx8JhZvNGpA0Dh0AcGnsXddhjDEWXTsac/IxyEzc37/UWhNCCEF5nl9eXmZZZoxBiCyeA4t7SgihbduFzbmstlNK09gv3zLjMkRgY6CMlDQnbpwJIYxzmuUQQp10URRZllHMpOQXFxd5WTw9PQ3TGBBQVSmE6NsuL8v7+3vnXNs2CMHDYX8+n8uyjDFcXOyUyvq+b5oWgDSamVJaFMWyWULWLFhBCAEhgDHt+iYGxzlHCDAmKKXjMENAl9h0re0CXwuZjePgrSGY2uCfD/tMKkEZYSQl0Pc9QkipZbM2EIb7cbZ64pwhhMau74Zps9msNzUntGkaCCEXFGAEES1AlhcKQdL3/eKx8edrRtd1C1l12SQYY2I0GGNKozEGgrRoOFb1pm2atm13uw0AwLuAIKYsv7x5/f79e8wFJDjhBCIctEEI+GRjskulJ1j4APbHZpoMACC6ycWAMKYEQUistc/PU13thFB/5tgSQo7H4/F83lxeEIiCi9o4kQmM8cV6l3Ext/1kp+3mAjGZOIuMgkAQhAgg70IIPZIUIQEBWGjOCMB5nN/9+EfE4XZ3qc00aa1kfnV
1JTgFABAsGMPJhyovIATWOhjT2PcAIZRA9A7EMI39OI4JQQgho58F+E6bZ/1U13WWZfM4mnmepllSThI6Pe+bppG5LJWEKFirhRAxgqWrr9frEMIy3xtjQghKZYSwGCNjTEq+HPQ/Cb7TYjW32EEvLotLrM7CiCaECCGWi+s0TVpPQ99+tpvGFMcQz+ezKkoAAMVECVnmxUKDOR6Px+PxcDhEkCjG29UagGStzbKMMTYMw7fffnt1cbHdbkdCjvu9c+bu5Yv1emWtmSacUkKURAhGPUcIRKa890tvwgDKPEMI6ZOllKYID6fjdnNxsbvibFr0KMbOAEQpF/W62+/3Dx+PlPCyLAXjbdv6IpcoaW36vu+6brvdMsaarvUxLJEK3kVCSNudtZ4JAjCuNQZL1MKpbSjheaGWDwVBsnziS41RSi1SoaUbhBDW63U/DnleLncJlGACQGsdfcAIEUI+Pj2mlC4vd8sH7RJgUnGVVav1oTnvT8c3L18RAubZxhBghNbacdLtNNjgrbUhuBDSZ/cyGJ0HCKHDYb/42TPGlFIppXGesiyz3jHBy7KECYx9n8u86wZrbV3k5OZKj/bt739fVznLKaQCAAExBcE7PVIKAZUBALCo3VNKIECInR4ePvzQj8eccs7KmcwIkU8fP1xdXb1+/ToluES4ntqOMQJAGPU8z3Py4XH6IKXM8xJCHEKYhznLMsmptTbLVFkV57btui7G2LatHue6rqUQf/O3/6Wex3cf3s1Ge21mrcd+8t6v12vOxSL1WsaYruuW944xNmamlPfD0A/DxeW2KkrnXFnWIYTFkM9ajRBBCF1eXsboj8fj6XRaBAZFVSKElntalmUuuH7srbWkWK3mcWrHAVsbosMQYQC9sZRSTiiBqB36eZ6VyqP3epqttd55a93hcJznWTB2Op2WmFtrbTt09scfT6fT0E/ehxACwmlxcuScLxTrBdOGiGRSpZR2O5RSenx8BAlTyoyx1jqAoPe+awdMYF2vhZSKO+9iAkgpVWQFRfjT81PTtX3fa62ttYvk13uPIRr7iQkqpQw0EUKyXC4T84eHd7Nxi1TCWi9VbqwfxxEAQHFaBNeU0hjjQmVdjMpCCFkuIYTWaee4MTNjBQawn+cY/fF4vr6+vb65OR6Pz8/PlxfbGHyCQCp2PreEkCzLQorfffPHY5ZfX19+Tv4KIcaofTiPbT8MDGGGSaAAALA4qXCWOWecN3/5l39RVYVzBgDVNM3Hx0+//OoXUsqiKKZp8tqsViuZFb/9d78Tkn399dcgoWBdwDDGCGOKkGAAPAAYxuTnRDkgGEWSQPq8a4PQOffbf/h3BPvt5kI3TXD+anfVT+PT06eiKBDBBDPnDESICWq1maapWpWU0/3THlFifXQhAAA3u8vFjBZhgDxYJIRXV1dKKWut1iaE6GGqNuu7F68+PTzM9jtjTCnL3famKuYPHz4cDserq6u6ruu6JoTkefb09BxCmGc9zxpjbF2ggmNKECSztvM0lWUJIey6bskfoBQv/E5r9QLsLsJGZ6wxJsXonJsBBADkQpYXl2S1WimRSZkNw6BtTCGeu0Y6yblACE3Hw+Xl5cXFxUI51lqH4I/Htu+7hfuZFcWfHiwghKgJhBAOw6C1ZYynlAAIf2YUhxAWkjNjzHn/8OFdnpVKZRDCu7u7xeFwmiZjDKXUWjcMY0ppnt6WZVnlWSYUWWFIsJRy6WspREzJbrcz2nHOg7dd1ymVY4zLMld5tjyZKaWljR4Oh1PTLVbaXKplEbTciZdiv0Dry2th4BGElVII4nEctbbTqBdWnwtOa+29r6owz3MV49XV1dT24zgAEDAlAPClBfdtU69Wr1++6trWrNfOeD311plJj1rrtmn2+z1CiDHhU0QEwoQowjLPtAbXNxdlWU+TjjGemuPT/pMQijFGMRFSppRkljHGxnF0Znzx+lZKCZx1uttdbWVZA5gDTGKaEOQAJuCSxwExiCBGAIK
YAMIJAEqRkuK7n34fAUgBXF9fMykqSv7iV3/FGGvO3cJALvKcUjZHQxlDAEKEV+ttkechBOdM27bTNG42G+/DAuOEEGMMwzAopRDC1to8z0OKXIpvf/uPv/v2G54zKVVW1S9v71fb6p/8k79++/at1lqpvKpWizvi7Y0cx/HcHJcaKpUoiiIr8mjcNE2C82Uzbq1dCPMLURTCtAjBl7lxKY4LyDtN+uPHt0KIXKoff/iJ9N1IMFZchBCEYJTSEHyC4OLiwhgzz/Oihxzarutb7321rtab+ng4d12nlIoxJgi0NSFElatClTEE55ySudbae6eUVEotDJzz+dw0TZ7nWZadTodxSF3feGullBfbzdN+v1hmWz0HpyWXVVU65yHE7el8Pp+zLNPGjONY1zXjBFPy1S9/seiVp2laRGSccwAi52KzWS82G1mWcUqXmQohtFnv9vv94XDYXOx2my0hxNgZQjh10zzPSqlK5YVQy5xGCIkQLM2NS+Wcm/T8tH+GGEmI+rETkiGE5nl8evyIEKKcHE9nJnhI3vtIKWWUHw4HBPHtq2v9k/7x/U+KyvPhqJ3Vxr37+e2P3//Qtu3idCmE+BPoAa2dCSEgoa6dry5kgkDrkXP++tWbPxtCSSkhhBGAaeiKorjYXkYXASOsyuPJQkYiwSAaiAAABkIaGY8pKgAjDDBSgFJKCzwQfv31X/723//f7x8f/uqvfr2Y61OKCQRSCklZil4KwTmP0VtjCCEQAiHkapUpmQMAHj6+x5gqRRa3/rquzTR777fb7TRN799/IBD5FC8uru6ub56Ph/dv33315Zt6t5KEDdpop7//fo8Qub+/x4haa6fRAgBAIlJSrXUMYL3aXl1fSCnPTUMShJw7axcDdIxxlmXTNJ1Op4U8towbxhjn3DLoIYKtd58+PmGMVZZxzl2MEWHy/dsfLjdbmBDGWAhOCAkBj+PIOb+/v99sdj/++OPH9x+stULyoshOp9OsNUCJMLpMz1wKKniatfdecqHtDABggviAFBbr9XqhHy2VeAGuCSEx1kVRaG1BQimBh0+f3r59CwCs1zVBeFOVMQWIESGcAoTB4nkEmrF/enqa53m7Wa02a6Zkcm5xrFimAgwRAKAsy3nWTXO+fXFV5JUzC5YMqqJwzoO0loLNRkdvq1V1PtuUUpHnlJAsk8vuoiwyznmMsdcTjpAQAjESghln26YZho4i6L3fbFerzU4NXTf0kvN+GsZRq6IkBC0bW4RQAPD50GwQCBD8+MPPiko96kN7bprmj9/8/ng+QJRiipBhwgjGCCFkvfMuQBjmea7rDaXYmNk6m2elEOJx/zzOI8s4mhesDwAYPYBaa+MNd7Ys67ltp2mUNUHWA4wAFClFXubQ+jCdEM8AJn9y69A4OgTgzc3dvjnWZbnAroLRBajB6AwAQARhDFJKueJCqdkOzfNBkKxabyileVYyKkJ0C3lz8cl7UxRcCIxx2zSLGMU5t386fPPHP9zeXRdlOTaDqKvorfX+cDhored5XHLvlnuwMXaahyzLrq6unDeLxbfgXM/z4neyNOGlVC2/vuiJEULLKLFMLkVRLNj/zfU1hDDLsqqqFlUGWW3WH58eGcT39/d/3mddXl6GkN6/f3h6fu66jnBKCYQEOeCtXmhqheR0aXYxJsaog8ZbN6Th46cHKeWyPPXeNk2zaM8XGIsQsmwYl8slSMgY632ECO0urgEALMtISv04+hipkpiQ6BMGOKUACX51d7ur109PT8fjMaWkp0koSSnFGH12qJRqIXuaeX71+gWlWM8jpXxXrrthCCGPMd3c3MQIjqfTfr//+PHjer1mjLVNX1T5okoTSgghYkxN00hPq6oCABpnF7M+htG7d+8O58Zb1/cX46Qfn6+qTF1st8659+9/xhjnWRlCQggAGEOMIYRxHAftnSGnpun79g8//vHp8BSMLooqRh+TR5QQgiFIMQVOmRCSMaYUhzg9Hz8ltEspESx
Oh6MLPiF4OB0lppeXO87pPI8xOO99fzyH5Kv1quDy/HzEDkICAaQIzkQob7A+nzMGvdZkLRFiMXoYRxBN2x1/8Ytf9G46nc5XVzeLroNzLmRmrYcE5kLGCKRUKsu6rrNzmEbbBX/uBynler0mFOnBSCkZE1prxnlZVUzJRep1Pp36vp+1di784utfrtf1MHaUcBsihigk9Nd//ZsY436/r+t6oTB4bxGOw9B1ff/y5cuiuDqdTu/e/axUnqkiBLcc5XmeT6fTPM9SyhjBMAzLjL0ojJfDsNAfCWHL3ZpzPk0TQCnPc/h//O//6zxNL27vKKUAAy4FxYRzfjyfvHVLgWdCNd0ZghTcHFys61pw1XWdc345fCE6jgVEcdTj4ia0uFwsexhjLGOMUqIy4V1cCo/VOkGkjdNa51WplKpWNWPs8Hw8HvcAgDdv3gghuq6zerLaweQppRATH0OK0Do9DL2UcnmWYowBpDzPJaOcMYYRwMjH0LVD17b3N/cvXrw4Na1xNoW4MD5SStaF9+/fx+RXqzqlBABcrWopOUJk0R+FEObZYggxhIRx51xMxnubAPm0Pz58eD9N0zhP9WpVl2uFhR51N/eXF9tNtUsJztZEECJOLthCFj6kj+/e2qkHGD4+P5vZUk4IZwJThFAggGcKxgQBIIQQKjBKMNgQAkacURkhyIS8urqDDJ3PR5jAbr25v7shhDx8+ng8Hr768uub62tKcV1mMYJ+1IwxygSVmEEMEQEA/fDd95MehWS3r766uP1Vgs52D9DZtu2AS4TR47lXSsTkrXPeOYJFAqhar7eXF8/vPyjOAMWHwzPHqCgKXuYgxNPpNE9TURQLu9Zpl+d5hP7Tp0+MMW/84XzyIXAlOedvvvwKU/r9t99QhIs8l1JKlVsfCaPRp+60pwwTzMZ56tojIaSq123beu/v724wxtZ6RsXh+LhI2hlj06TP5zMhZJ7HJfkLIZRlcpGwOxcuLi6urq7atu37NsaY56Vzbhz7cRwZVSTG+PLlS4ZJ13UAxIUpCSGQUjJCPgPDJITZeW8BSOPULzL+WY/W+N1ut1pXi5uXt55STij33gvOvfcQYGNm78MXX3wBQOq6Tkqx5J9BjKWQQqXnJ6uU2mw2IYTT4Xg4PCOEXr58KaV83j+G4ClljCNGJec8AuBsKOrKWsvO5/P5bEOnuMrzvKoqznkITnAeQtB68j4yKYidDu2+/c/9OI5ZLglm0zRlueRChjRf31w2zfn5+XG73XIunTOMo8VMTirOmXz69Pzp4eEXv/hS5rJpGkLyPFeEyqurq1999UUMoOnOj/vnaZr2zb5t++jTZKaPn56UyMaxV7lElDztH6EHu5ubpu8wSEKK1W7rjKeUGj+vqvrV/QtA8Tzp5Ly11iUAkvfeR4gTAs770/7Z6okCBCJEBD49fizKNcJ8tm/tOCefemvef/yklOKCeu+HYTqdDpvdBkTw2V/V2tOpsS51XXfujq8+ffzn/6IKID38/D7jbF0XJniG1ctcLTuDyWjnqJ6tECoFe3h6IgQwyTARRb5KfgIgHp/3nDFrTIx+Qa+qvEgh/vz0SAk/NAefYp6VZbHGGDJOhBBumnCWEYiOhwOIkFAJbRQqAykhONar3LnQNA1lDAA09JrhgWPy03ff/6f/9+9vbi/LohYiCzGeT4dFpL6M/hDCPM/ruq6qylrLmFBKee/fvXv3008//fDDd0VR3d7eCimbppnGkXNalqXzEf5f/+f/VubV0HZLroG1emFs/3lzYoyVMpv6IcIoBJ/1OI7jYkK9ENfKspRSLpZGMcHlGno+HaZJr4o1UxRCWNdlSjBFuAATn43vMG77vm3by8vLoiisj4sYlBCSEgQAFKXkksMEAIAUoXEcV6s1Ffzhw6e+7znnEKVx7oPzGMAU4+IhgwlxzmVKjZP2MC3OYafH0zybu9vr+/ub7W7NGIkJnk6npSE+Pj6+f/9+s9lBmDjndb0WgmEExnH
eP+5/+P7HX/zyi9sX1zHGlCCltOu6GP3d9Z1zoesaY+eUkvUREC6E6Jvz8XhmhAnBjTGIkHme3717F0C6vb3Ns3KYeoAgTCBGoE3/9OlRyuyLL77AETptjA8WRD13zkVrIkhoMuM4tkbrqR/KYj3PY3M+AkghI9M0rIvVy5sXeVUbM+aFWK1WgucQpYeH91xyQmgmc5VLENPQayboX3z9iwQIDP7i9nJV1cZ6qx0jpHeDs0HiQDGTUo6T8SlKmWVZFoLzIXHGFn7KPM8hOADAMoKXZYkJ++abb9qm+eLV64vN9tOHB0ppwujQHqty8+LuDkLYjc27d++M9X/7t3+LMf7+u+8QAq9fv44xHs8nN2nC6Wq1zrLi229///33P3LOIUzrqtbaYoxVkT8/nbr+lGVivd1QzDlfAoP94XDIsuz29nZJ/qrrumkaABCjYtmOvH//Pldie3FVlmWCccGni6LQWhOC8OPjx7yoFnC4qMr903NwFhHadX1e5lmWDUMPMKirIsFIab0wxvq+F1KOw/Dw8HB1ecOYgNBhjPM8996mWD8+/rHI67vd1axH7yNn0kXfNO2frB45AMDHuKxpp2naH89FUSxglnPBOVdXZQSh7RpKKSdCKeWcdc5yQRivpeRKqcPp+Pz41DWtd+7u7o4LMU3TZx2Q9/PQE0Yvd1sQU7COcyqlLMryeDwuFhV5oQgh9/cvfbB69qvVijE2z/M0DQTDxc5S5lm93i62Akv3y7Lsw4d3x/3+xf0r5w2lhGKylQohggisJK6rrCgqzun53KYEY4JXV5dN01htSyU3qyqACBEOIYytiiY45+ysX7142bdtRJAoMevRzt7q4L2nDAIIrbXjOHdd1w+TqorNZuVjOOwbmNAQbBpPPlifDBGyGUYpJc0y66PkUsiMS1ll6s3renuxATBW5SZbF/3+5LTjghCQrI25yBFLMZl5nLpuWO12HNO6rgmGwZKEQN8Pfd8TsiiKbEhRcA4AmCYd4nSx20kh2radxwkjxBjaXl7sbq9/+P77v/u7f7verRFC1oWqzNvmVFWrlJL34Xw+zfM8dN2p6T9+/Hh9c/OXv/paKSWlZFQQhg0AJMsk45vNpizWjCNCUN+PCYDwmU+Abm5uECIfPnxcGKBt2z4+Pp7P59vb++3mIiXMeTZNQ0r4cDhRBtfr9RK+9PHhPVmCloRkx+ZsgplHeD6fOKVVVTGCk/NEkOZ0CCBxgdfrGiT2/v37cZh3FxsAIef86dOz1npbXhCiy0wlEAhk5dU1xlQbN00ToTjGiADghAIlrbVt37VtJISEEGECjDEp5aRtURSccx+sVFxBNk2TykRVVNM0WmullBjDvu8hTFVVEkJmPXLOf/nLX57P53mec6kW4wlOhbF+ITgZZ8qy3G0KrXX0vhtaemQhJIQQRCkEFyLhjF1d3vzww08QJgjT4fC8QOvH/WkYhvV2u6RHLitnjDFFuKpWzsyEomp73U+j9x4Q7GMYjx3EUDBmzNwPTQghz4rjufHOX+8u9s+H9x9+/vLLL8uiOh6PTXNys729vJJZlkBAGMhcDuOox2FVr8mKpJRc8J81ay6kCBknUuVSZhgCF7y3ASUwzINPtiiKGJIxzlnddR0EuK62eUaUzAXnUnKZSQSy3dXV4fT+4aendV5CgBAAsMhxgoohwrkZdC4tQsiBCBGy1psQEYgpRBhhmZXjOEYXyrJs2v756SQkgRC3TX9zc3N9cdk0DSHkeG7bab4W3BmbZdI4RRi9v3s9j3bs9z/88NOs//D69WsA4t//x3/48ccfv/76V7/5zX/xq1//jZ4GZ9OL+zdCKETgbMLbdx9fvXlxOh3+7b/7t4KRqihvrl+8un9NJB6nfhrqpmk2m83vf//7//Sf/uNvfvMbrXnXdZTS1Wr19PTp7vbFxcXFzz+/c95jhKQs908PwDu43X769Ekogf+n/+FfQAjHYcQQ6XGyxux2W4SxsxpCsPD4irqqqoo
xCiHSWp/Ox3GYVpuNtQ4hKJWKKTEqEIRGTyF6a103ToRSQujxdAjeO23GcUgpGGeGaeBcCM4JIYyxBMES8pHlBabEhxiBxwh471KKi1MnADDBqI0OKQolMcYY02EY+27U8+ScWwqztmbSxltHCR2neRo0ggiEwAW3JsQAIkjeO85kUZScs6JQGGPOBUhgGObj8SilKop86UhSZqdTSymWRRaSq8uCc5Zg8sFTKsqyuL66JARP0wwwoZRp57x3MUHKGKH83LbBu7Is58l454TgYz8xKZgS09SXRTGOg5lGVeQxxa5rGWMAQkKpMUZxgUDq2maeRh88RmRo2hRjXRQv7u4EZxQiBCEi0OmBQF9klBKZQtTThGJSXFxud1+9enV1dbnb7m5ubwmAxhlKqJstwvjdz999/933QmWEc5Vv8vUN4xJqPespesg551Is1c17Z4wGEBIMVZY7F6w1znkXwnq9rqpSqkwptd3scqWM0fv9ngq+2e2c1+/ffmibgZKMimK7u3bWztNAs2x3cQMwffPlFwEkY83LV68ur65Fxsdx1kZ75wCEKhdt25yaI8bs6uo6k0JlYl3tplFPcxcTeP/4vm87JSVjrOu6sq4udhfffffd+/cPZVkJwa+vrzHG7z+8naa+KGS9qpw3xkyvX704N80333zDuaRc4P/5X/4LQsjz8z46LzjfrFf1amWtDQC4EPtxIozXdbVer2dtplkDEDNVZlnOOF+In8vKCRPMOAspGRcSJM64GKIU0oc4jxomgjDabjdCSUxYtV6rImOUZnk+z/NnMM95PetFtey9D+GzH7X3njGKKbbGzvPsvbc+xBSXhBvGeN938zwv9KSyrILzmGBKCQQgBY8xIowUea1kZrQGELx+9cVqtQIwMEaXBCRCYd9PznmEYAi+LGuEYAgRQrDe1FW9zvMcweisSwBCQgCGMMUUY9/34zRlMsMQUYQzlas8QxilhJTMBBcIQghJluUYI6uNdf7q9noe52UMhSlZ71NKS7wkJSQB4KzlnC/BBUKI7Xrz4u52s1lTgvNSLRQsPc+zHjvdO2+naZqGXtKCULbdbHa7XV3Xr169Wa03ZZkb67z329324uISA+TcPPTHeTQJorvLl9vrFzQvojbeGQ9QcH4YO0ww43QchxC8lMI5q3KFKNYmcJmtN2UEi8u8ZoxACLqub9tmmodvvvmdc6Yoy4eHTyn4q6uLFy9uIYofP77d7OrZ6m++/cP28oIJdnl1acw0jv3lxUWel9b4T48PQztO8zhOQz93b99/eNofvLN39y+uL1+em/N2u9qud+vNKiSfEtlsNymk/f5AKQshIoSlkATiV29eU0rbtoMQ3dxcCyEeHh6Op713PstUva7HSf/dv/8P/TDUq02R1/h//G//uTEmpKgyxaWIKVlrHx4evAdMSMIFgKA7t3aeEYYRxGmc1qstoTzGVFU1QjDGRAidph5gzLhMiAopGKVKZjDCrMxTQkqoVy9fSCUgQogxlatZaxs8odTM+nw+M8YSTARTjBElrG06a4IUEiAoM0kJjSFmWV4U+RKws92s67qSUhBGBOec87IsKaWE4PVqvTj0M0rKqsqKzDmfl2q9rut6hRDpuj6EYOxorQWJ+mAhCgn4YeiVUplSxujgbT+c61X1T3/9ay4zIRWBCULMVQYRDMkB65Y8MZVnUklGGaWUcZliooQjgBnlPljnLOc0xnhum9N+DzEq65U1ev/8nABo22Gchvv7+2X5HUIw1iKMtTFK5lfXV1VVzfPsnNluN5yzx6fH/f4RRDB2PYiWML6ur4ps++L+i1/9+i8vLq7KqqrXdVWuOJOciZRSij7jHBHSns6ntjHB7vcHmZe//OKrgACXNKagTZtxhDCnnAvJMcLOh5SAFBJCZJ2PGMm8QpjGFLph4IpTSjDCIYCmOZ/Pp9nMGEEp2Ha7QZjEGDGCGDNn435/fP/uHSK0XFWAsFzKjw8PlDDGaN+2IYQIwLt
375XI/vpv/snN9Yuy2hAq6npVlavd5ur+xRuEMSEYE+KDk1zFEMtK1fUFwTQFYK1/8+bLerUpsvLu5h4itN1uXr9+rZQoiurm/kWe5d9//5MZ/dd/8avLm+vf/+GP//Ef/qEoCwQBF+r/AwHU3/3CQv3GAAAAAElFTkSuQmCC", "text/plain": [ "RGB4 Images.Image with:\n", - " data: 594x629 Array{ColorTypes.RGB4{FixedPointNumbers.UfixedBase{UInt8,8}},2}\n", + " data: 256x256 Array{ColorTypes.RGB4{FixedPointNumbers.UfixedBase{UInt8,8}},2}\n", " properties:\n", " imagedescription: \n", " spatialorder: x y\n", " pixelspacing: 1 1" ] }, - "execution_count": 106, + "execution_count": 1, "metadata": {}, "output_type": "execute_result" } ], "source": [ "using Images, Colors\n", - "#img = imread(\"cat.png\")\n", - "img = imread(\"/Users/chiyuan/Desktop/bird.png\")\n", - "img = imread(\"/Users/chiyuan/Desktop/bill-gates.jpg\")\n", - "#img = imread(\"/Users/chiyuan/Desktop/dog.jpg\")\n", - "#img = imread(\"/Users/chiyuan/Desktop/schoolbus.jpg\")\n", - "#img = imread(\"/Users/chiyuan/Desktop/horse-face-2.jpg\")" + "img = imread(\"cat.png\")" ] }, { @@ -63,7 +58,7 @@ }, { "cell_type": "code", - "execution_count": 107, + "execution_count": 2, "metadata": { "collapsed": false }, @@ -73,7 +68,7 @@ "output_type": "stream", "text": [ "Image resized to (224,224,3)\n", - "('Original Image Shape: ', (629, 594, 3))\n" + "('Original Image Shape: ', (256, 256, 3))\n" ] } ], @@ -98,7 +93,7 @@ }, { "cell_type": "code", - "execution_count": 108, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -132,7 +127,7 @@ }, { "cell_type": "code", - "execution_count": 109, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -161,7 +156,7 @@ }, { "cell_type": "code", - "execution_count": 110, + "execution_count": 5, "metadata": { "collapsed": false }, @@ -170,7 +165,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "wig\n" + "lynx, catamount\n" ] } ], @@ -191,7 +186,7 @@ }, { "cell_type": "code", - "execution_count": 111, + "execution_count": 6, "metadata": { "collapsed": false }, @@ -200,11 +195,11 @@ "name": "stdout", 
"output_type": "stream", "text": [ - " wig w.p. 0.430989\n", - " feather boa, boa w.p. 0.127852\n", - " fur coat w.p. 0.112760\n", - " hair spray w.p. 0.055442\n", - " cloak w.p. 0.019644\n" + " lynx, catamount w.p. 0.552236\n", + " tabby, tabby cat w.p. 0.118180\n", + " Persian cat w.p. 0.114004\n", + " Egyptian cat w.p. 0.090389\n", + " tiger cat w.p. 0.086328\n" ] } ], @@ -218,15 +213,6 @@ " println(mx.format(\"{1:>18} w.p. {2:4f}\", l, p))\n", "end" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] } ], "metadata": { From 1dc944e02f334ff90511c68af279648f15fb1243 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 11:00:11 -0400 Subject: [PATCH 153/630] first step of refining the data provider API. --- docs/api/io.rst | 134 +++++++++++++++++++++++++++++++++++++ docs/build-api.jl | 2 + docs/index.rst | 5 +- src/io.jl | 163 +++++++++++++++++++++++++++++++--------------- 4 files changed, 249 insertions(+), 55 deletions(-) create mode 100644 docs/api/io.rst diff --git a/docs/api/io.rst b/docs/api/io.rst new file mode 100644 index 000000000000..d3d060fb12f3 --- /dev/null +++ b/docs/api/io.rst @@ -0,0 +1,134 @@ + +Data Providers +============== + +Data providers are wrappers that load external data, be it images, text, or general tensors, +and split it into mini-batches so that the model can consume the data in a uniformed way. + + + + +.. class:: AbstractDataProvider + + The root type for all data provider. A data provider should implement the following interfaces: + + .. function:: get_batch_size(provider) -> Int + + :param AbstractDataProvider provider: the data provider. + :return: the mini-batch size of the provided data. All the provided data should have the + same mini-batch size (i.e. the last dimension). + + .. function:: provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} + + :param AbstractDataProvider provider: the data provider. 
+ :return: a vector of (name, shape) pairs describing the names of the data it provides, and + the corresponding shapes. + + .. function:: provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} + + :param AbstractDataProvider provider: the data provider. + :return: a vector of (name, shape) pairs describing the names of the labels it provides, and + the corresponding shapes. + + The difference between *data* and *label* is that during + training stage, both *data* and *label* will be feeded into the model, while during + prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and + of any shapes. The provided data and label names here should match the input names in a target + :class:`Symbol`. + + A data provider should also implement the Julia iteration interface, in order to allow iterating + through the data set. The provider will be called in the following way: + + .. code-block:: julia + + for batch in provider + data = get_data(provider, batch) + end + + which will be translated by Julia compiler into + + .. code-block:: julia + + state = Base.start(provider) + while !Base.done(provider, state) + (batch, state) = Base.next(provider, state) + data = get_data(provider, batch) + end + + The detailed interface function is listed below: + + .. function:: Base.eltype(provider) -> AbstractDataBatch + + :param AbstractDataProvider provider: the data provider. + :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. + + .. function:: Base.start(provider) -> AbstractDataProviderState + + :param AbstractDataProvider provider: the data provider. + + This function is always called before iterating into the dataset. It should initialize + the iterator, reset the index, and do data shuffling if needed. + + .. function:: Base.done(provider, state) -> Bool + + :param AbstractDataProvider provider: the data provider. 
+ :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. + :return: true if there is no more data to iterate in this dataset. + + .. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) + + :param AbstractDataProvider provider: the data provider. + :return: the current data batch, and the state for the next iteration. + + Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that + is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this + case, you can safely assume that + + * :func:`Base.start` will always be called, and called only once before the iteration starts. + * :func:`Base.done` will always be called at the beginning of every iteration and always be called once. + * If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with + a call to :func:`Base.start`. + * :func:`Base.next` will always be called only once in each iteration. It will always be called after + one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will + not be called. + + With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation + of the built-in :class:`MXDataProvider` for example. + + + + +.. class:: AbstractDataProviderState + + Base type for data provider states. + + + + +.. class:: AbstractDataBatch + + Base type for a data mini-batch. It should implement the following interfaces: + + .. function:: count_samples(batch) -> Int + + :param AbstractDataBatch batch: the data batch object. + :return: the number of samples in this batch. This number should be greater than 0, but + less than or equal to the batch size. This is used to indicate at the end of + the data set, there might not be enough samples for a whole mini-batch. + + .. 
function:: get_data(provider, batch) -> Vector{NDArray} + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :return: a vector of data in this batch, should be in the same order as declared in + :func:`provide_data() `. The last dimension + of each :class:`NDArray` should match the value returned by :func:`count_samples`. + + .. function:: get_label(provider, batch) -> Vector{NDArray} + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :return: a vector of labels in this batch. Similar to :func:`get_data`. + + + diff --git a/docs/build-api.jl b/docs/build-api.jl index abccd31f1222..0d01e4f84ec8 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -78,3 +78,5 @@ extract_doc("callback.rst", "callback.jl") extract_doc("model.rst", "model.jl") extract_doc("optimizer.rst", "optimizer.jl") + +extract_doc("io.rst", "io.jl") diff --git a/docs/index.rst b/docs/index.rst index 1176f927174d..eda80ac1c18d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -32,11 +32,12 @@ For more details, see documentation below. Please also checkout the `examples :caption: API Documentation api/model - api/callback api/initializer + api/optimizer + api/callback + api/io api/ndarray api/symbol - api/optimizer Indices and tables ================== diff --git a/src/io.jl b/src/io.jl index c8396c1d7182..9f4ad67d735b 100644 --- a/src/io.jl +++ b/src/io.jl @@ -1,76 +1,106 @@ -"""Root type for data provider +#=doc +Data Providers +============== -A data provider provides interface to iterate over a dataset. It should implement the following functions: +Data providers are wrappers that load external data, be it images, text, or general tensors, +and split it into mini-batches so that the model can consume the data in a uniformed way. 
+=# -```julia -provide_data(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} -provide_label(provider :: AbstractDataProvider) => Vector{Tuple{Base.Symbol, Tuple}} -``` +#=doc +.. class:: AbstractDataProvider -Returns a list of name-shape pairs, indicating the name and shape of the each data stream. For example, -`[(:data, (100,1,28,28))]` or `[(:softmax_label, (100,1))]`. It should also implement the following convenient -function + The root type for all data provider. A data provider should implement the following interfaces: -```julia -get_batch_size(provider :: AbstractDataProvider) => Int -``` + .. function:: get_batch_size(provider) -> Int -which returns the batch size used in this data provider. + :param AbstractDataProvider provider: the data provider. + :return: the mini-batch size of the provided data. All the provided data should have the + same mini-batch size (i.e. the last dimension). -A data provider should implement the standard Julia iteration interface, including `Base.start`, -`Base.next`, `Base.done` and `Base.eltype`. It could safely assume that the interface functions will -always be called like + .. function:: provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} -```julia -for batch in provider - # ... - load_data!(batch, targets) -end -``` + :param AbstractDataProvider provider: the data provider. + :return: a vector of (name, shape) pairs describing the names of the data it provides, and + the corresponding shapes. -which translates into + .. function:: provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} -```julia -state = Base.start(provider) -while !Base.done(provider, state) - (batch, state) = Base.next(provider, state) - # ... - load_data!(batch, targets) -end -``` + :param AbstractDataProvider provider: the data provider. + :return: a vector of (name, shape) pairs describing the names of the labels it provides, and + the corresponding shapes. 
-In other words, it could safely assume that `Base.next` is always called after `Base.done`. And neither -of those function will be called twice consequtively. The detailed interfaces are list below: + The difference between *data* and *label* is that during + training stage, both *data* and *label* will be feeded into the model, while during + prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and + of any shapes. The provided data and label names here should match the input names in a target + :class:`Symbol`. -```julia -Base.start(provider :: AbstractDataProvider) => AbstractDataProviderState -``` + A data provider should also implement the Julia iteration interface, in order to allow iterating + through the data set. The provider will be called in the following way: -Initialize or reset the data iteration. + .. code-block:: julia -```julia -Base.next(provider :: AbstractDataProvider, state :: AbstractDataProviderState) - => (AbstractDataBatch, AbstractDataProviderState) -``` + for batch in provider + data = get_data(provider, batch) + end -Return one batch of data. Actual data can be retrieved from the batch by interface functions described -in the document of type `AbstractDataBatch`. + which will be translated by Julia compiler into -```julia -Base.done(provider :: AbstractDataProvider, state :: AbstractDataProviderState) => Bool -``` + .. code-block:: julia -Return `false` if there is more batch to get. + state = Base.start(provider) + while !Base.done(provider, state) + (batch, state) = Base.next(provider, state) + data = get_data(provider, batch) + end -```julia -Base.eltype(::Type{MyDataProvider}) => MyDataProviderState -``` + The detailed interface function is listed below: -Return the type of the data provider state. -""" + .. function:: Base.eltype(provider) -> AbstractDataBatch + + :param AbstractDataProvider provider: the data provider. + :return: the specific subtype representing a data batch. 
See :class:`AbstractDataBatch`. + + .. function:: Base.start(provider) -> AbstractDataProviderState + + :param AbstractDataProvider provider: the data provider. + + This function is always called before iterating into the dataset. It should initialize + the iterator, reset the index, and do data shuffling if needed. + + .. function:: Base.done(provider, state) -> Bool + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. + :return: true if there is no more data to iterate in this dataset. + + .. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) + + :param AbstractDataProvider provider: the data provider. + :return: the current data batch, and the state for the next iteration. + + Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that + is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this + case, you can safely assume that + + * :func:`Base.start` will always be called, and called only once before the iteration starts. + * :func:`Base.done` will always be called at the beginning of every iteration and always be called once. + * If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with + a call to :func:`Base.start`. + * :func:`Base.next` will always be called only once in each iteration. It will always be called after + one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will + not be called. + + With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation + of the built-in :class:`MXDataProvider` for example. +=# abstract AbstractDataProvider -"""Root type for states of data provider""" +#=doc +.. class:: AbstractDataProviderState + + Base type for data provider states. 
+=# abstract AbstractDataProviderState """A tuple of (slice, NDArray). Usually each NDArray resides on a different device, and each @@ -78,6 +108,33 @@ abstract AbstractDataProviderState """ typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} +#=doc +.. class:: AbstractDataBatch + + Base type for a data mini-batch. It should implement the following interfaces: + + .. function:: count_samples(batch) -> Int + + :param AbstractDataBatch batch: the data batch object. + :return: the number of samples in this batch. This number should be greater than 0, but + less than or equal to the batch size. This is used to indicate at the end of + the data set, there might not be enough samples for a whole mini-batch. + + .. function:: get_data(provider, batch) -> Vector{NDArray} + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :return: a vector of data in this batch, should be in the same order as declared in + :func:`provide_data() `. The last dimension + of each :class:`NDArray` should match the value returned by :func:`count_samples`. + + .. function:: get_label(provider, batch) -> Vector{NDArray} + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :return: a vector of labels in this batch. Similar to :func:`get_data`. +=# + """Root type for data batch A data batch must implement the following interface function to actually provide the data and label. From 49a5acb3841d2ca8ed63a7ec2e4993e68d0b9602 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 12:46:00 -0400 Subject: [PATCH 154/630] import doc of built-in data provider. 
--- docs/api/io.rst | 242 +++++++++++++++++++++++++++++++++++++++++++- docs/api/symbol.rst | 92 ++++++++--------- docs/build-api.jl | 28 +++-- src/io.jl | 108 +++++++++++++------- src/symbol.jl | 4 +- 5 files changed, 377 insertions(+), 97 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index d3d060fb12f3..6d4ed63f8553 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -109,7 +109,7 @@ and split it into mini-batches so that the model can consume the data in a unifo Base type for a data mini-batch. It should implement the following interfaces: - .. function:: count_samples(batch) -> Int + .. function:: count_samples(provider, batch) -> Int :param AbstractDataBatch batch: the data batch object. :return: the number of samples in this batch. This number should be greater than 0, but @@ -121,8 +121,11 @@ and split it into mini-batches so that the model can consume the data in a unifo :param AbstractDataProvider provider: the data provider. :param AbstractDataBatch batch: the data batch object. :return: a vector of data in this batch, should be in the same order as declared in - :func:`provide_data() `. The last dimension - of each :class:`NDArray` should match the value returned by :func:`count_samples`. + :func:`provide_data() `. + + The last dimension of each :class:`NDArray` should always match the batch_size, even when + :func:`count_samples` returns a value less than the batch size. In this case, + the data provider is free to pad the remaining contents with any value. .. function:: get_label(provider, batch) -> Vector{NDArray} @@ -131,4 +134,237 @@ and split it into mini-batches so that the model can consume the data in a unifo :return: a vector of labels in this batch. Similar to :func:`get_data`. + The following function will be automatically defined. They are primarily useful for debugging + and testing. + + .. function:: get(provider, batch, name) -> NDArray + + :param AbstractDataProvider provider: the data provider. 
+ :param AbstractDataBatch batch: the data batch object. + :param Base.Symbol name: the name of the data to get, should be one of the names + provided in either :func:`provide_data() ` + or :func:`provide_label() `. + :return: the corresponding data array corresponding to that name. + + + + +.. class:: MXDataProvider + + A data provider that wrap built-in data iterators from libmxnet. + + + + +Built-in data providers in libmxnet +----------------------------------- + +.. function:: ImageRecordIter(...) + + Can also be called with the alias ``ImageRecordProvider``. + Create iterator for dataset packed in recordio. + + :param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data. + :param Base.Symbol label_name: keyword argument, default ``:softmax_label``. The name of the label. Could be ``nothing`` if no label is presented in this dataset. + + :param path_imglist: Dataset Param: Path to image list. + :type path_imglist: string, optional, default='' + + + :param path_imgrec: Dataset Param: Path to image record file. + :type path_imgrec: string, optional, default='./data/imgrec.rec' + + + :param label_width: Dataset Param: How many labels for an image. + :type label_width: int, optional, default='1' + + + :param data_shape: Dataset Param: Shape of each instance generated by the DataIter. + :type data_shape: Shape(tuple), required + + + :param preprocess_threads: Backend Param: Number of thread to do preprocessing. + :type preprocess_threads: int, optional, default='4' + + + :param verbose: Auxiliary Param: Whether to output parser information. + :type verbose: boolean, optional, default=True + + + :param num_parts: partition the data into multiple parts + :type num_parts: int, optional, default='1' + + + :param part_index: the index of the part will read + :type part_index: int, optional, default='0' + + + :param shuffle: Augmentation Param: Whether to shuffle data. 
+ :type shuffle: boolean, optional, default=False + + + :param seed: Augmentation Param: Random Seed. + :type seed: int, optional, default='0' + + + :param batch_size: Batch Param: Batch size. + :type batch_size: int (non-negative), required + + + :param round_batch: Batch Param: Use round robin to handle overflow batch. + :type round_batch: boolean, optional, default=True + + + :param prefetch_buffer: Backend Param: Number of prefetched parameters + :type prefetch_buffer: , optional, default=4 + + + :param rand_crop: Augmentation Param: Whether to random crop on the image + :type rand_crop: boolean, optional, default=False + + + :param crop_y_start: Augmentation Param: Where to nonrandom crop on y. + :type crop_y_start: int, optional, default='-1' + + + :param crop_x_start: Augmentation Param: Where to nonrandom crop on x. + :type crop_x_start: int, optional, default='-1' + + + :param max_rotate_angle: Augmentation Param: rotated randomly in [-max_rotate_angle, max_rotate_angle]. + :type max_rotate_angle: int, optional, default='0' + + + :param max_aspect_ratio: Augmentation Param: denotes the max ratio of random aspect ratio augmentation. + :type max_aspect_ratio: float, optional, default=0 + + + :param max_shear_ratio: Augmentation Param: denotes the max random shearing ratio. + :type max_shear_ratio: float, optional, default=0 + + + :param max_crop_size: Augmentation Param: Maximum crop size. + :type max_crop_size: int, optional, default='-1' + + + :param min_crop_size: Augmentation Param: Minimum crop size. + :type min_crop_size: int, optional, default='-1' + + + :param max_random_scale: Augmentation Param: Maxmum scale ratio. + :type max_random_scale: float, optional, default=1 + + + :param min_random_scale: Augmentation Param: Minimum scale ratio. + :type min_random_scale: float, optional, default=1 + + + :param max_img_size: Augmentation Param: Maxmum image size after resizing. 
+ :type max_img_size: float, optional, default=1e+10 + + + :param min_img_size: Augmentation Param: Minimum image size after resizing. + :type min_img_size: float, optional, default=0 + + + :param rotate: Augmentation Param: Rotate angle. + :type rotate: int, optional, default='-1' + + + :param fill_value: Augmentation Param: Maximum value of illumination variation. + :type fill_value: int, optional, default='255' + + + :param mirror: Augmentation Param: Whether to mirror the image. + :type mirror: boolean, optional, default=False + + + :param rand_mirror: Augmentation Param: Whether to mirror the image randomly. + :type rand_mirror: boolean, optional, default=False + + + :param mean_img: Augmentation Param: Mean Image to be subtracted. + :type mean_img: string, optional, default='' + + + :param mean_r: Augmentation Param: Mean value on R channel. + :type mean_r: float, optional, default=0 + + + :param mean_g: Augmentation: Mean value on G channel. + :type mean_g: float, optional, default=0 + + + :param mean_b: Augmentation: Mean value on B channel. + :type mean_b: float, optional, default=0 + + + :param scale: Augmentation Param: Scale in color space. + :type scale: float, optional, default=1 + + + :param max_random_contrast: Augmentation Param: Maximum ratio of contrast variation. + :type max_random_contrast: float, optional, default=0 + + + :param max_random_illumination: Augmentation Param: Maximum value of illumination variation. + :type max_random_illumination: float, optional, default=0 + + :return: the constructed :class:`MXDataProvider`. + + + +.. function:: MNISTIter(...) + + Can also be called with the alias ``MNISTProvider``. + Create iterator for MNIST hand-written digit number recognition dataset. + + :param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data. + :param Base.Symbol label_name: keyword argument, default ``:softmax_label``. The name of the label. Could be ``nothing`` if no label is presented in this dataset. 
+ + :param image: Dataset Param: Mnist image path. + :type image: string, optional, default='./train-images-idx3-ubyte' + + + :param label: Dataset Param: Mnist label path. + :type label: string, optional, default='./train-labels-idx1-ubyte' + + + :param batch_size: Batch Param: Batch Size. + :type batch_size: int, optional, default='128' + + + :param shuffle: Augmentation Param: Whether to shuffle data. + :type shuffle: boolean, optional, default=True + + + :param flat: Augmentation Param: Whether to flat the data into 1D. + :type flat: boolean, optional, default=False + + + :param seed: Augmentation Param: Random Seed. + :type seed: int, optional, default='0' + + + :param silent: Auxiliary Param: Whether to print out data info. + :type silent: boolean, optional, default=False + + + :param num_parts: partition the data into multiple parts + :type num_parts: int, optional, default='1' + + + :param part_index: the index of the part will read + :type part_index: int, optional, default='0' + + + :param prefetch_buffer: Backend Param: Number of prefetched parameters + :type prefetch_buffer: , optional, default=4 + + :return: the constructed :class:`MXDataProvider`. + + + + + diff --git a/docs/api/symbol.rst b/docs/api/symbol.rst index 457b462982ad..2c7df712c2f8 100644 --- a/docs/api/symbol.rst +++ b/docs/api/symbol.rst @@ -48,9 +48,9 @@ Public APIs :param act_type: Activation function to be applied. :type act_type: {'relu', 'sigmoid', 'tanh'}, required - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -71,9 +71,9 @@ Public APIs :param momentum: Momentum for moving average :type momentum: float, optional, default=0.1 - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. 
- :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -86,9 +86,9 @@ Public APIs :param data: Input data. :type data: Symbol - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -103,9 +103,9 @@ Public APIs :param num_args: Number of inputs to be concated. :type num_args: int, required - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -154,9 +154,9 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=False - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -173,9 +173,9 @@ Public APIs :param p: Fraction of the input that gets dropped out at training time :type p: float, optional, default=0.5 - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -190,9 +190,9 @@ Public APIs :param num_args: Number of inputs to be sumed. :type num_args: int, required - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -205,9 +205,9 @@ Public APIs :param data: Input data to flatten. 
:type data: Symbol - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -236,9 +236,9 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=False - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -267,9 +267,9 @@ Public APIs :param nsize: normalization window width in elements. :type nsize: int (non-negative), required - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -298,9 +298,9 @@ Public APIs :param upper_bound: Upper bound of random slope. (For rrelu only) :type upper_bound: float, optional, default=0.334 - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -317,9 +317,9 @@ Public APIs :param label: Input label to function. :type label: Symbol - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -337,9 +337,9 @@ Public APIs :param label: Input label to function. :type label: Symbol - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. 
`:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -368,9 +368,9 @@ Public APIs :param pad: pad for pooling: (y, x) :type pad: Shape(tuple), optional, default=(0, 0) - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -387,9 +387,9 @@ Public APIs :param target_shape: Target new shape :type target_shape: Shape(tuple), required - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -402,9 +402,9 @@ Public APIs :param num_outputs: Number of outputs to be sliced. :type num_outputs: int, required - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -425,9 +425,9 @@ Public APIs :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -440,9 +440,9 @@ Public APIs :param src: Source symbolic input to the function :type src: Symbol - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. 
+ :return: the constructed :class:`Symbol`. @@ -455,9 +455,9 @@ Public APIs :param src: Source symbolic input to the function :type src: Symbol - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -473,9 +473,9 @@ Internal APIs Perform an elementwise div. - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -485,9 +485,9 @@ Internal APIs Perform an elementwise minus. - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -497,9 +497,9 @@ Internal APIs Perform an elementwise mul. - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. @@ -509,9 +509,9 @@ Internal APIs Perform an elementwise plus. - :param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. - :return: The constructed :class:`Symbol`. + :return: the constructed :class:`Symbol`. 
diff --git a/docs/build-api.jl b/docs/build-api.jl index 0d01e4f84ec8..0aed51a9a022 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -43,21 +43,28 @@ function embed_mxnet_api(output_filename::AbstractString, key::AbstractString, g names_pub, names_pri = sort_api_names(keys(docs)) docs_pub = join(map(gen_doc, names_pub), "\n\n") docs_pri = join(map(gen_doc, names_pri), "\n\n") - docstrings = """ - Public APIs - ^^^^^^^^^^^ - """ * docs_pub + if isempty(names_pri) + docstrings = "" + else + docstrings = """ + Public APIs + ^^^^^^^^^^^ + """ + end + docstrings *= docs_pub - docstrings *= """ + if !isempty(names_pri) + docstrings *= """ - Internal APIs - ^^^^^^^^^^^^^ + Internal APIs + ^^^^^^^^^^^^^ - .. note:: + .. note:: - Document and signatures for internal API functions might be incomplete. + Document and signatures for internal API functions might be incomplete. - """ * docs_pri + """ * docs_pri + end key = mx.format(mx.DOC_EMBED_ANCHOR, key) println(io, replace(contents, key, docstrings)) @@ -80,3 +87,4 @@ extract_doc("model.rst", "model.jl") extract_doc("optimizer.rst", "optimizer.jl") extract_doc("io.rst", "io.jl") +embed_mxnet_api("io.rst", "io", mx._import_io_iterators) diff --git a/src/io.jl b/src/io.jl index 9f4ad67d735b..52a0a8680a1f 100644 --- a/src/io.jl +++ b/src/io.jl @@ -103,17 +103,12 @@ abstract AbstractDataProvider =# abstract AbstractDataProviderState -"""A tuple of (slice, NDArray). Usually each NDArray resides on a different device, and each - slice describe which part of a larger piece of data should goto that device. -""" -typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} - #=doc .. class:: AbstractDataBatch Base type for a data mini-batch. It should implement the following interfaces: - .. function:: count_samples(batch) -> Int + .. function:: count_samples(provider, batch) -> Int :param AbstractDataBatch batch: the data batch object. :return: the number of samples in this batch. 
This number should be greater than 0, but @@ -125,15 +120,37 @@ typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} :param AbstractDataProvider provider: the data provider. :param AbstractDataBatch batch: the data batch object. :return: a vector of data in this batch, should be in the same order as declared in - :func:`provide_data() `. The last dimension - of each :class:`NDArray` should match the value returned by :func:`count_samples`. + :func:`provide_data() `. + + The last dimension of each :class:`NDArray` should always match the batch_size, even when + :func:`count_samples` returns a value less than the batch size. In this case, + the data provider is free to pad the remaining contents with any value. .. function:: get_label(provider, batch) -> Vector{NDArray} :param AbstractDataProvider provider: the data provider. :param AbstractDataBatch batch: the data batch object. :return: a vector of labels in this batch. Similar to :func:`get_data`. + + + The following function will be automatically defined. They are primarily useful for debugging + and testing. + + .. function:: get(provider, batch, name) -> NDArray + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :param Base.Symbol name: the name of the data to get, should be one of the names + provided in either :func:`provide_data() ` + or :func:`provide_label() `. + :return: the corresponding data array corresponding to that name. =# +abstract AbstractDataBatch + +"""A tuple of (slice, NDArray). Usually each NDArray resides on a different device, and each + slice describe which part of a larger piece of data should goto that device. +""" +typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} """Root type for data batch @@ -167,7 +184,6 @@ Return the number of *dummy samples* in this mini-batch. The Batch type should have a field named `provider` pointing to the underlying provider. 
Helper functions `get_data` and `get_label` (mainly for debug purpose) will be able to use this. """ -abstract AbstractDataBatch function _get_data_or_label(batch::AbstractDataBatch, provide_func::Function, loader::Function) data_shapes = provide_func(batch.provider) @@ -343,11 +359,11 @@ end ################################################################################ -# MXDataProvider -################################################################################ +#=doc +.. class:: MXDataProvider -"""Wrapper of built-in `libmxnet` data iterators. -""" + A data provider that wrap built-in data iterators from libmxnet. +=# type MXDataProvider <: AbstractDataProvider handle :: MX_DataIterHandle data_shape :: Vector{Tuple{Base.Symbol, Tuple}} @@ -375,7 +391,7 @@ function _get_label(handle :: MX_DataIterHandle) end function MXDataProvider(handle :: MX_DataIterHandle; - data_name :: Union{Base.Symbol,Void}=:data, + data_name :: Base.Symbol=:data, label_name :: Union{Base.Symbol,Void}=:softmax_label, kwargs...) 
# for convenience, we ignore the rest keyword arguments # init iterator, load the first batch and get shapes @@ -387,7 +403,6 @@ function MXDataProvider(handle :: MX_DataIterHandle; else label_shape = Tuple{Base.Symbol, Tuple}[] end - _reset_data_iter(handle) MXDataProvider(handle, data_shape, label_shape, data_shape[1][2][end]) end @@ -399,8 +414,7 @@ get_batch_size(provider::MXDataProvider) = provider.batch_size type MXDataProviderState <: AbstractDataProviderState has_next :: Bool end -type MXDataBatch <: AbstractDataBatch - provider :: MXDataProvider +immutable MXDataBatch <: AbstractDataBatch end function Base.eltype(provider :: MXDataProvider) @@ -418,29 +432,25 @@ function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) return (MXDataBatch(provider), state) end -function _load_general!(batch :: MXDataBatch, loader :: Function, targets :: Vector{Vector{SlicedNDArray}}) - @assert length(targets) == 1 - src = loader(batch.provider.handle) - for (idx, target) in targets[1] - copy!(target, slice(src, idx)) - end -end - -function load_data!(batch :: MXDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - _load_general!(batch, _get_data, targets) +function get_data(provider :: MXDataProvider, batch :: MXDataBatch) + return NDArray[_get_data(provider.handle)] end -function load_label!(batch :: MXDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - _load_general!(batch, _get_label, targets) +function get_label(provider :: MXDataProvider, batch :: MXDataBatch) + return NDArray[_get_label(provider.handle)] end - -function get_pad(batch :: MXDataBatch) +function count_samples(provider :: MXDataProvider, batch :: MXDataBatch) ref_pad = Ref{Cint}(0) @mxcall(:MXDataIterGetPadNum, (MX_handle, Ref{Cint}), batch.provider.handle, ref_pad) - return Int(ref_pad[]) + return provider.batch_size - Int(ref_pad[]) end +#=doc +Built-in data providers in libmxnet +----------------------------------- -function _define_data_iter_creator(hdr :: MX_handle) 
+**autogen:EMBED:io:EMBED:autogen** +=# +function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) ref_name = Ref{char_p}(0) ref_desc = Ref{char_p}(0) ref_narg = Ref{MX_uint}(0) @@ -453,6 +463,22 @@ function _define_data_iter_creator(hdr :: MX_handle) hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) iter_name = symbol(bytestring(ref_name[])) + + if gen_docs + if endswith(string(iter_name), "Iter") + f_desc = "Can also be called with the alias ``$(string(iter_name)[1:end-4] * "Provider")``.\n" + else + f_desc = "" + end + f_desc *= bytestring(ref_desc[]) * "\n\n" + f_desc *= ":param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data.\n" + f_desc *= ":param Base.Symbol label_name: keyword argument, default ``:softmax_label``. " * + "The name of the label. Could be ``nothing`` if no label is presented in this dataset.\n\n" + f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) + f_desc *= ":return: the constructed :class:`MXDataProvider`." + return (iter_name, f_desc) + end + defun = quote function $iter_name(; kwargs...) 
arg_keys = AbstractString[string(k) for (k,v) in kwargs] @@ -466,7 +492,6 @@ function _define_data_iter_creator(hdr :: MX_handle) end end eval(defun) - # TODO: add docstring # add an alias XXXProvider => XXXIter if endswith(string(iter_name), "Iter") @@ -475,7 +500,7 @@ function _define_data_iter_creator(hdr :: MX_handle) end end -function _import_io_iterators() +function _import_io_iterators(;gen_docs::Bool=false) n_ref = Ref{MX_uint}(0) h_ref = Ref{Ptr{MX_handle}}(0) @mxcall(:MXListDataIters, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) @@ -483,8 +508,19 @@ function _import_io_iterators() n_creators = n_ref[] h_creators = pointer_to_array(h_ref[], n_creators) + if gen_docs + docs = Dict{Base.Symbol, AbstractString}() + end + for i = 1:n_creators creator_hdr = h_creators[i] - _define_data_iter_creator(creator_hdr) + ret = _define_data_iter_creator(creator_hdr; gen_docs=gen_docs) + if gen_docs + docs[ret[1]] = ret[2] + end + end + + if gen_docs + return docs end end diff --git a/src/symbol.jl b/src/symbol.jl index ed7e2d77c411..13338602f259 100644 --- a/src/symbol.jl +++ b/src/symbol.jl @@ -308,8 +308,8 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) f_desc *= "This function support variable length positional :class:`Symbol` inputs.\n\n" end f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional.\n\n" - f_desc *= ":return: The constructed :class:`Symbol`.\n\n" + f_desc *= ":param Base.Symbol name: The name of the symbol. (e.g. 
`:my_symbol`), optional.\n\n" + f_desc *= ":return: the constructed :class:`Symbol`.\n\n" return (func_name, f_desc) end From 76fee6ba4748da7bd59c34c0bb37398eb30eeb53 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 13:28:53 -0400 Subject: [PATCH 155/630] refine mxdataiter api --- docs/api/io.rst | 35 ++++++++++++++++-- src/io.jl | 94 ++++++++++++++++++++++++++++++++++++------------- src/model.jl | 14 ++++---- 3 files changed, 109 insertions(+), 34 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index 6d4ed63f8553..1e23217ace3b 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -134,8 +134,7 @@ and split it into mini-batches so that the model can consume the data in a unifo :return: a vector of labels in this batch. Similar to :func:`get_data`. - The following function will be automatically defined. They are primarily useful for debugging - and testing. + The following utility functions will be automatically defined. .. function:: get(provider, batch, name) -> NDArray @@ -146,6 +145,38 @@ and split it into mini-batches so that the model can consume the data in a unifo or :func:`provide_label() `. :return: the corresponding data array corresponding to that name. + .. function:: load_data!(provider, batch, targets) + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :param targets: the targets to load data into. + :type targets: Vector{Vector{SlicedNDArray}} + + The targets is a list of the same length as number of data provided by this provider. + Each element in the list is a ``Vector{SlicedNDArray}``. This vector described a + spliting of this data batch into different slices, each slice is specified by + a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch + that should be loaded into the corresponding *ndarray*. 
+ + This utility function is used in data parallelization, where a mini-batch is splited + and computed on several different devices. + + .. function:: load_label!(provider, batch, targets) + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :param targets: the targets to load label into. + :type targets: Vector{Vector{SlicedNDArray}} + + The same as :func:`load_data!`, except that this is for loading labels. + + + + +.. class:: SlicedNDArray + + A alias type of ``Pair{UnitRange{Int},NDArray}``. + diff --git a/src/io.jl b/src/io.jl index 52a0a8680a1f..2c694ef316f6 100644 --- a/src/io.jl +++ b/src/io.jl @@ -133,8 +133,7 @@ abstract AbstractDataProviderState :return: a vector of labels in this batch. Similar to :func:`get_data`. - The following function will be automatically defined. They are primarily useful for debugging - and testing. + The following utility functions will be automatically defined. .. function:: get(provider, batch, name) -> NDArray @@ -144,14 +143,59 @@ abstract AbstractDataProviderState provided in either :func:`provide_data() ` or :func:`provide_label() `. :return: the corresponding data array corresponding to that name. + + .. function:: load_data!(provider, batch, targets) + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :param targets: the targets to load data into. + :type targets: Vector{Vector{SlicedNDArray}} + + The targets is a list of the same length as number of data provided by this provider. + Each element in the list is a list of :class:`SlicedNDArray`. This list described a + spliting scheme of this data batch into different slices, each slice is specified by + a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch + that should be loaded into the corresponding *ndarray*. 
+ + This utility function is used in data parallelization, where a mini-batch is splited + and computed on several different devices. + + .. function:: load_label!(provider, batch, targets) + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :param targets: the targets to load label into. + :type targets: Vector{Vector{SlicedNDArray}} + + The same as :func:`load_data!`, except that this is for loading labels. =# abstract AbstractDataBatch -"""A tuple of (slice, NDArray). Usually each NDArray resides on a different device, and each - slice describe which part of a larger piece of data should goto that device. -""" +#=doc +.. class:: SlicedNDArray + + A alias type of ``Tuple{UnitRange{Int},NDArray}``. +=# typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} +function _load_general!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, + targets :: Vector{Vector{SlicedNDArray}}, loader::Function) + data = loader(provider, batch) + for (d_src, d_targets) in zip(data, targets) + for (slice_idx, d_dst) in d_targets + copy!(d_dst, slice(d_src, slice_idx)) + end + end +end +function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, + targets :: Vector{Vector{SlicedNDArray}}) + _load_general!(provider, batch, targets, get_data) +end +function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, + targets :: Vector{Vector{SlicedNDArray}}) + _load_general!(provider, batch, targets, get_label) +end + """Root type for data batch A data batch must implement the following interface function to actually provide the data and label. @@ -185,25 +229,25 @@ The Batch type should have a field named `provider` pointing to the underlying p `get_data` and `get_label` (mainly for debug purpose) will be able to use this. 
""" -function _get_data_or_label(batch::AbstractDataBatch, provide_func::Function, loader::Function) - data_shapes = provide_func(batch.provider) - data_arrays = [mx.empty(x[2]) for x in data_shapes] - batch_size = get_batch_size(batch.provider) - data_arrays_fake_slice = [SlicedNDArray[(1:batch_size, x)] for x in data_arrays] - loader(batch, data_arrays_fake_slice) - - if length(data_arrays) == 1 - return data_arrays[1] - else - return data_arrays - end -end -function get_data(batch :: AbstractDataBatch) - _get_data_or_label(batch, provide_data, load_data!) -end -function get_label(batch :: AbstractDataBatch) - _get_data_or_label(batch, provide_label, load_label!) -end +#function _get_data_or_label(batch::AbstractDataBatch, provide_func::Function, loader::Function) +# data_shapes = provide_func(batch.provider) +# data_arrays = [mx.empty(x[2]) for x in data_shapes] +# batch_size = get_batch_size(batch.provider) +# data_arrays_fake_slice = [SlicedNDArray[(1:batch_size, x)] for x in data_arrays] +# loader(batch, data_arrays_fake_slice) +# +# if length(data_arrays) == 1 +# return data_arrays[1] +# else +# return data_arrays +# end +#end +#function get_data(batch :: AbstractDataBatch) +# _get_data_or_label(batch, provide_data, load_data!) +#end +#function get_label(batch :: AbstractDataBatch) +# _get_data_or_label(batch, provide_label, load_label!) 
+#end ################################################################################ # ArrayDataProvider @@ -429,7 +473,7 @@ function Base.done(provider :: MXDataProvider, state :: MXDataProviderState) return !state.has_next end function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) - return (MXDataBatch(provider), state) + return (MXDataBatch(), state) end function get_data(provider :: MXDataProvider, batch :: MXDataBatch) diff --git a/src/model.jl b/src/model.jl index 5b42278f8db2..d49aca6c040a 100644 --- a/src/model.jl +++ b/src/model.jl @@ -186,12 +186,12 @@ function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::B data_arrays = [SlicedNDArray[(1:batch_size, self.pred_exec.arg_dict[name])] for name in data_names] output_list = [Array{MX_float}[] for i=1:length(self.pred_exec.outputs)] for batch in data - load_data!(batch, data_arrays) + load_data!(data, batch, data_arrays) forward(self.pred_exec, is_train=false) if isa(callback, Void) # no callback, accumulate the data and return at the end for (o_list, o_nd) in zip(output_list, self.pred_exec.outputs) - push!(o_list, copy(slice(o_nd, 1:batch_size-get_pad(batch)))) + push!(o_list, copy(slice(o_nd, 1:count_samples(data, batch)))) end else outputs = self.pred_exec.outputs @@ -403,8 +403,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) for batch in data - load_data!(batch, data_arrays) - load_label!(batch, label_arrays) + load_data!(data, batch, data_arrays) + load_label!(data, batch, label_arrays) # forward and backward for (texec, islice) in zip(train_execs, slices) @@ -453,7 +453,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) # update evaluation metric on training set - load_label!(batch, cpu_label_arrays_full_slice) + load_label!(data, batch, 
cpu_label_arrays_full_slice) update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) end # end of one epoch @@ -474,7 +474,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra reset!(opts.eval_metric) for batch in opts.eval_data - load_data!(batch, data_arrays) + load_data!(opts.eval_data, batch, data_arrays) # forward and backward for (texec, islice) in zip(train_execs, slices) @@ -485,7 +485,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra copy!(slice(cpu_out, islice), dev_out) end end - load_label!(batch, cpu_label_arrays_full_slice) + load_label!(opts.eval_data, batch, cpu_label_arrays_full_slice) update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) end From 5e03d2783a298186ea47a4c70d16137dde6d2678 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 18:37:44 -0400 Subject: [PATCH 156/630] fix unittest for new data IO api --- docs/api/io.rst | 29 +++++++- src/io.jl | 176 +++++++++++++++++++++++++++++++------------- src/ndarray.jl | 10 +++ test/unittest/io.jl | 19 +++-- 4 files changed, 170 insertions(+), 64 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index 1e23217ace3b..2e8a62dd7a08 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -95,6 +95,22 @@ and split it into mini-batches so that the model can consume the data in a unifo With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation of the built-in :class:`MXDataProvider` for example. + .. caution:: + + Please do not use the one data provider simultaneously in two different places, either in parallel, + or in a nested loop. For example, the behavior for the following code is undefined + + .. code-block:: julia + + for batch in data + # updating the parameters + + # now let's test the performance on the training set + for b2 in data + # ... 
+ end + end + @@ -153,8 +169,8 @@ and split it into mini-batches so that the model can consume the data in a unifo :type targets: Vector{Vector{SlicedNDArray}} The targets is a list of the same length as number of data provided by this provider. - Each element in the list is a ``Vector{SlicedNDArray}``. This vector described a - spliting of this data batch into different slices, each slice is specified by + Each element in the list is a list of :class:`SlicedNDArray`. This list described a + spliting scheme of this data batch into different slices, each slice is specified by a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch that should be loaded into the corresponding *ndarray*. @@ -175,7 +191,14 @@ and split it into mini-batches so that the model can consume the data in a unifo .. class:: SlicedNDArray - A alias type of ``Pair{UnitRange{Int},NDArray}``. + A alias type of ``Tuple{UnitRange{Int},NDArray}``. + + + + +.. class:: ArrayDataProvider + + A convenient tool to iterate :class:`NDArray` or Julia ``Array``. diff --git a/src/io.jl b/src/io.jl index 2c694ef316f6..8fae4bcbb15b 100644 --- a/src/io.jl +++ b/src/io.jl @@ -93,6 +93,22 @@ and split it into mini-batches so that the model can consume the data in a unifo With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation of the built-in :class:`MXDataProvider` for example. + + .. caution:: + + Please do not use the one data provider simultaneously in two different places, either in parallel, + or in a nested loop. For example, the behavior for the following code is undefined + + .. code-block:: julia + + for batch in data + # updating the parameters + + # now let's test the performance on the training set + for b2 in data + # ... 
+ end + end =# abstract AbstractDataProvider @@ -196,6 +212,21 @@ function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatc _load_general!(provider, batch, targets, get_label) end +import Base.get +function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name :: Base.Symbol) + for (idx, (k, s)) in enumerate(provide_data(provider)) + if name == k + return get_data(provider, batch)[idx] + end + end + for (idx, (k, s)) in enumerate(provide_label(provider)) + if name == k + return get_label(provider, batch)[idx] + end + end + error("$name is not provided by this data provider") +end + """Root type for data batch A data batch must implement the following interface function to actually provide the data and label. @@ -250,9 +281,11 @@ The Batch type should have a field named `provider` pointing to the underlying p #end ################################################################################ -# ArrayDataProvider -################################################################################ -"A convenient tool to iterate `NDArray` or Julia `Array`" +#=doc +.. class:: ArrayDataProvider + + A convenient tool to iterate :class:`NDArray` or Julia ``Array``. +=# type ArrayDataProvider <: AbstractDataProvider data_arrays :: Vector{Array{MX_float}} data_names :: Vector{Base.Symbol} @@ -263,8 +296,10 @@ type ArrayDataProvider <: AbstractDataProvider shuffle :: Bool data_padding :: MX_float label_padding :: MX_float -end + data_batch :: Vector{NDArray} + label_batch :: Vector{NDArray} +end # Julia's type system is sometimes very frustrating. You cannot specify a function # with argument Vector{Pair} to expect to be matched when calling with the parameter @@ -272,40 +307,43 @@ end # results, about the parametric type in the Pair{T1,T2} type, thus does not match the # generic Pair type. In general, Int <: Number but Vector{Int} <: Vector{Number} is not # true. So let us just use Any here... 
-function ArrayDataProvider(data::Any; batch_size::Int=1, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) +function ArrayDataProvider(data::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle, data_padding=data_padding, label_padding=label_padding) end -function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) +function ArrayDataProvider(data::Any, label::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) + asarr{T}(arr :: Array{T}) = convert(Array{MX_float}, arr) + asarr(arr :: NDArray) = copy(arr) + if isa(data, Union{NDArray, Array}) && eltype(data) <: Real data_names = [:data] - data_arrays = Array{MX_float}[data] + data_arrays = Array{MX_float}[asarr(data)] elseif isa(data, Pair) @assert isa(data.first, Base.Symbol) && isa(data.second, Union{NDArray, Array}) data_names = [data.first] - data_arrays = Array{MX_float}[data.second] + data_arrays = Array{MX_float}[asarr(data.second)] elseif isa(data, Vector) || isa(data, Tuple) map(data) do d @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) end data_names = Base.Symbol[d.first for d in data] - data_arrays = Array{MX_float}[d.second for d in data] + data_arrays = Array{MX_float}[asarr(d.second) for d in data] else error("Invalid data argument type") end if isa(label, Union{NDArray, Array}) && eltype(label) <: Real label_names = [:softmax_label] - label_arrays = Array{MX_float}[label] + label_arrays = Array{MX_float}[asarr(label)] elseif isa(label, Pair) @assert isa(label.first, Base.Symbol) && isa(label.second, Union{NDArray, Array}) label_names = [label.first] - label_arrays = Array{MX_float}[label.second] + label_arrays = Array{MX_float}[asarr(label.second)] elseif isa(label, Vector) || isa(label, Tuple) map(label) do d @assert isa(d, 
Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) end label_names = Base.Symbol[d.first for d in label] - label_arrays = Array{MX_float}[d.second for d in label] + label_arrays = Array{MX_float}[asarr(d.second) for d in label] else error("Invalid label argument type") end @@ -321,8 +359,31 @@ function ArrayDataProvider(data::Any, label::Any; batch_size::Int=1, shuffle::Bo "Number of samples in $(label_names[i]) is mismatch with $(data_names[1])") end + if batch_size == 0 + batch_size = sample_count + end + @assert 0 < batch_size <= sample_count + + function gen_batch_nds(arrs :: Vector{Array{MX_float}}, bsize :: Int) + map(arrs) do arr + shape = size(arr) + empty(shape[1:end-1]..., bsize) + end + end + + data_batch = gen_batch_nds(data_arrays, batch_size) + label_batch = gen_batch_nds(label_arrays, batch_size) + + # reshape data and labels into 2D tensors, so that it is easier to work with them + data_arrays = map(data_arrays) do arr + reshape(arr, prod(size(arr)[1:end-1]), size(arr)[end]) + end + label_arrays = map(label_arrays) do arr + reshape(arr, prod(size(arr)[1:end-1]), size(arr)[end]) + end + ArrayDataProvider(data_arrays, data_names, label_arrays, label_names, batch_size, - sample_count, shuffle, data_padding, label_padding) + sample_count, shuffle, data_padding, label_padding, data_batch, label_batch) end function provide_data(provider::ArrayDataProvider) @@ -341,17 +402,12 @@ function Base.eltype(provider :: ArrayDataProvider) ArrayDataProviderState end -function _shuffle_array(arr::Array, idx::Vector{Int}) - shape = size(arr) - colons = [Colon() for c = 1:length(shape)-1] - getindex(arr, colons..., idx) -end function Base.start(provider :: ArrayDataProvider) if provider.shuffle # re-shuffle all data idx_perm = randperm(provider.sample_count) - provider.data_arrays = map(x->_shuffle_array(x,idx_perm), provider.data_arrays) - provider.label_arrays = map(x->_shuffle_array(x,idx_perm), provider.label_arrays) + provider.data_arrays = 
map(x->x[:,idx_perm], provider.data_arrays) + provider.label_arrays = map(x->x[:,idx_perm], provider.label_arrays) end return ArrayDataProviderState(1) @@ -362,43 +418,61 @@ function Base.done(provider::ArrayDataProvider, state :: ArrayDataProviderState) end immutable ArrayDataBatch <: AbstractDataBatch - provider :: ArrayDataProvider - idx :: UnitRange{Int} + idx :: UnitRange{Int} end function Base.next(provider :: ArrayDataProvider, state :: ArrayDataProviderState) idx = state.curr_idx:min(state.curr_idx+provider.batch_size-1, provider.sample_count) - return (ArrayDataBatch(provider, idx), ArrayDataProviderState(idx.stop+1)) -end - -function get_pad(batch :: ArrayDataBatch) - return batch.provider.batch_size - length(batch.idx) -end - -function _load_general!(batch :: ArrayDataBatch, sources :: Vector{Array{MX_float}}, - targets :: Vector{Vector{SlicedNDArray}}, pad_val::Real) - @assert length(sources) == length(targets) - for (src, tgt) in zip(sources, targets) - src_colons = [Colon() for i = 1:ndims(src)-1] - for (slice_idx, dst) in tgt - if slice_idx.start > length(batch.idx) - dst[:] = pad_val - else - slice_idx0 = slice_idx.start:min(slice_idx.stop, length(batch.idx)) - copy!(dst[1:length(slice_idx0)], getindex(src, src_colons..., batch.idx[slice_idx0])) - if length(slice_idx0) < length(slice_idx) - # need padding - dst[length(slice_idx0)+1:length(slice_idx)] = pad_val - end - end - end - end + return (ArrayDataBatch(idx), ArrayDataProviderState(idx.stop+1)) end -function load_data!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - _load_general!(batch, batch.provider.data_arrays, targets, batch.provider.data_padding) + +function count_samples(provider :: ArrayDataProvider, batch :: ArrayDataBatch) + return length(batch.idx) end -function load_label!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) - _load_general!(batch, batch.provider.label_arrays, targets, batch.provider.label_padding) + +function get_data(provider :: 
ArrayDataProvider, batch :: ArrayDataBatch) + for (src, dst) in zip(provider.data_arrays, provider.data_batch) + copy_ignore_shape!(dst[1:length(batch.idx)], src[:, batch.idx]) + if length(batch.idx) < provider.batch_size + dst[length(batch.idx)+1:provider.batch_size] = provider.data_padding + end + end + return provider.data_batch end +function get_label(provider :: ArrayDataProvider, batch :: ArrayDataBatch) + for (src, dst) in zip(provider.label_arrays, provider.label_batch) + copy_ignore_shape!(dst[1:length(batch.idx)], src[:, batch.idx]) + if length(batch.idx) < provider.batch_size + dst[length(batch.idx)+1:provider.batch_size] = provider.label_padding + end + end + return provider.label_batch +end + +#function _load_general!(batch :: ArrayDataBatch, sources :: Vector{Array{MX_float}}, +# targets :: Vector{Vector{SlicedNDArray}}, pad_val::Real) +# @assert length(sources) == length(targets) +# for (src, tgt) in zip(sources, targets) +# src_colons = [Colon() for i = 1:ndims(src)-1] +# for (slice_idx, dst) in tgt +# if slice_idx.start > length(batch.idx) +# dst[:] = pad_val +# else +# slice_idx0 = slice_idx.start:min(slice_idx.stop, length(batch.idx)) +# copy!(dst[1:length(slice_idx0)], getindex(src, src_colons..., batch.idx[slice_idx0])) +# if length(slice_idx0) < length(slice_idx) +# # need padding +# dst[length(slice_idx0)+1:length(slice_idx)] = pad_val +# end +# end +# end +# end +#end +#function load_data!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) +# _load_general!(batch, batch.provider.data_arrays, targets, batch.provider.data_padding) +#end +#function load_label!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) +# _load_general!(batch, batch.provider.label_arrays, targets, batch.provider.label_padding) +#end diff --git a/src/ndarray.jl b/src/ndarray.jl index 347346e6751a..82293d2fdbe9 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -328,6 +328,16 @@ function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) 
return dst end +function copy_ignore_shape!{T<:Real}(dst :: NDArray, src :: Array{T}) + @assert dst.writable + @assert length(dst) == length(src) + src = convert(Array{MX_float}, src) # this might involve copying + @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), + dst.handle, pointer(src), length(src)) + return dst +end + + #=doc .. function:: copy(arr :: NDArray) diff --git a/test/unittest/io.jl b/test/unittest/io.jl index cffb00417a57..d34fb674b689 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -27,8 +27,8 @@ function test_mnist() data_targets = [[(1:batch_size, data_array)] for i = 1:1] label_targets = [[(1:batch_size, label_array)] for i = 1:1] - mx.load_data!(batch, data_targets) - mx.load_label!(batch, label_targets) + mx.load_data!(mnist_provider, batch, data_targets) + mx.load_label!(mnist_provider, batch, label_targets) true_labels = [5,0,4,1,9,2,1,3,1,4] # the first 10 labels in MNIST train got_labels = Int[copy(label_array)...] @@ -53,11 +53,11 @@ function test_arrays_impl(data::Vector, label::Vector, provider::mx.ArrayDataPro for (idx, batch) in zip(idx_all, provider) data_batch = [x[[Colon() for i=1:ndims(x)-1]..., idx:min(idx+batch_size-1,sample_count)] for x in data] data_get = [mx.empty(size(x)[1:end-1]..., batch_size) for x in data] - mx.load_data!(batch, [[(1:batch_size, x)] for x in data_get]) + mx.load_data!(provider, batch, [[(1:batch_size, x)] for x in data_get]) for (d_real, d_get) in zip(data_batch, data_get) @test reldiff(d_real, copy(d_get)[[1:n for n in size(d_real)]...]) < 1e-6 - @test mx.get_pad(batch) == batch_size - size(d_real)[end] + @test mx.count_samples(provider, batch) == size(d_real)[end] end end end @@ -97,12 +97,11 @@ function test_arrays_shuffle() data_got = similar(data) label_got = similar(label) for (idx, batch) in zip(idx_all, provider) - data_batch = [(1:batch_size, mx.empty(1,batch_size))] - label_batch = [(1:batch_size, mx.empty(batch_size))] - mx.load_data!(batch, 
typeof(data_batch)[data_batch]) - mx.load_label!(batch, typeof(label_batch)[label_batch]) - data_got[idx:min(idx+batch_size-1,sample_count)] = copy(data_batch[1][2])[1:batch_size-mx.get_pad(batch)] - label_got[idx:min(idx+batch_size-1,sample_count)] = copy(label_batch[1][2])[1:batch_size-mx.get_pad(batch)] + data_batch = mx.get(provider, batch, :data) + label_batch = mx.get(provider, batch, :index) + ns_batch = mx.count_samples(provider, batch) + data_got[idx:idx+ns_batch-1] = copy(data_batch)[1:ns_batch] + label_got[idx:idx+ns_batch-1] = copy(label_batch)[1:ns_batch] end @test label_got != label From ceaa3eb292dbc534e710698e5f0f1c3a6a98ecca Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 18:50:43 -0400 Subject: [PATCH 157/630] fix mlp example to use the updated data io API --- docs/api/io.rst | 27 ++++++++++- examples/mnist/mlp.jl | 2 +- src/io.jl | 106 ++++++++++-------------------------------- 3 files changed, 52 insertions(+), 83 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index 2e8a62dd7a08..076aa47159cf 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -203,9 +203,34 @@ and split it into mini-batches so that the model can consume the data in a unifo +.. function:: ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) + + Construct a data provider from :class:`NDArray` or Julia Arrays. + + :param data: the data, could be + + - a :class:`NDArray`, or a Julia Array. This is equivalent to ``:data => data``. + - a name-data pair, like ``:mydata => array``, where ``:mydata`` is the name of the data + and ``array`` is an :class:`NDArray` or a Julia Array. + - a list of name-data pairs. + + :param label: the same as the ``data`` parameter. When this argument is omitted, the constructed + provider will provide no labels. + :param Int batch_size: the batch size, default is 0, which means treating the whole array as a + single mini-batch. 
+ :param Bool shuffle: turn on if the data should be shuffled at every epoch. + :param Real data_padding: when the mini-batch goes beyond the dataset boundary, there might + be less samples to include than a mini-batch. This value specify a scalar to pad the + contents of all the missing data points. + :param Real label_padding: the same as ``data_padding``, except for the labels. + + + + .. class:: MXDataProvider - A data provider that wrap built-in data iterators from libmxnet. + A data provider that wrap built-in data iterators from libmxnet. See below for + a list of built-in data iterators. diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 03be2f7d336b..857b25f50049 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -43,7 +43,7 @@ probs = mx.predict(model, eval_provider) # collect all labels from eval data labels = Array[] for batch in eval_provider - push!(labels, copy(mx.get_label(batch))) + push!(labels, copy(mx.get(eval_provider, batch, :softmax_label))) end labels = cat(1, labels...) diff --git a/src/io.jl b/src/io.jl index 8fae4bcbb15b..b3ca7e000d40 100644 --- a/src/io.jl +++ b/src/io.jl @@ -227,58 +227,6 @@ function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name error("$name is not provided by this data provider") end -"""Root type for data batch - -A data batch must implement the following interface function to actually provide the data and label. - -```julia -load_data!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) -load_label!(batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) -``` - -Load data and label into targets. The targets is a list of target that the data/label should be -copied into. The order in the list is guaranteed to be the same as returned by `provide_data` and -`provide_label`. Each entry in the list is again a list of `SlicedNDArray`, corresponding the -memory buffer for each device. 
- -The `SlicedNDArray` is used in data parallelization to run different sub-batch on different devices. - -The following function should also be implemented to handle the case when the mini-batch size does not -divide the size of the whole dataset. So in the last mini-batch, the actual data copied might be fewer -than the mini-batch size. This is usually not an issue during the training as the remaining space may -contain the data and label copied during the previous mini-batch are still valid data. However, during -testing, especially when doing feature extraction, we need to be precise about the number of samples -processed. - -```julia -get_pad(batch :: AbstractDataBatch) -``` - -Return the number of *dummy samples* in this mini-batch. - -The Batch type should have a field named `provider` pointing to the underlying provider. Helper functions -`get_data` and `get_label` (mainly for debug purpose) will be able to use this. -""" - -#function _get_data_or_label(batch::AbstractDataBatch, provide_func::Function, loader::Function) -# data_shapes = provide_func(batch.provider) -# data_arrays = [mx.empty(x[2]) for x in data_shapes] -# batch_size = get_batch_size(batch.provider) -# data_arrays_fake_slice = [SlicedNDArray[(1:batch_size, x)] for x in data_arrays] -# loader(batch, data_arrays_fake_slice) -# -# if length(data_arrays) == 1 -# return data_arrays[1] -# else -# return data_arrays -# end -#end -#function get_data(batch :: AbstractDataBatch) -# _get_data_or_label(batch, provide_data, load_data!) -#end -#function get_label(batch :: AbstractDataBatch) -# _get_data_or_label(batch, provide_label, load_label!) -#end ################################################################################ #=doc @@ -301,6 +249,28 @@ type ArrayDataProvider <: AbstractDataProvider label_batch :: Vector{NDArray} end +#=doc +.. 
function:: ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) + + Construct a data provider from :class:`NDArray` or Julia Arrays. + + :param data: the data, could be + + - a :class:`NDArray`, or a Julia Array. This is equivalent to ``:data => data``. + - a name-data pair, like ``:mydata => array``, where ``:mydata`` is the name of the data + and ``array`` is an :class:`NDArray` or a Julia Array. + - a list of name-data pairs. + + :param label: the same as the ``data`` parameter. When this argument is omitted, the constructed + provider will provide no labels. + :param Int batch_size: the batch size, default is 0, which means treating the whole array as a + single mini-batch. + :param Bool shuffle: turn on if the data should be shuffled at every epoch. + :param Real data_padding: when the mini-batch goes beyond the dataset boundary, there might + be less samples to include than a mini-batch. This value specify a scalar to pad the + contents of all the missing data points. + :param Real label_padding: the same as ``data_padding``, except for the labels. +=# # Julia's type system is sometimes very frustrating. 
You cannot specify a function # with argument Vector{Pair} to expect to be matched when calling with the parameter # [:foo => zeros(2,3), :bar => zeros(3)] because the type inference gives very specific @@ -448,39 +418,13 @@ function get_label(provider :: ArrayDataProvider, batch :: ArrayDataBatch) return provider.label_batch end -#function _load_general!(batch :: ArrayDataBatch, sources :: Vector{Array{MX_float}}, -# targets :: Vector{Vector{SlicedNDArray}}, pad_val::Real) -# @assert length(sources) == length(targets) -# for (src, tgt) in zip(sources, targets) -# src_colons = [Colon() for i = 1:ndims(src)-1] -# for (slice_idx, dst) in tgt -# if slice_idx.start > length(batch.idx) -# dst[:] = pad_val -# else -# slice_idx0 = slice_idx.start:min(slice_idx.stop, length(batch.idx)) -# copy!(dst[1:length(slice_idx0)], getindex(src, src_colons..., batch.idx[slice_idx0])) -# if length(slice_idx0) < length(slice_idx) -# # need padding -# dst[length(slice_idx0)+1:length(slice_idx)] = pad_val -# end -# end -# end -# end -#end -#function load_data!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) -# _load_general!(batch, batch.provider.data_arrays, targets, batch.provider.data_padding) -#end -#function load_label!(batch :: ArrayDataBatch, targets :: Vector{Vector{SlicedNDArray}}) -# _load_general!(batch, batch.provider.label_arrays, targets, batch.provider.label_padding) -#end - - ################################################################################ #=doc .. class:: MXDataProvider - A data provider that wrap built-in data iterators from libmxnet. + A data provider that wrap built-in data iterators from libmxnet. See below for + a list of built-in data iterators. 
=# type MXDataProvider <: AbstractDataProvider handle :: MX_DataIterHandle @@ -558,7 +502,7 @@ function get_label(provider :: MXDataProvider, batch :: MXDataBatch) end function count_samples(provider :: MXDataProvider, batch :: MXDataBatch) ref_pad = Ref{Cint}(0) - @mxcall(:MXDataIterGetPadNum, (MX_handle, Ref{Cint}), batch.provider.handle, ref_pad) + @mxcall(:MXDataIterGetPadNum, (MX_handle, Ref{Cint}), provider.handle, ref_pad) return provider.batch_size - Int(ref_pad[]) end From 993be59c8b1a1066daf21a3dbc3c92f8a57ab5cc Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 18:55:39 -0400 Subject: [PATCH 158/630] simplify model a bit with data IO api --- src/io.jl | 11 +++++++++++ src/model.jl | 7 +++---- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/io.jl b/src/io.jl index b3ca7e000d40..ed63a2dbedfe 100644 --- a/src/io.jl +++ b/src/io.jl @@ -212,6 +212,17 @@ function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatc _load_general!(provider, batch, targets, get_label) end +function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{NDArray}) + for (src, dst) in zip(get_data(provider, batch), targets) + copy!(dst, src) + end +end +function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{NDArray}) + for (src, dst) in zip(get_label(provider, batch), targets) + copy!(dst, src) + end +end + import Base.get function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name :: Base.Symbol) for (idx, (k, s)) in enumerate(provide_data(provider)) diff --git a/src/model.jl b/src/model.jl index d49aca6c040a..55ba733b5d13 100644 --- a/src/model.jl +++ b/src/model.jl @@ -183,7 +183,7 @@ function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::B _setup_predictor(self, overwrite; data_shapes...) 
batch_size = get_batch_size(data) - data_arrays = [SlicedNDArray[(1:batch_size, self.pred_exec.arg_dict[name])] for name in data_names] + data_arrays = [self.pred_exec.arg_dict[name] for name in data_names] output_list = [Array{MX_float}[] for i=1:length(self.pred_exec.outputs)] for batch in data load_data!(data, batch, data_arrays) @@ -386,7 +386,6 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra cpu_dev = Context(CPU) cpu_output_arrays = [empty(shape, cpu_dev) for shape in output_shapes] cpu_label_arrays = [empty(shape, cpu_dev) for (name,shape) in provide_label(data)] - cpu_label_arrays_full_slice = [SlicedNDArray[(1:batch_size, x)] for x in cpu_label_arrays] # invoke callbacks on epoch 0 _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) @@ -453,7 +452,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) # update evaluation metric on training set - load_label!(data, batch, cpu_label_arrays_full_slice) + load_label!(data, batch, cpu_label_arrays) update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) end # end of one epoch @@ -485,7 +484,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra copy!(slice(cpu_out, islice), dev_out) end end - load_label!(opts.eval_data, batch, cpu_label_arrays_full_slice) + load_label!(opts.eval_data, batch, cpu_label_arrays) update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) end From f30aedc7f174a9aa4d3b4ff13e9c527b4a26f33f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 29 Oct 2015 19:43:59 -0400 Subject: [PATCH 159/630] part of the executor API doc --- docs/api/context.rst | 32 ++++++++++++++++++++++++++++ docs/api/executor.rst | 32 ++++++++++++++++++++++++++++ docs/api/initializer.rst | 46 ++++++++++++++++++++++++++++++++++++++-- docs/api/io.rst | 17 ++++++++++++--- docs/api/model.rst | 4 ++-- 
docs/build-api.jl | 4 ++++ docs/index.rst | 2 ++ src/context.jl | 25 ++++++++++++++++++++++ src/executor.jl | 27 +++++++++++++++++++++++ src/initializer.jl | 38 +++++++++++++++++++++++++++++++-- src/io.jl | 14 +++++++++--- src/model.jl | 4 ++-- 12 files changed, 231 insertions(+), 14 deletions(-) create mode 100644 docs/api/context.rst create mode 100644 docs/api/executor.rst diff --git a/docs/api/context.rst b/docs/api/context.rst new file mode 100644 index 000000000000..5230b892c8e5 --- /dev/null +++ b/docs/api/context.rst @@ -0,0 +1,32 @@ + +Context +======= + + + + +.. class:: Context + + A context describes the device type and id on which computation should be carried on. + + + + +.. function:: cpu(dev_id=0) + + :param Int dev_id: the CPU id. + + Get a CPU context with a specific id. ``cpu()`` is usually the default context for many + operations when no context is specified. + + + + +.. function:: gpu(dev_id=0) + + :param Int dev_id: the GPU device id. + + Get a GPU context with a specific id. The K GPUs on a node is typically numbered as 0,...,K-1. + + + diff --git a/docs/api/executor.rst b/docs/api/executor.rst new file mode 100644 index 000000000000..8887d4ce1d24 --- /dev/null +++ b/docs/api/executor.rst @@ -0,0 +1,32 @@ + +Executor +======== + + + + +.. class:: Executor + + An executor is a realization of a symbolic architecture defined by a :class:`Symbol`. + The actual forward and backward computation specified by the network architecture can + be carried out with an executor. + + + + +.. function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) + + Create an :class:`Executor` by binding a :class:`Symbol` to concrete :class:`NDArray`. + + :param Symbol sym: the network architecture describing the computation graph. + :param Context ctx: the context on which the computation should run. + :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. 
Concrete + arrays for all the inputs in the network architecture. The inputs typically include + network parameters (weights, bias, filters, etc.), data and labels. See :func:`list_arguments` + and :func:`infer_shape`. + :param args_grad: TODO + :param aux_states: + :param grad_req: + + + diff --git a/docs/api/initializer.rst b/docs/api/initializer.rst index 63e1a231b187..3f4126721542 100644 --- a/docs/api/initializer.rst +++ b/docs/api/initializer.rst @@ -1,6 +1,8 @@ -Built-in Initializers -===================== +Initializers +============ +Interface +--------- @@ -9,6 +11,24 @@ Built-in Initializers The abstract base class for all initializers. +To define a new initializer, it is +enough to derive a new type, and implement one or more of the following methods: + +.. function:: _init_weight(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + +Or, if full behavior customization is needed, override the following function + +.. function:: call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + + + + +Built-in initializers +--------------------- + @@ -19,9 +39,31 @@ Built-in Initializers +.. function UniformInitializer(scale=0.07) + + Construct a :class:`UniformInitializer` with the specified scale. + + + + .. class:: NormalInitializer Initialize weights according to a univariate Gaussian distribution. + +.. function:: NormalIninitializer(; mu=0, sigma=0.01) + + Construct a :class:`NormalInitializer` with mean ``mu`` and variance ``sigma``. + + + + +.. class:: XaiverInitializer + + The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding + the difficulty of training deep feedforward neuralnetworks*. 
+ + + diff --git a/docs/api/io.rst b/docs/api/io.rst index 076aa47159cf..b601db39c7ec 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -1,6 +1,8 @@ Data Providers ============== +Interface +--------- Data providers are wrappers that load external data, be it images, text, or general tensors, and split it into mini-batches so that the model can consume the data in a uniformed way. @@ -196,6 +198,12 @@ and split it into mini-batches so that the model can consume the data in a unifo +Built-in data providers +----------------------- + + + + .. class:: ArrayDataProvider A convenient tool to iterate :class:`NDArray` or Julia ``Array``. @@ -227,6 +235,12 @@ and split it into mini-batches so that the model can consume the data in a unifo +libmxnet data providers +----------------------- + + + + .. class:: MXDataProvider A data provider that wrap built-in data iterators from libmxnet. See below for @@ -235,9 +249,6 @@ and split it into mini-batches so that the model can consume the data in a unifo -Built-in data providers in libmxnet ------------------------------------ - .. function:: ImageRecordIter(...) Can also be called with the alias ``ImageRecordProvider``. diff --git a/docs/api/model.rst b/docs/api/model.rst index 7593e6e68f55..ab330db9bcfa 100644 --- a/docs/api/model.rst +++ b/docs/api/model.rst @@ -1,6 +1,6 @@ -Built-in Models and Interface -============================= +Models +====== The model API provides convenient high-level interface to do training and predicting on a network described using the symbolic API. 
diff --git a/docs/build-api.jl b/docs/build-api.jl index 0aed51a9a022..b70c480c9d79 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -74,12 +74,16 @@ end ################################################################################# # Build Documents ################################################################################# +extract_doc("context.rst", "context.jl") + extract_doc("ndarray.rst", "ndarray.jl") embed_mxnet_api("ndarray.rst", "ndarray", mx._import_ndarray_functions) extract_doc("symbol.rst", "symbol.jl") embed_mxnet_api("symbol.rst", "symbol", mx._import_atomic_symbol_creators) +extract_doc("executor.rst", "executor.jl") + extract_doc("initializer.rst", "initializer.jl") extract_doc("callback.rst", "callback.jl") extract_doc("model.rst", "model.jl") diff --git a/docs/index.rst b/docs/index.rst index eda80ac1c18d..7520047db11a 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -31,6 +31,7 @@ For more details, see documentation below. Please also checkout the `examples :maxdepth: 1 :caption: API Documentation + api/context api/model api/initializer api/optimizer @@ -38,6 +39,7 @@ For more details, see documentation below. Please also checkout the `examples api/io api/ndarray api/symbol + api/executor Indices and tables ================== diff --git a/src/context.jl b/src/context.jl index 5146d4afcb64..1e96c305fb04 100644 --- a/src/context.jl +++ b/src/context.jl @@ -1,5 +1,14 @@ +#=doc +Context +======= +=# @enum CONTEXT_TYPE CPU=1 GPU=2 CPU_PINNED=3 +#=doc +.. class:: Context + + A context describes the device type and id on which computation should be carried on. +=# immutable Context device_type :: CONTEXT_TYPE device_id :: Int @@ -11,9 +20,25 @@ function Base.show(io :: IO, ctx :: Context) print(io, "$(ctx.device_type)$(ctx.device_id)") end +#=doc +.. function:: cpu(dev_id=0) + + :param Int dev_id: the CPU id. + + Get a CPU context with a specific id. 
``cpu()`` is usually the default context for many + operations when no context is specified. +=# function cpu(dev_id::Int=0) return Context(CPU, dev_id) end + +#=doc +.. function:: gpu(dev_id=0) + + :param Int dev_id: the GPU device id. + + Get a GPU context with a specific id. The K GPUs on a node is typically numbered as 0,...,K-1. +=# function gpu(dev_id::Int=0) return Context(GPU, dev_id) end diff --git a/src/executor.jl b/src/executor.jl index 4d57c1da7b7c..4485bc6dce9a 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -1,3 +1,15 @@ +#=doc +Executor +======== +=# + +#=doc +.. class:: Executor + + An executor is a realization of a symbolic architecture defined by a :class:`Symbol`. + The actual forward and backward computation specified by the network architecture can + be carried out with an executor. +=# type Executor handle :: MX_ExecutorHandle symbol :: Symbol @@ -58,6 +70,21 @@ function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDA return (args_hdr, args_vec) end +#=doc +.. function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) + + Create an :class:`Executor` by binding a :class:`Symbol` to concrete :class:`NDArray`. + + :param Symbol sym: the network architecture describing the computation graph. + :param Context ctx: the context on which the computation should run. + :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete + arrays for all the inputs in the network architecture. The inputs typically include + network parameters (weights, bias, filters, etc.), data and labels. See :func:`list_arguments` + and :func:`infer_shape`. 
+ :param args_grad: TODO + :param aux_states: + :param grad_req: +=# @enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), diff --git a/src/initializer.jl b/src/initializer.jl index 3f830860f586..8f78bfba9367 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -1,12 +1,26 @@ #=doc -Built-in Initializers -===================== +Initializers +============ +Interface +--------- =# #=doc .. class:: AbstractInitializer The abstract base class for all initializers. + +To define a new initializer, it is +enough to derive a new type, and implement one or more of the following methods: + +.. function:: _init_weight(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + +Or, if full behavior customization is needed, override the following function + +.. function:: call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) =# abstract AbstractInitializer @@ -42,6 +56,10 @@ function _init_zero(self :: AbstractInitializer, name :: Base.Symbol, array :: N array[:] = 0 end +#=doc +Built-in initializers +--------------------- +=# #=doc .. class:: UniformInitializer @@ -50,6 +68,11 @@ end immutable UniformInitializer <: AbstractInitializer scale :: AbstractFloat end +#=doc +.. function UniformInitializer(scale=0.07) + + Construct a :class:`UniformInitializer` with the specified scale. 
+=# UniformInitializer() = UniformInitializer(0.07) function _init_weight(self :: UniformInitializer, name :: Base.Symbol, array :: NDArray) @@ -65,12 +88,23 @@ immutable NormalInitializer <: AbstractInitializer μ :: AbstractFloat σ :: AbstractFloat end +#=doc +.. function:: NormalIninitializer(; mu=0, sigma=0.01) + + Construct a :class:`NormalInitializer` with mean ``mu`` and variance ``sigma``. +=# NormalInitializer(; mu=0, sigma=0.01) = NormalInitializer(mu, sigma) function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) randn!(self.μ, self.σ, array) end +#=doc +.. class:: XaiverInitializer + + The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding + the difficulty of training deep feedforward neuralnetworks*. +=# immutable XaiverInitializer <: AbstractInitializer end diff --git a/src/io.jl b/src/io.jl index ed63a2dbedfe..6bbb9a5cf6d5 100644 --- a/src/io.jl +++ b/src/io.jl @@ -1,6 +1,8 @@ #=doc Data Providers ============== +Interface +--------- Data providers are wrappers that load external data, be it images, text, or general tensors, and split it into mini-batches so that the model can consume the data in a uniformed way. @@ -238,6 +240,10 @@ function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name error("$name is not provided by this data provider") end +#=doc +Built-in data providers +----------------------- +=# ################################################################################ #=doc @@ -430,6 +436,11 @@ function get_label(provider :: ArrayDataProvider, batch :: ArrayDataBatch) end +#=doc +libmxnet data providers +----------------------- +=# + ################################################################################ #=doc .. 
class:: MXDataProvider @@ -518,9 +529,6 @@ function count_samples(provider :: MXDataProvider, batch :: MXDataBatch) end #=doc -Built-in data providers in libmxnet ------------------------------------ - **autogen:EMBED:io:EMBED:autogen** =# function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) diff --git a/src/model.jl b/src/model.jl index 55ba733b5d13..b978ec0c6f49 100644 --- a/src/model.jl +++ b/src/model.jl @@ -1,6 +1,6 @@ #=doc -Built-in Models and Interface -============================= +Models +====== The model API provides convenient high-level interface to do training and predicting on a network described using the symbolic API. From 388e91dc6381a3bdd549372c2779405c6372b7c0 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 30 Oct 2015 05:08:10 -0400 Subject: [PATCH 160/630] fix mnist doc links --- docs/tutorial/mnist.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/tutorial/mnist.rst b/docs/tutorial/mnist.rst index 2e5d087a5242..5fe21cde181e 100644 --- a/docs/tutorial/mnist.rst +++ b/docs/tutorial/mnist.rst @@ -71,7 +71,8 @@ the code to construct the data provider into ``mnist-data.jl`` so that it could include("mnist-data.jl") train_provider, eval_provider = get_mnist_providers(batch_size) -If you need to write your own data providers for customized data format, please refer to **TODO**: pointer to data provider API. +If you need to write your own data providers for customized data format, please +refer to :class:`AbstractDataProvider`. Given the architecture and data, we can instantiate an *model* to do the actual training. ``mx.FeedForward`` is the built-in model that is suitable for most feed-forward architectures. When constructing the model, we also specify the *context* on which the computation should be carried out. Because this is a really tiny MLP, we will just run on a single CPU device. 
@@ -249,4 +250,4 @@ Alternatively, when the dataset is huge, one can provide a callback to ``mx.predict``, then the callback function will be invoked with the outputs of each mini-batch. The callback could, for example, write the data to disk for future inspection. In this case, no value is returned from ``mx.predict``. See -also **TODO** provide link to prediction API. +also :func:`predict`. From 319c5f351ecdd6fe3583fb4d4237c24cdbcd25bb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 2 Nov 2015 09:49:29 -0500 Subject: [PATCH 161/630] rename Softmax -> SoftmaxOutput (mxnet/mxnet#434) --- examples/cifar10/cifar10.jl | 2 +- examples/mnist/lenet.jl | 2 +- examples/mnist/mlp.jl | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index c46e2b3bf15c..37cfacd2cec0 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -51,7 +51,7 @@ in5b = simple_factory(in5a, 176, 160) pool = mx.Pooling(data=in5b, pool_type=:avg, kernel=(7,7), name=:global_pool) flatten = mx.Flatten(data=pool, name=:flatten1) fc = mx.FullyConnected(data=flatten, num_hidden=10, name=:fc1) -softmax = mx.Softmax(data=fc, name=:loss) +softmax = mx.SoftmaxOutput(data=fc, name=:loss) #-------------------------------------------------------------------------------- diff --git a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl index 804fc1ea5312..ca48e6693213 100644 --- a/examples/mnist/lenet.jl +++ b/examples/mnist/lenet.jl @@ -25,7 +25,7 @@ fc1 = @mx.chain mx.Flatten(data=conv2) => fc2 = mx.FullyConnected(data=fc1, num_hidden=10) # softmax loss -lenet = mx.Softmax(data=fc2, name=:softmax) +lenet = mx.SoftmaxOutput(data=fc2, name=:softmax) #-------------------------------------------------------------------------------- diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 857b25f50049..b0703c56e5d1 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -20,7 +20,7 @@ mlp = @mx.chain 
mx.Variable(:data) => mx.FullyConnected(name=:fc2, num_hidden=64) => mx.Activation(name=:relu2, act_type=:relu) => mx.FullyConnected(name=:fc3, num_hidden=10) => - mx.Softmax(name=:softmax) + mx.SoftmaxOutput(name=:softmax) # data provider batch_size = 100 From 63890942a8af45ef9772060dbdf246a9740dd6ac Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 2 Nov 2015 09:54:12 -0500 Subject: [PATCH 162/630] update doc Softmax -> SoftmaxOutput (dmlc/mxnet#434) --- README.md | 2 +- docs/api/ndarray.rst | 39 +++++++++++- docs/api/symbol.rst | 129 +++++++++++++++++++++++++++++++++++++++- docs/tutorial/mnist.rst | 6 +- examples/mnist/mlp.jl | 2 +- 5 files changed, 167 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 2c28bc58f15e..ea7b8577de3e 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ mlp = @mx.chain mx.Variable(:data) => mx.FullyConnected(name=:fc2, num_hidden=64) => mx.Activation(name=:relu2, act_type=:relu) => mx.FullyConnected(name=:fc3, num_hidden=10) => - mx.Softmax(name=:softmax) + mx.SoftmaxOutput(name=:softmax) # data provider batch_size = 100 diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index 8ac5e9bda8f0..05a3dccba7dc 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -364,9 +364,9 @@ object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the out Public APIs ^^^^^^^^^^^ -.. function:: choose_element(...) +.. function:: choose_element_0index(...) - Choose one element from each line(row for python, column for R/Julia) in lhs according to index indicated by rhs + Choose one element from each line(row for python, column for R/Julia) in lhs according to index indicated by rhs. This function assume rhs uses 0-based index. :param lhs: Left operand to the function. :type lhs: NDArray @@ -413,9 +413,42 @@ Public APIs +.. function:: exp(...) + + Take exp of the src + + :param src: Source input to the function + :type src: NDArray + + + + + +.. function:: log(...) 
+ + Take log of the src + + :param src: Source input to the function + :type src: NDArray + + + + + +.. function:: norm(...) + + Take L2 norm of the src.The result will be ndarray of shape (1,) on the same device. + + :param src: Source input to the function + :type src: NDArray + + + + + .. function:: sqrt(...) - Take square root of the src + Take sqrt of the src :param src: Source input to the function :type src: NDArray diff --git a/docs/api/symbol.rst b/docs/api/symbol.rst index 2c7df712c2f8..e01ecb35980d 100644 --- a/docs/api/symbol.rst +++ b/docs/api/symbol.rst @@ -143,7 +143,7 @@ Public APIs :type num_filter: int (non-negative), required - :param num_group: number of groups partition + :param num_group: Number of groups partition. This option is not supported by CuDNN, you can use SliceChannel to num_group,apply convolution and concat instead to achieve the same need. :type num_group: int (non-negative), optional, default=1 @@ -162,6 +162,57 @@ Public APIs +.. function:: Deconvolution(...) + + Apply deconvolution to input then add a bias. + + :param data: Input data to the DeconvolutionOp. + :type data: Symbol + + + :param weight: Weight matrix. + :type weight: Symbol + + + :param bias: Bias parameter. + :type bias: Symbol + + + :param kernel: deconvolution kernel size: (y, x) + :type kernel: Shape(tuple), required + + + :param stride: deconvolution stride: (y, x) + :type stride: Shape(tuple), optional, default=(1, 1) + + + :param pad: pad for deconvolution: (y, x) + :type pad: Shape(tuple), optional, default=(0, 0) + + + :param num_filter: deconvolution filter(channel) number + :type num_filter: int (non-negative), required + + + :param num_group: number of groups partition + :type num_group: int (non-negative), optional, default=1 + + + :param workspace: Tmp workspace for deconvolution (MB) + :type workspace: long (non-negative), optional, default=512 + + + :param no_bias: Whether to disable bias parameter. 
+ :type no_bias: boolean, optional, default=True + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`Symbol`. + + + + + .. function:: Dropout(...) Apply dropout to input @@ -412,7 +463,7 @@ Public APIs .. function:: Softmax(...) - Perform a softmax transformation on input. + DEPRECATED: Perform a softmax transformation on input. Please use SoftmaxOutput :param data: Input data to softmax. :type data: Symbol @@ -433,9 +484,62 @@ Public APIs +.. function:: SoftmaxOutput(...) + + Perform a softmax transformation on input, backprop with logloss. + + :param data: Input data to softmax. + :type data: Symbol + + + :param grad_scale: Scale the gradient by a float factor + :type grad_scale: float, optional, default=1 + + + :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes + :type multi_output: boolean, optional, default=False + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`Symbol`. + + + + + +.. function:: exp(...) + + Take exp of the src + + :param src: Source symbolic input to the function + :type src: Symbol + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`Symbol`. + + + + + +.. function:: log(...) + + Take log of the src + + :param src: Source symbolic input to the function + :type src: Symbol + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`Symbol`. + + + + + .. function:: sqrt(...) - Take square root of the src + Take sqrt of the src :param src: Source symbolic input to the function :type src: Symbol @@ -505,6 +609,25 @@ Internal APIs +.. function:: _Native(...) + + Stub for implementing an operator implemented in native frontend language. 
+ + :param info: + :type info: , required + + + :param need_top_grad: Whether this layer needs out grad for backward. Should be false for loss layers. + :type need_top_grad: boolean, optional, default=True + + :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`Symbol`. + + + + + .. function:: _Plus(...) Perform an elementwise plus. diff --git a/docs/tutorial/mnist.rst b/docs/tutorial/mnist.rst index 5fe21cde181e..fc2e548dd1c2 100644 --- a/docs/tutorial/mnist.rst +++ b/docs/tutorial/mnist.rst @@ -41,11 +41,11 @@ Note each composition we take the previous symbol as the `data` argument, formin Input --> 128 units (ReLU) --> 64 units (ReLU) --> 10 units where the last 10 units correspond to the 10 output classes (digits 0,...,9). We -then add a final ``Softmax`` operation to turn the 10-dimensional prediction to proper probability values for the 10 classes: +then add a final :class:`SoftmaxOutput` operation to turn the 10-dimensional prediction to proper probability values for the 10 classes: .. code-block:: julia - mlp = mx.Softmax(data = fc3, name=:softmax) + mlp = mx.SoftmaxOutput(data = fc3, name=:softmax) As we can see, the MLP is just a chain of layers. For this case, we can also use the ``mx.chain`` macro. The same architecture above can be defined as @@ -58,7 +58,7 @@ the ``mx.chain`` macro. The same architecture above can be defined as mx.FullyConnected(name=:fc2, num_hidden=64) => mx.Activation(name=:relu2, act_type=:relu) => mx.FullyConnected(name=:fc3, num_hidden=10) => - mx.Softmax(name=:softmax) + mx.SoftmaxOutput(name=:softmax) After defining the architecture, we are ready to load the MNIST data. 
MXNet.jl provide built-in data providers for the MNIST dataset, which could automatically diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index b0703c56e5d1..05d008d5255c 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -11,7 +11,7 @@ using MXNet # fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) # act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) # fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) -# mlp = mx.Softmax(data = fc3, name=:softmax) +# mlp = mx.SoftmaxOutput(data = fc3, name=:softmax) #-- Option 2: using the mx.chain macro mlp = @mx.chain mx.Variable(:data) => From 922fc8516f503da74591c6b97f8bee81ff5fdcc0 Mon Sep 17 00:00:00 2001 From: iamed2 Date: Mon, 2 Nov 2015 10:21:09 -0600 Subject: [PATCH 163/630] Reenable 0.4 testing. --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index cae2bfbb7a22..90fd90c7de74 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,7 +5,7 @@ os: - linux - osx julia: - #- release + - 0.4 - nightly # dependent apt packages From 4acbce9f2f95452829566dd35e90abe5aff7a33f Mon Sep 17 00:00:00 2001 From: Eric Davies Date: Mon, 2 Nov 2015 10:41:37 -0600 Subject: [PATCH 164/630] Add coverage. 
--- .travis.yml | 3 +++ README.md | 1 + test/travis/run_coverage.sh | 1 + 3 files changed, 5 insertions(+) create mode 100644 test/travis/run_coverage.sh diff --git a/.travis.yml b/.travis.yml index 90fd90c7de74..ed0d03d4906c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -34,3 +34,6 @@ notifications: script: - source $TRAVIS/run_test.sh + +after_success: + - source $TRAVIS/run_coverage.sh diff --git a/README.md b/README.md index ea7b8577de3e..10862ff8a6a6 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # MXNet [![Build Status](https://travis-ci.org/dmlc/MXNet.jl.svg?branch=master)](https://travis-ci.org/dmlc/MXNet.jl) +[![codecov.io](https://codecov.io/github/dmlc/MXNet.jl/coverage.svg?branch=master)](https://codecov.io/github/dmlc/MXNet.jl?branch=master) [![Documentation Status](https://readthedocs.org/projects/mxnetjl/badge/?version=latest)](http://mxnetjl.readthedocs.org/en/latest/?badge=latest) [![License](http://dmlc.github.io/img/apache2.svg)](LICENSE.md) diff --git a/test/travis/run_coverage.sh b/test/travis/run_coverage.sh new file mode 100644 index 000000000000..73816fac7b98 --- /dev/null +++ b/test/travis/run_coverage.sh @@ -0,0 +1 @@ +julia -e 'cd(Pkg.dir("MXNet")); Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())' \ No newline at end of file From c1d663df4b339838558400a1eabfea81ec6b5d3e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 2 Nov 2015 22:27:40 -0500 Subject: [PATCH 165/630] run mlp.jl on travis CI --- test/runtests.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test/runtests.jl b/test/runtests.jl index 1d5f49b4d1f1..71d7f8a79fc4 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -14,3 +14,7 @@ end include("common.jl") test_dir("unittest") +# run the basic MNIST mlp example +if haskey(ENV, "TRAVIS") && ENV["TRAVIS"] == "true" + include(joinpath(Pkg.dir("MXNet"), "examples", "mnist", "mlp.jl")) +end From b1e882b46fd43b45c0353d0ae6825da017c05beb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: 
Mon, 2 Nov 2015 22:29:06 -0500 Subject: [PATCH 166/630] fix env variable for detecting Travis CI --- test/runtests.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/runtests.jl b/test/runtests.jl index 71d7f8a79fc4..af60ab9f9185 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -15,6 +15,6 @@ include("common.jl") test_dir("unittest") # run the basic MNIST mlp example -if haskey(ENV, "TRAVIS") && ENV["TRAVIS"] == "true" +if haskey(ENV, "CONTINUOUS_INTEGRATION") include(joinpath(Pkg.dir("MXNet"), "examples", "mnist", "mlp.jl")) end From f82351028ccd7164f23f264c08e3128f93074077 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 2 Nov 2015 22:35:45 -0500 Subject: [PATCH 167/630] only run 1 epoch for MNIST on travis CI --- examples/mnist/mlp.jl | 3 ++- test/runtests.jl | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 05d008d5255c..fd1bbf99e38b 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -34,7 +34,8 @@ model = mx.FeedForward(mlp, context=mx.cpu()) optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +mx.fit(model, optimizer, train_provider, eval_data=eval_provider, + n_epoch = isdefined(:n_epoch) ? 
n_epoch : 20) #-------------------------------------------------------------------------------- # Optional, demonstration of the predict API diff --git a/test/runtests.jl b/test/runtests.jl index af60ab9f9185..3d796bd7a5e2 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -16,5 +16,6 @@ test_dir("unittest") # run the basic MNIST mlp example if haskey(ENV, "CONTINUOUS_INTEGRATION") + n_epoch = 1 include(joinpath(Pkg.dir("MXNet"), "examples", "mnist", "mlp.jl")) end From 23378f1d23f80e9b9950707b37c445a43927f2da Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 2 Nov 2015 22:41:39 -0500 Subject: [PATCH 168/630] fix error due to API changes of subarray in Julia nightly --- src/metric.jl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 59acb529a9a7..c88239ab8cc8 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -14,9 +14,8 @@ function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDA n_sample = size(pred)[end] metric.n_sample += n_sample for i = 1:n_sample - klass = indmax(sub(pred,:,i)) + klass = indmax(pred[:,i]) metric.acc_sum += (klass-1) == label[i] - #println("$(sub(pred,:,i)) $(klass-1) <=> $(label[i])") end end From d26c36b8b0b1d254cddfadf4c4c1e11606d23360 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 2 Nov 2015 23:10:27 -0500 Subject: [PATCH 169/630] a specific mlp script for Travis CI regression test. --- examples/mnist/mlp-test.jl | 75 ++++++++++++++++++++++++++++++++++++++ examples/mnist/mlp.jl | 3 +- test/runtests.jl | 3 +- 3 files changed, 77 insertions(+), 4 deletions(-) create mode 100644 examples/mnist/mlp-test.jl diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl new file mode 100644 index 000000000000..ad7446a98324 --- /dev/null +++ b/examples/mnist/mlp-test.jl @@ -0,0 +1,75 @@ +# This file is primarily to be included from runtest.jl. We tried to cover various +# features of MXNet.jl in this example in order to detect regression errors. 
+ +using MXNet + + +function get_mnist_mlp() + mlp = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=128) => + mx.Activation(name=:relu1, act_type=:relu) => + mx.FullyConnected(name=:fc2, num_hidden=64) => + mx.Activation(name=:relu2, act_type=:relu) => + mx.FullyConnected(name=:fc3, num_hidden=10) => + mx.SoftmaxOutput(name=:softmax) + return mlp +end + +function get_mnist_data(batch_size=100) + include("mnist-data.jl") + return get_mnist_providers(batch_size) +end + +function mnist_fit_and_predict(optimizer, initializer, n_epoch) + mlp = get_mnist_mlp() + train_provider, eval_provider = get_mnist_data() + + # setup model + model = mx.FeedForward(mlp, context=mx.cpu()) + + # fit parameters + cp_prefix = "mnist-test-cp" + mx.fit(model, optimizer, train_provider, eval_data=eval_provider, n_epoch=n_epoch, + initializer=initializer, callbacks=[mx.speedometer(), mx.do_checkpoint(cp_prefix, save_epoch_0=true)]) + + # make sure the checkpoints are saved + @test isfile("$cp_prefix-symbol.json") + for i_epoch = 0:n_epoch + @test isfile(mx.format("{1}-{2:04d}.params", cp_prefix, i_epoch)) + end + mlp_load = mx.load("$cp_prefix-symbol.json", mx.Symbol) + @test mx.to_json(mlp_load) == mx.to_json(mlp) + mlp_load = mx.from_json(readall("$cp_prefix-symbol.json"), mx.Symbol) + @test mx.to_json(mlp_load) == mx.to_json(mlp) + + #-------------------------------------------------------------------------------- + # the predict API + probs = mx.predict(model, eval_provider) + + # collect all labels from eval data + labels = Array[] + for batch in eval_provider + push!(labels, copy(mx.get(eval_provider, batch, :softmax_label))) + end + labels = cat(1, labels...) 
+ + # Now we use compute the accuracy + correct = 0 + for i = 1:length(labels) + # labels are 0...9 + if indmax(probs[:,i]) == labels[i]+1 + correct += 1 + end + end + accuracy = 100correct/length(labels) + println(mx.format("Accuracy on eval set: {1:.2f}%", accuracy)) + + return accuracy +end + +function test_mnist_mlp() + @test mnist_fit_and_predict(mx.SGD(lr=0.1, momentum=0.9), mx.UniformInitializer(0.01), 2) > 90 + @test mnist_fit_and_predict(mx.ADAM(), mx.NormalInitializer(), 2) > 90 +end + +test_mnist_mlp() diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index fd1bbf99e38b..f6fbbd5c0870 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -34,8 +34,7 @@ model = mx.FeedForward(mlp, context=mx.cpu()) optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) # fit parameters -mx.fit(model, optimizer, train_provider, eval_data=eval_provider, - n_epoch = isdefined(:n_epoch) ? n_epoch : 20) +mx.fit(model, optimizer, train_provider, eval_data=eval_provider, n_epoch=20) #-------------------------------------------------------------------------------- # Optional, demonstration of the predict API diff --git a/test/runtests.jl b/test/runtests.jl index 3d796bd7a5e2..53c8cbc3bb74 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -16,6 +16,5 @@ test_dir("unittest") # run the basic MNIST mlp example if haskey(ENV, "CONTINUOUS_INTEGRATION") - n_epoch = 1 - include(joinpath(Pkg.dir("MXNet"), "examples", "mnist", "mlp.jl")) + include(joinpath(Pkg.dir("MXNet"), "examples", "mnist", "mlp-test.jl")) end From 1786bdb742f7243fe44c8487c361cdd73b56776f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 2 Nov 2015 23:12:56 -0500 Subject: [PATCH 170/630] wrap mnist test into a module --- examples/mnist/mlp-test.jl | 5 ++++- test/unittest/kvstore.jl | 10 ++++++++++ 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index ad7446a98324..71ed5d7747c9 100644 --- 
a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -1,8 +1,9 @@ # This file is primarily to be included from runtest.jl. We tried to cover various # features of MXNet.jl in this example in order to detect regression errors. -using MXNet +module MNISTTest +using MXNet function get_mnist_mlp() mlp = @mx.chain mx.Variable(:data) => @@ -73,3 +74,5 @@ function test_mnist_mlp() end test_mnist_mlp() + +end # module MNISTTest diff --git a/test/unittest/kvstore.jl b/test/unittest/kvstore.jl index bca77a556922..2770edf7a259 100644 --- a/test/unittest/kvstore.jl +++ b/test/unittest/kvstore.jl @@ -16,6 +16,15 @@ function init_kv() return kv end +function test_kv_basic() + info("KVStore::basic") + + kv = init_kv() + @test mx.get_type(kv) == :local + @test mx.get_rank(kv) == 0 + @test mx.get_num_workers(kv) == 1 +end + function test_single_kv_pair() info("KVStore::single") @@ -53,6 +62,7 @@ function test_aggregator() end end +test_kv_basic() test_single_kv_pair() test_aggregator() From 3bf43649c8ede0d7f6329a37e8ec6f66dcbfeee7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 2 Nov 2015 23:33:04 -0500 Subject: [PATCH 171/630] fix travis test error --- examples/mnist/mlp-test.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index 71ed5d7747c9..04d917c6543d 100644 --- a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -2,8 +2,8 @@ # features of MXNet.jl in this example in order to detect regression errors. 
module MNISTTest - using MXNet +using Base.Test function get_mnist_mlp() mlp = @mx.chain mx.Variable(:data) => From d0dadad888e16f65953abd60451923b437741a54 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 3 Nov 2015 07:41:15 -0500 Subject: [PATCH 172/630] pull-back only when update_on_kvstore --- src/model.jl | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/model.jl b/src/model.jl index b978ec0c6f49..ef34ad1d7669 100644 --- a/src/model.jl +++ b/src/model.jl @@ -372,12 +372,13 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # init kv with gradients for idx = 1:length(param_arrays) param_on_devs = param_arrays[idx] - grad_on_devs = grad_arrays[idx] init!(kvstore, idx, self.arg_params[param_names[idx]]) - # pull weights back - pull!(kvstore, idx, param_on_devs, priority=-idx) + if update_on_kvstore + # pull weights back + pull!(kvstore, idx, param_on_devs, priority=-idx) + end end end From 75b3e68fa9cd57f946a5f206ee36606193952838 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 3 Nov 2015 13:43:56 -0500 Subject: [PATCH 173/630] SVMLight Data Provider --- plugins/io/svmlight.jl | 64 ++++++++++++++++++++++++++++++++++++++++++ src/io.jl | 24 ++++++++++++++-- 2 files changed, 86 insertions(+), 2 deletions(-) create mode 100644 plugins/io/svmlight.jl diff --git a/plugins/io/svmlight.jl b/plugins/io/svmlight.jl new file mode 100644 index 000000000000..ddc207854d4e --- /dev/null +++ b/plugins/io/svmlight.jl @@ -0,0 +1,64 @@ +using MXNet +using SVMLightLoader + +type SVMLightProvider <: mx.AbstractDataProvider + filename :: AbstractString + batch_size :: Int + fea_dim :: Int + data_name :: Symbol + label_name :: Symbol +end + +function SVMLightProvider(filename::AbstractString, batch_size::Int; fea_dim::Int=-1, + data_name::Symbol=:data, label_name::Symbol=:label) + if fea_dim == -1 + info("SVMLightProvider: going over file to get feature dimension of $filename") + f = SVMLightFile(filename) + for 
(data, label) in f + fea_dim = max(fea_dim, length(data)) + end + end + + return SVMLightProvider(filename, batch_size, fea_dim, data_name, label_name) +end + +mx.get_batch_size(provider :: SVMLightProvider) = provider.batch_size +function mx.provide_data(provider :: SVMLightProvider) + [(provider.data_name, (provider.fea_dim, provider.batch_size))] +end +function mx.provide_label(provider :: SVMLightProvider) + [(provider.label_name, (provider.batch_size,))] +end + +function mx.eachbatch(provider :: SVMLightProvider) + data_jl = zeros(mx.MX_float, (provider.fea_dim, provider.batch_size)) + data_nd = mx.empty(size(data_jl)) + label_jl = zeros(mx.MX_float, (provider.batch_size,)) + label_nd = mx.empty(size(label_jl)) + + batch = mx.DataBatch([data_nd], [label_nd], provider.batch_size) + function _svmlight_iter() + f = SVMLightFile(provider.filename) + while true + raw = collect(take(f, provider.batch_size)) + cnt = length(raw) + if cnt == 0 + # end of file, no more data to see + return + end + + data_jl[:] = 0 + for i = 1:provider.batch_size + vec, gnd = raw[min(i,cnt)] + data_jl[1:length(vec),i] = vec + label_jl[i] = gnd + end + mx.copy!(data_nd, data_jl) + mx.copy!(label_nd, label_jl) + batch.count = cnt + produce(batch) + end + end + + return Task(_svmlight_iter) +end diff --git a/src/io.jl b/src/io.jl index 6bbb9a5cf6d5..385de35a2936 100644 --- a/src/io.jl +++ b/src/io.jl @@ -42,7 +42,7 @@ and split it into mini-batches so that the model can consume the data in a unifo .. code-block:: julia - for batch in provider + for batch in eachbatch(provider) data = get_data(provider, batch) end @@ -50,12 +50,15 @@ and split it into mini-batches so that the model can consume the data in a unifo .. 
code-block:: julia - state = Base.start(provider) + state = Base.start(eachbatch(provider)) while !Base.done(provider, state) (batch, state) = Base.next(provider, state) data = get_data(provider, batch) end + By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface + is implemented on the provider type itself. But the extra layer of abstraction allows us to + implement a data provider easily via a Julia ``Task`` coroutine. The detailed interface function is listed below: .. function:: Base.eltype(provider) -> AbstractDataBatch @@ -189,6 +192,21 @@ abstract AbstractDataProviderState =# abstract AbstractDataBatch +#=doc +.. class:: DataBatch + + A basic subclass of :class:`AbstractDataBatch`, that implement the interface by + accessing member fields. +=# +type DataBatch <: AbstractDataBatch + data :: Vector{NDArray} + label :: Vector{NDArray} + count :: Int +end +count_samples(batch :: DataBatch) = batch.count +get_data(batch :: DataBatch) = batch.data +get_label(batch :: DataBatch) = batch.label + #=doc .. class:: SlicedNDArray @@ -240,6 +258,8 @@ function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name error("$name is not provided by this data provider") end +eachbatch(provider :: AbstractDataProvider) = provider + #=doc Built-in data providers ----------------------- From f6cbba1ca080089cabeb297ea44f1b7e0463c208 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 3 Nov 2015 13:47:09 -0500 Subject: [PATCH 174/630] add description of SVMLight Provider --- plugins/io/svmlight.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/plugins/io/svmlight.jl b/plugins/io/svmlight.jl index ddc207854d4e..94563357ee6e 100644 --- a/plugins/io/svmlight.jl +++ b/plugins/io/svmlight.jl @@ -1,3 +1,7 @@ +#=doc +SVMLight / LibSVM is a popular data format for sparse features. 
Some preprocessed +datasets in this format could be found at http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/ +=# using MXNet using SVMLightLoader From 0f22cfa3897668cb044db41059c51d06a600d608 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 3 Nov 2015 13:53:26 -0500 Subject: [PATCH 175/630] add readme for plugins --- plugins/README.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) create mode 100644 plugins/README.md diff --git a/plugins/README.md b/plugins/README.md new file mode 100644 index 000000000000..38882889f494 --- /dev/null +++ b/plugins/README.md @@ -0,0 +1,14 @@ +# Plugins of MXNet.jl + +This directory contains *plugins* of MXNet.jl. A plugin is typically a component that could be part of MXNet.jl, but excluded from the `mx` namespace. The plugins are included here primarily for two reasons: + +* To minimize the dependency of MXNet.jl on other optional packages. +* To serve as examples on how to extend some components of MXNet.jl. + +The most straightforward way to use a plugin is to `include` the code. 
For example + +```julia +include(joinpath(Pkg.dir("MXNet"), "plugins", "io", "svmlight.jl")) + +provider = SVMLightProvider("/path/to/dataset", 100) +``` From 7ed591d7d83b10cd07f5ece128f8114e8bdbe9f8 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 5 Nov 2015 15:14:11 +0900 Subject: [PATCH 176/630] add libatlas-base for cblas --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index ed0d03d4906c..8771dfa989b6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,6 +19,7 @@ addons: - git - libcurl4-openssl-dev - unzip + - libatlas-base-dev - libatlas-dev - libopencv-dev - gcc-4.8 From ee855ecd36ff3bde759be569e4e0dcd3b49ef1d9 Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 5 Nov 2015 09:00:46 -0500 Subject: [PATCH 177/630] fix cifar10 example --- examples/cifar10/cifar10.jl | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index 37cfacd2cec0..e5ff751eca7d 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -77,9 +77,7 @@ gpus = [mx.Context(mx.GPU, i) for i = 0:num_gpus-1] model = mx.FeedForward(softmax, context=gpus) # optimizer -optimizer = mx.SGD(lr_scheduler=mx.FixedLearningRateScheduler(0.05), - mom_scheduler=mx.FixedMomentumScheduler(0.9), - weight_decay=0.0001) +optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.0001) # fit parameters mx.fit(model, optimizer, train_provider, n_epoch=num_epoch, eval_data=test_provider, From 726badd23dd6960ae800a01f048bab94eb91b3f2 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 7 Nov 2015 18:59:26 -0500 Subject: [PATCH 178/630] update doc for updated data iter API --- docs/api/io.rst | 15 +++++++++++++-- src/model.jl | 6 +++--- 2 files changed, 16 insertions(+), 5 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index b601db39c7ec..a7568a7b6f71 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -43,7 +43,7 @@ and split it into mini-batches so that the 
model can consume the data in a unifo .. code-block:: julia - for batch in provider + for batch in eachbatch(provider) data = get_data(provider, batch) end @@ -51,12 +51,15 @@ and split it into mini-batches so that the model can consume the data in a unifo .. code-block:: julia - state = Base.start(provider) + state = Base.start(eachbatch(provider)) while !Base.done(provider, state) (batch, state) = Base.next(provider, state) data = get_data(provider, batch) end + By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface + is implemented on the provider type itself. But the extra layer of abstraction allows us to + implement a data provider easily via a Julia ``Task`` coroutine. The detailed interface function is listed below: .. function:: Base.eltype(provider) -> AbstractDataBatch @@ -191,6 +194,14 @@ and split it into mini-batches so that the model can consume the data in a unifo +.. class:: DataBatch + + A basic subclass of :class:`AbstractDataBatch`, that implement the interface by + accessing member fields. + + + + .. class:: SlicedNDArray A alias type of ``Tuple{UnitRange{Int},NDArray}``. 
diff --git a/src/model.jl b/src/model.jl index ef34ad1d7669..c291243093c6 100644 --- a/src/model.jl +++ b/src/model.jl @@ -185,7 +185,7 @@ function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::B batch_size = get_batch_size(data) data_arrays = [self.pred_exec.arg_dict[name] for name in data_names] output_list = [Array{MX_float}[] for i=1:length(self.pred_exec.outputs)] - for batch in data + for batch in eachbatch(data) load_data!(data, batch, data_arrays) forward(self.pred_exec, is_train=false) if isa(callback, Void) @@ -402,7 +402,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # invoke callbacks on iteration 0 _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) - for batch in data + for batch in eachbatch(data) load_data!(data, batch, data_arrays) load_label!(data, batch, label_arrays) @@ -473,7 +473,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra @assert(get_batch_size(opts.eval_data) == batch_size) reset!(opts.eval_metric) - for batch in opts.eval_data + for batch in eachbatch(opts.eval_data) load_data!(opts.eval_data, batch, data_arrays) # forward and backward From 637bf0cfbb28a93060e9f44281d28c3e7caf517c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 7 Nov 2015 19:09:35 -0500 Subject: [PATCH 179/630] avoid calling mxdataiter reset --- src/io.jl | 22 ++++++++++++++++++---- test/runtests.jl | 4 ++-- 2 files changed, 20 insertions(+), 6 deletions(-) diff --git a/src/io.jl b/src/io.jl index 385de35a2936..d618a4eef1be 100644 --- a/src/io.jl +++ b/src/io.jl @@ -473,6 +473,11 @@ type MXDataProvider <: AbstractDataProvider data_shape :: Vector{Tuple{Base.Symbol, Tuple}} label_shape:: Vector{Tuple{Base.Symbol, Tuple}} batch_size :: Int + + # those two a auxiliary variables to help avoid calling reset + # but still pre-fetch first batch to get shape information + first_epoch:: Bool + first_batch:: Bool end function _reset_data_iter(handle :: 
MX_DataIterHandle) @@ -499,7 +504,6 @@ function MXDataProvider(handle :: MX_DataIterHandle; label_name :: Union{Base.Symbol,Void}=:softmax_label, kwargs...) # for convenience, we ignore the rest keyword arguments # init iterator, load the first batch and get shapes - _reset_data_iter(handle) @assert(_iter_next(handle), "Failed to load the first batch in MXDataProvider") data_shape = Tuple{Base.Symbol, Tuple}[(data_name, size(_get_data(handle)))] if !isa(label_name, Void) @@ -508,7 +512,7 @@ function MXDataProvider(handle :: MX_DataIterHandle; label_shape = Tuple{Base.Symbol, Tuple}[] end - MXDataProvider(handle, data_shape, label_shape, data_shape[1][2][end]) + MXDataProvider(handle, data_shape, label_shape, data_shape[1][2][end], true, true) end provide_data(provider::MXDataProvider) = provider.data_shape @@ -525,11 +529,21 @@ function Base.eltype(provider :: MXDataProvider) MXDataBatch end function Base.start(provider :: MXDataProvider) - _reset_data_iter(provider.handle) + if !provider.first_epoch + _reset_data_iter(provider.handle) + else + provider.first_epoch = false + end + return MXDataProviderState(true) end function Base.done(provider :: MXDataProvider, state :: MXDataProviderState) - state.has_next = _iter_next(provider.handle) + if provider.first_batch + state.has_next = true + provider.first_batch = false + else + state.has_next = _iter_next(provider.handle) + end return !state.has_next end function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) diff --git a/test/runtests.jl b/test/runtests.jl index 53c8cbc3bb74..cd9087b7202b 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -11,8 +11,8 @@ function test_dir(dir) end end -include("common.jl") -test_dir("unittest") +include(joinpath(dirname(@__FILE__), "common.jl")) +test_dir(joinpath(dirname(@__FILE__), "unittest")) # run the basic MNIST mlp example if haskey(ENV, "CONTINUOUS_INTEGRATION") From ee0583f82d093654b0d66379a118822fca26da1c Mon Sep 17 00:00:00 2001 From: Chiyuan 
Zhang Date: Sun, 8 Nov 2015 10:43:57 -0500 Subject: [PATCH 180/630] remove unused variable --- src/kvstore.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/kvstore.jl b/src/kvstore.jl index 562edc3d4850..d52433f567b2 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -107,7 +107,6 @@ end # extra handle parameter of the API to pass the updater object around. Fix this when someday # full closure cfunction is supported in Julia. function _kvstore_update_wrapper(index::Cint, nd_recv::MX_handle, nd_local::MX_handle, updater::Ptr{Void}) - x = unsafe_pointer_to_objref(updater) updater_func = unsafe_pointer_to_objref(updater) :: Function updater_func(Int(index), NDArray(MX_NDArrayHandle(nd_recv)), NDArray(MX_NDArrayHandle(nd_local))) return nothing From 315c1a4aff9355e2be6baa1d9993de26145e078b Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 00:29:06 -0500 Subject: [PATCH 181/630] simplify a bit io test --- test/unittest/io.jl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/test/unittest/io.jl b/test/unittest/io.jl index d34fb674b689..33be1eac3da1 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -52,8 +52,7 @@ function test_arrays_impl(data::Vector, label::Vector, provider::mx.ArrayDataPro info("IO::Array::#data=$(length(data)),#label=$(length(label)),batch_size=$batch_size") for (idx, batch) in zip(idx_all, provider) data_batch = [x[[Colon() for i=1:ndims(x)-1]..., idx:min(idx+batch_size-1,sample_count)] for x in data] - data_get = [mx.empty(size(x)[1:end-1]..., batch_size) for x in data] - mx.load_data!(provider, batch, [[(1:batch_size, x)] for x in data_get]) + data_get = mx.get_data(provider, batch) for (d_real, d_get) in zip(data_batch, data_get) @test reldiff(d_real, copy(d_get)[[1:n for n in size(d_real)]...]) < 1e-6 From 722eb369998133e0820f27611229fc60d15c6afa Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 00:50:51 -0500 Subject: [PATCH 182/630] fix array data iter provide size not 
properly reported (see #14) --- src/io.jl | 4 ++-- test/unittest/io.jl | 7 +++++++ 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/io.jl b/src/io.jl index d618a4eef1be..c7e5616c6565 100644 --- a/src/io.jl +++ b/src/io.jl @@ -394,10 +394,10 @@ function ArrayDataProvider(data::Any, label::Any; batch_size::Int=0, shuffle::Bo end function provide_data(provider::ArrayDataProvider) - return collect(zip(provider.data_names, map(size, provider.data_arrays))) + return collect(zip(provider.data_names, map(size, provider.data_batch))) end function provide_label(provider::ArrayDataProvider) - return collect(zip(provider.label_names, map(size, provider.label_arrays))) + return collect(zip(provider.label_names, map(size, provider.label_batch))) end get_batch_size(provider::ArrayDataProvider) = provider.batch_size diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 33be1eac3da1..8ef3b57d948c 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -49,6 +49,13 @@ function test_arrays_impl(data::Vector, label::Vector, provider::mx.ArrayDataPro batch_size = mx.get_batch_size(provider) idx_all = 1:batch_size:sample_count + for (d1, (_, d2)) in zip(data, mx.provide_data(provider)) + @test size(d1)[1:end-1] == d2[1:end-1] + end + for (d1, (_, d2)) in zip(label, mx.provide_label(provider)) + @test size(d1)[1:end-1] == d2[1:end-1] + end + info("IO::Array::#data=$(length(data)),#label=$(length(label)),batch_size=$batch_size") for (idx, batch) in zip(idx_all, provider) data_batch = [x[[Colon() for i=1:ndims(x)-1]..., idx:min(idx+batch_size-1,sample_count)] for x in data] From 189e755b0ba537d4f81e10d30579931ea727992a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 09:37:03 -0500 Subject: [PATCH 183/630] prepare for v0.0.4 --- NEWS.md | 9 ++++++++- docs/conf.py | 4 ++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/NEWS.md b/NEWS.md index 84858e3aca13..f970f7d64703 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,4 +1,11 @@ -# 
v.0.03 (2015.10.27) +# v0.0.4 (2015.11.09) + +* ADAM optimizer (@cbecker) +* Improved data provider API. +* More documentation. +* Fix a bug in array data iterator (@vchuravy) + +# v0.0.3 (2015.10.27) * Model prediction API. * Model checkpoint loading and saving. diff --git a/docs/conf.py b/docs/conf.py index ca57eed31146..b009877694a0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -56,9 +56,9 @@ # built documents. # # The short X.Y version. -version = '0.0.3' +version = '0.0.4' # The full version, including alpha/beta/rc tags. -release = '0.0.3' +release = '0.0.4' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. From 76261c736743d3256a0a9502429b1d95f2796026 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 15:19:58 -0500 Subject: [PATCH 184/630] refactoring: Symbol -> Node --- src/MXNet.jl | 2 +- src/executor.jl | 16 ++-- src/io.jl | 2 +- src/model.jl | 12 +-- src/{symbol.jl => node.jl} | 122 +++++++++++++-------------- test/unittest/{symbol.jl => node.jl} | 16 ++-- 6 files changed, 85 insertions(+), 85 deletions(-) rename src/{symbol.jl => node.jl} (77%) rename test/unittest/{symbol.jl => node.jl} (91%) diff --git a/src/MXNet.jl b/src/MXNet.jl index 53553d417941..68875b62da90 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -15,7 +15,7 @@ include("ndarray.jl") include("random.jl") include("name.jl") -include("symbol.jl") +include("node.jl") include("executor.jl") include("metric.jl") diff --git a/src/executor.jl b/src/executor.jl index 4485bc6dce9a..6dea17763101 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -6,13 +6,13 @@ Executor #=doc .. class:: Executor - An executor is a realization of a symbolic architecture defined by a :class:`Symbol`. + An executor is a realization of a symbolic architecture defined by a :class:`Node`. The actual forward and backward computation specified by the network architecture can be carried out with an executor. 
=# type Executor handle :: MX_ExecutorHandle - symbol :: Symbol + symbol :: Node arg_arrays :: Vector{NDArray} grad_arrays :: Vector{Union{Void,NDArray}} aux_arrays :: Vector{NDArray} @@ -20,7 +20,7 @@ type Executor arg_dict :: Dict{Base.Symbol, NDArray} aux_dict :: Dict{Base.Symbol, NDArray} end -function Executor(hdr :: MX_ExecutorHandle, symbol :: Symbol, +function Executor(hdr :: MX_ExecutorHandle, symbol :: Node, arg_arrays :: Vector{NDArray}, grad_arrays :: Vector{Union{Void,NDArray}}, aux_arrays :: Vector{NDArray}) # get output arrays @@ -73,9 +73,9 @@ end #=doc .. function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) - Create an :class:`Executor` by binding a :class:`Symbol` to concrete :class:`NDArray`. + Create an :class:`Executor` by binding a :class:`Node` to concrete :class:`NDArray`. - :param Symbol sym: the network architecture describing the computation graph. + :param Node sym: the network architecture describing the computation graph. :param Context ctx: the context on which the computation should run. :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete arrays for all the inputs in the network architecture. 
The inputs typically include @@ -86,7 +86,7 @@ end :param grad_req: =# @enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 -function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; +function bind(self :: Node, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), aux_states :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), grad_req :: Union{GRAD_REQ,Vector{GRAD_REQ},Dict{Base.Symbol,GRAD_REQ}} = GRAD_WRITE) @@ -116,7 +116,7 @@ function bind(self :: Symbol, ctx :: Context, args :: Union{Vector{NDArray},Dict executor = Executor(MX_ExecutorHandle(ref_hdr[]), self, args, args_grad, aux_states) end -function bind(self :: Symbol; kwargs...) +function bind(self :: Node; kwargs...) kwargs = Dict(kwargs) @assert(haskey(kwargs, :args), "Must specify args") args = pop!(kwargs, :args) @@ -128,7 +128,7 @@ function bind(self :: Symbol; kwargs...) bind(self, context, args; kwargs...) end -function simple_bind(self :: Symbol, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) +function simple_bind(self :: Node, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) arg_shapes, out_shapes, aux_shapes = infer_shape(self; kwargs...) @assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") diff --git a/src/io.jl b/src/io.jl index c7e5616c6565..5c59e96bb611 100644 --- a/src/io.jl +++ b/src/io.jl @@ -35,7 +35,7 @@ and split it into mini-batches so that the model can consume the data in a unifo training stage, both *data* and *label* will be feeded into the model, while during prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and of any shapes. The provided data and label names here should match the input names in a target - :class:`Symbol`. + :class:`Node`. 
A data provider should also implement the Julia iteration interface, in order to allow iterating through the data set. The provider will be called in the following way: diff --git a/src/model.jl b/src/model.jl index c291243093c6..93189124d9b0 100644 --- a/src/model.jl +++ b/src/model.jl @@ -23,7 +23,7 @@ abstract AbstractModel that handles sequential data explicitly, please use **TODO**... =# type FeedForward <: AbstractModel - arch :: Symbol + arch :: Node ctx :: Vector{Context} arg_params :: Dict{Base.Symbol, NDArray} @@ -32,7 +32,7 @@ type FeedForward <: AbstractModel pred_exec :: Union{Executor, Void} # leave the rest fields undefined - FeedForward(arch :: Symbol, ctx :: Vector{Context}) = new(arch, ctx) + FeedForward(arch :: Node, ctx :: Vector{Context}) = new(arch, ctx) end """Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector @@ -52,14 +52,14 @@ function _split_inputs(batch_size :: Int, n_split :: Int) end #=doc -.. function:: FeedForward(arch :: Symbol, ctx) +.. function:: FeedForward(arch :: Node, ctx) :param arch: the architecture of the network constructed using the symbolic API. :param ctx: the devices on which this model should do computation. It could be a single :class:`Context` or a list of :class:`Context` objects. In the latter case, data parallelization will be used for training. If no context is provided, the default context ``cpu()`` will be used. 
=# -function FeedForward(arch :: Symbol; context :: Union{Context, Vector{Context}, Void} = nothing) +function FeedForward(arch :: Node; context :: Union{Context, Vector{Context}, Void} = nothing) if isa(context, Void) context = [Context(CPU)] elseif isa(context, Context) @@ -514,7 +514,7 @@ end function save_checkpoint(self :: FeedForward, prefix :: AbstractString, state :: OptimizationState) save_checkpoint(self.arch, self.arg_params, self.aux_params, prefix, state.curr_epoch) end -function save_checkpoint(sym :: Symbol, arg_params :: Dict{Base.Symbol, NDArray}, +function save_checkpoint(sym :: Node, arg_params :: Dict{Base.Symbol, NDArray}, aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, epoch :: Int) save("$prefix-symbol.json", sym) save_dict = merge(Dict([symbol("arg:$k") => v for (k,v) in arg_params]), @@ -525,7 +525,7 @@ function save_checkpoint(sym :: Symbol, arg_params :: Dict{Base.Symbol, NDArray} end function load_checkpoint(prefix :: AbstractString, epoch :: Int) - arch = load("$prefix-symbol.json", Symbol) + arch = load("$prefix-symbol.json", Node) saved_dict = load(format("{1}-{2:04d}.params", prefix, epoch), NDArray) arg_params = Dict{Base.Symbol, NDArray}() aux_params = Dict{Base.Symbol, NDArray}() diff --git a/src/symbol.jl b/src/node.jl similarity index 77% rename from src/symbol.jl rename to src/node.jl index 13338602f259..337d50864684 100644 --- a/src/symbol.jl +++ b/src/node.jl @@ -4,50 +4,50 @@ Symbolic API =# #=doc -.. class:: Symbol +.. class:: Node - Symbol is the basic building block of the symbolic graph in MXNet.jl. + Node is the basic building block of the symbolic graph in MXNet.jl. .. note:: - Throughout this documentation, ``Symbol`` always refer to this :class:`Symbol` type. - When we refer to the Julia's build-in symbol type (e.g. ``typeof(:foo)``), we always + Throughout this documentation, ``Node`` always refer to this :class:`Node` type. + When we refer to the Julia's build-in Node type (e.g. 
``typeof(:foo)``), we always say ``Base.Symbol``. =# -type Symbol +type Node handle :: MX_SymbolHandle end -function Base.unsafe_convert(::Type{MX_handle}, obj::Symbol) +function Base.unsafe_convert(::Type{MX_handle}, obj::Node) Base.unsafe_convert(MX_handle, obj.handle) end -Base.convert(t::Type{MX_handle}, obj::Symbol) = Base.unsafe_convert(t, obj) -Base.cconvert(t::Type{MX_handle}, obj::Symbol) = Base.unsafe_convert(t, obj) +Base.convert(t::Type{MX_handle}, obj::Node) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, obj::Node) = Base.unsafe_convert(t, obj) #=doc -.. function:: deepcopy(self :: Symbol) +.. function:: deepcopy(self :: Node) - Make a deep copy of a symbol. + Make a deep copy of a Node. =# -function Base.deepcopy(self :: Symbol) +function Base.deepcopy(self :: Node) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCopy, (MX_handle, Ref{MX_handle}), self, ref_hdr) - return Symbol(MX_SymbolHandle(ref_hdr[])) + return Node(MX_SymbolHandle(ref_hdr[])) end #=doc -.. function:: copy(self :: Symbol) +.. function:: copy(self :: Node) - Make a copy of a symbol. The same as making a deep copy. + Make a copy of a Node. The same as making a deep copy. =# -function Base.copy(self :: Symbol) +function Base.copy(self :: Node) Base.deepcopy(self) end -function Base.call(self :: Symbol, args :: Symbol...) +function Base.call(self :: Node, args :: Node...) s = deepcopy(self) _compose!(s, args...) end -function Base.call(self :: Symbol; kwargs...) +function Base.call(self :: Node; kwargs...) s = deepcopy(self) _compose!(s; kwargs...) end @@ -64,10 +64,10 @@ macro _list_symbol_info(self, func_name) return names end end -function list_arguments(self :: Symbol) +function list_arguments(self :: Node) @_list_symbol_info(self, :MXSymbolListArguments) end -function list_outputs(self :: Symbol) +function list_outputs(self :: Node) @_list_symbol_info(self, :MXSymbolListOutputs) end """List all auxiliary states in the symbool. 
@@ -77,31 +77,31 @@ and do not have gradient. But still be useful for the specific operations. A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. Most operators do not have Auxiliary states. """ -function list_auxiliary_states(self :: Symbol) +function list_auxiliary_states(self :: Node) @_list_symbol_info(self, :MXSymbolListAuxiliaryStates) end -"Get a new grouped symbol whose output contains all the internal outputs of this symbol." -function get_internals(self :: Symbol) +"Get a new grouped Node whose output contains all the internal outputs of this Node." +function get_internals(self :: Node) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolGetInternals, (MX_handle, Ref{MX_handle}), self, ref_hdr) - return Symbol(MX_SymbolHandle(ref_hdr[])) + return Node(MX_SymbolHandle(ref_hdr[])) end "Create a symbolic variable with the given name" function Variable(name :: Union{Base.Symbol, AbstractString}) hdr_ref = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) - Symbol(MX_SymbolHandle(hdr_ref[])) + Node(MX_SymbolHandle(hdr_ref[])) end -"Create a symbol that groups symbols together" -function Group(symbols :: Symbol...) +"Create a Node that groups symbols together" +function Group(symbols :: Node...) handles = MX_handle[symbols...] ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateGroup, (MX_uint, Ptr{MX_handle}, Ref{MX_handle}), length(handles), handles, ref_hdr) - Symbol(MX_SymbolHandle(ref_hdr[])) + Node(MX_SymbolHandle(ref_hdr[])) end macro _infer_shape(self, keys, indptr, sdata) @@ -147,7 +147,7 @@ macro _infer_shape(self, keys, indptr, sdata) end end end -function infer_shape(self :: Symbol; kwargs...) +function infer_shape(self :: Node; kwargs...) sdata = MX_uint[] indptr = MX_uint[0] for (k,v) in kwargs @@ -157,7 +157,7 @@ function infer_shape(self :: Symbol; kwargs...) 
keys = AbstractString[string(x[1]) for x in kwargs] @_infer_shape(self, keys, indptr, sdata) end -function infer_shape(self :: Symbol, args :: Union{Tuple, Void}...) +function infer_shape(self :: Node, args :: Union{Tuple, Void}...) sdata = MX_uint[] indptr = MX_uint[0] for arg in args @@ -169,42 +169,42 @@ function infer_shape(self :: Symbol, args :: Union{Tuple, Void}...) @_infer_shape(self, keys, indptr, sdata) end -function Base.getindex(self :: Symbol, idx :: Union{Base.Symbol, AbstractString}) +function Base.getindex(self :: Node, idx :: Union{Base.Symbol, AbstractString}) idx = symbol(idx) i_idx = find(idx .== list_outputs(self)) @assert(length(i_idx) > 0, "Cannot find output with name '$idx'") @assert(length(i_idx) < 2, "Found duplicated output with name '$idx'") Base.getindex(self, i_idx[1]) end -function Base.getindex(self :: Symbol, idx :: Int) +function Base.getindex(self :: Node, idx :: Int) ref_hdr = Ref{MX_handle}(0) # note Julia is 1-based, while MXNet is 0-based @mxcall(:MXSymbolGetOutput, (MX_handle, MX_uint, Ref{MX_handle}), self, idx-1, ref_hdr) - return Symbol(MX_SymbolHandle(ref_hdr[])) + return Node(MX_SymbolHandle(ref_hdr[])) end import Base: +, .+ -function +(self :: Symbol, args :: Symbol...) +function +(self :: Node, args :: Node...) ret = self for arg in args ret = _Plus(ret, arg) end ret end -function .+(self :: Symbol, args :: Symbol...) +function .+(self :: Node, args :: Node...) +(self, args...) end import Base: -, .- -function -(self :: Symbol, arg :: Symbol) +function -(self :: Node, arg :: Node) _Minus(self, arg) end -function .-(self :: Symbol, arg :: Symbol) +function .-(self :: Node, arg :: Node) -(self, arg) end import Base: .* -function .*(self :: Symbol, args :: Symbol...) +function .*(self :: Node, args :: Node...) ret = self for arg in args ret = _Mul(ret, arg) @@ -213,11 +213,11 @@ function .*(self :: Symbol, args :: Symbol...) 
end import Base: ./ -function ./(self :: Symbol, arg :: Symbol) +function ./(self :: Node, arg :: Node) _Div(self, arg) end -function _compose!(sym :: Symbol; kwargs...) +function _compose!(sym :: Node; kwargs...) name = char_p(0) arg_keys = AbstractString[] arg_vals = MX_handle[] @@ -226,7 +226,7 @@ function _compose!(sym :: Symbol; kwargs...) if k == :name name = string(v) else - @assert(isa(v, Symbol), "Compose expect `Symbol` as arguments") + @assert(isa(v, Node), "Compose expect `Node` as arguments") push!(arg_keys, string(k)) push!(arg_vals, v) end @@ -237,10 +237,10 @@ function _compose!(sym :: Symbol; kwargs...) sym, name, length(arg_keys), arg_keys, arg_vals) return sym end -function _compose!(sym :: Symbol, args::Symbol...) +function _compose!(sym :: Node, args::Node...) _compose!(sym, char_p(0), args...) end -function _compose!(sym :: Symbol, name :: Union{Base.Symbol, char_p}, args::Symbol...) +function _compose!(sym :: Node, name :: Union{Base.Symbol, char_p}, args::Node...) if isa(name, Base.Symbol); name = string(name); end arg_keys = Ptr{char_p}(0) arg_vals = MX_handle[args...] 
@@ -251,27 +251,27 @@ function _compose!(sym :: Symbol, name :: Union{Base.Symbol, char_p}, args::Symb return sym end -"""Save Symbol into a JSON string""" -function to_json(self :: Symbol) +"""Save Node into a JSON string""" +function to_json(self :: Node) ref_json = Ref{char_p}(0) @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) return bytestring(ref_json[]) end -"""Load Symbol from a JSON string representation.""" -function from_json(repr :: AbstractString, ::Type{Symbol}) +"""Load Node from a JSON string representation.""" +function from_json(repr :: AbstractString, ::Type{Node}) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateFromJSON, (char_p, Ref{MX_handle}), repr, ref_hdr) - return Symbol(MX_SymbolHandle(ref_hdr[])) + return Node(MX_SymbolHandle(ref_hdr[])) end -"""Load Symbol from a JSON file.""" -function load(filename :: AbstractString, ::Type{Symbol}) +"""Load Node from a JSON file.""" +function load(filename :: AbstractString, ::Type{Node}) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateFromFile, (char_p, Ref{MX_handle}), filename, ref_hdr) - return Symbol(MX_SymbolHandle(ref_hdr[])) + return Node(MX_SymbolHandle(ref_hdr[])) end -function save(filename :: AbstractString, sym :: Symbol) +function save(filename :: AbstractString, sym :: Node) @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), sym, filename) end @@ -279,10 +279,10 @@ end libmxnet APIs ------------- -**autogen:EMBED:symbol:EMBED:autogen** +**autogen:EMBED:Node:EMBED:autogen** =# ################################################################################ -# Atomic Symbol functions dynamically imported from libmxnet +# Atomic Node functions dynamically imported from libmxnet ################################################################################ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) ref_name = Ref{char_p}(0) @@ -305,11 +305,11 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) if gen_docs 
f_desc = bytestring(ref_desc[]) * "\n\n" if !isempty(kv_nargs_s) - f_desc *= "This function support variable length positional :class:`Symbol` inputs.\n\n" + f_desc *= "This function support variable length positional :class:`Node` inputs.\n\n" end f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional.\n\n" - f_desc *= ":return: the constructed :class:`Symbol`.\n\n" + f_desc *= ":param Base.Symbol name: The name of the Node. (e.g. `:my_symbol`), optional.\n\n" + f_desc *= ":return: the constructed :class:`Node`.\n\n" return (func_name, f_desc) end @@ -325,7 +325,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) param_keys = AbstractString[] param_vals = AbstractString[] - symbol_kws = Dict{Base.Symbol, Symbol}() + symbol_kws = Dict{Base.Symbol, Node}() $(if kv_nargs != symbol("") quote @@ -338,7 +338,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) for (k,v) in kwargs if k == :name; continue; end - if isa(v, Symbol) + if isa(v, Node) symbol_kws[k] = v else push!(param_keys, string(k)) @@ -352,20 +352,20 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) $(if kv_nargs != symbol("") quote if length(symbol_kws) > 0 - @assert(false, "$func_name takes variable number of Symbol arguments, please pass input Symbols " * + @assert(false, "$func_name takes variable number of Node arguments, please pass input Symbols " * "via positional arguments, instead of keyword arguments.") end end end) - # create the symbol + # create the Node ref_sym_hdr = Ref{MX_handle}() @mxcall(:MXSymbolCreateAtomicSymbol, (MX_handle, MX_uint, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), $hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) sym_hdr = ref_sym_hdr[] - sym = Symbol(MX_SymbolHandle(sym_hdr)) + sym = Node(MX_SymbolHandle(sym_hdr)) hint = lowercase(string($func_name)) name = 
get!(DEFAULT_NAME_MANAGER, name, hint) diff --git a/test/unittest/symbol.jl b/test/unittest/node.jl similarity index 91% rename from test/unittest/symbol.jl rename to test/unittest/node.jl index 3397b7eaf70d..f7e9b51e60f0 100644 --- a/test/unittest/symbol.jl +++ b/test/unittest/node.jl @@ -1,4 +1,4 @@ -module TestSymbol +module TestNode using MXNet using Base.Test @@ -8,7 +8,7 @@ using ..Main: mlp2 # Test Implementations ################################################################################ function test_basic() - info("Symbol::basic") + info("Node::basic") model = mlp2() @test mx.list_arguments(model) == [:data,:fc1_weight,:fc1_bias,:fc2_weight,:fc2_bias] @@ -17,7 +17,7 @@ function test_basic() end function test_internal() - info("Symbol::internal") + info("Node::internal") data = mx.Variable(:data) oldfc = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) @@ -31,7 +31,7 @@ function test_internal() end function test_compose() - info("Symbol::compose") + info("Node::compose") data = mx.Variable(:data) net1 = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) @@ -47,7 +47,7 @@ function test_compose() end function test_infer_shape() - info("Symbol::infer_shape::mlp2") + info("Node::infer_shape::mlp2") model = mlp2() data_shape = (100, 100) @@ -61,7 +61,7 @@ function test_infer_shape() end function test_infer_shape_error() - info("Symbol::infer_shape::throws") + info("Node::infer_shape::throws") model = mlp2() weight_shape = (100, 1) @@ -70,12 +70,12 @@ function test_infer_shape_error() end function test_saveload() - info("Symbol::saveload::mlp2") + info("Node::saveload::mlp2") model = mlp2() fname = tempname() mx.save(fname, model) - model2 = mx.load(fname, mx.Symbol) + model2 = mx.load(fname, mx.Node) @test mx.to_json(model) == mx.to_json(model2) rm(fname) From 955b6d2d1f028950e7540a9f5d468f3c91ba657e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 15:22:25 -0500 Subject: [PATCH 185/630] refactor: Node -> SymbolicNode --- 
src/MXNet.jl | 2 +- src/MXNet.jl-e | 35 + src/base.jl-e | 212 ++++++ src/callback.jl-e | 142 ++++ src/context.jl-e | 44 ++ src/executor.jl | 16 +- src/executor.jl-e | 197 +++++ src/initializer.jl-e | 117 +++ src/io.jl | 2 +- src/io.jl-e | 641 ++++++++++++++++ src/kvstore.jl | 2 +- src/kvstore.jl-e | 132 ++++ src/metric.jl-e | 37 + src/model.jl | 12 +- src/model.jl-e | 566 ++++++++++++++ src/name.jl-e | 44 ++ src/ndarray.jl-e | 800 ++++++++++++++++++++ src/optimizer.jl-e | 226 ++++++ src/random.jl-e | 25 + src/symbolic-node.jl | 437 +++++++++++ src/{node.jl => symbolic-node.jl-e} | 0 src/util.jl-e | 70 ++ test/unittest/{node.jl => symbolic-node.jl} | 16 +- 23 files changed, 3750 insertions(+), 25 deletions(-) create mode 100644 src/MXNet.jl-e create mode 100644 src/base.jl-e create mode 100644 src/callback.jl-e create mode 100644 src/context.jl-e create mode 100644 src/executor.jl-e create mode 100644 src/initializer.jl-e create mode 100644 src/io.jl-e create mode 100644 src/kvstore.jl-e create mode 100644 src/metric.jl-e create mode 100644 src/model.jl-e create mode 100644 src/name.jl-e create mode 100644 src/ndarray.jl-e create mode 100644 src/optimizer.jl-e create mode 100644 src/random.jl-e create mode 100644 src/symbolic-node.jl rename src/{node.jl => symbolic-node.jl-e} (100%) create mode 100644 src/util.jl-e rename test/unittest/{node.jl => symbolic-node.jl} (90%) diff --git a/src/MXNet.jl b/src/MXNet.jl index 68875b62da90..f9f9e8664c4f 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -15,7 +15,7 @@ include("ndarray.jl") include("random.jl") include("name.jl") -include("node.jl") +include("symbolic-node.jl") include("executor.jl") include("metric.jl") diff --git a/src/MXNet.jl-e b/src/MXNet.jl-e new file mode 100644 index 000000000000..f9f9e8664c4f --- /dev/null +++ b/src/MXNet.jl-e @@ -0,0 +1,35 @@ +__precompile__() + +module MXNet + +# we put everything in the namespace mx, because there are a lot of +# functions with the same names as built-in utilities 
like "zeros", etc. +export mx +module mx +using Formatting + +include("base.jl") +include("context.jl") + +include("ndarray.jl") +include("random.jl") + +include("name.jl") +include("symbolic-node.jl") +include("executor.jl") + +include("metric.jl") +include("optimizer.jl") +include("initializer.jl") + +include("io.jl") +include("kvstore.jl") + +include("callback.jl") +include("model.jl") + +include("util.jl") + +end # mx + +end # module MXNet diff --git a/src/base.jl-e b/src/base.jl-e new file mode 100644 index 000000000000..0988400b9d31 --- /dev/null +++ b/src/base.jl-e @@ -0,0 +1,212 @@ +"Exception thrown when an error occurred calling MXNet API." +immutable MXError <: Exception + msg :: AbstractString +end + +################################################################################ +# Common types used in MXNet API +################################################################################ +typealias MX_uint Cuint +typealias MX_float Cfloat +typealias MX_handle Ptr{Void} + +typealias char_p Ptr{UInt8} +typealias char_pp Ptr{char_p} + +################################################################################ +# Initialization and library API entrance +################################################################################ +const MXNET_LIB = Libdl.find_library(["libmxnet.so","libmxnet.dll"], + [joinpath("$(get(ENV,"MXNET_HOME",""))","lib"), + joinpath(Pkg.dir("MXNet"),"deps/usr/lib")]) +if isempty(MXNET_LIB) + # touch this file, so that after the user properly build libmxnet, the precompiled + # MXNet.ji will be re-compiled to get MXNET_LIB properly. + touch(@__FILE__) + error("Cannot find or load libmxnet.so. 
Please see the document on how to build it.") +end + +function __init__() + _import_ndarray_functions() + _import_atomic_symbol_creators() + _import_io_iterators() + + atexit() do + # notify libmxnet we are shutting down + ccall( ("MXNotifyShutdown", MXNET_LIB), Cint, () ) + end +end + +function mx_get_last_error() + msg = ccall( ("MXGetLastError", MXNET_LIB), char_p, () ) + if msg == C_NULL + throw(MXError("Failed to get last error message")) + end + return bytestring(msg) +end + +"Utility macro to call MXNet API functions" +macro mxcall(fv, argtypes, args...) + f = eval(fv) + args = map(esc, args) + quote + _mxret = ccall( ($(Meta.quot(f)), $MXNET_LIB), + Cint, $argtypes, $(args...) ) + if _mxret != 0 + err_msg = mx_get_last_error() + throw(MXError(err_msg)) + end + end +end + +################################################################################ +# Handle types +################################################################################ +macro mx_define_handle_t(name, destructor) + name = esc(name) + quote + type $name + value :: MX_handle + + function $name(value = C_NULL) + hdr = new(value) + + $(if destructor != :nop + :(finalizer(hdr, delete!)) + end) + + return hdr + end + end + + $(if finalizer != :nop + quote + function delete!(h :: $name) + if h.value != C_NULL + @mxcall($(Meta.quot(destructor)), (MX_handle,), h.value) + h.value = C_NULL + end + end + end + end) + + function Base.unsafe_convert(::Type{MX_handle}, obj::$name) + obj.value + end + Base.convert(t::Type{MX_handle}, obj::$name) = Base.unsafe_convert(t, obj) + Base.cconvert(t::Type{MX_handle}, obj::$name) = Base.unsafe_convert(t, obj) + + function Base.isnull(obj::$name) obj.value == C_NULL end + end +end + +@mx_define_handle_t(MX_NDArrayHandle, MXNDArrayFree) +@mx_define_handle_t(MX_FunctionHandle, nop) +@mx_define_handle_t(MX_SymbolHandle, MXSymbolFree) +@mx_define_handle_t(MX_ExecutorHandle, MXExecutorFree) +@mx_define_handle_t(MX_DataIterHandle, MXDataIterFree) 
+@mx_define_handle_t(MX_KVStoreHandle, MXKVStoreFree) + +################################################################################ +# MXNet Params +# +# MXNet API use string to pass some common parameters like the configurations +# when defining layers. Typically, it is enough to use string(obj) to get a +# recognizable representation for libmxnet. However, there is currently a +# caveat: +# +# Because Julia use column-major ordering for tensors. In order to properly +# interact with Julia Arrays, the shape will look "reversed" from the Julia +# side. For example, a typical MNIST mini-batch tensor is of shape (28,28,1,100) +# from Julia side, while the shape information for the same piece of memory +# should be interpreted as (100,1,28,28) from C/C++/Python side. +# +# Therefore, when passing parameters to libmxnet, we should reverse the shape +# parameter. For example, when the user specify a non-square kernel size for +# a convolution or pooling layer. Unfortunately, those operators are automatically +# imported, and information about the type of each parameter is somehow limited. +# One hacky way is to match the type description for the string "Shape(tuple)" +# when importing operators. But currently we simply decided to reverse **all** +# NTuple{N, Int} passed to libmxnet. +# +# TODO: find a better solution in case this cause issues in the future. +################################################################################ +function dump_mx_param(val :: Any) + string(val) +end +function dump_mx_param{N,T<:Integer}(shape :: NTuple{N, T}) + string(tuple(flipdim([shape...],1)...)) +end + +"""A convenient macro copied from Mocha.jl that could be used to define structs +with default values and type checks. 
For example +```julia +@defstruct MyStruct Any ( + field1 :: Int = 0, + (field2 :: AbstractString = "", !isempty(field2)) +) +``` +where each field could be either +```julia +field_name :: field_type = default_value +``` +or put within a tuple, with the second element +specifying a validation check on the field value. +In the example above, the default value for +field2 does not satisfy the assertion, this +could be used to force user to provide a +valid value when no meaningful default value +is available. + +The macro will define a constructor that could accept +the keyword arguments. +""" +macro defstruct(name, super_name, fields) + @assert fields.head == :tuple + fields = fields.args + @assert length(fields) > 0 + name = esc(name) + super_name = esc(super_name) + + field_defs = Array(Expr, length(fields)) # :(field2 :: Int) + field_names = Array(Expr, length(fields)) # :field2 + field_defaults = Array(Expr, length(fields)) # :(field2 = 0) + field_types = Array(Expr, length(fields)) # Int + field_asserts = Array(Expr, length(fields)) # :(field2 >= 0) + + for i = 1:length(fields) + field = fields[i] + if field.head == :tuple + field_asserts[i] = esc(field.args[2]) + field = field.args[1] + end + field_defs[i] = esc(field.args[1]) + field_names[i] = esc(field.args[1].args[1]) + field_types[i] = esc(field.args[1].args[2]) + field_defaults[i] = Expr(:kw, field.args[1].args[1], esc(field.args[2])) + end + + # body of layer type, defining fields + type_body = Expr(:block, field_defs...) + + # constructor + converts = map(zip(field_names, field_types)) do param + f_name, f_type = param + :($f_name = convert($f_type, $f_name)) + end + asserts = map(filter(i -> isdefined(field_asserts,i), 1:length(fields))) do i + :(@assert($(field_asserts[i]))) + end + construct = Expr(:call, name, field_names...) 
+ ctor_body = Expr(:block, converts..., asserts..., construct) + ctor_def = Expr(:call, name, Expr(:parameters, field_defaults...)) + ctor = Expr(:(=), ctor_def, ctor_body) + + quote + type $(name) <: $super_name + $type_body + end + + $ctor + end +end diff --git a/src/callback.jl-e b/src/callback.jl-e new file mode 100644 index 000000000000..9f3d85b576ff --- /dev/null +++ b/src/callback.jl-e @@ -0,0 +1,142 @@ +#=doc +Callbacks in training +===================== +=# + +#=doc +.. class:: AbstractCallback + + Abstract type of callback functions used in training. +=# +abstract AbstractCallback + +#=doc +.. class:: AbstractBatchCallback + + Abstract type of callbacks to be called every mini-batch. +=# +abstract AbstractBatchCallback <: AbstractCallback + +#=doc +.. class:: AbstractEpochCallback + + Abstract type of callbacks to be called every epoch. +=# +abstract AbstractEpochCallback <: AbstractCallback + +type BatchCallback <: AbstractBatchCallback + frequency :: Int + call_on_0 :: Bool + callback :: Function +end + +#=doc +.. function:: every_n_batch(callback :: Function, n :: Int; call_on_0 = false) + + A convenient function to construct a callback that runs every ``n`` mini-batches. + + :param Int call_on_0: keyword argument, default false. Unless set, the callback + will **not** be run on batch 0. + + For example, the :func:`speedometer` callback is defined as + + .. code-block:: julia + + every_n_iter(frequency, call_on_0=true) do state :: OptimizationState + if state.curr_batch == 0 + # reset timer + else + # compute and print speed + end + end + + :seealso: :func:`every_n_epoch`, :func:`speedometer`. +=# +function every_n_batch(callback :: Function, n :: Int; call_on_0 :: Bool = false) + BatchCallback(n, call_on_0, callback) +end +function Base.call(cb :: BatchCallback, state :: OptimizationState) + if state.curr_batch == 0 + if cb.call_on_0 + cb.callback(state) + end + elseif state.curr_batch % cb.frequency == 0 + cb.callback(state) + end +end + +#=doc +.. 
function:: speedometer(; frequency=50) + + Create an :class:`AbstractBatchCallback` that measure the training speed + (number of samples processed per second) every k mini-batches. + + :param Int frequency: keyword argument, default 50. The frequency (number of + min-batches) to measure and report the speed. +=# +function speedometer(;frequency::Int=50) + cl_tic = 0 + every_n_batch(frequency, call_on_0=true) do state :: OptimizationState + if state.curr_batch == 0 + # reset timer + cl_tic = time() + else + speed = frequency * state.batch_size / (time() - cl_tic) + info(format("Speed: {1:>6.2f} samples/sec", speed)) + cl_tic = time() + end + end +end + + +type EpochCallback <: AbstractEpochCallback + frequency :: Int + call_on_0 :: Bool + callback :: Function +end + +#=doc +.. function:: every_n_epoch(callback :: Function, n :: Int; call_on_0 = false) + + A convenient function to construct a callback that runs every ``n`` full data-passes. + + :param Int call_on_0: keyword argument, default false. Unless set, the callback + will **not** be run on epoch 0. Epoch 0 means no training has been performed + yet. This is useful if you want to inspect the randomly initialized model + that has not seen any data yet. + + :seealso: :func:`every_n_iter`. +=# +function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) + EpochCallback(n, call_on_0, callback) +end +function Base.call(cb :: EpochCallback, model :: Any, state :: OptimizationState) + if state.curr_epoch == 0 + if cb.call_on_0 + cb.callback(model, state) + end + elseif state.curr_epoch % cb.frequency == 0 + cb.callback(model, state) + end +end + +#=doc +.. function:: do_checkpoint(prefix; frequency=1, save_epoch_0=false) + + Create an :class:`AbstractEpochCallback` that save checkpoints of the model to disk. + The checkpoints can be loaded back later on. + + :param AbstractString prefix: the prefix of the filenames to save the model. 
The model + architecture will be saved to prefix-symbol.json, while the weights will be saved + to prefix-0012.params, for example, for the 12-th epoch. + :param Int frequency: keyword argument, default 1. The frequency (measured in epochs) to + save checkpoints. + :param Bool save_epoch_0: keyword argument, default false. Whether we should save a + checkpoint for epoch 0 (model initialized but not seen any data yet). +=# +function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) + mkpath(dirname(prefix)) + every_n_epoch(frequency, call_on_0=save_epoch_0) do model, state + save_checkpoint(model, prefix, state) + end +end diff --git a/src/context.jl-e b/src/context.jl-e new file mode 100644 index 000000000000..1e96c305fb04 --- /dev/null +++ b/src/context.jl-e @@ -0,0 +1,44 @@ +#=doc +Context +======= +=# +@enum CONTEXT_TYPE CPU=1 GPU=2 CPU_PINNED=3 + +#=doc +.. class:: Context + + A context describes the device type and id on which computation should be carried on. +=# +immutable Context + device_type :: CONTEXT_TYPE + device_id :: Int +end +Context(dev_type :: Union{CONTEXT_TYPE, Int}, dev_id :: Int = 0) = + Context(convert(CONTEXT_TYPE, dev_type), dev_id) + +function Base.show(io :: IO, ctx :: Context) + print(io, "$(ctx.device_type)$(ctx.device_id)") +end + +#=doc +.. function:: cpu(dev_id=0) + + :param Int dev_id: the CPU id. + + Get a CPU context with a specific id. ``cpu()`` is usually the default context for many + operations when no context is specified. +=# +function cpu(dev_id::Int=0) + return Context(CPU, dev_id) +end + +#=doc +.. function:: gpu(dev_id=0) + + :param Int dev_id: the GPU device id. + + Get a GPU context with a specific id. The K GPUs on a node is typically numbered as 0,...,K-1. +=# +function gpu(dev_id::Int=0) + return Context(GPU, dev_id) +end diff --git a/src/executor.jl b/src/executor.jl index 6dea17763101..5844a62e446c 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -6,13 +6,13 @@ Executor #=doc .. 
class:: Executor - An executor is a realization of a symbolic architecture defined by a :class:`Node`. + An executor is a realization of a symbolic architecture defined by a :class:`SymbolicNode`. The actual forward and backward computation specified by the network architecture can be carried out with an executor. =# type Executor handle :: MX_ExecutorHandle - symbol :: Node + symbol :: SymbolicNode arg_arrays :: Vector{NDArray} grad_arrays :: Vector{Union{Void,NDArray}} aux_arrays :: Vector{NDArray} @@ -20,7 +20,7 @@ type Executor arg_dict :: Dict{Base.Symbol, NDArray} aux_dict :: Dict{Base.Symbol, NDArray} end -function Executor(hdr :: MX_ExecutorHandle, symbol :: Node, +function Executor(hdr :: MX_ExecutorHandle, symbol :: SymbolicNode, arg_arrays :: Vector{NDArray}, grad_arrays :: Vector{Union{Void,NDArray}}, aux_arrays :: Vector{NDArray}) # get output arrays @@ -73,9 +73,9 @@ end #=doc .. function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) - Create an :class:`Executor` by binding a :class:`Node` to concrete :class:`NDArray`. + Create an :class:`Executor` by binding a :class:`SymbolicNode` to concrete :class:`NDArray`. - :param Node sym: the network architecture describing the computation graph. + :param SymbolicNode sym: the network architecture describing the computation graph. :param Context ctx: the context on which the computation should run. :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete arrays for all the inputs in the network architecture. 
The inputs typically include @@ -86,7 +86,7 @@ end :param grad_req: =# @enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 -function bind(self :: Node, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; +function bind(self :: SymbolicNode, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), aux_states :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), grad_req :: Union{GRAD_REQ,Vector{GRAD_REQ},Dict{Base.Symbol,GRAD_REQ}} = GRAD_WRITE) @@ -116,7 +116,7 @@ function bind(self :: Node, ctx :: Context, args :: Union{Vector{NDArray},Dict{B executor = Executor(MX_ExecutorHandle(ref_hdr[]), self, args, args_grad, aux_states) end -function bind(self :: Node; kwargs...) +function bind(self :: SymbolicNode; kwargs...) kwargs = Dict(kwargs) @assert(haskey(kwargs, :args), "Must specify args") args = pop!(kwargs, :args) @@ -128,7 +128,7 @@ function bind(self :: Node; kwargs...) bind(self, context, args; kwargs...) end -function simple_bind(self :: Node, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) +function simple_bind(self :: SymbolicNode, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) arg_shapes, out_shapes, aux_shapes = infer_shape(self; kwargs...) @assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") diff --git a/src/executor.jl-e b/src/executor.jl-e new file mode 100644 index 000000000000..6dea17763101 --- /dev/null +++ b/src/executor.jl-e @@ -0,0 +1,197 @@ +#=doc +Executor +======== +=# + +#=doc +.. class:: Executor + + An executor is a realization of a symbolic architecture defined by a :class:`Node`. + The actual forward and backward computation specified by the network architecture can + be carried out with an executor. 
+=# +type Executor + handle :: MX_ExecutorHandle + symbol :: Node + arg_arrays :: Vector{NDArray} + grad_arrays :: Vector{Union{Void,NDArray}} + aux_arrays :: Vector{NDArray} + outputs :: Vector{NDArray} + arg_dict :: Dict{Base.Symbol, NDArray} + aux_dict :: Dict{Base.Symbol, NDArray} +end +function Executor(hdr :: MX_ExecutorHandle, symbol :: Node, + arg_arrays :: Vector{NDArray}, grad_arrays :: Vector{Union{Void,NDArray}}, + aux_arrays :: Vector{NDArray}) + # get output arrays + ref_size = Ref{MX_uint}(0) + ref_hdrs = Ref{Ptr{MX_handle}}(0) + @mxcall(:MXExecutorOutputs, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_handle}}), + hdr, ref_size, ref_hdrs) + out_hdrs = pointer_to_array(ref_hdrs[], ref_size[]) + out_arrays = [NDArray(MX_NDArrayHandle(x)) for x in out_hdrs] + + arg_names = list_arguments(symbol) + @assert(length(arg_names) == length(unique(arg_names)), "Duplicated names in arguments: $arg_names") + arg_dict = Dict{Base.Symbol,NDArray}(zip(arg_names, arg_arrays)) + + aux_names = list_auxiliary_states(symbol) + @assert(length(aux_names) == length(unique(aux_names)), "Duplicated names in auxiliary states: $aux_names") + aux_dict = Dict{Base.Symbol,NDArray}(zip(aux_names, aux_arrays)) + + Executor(hdr, symbol, arg_arrays, grad_arrays, aux_arrays, out_arrays, arg_dict, aux_dict) +end + +function Base.unsafe_convert(::Type{MX_handle}, obj::Executor) + Base.unsafe_convert(MX_handle, obj.handle) +end +Base.convert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) + +function _get_ndarray_inputs(arg_key::AbstractString, args::Vector{NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) + @assert(length(args) == length(arg_names), "Length of $arg_key does not match number of arguments") + return (MX_handle[args...], args) +end +function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) + 
args_vec = map(arg_names) do name + arr = get(args, name, nothing) + if !allow_missing + @assert(!isa(arr, Void), "Must specify all arguments in $arg_key ($name is missing)") + end + arr + end + # help the type inference + if allow_missing + args_vec = Union{NDArray,Void}[args_vec...] + else + args_vec = NDArray[args_vec...] + end + args_hdr = MX_handle[(isa(x,Void) ? MX_handle(0) : x) for x in args_vec] + return (args_hdr, args_vec) +end + +#=doc +.. function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) + + Create an :class:`Executor` by binding a :class:`Node` to concrete :class:`NDArray`. + + :param Node sym: the network architecture describing the computation graph. + :param Context ctx: the context on which the computation should run. + :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete + arrays for all the inputs in the network architecture. The inputs typically include + network parameters (weights, bias, filters, etc.), data and labels. See :func:`list_arguments` + and :func:`infer_shape`. 
+ :param args_grad: TODO + :param aux_states: + :param grad_req: +=# +@enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 +function bind(self :: Node, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; + args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), + aux_states :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), + grad_req :: Union{GRAD_REQ,Vector{GRAD_REQ},Dict{Base.Symbol,GRAD_REQ}} = GRAD_WRITE) + + arg_names = list_arguments(self) + + args_hdr, args = _get_ndarray_inputs("args", args, arg_names, false) + args_grad_hdr, args_grad = _get_ndarray_inputs("args_grad", args_grad, arg_names, true) + aux_args_hdr, aux_states = _get_ndarray_inputs("aux_states", aux_states, list_auxiliary_states(self), false) + + if isa(grad_req, GRAD_REQ) + reqs = MX_uint[grad_req for i=1:length(args)] + elseif isa(grad_req, Vector{GRAD_REQ}) + @assert(length(grad_req) == length(args)) + reqs = MX_uint[grad_req...] + elseif isa(grad_req, Dict{Base.Symbol, GRAD_REQ}) + reqs = MX_uint[get(grad_req, name, GRAD_NOP) for name in arg_names] + end + + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXExecutorBind, + (MX_handle, Cint, Cint, MX_uint, Ptr{MX_handle}, Ptr{MX_handle}, Ptr{MX_uint}, + MX_uint, Ptr{MX_handle}, Ref{MX_handle}), + self, ctx.device_type, ctx.device_id, length(args), args_hdr, + args_grad_hdr, reqs, length(aux_states), aux_args_hdr, ref_hdr) + args_grad = convert(Vector{Union{Void,NDArray}}, args_grad) + executor = Executor(MX_ExecutorHandle(ref_hdr[]), self, + args, args_grad, aux_states) +end +function bind(self :: Node; kwargs...) + kwargs = Dict(kwargs) + @assert(haskey(kwargs, :args), "Must specify args") + args = pop!(kwargs, :args) + if haskey(kwargs, :context) + context = pop!(kwargs, :context) + else + context = cpu() + end + bind(self, context, args; kwargs...) +end + +function simple_bind(self :: Node, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) 
+ arg_shapes, out_shapes, aux_shapes = infer_shape(self; kwargs...) + @assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") + + arg_arrays = NDArray[zeros(shape, ctx) for shape in arg_shapes] + arg_names = list_arguments(self) + if grad_req == GRAD_NOP + grad_arrays = Dict{Base.Symbol,NDArray}() + else + provided_data_names = [x[1] for x in kwargs] + grad_arrays = Dict{Base.Symbol,NDArray}() + for (name, shape) in zip(arg_names, arg_shapes) + # if not in provided data, should be parameters + if !in(name, provided_data_names) + grad_arrays[name] = zeros(shape, ctx) + end + end + end + + aux_arrays = [zeros(shape, ctx) for shape in aux_shapes] + return bind(self, ctx, arg_arrays, args_grad=grad_arrays, grad_req=grad_req, aux_states=aux_arrays) +end + + +function forward(self :: Executor; is_train::Bool=false, kwargs...) + for (k,v) in kwargs + @assert(k ∈ self.arg_dict, "Unknown argument $k") + @assert(isa(v, NDArray), "Keyword argument $k must be an NDArray") + copy!(self.arg_dict[k], v) + end + + @mxcall(:MXExecutorForward, (MX_handle, Cint), self, is_train) +end + +function backward(self :: Executor) + backward(self, NDArray[]) +end +function backward(self :: Executor, out_grad :: NDArray) + backward(self, [out_grad]) +end +function backward(self :: Executor, out_grads :: Vector{NDArray}) + out_grads = MX_handle[out_grads...] 
+ @mxcall(:MXExecutorBackward, (MX_handle, MX_uint, Ptr{MX_handle}), self, length(out_grads), out_grads) +end + + +function copy_params_from(self::Executor, arg_params::Dict{Base.Symbol,NDArray}, + aux_params::Union{Void,Dict{Base.Symbol,NDArray}}=nothing; + allow_extra_params::Bool=false) + for (name, array) in arg_params + if haskey(self.arg_dict, name) + copy!(self.arg_dict[name], array) + else + @assert(allow_extra_params, "Extra params $name not in the arguments") + end + end + + if !isa(aux_params, Void) + for (name, array) in aux_params + if haskey(self.aux_dict, name) + copy!(self.aux_dict[name], array) + else + @assert(allow_extra_params, "Extra auxiliary state $name not recognized") + end + end + end +end diff --git a/src/initializer.jl-e b/src/initializer.jl-e new file mode 100644 index 000000000000..8f78bfba9367 --- /dev/null +++ b/src/initializer.jl-e @@ -0,0 +1,117 @@ +#=doc +Initializers +============ +Interface +--------- +=# + +#=doc +.. class:: AbstractInitializer + + The abstract base class for all initializers. + +To define a new initializer, it is +enough to derive a new type, and implement one or more of the following methods: + +.. function:: _init_weight(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. function:: _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + +Or, if full behavior customization is needed, override the following function + +.. 
function:: call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +=# +abstract AbstractInitializer + +function call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + strname = string(name) + if endswith(strname, "bias") + _init_bias(self, name, array) + elseif endswith(strname, "gamma") + _init_gamma(self, name, array) + elseif endswith(strname, "beta") + _init_beta(self, name, array) + elseif endswith(strname, "weight") + _init_weight(self, name, array) + elseif endswith(strname, "moving_mean") + _init_zero(self, name, array) + elseif endswith(strname, "moving_var") + _init_zero(self, name, array) + else + _init_default(self, name, array) + end +end + +function _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + array[:] = 0 +end +function _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + array[:] = 1 +end +function _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + array[:] = 0 +end +function _init_zero(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + array[:] = 0 +end + +#=doc +Built-in initializers +--------------------- +=# +#=doc +.. class:: UniformInitializer + + Initialize weights according to a uniform distribution within the provided scale. +=# +immutable UniformInitializer <: AbstractInitializer + scale :: AbstractFloat +end +#=doc +.. function UniformInitializer(scale=0.07) + + Construct a :class:`UniformInitializer` with the specified scale. +=# +UniformInitializer() = UniformInitializer(0.07) + +function _init_weight(self :: UniformInitializer, name :: Base.Symbol, array :: NDArray) + rand!(-self.scale, self.scale, array) +end + +#=doc +.. class:: NormalInitializer + + Initialize weights according to a univariate Gaussian distribution. +=# +immutable NormalInitializer <: AbstractInitializer + μ :: AbstractFloat + σ :: AbstractFloat +end +#=doc +.. 
function:: NormalIninitializer(; mu=0, sigma=0.01) + + Construct a :class:`NormalInitializer` with mean ``mu`` and variance ``sigma``. +=# +NormalInitializer(; mu=0, sigma=0.01) = NormalInitializer(mu, sigma) + +function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) + randn!(self.μ, self.σ, array) +end + +#=doc +.. class:: XaiverInitializer + + The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding + the difficulty of training deep feedforward neuralnetworks*. +=# +immutable XaiverInitializer <: AbstractInitializer +end + +function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) + dims = size(array) + fan_in = prod(dims[2:end]) + fan_out = dims[1] + scale = sqrt(3 / (fan_in + fan_out)) + rand!(-scale, scale, array) +end diff --git a/src/io.jl b/src/io.jl index 5c59e96bb611..71bbc38a7f50 100644 --- a/src/io.jl +++ b/src/io.jl @@ -35,7 +35,7 @@ and split it into mini-batches so that the model can consume the data in a unifo training stage, both *data* and *label* will be feeded into the model, while during prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and of any shapes. The provided data and label names here should match the input names in a target - :class:`Node`. + :class:`SymbolicNode`. A data provider should also implement the Julia iteration interface, in order to allow iterating through the data set. The provider will be called in the following way: diff --git a/src/io.jl-e b/src/io.jl-e new file mode 100644 index 000000000000..5c59e96bb611 --- /dev/null +++ b/src/io.jl-e @@ -0,0 +1,641 @@ +#=doc +Data Providers +============== +Interface +--------- + +Data providers are wrappers that load external data, be it images, text, or general tensors, +and split it into mini-batches so that the model can consume the data in a uniformed way. +=# + +#=doc +.. class:: AbstractDataProvider + + The root type for all data provider. 
A data provider should implement the following interfaces: + + .. function:: get_batch_size(provider) -> Int + + :param AbstractDataProvider provider: the data provider. + :return: the mini-batch size of the provided data. All the provided data should have the + same mini-batch size (i.e. the last dimension). + + .. function:: provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} + + :param AbstractDataProvider provider: the data provider. + :return: a vector of (name, shape) pairs describing the names of the data it provides, and + the corresponding shapes. + + .. function:: provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} + + :param AbstractDataProvider provider: the data provider. + :return: a vector of (name, shape) pairs describing the names of the labels it provides, and + the corresponding shapes. + + The difference between *data* and *label* is that during + training stage, both *data* and *label* will be feeded into the model, while during + prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and + of any shapes. The provided data and label names here should match the input names in a target + :class:`Node`. + + A data provider should also implement the Julia iteration interface, in order to allow iterating + through the data set. The provider will be called in the following way: + + .. code-block:: julia + + for batch in eachbatch(provider) + data = get_data(provider, batch) + end + + which will be translated by Julia compiler into + + .. code-block:: julia + + state = Base.start(eachbatch(provider)) + while !Base.done(provider, state) + (batch, state) = Base.next(provider, state) + data = get_data(provider, batch) + end + + By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface + is implemented on the provider type itself. But the extra layer of abstraction allows us to + implement a data provider easily via a Julia ``Task`` coroutine. 
+ The detailed interface function is listed below: + + .. function:: Base.eltype(provider) -> AbstractDataBatch + + :param AbstractDataProvider provider: the data provider. + :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. + + .. function:: Base.start(provider) -> AbstractDataProviderState + + :param AbstractDataProvider provider: the data provider. + + This function is always called before iterating into the dataset. It should initialize + the iterator, reset the index, and do data shuffling if needed. + + .. function:: Base.done(provider, state) -> Bool + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. + :return: true if there is no more data to iterate in this dataset. + + .. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) + + :param AbstractDataProvider provider: the data provider. + :return: the current data batch, and the state for the next iteration. + + Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that + is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this + case, you can safely assume that + + * :func:`Base.start` will always be called, and called only once before the iteration starts. + * :func:`Base.done` will always be called at the beginning of every iteration and always be called once. + * If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with + a call to :func:`Base.start`. + * :func:`Base.next` will always be called only once in each iteration. It will always be called after + one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will + not be called. + + With those assumptions, it will be relatively easy to adapt any existing iterator. 
See the implementation + of the built-in :class:`MXDataProvider` for example. + + .. caution:: + + Please do not use the one data provider simultaneously in two different places, either in parallel, + or in a nested loop. For example, the behavior for the following code is undefined + + .. code-block:: julia + + for batch in data + # updating the parameters + + # now let's test the performance on the training set + for b2 in data + # ... + end + end +=# +abstract AbstractDataProvider + +#=doc +.. class:: AbstractDataProviderState + + Base type for data provider states. +=# +abstract AbstractDataProviderState + +#=doc +.. class:: AbstractDataBatch + + Base type for a data mini-batch. It should implement the following interfaces: + + .. function:: count_samples(provider, batch) -> Int + + :param AbstractDataBatch batch: the data batch object. + :return: the number of samples in this batch. This number should be greater than 0, but + less than or equal to the batch size. This is used to indicate at the end of + the data set, there might not be enough samples for a whole mini-batch. + + .. function:: get_data(provider, batch) -> Vector{NDArray} + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :return: a vector of data in this batch, should be in the same order as declared in + :func:`provide_data() `. + + The last dimension of each :class:`NDArray` should always match the batch_size, even when + :func:`count_samples` returns a value less than the batch size. In this case, + the data provider is free to pad the remaining contents with any value. + + .. function:: get_label(provider, batch) -> Vector{NDArray} + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :return: a vector of labels in this batch. Similar to :func:`get_data`. + + + The following utility functions will be automatically defined. + + .. 
function:: get(provider, batch, name) -> NDArray + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :param Base.Symbol name: the name of the data to get, should be one of the names + provided in either :func:`provide_data() ` + or :func:`provide_label() `. + :return: the corresponding data array corresponding to that name. + + .. function:: load_data!(provider, batch, targets) + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :param targets: the targets to load data into. + :type targets: Vector{Vector{SlicedNDArray}} + + The targets is a list of the same length as number of data provided by this provider. + Each element in the list is a list of :class:`SlicedNDArray`. This list described a + spliting scheme of this data batch into different slices, each slice is specified by + a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch + that should be loaded into the corresponding *ndarray*. + + This utility function is used in data parallelization, where a mini-batch is splited + and computed on several different devices. + + .. function:: load_label!(provider, batch, targets) + + :param AbstractDataProvider provider: the data provider. + :param AbstractDataBatch batch: the data batch object. + :param targets: the targets to load label into. + :type targets: Vector{Vector{SlicedNDArray}} + + The same as :func:`load_data!`, except that this is for loading labels. +=# +abstract AbstractDataBatch + +#=doc +.. class:: DataBatch + + A basic subclass of :class:`AbstractDataBatch`, that implement the interface by + accessing member fields. +=# +type DataBatch <: AbstractDataBatch + data :: Vector{NDArray} + label :: Vector{NDArray} + count :: Int +end +count_samples(batch :: DataBatch) = batch.count +get_data(batch :: DataBatch) = batch.data +get_label(batch :: DataBatch) = batch.label + +#=doc +.. 
class:: SlicedNDArray + + A alias type of ``Tuple{UnitRange{Int},NDArray}``. +=# +typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} + +function _load_general!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, + targets :: Vector{Vector{SlicedNDArray}}, loader::Function) + data = loader(provider, batch) + for (d_src, d_targets) in zip(data, targets) + for (slice_idx, d_dst) in d_targets + copy!(d_dst, slice(d_src, slice_idx)) + end + end +end +function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, + targets :: Vector{Vector{SlicedNDArray}}) + _load_general!(provider, batch, targets, get_data) +end +function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, + targets :: Vector{Vector{SlicedNDArray}}) + _load_general!(provider, batch, targets, get_label) +end + +function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{NDArray}) + for (src, dst) in zip(get_data(provider, batch), targets) + copy!(dst, src) + end +end +function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{NDArray}) + for (src, dst) in zip(get_label(provider, batch), targets) + copy!(dst, src) + end +end + +import Base.get +function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name :: Base.Symbol) + for (idx, (k, s)) in enumerate(provide_data(provider)) + if name == k + return get_data(provider, batch)[idx] + end + end + for (idx, (k, s)) in enumerate(provide_label(provider)) + if name == k + return get_label(provider, batch)[idx] + end + end + error("$name is not provided by this data provider") +end + +eachbatch(provider :: AbstractDataProvider) = provider + +#=doc +Built-in data providers +----------------------- +=# + +################################################################################ +#=doc +.. class:: ArrayDataProvider + + A convenient tool to iterate :class:`NDArray` or Julia ``Array``. 
+=# +type ArrayDataProvider <: AbstractDataProvider + data_arrays :: Vector{Array{MX_float}} + data_names :: Vector{Base.Symbol} + label_arrays :: Vector{Array{MX_float}} + label_names :: Vector{Base.Symbol} + batch_size :: Int + sample_count :: Int + shuffle :: Bool + data_padding :: MX_float + label_padding :: MX_float + + data_batch :: Vector{NDArray} + label_batch :: Vector{NDArray} +end + +#=doc +.. function:: ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) + + Construct a data provider from :class:`NDArray` or Julia Arrays. + + :param data: the data, could be + + - a :class:`NDArray`, or a Julia Array. This is equivalent to ``:data => data``. + - a name-data pair, like ``:mydata => array``, where ``:mydata`` is the name of the data + and ``array`` is an :class:`NDArray` or a Julia Array. + - a list of name-data pairs. + + :param label: the same as the ``data`` parameter. When this argument is omitted, the constructed + provider will provide no labels. + :param Int batch_size: the batch size, default is 0, which means treating the whole array as a + single mini-batch. + :param Bool shuffle: turn on if the data should be shuffled at every epoch. + :param Real data_padding: when the mini-batch goes beyond the dataset boundary, there might + be less samples to include than a mini-batch. This value specify a scalar to pad the + contents of all the missing data points. + :param Real label_padding: the same as ``data_padding``, except for the labels. +=# +# Julia's type system is sometimes very frustrating. You cannot specify a function +# with argument Vector{Pair} to expect to be matched when calling with the parameter +# [:foo => zeros(2,3), :bar => zeros(3)] because the type inference gives very specific +# results, about the parametric type in the Pair{T1,T2} type, thus does not match the +# generic Pair type. In general, Int <: Number but Vector{Int} <: Vector{Number} is not +# true. So let us just use Any here... 
+function ArrayDataProvider(data::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) + ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle, data_padding=data_padding, label_padding=label_padding) +end +function ArrayDataProvider(data::Any, label::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) + asarr{T}(arr :: Array{T}) = convert(Array{MX_float}, arr) + asarr(arr :: NDArray) = copy(arr) + + if isa(data, Union{NDArray, Array}) && eltype(data) <: Real + data_names = [:data] + data_arrays = Array{MX_float}[asarr(data)] + elseif isa(data, Pair) + @assert isa(data.first, Base.Symbol) && isa(data.second, Union{NDArray, Array}) + data_names = [data.first] + data_arrays = Array{MX_float}[asarr(data.second)] + elseif isa(data, Vector) || isa(data, Tuple) + map(data) do d + @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) + end + data_names = Base.Symbol[d.first for d in data] + data_arrays = Array{MX_float}[asarr(d.second) for d in data] + else + error("Invalid data argument type") + end + + if isa(label, Union{NDArray, Array}) && eltype(label) <: Real + label_names = [:softmax_label] + label_arrays = Array{MX_float}[asarr(label)] + elseif isa(label, Pair) + @assert isa(label.first, Base.Symbol) && isa(label.second, Union{NDArray, Array}) + label_names = [label.first] + label_arrays = Array{MX_float}[asarr(label.second)] + elseif isa(label, Vector) || isa(label, Tuple) + map(label) do d + @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) + end + label_names = Base.Symbol[d.first for d in label] + label_arrays = Array{MX_float}[asarr(d.second) for d in label] + else + error("Invalid label argument type") + end + + @assert length(data_arrays) > 0 + sample_count = size(data_arrays[1])[end] + for i = 1:length(data_names) + @assert(size(data_arrays[i])[end] == sample_count, + "Number of samples in 
$(data_names[i]) is mismatch with $(data_names[1])") + end + for i = 1:length(label_names) + @assert(size(label_arrays[i])[end] == sample_count, + "Number of samples in $(label_names[i]) is mismatch with $(data_names[1])") + end + + if batch_size == 0 + batch_size = sample_count + end + @assert 0 < batch_size <= sample_count + + function gen_batch_nds(arrs :: Vector{Array{MX_float}}, bsize :: Int) + map(arrs) do arr + shape = size(arr) + empty(shape[1:end-1]..., bsize) + end + end + + data_batch = gen_batch_nds(data_arrays, batch_size) + label_batch = gen_batch_nds(label_arrays, batch_size) + + # reshape data and labels into 2D tensors, so that it is easier to work with them + data_arrays = map(data_arrays) do arr + reshape(arr, prod(size(arr)[1:end-1]), size(arr)[end]) + end + label_arrays = map(label_arrays) do arr + reshape(arr, prod(size(arr)[1:end-1]), size(arr)[end]) + end + + ArrayDataProvider(data_arrays, data_names, label_arrays, label_names, batch_size, + sample_count, shuffle, data_padding, label_padding, data_batch, label_batch) +end + +function provide_data(provider::ArrayDataProvider) + return collect(zip(provider.data_names, map(size, provider.data_batch))) +end +function provide_label(provider::ArrayDataProvider) + return collect(zip(provider.label_names, map(size, provider.label_batch))) +end +get_batch_size(provider::ArrayDataProvider) = provider.batch_size + +immutable ArrayDataProviderState <: AbstractDataProviderState + curr_idx :: Int +end + +function Base.eltype(provider :: ArrayDataProvider) + ArrayDataProviderState +end + +function Base.start(provider :: ArrayDataProvider) + if provider.shuffle + # re-shuffle all data + idx_perm = randperm(provider.sample_count) + provider.data_arrays = map(x->x[:,idx_perm], provider.data_arrays) + provider.label_arrays = map(x->x[:,idx_perm], provider.label_arrays) + end + + return ArrayDataProviderState(1) +end + +function Base.done(provider::ArrayDataProvider, state :: ArrayDataProviderState) + return 
state.curr_idx > provider.sample_count +end + +immutable ArrayDataBatch <: AbstractDataBatch + idx :: UnitRange{Int} +end +function Base.next(provider :: ArrayDataProvider, state :: ArrayDataProviderState) + idx = state.curr_idx:min(state.curr_idx+provider.batch_size-1, provider.sample_count) + return (ArrayDataBatch(idx), ArrayDataProviderState(idx.stop+1)) +end + +function count_samples(provider :: ArrayDataProvider, batch :: ArrayDataBatch) + return length(batch.idx) +end + +function get_data(provider :: ArrayDataProvider, batch :: ArrayDataBatch) + for (src, dst) in zip(provider.data_arrays, provider.data_batch) + copy_ignore_shape!(dst[1:length(batch.idx)], src[:, batch.idx]) + if length(batch.idx) < provider.batch_size + dst[length(batch.idx)+1:provider.batch_size] = provider.data_padding + end + end + return provider.data_batch +end +function get_label(provider :: ArrayDataProvider, batch :: ArrayDataBatch) + for (src, dst) in zip(provider.label_arrays, provider.label_batch) + copy_ignore_shape!(dst[1:length(batch.idx)], src[:, batch.idx]) + if length(batch.idx) < provider.batch_size + dst[length(batch.idx)+1:provider.batch_size] = provider.label_padding + end + end + return provider.label_batch +end + + +#=doc +libmxnet data providers +----------------------- +=# + +################################################################################ +#=doc +.. class:: MXDataProvider + + A data provider that wrap built-in data iterators from libmxnet. See below for + a list of built-in data iterators. 
+=# +type MXDataProvider <: AbstractDataProvider + handle :: MX_DataIterHandle + data_shape :: Vector{Tuple{Base.Symbol, Tuple}} + label_shape:: Vector{Tuple{Base.Symbol, Tuple}} + batch_size :: Int + + # those two a auxiliary variables to help avoid calling reset + # but still pre-fetch first batch to get shape information + first_epoch:: Bool + first_batch:: Bool +end + +function _reset_data_iter(handle :: MX_DataIterHandle) + @mxcall(:MXDataIterBeforeFirst, (MX_handle,), handle) +end +function _iter_next(handle :: MX_DataIterHandle) + ref_ret = Ref{Cint}(0) + @mxcall(:MXDataIterNext, (MX_handle, Ref{Cint}), handle, ref_ret) + return Bool(ref_ret[]) +end +function _get_data(handle :: MX_DataIterHandle) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXDataIterGetData, (MX_handle, Ref{MX_handle}), handle, ref_hdr) + return NDArray(MX_NDArrayHandle(ref_hdr[]), false) +end +function _get_label(handle :: MX_DataIterHandle) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXDataIterGetLabel, (MX_handle, Ref{MX_handle}), handle, ref_hdr) + return NDArray(MX_NDArrayHandle(ref_hdr[]), false) +end + +function MXDataProvider(handle :: MX_DataIterHandle; + data_name :: Base.Symbol=:data, + label_name :: Union{Base.Symbol,Void}=:softmax_label, + kwargs...) 
# for convenience, we ignore the rest keyword arguments + # init iterator, load the first batch and get shapes + @assert(_iter_next(handle), "Failed to load the first batch in MXDataProvider") + data_shape = Tuple{Base.Symbol, Tuple}[(data_name, size(_get_data(handle)))] + if !isa(label_name, Void) + label_shape = Tuple{Base.Symbol, Tuple}[(label_name::Base.Symbol, size(_get_label(handle)))] + else + label_shape = Tuple{Base.Symbol, Tuple}[] + end + + MXDataProvider(handle, data_shape, label_shape, data_shape[1][2][end], true, true) +end + +provide_data(provider::MXDataProvider) = provider.data_shape +provide_label(provider::MXDataProvider) = provider.label_shape +get_batch_size(provider::MXDataProvider) = provider.batch_size + +type MXDataProviderState <: AbstractDataProviderState + has_next :: Bool +end +immutable MXDataBatch <: AbstractDataBatch +end + +function Base.eltype(provider :: MXDataProvider) + MXDataBatch +end +function Base.start(provider :: MXDataProvider) + if !provider.first_epoch + _reset_data_iter(provider.handle) + else + provider.first_epoch = false + end + + return MXDataProviderState(true) +end +function Base.done(provider :: MXDataProvider, state :: MXDataProviderState) + if provider.first_batch + state.has_next = true + provider.first_batch = false + else + state.has_next = _iter_next(provider.handle) + end + return !state.has_next +end +function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) + return (MXDataBatch(), state) +end + +function get_data(provider :: MXDataProvider, batch :: MXDataBatch) + return NDArray[_get_data(provider.handle)] +end +function get_label(provider :: MXDataProvider, batch :: MXDataBatch) + return NDArray[_get_label(provider.handle)] +end +function count_samples(provider :: MXDataProvider, batch :: MXDataBatch) + ref_pad = Ref{Cint}(0) + @mxcall(:MXDataIterGetPadNum, (MX_handle, Ref{Cint}), provider.handle, ref_pad) + return provider.batch_size - Int(ref_pad[]) +end + +#=doc 
+**autogen:EMBED:io:EMBED:autogen** +=# +function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) + ref_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_narg = Ref{MX_uint}(0) + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) + + @mxcall(:MXDataIterGetIterInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), + hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) + + iter_name = symbol(bytestring(ref_name[])) + + if gen_docs + if endswith(string(iter_name), "Iter") + f_desc = "Can also be called with the alias ``$(string(iter_name)[1:end-4] * "Provider")``.\n" + else + f_desc = "" + end + f_desc *= bytestring(ref_desc[]) * "\n\n" + f_desc *= ":param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data.\n" + f_desc *= ":param Base.Symbol label_name: keyword argument, default ``:softmax_label``. " * + "The name of the label. Could be ``nothing`` if no label is presented in this dataset.\n\n" + f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) + f_desc *= ":return: the constructed :class:`MXDataProvider`." + return (iter_name, f_desc) + end + + defun = quote + function $iter_name(; kwargs...) + arg_keys = AbstractString[string(k) for (k,v) in kwargs] + arg_vals = AbstractString[dump_mx_param(v) for (k,v) in kwargs] + ref_hdr = Ref{MX_handle}(0) + + @mxcall(:MXDataIterCreateIter, (MX_handle, MX_uint, char_pp, char_pp, Ref{MX_handle}), + $hdr, length(arg_keys), arg_keys, arg_vals, ref_hdr) + + return MXDataProvider(MX_DataIterHandle(ref_hdr[]); kwargs...) 
+ end + end + eval(defun) + + # add an alias XXXProvider => XXXIter + if endswith(string(iter_name), "Iter") + alias_name = symbol(string(iter_name)[1:end-4] * "Provider") + eval(:($alias_name = $iter_name)) + end +end + +function _import_io_iterators(;gen_docs::Bool=false) + n_ref = Ref{MX_uint}(0) + h_ref = Ref{Ptr{MX_handle}}(0) + @mxcall(:MXListDataIters, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) + + n_creators = n_ref[] + h_creators = pointer_to_array(h_ref[], n_creators) + + if gen_docs + docs = Dict{Base.Symbol, AbstractString}() + end + + for i = 1:n_creators + creator_hdr = h_creators[i] + ret = _define_data_iter_creator(creator_hdr; gen_docs=gen_docs) + if gen_docs + docs[ret[1]] = ret[2] + end + end + + if gen_docs + return docs + end +end diff --git a/src/kvstore.jl b/src/kvstore.jl index d52433f567b2..1d8c98935768 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -121,7 +121,7 @@ end function set_optimizer(self :: KVStore, optimizer :: AbstractOptimizer) ref_is_worker = Ref{Cint}(0) - @mxcall(:MXKVStoreIsWorkerNode, (Ref{Cint},), ref_is_worker) + @mxcall(:MXKVStoreIsWorkerSymbolicNode, (Ref{Cint},), ref_is_worker) is_worker = ref_is_worker[] if ismatch(r"dist", string(get_type(self))) && is_worker diff --git a/src/kvstore.jl-e b/src/kvstore.jl-e new file mode 100644 index 000000000000..d52433f567b2 --- /dev/null +++ b/src/kvstore.jl-e @@ -0,0 +1,132 @@ +type KVStore + handle :: MX_KVStoreHandle + updater_c :: Ptr{Void} + updater :: Function + + KVStore(hdr :: MX_KVStoreHandle) = new(hdr, Ptr{Void}(0)) +end + +function KVStore(kv_type::Base.Symbol = :local) + #@assert(kv_type ∈ [:local]) # TODO: update with allowed types + + ref_hdr = Ref{MX_handle}(0) + kv_type = string(kv_type) + @mxcall(:MXKVStoreCreate, (char_p, Ref{MX_handle}), kv_type, ref_hdr) + return KVStore(MX_KVStoreHandle(ref_hdr[])) +end +function Base.unsafe_convert(::Type{MX_handle}, obj::KVStore) + Base.unsafe_convert(MX_handle, obj.handle) +end 
+Base.convert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) + +function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) + @assert length(keys) == length(vals) + keys_flt = Int[] + vals_flt = NDArray[] + for (k,v) in zip(keys, vals) + append!(keys_flt, Base.ones(Int, length(v))*k) + append!(vals_flt, v) + end + return (keys_flt, vals_flt) +end + +function init!(self :: KVStore, key :: Int, val :: NDArray) + init!(self, [key], [val]) +end +function init!(self :: KVStore, key :: Int, vals :: Vector{NDArray}) + init!(self, Base.ones(Int, length(vals))*key, vals) +end +function init!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) + init!(self, _flatten_kvlist(keys, vals)...) +end +function init!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{NDArray}) + @assert length(keys) == length(vals) + keys = Cint[keys...] + vals = MX_handle[vals...] + @mxcall(:MXKVStoreInit, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}), + self, length(keys), keys, vals) +end + +import Base.push! +function push!(self :: KVStore, key :: Int, val :: NDArray; priority :: Int = 0) + push!(self, [key], [val]; priority = priority) +end +function push!(self :: KVStore, key :: Int, vals :: Vector{NDArray}; priority :: Int = 0) + push!(self, Base.ones(Int, length(vals))*key, vals; priority = priority) +end +function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}; priority::Int=0) + push!(self, _flatten_kvlist(keys, vals)...; priority = priority) +end +function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{NDArray}; priority::Int=0) + @assert length(keys) == length(vals) + keys = Cint[keys...] + vals = MX_handle[vals...] 
+ @mxcall(:MXKVStorePush, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), + self, length(keys), keys, vals, priority) +end + +function pull!(self :: KVStore, key :: Int, out :: NDArray; priority :: Int = 0) + pull!(self, [key], [out]) +end +function pull!(self :: KVStore, key :: Int, outs :: Vector{NDArray}; priority :: Int = 0) + pull!(self, Base.ones(Int, length(outs))*key, outs; priority = priority) +end +function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{Vector{NDArray}}; priority::Int=0) + pull!(self, _flatten_kvlist(keys, outs)...; priority = priority) +end +function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{NDArray}; priority::Int=0) + @assert length(keys) == length(outs) + keys = Cint[keys...] + outs = MX_handle[outs...] + @mxcall(:MXKVStorePull, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), + self, length(keys), keys, outs, priority) +end + + +function get_type(self :: KVStore) + type_ref = Ref{char_p}(0) + @mxcall(:MXKVStoreGetType, (MX_handle, Ref{char_p}), self, type_ref) + return symbol(bytestring(type_ref[])) +end + +function get_num_workers(self :: KVStore) + ref_size = Ref{Cint}(0) + @mxcall(:MXKVStoreGetGroupSize, (MX_handle, Ref{Cint}), self, ref_size) + return Int(ref_size[]) +end + +function get_rank(self :: KVStore) + ref_rank = Ref{Cint}(0) + @mxcall(:MXKVStoreGetRank, (MX_handle, Ref{Cint}), self, ref_rank) + return Int(ref_rank[]) +end + + +# TODO: Currently Julia does not support closure in c-callbacks, so we are making use of the +# extra handle parameter of the API to pass the updater object around. Fix this when someday +# full closure cfunction is supported in Julia. 
+function _kvstore_update_wrapper(index::Cint, nd_recv::MX_handle, nd_local::MX_handle, updater::Ptr{Void}) + updater_func = unsafe_pointer_to_objref(updater) :: Function + updater_func(Int(index), NDArray(MX_NDArrayHandle(nd_recv)), NDArray(MX_NDArrayHandle(nd_local))) + return nothing +end +function set_updater(self :: KVStore, updater :: Function) + self.updater = updater # keep a reference to the julia object so that updater_c is kept valid + self.updater_c = cfunction(_kvstore_update_wrapper, Void, (Cint, MX_handle, MX_handle, Ptr{Void})) + + @mxcall(:MXKVStoreSetUpdater, (MX_handle, Ptr{Void}, Any), + self, self.updater_c, updater) +end + +function set_optimizer(self :: KVStore, optimizer :: AbstractOptimizer) + ref_is_worker = Ref{Cint}(0) + @mxcall(:MXKVStoreIsWorkerNode, (Ref{Cint},), ref_is_worker) + is_worker = ref_is_worker[] + + if ismatch(r"dist", string(get_type(self))) && is_worker + # TODO + else + set_updater(self, get_updater(optimizer)) + end +end diff --git a/src/metric.jl-e b/src/metric.jl-e new file mode 100644 index 000000000000..c88239ab8cc8 --- /dev/null +++ b/src/metric.jl-e @@ -0,0 +1,37 @@ +abstract AbstractEvalMetric + +type Accuracy <: AbstractEvalMetric + acc_sum :: Float64 + n_sample :: Int + + Accuracy() = new(0.0, 0) +end + +function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) + label = copy(label) + pred = copy(pred) + + n_sample = size(pred)[end] + metric.n_sample += n_sample + for i = 1:n_sample + klass = indmax(pred[:,i]) + metric.acc_sum += (klass-1) == label[i] + end +end + +function update!(metric :: Accuracy, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + @assert length(labels) == length(preds) + for i = 1:length(labels) + _update_single_output(metric, labels[i], preds[i]) + end +end + +import Base: get +function get(metric :: Accuracy) + return [(:accuracy, metric.acc_sum / metric.n_sample)] +end + +function reset!(metric :: Accuracy) + metric.acc_sum = 0.0 + metric.n_sample = 0 
+end diff --git a/src/model.jl b/src/model.jl index 93189124d9b0..1a9d11facbe2 100644 --- a/src/model.jl +++ b/src/model.jl @@ -23,7 +23,7 @@ abstract AbstractModel that handles sequential data explicitly, please use **TODO**... =# type FeedForward <: AbstractModel - arch :: Node + arch :: SymbolicNode ctx :: Vector{Context} arg_params :: Dict{Base.Symbol, NDArray} @@ -32,7 +32,7 @@ type FeedForward <: AbstractModel pred_exec :: Union{Executor, Void} # leave the rest fields undefined - FeedForward(arch :: Node, ctx :: Vector{Context}) = new(arch, ctx) + FeedForward(arch :: SymbolicNode, ctx :: Vector{Context}) = new(arch, ctx) end """Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector @@ -52,14 +52,14 @@ function _split_inputs(batch_size :: Int, n_split :: Int) end #=doc -.. function:: FeedForward(arch :: Node, ctx) +.. function:: FeedForward(arch :: SymbolicNode, ctx) :param arch: the architecture of the network constructed using the symbolic API. :param ctx: the devices on which this model should do computation. It could be a single :class:`Context` or a list of :class:`Context` objects. In the latter case, data parallelization will be used for training. If no context is provided, the default context ``cpu()`` will be used. 
=# -function FeedForward(arch :: Node; context :: Union{Context, Vector{Context}, Void} = nothing) +function FeedForward(arch :: SymbolicNode; context :: Union{Context, Vector{Context}, Void} = nothing) if isa(context, Void) context = [Context(CPU)] elseif isa(context, Context) @@ -514,7 +514,7 @@ end function save_checkpoint(self :: FeedForward, prefix :: AbstractString, state :: OptimizationState) save_checkpoint(self.arch, self.arg_params, self.aux_params, prefix, state.curr_epoch) end -function save_checkpoint(sym :: Node, arg_params :: Dict{Base.Symbol, NDArray}, +function save_checkpoint(sym :: SymbolicNode, arg_params :: Dict{Base.Symbol, NDArray}, aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, epoch :: Int) save("$prefix-symbol.json", sym) save_dict = merge(Dict([symbol("arg:$k") => v for (k,v) in arg_params]), @@ -525,7 +525,7 @@ function save_checkpoint(sym :: Node, arg_params :: Dict{Base.Symbol, NDArray}, end function load_checkpoint(prefix :: AbstractString, epoch :: Int) - arch = load("$prefix-symbol.json", Node) + arch = load("$prefix-symbol.json", SymbolicNode) saved_dict = load(format("{1}-{2:04d}.params", prefix, epoch), NDArray) arg_params = Dict{Base.Symbol, NDArray}() aux_params = Dict{Base.Symbol, NDArray}() diff --git a/src/model.jl-e b/src/model.jl-e new file mode 100644 index 000000000000..93189124d9b0 --- /dev/null +++ b/src/model.jl-e @@ -0,0 +1,566 @@ +#=doc +Models +====== + +The model API provides convenient high-level interface to do training and predicting on +a network described using the symbolic API. +=# + +#=doc +.. class:: AbstractModel + + The abstract super type of all models in MXNet.jl. +=# +abstract AbstractModel + +#=doc +.. class:: FeedForward + + The feedforward model provides convenient interface to train and predict on + feedforward architectures like multi-layer MLP, ConvNets, etc. 
There is no + explicitly handling of *time index*, but it is relatively easy to implement + unrolled RNN / LSTM under this framework (**TODO**: add example). For models + that handles sequential data explicitly, please use **TODO**... +=# +type FeedForward <: AbstractModel + arch :: Node + ctx :: Vector{Context} + + arg_params :: Dict{Base.Symbol, NDArray} + aux_params :: Dict{Base.Symbol, NDArray} + + pred_exec :: Union{Executor, Void} + + # leave the rest fields undefined + FeedForward(arch :: Node, ctx :: Vector{Context}) = new(arch, ctx) +end + +"""Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector + of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that + piece. +""" +function _split_inputs(batch_size :: Int, n_split :: Int) + @assert(batch_size >= n_split) + per_split = floor(Int, batch_size / n_split) + counts = Base.zeros(Int, n_split)+per_split + extra = batch_size - sum(counts) + counts[1:extra] += 1 + + cum = [0, cumsum(counts)...] + idx = [cum[i-1]+1:cum[i] for i = 2:length(cum)] + return idx +end + +#=doc +.. function:: FeedForward(arch :: Node, ctx) + + :param arch: the architecture of the network constructed using the symbolic API. + :param ctx: the devices on which this model should do computation. It could be a single :class:`Context` + or a list of :class:`Context` objects. In the latter case, data parallelization will be used + for training. If no context is provided, the default context ``cpu()`` will be used. +=# +function FeedForward(arch :: Node; context :: Union{Context, Vector{Context}, Void} = nothing) + if isa(context, Void) + context = [Context(CPU)] + elseif isa(context, Context) + context = [context] + end + FeedForward(arch, context) +end + +#=doc +.. function:: init_model(self, initializer; overwrite=false, input_shapes...) + + Initialize the weights in the model. + + This method will be called automatically when training a model. 
So there is usually no + need to call this method unless one needs to inspect a model with only randomly initialized + weights. + + :param FeedForward self: the model to be initialized. + :param AbstractInitializer initializer: an initializer describing how the weights should be initialized. + :param Bool overwrite: keyword argument, force initialization even when weights already exists. + :param input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. + For example, ``data=(28,28,1,100), label=(100,)``. +=# +function init_model(self :: FeedForward, initializer :: AbstractInitializer; overwrite::Bool=false, input_shapes...) + # all arg names, including data, label, and parameters + arg_names = list_arguments(self.arch) + + input_names = [x[1] for x in input_shapes] + + param_names = setdiff(arg_names, input_names) + aux_names = list_auxiliary_states(self.arch) + + arg_defined = true + aux_defined = true + + arg_shapes, out_shapes, aux_shapes = infer_shape(self.arch; input_shapes...) + if !isdefined(self, :arg_params) + param_name_shapes = filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) + self.arg_params = Dict([name => empty(shape) for (name,shape) in param_name_shapes]) + arg_defined = false + end + if !isdefined(self, :aux_params) + self.aux_params = Dict([name => empty(shape) for (name,shape) in zip(aux_names,aux_shapes)]) + aux_defined = false + end + + # initialize the contents of the parameters + if !arg_defined || overwrite + for (k,v) in self.arg_params + initializer(k, v) + end + end + if !aux_defined || overwrite + for (k,v) in self.aux_params + initializer(k, v) + end + end + + return (arg_names, param_names, aux_names) +end + +function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_shapes...) 
+ if !isdefined(self, :pred_exec) || isa(self.pred_exec, Void) || overwrite + if !isdefined(self, :arg_params) || !isdefined(self, :aux_params) + @assert(false, "Model weights not defined, please init or train the model, or load from file") + end + + # the predictor use only the first device + self.pred_exec = simple_bind(self.arch, self.ctx[1]; grad_req=GRAD_NOP, data_shapes...) + copy_params_from(self.pred_exec, self.arg_params, self.aux_params) + else + # make sure the new setup is compatible with the existing one + for (d_name, d_shape) in data_shapes + @assert(d_shape == size(self.pred_exec.arg_dict[d_name]), + "Shape of $d_name mismatch with existing predictor, use overwrite=true overwrite existing predictor") + end + end +end + +#=doc +.. function:: + predict(self, data; overwrite=false, callback=nothing) + + Predict using an existing model. The model should be already initialized, or trained or loaded from + a checkpoint. There is an overloaded function that allows to pass the callback as the first argument, + so it is possible to do + + .. code-block:: julia + + predict(model, data) do batch_output + # consume or write batch_output to file + end + + :param FeedForward self: the model. + :param AbstractDataProvider data: the data to perform prediction on. + :param Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory + allocation of the :class:`Executor` depends on the mini-batch size of the test + data provider. If you call predict twice with data provider of the same batch-size, + then the executor can be re-used. Otherwise, if ``overwrite`` is false (default), + an error will be raised; if ``overwrite`` is set to true, a new :class:`Executor` + will be created to replace the old one. + + .. note:: + + Prediction is computationally much less costly than training, so the bottleneck sometimes becomes the IO + for copying mini-batches of data. 
Since there is no concern about convergence in prediction, it is better + to set the mini-batch size as large as possible (limited by your device memory) if prediction speed is a + concern. + + For the same reason, currently prediction will only use the first device even if multiple devices are + provided to construct the model. + + :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` +=# +function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = false) + predict(self, data; overwrite = overwrite, callback=callback) +end +function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=false, callback::Union{Function,Void}=nothing) + data_shapes = provide_data(data) + data_names = [x[1] for x in data_shapes] + _setup_predictor(self, overwrite; data_shapes...) + + batch_size = get_batch_size(data) + data_arrays = [self.pred_exec.arg_dict[name] for name in data_names] + output_list = [Array{MX_float}[] for i=1:length(self.pred_exec.outputs)] + for batch in eachbatch(data) + load_data!(data, batch, data_arrays) + forward(self.pred_exec, is_train=false) + if isa(callback, Void) + # no callback, accumulate the data and return at the end + for (o_list, o_nd) in zip(output_list, self.pred_exec.outputs) + push!(o_list, copy(slice(o_nd, 1:count_samples(data, batch)))) + end + else + outputs = self.pred_exec.outputs + if length(outputs) == 1 + outputs = outputs[1] + end + callback(outputs) + end + end + + if !isa(callback, Void) + # callback exists, do not accumulate data + return nothing + end + + if isempty(output_list) + # maybe model does not have outputs + return nothing + end + if isempty(output_list[1]) + # maybe no output because data is empty + return length(output_list) == 1 ? output_list[1] : output_list + end + + # concatenate along mini-batches + output_arrays = [cat(ndims(x[1]), x...) 
for x in output_list] + if length(output_arrays) == 1 + # only 1 output, return it directly, instead of a list + output_arrays = output_arrays[1] + end + return output_arrays +end + +function _init_model(self :: FeedForward, data :: AbstractDataProvider, initializer :: AbstractInitializer, overwrite :: Bool) + init_model(self, initializer; overwrite=overwrite, [provide_data(data)..., provide_label(data)...]...) +end + +function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) + if num_device == 1 && !ismatch(r"dist", string(kv_type)) + kv = nothing + else + if kv_type == :local + max_size = maximum([prod(size(param)) for (k,param) in arg_params]) + if max_size < 1024 * 1024 * 16 + kv_type = :local_update_cpu + else + kv_type = :local_allreduce_cpu + end + info("Auto-select kvstore type = $kv_type") + end + kv = KVStore(kv_type) + end + + update_on_kvstore = true + if isa(kv, Void) || ismatch(r"local_allreduce", string(get_type(kv))) + update_on_kvstore = false + end + + return (kv, update_on_kvstore) +end + +@defstruct TrainingOptions Any ( + initializer :: AbstractInitializer = UniformInitializer(0.01), + n_epoch :: Int = 10, + eval_data :: Union{Void, AbstractDataProvider} = nothing, + eval_metric :: AbstractEvalMetric = Accuracy(), + kvstore :: Union{Base.Symbol, KVStore} = :local, + force_init :: Bool = false, + callbacks :: Vector{AbstractCallback} = AbstractCallback[], +) + +function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, + state::OptimizationState, type_filter::Type) + map(callbacks) do cb + if isa(cb, type_filter) + if type_filter == AbstractEpochCallback + # epoch callback have extra access to the model object + cb(self, state) + else + cb(state) + end + end + end +end + +#=doc +.. function:: train(model :: FeedForward, ...) + + Alias to :func:`fit`. +=# +function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) 
+ fit(self, optimizer, data; kwargs...) +end + +#=doc +.. function:: fit(model :: FeedForward, optimizer, data; kwargs...) + + Train the ``model`` on ``data`` with the ``optimizer``. + + :param FeedForward model: the model to be trained. + :param AbstractOptimizer optimizer: the optimization algorithm to use. + :param AbstractDataProvider data: the training data provider. + :param Int n_epoch: default 10, the number of full data-passes to run. + :param AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for + the validation set. + :param AbstractEvalMetric eval_metric: keyword argument, default ``Accuracy()``. The metric used + to evaluate the training performance. If ``eval_data`` is provided, the same metric is also + calculated on the validation set. + :param kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients + and parameters when multiple devices are used for training. + :type kvstore: :class:`KVStore` or ``Base.Symbol`` + :param AbstractInitializer initializer: keyword argument, default ``UniformInitializer(0.01)``. + :param Bool force_init: keyword argument, default false. By default, the random initialization using the + provided ``initializer`` will be skipped if the model weights already exists, maybe from a previous + call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When + this option is set, it will always do random initialization at the begining of training. + :param callbacks: keyword argument, default ``[]``. Callbacks to be invoked at each epoch or mini-batch, + see :class:`AbstractCallback`. + :type callbacks: ``Vector{AbstractCallback}`` +=# +function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) + opts = TrainingOptions(; kwargs...) 
+ + info("Start training on $(self.ctx)") + + batch_size = get_batch_size(data) + num_dev = length(self.ctx) + slices = _split_inputs(batch_size, num_dev) + + # initialize parameters + info("Initializing parameters...") + arg_names, param_names, aux_names = _init_model(self, data, opts.initializer, opts.force_init) + + # setup kvstore + kvstore = opts.kvstore + if isa(kvstore, Base.Symbol) + info("Creating KVStore...") + kvstore, update_on_kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params) + end + + train_execs = Array(Executor, num_dev) + for i = 1:num_dev + data_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)] + label_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)] + train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=GRAD_WRITE, data_shapes..., label_shapes...) + + copy_params_from(train_execs[i], self.arg_params, self.aux_params) + end + + # set up input data structures + data_names = [x[1] for x in provide_data(data)] + label_names = [x[1] for x in provide_label(data)] + + data_arrays = [SlicedNDArray[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] + for name in data_names] + label_arrays = [SlicedNDArray[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] + for name in label_names] + + param_idx = filter(i -> in(arg_names[i], param_names), 1:length(arg_names)) + + param_arrays = [NDArray[exec.arg_arrays[i] for exec in train_execs] for i in param_idx] + grad_arrays = [NDArray[exec.grad_arrays[i] for exec in train_execs] for i in param_idx] + aux_arrays = [NDArray[exec.aux_arrays[i] for exec in train_execs] for i = 1:length(aux_names)] + + op_state = OptimizationState(batch_size) + optimizer.state = op_state + + if !update_on_kvstore + updater = get_updater(optimizer) + end + + if !isa(kvstore, Void) + if update_on_kvstore + set_optimizer(kvstore, optimizer) + end + + info("Initializing KVStore...") + # init kv 
with gradients + for idx = 1:length(param_arrays) + param_on_devs = param_arrays[idx] + + init!(kvstore, idx, self.arg_params[param_names[idx]]) + + if update_on_kvstore + # pull weights back + pull!(kvstore, idx, param_on_devs, priority=-idx) + end + end + end + + # set up output and labels in CPU for evaluation metric + output_shapes = [tuple(size(x)[1:end-1]...,batch_size) for x in train_execs[1].outputs] + cpu_dev = Context(CPU) + cpu_output_arrays = [empty(shape, cpu_dev) for shape in output_shapes] + cpu_label_arrays = [empty(shape, cpu_dev) for (name,shape) in provide_label(data)] + + # invoke callbacks on epoch 0 + _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) + + # now start training... + for i_epoch = 1:opts.n_epoch + time_start = time() + reset!(opts.eval_metric) + + op_state.curr_epoch = i_epoch + op_state.curr_batch = 0 + + # invoke callbacks on iteration 0 + _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) + + for batch in eachbatch(data) + load_data!(data, batch, data_arrays) + load_label!(data, batch, label_arrays) + + # forward and backward + for (texec, islice) in zip(train_execs, slices) + forward(texec, is_train=true) + + # copy outputs into cpu ndarray, for evaluation metric + for (cpu_out, dev_out) in zip(cpu_output_arrays, texec.outputs) + copy!(slice(cpu_out, islice), dev_out) + end + + backward(texec) + end + + op_state.curr_iter += 1 + op_state.curr_batch += 1 + optimizer.state = op_state + + # update parameters + for idx = 1:length(param_names) + # gradient synchronization + if !isa(kvstore, Void) + # push gradient, priority is negative index + push!(kvstore, idx, grad_arrays[idx], priority=-idx) + if update_on_kvstore + # pull back the weights + pull!(kvstore, idx, param_arrays[idx], priority=-idx) + else + # pull back the sum-ed gradients, to the same locations + pull!(kvstore, idx, grad_arrays[idx], priority=-idx) + end + end + + if !update_on_kvstore + # manual updating + for i_dev = 
1:num_dev + # create a fake index, so that the updater create states + # for different param AND different devices, TODO(mli) + # use a better solution later + fake_idx = idx * num_dev + i_dev + updater(fake_idx, grad_arrays[idx][i_dev], param_arrays[idx][i_dev]) + end + end + end + + # invoke callbacks after finishing each iteration + _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) + + # update evaluation metric on training set + load_label!(data, batch, cpu_label_arrays) + update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) + end # end of one epoch + + time_stop = time() + info(format("== Epoch {1:0>3d} ==========", i_epoch)) + info("## Training summary") + for (name, value) in get(opts.eval_metric) + info(format("{1:>15s} = {2:.4f}", name, value)) + end + info(format("{1:>15s} = {2:.4f} seconds", "time", time_stop-time_start)) + + # evaluation on validation set + if !isa(opts.eval_data, Void) + # because we are re-using the memory allocated for the training network, + # the batch_size of the validation dataset must be the same as the training + # batch_size + @assert(get_batch_size(opts.eval_data) == batch_size) + + reset!(opts.eval_metric) + for batch in eachbatch(opts.eval_data) + load_data!(opts.eval_data, batch, data_arrays) + + # forward and backward + for (texec, islice) in zip(train_execs, slices) + forward(texec, is_train=true) + + # copy outputs into cpu ndarray, for evaluation metric + for (cpu_out, dev_out) in zip(cpu_output_arrays, texec.outputs) + copy!(slice(cpu_out, islice), dev_out) + end + end + load_label!(opts.eval_data, batch, cpu_label_arrays) + update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) + end + + info("## Validation summary") + for (name, value) in get(opts.eval_metric) + info(format("{1:>15s} = {2:.4f}", name, value)) + end + end + + if i_epoch == opts.n_epoch || any(x->isa(x, AbstractEpochCallback), opts.callbacks) + # copy data back to cpu + for (name, weights) in zip(param_names, 
param_arrays) + # average parameters across devices + weight = +([copy(w, cpu()) for w in weights]...) / length(weights) + copy!(self.arg_params[name], weight) + end + for (name, aux_devs) in zip(aux_names, aux_arrays) + aux_avg = +([copy(aux, cpu()) for aux in aux_devs]...) / length(aux_devs) + copy!(self.aux_params[name], aux_avg) + end + end + _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) + end # end of all epochs +end + +function save_checkpoint(self :: FeedForward, prefix :: AbstractString, state :: OptimizationState) + save_checkpoint(self.arch, self.arg_params, self.aux_params, prefix, state.curr_epoch) +end +function save_checkpoint(sym :: Node, arg_params :: Dict{Base.Symbol, NDArray}, + aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, epoch :: Int) + save("$prefix-symbol.json", sym) + save_dict = merge(Dict([symbol("arg:$k") => v for (k,v) in arg_params]), + Dict([symbol("aux:$k") => v for (k,v) in aux_params])) + save_filename = format("{1}-{2:04d}.params", prefix, epoch) + save(save_filename, save_dict) + info("Saved checkpoint to '$save_filename'") +end + +function load_checkpoint(prefix :: AbstractString, epoch :: Int) + arch = load("$prefix-symbol.json", Node) + saved_dict = load(format("{1}-{2:04d}.params", prefix, epoch), NDArray) + arg_params = Dict{Base.Symbol, NDArray}() + aux_params = Dict{Base.Symbol, NDArray}() + for (k,v) in saved_dict + tp, name = split(string(k), ':') + name = symbol(name) + if tp == "arg" + arg_params[name] = v + else + aux_params[name] = v + end + end + + return (arch, arg_params, aux_params) +end + +function load_checkpoint(prefix :: AbstractString, epoch :: Int, ::Type{FeedForward}) + arch, arg_params, aux_params = load_checkpoint(prefix, epoch) + model = FeedForward(arch) + model.arg_params = arg_params + model.aux_params = aux_params + return model +end + +function load_checkpoint(self :: FeedForward, prefix :: AbstractString, epoch :: Int; overwrite :: Bool = true) + if 
isdefined(self, :arg_params) && isdefined(self, :aux_params) && !overwrite + info("model weights already exists, skip loading... (call with overwrite=true if needed)") + return self + end + + arch, arg_params, aux_params = load_checkpoint(prefix, epoch) + # TODO: is there better way to compare two symbols + @assert(to_json(self.arch) == to_json(arch), "Cannot load from a checkpoint with different network architecture") + self.arg_params = arg_params + self.aux_params = aux_params + return self +end + diff --git a/src/name.jl-e b/src/name.jl-e new file mode 100644 index 000000000000..5ebf10917ce6 --- /dev/null +++ b/src/name.jl-e @@ -0,0 +1,44 @@ +abstract AbstractNameManager +typealias NameType Union{Base.Symbol, AbstractString} +typealias NameCounter Dict{Base.Symbol, Int} + +import Base: get! + +# Default implementation for generating a name for a symbol. +# When a name is specified by the user, it will be used. Otherwise, a name +# is automatically generated based on the hint string. 
+function _default_get_name!(counter :: NameCounter, name :: NameType, hint :: NameType) + if isa(name, Base.Symbol) || !isempty(name) + return symbol(name) + end + + hint = symbol(hint) + if !haskey(counter, hint) + counter[hint] = 0 + end + name = symbol("$hint$(counter[hint])") + counter[hint] += 1 + return name +end + +type BasicNameManager <: AbstractNameManager + counter :: NameCounter +end +BasicNameManager() = BasicNameManager(NameCounter()) + +function get!(manager :: BasicNameManager, name :: NameType, hint :: NameType) + _default_get_name!(manager.counter, name, hint) +end + +type PrefixNameManager <: AbstractNameManager + prefix :: Base.Symbol + counter :: NameCounter +end +PrefixNameManager(prefix :: NameType) = PrefixNameManager(symbol(prefix), NameCounter()) + +function get!(manager :: PrefixNameManager, name :: NameType, hint :: NameType) + name = _default_get_name!(manager.counter, name, hint) + return symbol("$(manager.prefix)$name") +end + +DEFAULT_NAME_MANAGER = BasicNameManager() diff --git a/src/ndarray.jl-e b/src/ndarray.jl-e new file mode 100644 index 000000000000..82293d2fdbe9 --- /dev/null +++ b/src/ndarray.jl-e @@ -0,0 +1,800 @@ +#=doc +NDArray API +=========== +=# + +# create a NDArray handle of specific shape +function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) + h_ref = Ref{MX_handle}(0) + shape = flipdim(MX_uint[shape...],1) + @mxcall(:MXNDArrayCreate, (Ptr{MX_uint}, MX_uint, Cint, Cint, Cint, Ref{MX_handle}), + shape, length(shape), ctx.device_type, ctx.device_id, delay_alloc, h_ref) + handle = MX_NDArrayHandle(h_ref[]) + return handle +end + +# create a handle to an empty NDArray, this handle can be used to hold +# results returned by libmx API calls +function _ndarray_alloc() + h_ref = Ref{MX_handle}(0) + @mxcall(:MXNDArrayCreateNone, (Ref{MX_handle},), h_ref) + return MX_NDArrayHandle(h_ref[]) +end + +################################################################################ +# NDArray 
Type +################################################################################ +#=doc +.. class:: NDArray + + Wrapper of the ``NDArray`` type in ``libmxnet``. This is the basic building block + of tensor-based computation. + + .. _ndarray-shape-note: + + .. note:: + + since C/C++ use row-major ordering for arrays while Julia follows a + column-major ordering. To keep things consistent, we keep the underlying data + in their original layout, but use *language-native* convention when we talk + about shapes. For example, a mini-batch of 100 MNIST images is a tensor of + C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory + have shape (28,28,1,100). +=# +type NDArray + handle :: MX_NDArrayHandle + writable :: Bool + + function NDArray(handle, writable=true) + new(handle, writable) + end +end + +function Base.show(io :: IO, arr :: NDArray) + print(io, "mx.NDArray$(size(arr))") +end + +function NDArray{T<:Real}(data :: Array{T}) + copy(data, cpu()) +end + +function Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) + Base.unsafe_convert(MX_handle, obj.handle) +end +Base.convert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) + +################################################################################ +# NDArray functions exported to the users +################################################################################ +#=doc +.. function:: context(arr :: NDArray) + + Get the context that this :class:`NDArray` lives on. +=# +function context(arr :: NDArray) + ref_typeid = Ref{Cint}(0) + ref_devid = Ref{Cint}(0) + @mxcall(:MXNDArrayGetContext, (MX_handle, Ref{Cint}, Ref{Cint}), + arr, ref_typeid, ref_devid) + return Context(ref_typeid[], ref_devid[]) +end + +#=doc +.. function:: + empty(shape :: Tuple, ctx :: Context) + empty(shape :: Tuple) + empty(dim1, dim2, ...) 
+ + Allocate memory for an uninitialized :class:`NDArray` with specific shape. +=# +function empty{N}(shape :: NTuple{N, Int}) + empty(shape, cpu()) +end +function empty{N}(shape :: NTuple{N, Int}, ctx :: Context) + NDArray(_ndarray_alloc(shape, ctx, false)) +end +function empty(shape :: Int...) + empty(shape) +end + +#=doc +Interface functions similar to Julia Arrays +------------------------------------------- +=# + +#=doc +.. function:: + zeros(shape :: Tuple, ctx :: Context) + zeros(shape :: Tuple) + zeros(dim1, dim2, ...) + + Create zero-ed :class:`NDArray` with specific shape. +=# +function zeros{N}(shape :: NTuple{N, Int}) + zeros(shape, cpu()) +end +function zeros{N}(shape :: NTuple{N, Int}, ctx :: Context) + arr = empty(shape, ctx) + arr[:] = 0 + return arr +end +function zeros(shape :: Int...) + zeros(shape) +end + +#=doc +.. function:: + ones(shape :: Tuple, ctx :: Context) + ones(shape :: Tuple) + ones(dim1, dim2, ...) + + Create an :class:`NDArray` with specific shape and initialize with 1. +=# +function ones{N}(shape :: NTuple{N, Int}) + ones(shape, cpu()) +end +function ones{N}(shape :: NTuple{N, Int}, ctx :: Context) + arr = empty(shape, ctx) + arr[:] = 1 + return arr +end +function ones(shape :: Int...) + ones(shape) +end + +import Base: size, length, ndims, eltype + +#=doc +.. function:: + size(arr :: NDArray) + size(arr :: NDArray, dim :: Int) + + Get the shape of an :class:`NDArray`. The shape is in Julia's column-major convention. See + also the :ref:`notes on NDArray shapes `. +=# +function size(arr :: NDArray) + ref_ndim = Ref{MX_uint}(0) + ref_shape = Ref{Ptr{MX_uint}}(0) + @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), + arr, ref_ndim, ref_shape) + tuple(map(Int, flipdim(pointer_to_array(ref_shape[], ref_ndim[]),1))...) +end +function size(arr :: NDArray, dim :: Int) + size(arr)[dim] +end + +#=doc +.. function:: length(arr :: NDArray) + + Get the number of elements in an :class:`NDArray`. 
+=# +function length(arr :: NDArray) + prod(size(arr)) +end + +#=doc +.. function:: ndims(arr :: NDArray) + + Get the number of dimensions of an :class:`NDArray`. Is equivalent to ``length(size(arr))``. +=# +function ndims(arr :: NDArray) + length(size(arr)) +end + +#=doc +.. function:: eltype(arr :: NDArray) + + Get the element type of an :class:`NDArray`. Currently the element type is always ``mx.MX_float``. +=# +function eltype(arr :: NDArray) + MX_float +end + + +import Base: slice +#=doc +.. function:: slice(arr :: NDArray, start:stop) + + Create a view into a sub-slice of an :class:`NDArray`. Note only slicing at the slowest + changing dimension is supported. In Julia's column-major perspective, this is the last + dimension. For example, given an :class:`NDArray` of shape (2,3,4), ``slice(array, 2:3)`` will create + a :class:`NDArray` of shape (2,3,2), sharing the data with the original array. This operation is + used in data parallelization to split mini-batch into sub-batches for different devices. +=# +function slice(arr :: NDArray, ::Colon) + arr +end +function slice(arr :: NDArray, slice::UnitRange{Int}) + dim1 = size(arr)[end] + @assert(1 <= slice.start <= slice.stop <= dim1) + if slice.start == 1 && slice.stop == dim1 + return arr + end + + hdr_ref = Ref{MX_handle}(0) + # note Julia is 1-based, inclusive-inclusive indexing, while C++ is + # 0-based, inclusive-exclusive indexing. So 1:3 in Julia should + # translates into 0:3 in C++. + @mxcall(:MXNDArraySlice, (MX_handle, MX_uint, MX_uint, Ref{MX_handle}), + arr, slice.start-1, slice.stop, hdr_ref) + return NDArray(MX_NDArrayHandle(hdr_ref[]), arr.writable) +end + +import Base: setindex! + +#=doc +.. function:: setindex!(arr :: NDArray, val, idx) + + Assign values to an :class:`NDArray`. 
Elementwise assignment is not implemented, only the following + scenarios are supported + + - ``arr[:] = val``: whole array assignment, ``val`` could be a scalar or an array (Julia ``Array`` + or :class:`NDArray`) of the same shape. + - ``arr[start:stop] = val``: assignment to a *slice*, ``val`` could be a scalar or an array of + the same shape to the slice. See also :func:`slice`. +=# +function setindex!(arr :: NDArray, val :: Real, ::Colon) + @assert(arr.writable) + _set_value(val, arr) + return arr +end +function setindex!{T<:Real}(arr :: NDArray, val :: Array{T}, ::Colon) + copy!(arr, val) +end +function setindex!(arr :: NDArray, val :: NDArray, ::Colon) + copy!(arr, val) +end +function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, idx::UnitRange{Int}) + setindex!(slice(arr, idx), val, Colon()) +end + +#=doc +.. function:: getindex(arr :: NDArray, idx) + + Shortcut for :func:`slice`. A typical use is to write + + .. code-block:: julia + + arr[:] += 5 + + which translates into + + .. code-block:: julia + + arr[:] = arr[:] + 5 + + which furthur translates into + + .. code-block:: julia + + setindex!(getindex(arr, Colon()), 5, Colon()) + + .. note:: + + The behavior is quite different from indexing into Julia's ``Array``. For example, ``arr[2:5]`` + create a **copy** of the sub-array for Julia ``Array``, while for :class:`NDArray`, this is + a *slice* that shares the memory. +=# +import Base: getindex +"""Shortcut for `slice`. **NOTE** the behavior for Julia's built-in index slicing is to create a +copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. +""" +function getindex(arr :: NDArray, ::Colon) + return arr +end +function getindex(arr :: NDArray, idx::UnitRange{Int}) + slice(arr, idx) +end + +#=doc +Copying functions +----------------- +=# +import Base: copy!, copy, convert +#=doc +.. 
function:: + copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) + + Copy contents of ``src`` into ``dst``. +=# +function copy!(dst :: NDArray, src :: NDArray) + @assert(dst.writable) + if dst.handle == src.handle + warn("Copying an NDArray to itself") + return + end + + _copyto(src, dst) + return dst +end + +function copy!(dst :: Array{MX_float}, src :: NDArray) + @assert size(dst) == size(src) + @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{MX_float}, Csize_t), + src, pointer(dst), length(dst)) + return dst +end +function copy!{T<:Real}(dst :: Array{T}, src :: NDArray) + copy!(dst, copy(src)) +end + +function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) + @assert dst.writable + @assert size(dst) == size(src) + src = convert(Array{MX_float}, src) # this might involve copying + @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), + dst.handle, pointer(src), length(src)) + return dst +end + +function copy_ignore_shape!{T<:Real}(dst :: NDArray, src :: Array{T}) + @assert dst.writable + @assert length(dst) == length(src) + src = convert(Array{MX_float}, src) # this might involve copying + @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), + dst.handle, pointer(src), length(src)) + return dst +end + + +#=doc +.. function:: + copy(arr :: NDArray) + copy(arr :: NDArray, ctx :: Context) + copy(arr :: Array, ctx :: Context) + + Create a copy of an array. When no :class:`Context` is given, create a Julia ``Array``. + Otherwise, create an :class:`NDArray` on the specified context. 
+=# +# Create copy: NDArray -> Julia Array +function copy(arr :: NDArray) + j_arr = Array(MX_float, size(arr)) + copy!(j_arr, arr) +end + +# Create copy: NDArray -> NDArray in a given context +function copy(arr :: NDArray, ctx :: Context) + dst = NDArray(_ndarray_alloc(size(arr), ctx, true)) + copy!(dst, arr) +end + +# Create copy: Julia Array -> NDArray in a given context +function copy{T<:Real}(arr :: Array{T}, ctx :: Context) + dst = empty(size(arr), ctx) + copy!(dst, arr) +end + +#=doc +.. function:: convert(::Type{Array{T}}, arr :: NDArray) + + Convert an :class:`NDArray` into a Julia ``Array`` of specific type. +=# +# Convert copy: NDArray -> Julia Array +function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) + convert(t, copy(arr)) +end + + +#=doc +Basic arithmetics +----------------- +=# + +#=doc +.. function:: @inplace + + Julia does not support re-definiton of ``+=`` operator (like ``__iadd__`` in python), + When one write ``a += b``, it gets translated to ``a = a+b``. ``a+b`` will allocate new + memory for the results, and the newly allocated :class:`NDArray` object is then assigned + back to a, while the original contents in a is discarded. This is very inefficient + when we want to do inplace update. + + This macro is a simple utility to implement this behavior. Write + + .. code-block:: julia + + @mx.inplace a += b + + will translate into + + .. code-block:: julia + + mx.add_to!(a, b) + + which will do inplace adding of the contents of ``b`` into ``a``. 
+=# +macro inplace(stmt) + if stmt.head == :+= || stmt.head == :.+= + Expr(:call, :add_to!, esc(stmt.args[1]), esc(stmt.args[2])) + elseif stmt.head == :-= || stmt.head == :.-= + Expr(:call, :sub_from!, esc(stmt.args[1]), esc(stmt.args[2])) + elseif stmt.head == :.*= + Expr(:call, :mul_to!, esc(stmt.args[1]), esc(stmt.args[2])) + elseif stmt.head == :./= + Expr(:call, :div_from!, esc(stmt.args[1]), esc(stmt.args[2])) + else + error("unsupported inplace translation for $stmt") + end +end + +#=doc +.. function:: add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) + + Add a bunch of arguments into ``dst``. Inplace updating. +=# +function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) + @assert dst.writable + for arg in args + if isa(arg, Real) + _plus_scalar(dst, arg, dst) + else + _plus(dst, arg, dst) + end + end + return dst +end + +#=doc +.. function:: + +(args...) + .+(args...) + + Summation. Multiple arguments of either scalar or :class:`NDArray` could be + added together. Note at least the first or second argument needs to be an :class:`NDArray` to + avoid ambiguity of built-in summation. +=# +import Base: +, .+ +function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) + ret = copy(arg0, context(arg0)) + add_to!(ret, args...) +end +function .+(arg0 :: NDArray, args :: Union{Real, NDArray}...) + +(arg0, args...) +end +function +(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) + +(arg1, arg0, args...) +end +function .+(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) + .+(arg1, arg0, args...) +end + +#=doc +.. function:: sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) + + Subtract a bunch of arguments from ``dst``. Inplace updating. +=# +function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + @assert dst.writable + if isa(arg, Real) + _minus_scalar(dst, arg, dst) + else + _minus(dst, arg, dst) + end +end + +#=doc +.. 
function:: + -(arg0, arg1) + -(arg0) + .-(arg0, arg1) + + Subtraction ``arg0 - arg1``, of scalar types or :class:`NDArray`. Or create + the negative of ``arg0``. +=# +import Base: -, .- +function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) + ret = copy(arg0, context(arg0)) + sub_from!(ret, arg1) +end +function .-(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) + -(arg0, arg1) +end +function -(arg0 :: Real, arg1 :: NDArray) + ret = -arg1 + add_to!(ret, arg0) + return ret +end +function .-(arg0 :: Real, arg1 :: NDArray) + -(arg0, arg1) +end + +function -(arg0 :: NDArray) + _mul_scalar(arg0, -1.0) +end + +#=doc +.. function:: mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise multiplication into ``dst`` of either a scalar or an :class:`NDArray` of the same shape. + Inplace updating. +=# +function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) + @assert dst.writable + if isa(arg, Real) + _mul_scalar(dst, arg, dst) + else + _mul(dst, arg, dst) + end + return dst +end + +#=doc +.. function:: + .*(arg0, arg1) + + Elementwise multiplication of ``arg0`` and ``arg``, could be either scalar or :class:`NDArray`. +=# +import Base: .*, * +function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) + ret = copy(arg0, context(arg0)) + mul_to!(ret, arg) +end +function .*(arg0 :: Real, arg :: NDArray) + .*(arg, arg0) +end + +#=doc +.. function:: + *(arg0, arg1) + + Currently only multiplication a scalar with an :class:`NDArray` is implemented. Matrix multiplication + is to be added soon. +=# +function *(arg0 :: NDArray, arg :: Real) + ret = copy(arg0, context(arg0)) + mul_to!(ret, arg) +end +function *(arg0 :: Real, arg :: NDArray) + *(arg, arg0) +end + +#=doc +.. function:: div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise divide a scalar or an :class:`NDArray` of the same shape from ``dst``. Inplace updating. 
+=# +function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + @assert dst.writable + if isa(arg, Real) + _div_scalar(dst, arg, dst) + else + _div(dst, arg, dst) + end +end + +import Base: ./, / +#=doc +.. function:: ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) + + Elementwise dividing an :class:`NDArray` by a scalar or another :class:`NDArray` of the same shape. +=# +function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) + ret = copy(arg0, context(arg0)) + div_from!(ret, arg) +end + +#=doc +.. function:: /(arg0 :: NDArray, arg :: Real) + + Divide an :class:`NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. +=# +function /(arg0 :: NDArray, arg :: Real) + ./(arg0, arg) +end + +#=doc +IO +-- +=# +#=doc +.. function:: load(filename, ::Type{NDArray}) + + Load NDArrays from binary file. + + :param AbstractString filename: the path of the file to load. It could be S3 or HDFS address. + :return: Either ``Dict{Base.Symbol, NDArray}`` or ``Vector{NDArray}``. + + If the ``libmxnet`` is built with the corresponding component enabled. Examples + + * ``s3://my-bucket/path/my-s3-ndarray`` + * ``hdfs://my-bucket/path/my-hdfs-ndarray`` + * ``/path-to/my-local-ndarray`` +=# +function load(filename::AbstractString, ::Type{NDArray}) + out_size = Ref{MX_uint}(0) + out_hdrs = Ref{Ptr{MX_handle}}(0) + out_name_size = Ref{MX_uint}(0) + out_names = Ref{char_pp}(0) + @mxcall(:MXNDArrayLoad, (char_p, Ref{MX_uint}, Ref{Ptr{MX_handle}}, Ref{MX_uint}, Ref{char_pp}), + filename, out_size, out_hdrs, out_name_size, out_names) + out_name_size = out_name_size[] + out_size = out_size[] + if out_name_size == 0 + return [NDArray(MX_NDArrayHandle(hdr)) for hdr in pointer_to_array(out_hdrs[], out_size)] + else + @assert out_size == out_name_size + return Dict([(symbol(bytestring(k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in + zip(pointer_to_array(out_names[], out_size), pointer_to_array(out_hdrs[], out_size))]) + end +end + +#=doc +.. 
function:: save(filename :: AbstractString, data) + + Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built + with corresponding support. + + :param AbstractString filename: path to the binary file to write to. + :param data: data to save to file. + :type data: :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. +=# +function save(filename::AbstractString, data::NDArray) + save(filename, [data]) +end +function save(filename::AbstractString, data::Vector{NDArray}) + @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), + filename, length(data), MX_handle[data...], char_pp(0)) +end +function save(filename::AbstractString, data::Dict{Base.Symbol,NDArray}) + names = [k for k in keys(data)] + arrays = MX_handle[data[k] for k in names] + names = AbstractString[string(k) for k in names] + + @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), + filename, length(names), arrays, names) +end + +#=doc +libmxnet APIs +------------- +=# +################################################################################ +# NDArray functions dynamically imported from libmxnet +################################################################################ +function _invoke_mxfunction(func_handle::MX_handle, use_vars, scalars, mut_vars) + @mxcall(:MXFuncInvoke, + (MX_handle, Ptr{MX_handle}, Ptr{MX_float}, Ptr{MX_handle}), + func_handle, use_vars, scalars, mut_vars) +end + +@enum(LIBMX_FUNC_TYPE_MASK, + NDARRAY_ARG_BEFORE_SCALAR = 1, + ACCEPT_EMPTY_MUTATE_TARGET = (1 << 2) +) + +# Import corresponding math functions from base so the automatically defined libmxnet +# functions can overload them +import Base: sqrt + +#=doc +The libxmnet APIs are automatically imported from ``libmxnet.so``. The functions listed +here operate on :class:`NDArray` objects. The arguments to the functions are typically ordered +as + +.. 
code-block:: julia + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) + +unless ``NDARRAY_ARG_BEFORE_SCALAR`` is not set. In this case, the scalars are put before the input arguments: + +.. code-block:: julia + + func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) + + +If ``ACCEPT_EMPTY_MUTATE_TARGET`` is set. An overloaded function without the output arguments will also be defined: + +.. code-block:: julia + + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) + +Upon calling, the output arguments will be automatically initialized with empty NDArrays. + +Those functions always return the output arguments. If there is only one output (the typical situation), that +object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. + +**autogen:EMBED:ndarray:EMBED:autogen** +=# +function _import_ndarray_functions(;gen_docs=false) + n_ref = Ref{MX_uint}(0) + h_ref = Ref{Ptr{MX_handle}}(0) + @mxcall(:MXListFunctions, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) + + n_funcs = n_ref[] + h_funcs = pointer_to_array(h_ref[], n_funcs) + + if gen_docs + docs = Dict{Base.Symbol, AbstractString}() + end + + for i = 1:n_funcs + func_handle = h_funcs[i] + + #---------------------------------------- + # get function information (human readable) + ref_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_narg = Ref{MX_uint}(0) + + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) + + @mxcall(:MXFuncGetInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), + func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) + + func_name = symbol(bytestring(ref_name[])) + + if gen_docs + # generate document only + f_desc = bytestring(ref_desc[]) * "\n\n" + f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) + 
docs[func_name] = f_desc + else + #---------------------------------------- + # get function specification + ref_n_use_vars = Ref{MX_uint}(0) + ref_n_scalars = Ref{MX_uint}(0) + ref_n_mut_vars = Ref{MX_uint}(0) + ref_type_mask = Ref{Cint}(0) + @mxcall(:MXFuncDescribe, + (MX_handle, Ref{MX_uint}, Ref{MX_uint}, Ref{MX_uint}, Ref{Cint}), + func_handle, ref_n_use_vars, ref_n_scalars, ref_n_mut_vars, ref_type_mask) + + #---------------------------------------- + # prepare function definition + n_used_vars = ref_n_use_vars[] + n_scalars = ref_n_scalars[] + n_mutate_vars = ref_n_mut_vars[] + type_mask = ref_type_mask[] + accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 + arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 + + # general ndarray function + if arg_before_scalar + args = vcat([Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], + [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + else + args = vcat([Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], + [Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + end + + _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) + _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) + _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) + stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) + if n_mutate_vars == 1 + stmt_ret = :(return out1) + else + stmt_ret = Expr(:return, Expr(:tuple, [symbol("out$i") for i=1:n_mutate_vars]...)) + end + + func_body = Expr(:block, stmt_call, stmt_ret) + func_head = Expr(:call, func_name, args...) 
+ + func_def = Expr(:function, func_head, func_body) + eval(func_def) + + if accept_empty_mutate + args0 = args[1:n_used_vars+n_scalars] + func_head0 = Expr(:call, func_name, args0...) + _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] + stmt_call0 = Expr(:call, func_name, args0..., _mut_vars0...) + func_body0 = Expr(:block, stmt_call0) + func_head0 = Expr(:call, func_name, args0...) + + func_def0 = Expr(:function, func_head0, func_body0) + eval(func_def0) + end + end + end + + if gen_docs + return docs + end +end + diff --git a/src/optimizer.jl-e b/src/optimizer.jl-e new file mode 100644 index 000000000000..a5f0bfd5ec60 --- /dev/null +++ b/src/optimizer.jl-e @@ -0,0 +1,226 @@ +#=doc +Optimizers +========== + +Common interfaces +----------------- +=# + + +#=doc +.. class:: AbstractOptimizer + + Base type for all optimizers. +=# +abstract AbstractOptimizer + +#=doc +.. class:: AbstractLearningRateScheduler + + Base type for all learning rate scheduler. +=# +abstract AbstractLearningRateScheduler + +#=doc +.. class:: AbstractMomentumScheduler + + Base type for all momentum scheduler. +=# +abstract AbstractMomentumScheduler + + + +#=doc +.. class:: OptimizationState + + .. attribute:: batch_size + + The size of the mini-batch used in stochastic training. + + .. attribute:: curr_epoch + + The current epoch count. Epoch 0 means no training yet, during the first + pass through the data, the epoch will be 1; during the second pass, the + epoch count will be 1, and so on. + + .. attribute:: curr_batch + + The current mini-batch count. The batch count is reset during every epoch. + The batch count 0 means the beginning of each epoch, with no mini-batch + seen yet. During the first mini-batch, the mini-batch count will be 1. + + .. attribute:: curr_iter + + The current iteration count. One iteration corresponds to one mini-batch, + but unlike the mini-batch count, the iteration count does **not** reset + in each epoch. 
So it track the *total* number of mini-batches seen so far. +=# +type OptimizationState + batch_size :: Int + curr_epoch :: Int + curr_batch :: Int + curr_iter :: Int +end +OptimizationState(batch_size::Int) = OptimizationState(batch_size, 0, 0, 0) + + +#=doc +.. function:: get_learning_rate(scheduler, state) + + :param AbstractLearningRateScheduler scheduler: a learning rate scheduler. + :param OptimizationState state: the current state about epoch, mini-batch and iteration count. + :return: the current learning rate. +=# +function get_learning_rate +end + +################################################################################ +# The learning rate module +module LearningRate +import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate + +#=doc +.. class:: LearningRate.Fixed + + Fixed learning rate scheduler always return the same learning rate. +=# +type Fixed <: AbstractLearningRateScheduler + learning_rate :: Float64 +end +get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rate + +#=doc +.. class:: LearningRate.Exp + + :math:`\eta_t = \eta_0\gamma^t`. Here :math:`t` is the epoch count, or the iteration + count if ``decay_on_iteration`` is set to true. +=# +type Exp <: AbstractLearningRateScheduler + learning_rate :: Float64 + gamma :: Float64 + on_iteration :: Bool +end +function Exp(base_lr::Real; gamma::Real=0.9, decay_on_iteration::Bool=false) + @assert(0 < gamma < 1) + Exp(Float64(base_lr), Float64(gamma), decay_on_iteration) +end +get_learning_rate(self :: Exp, state :: OptimizationState) = + self.learning_rate * self.gamma ^ (self.on_iteration ? state.curr_iter : state.curr_epoch) + +end # module LearningRate +################################################################################ +function get_lr_scheduler(scheduler :: Any, lr :: Real) + if isa(scheduler, AbstractLearningRateScheduler) + return scheduler + else + return LearningRate.Fixed(lr) + end +end + + +#=doc +.. 
function:: get_momentum(scheduler, state) + + :param AbstractMomentumScheduler scheduler: the momentum scheduler. + :param OptimizationState state: the state about current epoch, mini-batch and iteration count. + :return: the current momentum. +=# +function get_momentum +end + + +################################################################################ +# The Momentum module +module Momentum +import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum + +#=doc +.. class:: Momentum.Null + + The null momentum scheduler always returns 0 for momentum. It is also used to + explicitly indicate momentum should not be used. +=# +type Null <: AbstractMomentumScheduler +end +get_momentum(self :: Null, state :: OptimizationState) = 0.0 + +#=doc +.. class:: Momentum.Fixed + + Fixed momentum scheduler always returns the same value. +=# +type Fixed <: AbstractMomentumScheduler + momentum :: Float64 +end +get_momentum(self :: Fixed, state :: OptimizationState) = self.momentum +end # module Momentum +################################################################################ +function get_momentum_scheduler(scheduler :: Any, momentum :: Real) + if isa(scheduler, AbstractMomentumScheduler) + return scheduler + elseif momentum == 0 + return Momentum.Null() + else + return Momentum.Fixed(momentum) + end +end + + +#=doc +.. function:: get_updater(optimizer) + + :param AbstractOptimizer optimizer: the underlying optimizer. + + A utility function to create an updater function, that uses its closure to + store all the states needed for each weights. 
+=# +function get_updater(optimizer :: AbstractOptimizer) + states = Dict{Int,Any}() + function updater(index :: Int, grad :: NDArray, weight :: NDArray) + if !haskey(states, index) + states[index] = create_state(optimizer, index, weight) + end + update(optimizer, index, weight, grad, states[index]) + end + return updater +end + +################################################################################ +#=doc +Built-in optimizers +------------------- +=# + +#=doc +.. class:: AbstractOptimizerOptions + + Base class for all optimizer options. +=# +abstract AbstractOptimizerOptions + +#=doc +.. function:: normalized_gradient(opts, state, grad) + + :param AbstractOptimizerOptions opts: options for the optimizer, should contain the field + ``grad_scale``, ``grad_clip`` and ``weight_decay``. + :param OptimizationState state: the current optimization state. + :param NDArray weight: the trainable weights. + :param NDArray grad: the original gradient of the weights. + + Get the properly normalized gradient (re-scaled and clipped if necessary). 
+=# +function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, + weight::NDArray, grad::NDArray) + grad_scale = opts.grad_scale / state.batch_size + + grad = grad_scale * grad + if opts.grad_clip > 0 + grad = clip(grad, -opts.grad_clip, opts.grad_clip) + end + @inplace grad += opts.weight_decay * weight + + return grad +end + +include("optimizers/sgd.jl") +include("optimizers/adam.jl") diff --git a/src/random.jl-e b/src/random.jl-e new file mode 100644 index 000000000000..79a8b6e9e20b --- /dev/null +++ b/src/random.jl-e @@ -0,0 +1,25 @@ +function rand!(low::Real, high::Real, out::NDArray) + _random_uniform(low, high, out) +end +function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}) + rand(low, high, shape, cpu()) +end +function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context) + out = empty(shape, ctx) + rand!(low, high, out) +end + +function randn!(mean::Real, stdvar::Real, out::NDArray) + _random_gaussian(mean, stdvar, out) +end +function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}) + randn(mean, stdvar, shape, cpu()) +end +function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context) + out = empty(shape, ctx) + randn!(mean, stdvar, out) +end + +function srand!(seed_state::Int) + @mxcall(:MXRandomSeed, (Cint,), seed_state) +end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl new file mode 100644 index 000000000000..0c3760240844 --- /dev/null +++ b/src/symbolic-node.jl @@ -0,0 +1,437 @@ +#=doc +Symbolic API +============ +=# + +#=doc +.. class:: SymbolicNode + + SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. + + .. note:: + + Throughout this documentation, ``SymbolicNode`` always refer to this :class:`SymbolicNode` type. + When we refer to the Julia's build-in SymbolicNode type (e.g. ``typeof(:foo)``), we always + say ``Base.Symbol``. 
+=# +type SymbolicNode + handle :: MX_SymbolHandle +end +function Base.unsafe_convert(::Type{MX_handle}, obj::SymbolicNode) + Base.unsafe_convert(MX_handle, obj.handle) +end +Base.convert(t::Type{MX_handle}, obj::SymbolicNode) = Base.unsafe_convert(t, obj) +Base.cconvert(t::Type{MX_handle}, obj::SymbolicNode) = Base.unsafe_convert(t, obj) + +#=doc +.. function:: deepcopy(self :: SymbolicNode) + + Make a deep copy of a SymbolicNode. +=# +function Base.deepcopy(self :: SymbolicNode) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolCopy, (MX_handle, Ref{MX_handle}), self, ref_hdr) + return SymbolicNode(MX_SymbolHandle(ref_hdr[])) +end + +#=doc +.. function:: copy(self :: SymbolicNode) + + Make a copy of a SymbolicNode. The same as making a deep copy. +=# +function Base.copy(self :: SymbolicNode) + Base.deepcopy(self) +end + +function Base.call(self :: SymbolicNode, args :: SymbolicNode...) + s = deepcopy(self) + _compose!(s, args...) +end +function Base.call(self :: SymbolicNode; kwargs...) + s = deepcopy(self) + _compose!(s; kwargs...) +end + +macro _list_symbol_info(self, func_name) + quote + ref_sz = Ref{MX_uint}(0) + ref_names = Ref{char_pp}(0) + @mxcall($func_name, (MX_handle, Ref{MX_uint}, Ref{char_pp}), + $self, ref_sz, ref_names) + narg = ref_sz[] + names = pointer_to_array(ref_names[], narg) + names = [symbol(bytestring(x)) for x in names] + return names + end +end +function list_arguments(self :: SymbolicNode) + @_list_symbol_info(self, :MXSymbolListArguments) +end +function list_outputs(self :: SymbolicNode) + @_list_symbol_info(self, :MXSymbolListOutputs) +end +"""List all auxiliary states in the symbool. + +Auxiliary states are special states of symbols that do not corresponds to an argument, +and do not have gradient. But still be useful for the specific operations. +A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. +Most operators do not have Auxiliary states. 
+""" +function list_auxiliary_states(self :: SymbolicNode) + @_list_symbol_info(self, :MXSymbolListAuxiliaryStates) +end + +"Get a new grouped SymbolicNode whose output contains all the internal outputs of this SymbolicNode." +function get_internals(self :: SymbolicNode) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolGetInternals, (MX_handle, Ref{MX_handle}), self, ref_hdr) + return SymbolicNode(MX_SymbolHandle(ref_hdr[])) +end + +"Create a symbolic variable with the given name" +function Variable(name :: Union{Base.Symbol, AbstractString}) + hdr_ref = Ref{MX_handle}(0) + @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) + SymbolicNode(MX_SymbolHandle(hdr_ref[])) +end + +"Create a SymbolicNode that groups symbols together" +function Group(symbols :: SymbolicNode...) + handles = MX_handle[symbols...] + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolCreateGroup, (MX_uint, Ptr{MX_handle}, Ref{MX_handle}), + length(handles), handles, ref_hdr) + SymbolicNode(MX_SymbolHandle(ref_hdr[])) +end + +macro _infer_shape(self, keys, indptr, sdata) + quote + ref_arg_shape_size = Ref{MX_uint}(0) + ref_arg_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_arg_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_out_shape_size = Ref{MX_uint}(0) + ref_out_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_out_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_aux_shape_size = Ref{MX_uint}(0) + ref_aux_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_aux_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_complete = Ref{Cint}(0) + @mxcall(:MXSymbolInferShape, + (MX_handle, MX_uint, char_pp, Ptr{MX_uint}, Ptr{MX_uint}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{Cint}), + self, length(indptr)-1, keys, indptr, sdata, + ref_arg_shape_size, ref_arg_shape_ndim, ref_arg_shape_data, + ref_out_shape_size, ref_out_shape_ndim, ref_out_shape_data, + ref_aux_shape_size, 
ref_aux_shape_ndim, ref_aux_shape_data, + ref_complete) + if ref_complete[] == 0 + return (nothing, nothing, nothing) + else + function build_shapes(shape_size::MX_uint, shape_ndim::Ptr{MX_uint}, shape_data::Ptr{Ptr{MX_uint}}) + shape_ndim = pointer_to_array(shape_ndim, shape_size) + shape_data = pointer_to_array(shape_data, shape_size) + shapes = map(1:shape_size) do i + my_shape = pointer_to_array(shape_data[i], shape_ndim[i]) + tuple(flipdim(Int[my_shape...],1)...) + end + convert(Vector{Tuple}, shapes) + end + return ( + build_shapes(ref_arg_shape_size[], ref_arg_shape_ndim[], ref_arg_shape_data[]), + build_shapes(ref_out_shape_size[], ref_out_shape_ndim[], ref_out_shape_data[]), + build_shapes(ref_aux_shape_size[], ref_aux_shape_ndim[], ref_aux_shape_data[]) + ) + end + end +end +function infer_shape(self :: SymbolicNode; kwargs...) + sdata = MX_uint[] + indptr = MX_uint[0] + for (k,v) in kwargs + append!(sdata, flipdim([v...],1)) + push!(indptr, length(sdata)) + end + keys = AbstractString[string(x[1]) for x in kwargs] + @_infer_shape(self, keys, indptr, sdata) +end +function infer_shape(self :: SymbolicNode, args :: Union{Tuple, Void}...) 
+ sdata = MX_uint[] + indptr = MX_uint[0] + for arg in args + if isa(arg, Void); continue; end + append!(sdata, flipdim([arg...],1)) + push!(indptr, length(sdata)) + end + keys = Ptr{char_p}(0) + @_infer_shape(self, keys, indptr, sdata) +end + +function Base.getindex(self :: SymbolicNode, idx :: Union{Base.Symbol, AbstractString}) + idx = symbol(idx) + i_idx = find(idx .== list_outputs(self)) + @assert(length(i_idx) > 0, "Cannot find output with name '$idx'") + @assert(length(i_idx) < 2, "Found duplicated output with name '$idx'") + Base.getindex(self, i_idx[1]) +end +function Base.getindex(self :: SymbolicNode, idx :: Int) + ref_hdr = Ref{MX_handle}(0) + # note Julia is 1-based, while MXNet is 0-based + @mxcall(:MXSymbolGetOutput, (MX_handle, MX_uint, Ref{MX_handle}), self, idx-1, ref_hdr) + return SymbolicNode(MX_SymbolHandle(ref_hdr[])) +end + +import Base: +, .+ +function +(self :: SymbolicNode, args :: SymbolicNode...) + ret = self + for arg in args + ret = _Plus(ret, arg) + end + ret +end +function .+(self :: SymbolicNode, args :: SymbolicNode...) + +(self, args...) +end + +import Base: -, .- +function -(self :: SymbolicNode, arg :: SymbolicNode) + _Minus(self, arg) +end +function .-(self :: SymbolicNode, arg :: SymbolicNode) + -(self, arg) +end + +import Base: .* +function .*(self :: SymbolicNode, args :: SymbolicNode...) + ret = self + for arg in args + ret = _Mul(ret, arg) + end + ret +end + +import Base: ./ +function ./(self :: SymbolicNode, arg :: SymbolicNode) + _Div(self, arg) +end + +function _compose!(sym :: SymbolicNode; kwargs...) 
+ name = char_p(0) + arg_keys = AbstractString[] + arg_vals = MX_handle[] + + for (k,v) in kwargs + if k == :name + name = string(v) + else + @assert(isa(v, SymbolicNode), "Compose expect `SymbolicNode` as arguments") + push!(arg_keys, string(k)) + push!(arg_vals, v) + end + end + + @mxcall(:MXSymbolCompose, + (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), + sym, name, length(arg_keys), arg_keys, arg_vals) + return sym +end +function _compose!(sym :: SymbolicNode, args::SymbolicNode...) + _compose!(sym, char_p(0), args...) +end +function _compose!(sym :: SymbolicNode, name :: Union{Base.Symbol, char_p}, args::SymbolicNode...) + if isa(name, Base.Symbol); name = string(name); end + arg_keys = Ptr{char_p}(0) + arg_vals = MX_handle[args...] + + @mxcall(:MXSymbolCompose, + (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), + sym, name, length(arg_vals), arg_keys, arg_vals) + return sym +end + +"""Save SymbolicNode into a JSON string""" +function to_json(self :: SymbolicNode) + ref_json = Ref{char_p}(0) + @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) + return bytestring(ref_json[]) +end + +"""Load SymbolicNode from a JSON string representation.""" +function from_json(repr :: AbstractString, ::Type{SymbolicNode}) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolCreateFromJSON, (char_p, Ref{MX_handle}), repr, ref_hdr) + return SymbolicNode(MX_SymbolHandle(ref_hdr[])) +end + +"""Load SymbolicNode from a JSON file.""" +function load(filename :: AbstractString, ::Type{SymbolicNode}) + ref_hdr = Ref{MX_handle}(0) + @mxcall(:MXSymbolCreateFromFile, (char_p, Ref{MX_handle}), filename, ref_hdr) + return SymbolicNode(MX_SymbolHandle(ref_hdr[])) +end +function save(filename :: AbstractString, sym :: SymbolicNode) + @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), sym, filename) +end + +#=doc +libmxnet APIs +------------- + +**autogen:EMBED:SymbolicNode:EMBED:autogen** +=# 
+################################################################################ +# Atomic SymbolicNode functions dynamically imported from libmxnet +################################################################################ +function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) + ref_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_kv_nargs = Ref{char_p}(0) + ref_nargs = Ref{MX_uint}(0) + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) + + @mxcall(:MXSymbolGetAtomicSymbolInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, + Ref{char_pp}, Ref{char_p}), + hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, ref_kv_nargs) + + func_name = symbol(bytestring(ref_name[])) + kv_nargs_s = bytestring(ref_kv_nargs[]) + kv_nargs = symbol(kv_nargs_s) + + if gen_docs + f_desc = bytestring(ref_desc[]) * "\n\n" + if !isempty(kv_nargs_s) + f_desc *= "This function support variable length positional :class:`SymbolicNode` inputs.\n\n" + end + f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) + f_desc *= ":param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional.\n\n" + f_desc *= ":return: the constructed :class:`SymbolicNode`.\n\n" + return (func_name, f_desc) + end + + # function $func_name(args...; kwargs...) 
+ func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) + func_body = quote + idx = findfirst(x -> x[1] == :name, kwargs) + if idx > 0 + name = kwargs[idx][2] + else + name = "" + end + + param_keys = AbstractString[] + param_vals = AbstractString[] + symbol_kws = Dict{Base.Symbol, SymbolicNode}() + + $(if kv_nargs != symbol("") + quote + if !in($kv_nargs_s, param_keys) + push!(param_keys, $kv_nargs_s) + push!(param_vals, string(length(args))) + end + end + end) + + for (k,v) in kwargs + if k == :name; continue; end + if isa(v, SymbolicNode) + symbol_kws[k] = v + else + push!(param_keys, string(k)) + push!(param_vals, dump_mx_param(v)) + end + end + + if length(args) != 0 && length(symbol_kws) != 0 + @assert(false, "$func_name only accepts Symbols either as positional or keyword arguments, not both.") + end + $(if kv_nargs != symbol("") + quote + if length(symbol_kws) > 0 + @assert(false, "$func_name takes variable number of SymbolicNode arguments, please pass input Symbols " * + "via positional arguments, instead of keyword arguments.") + end + end + end) + + # create the SymbolicNode + ref_sym_hdr = Ref{MX_handle}() + @mxcall(:MXSymbolCreateAtomicSymbol, + (MX_handle, MX_uint, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), + $hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) + sym_hdr = ref_sym_hdr[] + + sym = SymbolicNode(MX_SymbolHandle(sym_hdr)) + hint = lowercase(string($func_name)) + name = get!(DEFAULT_NAME_MANAGER, name, hint) + + if length(args) != 0 + _compose!(sym, name, args...) + else + _compose!(sym; name=name, symbol_kws...) 
+ end + + return sym + end + + func_def = Expr(:function, func_head, Expr(:block, func_body)) + eval(func_def) +end + +function _import_atomic_symbol_creators(;gen_docs=false) + n_ref = Ref{MX_uint}(0) + h_ref = Ref{Ptr{MX_handle}}(0) + @mxcall(:MXSymbolListAtomicSymbolCreators, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) + + n_creators = n_ref[] + h_creators = pointer_to_array(h_ref[], n_creators) + + if gen_docs + docs = Dict{Base.Symbol, AbstractString}() + end + + for i = 1:n_creators + creator_hdr = h_creators[i] + ret = _define_atomic_symbol_creator(creator_hdr, gen_docs=gen_docs) + if gen_docs + docs[ret[1]] = ret[2] + end + end + + if gen_docs + return docs + end +end + +################################################################################ +# Utility macros to chain up symbols +################################################################################ +macro chain(layers) + exprs = [] + last_layer = nothing + function _chain_layer(layer, last_layer) + if isa(last_layer, Void) + esc(layer) + else + @assert(isa(layer, Expr) && layer.head == :call, "Do not know how to chain up $layer") + return Expr(:call, esc(layer.args[1]), last_layer, map(esc, layer.args[2:end])...) + end + end + while true + if layers.head == :(=>) + new_layer = gensym() + push!(exprs, :($new_layer = $(_chain_layer(layers.args[1], last_layer)))) + last_layer = new_layer + layers = layers.args[2] + else + push!(exprs, _chain_layer(layers, last_layer)) + break + end + end + return Expr(:block, exprs...) 
+end + diff --git a/src/node.jl b/src/symbolic-node.jl-e similarity index 100% rename from src/node.jl rename to src/symbolic-node.jl-e diff --git a/src/util.jl-e b/src/util.jl-e new file mode 100644 index 000000000000..4ca613cbf7d1 --- /dev/null +++ b/src/util.jl-e @@ -0,0 +1,70 @@ +################################################################################ +# Dataset related utilities +################################################################################ +function get_data_dir() + data_dir = joinpath(Pkg.dir("MXNet"), "data") + mkpath(data_dir) + data_dir +end + +function get_mnist_ubyte() + data_dir = get_data_dir() + mnist_dir = joinpath(data_dir, "mnist") + mkpath(mnist_dir) + filenames = Dict(:train_data => "train-images-idx3-ubyte", + :train_label => "train-labels-idx1-ubyte", + :test_data => "t10k-images-idx3-ubyte", + :test_label => "t10k-labels-idx1-ubyte") + filenames = [k => joinpath(mnist_dir, v) for (k,v) in filenames] + if !all(isfile, values(filenames)) + cd(mnist_dir) do + run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip`) + run(`unzip -u mnist.zip`) + end + end + return filenames +end + +function get_cifar10() + data_dir = get_data_dir() + cifar10_dir = joinpath(data_dir, "cifar10") + mkpath(cifar10_dir) + filenames = Dict(:train => "cifar/train.rec", :test => "cifar/test.rec") + filenames = [k => joinpath(cifar10_dir, v) for (k,v) in filenames] + if !all(isfile, values(filenames)) + cd(cifar10_dir) do + run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip`) + run(`unzip -u cifar10.zip`) + end + end + + filenames[:mean] = joinpath(cifar10_dir, "cifar/cifar_mean.bin") + return filenames +end + + +################################################################################ +# Internal Utilities +################################################################################ +const DOC_EMBED_ANCHOR = "**autogen:EMBED:{1}:EMBED:autogen**" +function _format_docstring(narg::Int, arg_names::Ref{char_pp}, 
arg_types::Ref{char_pp}, arg_descs::Ref{char_pp}, remove_dup::Bool=true) + param_keys = Set{AbstractString}() + + arg_names = pointer_to_array(arg_names[], narg) + arg_types = pointer_to_array(arg_types[], narg) + arg_descs = pointer_to_array(arg_descs[], narg) + docstrings = AbstractString[] + + for i = 1:narg + arg_name = bytestring(arg_names[i]) + if arg_name ∈ param_keys && remove_dup + continue + end + push!(param_keys, arg_name) + + arg_type = bytestring(arg_types[i]) + arg_desc = bytestring(arg_descs[i]) + push!(docstrings, ":param $arg_name: $arg_desc\n:type $arg_name: $arg_type\n\n") + end + return join(docstrings, "\n") +end diff --git a/test/unittest/node.jl b/test/unittest/symbolic-node.jl similarity index 90% rename from test/unittest/node.jl rename to test/unittest/symbolic-node.jl index f7e9b51e60f0..33948adfcd40 100644 --- a/test/unittest/node.jl +++ b/test/unittest/symbolic-node.jl @@ -1,4 +1,4 @@ -module TestNode +module TestSymbolicNode using MXNet using Base.Test @@ -8,7 +8,7 @@ using ..Main: mlp2 # Test Implementations ################################################################################ function test_basic() - info("Node::basic") + info("SymbolicNode::basic") model = mlp2() @test mx.list_arguments(model) == [:data,:fc1_weight,:fc1_bias,:fc2_weight,:fc2_bias] @@ -17,7 +17,7 @@ function test_basic() end function test_internal() - info("Node::internal") + info("SymbolicNode::internal") data = mx.Variable(:data) oldfc = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) @@ -31,7 +31,7 @@ function test_internal() end function test_compose() - info("Node::compose") + info("SymbolicNode::compose") data = mx.Variable(:data) net1 = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) @@ -47,7 +47,7 @@ function test_compose() end function test_infer_shape() - info("Node::infer_shape::mlp2") + info("SymbolicNode::infer_shape::mlp2") model = mlp2() data_shape = (100, 100) @@ -61,7 +61,7 @@ function test_infer_shape() end function 
test_infer_shape_error() - info("Node::infer_shape::throws") + info("SymbolicNode::infer_shape::throws") model = mlp2() weight_shape = (100, 1) @@ -70,12 +70,12 @@ function test_infer_shape_error() end function test_saveload() - info("Node::saveload::mlp2") + info("SymbolicNode::saveload::mlp2") model = mlp2() fname = tempname() mx.save(fname, model) - model2 = mx.load(fname, mx.Node) + model2 = mx.load(fname, mx.SymbolicNode) @test mx.to_json(model) == mx.to_json(model2) rm(fname) From aab8784c12724e39976e5ac597b895ede55e5629 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 15:25:17 -0500 Subject: [PATCH 186/630] upate doc for refactoring --- docs/api/executor.rst | 6 +- docs/api/io.rst | 2 +- docs/api/model.rst | 2 +- docs/api/{symbol.rst => symbolic-node.rst} | 132 ++-- docs/build-api.jl | 4 +- docs/index.rst | 2 +- src/MXNet.jl-e | 35 - src/base.jl-e | 212 ------ src/callback.jl-e | 142 ---- src/context.jl-e | 44 -- src/executor.jl-e | 197 ----- src/initializer.jl-e | 117 --- src/io.jl-e | 641 ----------------- src/kvstore.jl-e | 132 ---- src/metric.jl-e | 37 - src/model.jl-e | 566 --------------- src/name.jl-e | 44 -- src/ndarray.jl-e | 800 --------------------- src/optimizer.jl-e | 226 ------ src/random.jl-e | 25 - src/symbolic-node.jl | 2 +- src/symbolic-node.jl-e | 437 ----------- src/util.jl-e | 70 -- 23 files changed, 75 insertions(+), 3800 deletions(-) rename docs/api/{symbol.rst => symbolic-node.rst} (69%) delete mode 100644 src/MXNet.jl-e delete mode 100644 src/base.jl-e delete mode 100644 src/callback.jl-e delete mode 100644 src/context.jl-e delete mode 100644 src/executor.jl-e delete mode 100644 src/initializer.jl-e delete mode 100644 src/io.jl-e delete mode 100644 src/kvstore.jl-e delete mode 100644 src/metric.jl-e delete mode 100644 src/model.jl-e delete mode 100644 src/name.jl-e delete mode 100644 src/ndarray.jl-e delete mode 100644 src/optimizer.jl-e delete mode 100644 src/random.jl-e delete mode 100644 src/symbolic-node.jl-e 
delete mode 100644 src/util.jl-e diff --git a/docs/api/executor.rst b/docs/api/executor.rst index 8887d4ce1d24..69c78137cd0e 100644 --- a/docs/api/executor.rst +++ b/docs/api/executor.rst @@ -7,7 +7,7 @@ Executor .. class:: Executor - An executor is a realization of a symbolic architecture defined by a :class:`Symbol`. + An executor is a realization of a symbolic architecture defined by a :class:`SymbolicNode`. The actual forward and backward computation specified by the network architecture can be carried out with an executor. @@ -16,9 +16,9 @@ Executor .. function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) - Create an :class:`Executor` by binding a :class:`Symbol` to concrete :class:`NDArray`. + Create an :class:`Executor` by binding a :class:`SymbolicNode` to concrete :class:`NDArray`. - :param Symbol sym: the network architecture describing the computation graph. + :param SymbolicNode sym: the network architecture describing the computation graph. :param Context ctx: the context on which the computation should run. :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete arrays for all the inputs in the network architecture. The inputs typically include diff --git a/docs/api/io.rst b/docs/api/io.rst index a7568a7b6f71..e9d9c04e9f3c 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -36,7 +36,7 @@ and split it into mini-batches so that the model can consume the data in a unifo training stage, both *data* and *label* will be feeded into the model, while during prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and of any shapes. The provided data and label names here should match the input names in a target - :class:`Symbol`. + :class:`SymbolicNode`. A data provider should also implement the Julia iteration interface, in order to allow iterating through the data set. 
The provider will be called in the following way: diff --git a/docs/api/model.rst b/docs/api/model.rst index ab330db9bcfa..d5245614061b 100644 --- a/docs/api/model.rst +++ b/docs/api/model.rst @@ -26,7 +26,7 @@ a network described using the symbolic API. -.. function:: FeedForward(arch :: Symbol, ctx) +.. function:: FeedForward(arch :: SymbolicNode, ctx) :param arch: the architecture of the network constructed using the symbolic API. :param ctx: the devices on which this model should do computation. It could be a single :class:`Context` diff --git a/docs/api/symbol.rst b/docs/api/symbolic-node.rst similarity index 69% rename from docs/api/symbol.rst rename to docs/api/symbolic-node.rst index e01ecb35980d..9386c10a8614 100644 --- a/docs/api/symbol.rst +++ b/docs/api/symbolic-node.rst @@ -5,29 +5,29 @@ Symbolic API -.. class:: Symbol +.. class:: SymbolicNode - Symbol is the basic building block of the symbolic graph in MXNet.jl. + SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. .. note:: - Throughout this documentation, ``Symbol`` always refer to this :class:`Symbol` type. - When we refer to the Julia's build-in symbol type (e.g. ``typeof(:foo)``), we always + Throughout this documentation, ``SymbolicNode`` always refer to this :class:`SymbolicNode` type. + When we refer to the Julia's build-in SymbolicNode type (e.g. ``typeof(:foo)``), we always say ``Base.Symbol``. -.. function:: deepcopy(self :: Symbol) +.. function:: deepcopy(self :: SymbolicNode) - Make a deep copy of a symbol. + Make a deep copy of a SymbolicNode. -.. function:: copy(self :: Symbol) +.. function:: copy(self :: SymbolicNode) - Make a copy of a symbol. The same as making a deep copy. + Make a copy of a SymbolicNode. The same as making a deep copy. @@ -48,9 +48,9 @@ Public APIs :param act_type: Activation function to be applied. :type act_type: {'relu', 'sigmoid', 'tanh'}, required - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. 
+ :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -71,9 +71,9 @@ Public APIs :param momentum: Momentum for moving average :type momentum: float, optional, default=0.1 - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -86,9 +86,9 @@ Public APIs :param data: Input data. :type data: Symbol - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -98,14 +98,14 @@ Public APIs Perform an feature concat on channel dim (dim 1) over all the inputs. - This function support variable length positional :class:`Symbol` inputs. + This function support variable length positional :class:`SymbolicNode` inputs. :param num_args: Number of inputs to be concated. :type num_args: int, required - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -154,9 +154,9 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=False - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -205,9 +205,9 @@ Public APIs :param no_bias: Whether to disable bias parameter. 
:type no_bias: boolean, optional, default=True - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -224,9 +224,9 @@ Public APIs :param p: Fraction of the input that gets dropped out at training time :type p: float, optional, default=0.5 - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -236,14 +236,14 @@ Public APIs Perform an elementwise sum over all the inputs. - This function support variable length positional :class:`Symbol` inputs. + This function support variable length positional :class:`SymbolicNode` inputs. :param num_args: Number of inputs to be sumed. :type num_args: int, required - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -256,9 +256,9 @@ Public APIs :param data: Input data to flatten. :type data: Symbol - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -287,9 +287,9 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=False - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. 
+ :return: the constructed :class:`SymbolicNode`. @@ -318,9 +318,9 @@ Public APIs :param nsize: normalization window width in elements. :type nsize: int (non-negative), required - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -349,9 +349,9 @@ Public APIs :param upper_bound: Upper bound of random slope. (For rrelu only) :type upper_bound: float, optional, default=0.334 - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -368,9 +368,9 @@ Public APIs :param label: Input label to function. :type label: Symbol - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -388,9 +388,9 @@ Public APIs :param label: Input label to function. :type label: Symbol - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -419,9 +419,9 @@ Public APIs :param pad: pad for pooling: (y, x) :type pad: Shape(tuple), optional, default=(0, 0) - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. 
@@ -438,9 +438,9 @@ Public APIs :param target_shape: Target new shape :type target_shape: Shape(tuple), required - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -453,9 +453,9 @@ Public APIs :param num_outputs: Number of outputs to be sliced. :type num_outputs: int, required - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -476,9 +476,9 @@ Public APIs :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -499,9 +499,9 @@ Public APIs :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -514,9 +514,9 @@ Public APIs :param src: Source symbolic input to the function :type src: Symbol - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. 
`:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -529,9 +529,9 @@ Public APIs :param src: Source symbolic input to the function :type src: Symbol - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -544,9 +544,9 @@ Public APIs :param src: Source symbolic input to the function :type src: Symbol - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -559,9 +559,9 @@ Public APIs :param src: Source symbolic input to the function :type src: Symbol - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -577,9 +577,9 @@ Internal APIs Perform an elementwise div. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -589,9 +589,9 @@ Internal APIs Perform an elementwise minus. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -601,9 +601,9 @@ Internal APIs Perform an elementwise mul. - :param Base.Symbol name: The name of the symbol. (e.g. 
`:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -620,9 +620,9 @@ Internal APIs :param need_top_grad: Whether this layer needs out grad for backward. Should be false for loss layers. :type need_top_grad: boolean, optional, default=True - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. @@ -632,9 +632,9 @@ Internal APIs Perform an elementwise plus. - :param Base.Symbol name: The name of the symbol. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. - :return: the constructed :class:`Symbol`. + :return: the constructed :class:`SymbolicNode`. diff --git a/docs/build-api.jl b/docs/build-api.jl index b70c480c9d79..510426ee2004 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -79,8 +79,8 @@ extract_doc("context.rst", "context.jl") extract_doc("ndarray.rst", "ndarray.jl") embed_mxnet_api("ndarray.rst", "ndarray", mx._import_ndarray_functions) -extract_doc("symbol.rst", "symbol.jl") -embed_mxnet_api("symbol.rst", "symbol", mx._import_atomic_symbol_creators) +extract_doc("symbolic-node.rst", "symbolic-node.jl") +embed_mxnet_api("symbolic-node.rst", "symbolic-node", mx._import_atomic_symbol_creators) extract_doc("executor.rst", "executor.jl") diff --git a/docs/index.rst b/docs/index.rst index 7520047db11a..6203ad3a1af4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -38,7 +38,7 @@ For more details, see documentation below. 
Please also checkout the `examples api/callback api/io api/ndarray - api/symbol + api/symbolic-node api/executor Indices and tables diff --git a/src/MXNet.jl-e b/src/MXNet.jl-e deleted file mode 100644 index f9f9e8664c4f..000000000000 --- a/src/MXNet.jl-e +++ /dev/null @@ -1,35 +0,0 @@ -__precompile__() - -module MXNet - -# we put everything in the namespace mx, because there are a lot of -# functions with the same names as built-in utilities like "zeros", etc. -export mx -module mx -using Formatting - -include("base.jl") -include("context.jl") - -include("ndarray.jl") -include("random.jl") - -include("name.jl") -include("symbolic-node.jl") -include("executor.jl") - -include("metric.jl") -include("optimizer.jl") -include("initializer.jl") - -include("io.jl") -include("kvstore.jl") - -include("callback.jl") -include("model.jl") - -include("util.jl") - -end # mx - -end # module MXNet diff --git a/src/base.jl-e b/src/base.jl-e deleted file mode 100644 index 0988400b9d31..000000000000 --- a/src/base.jl-e +++ /dev/null @@ -1,212 +0,0 @@ -"Exception thrown when an error occurred calling MXNet API." 
-immutable MXError <: Exception - msg :: AbstractString -end - -################################################################################ -# Common types used in MXNet API -################################################################################ -typealias MX_uint Cuint -typealias MX_float Cfloat -typealias MX_handle Ptr{Void} - -typealias char_p Ptr{UInt8} -typealias char_pp Ptr{char_p} - -################################################################################ -# Initialization and library API entrance -################################################################################ -const MXNET_LIB = Libdl.find_library(["libmxnet.so","libmxnet.dll"], - [joinpath("$(get(ENV,"MXNET_HOME",""))","lib"), - joinpath(Pkg.dir("MXNet"),"deps/usr/lib")]) -if isempty(MXNET_LIB) - # touch this file, so that after the user properly build libmxnet, the precompiled - # MXNet.ji will be re-compiled to get MXNET_LIB properly. - touch(@__FILE__) - error("Cannot find or load libmxnet.so. Please see the document on how to build it.") -end - -function __init__() - _import_ndarray_functions() - _import_atomic_symbol_creators() - _import_io_iterators() - - atexit() do - # notify libmxnet we are shutting down - ccall( ("MXNotifyShutdown", MXNET_LIB), Cint, () ) - end -end - -function mx_get_last_error() - msg = ccall( ("MXGetLastError", MXNET_LIB), char_p, () ) - if msg == C_NULL - throw(MXError("Failed to get last error message")) - end - return bytestring(msg) -end - -"Utility macro to call MXNet API functions" -macro mxcall(fv, argtypes, args...) - f = eval(fv) - args = map(esc, args) - quote - _mxret = ccall( ($(Meta.quot(f)), $MXNET_LIB), - Cint, $argtypes, $(args...) 
) - if _mxret != 0 - err_msg = mx_get_last_error() - throw(MXError(err_msg)) - end - end -end - -################################################################################ -# Handle types -################################################################################ -macro mx_define_handle_t(name, destructor) - name = esc(name) - quote - type $name - value :: MX_handle - - function $name(value = C_NULL) - hdr = new(value) - - $(if destructor != :nop - :(finalizer(hdr, delete!)) - end) - - return hdr - end - end - - $(if finalizer != :nop - quote - function delete!(h :: $name) - if h.value != C_NULL - @mxcall($(Meta.quot(destructor)), (MX_handle,), h.value) - h.value = C_NULL - end - end - end - end) - - function Base.unsafe_convert(::Type{MX_handle}, obj::$name) - obj.value - end - Base.convert(t::Type{MX_handle}, obj::$name) = Base.unsafe_convert(t, obj) - Base.cconvert(t::Type{MX_handle}, obj::$name) = Base.unsafe_convert(t, obj) - - function Base.isnull(obj::$name) obj.value == C_NULL end - end -end - -@mx_define_handle_t(MX_NDArrayHandle, MXNDArrayFree) -@mx_define_handle_t(MX_FunctionHandle, nop) -@mx_define_handle_t(MX_SymbolHandle, MXSymbolFree) -@mx_define_handle_t(MX_ExecutorHandle, MXExecutorFree) -@mx_define_handle_t(MX_DataIterHandle, MXDataIterFree) -@mx_define_handle_t(MX_KVStoreHandle, MXKVStoreFree) - -################################################################################ -# MXNet Params -# -# MXNet API use string to pass some common parameters like the configurations -# when defining layers. Typically, it is enough to use string(obj) to get a -# recognizable representation for libmxnet. However, there is currently a -# caveat: -# -# Because Julia use column-major ordering for tensors. In order to properly -# interact with Julia Arrays, the shape will look "reversed" from the Julia -# side. 
For example, a typical MNIST mini-batch tensor is of shape (28,28,1,100) -# from Julia side, while the shape information for the same piece of memory -# should be interpreted as (100,1,28,28) from C/C++/Python side. -# -# Therefore, when passing parameters to libmxnet, we should reverse the shape -# parameter. For example, when the user specify a non-square kernel size for -# a convolution or pooling layer. Unfortunately, those operators are automatically -# imported, and information about the type of each parameter is somehow limited. -# One hacky way is to match the type description for the string "Shape(tuple)" -# when importing operators. But currently we simply decided to reverse **all** -# NTuple{N, Int} passed to libmxnet. -# -# TODO: find a better solution in case this cause issues in the future. -################################################################################ -function dump_mx_param(val :: Any) - string(val) -end -function dump_mx_param{N,T<:Integer}(shape :: NTuple{N, T}) - string(tuple(flipdim([shape...],1)...)) -end - -"""A convenient macro copied from Mocha.jl that could be used to define structs -with default values and type checks. For example -```julia -@defstruct MyStruct Any ( - field1 :: Int = 0, - (field2 :: AbstractString = "", !isempty(field2)) -) -``` -where each field could be either -```julia -field_name :: field_type = default_value -``` -or put within a tuple, with the second element -specifying a validation check on the field value. -In the example above, the default value for -field2 does not satisfy the assertion, this -could be used to force user to provide a -valid value when no meaningful default value -is available. - -The macro will define a constructor that could accept -the keyword arguments. 
-""" -macro defstruct(name, super_name, fields) - @assert fields.head == :tuple - fields = fields.args - @assert length(fields) > 0 - name = esc(name) - super_name = esc(super_name) - - field_defs = Array(Expr, length(fields)) # :(field2 :: Int) - field_names = Array(Expr, length(fields)) # :field2 - field_defaults = Array(Expr, length(fields)) # :(field2 = 0) - field_types = Array(Expr, length(fields)) # Int - field_asserts = Array(Expr, length(fields)) # :(field2 >= 0) - - for i = 1:length(fields) - field = fields[i] - if field.head == :tuple - field_asserts[i] = esc(field.args[2]) - field = field.args[1] - end - field_defs[i] = esc(field.args[1]) - field_names[i] = esc(field.args[1].args[1]) - field_types[i] = esc(field.args[1].args[2]) - field_defaults[i] = Expr(:kw, field.args[1].args[1], esc(field.args[2])) - end - - # body of layer type, defining fields - type_body = Expr(:block, field_defs...) - - # constructor - converts = map(zip(field_names, field_types)) do param - f_name, f_type = param - :($f_name = convert($f_type, $f_name)) - end - asserts = map(filter(i -> isdefined(field_asserts,i), 1:length(fields))) do i - :(@assert($(field_asserts[i]))) - end - construct = Expr(:call, name, field_names...) - ctor_body = Expr(:block, converts..., asserts..., construct) - ctor_def = Expr(:call, name, Expr(:parameters, field_defaults...)) - ctor = Expr(:(=), ctor_def, ctor_body) - - quote - type $(name) <: $super_name - $type_body - end - - $ctor - end -end diff --git a/src/callback.jl-e b/src/callback.jl-e deleted file mode 100644 index 9f3d85b576ff..000000000000 --- a/src/callback.jl-e +++ /dev/null @@ -1,142 +0,0 @@ -#=doc -Callbacks in training -===================== -=# - -#=doc -.. class:: AbstractCallback - - Abstract type of callback functions used in training. -=# -abstract AbstractCallback - -#=doc -.. class:: AbstractBatchCallback - - Abstract type of callbacks to be called every mini-batch. 
-=# -abstract AbstractBatchCallback <: AbstractCallback - -#=doc -.. class:: AbstractEpochCallback - - Abstract type of callbacks to be called every epoch. -=# -abstract AbstractEpochCallback <: AbstractCallback - -type BatchCallback <: AbstractBatchCallback - frequency :: Int - call_on_0 :: Bool - callback :: Function -end - -#=doc -.. function:: every_n_batch(callback :: Function, n :: Int; call_on_0 = false) - - A convenient function to construct a callback that runs every ``n`` mini-batches. - - :param Int call_on_0: keyword argument, default false. Unless set, the callback - will **not** be run on batch 0. - - For example, the :func:`speedometer` callback is defined as - - .. code-block:: julia - - every_n_iter(frequency, call_on_0=true) do state :: OptimizationState - if state.curr_batch == 0 - # reset timer - else - # compute and print speed - end - end - - :seealso: :func:`every_n_epoch`, :func:`speedometer`. -=# -function every_n_batch(callback :: Function, n :: Int; call_on_0 :: Bool = false) - BatchCallback(n, call_on_0, callback) -end -function Base.call(cb :: BatchCallback, state :: OptimizationState) - if state.curr_batch == 0 - if cb.call_on_0 - cb.callback(state) - end - elseif state.curr_batch % cb.frequency == 0 - cb.callback(state) - end -end - -#=doc -.. function:: speedometer(; frequency=50) - - Create an :class:`AbstractBatchCallback` that measure the training speed - (number of samples processed per second) every k mini-batches. - - :param Int frequency: keyword argument, default 50. The frequency (number of - min-batches) to measure and report the speed. 
-=# -function speedometer(;frequency::Int=50) - cl_tic = 0 - every_n_batch(frequency, call_on_0=true) do state :: OptimizationState - if state.curr_batch == 0 - # reset timer - cl_tic = time() - else - speed = frequency * state.batch_size / (time() - cl_tic) - info(format("Speed: {1:>6.2f} samples/sec", speed)) - cl_tic = time() - end - end -end - - -type EpochCallback <: AbstractEpochCallback - frequency :: Int - call_on_0 :: Bool - callback :: Function -end - -#=doc -.. function:: every_n_epoch(callback :: Function, n :: Int; call_on_0 = false) - - A convenient function to construct a callback that runs every ``n`` full data-passes. - - :param Int call_on_0: keyword argument, default false. Unless set, the callback - will **not** be run on epoch 0. Epoch 0 means no training has been performed - yet. This is useful if you want to inspect the randomly initialized model - that has not seen any data yet. - - :seealso: :func:`every_n_iter`. -=# -function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) - EpochCallback(n, call_on_0, callback) -end -function Base.call(cb :: EpochCallback, model :: Any, state :: OptimizationState) - if state.curr_epoch == 0 - if cb.call_on_0 - cb.callback(model, state) - end - elseif state.curr_epoch % cb.frequency == 0 - cb.callback(model, state) - end -end - -#=doc -.. function:: do_checkpoint(prefix; frequency=1, save_epoch_0=false) - - Create an :class:`AbstractEpochCallback` that save checkpoints of the model to disk. - The checkpoints can be loaded back later on. - - :param AbstractString prefix: the prefix of the filenames to save the model. The model - architecture will be saved to prefix-symbol.json, while the weights will be saved - to prefix-0012.params, for example, for the 12-th epoch. - :param Int frequency: keyword argument, default 1. The frequency (measured in epochs) to - save checkpoints. - :param Bool save_epoch_0: keyword argument, default false. 
Whether we should save a - checkpoint for epoch 0 (model initialized but not seen any data yet). -=# -function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) - mkpath(dirname(prefix)) - every_n_epoch(frequency, call_on_0=save_epoch_0) do model, state - save_checkpoint(model, prefix, state) - end -end diff --git a/src/context.jl-e b/src/context.jl-e deleted file mode 100644 index 1e96c305fb04..000000000000 --- a/src/context.jl-e +++ /dev/null @@ -1,44 +0,0 @@ -#=doc -Context -======= -=# -@enum CONTEXT_TYPE CPU=1 GPU=2 CPU_PINNED=3 - -#=doc -.. class:: Context - - A context describes the device type and id on which computation should be carried on. -=# -immutable Context - device_type :: CONTEXT_TYPE - device_id :: Int -end -Context(dev_type :: Union{CONTEXT_TYPE, Int}, dev_id :: Int = 0) = - Context(convert(CONTEXT_TYPE, dev_type), dev_id) - -function Base.show(io :: IO, ctx :: Context) - print(io, "$(ctx.device_type)$(ctx.device_id)") -end - -#=doc -.. function:: cpu(dev_id=0) - - :param Int dev_id: the CPU id. - - Get a CPU context with a specific id. ``cpu()`` is usually the default context for many - operations when no context is specified. -=# -function cpu(dev_id::Int=0) - return Context(CPU, dev_id) -end - -#=doc -.. function:: gpu(dev_id=0) - - :param Int dev_id: the GPU device id. - - Get a GPU context with a specific id. The K GPUs on a node is typically numbered as 0,...,K-1. -=# -function gpu(dev_id::Int=0) - return Context(GPU, dev_id) -end diff --git a/src/executor.jl-e b/src/executor.jl-e deleted file mode 100644 index 6dea17763101..000000000000 --- a/src/executor.jl-e +++ /dev/null @@ -1,197 +0,0 @@ -#=doc -Executor -======== -=# - -#=doc -.. class:: Executor - - An executor is a realization of a symbolic architecture defined by a :class:`Node`. - The actual forward and backward computation specified by the network architecture can - be carried out with an executor. 
-=# -type Executor - handle :: MX_ExecutorHandle - symbol :: Node - arg_arrays :: Vector{NDArray} - grad_arrays :: Vector{Union{Void,NDArray}} - aux_arrays :: Vector{NDArray} - outputs :: Vector{NDArray} - arg_dict :: Dict{Base.Symbol, NDArray} - aux_dict :: Dict{Base.Symbol, NDArray} -end -function Executor(hdr :: MX_ExecutorHandle, symbol :: Node, - arg_arrays :: Vector{NDArray}, grad_arrays :: Vector{Union{Void,NDArray}}, - aux_arrays :: Vector{NDArray}) - # get output arrays - ref_size = Ref{MX_uint}(0) - ref_hdrs = Ref{Ptr{MX_handle}}(0) - @mxcall(:MXExecutorOutputs, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_handle}}), - hdr, ref_size, ref_hdrs) - out_hdrs = pointer_to_array(ref_hdrs[], ref_size[]) - out_arrays = [NDArray(MX_NDArrayHandle(x)) for x in out_hdrs] - - arg_names = list_arguments(symbol) - @assert(length(arg_names) == length(unique(arg_names)), "Duplicated names in arguments: $arg_names") - arg_dict = Dict{Base.Symbol,NDArray}(zip(arg_names, arg_arrays)) - - aux_names = list_auxiliary_states(symbol) - @assert(length(aux_names) == length(unique(aux_names)), "Duplicated names in auxiliary states: $aux_names") - aux_dict = Dict{Base.Symbol,NDArray}(zip(aux_names, aux_arrays)) - - Executor(hdr, symbol, arg_arrays, grad_arrays, aux_arrays, out_arrays, arg_dict, aux_dict) -end - -function Base.unsafe_convert(::Type{MX_handle}, obj::Executor) - Base.unsafe_convert(MX_handle, obj.handle) -end -Base.convert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) -Base.cconvert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) - -function _get_ndarray_inputs(arg_key::AbstractString, args::Vector{NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) - @assert(length(args) == length(arg_names), "Length of $arg_key does not match number of arguments") - return (MX_handle[args...], args) -end -function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) - 
args_vec = map(arg_names) do name - arr = get(args, name, nothing) - if !allow_missing - @assert(!isa(arr, Void), "Must specify all arguments in $arg_key ($name is missing)") - end - arr - end - # help the type inference - if allow_missing - args_vec = Union{NDArray,Void}[args_vec...] - else - args_vec = NDArray[args_vec...] - end - args_hdr = MX_handle[(isa(x,Void) ? MX_handle(0) : x) for x in args_vec] - return (args_hdr, args_vec) -end - -#=doc -.. function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) - - Create an :class:`Executor` by binding a :class:`Node` to concrete :class:`NDArray`. - - :param Node sym: the network architecture describing the computation graph. - :param Context ctx: the context on which the computation should run. - :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete - arrays for all the inputs in the network architecture. The inputs typically include - network parameters (weights, bias, filters, etc.), data and labels. See :func:`list_arguments` - and :func:`infer_shape`. 
- :param args_grad: TODO - :param aux_states: - :param grad_req: -=# -@enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 -function bind(self :: Node, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; - args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), - aux_states :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), - grad_req :: Union{GRAD_REQ,Vector{GRAD_REQ},Dict{Base.Symbol,GRAD_REQ}} = GRAD_WRITE) - - arg_names = list_arguments(self) - - args_hdr, args = _get_ndarray_inputs("args", args, arg_names, false) - args_grad_hdr, args_grad = _get_ndarray_inputs("args_grad", args_grad, arg_names, true) - aux_args_hdr, aux_states = _get_ndarray_inputs("aux_states", aux_states, list_auxiliary_states(self), false) - - if isa(grad_req, GRAD_REQ) - reqs = MX_uint[grad_req for i=1:length(args)] - elseif isa(grad_req, Vector{GRAD_REQ}) - @assert(length(grad_req) == length(args)) - reqs = MX_uint[grad_req...] - elseif isa(grad_req, Dict{Base.Symbol, GRAD_REQ}) - reqs = MX_uint[get(grad_req, name, GRAD_NOP) for name in arg_names] - end - - ref_hdr = Ref{MX_handle}(0) - @mxcall(:MXExecutorBind, - (MX_handle, Cint, Cint, MX_uint, Ptr{MX_handle}, Ptr{MX_handle}, Ptr{MX_uint}, - MX_uint, Ptr{MX_handle}, Ref{MX_handle}), - self, ctx.device_type, ctx.device_id, length(args), args_hdr, - args_grad_hdr, reqs, length(aux_states), aux_args_hdr, ref_hdr) - args_grad = convert(Vector{Union{Void,NDArray}}, args_grad) - executor = Executor(MX_ExecutorHandle(ref_hdr[]), self, - args, args_grad, aux_states) -end -function bind(self :: Node; kwargs...) - kwargs = Dict(kwargs) - @assert(haskey(kwargs, :args), "Must specify args") - args = pop!(kwargs, :args) - if haskey(kwargs, :context) - context = pop!(kwargs, :context) - else - context = cpu() - end - bind(self, context, args; kwargs...) -end - -function simple_bind(self :: Node, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) 
- arg_shapes, out_shapes, aux_shapes = infer_shape(self; kwargs...) - @assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") - - arg_arrays = NDArray[zeros(shape, ctx) for shape in arg_shapes] - arg_names = list_arguments(self) - if grad_req == GRAD_NOP - grad_arrays = Dict{Base.Symbol,NDArray}() - else - provided_data_names = [x[1] for x in kwargs] - grad_arrays = Dict{Base.Symbol,NDArray}() - for (name, shape) in zip(arg_names, arg_shapes) - # if not in provided data, should be parameters - if !in(name, provided_data_names) - grad_arrays[name] = zeros(shape, ctx) - end - end - end - - aux_arrays = [zeros(shape, ctx) for shape in aux_shapes] - return bind(self, ctx, arg_arrays, args_grad=grad_arrays, grad_req=grad_req, aux_states=aux_arrays) -end - - -function forward(self :: Executor; is_train::Bool=false, kwargs...) - for (k,v) in kwargs - @assert(k ∈ self.arg_dict, "Unknown argument $k") - @assert(isa(v, NDArray), "Keyword argument $k must be an NDArray") - copy!(self.arg_dict[k], v) - end - - @mxcall(:MXExecutorForward, (MX_handle, Cint), self, is_train) -end - -function backward(self :: Executor) - backward(self, NDArray[]) -end -function backward(self :: Executor, out_grad :: NDArray) - backward(self, [out_grad]) -end -function backward(self :: Executor, out_grads :: Vector{NDArray}) - out_grads = MX_handle[out_grads...] 
- @mxcall(:MXExecutorBackward, (MX_handle, MX_uint, Ptr{MX_handle}), self, length(out_grads), out_grads) -end - - -function copy_params_from(self::Executor, arg_params::Dict{Base.Symbol,NDArray}, - aux_params::Union{Void,Dict{Base.Symbol,NDArray}}=nothing; - allow_extra_params::Bool=false) - for (name, array) in arg_params - if haskey(self.arg_dict, name) - copy!(self.arg_dict[name], array) - else - @assert(allow_extra_params, "Extra params $name not in the arguments") - end - end - - if !isa(aux_params, Void) - for (name, array) in aux_params - if haskey(self.aux_dict, name) - copy!(self.aux_dict[name], array) - else - @assert(allow_extra_params, "Extra auxiliary state $name not recognized") - end - end - end -end diff --git a/src/initializer.jl-e b/src/initializer.jl-e deleted file mode 100644 index 8f78bfba9367..000000000000 --- a/src/initializer.jl-e +++ /dev/null @@ -1,117 +0,0 @@ -#=doc -Initializers -============ -Interface ---------- -=# - -#=doc -.. class:: AbstractInitializer - - The abstract base class for all initializers. - -To define a new initializer, it is -enough to derive a new type, and implement one or more of the following methods: - -.. function:: _init_weight(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. function:: _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. function:: _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. function:: _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - -Or, if full behavior customization is needed, override the following function - -.. 
function:: call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -=# -abstract AbstractInitializer - -function call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - strname = string(name) - if endswith(strname, "bias") - _init_bias(self, name, array) - elseif endswith(strname, "gamma") - _init_gamma(self, name, array) - elseif endswith(strname, "beta") - _init_beta(self, name, array) - elseif endswith(strname, "weight") - _init_weight(self, name, array) - elseif endswith(strname, "moving_mean") - _init_zero(self, name, array) - elseif endswith(strname, "moving_var") - _init_zero(self, name, array) - else - _init_default(self, name, array) - end -end - -function _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - array[:] = 0 -end -function _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - array[:] = 1 -end -function _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - array[:] = 0 -end -function _init_zero(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - array[:] = 0 -end - -#=doc -Built-in initializers ---------------------- -=# -#=doc -.. class:: UniformInitializer - - Initialize weights according to a uniform distribution within the provided scale. -=# -immutable UniformInitializer <: AbstractInitializer - scale :: AbstractFloat -end -#=doc -.. function UniformInitializer(scale=0.07) - - Construct a :class:`UniformInitializer` with the specified scale. -=# -UniformInitializer() = UniformInitializer(0.07) - -function _init_weight(self :: UniformInitializer, name :: Base.Symbol, array :: NDArray) - rand!(-self.scale, self.scale, array) -end - -#=doc -.. class:: NormalInitializer - - Initialize weights according to a univariate Gaussian distribution. -=# -immutable NormalInitializer <: AbstractInitializer - μ :: AbstractFloat - σ :: AbstractFloat -end -#=doc -.. 
function:: NormalIninitializer(; mu=0, sigma=0.01) - - Construct a :class:`NormalInitializer` with mean ``mu`` and variance ``sigma``. -=# -NormalInitializer(; mu=0, sigma=0.01) = NormalInitializer(mu, sigma) - -function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) - randn!(self.μ, self.σ, array) -end - -#=doc -.. class:: XaiverInitializer - - The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding - the difficulty of training deep feedforward neuralnetworks*. -=# -immutable XaiverInitializer <: AbstractInitializer -end - -function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) - dims = size(array) - fan_in = prod(dims[2:end]) - fan_out = dims[1] - scale = sqrt(3 / (fan_in + fan_out)) - rand!(-scale, scale, array) -end diff --git a/src/io.jl-e b/src/io.jl-e deleted file mode 100644 index 5c59e96bb611..000000000000 --- a/src/io.jl-e +++ /dev/null @@ -1,641 +0,0 @@ -#=doc -Data Providers -============== -Interface ---------- - -Data providers are wrappers that load external data, be it images, text, or general tensors, -and split it into mini-batches so that the model can consume the data in a uniformed way. -=# - -#=doc -.. class:: AbstractDataProvider - - The root type for all data provider. A data provider should implement the following interfaces: - - .. function:: get_batch_size(provider) -> Int - - :param AbstractDataProvider provider: the data provider. - :return: the mini-batch size of the provided data. All the provided data should have the - same mini-batch size (i.e. the last dimension). - - .. function:: provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} - - :param AbstractDataProvider provider: the data provider. - :return: a vector of (name, shape) pairs describing the names of the data it provides, and - the corresponding shapes. - - .. 
function:: provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} - - :param AbstractDataProvider provider: the data provider. - :return: a vector of (name, shape) pairs describing the names of the labels it provides, and - the corresponding shapes. - - The difference between *data* and *label* is that during - training stage, both *data* and *label* will be feeded into the model, while during - prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and - of any shapes. The provided data and label names here should match the input names in a target - :class:`Node`. - - A data provider should also implement the Julia iteration interface, in order to allow iterating - through the data set. The provider will be called in the following way: - - .. code-block:: julia - - for batch in eachbatch(provider) - data = get_data(provider, batch) - end - - which will be translated by Julia compiler into - - .. code-block:: julia - - state = Base.start(eachbatch(provider)) - while !Base.done(provider, state) - (batch, state) = Base.next(provider, state) - data = get_data(provider, batch) - end - - By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface - is implemented on the provider type itself. But the extra layer of abstraction allows us to - implement a data provider easily via a Julia ``Task`` coroutine. - The detailed interface function is listed below: - - .. function:: Base.eltype(provider) -> AbstractDataBatch - - :param AbstractDataProvider provider: the data provider. - :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. - - .. function:: Base.start(provider) -> AbstractDataProviderState - - :param AbstractDataProvider provider: the data provider. - - This function is always called before iterating into the dataset. It should initialize - the iterator, reset the index, and do data shuffling if needed. - - .. 
function:: Base.done(provider, state) -> Bool - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. - :return: true if there is no more data to iterate in this dataset. - - .. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) - - :param AbstractDataProvider provider: the data provider. - :return: the current data batch, and the state for the next iteration. - - Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that - is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this - case, you can safely assume that - - * :func:`Base.start` will always be called, and called only once before the iteration starts. - * :func:`Base.done` will always be called at the beginning of every iteration and always be called once. - * If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with - a call to :func:`Base.start`. - * :func:`Base.next` will always be called only once in each iteration. It will always be called after - one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will - not be called. - - With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation - of the built-in :class:`MXDataProvider` for example. - - .. caution:: - - Please do not use the one data provider simultaneously in two different places, either in parallel, - or in a nested loop. For example, the behavior for the following code is undefined - - .. code-block:: julia - - for batch in data - # updating the parameters - - # now let's test the performance on the training set - for b2 in data - # ... - end - end -=# -abstract AbstractDataProvider - -#=doc -.. class:: AbstractDataProviderState - - Base type for data provider states. 
-=# -abstract AbstractDataProviderState - -#=doc -.. class:: AbstractDataBatch - - Base type for a data mini-batch. It should implement the following interfaces: - - .. function:: count_samples(provider, batch) -> Int - - :param AbstractDataBatch batch: the data batch object. - :return: the number of samples in this batch. This number should be greater than 0, but - less than or equal to the batch size. This is used to indicate at the end of - the data set, there might not be enough samples for a whole mini-batch. - - .. function:: get_data(provider, batch) -> Vector{NDArray} - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :return: a vector of data in this batch, should be in the same order as declared in - :func:`provide_data() `. - - The last dimension of each :class:`NDArray` should always match the batch_size, even when - :func:`count_samples` returns a value less than the batch size. In this case, - the data provider is free to pad the remaining contents with any value. - - .. function:: get_label(provider, batch) -> Vector{NDArray} - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :return: a vector of labels in this batch. Similar to :func:`get_data`. - - - The following utility functions will be automatically defined. - - .. function:: get(provider, batch, name) -> NDArray - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :param Base.Symbol name: the name of the data to get, should be one of the names - provided in either :func:`provide_data() ` - or :func:`provide_label() `. - :return: the corresponding data array corresponding to that name. - - .. function:: load_data!(provider, batch, targets) - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. 
- :param targets: the targets to load data into. - :type targets: Vector{Vector{SlicedNDArray}} - - The targets is a list of the same length as number of data provided by this provider. - Each element in the list is a list of :class:`SlicedNDArray`. This list described a - spliting scheme of this data batch into different slices, each slice is specified by - a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch - that should be loaded into the corresponding *ndarray*. - - This utility function is used in data parallelization, where a mini-batch is splited - and computed on several different devices. - - .. function:: load_label!(provider, batch, targets) - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :param targets: the targets to load label into. - :type targets: Vector{Vector{SlicedNDArray}} - - The same as :func:`load_data!`, except that this is for loading labels. -=# -abstract AbstractDataBatch - -#=doc -.. class:: DataBatch - - A basic subclass of :class:`AbstractDataBatch`, that implement the interface by - accessing member fields. -=# -type DataBatch <: AbstractDataBatch - data :: Vector{NDArray} - label :: Vector{NDArray} - count :: Int -end -count_samples(batch :: DataBatch) = batch.count -get_data(batch :: DataBatch) = batch.data -get_label(batch :: DataBatch) = batch.label - -#=doc -.. class:: SlicedNDArray - - A alias type of ``Tuple{UnitRange{Int},NDArray}``. 
-=# -typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} - -function _load_general!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, - targets :: Vector{Vector{SlicedNDArray}}, loader::Function) - data = loader(provider, batch) - for (d_src, d_targets) in zip(data, targets) - for (slice_idx, d_dst) in d_targets - copy!(d_dst, slice(d_src, slice_idx)) - end - end -end -function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, - targets :: Vector{Vector{SlicedNDArray}}) - _load_general!(provider, batch, targets, get_data) -end -function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, - targets :: Vector{Vector{SlicedNDArray}}) - _load_general!(provider, batch, targets, get_label) -end - -function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{NDArray}) - for (src, dst) in zip(get_data(provider, batch), targets) - copy!(dst, src) - end -end -function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{NDArray}) - for (src, dst) in zip(get_label(provider, batch), targets) - copy!(dst, src) - end -end - -import Base.get -function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name :: Base.Symbol) - for (idx, (k, s)) in enumerate(provide_data(provider)) - if name == k - return get_data(provider, batch)[idx] - end - end - for (idx, (k, s)) in enumerate(provide_label(provider)) - if name == k - return get_label(provider, batch)[idx] - end - end - error("$name is not provided by this data provider") -end - -eachbatch(provider :: AbstractDataProvider) = provider - -#=doc -Built-in data providers ------------------------ -=# - -################################################################################ -#=doc -.. class:: ArrayDataProvider - - A convenient tool to iterate :class:`NDArray` or Julia ``Array``. 
-=# -type ArrayDataProvider <: AbstractDataProvider - data_arrays :: Vector{Array{MX_float}} - data_names :: Vector{Base.Symbol} - label_arrays :: Vector{Array{MX_float}} - label_names :: Vector{Base.Symbol} - batch_size :: Int - sample_count :: Int - shuffle :: Bool - data_padding :: MX_float - label_padding :: MX_float - - data_batch :: Vector{NDArray} - label_batch :: Vector{NDArray} -end - -#=doc -.. function:: ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) - - Construct a data provider from :class:`NDArray` or Julia Arrays. - - :param data: the data, could be - - - a :class:`NDArray`, or a Julia Array. This is equivalent to ``:data => data``. - - a name-data pair, like ``:mydata => array``, where ``:mydata`` is the name of the data - and ``array`` is an :class:`NDArray` or a Julia Array. - - a list of name-data pairs. - - :param label: the same as the ``data`` parameter. When this argument is omitted, the constructed - provider will provide no labels. - :param Int batch_size: the batch size, default is 0, which means treating the whole array as a - single mini-batch. - :param Bool shuffle: turn on if the data should be shuffled at every epoch. - :param Real data_padding: when the mini-batch goes beyond the dataset boundary, there might - be less samples to include than a mini-batch. This value specify a scalar to pad the - contents of all the missing data points. - :param Real label_padding: the same as ``data_padding``, except for the labels. -=# -# Julia's type system is sometimes very frustrating. You cannot specify a function -# with argument Vector{Pair} to expect to be matched when calling with the parameter -# [:foo => zeros(2,3), :bar => zeros(3)] because the type inference gives very specific -# results, about the parametric type in the Pair{T1,T2} type, thus does not match the -# generic Pair type. In general, Int <: Number but Vector{Int} <: Vector{Number} is not -# true. So let us just use Any here... 
-function ArrayDataProvider(data::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) - ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle, data_padding=data_padding, label_padding=label_padding) -end -function ArrayDataProvider(data::Any, label::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) - asarr{T}(arr :: Array{T}) = convert(Array{MX_float}, arr) - asarr(arr :: NDArray) = copy(arr) - - if isa(data, Union{NDArray, Array}) && eltype(data) <: Real - data_names = [:data] - data_arrays = Array{MX_float}[asarr(data)] - elseif isa(data, Pair) - @assert isa(data.first, Base.Symbol) && isa(data.second, Union{NDArray, Array}) - data_names = [data.first] - data_arrays = Array{MX_float}[asarr(data.second)] - elseif isa(data, Vector) || isa(data, Tuple) - map(data) do d - @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) - end - data_names = Base.Symbol[d.first for d in data] - data_arrays = Array{MX_float}[asarr(d.second) for d in data] - else - error("Invalid data argument type") - end - - if isa(label, Union{NDArray, Array}) && eltype(label) <: Real - label_names = [:softmax_label] - label_arrays = Array{MX_float}[asarr(label)] - elseif isa(label, Pair) - @assert isa(label.first, Base.Symbol) && isa(label.second, Union{NDArray, Array}) - label_names = [label.first] - label_arrays = Array{MX_float}[asarr(label.second)] - elseif isa(label, Vector) || isa(label, Tuple) - map(label) do d - @assert isa(d, Pair) && isa(d.first, Base.Symbol) && isa(d.second, Union{NDArray, Array}) - end - label_names = Base.Symbol[d.first for d in label] - label_arrays = Array{MX_float}[asarr(d.second) for d in label] - else - error("Invalid label argument type") - end - - @assert length(data_arrays) > 0 - sample_count = size(data_arrays[1])[end] - for i = 1:length(data_names) - @assert(size(data_arrays[i])[end] == sample_count, - "Number of samples in 
$(data_names[i]) is mismatch with $(data_names[1])") - end - for i = 1:length(label_names) - @assert(size(label_arrays[i])[end] == sample_count, - "Number of samples in $(label_names[i]) is mismatch with $(data_names[1])") - end - - if batch_size == 0 - batch_size = sample_count - end - @assert 0 < batch_size <= sample_count - - function gen_batch_nds(arrs :: Vector{Array{MX_float}}, bsize :: Int) - map(arrs) do arr - shape = size(arr) - empty(shape[1:end-1]..., bsize) - end - end - - data_batch = gen_batch_nds(data_arrays, batch_size) - label_batch = gen_batch_nds(label_arrays, batch_size) - - # reshape data and labels into 2D tensors, so that it is easier to work with them - data_arrays = map(data_arrays) do arr - reshape(arr, prod(size(arr)[1:end-1]), size(arr)[end]) - end - label_arrays = map(label_arrays) do arr - reshape(arr, prod(size(arr)[1:end-1]), size(arr)[end]) - end - - ArrayDataProvider(data_arrays, data_names, label_arrays, label_names, batch_size, - sample_count, shuffle, data_padding, label_padding, data_batch, label_batch) -end - -function provide_data(provider::ArrayDataProvider) - return collect(zip(provider.data_names, map(size, provider.data_batch))) -end -function provide_label(provider::ArrayDataProvider) - return collect(zip(provider.label_names, map(size, provider.label_batch))) -end -get_batch_size(provider::ArrayDataProvider) = provider.batch_size - -immutable ArrayDataProviderState <: AbstractDataProviderState - curr_idx :: Int -end - -function Base.eltype(provider :: ArrayDataProvider) - ArrayDataProviderState -end - -function Base.start(provider :: ArrayDataProvider) - if provider.shuffle - # re-shuffle all data - idx_perm = randperm(provider.sample_count) - provider.data_arrays = map(x->x[:,idx_perm], provider.data_arrays) - provider.label_arrays = map(x->x[:,idx_perm], provider.label_arrays) - end - - return ArrayDataProviderState(1) -end - -function Base.done(provider::ArrayDataProvider, state :: ArrayDataProviderState) - return 
state.curr_idx > provider.sample_count -end - -immutable ArrayDataBatch <: AbstractDataBatch - idx :: UnitRange{Int} -end -function Base.next(provider :: ArrayDataProvider, state :: ArrayDataProviderState) - idx = state.curr_idx:min(state.curr_idx+provider.batch_size-1, provider.sample_count) - return (ArrayDataBatch(idx), ArrayDataProviderState(idx.stop+1)) -end - -function count_samples(provider :: ArrayDataProvider, batch :: ArrayDataBatch) - return length(batch.idx) -end - -function get_data(provider :: ArrayDataProvider, batch :: ArrayDataBatch) - for (src, dst) in zip(provider.data_arrays, provider.data_batch) - copy_ignore_shape!(dst[1:length(batch.idx)], src[:, batch.idx]) - if length(batch.idx) < provider.batch_size - dst[length(batch.idx)+1:provider.batch_size] = provider.data_padding - end - end - return provider.data_batch -end -function get_label(provider :: ArrayDataProvider, batch :: ArrayDataBatch) - for (src, dst) in zip(provider.label_arrays, provider.label_batch) - copy_ignore_shape!(dst[1:length(batch.idx)], src[:, batch.idx]) - if length(batch.idx) < provider.batch_size - dst[length(batch.idx)+1:provider.batch_size] = provider.label_padding - end - end - return provider.label_batch -end - - -#=doc -libmxnet data providers ------------------------ -=# - -################################################################################ -#=doc -.. class:: MXDataProvider - - A data provider that wrap built-in data iterators from libmxnet. See below for - a list of built-in data iterators. 
-=# -type MXDataProvider <: AbstractDataProvider - handle :: MX_DataIterHandle - data_shape :: Vector{Tuple{Base.Symbol, Tuple}} - label_shape:: Vector{Tuple{Base.Symbol, Tuple}} - batch_size :: Int - - # those two a auxiliary variables to help avoid calling reset - # but still pre-fetch first batch to get shape information - first_epoch:: Bool - first_batch:: Bool -end - -function _reset_data_iter(handle :: MX_DataIterHandle) - @mxcall(:MXDataIterBeforeFirst, (MX_handle,), handle) -end -function _iter_next(handle :: MX_DataIterHandle) - ref_ret = Ref{Cint}(0) - @mxcall(:MXDataIterNext, (MX_handle, Ref{Cint}), handle, ref_ret) - return Bool(ref_ret[]) -end -function _get_data(handle :: MX_DataIterHandle) - ref_hdr = Ref{MX_handle}(0) - @mxcall(:MXDataIterGetData, (MX_handle, Ref{MX_handle}), handle, ref_hdr) - return NDArray(MX_NDArrayHandle(ref_hdr[]), false) -end -function _get_label(handle :: MX_DataIterHandle) - ref_hdr = Ref{MX_handle}(0) - @mxcall(:MXDataIterGetLabel, (MX_handle, Ref{MX_handle}), handle, ref_hdr) - return NDArray(MX_NDArrayHandle(ref_hdr[]), false) -end - -function MXDataProvider(handle :: MX_DataIterHandle; - data_name :: Base.Symbol=:data, - label_name :: Union{Base.Symbol,Void}=:softmax_label, - kwargs...) 
# for convenience, we ignore the rest keyword arguments - # init iterator, load the first batch and get shapes - @assert(_iter_next(handle), "Failed to load the first batch in MXDataProvider") - data_shape = Tuple{Base.Symbol, Tuple}[(data_name, size(_get_data(handle)))] - if !isa(label_name, Void) - label_shape = Tuple{Base.Symbol, Tuple}[(label_name::Base.Symbol, size(_get_label(handle)))] - else - label_shape = Tuple{Base.Symbol, Tuple}[] - end - - MXDataProvider(handle, data_shape, label_shape, data_shape[1][2][end], true, true) -end - -provide_data(provider::MXDataProvider) = provider.data_shape -provide_label(provider::MXDataProvider) = provider.label_shape -get_batch_size(provider::MXDataProvider) = provider.batch_size - -type MXDataProviderState <: AbstractDataProviderState - has_next :: Bool -end -immutable MXDataBatch <: AbstractDataBatch -end - -function Base.eltype(provider :: MXDataProvider) - MXDataBatch -end -function Base.start(provider :: MXDataProvider) - if !provider.first_epoch - _reset_data_iter(provider.handle) - else - provider.first_epoch = false - end - - return MXDataProviderState(true) -end -function Base.done(provider :: MXDataProvider, state :: MXDataProviderState) - if provider.first_batch - state.has_next = true - provider.first_batch = false - else - state.has_next = _iter_next(provider.handle) - end - return !state.has_next -end -function Base.next(provider :: MXDataProvider, state :: MXDataProviderState) - return (MXDataBatch(), state) -end - -function get_data(provider :: MXDataProvider, batch :: MXDataBatch) - return NDArray[_get_data(provider.handle)] -end -function get_label(provider :: MXDataProvider, batch :: MXDataBatch) - return NDArray[_get_label(provider.handle)] -end -function count_samples(provider :: MXDataProvider, batch :: MXDataBatch) - ref_pad = Ref{Cint}(0) - @mxcall(:MXDataIterGetPadNum, (MX_handle, Ref{Cint}), provider.handle, ref_pad) - return provider.batch_size - Int(ref_pad[]) -end - -#=doc 
-**autogen:EMBED:io:EMBED:autogen** -=# -function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) - ref_name = Ref{char_p}(0) - ref_desc = Ref{char_p}(0) - ref_narg = Ref{MX_uint}(0) - ref_arg_names = Ref{char_pp}(0) - ref_arg_types = Ref{char_pp}(0) - ref_arg_descs = Ref{char_pp}(0) - - @mxcall(:MXDataIterGetIterInfo, - (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), - hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) - - iter_name = symbol(bytestring(ref_name[])) - - if gen_docs - if endswith(string(iter_name), "Iter") - f_desc = "Can also be called with the alias ``$(string(iter_name)[1:end-4] * "Provider")``.\n" - else - f_desc = "" - end - f_desc *= bytestring(ref_desc[]) * "\n\n" - f_desc *= ":param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data.\n" - f_desc *= ":param Base.Symbol label_name: keyword argument, default ``:softmax_label``. " * - "The name of the label. Could be ``nothing`` if no label is presented in this dataset.\n\n" - f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":return: the constructed :class:`MXDataProvider`." - return (iter_name, f_desc) - end - - defun = quote - function $iter_name(; kwargs...) - arg_keys = AbstractString[string(k) for (k,v) in kwargs] - arg_vals = AbstractString[dump_mx_param(v) for (k,v) in kwargs] - ref_hdr = Ref{MX_handle}(0) - - @mxcall(:MXDataIterCreateIter, (MX_handle, MX_uint, char_pp, char_pp, Ref{MX_handle}), - $hdr, length(arg_keys), arg_keys, arg_vals, ref_hdr) - - return MXDataProvider(MX_DataIterHandle(ref_hdr[]); kwargs...) 
- end - end - eval(defun) - - # add an alias XXXProvider => XXXIter - if endswith(string(iter_name), "Iter") - alias_name = symbol(string(iter_name)[1:end-4] * "Provider") - eval(:($alias_name = $iter_name)) - end -end - -function _import_io_iterators(;gen_docs::Bool=false) - n_ref = Ref{MX_uint}(0) - h_ref = Ref{Ptr{MX_handle}}(0) - @mxcall(:MXListDataIters, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) - - n_creators = n_ref[] - h_creators = pointer_to_array(h_ref[], n_creators) - - if gen_docs - docs = Dict{Base.Symbol, AbstractString}() - end - - for i = 1:n_creators - creator_hdr = h_creators[i] - ret = _define_data_iter_creator(creator_hdr; gen_docs=gen_docs) - if gen_docs - docs[ret[1]] = ret[2] - end - end - - if gen_docs - return docs - end -end diff --git a/src/kvstore.jl-e b/src/kvstore.jl-e deleted file mode 100644 index d52433f567b2..000000000000 --- a/src/kvstore.jl-e +++ /dev/null @@ -1,132 +0,0 @@ -type KVStore - handle :: MX_KVStoreHandle - updater_c :: Ptr{Void} - updater :: Function - - KVStore(hdr :: MX_KVStoreHandle) = new(hdr, Ptr{Void}(0)) -end - -function KVStore(kv_type::Base.Symbol = :local) - #@assert(kv_type ∈ [:local]) # TODO: update with allowed types - - ref_hdr = Ref{MX_handle}(0) - kv_type = string(kv_type) - @mxcall(:MXKVStoreCreate, (char_p, Ref{MX_handle}), kv_type, ref_hdr) - return KVStore(MX_KVStoreHandle(ref_hdr[])) -end -function Base.unsafe_convert(::Type{MX_handle}, obj::KVStore) - Base.unsafe_convert(MX_handle, obj.handle) -end -Base.convert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) -Base.cconvert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) - -function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) - @assert length(keys) == length(vals) - keys_flt = Int[] - vals_flt = NDArray[] - for (k,v) in zip(keys, vals) - append!(keys_flt, Base.ones(Int, length(v))*k) - append!(vals_flt, v) - end - return (keys_flt, vals_flt) -end - -function init!(self :: KVStore, 
key :: Int, val :: NDArray) - init!(self, [key], [val]) -end -function init!(self :: KVStore, key :: Int, vals :: Vector{NDArray}) - init!(self, Base.ones(Int, length(vals))*key, vals) -end -function init!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) - init!(self, _flatten_kvlist(keys, vals)...) -end -function init!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{NDArray}) - @assert length(keys) == length(vals) - keys = Cint[keys...] - vals = MX_handle[vals...] - @mxcall(:MXKVStoreInit, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}), - self, length(keys), keys, vals) -end - -import Base.push! -function push!(self :: KVStore, key :: Int, val :: NDArray; priority :: Int = 0) - push!(self, [key], [val]; priority = priority) -end -function push!(self :: KVStore, key :: Int, vals :: Vector{NDArray}; priority :: Int = 0) - push!(self, Base.ones(Int, length(vals))*key, vals; priority = priority) -end -function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}; priority::Int=0) - push!(self, _flatten_kvlist(keys, vals)...; priority = priority) -end -function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{NDArray}; priority::Int=0) - @assert length(keys) == length(vals) - keys = Cint[keys...] - vals = MX_handle[vals...] 
- @mxcall(:MXKVStorePush, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), - self, length(keys), keys, vals, priority) -end - -function pull!(self :: KVStore, key :: Int, out :: NDArray; priority :: Int = 0) - pull!(self, [key], [out]) -end -function pull!(self :: KVStore, key :: Int, outs :: Vector{NDArray}; priority :: Int = 0) - pull!(self, Base.ones(Int, length(outs))*key, outs; priority = priority) -end -function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{Vector{NDArray}}; priority::Int=0) - pull!(self, _flatten_kvlist(keys, outs)...; priority = priority) -end -function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{NDArray}; priority::Int=0) - @assert length(keys) == length(outs) - keys = Cint[keys...] - outs = MX_handle[outs...] - @mxcall(:MXKVStorePull, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), - self, length(keys), keys, outs, priority) -end - - -function get_type(self :: KVStore) - type_ref = Ref{char_p}(0) - @mxcall(:MXKVStoreGetType, (MX_handle, Ref{char_p}), self, type_ref) - return symbol(bytestring(type_ref[])) -end - -function get_num_workers(self :: KVStore) - ref_size = Ref{Cint}(0) - @mxcall(:MXKVStoreGetGroupSize, (MX_handle, Ref{Cint}), self, ref_size) - return Int(ref_size[]) -end - -function get_rank(self :: KVStore) - ref_rank = Ref{Cint}(0) - @mxcall(:MXKVStoreGetRank, (MX_handle, Ref{Cint}), self, ref_rank) - return Int(ref_rank[]) -end - - -# TODO: Currently Julia does not support closure in c-callbacks, so we are making use of the -# extra handle parameter of the API to pass the updater object around. Fix this when someday -# full closure cfunction is supported in Julia. 
-function _kvstore_update_wrapper(index::Cint, nd_recv::MX_handle, nd_local::MX_handle, updater::Ptr{Void}) - updater_func = unsafe_pointer_to_objref(updater) :: Function - updater_func(Int(index), NDArray(MX_NDArrayHandle(nd_recv)), NDArray(MX_NDArrayHandle(nd_local))) - return nothing -end -function set_updater(self :: KVStore, updater :: Function) - self.updater = updater # keep a reference to the julia object so that updater_c is kept valid - self.updater_c = cfunction(_kvstore_update_wrapper, Void, (Cint, MX_handle, MX_handle, Ptr{Void})) - - @mxcall(:MXKVStoreSetUpdater, (MX_handle, Ptr{Void}, Any), - self, self.updater_c, updater) -end - -function set_optimizer(self :: KVStore, optimizer :: AbstractOptimizer) - ref_is_worker = Ref{Cint}(0) - @mxcall(:MXKVStoreIsWorkerNode, (Ref{Cint},), ref_is_worker) - is_worker = ref_is_worker[] - - if ismatch(r"dist", string(get_type(self))) && is_worker - # TODO - else - set_updater(self, get_updater(optimizer)) - end -end diff --git a/src/metric.jl-e b/src/metric.jl-e deleted file mode 100644 index c88239ab8cc8..000000000000 --- a/src/metric.jl-e +++ /dev/null @@ -1,37 +0,0 @@ -abstract AbstractEvalMetric - -type Accuracy <: AbstractEvalMetric - acc_sum :: Float64 - n_sample :: Int - - Accuracy() = new(0.0, 0) -end - -function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) - label = copy(label) - pred = copy(pred) - - n_sample = size(pred)[end] - metric.n_sample += n_sample - for i = 1:n_sample - klass = indmax(pred[:,i]) - metric.acc_sum += (klass-1) == label[i] - end -end - -function update!(metric :: Accuracy, labels :: Vector{NDArray}, preds :: Vector{NDArray}) - @assert length(labels) == length(preds) - for i = 1:length(labels) - _update_single_output(metric, labels[i], preds[i]) - end -end - -import Base: get -function get(metric :: Accuracy) - return [(:accuracy, metric.acc_sum / metric.n_sample)] -end - -function reset!(metric :: Accuracy) - metric.acc_sum = 0.0 - metric.n_sample = 
0 -end diff --git a/src/model.jl-e b/src/model.jl-e deleted file mode 100644 index 93189124d9b0..000000000000 --- a/src/model.jl-e +++ /dev/null @@ -1,566 +0,0 @@ -#=doc -Models -====== - -The model API provides convenient high-level interface to do training and predicting on -a network described using the symbolic API. -=# - -#=doc -.. class:: AbstractModel - - The abstract super type of all models in MXNet.jl. -=# -abstract AbstractModel - -#=doc -.. class:: FeedForward - - The feedforward model provides convenient interface to train and predict on - feedforward architectures like multi-layer MLP, ConvNets, etc. There is no - explicitly handling of *time index*, but it is relatively easy to implement - unrolled RNN / LSTM under this framework (**TODO**: add example). For models - that handles sequential data explicitly, please use **TODO**... -=# -type FeedForward <: AbstractModel - arch :: Node - ctx :: Vector{Context} - - arg_params :: Dict{Base.Symbol, NDArray} - aux_params :: Dict{Base.Symbol, NDArray} - - pred_exec :: Union{Executor, Void} - - # leave the rest fields undefined - FeedForward(arch :: Node, ctx :: Vector{Context}) = new(arch, ctx) -end - -"""Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector - of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that - piece. -""" -function _split_inputs(batch_size :: Int, n_split :: Int) - @assert(batch_size >= n_split) - per_split = floor(Int, batch_size / n_split) - counts = Base.zeros(Int, n_split)+per_split - extra = batch_size - sum(counts) - counts[1:extra] += 1 - - cum = [0, cumsum(counts)...] - idx = [cum[i-1]+1:cum[i] for i = 2:length(cum)] - return idx -end - -#=doc -.. function:: FeedForward(arch :: Node, ctx) - - :param arch: the architecture of the network constructed using the symbolic API. - :param ctx: the devices on which this model should do computation. 
It could be a single :class:`Context` - or a list of :class:`Context` objects. In the latter case, data parallelization will be used - for training. If no context is provided, the default context ``cpu()`` will be used. -=# -function FeedForward(arch :: Node; context :: Union{Context, Vector{Context}, Void} = nothing) - if isa(context, Void) - context = [Context(CPU)] - elseif isa(context, Context) - context = [context] - end - FeedForward(arch, context) -end - -#=doc -.. function:: init_model(self, initializer; overwrite=false, input_shapes...) - - Initialize the weights in the model. - - This method will be called automatically when training a model. So there is usually no - need to call this method unless one needs to inspect a model with only randomly initialized - weights. - - :param FeedForward self: the model to be initialized. - :param AbstractInitializer initializer: an initializer describing how the weights should be initialized. - :param Bool overwrite: keyword argument, force initialization even when weights already exists. - :param input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. - For example, ``data=(28,28,1,100), label=(100,)``. -=# -function init_model(self :: FeedForward, initializer :: AbstractInitializer; overwrite::Bool=false, input_shapes...) - # all arg names, including data, label, and parameters - arg_names = list_arguments(self.arch) - - input_names = [x[1] for x in input_shapes] - - param_names = setdiff(arg_names, input_names) - aux_names = list_auxiliary_states(self.arch) - - arg_defined = true - aux_defined = true - - arg_shapes, out_shapes, aux_shapes = infer_shape(self.arch; input_shapes...) 
- if !isdefined(self, :arg_params) - param_name_shapes = filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) - self.arg_params = Dict([name => empty(shape) for (name,shape) in param_name_shapes]) - arg_defined = false - end - if !isdefined(self, :aux_params) - self.aux_params = Dict([name => empty(shape) for (name,shape) in zip(aux_names,aux_shapes)]) - aux_defined = false - end - - # initialize the contents of the parameters - if !arg_defined || overwrite - for (k,v) in self.arg_params - initializer(k, v) - end - end - if !aux_defined || overwrite - for (k,v) in self.aux_params - initializer(k, v) - end - end - - return (arg_names, param_names, aux_names) -end - -function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_shapes...) - if !isdefined(self, :pred_exec) || isa(self.pred_exec, Void) || overwrite - if !isdefined(self, :arg_params) || !isdefined(self, :aux_params) - @assert(false, "Model weights not defined, please init or train the model, or load from file") - end - - # the predictor use only the first device - self.pred_exec = simple_bind(self.arch, self.ctx[1]; grad_req=GRAD_NOP, data_shapes...) - copy_params_from(self.pred_exec, self.arg_params, self.aux_params) - else - # make sure the new setup is compatible with the existing one - for (d_name, d_shape) in data_shapes - @assert(d_shape == size(self.pred_exec.arg_dict[d_name]), - "Shape of $d_name mismatch with existing predictor, use overwrite=true overwrite existing predictor") - end - end -end - -#=doc -.. function:: - predict(self, data; overwrite=false, callback=nothing) - - Predict using an existing model. The model should be already initialized, or trained or loaded from - a checkpoint. There is an overloaded function that allows to pass the callback as the first argument, - so it is possible to do - - .. code-block:: julia - - predict(model, data) do batch_output - # consume or write batch_output to file - end - - :param FeedForward self: the model. 
- :param AbstractDataProvider data: the data to perform prediction on. - :param Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory - allocation of the :class:`Executor` depends on the mini-batch size of the test - data provider. If you call predict twice with data provider of the same batch-size, - then the executor can be re-used. Otherwise, if ``overwrite`` is false (default), - an error will be raised; if ``overwrite`` is set to true, a new :class:`Executor` - will be created to replace the old one. - - .. note:: - - Prediction is computationally much less costly than training, so the bottleneck sometimes becomes the IO - for copying mini-batches of data. Since there is no concern about convergence in prediction, it is better - to set the mini-batch size as large as possible (limited by your device memory) if prediction speed is a - concern. - - For the same reason, currently prediction will only use the first device even if multiple devices are - provided to construct the model. - - :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` -=# -function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = false) - predict(self, data; overwrite = overwrite, callback=callback) -end -function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=false, callback::Union{Function,Void}=nothing) - data_shapes = provide_data(data) - data_names = [x[1] for x in data_shapes] - _setup_predictor(self, overwrite; data_shapes...) 
- - batch_size = get_batch_size(data) - data_arrays = [self.pred_exec.arg_dict[name] for name in data_names] - output_list = [Array{MX_float}[] for i=1:length(self.pred_exec.outputs)] - for batch in eachbatch(data) - load_data!(data, batch, data_arrays) - forward(self.pred_exec, is_train=false) - if isa(callback, Void) - # no callback, accumulate the data and return at the end - for (o_list, o_nd) in zip(output_list, self.pred_exec.outputs) - push!(o_list, copy(slice(o_nd, 1:count_samples(data, batch)))) - end - else - outputs = self.pred_exec.outputs - if length(outputs) == 1 - outputs = outputs[1] - end - callback(outputs) - end - end - - if !isa(callback, Void) - # callback exists, do not accumulate data - return nothing - end - - if isempty(output_list) - # maybe model does not have outputs - return nothing - end - if isempty(output_list[1]) - # maybe no output because data is empty - return length(output_list) == 1 ? output_list[1] : output_list - end - - # concatenate along mini-batches - output_arrays = [cat(ndims(x[1]), x...) for x in output_list] - if length(output_arrays) == 1 - # only 1 output, return it directly, instead of a list - output_arrays = output_arrays[1] - end - return output_arrays -end - -function _init_model(self :: FeedForward, data :: AbstractDataProvider, initializer :: AbstractInitializer, overwrite :: Bool) - init_model(self, initializer; overwrite=overwrite, [provide_data(data)..., provide_label(data)...]...) 
-end - -function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) - if num_device == 1 && !ismatch(r"dist", string(kv_type)) - kv = nothing - else - if kv_type == :local - max_size = maximum([prod(size(param)) for (k,param) in arg_params]) - if max_size < 1024 * 1024 * 16 - kv_type = :local_update_cpu - else - kv_type = :local_allreduce_cpu - end - info("Auto-select kvstore type = $kv_type") - end - kv = KVStore(kv_type) - end - - update_on_kvstore = true - if isa(kv, Void) || ismatch(r"local_allreduce", string(get_type(kv))) - update_on_kvstore = false - end - - return (kv, update_on_kvstore) -end - -@defstruct TrainingOptions Any ( - initializer :: AbstractInitializer = UniformInitializer(0.01), - n_epoch :: Int = 10, - eval_data :: Union{Void, AbstractDataProvider} = nothing, - eval_metric :: AbstractEvalMetric = Accuracy(), - kvstore :: Union{Base.Symbol, KVStore} = :local, - force_init :: Bool = false, - callbacks :: Vector{AbstractCallback} = AbstractCallback[], -) - -function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, - state::OptimizationState, type_filter::Type) - map(callbacks) do cb - if isa(cb, type_filter) - if type_filter == AbstractEpochCallback - # epoch callback have extra access to the model object - cb(self, state) - else - cb(state) - end - end - end -end - -#=doc -.. function:: train(model :: FeedForward, ...) - - Alias to :func:`fit`. -=# -function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) - fit(self, optimizer, data; kwargs...) -end - -#=doc -.. function:: fit(model :: FeedForward, optimizer, data; kwargs...) - - Train the ``model`` on ``data`` with the ``optimizer``. - - :param FeedForward model: the model to be trained. - :param AbstractOptimizer optimizer: the optimization algorithm to use. - :param AbstractDataProvider data: the training data provider. 
- :param Int n_epoch: default 10, the number of full data-passes to run. - :param AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for - the validation set. - :param AbstractEvalMetric eval_metric: keyword argument, default ``Accuracy()``. The metric used - to evaluate the training performance. If ``eval_data`` is provided, the same metric is also - calculated on the validation set. - :param kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients - and parameters when multiple devices are used for training. - :type kvstore: :class:`KVStore` or ``Base.Symbol`` - :param AbstractInitializer initializer: keyword argument, default ``UniformInitializer(0.01)``. - :param Bool force_init: keyword argument, default false. By default, the random initialization using the - provided ``initializer`` will be skipped if the model weights already exists, maybe from a previous - call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When - this option is set, it will always do random initialization at the begining of training. - :param callbacks: keyword argument, default ``[]``. Callbacks to be invoked at each epoch or mini-batch, - see :class:`AbstractCallback`. - :type callbacks: ``Vector{AbstractCallback}`` -=# -function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) - opts = TrainingOptions(; kwargs...) 
- - info("Start training on $(self.ctx)") - - batch_size = get_batch_size(data) - num_dev = length(self.ctx) - slices = _split_inputs(batch_size, num_dev) - - # initialize parameters - info("Initializing parameters...") - arg_names, param_names, aux_names = _init_model(self, data, opts.initializer, opts.force_init) - - # setup kvstore - kvstore = opts.kvstore - if isa(kvstore, Base.Symbol) - info("Creating KVStore...") - kvstore, update_on_kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params) - end - - train_execs = Array(Executor, num_dev) - for i = 1:num_dev - data_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)] - label_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)] - train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=GRAD_WRITE, data_shapes..., label_shapes...) - - copy_params_from(train_execs[i], self.arg_params, self.aux_params) - end - - # set up input data structures - data_names = [x[1] for x in provide_data(data)] - label_names = [x[1] for x in provide_label(data)] - - data_arrays = [SlicedNDArray[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] - for name in data_names] - label_arrays = [SlicedNDArray[(slices[i], exec.arg_dict[name]) for (i,exec) in enumerate(train_execs)] - for name in label_names] - - param_idx = filter(i -> in(arg_names[i], param_names), 1:length(arg_names)) - - param_arrays = [NDArray[exec.arg_arrays[i] for exec in train_execs] for i in param_idx] - grad_arrays = [NDArray[exec.grad_arrays[i] for exec in train_execs] for i in param_idx] - aux_arrays = [NDArray[exec.aux_arrays[i] for exec in train_execs] for i = 1:length(aux_names)] - - op_state = OptimizationState(batch_size) - optimizer.state = op_state - - if !update_on_kvstore - updater = get_updater(optimizer) - end - - if !isa(kvstore, Void) - if update_on_kvstore - set_optimizer(kvstore, optimizer) - end - - info("Initializing KVStore...") - # init kv 
with gradients - for idx = 1:length(param_arrays) - param_on_devs = param_arrays[idx] - - init!(kvstore, idx, self.arg_params[param_names[idx]]) - - if update_on_kvstore - # pull weights back - pull!(kvstore, idx, param_on_devs, priority=-idx) - end - end - end - - # set up output and labels in CPU for evaluation metric - output_shapes = [tuple(size(x)[1:end-1]...,batch_size) for x in train_execs[1].outputs] - cpu_dev = Context(CPU) - cpu_output_arrays = [empty(shape, cpu_dev) for shape in output_shapes] - cpu_label_arrays = [empty(shape, cpu_dev) for (name,shape) in provide_label(data)] - - # invoke callbacks on epoch 0 - _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) - - # now start training... - for i_epoch = 1:opts.n_epoch - time_start = time() - reset!(opts.eval_metric) - - op_state.curr_epoch = i_epoch - op_state.curr_batch = 0 - - # invoke callbacks on iteration 0 - _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) - - for batch in eachbatch(data) - load_data!(data, batch, data_arrays) - load_label!(data, batch, label_arrays) - - # forward and backward - for (texec, islice) in zip(train_execs, slices) - forward(texec, is_train=true) - - # copy outputs into cpu ndarray, for evaluation metric - for (cpu_out, dev_out) in zip(cpu_output_arrays, texec.outputs) - copy!(slice(cpu_out, islice), dev_out) - end - - backward(texec) - end - - op_state.curr_iter += 1 - op_state.curr_batch += 1 - optimizer.state = op_state - - # update parameters - for idx = 1:length(param_names) - # gradient synchronization - if !isa(kvstore, Void) - # push gradient, priority is negative index - push!(kvstore, idx, grad_arrays[idx], priority=-idx) - if update_on_kvstore - # pull back the weights - pull!(kvstore, idx, param_arrays[idx], priority=-idx) - else - # pull back the sum-ed gradients, to the same locations - pull!(kvstore, idx, grad_arrays[idx], priority=-idx) - end - end - - if !update_on_kvstore - # manual updating - for i_dev = 
1:num_dev - # create a fake index, so that the updater create states - # for different param AND different devices, TODO(mli) - # use a better solution later - fake_idx = idx * num_dev + i_dev - updater(fake_idx, grad_arrays[idx][i_dev], param_arrays[idx][i_dev]) - end - end - end - - # invoke callbacks after finishing each iteration - _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) - - # update evaluation metric on training set - load_label!(data, batch, cpu_label_arrays) - update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) - end # end of one epoch - - time_stop = time() - info(format("== Epoch {1:0>3d} ==========", i_epoch)) - info("## Training summary") - for (name, value) in get(opts.eval_metric) - info(format("{1:>15s} = {2:.4f}", name, value)) - end - info(format("{1:>15s} = {2:.4f} seconds", "time", time_stop-time_start)) - - # evaluation on validation set - if !isa(opts.eval_data, Void) - # because we are re-using the memory allocated for the training network, - # the batch_size of the validation dataset must be the same as the training - # batch_size - @assert(get_batch_size(opts.eval_data) == batch_size) - - reset!(opts.eval_metric) - for batch in eachbatch(opts.eval_data) - load_data!(opts.eval_data, batch, data_arrays) - - # forward and backward - for (texec, islice) in zip(train_execs, slices) - forward(texec, is_train=true) - - # copy outputs into cpu ndarray, for evaluation metric - for (cpu_out, dev_out) in zip(cpu_output_arrays, texec.outputs) - copy!(slice(cpu_out, islice), dev_out) - end - end - load_label!(opts.eval_data, batch, cpu_label_arrays) - update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) - end - - info("## Validation summary") - for (name, value) in get(opts.eval_metric) - info(format("{1:>15s} = {2:.4f}", name, value)) - end - end - - if i_epoch == opts.n_epoch || any(x->isa(x, AbstractEpochCallback), opts.callbacks) - # copy data back to cpu - for (name, weights) in zip(param_names, 
param_arrays) - # average parameters across devices - weight = +([copy(w, cpu()) for w in weights]...) / length(weights) - copy!(self.arg_params[name], weight) - end - for (name, aux_devs) in zip(aux_names, aux_arrays) - aux_avg = +([copy(aux, cpu()) for aux in aux_devs]...) / length(aux_devs) - copy!(self.aux_params[name], aux_avg) - end - end - _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) - end # end of all epochs -end - -function save_checkpoint(self :: FeedForward, prefix :: AbstractString, state :: OptimizationState) - save_checkpoint(self.arch, self.arg_params, self.aux_params, prefix, state.curr_epoch) -end -function save_checkpoint(sym :: Node, arg_params :: Dict{Base.Symbol, NDArray}, - aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, epoch :: Int) - save("$prefix-symbol.json", sym) - save_dict = merge(Dict([symbol("arg:$k") => v for (k,v) in arg_params]), - Dict([symbol("aux:$k") => v for (k,v) in aux_params])) - save_filename = format("{1}-{2:04d}.params", prefix, epoch) - save(save_filename, save_dict) - info("Saved checkpoint to '$save_filename'") -end - -function load_checkpoint(prefix :: AbstractString, epoch :: Int) - arch = load("$prefix-symbol.json", Node) - saved_dict = load(format("{1}-{2:04d}.params", prefix, epoch), NDArray) - arg_params = Dict{Base.Symbol, NDArray}() - aux_params = Dict{Base.Symbol, NDArray}() - for (k,v) in saved_dict - tp, name = split(string(k), ':') - name = symbol(name) - if tp == "arg" - arg_params[name] = v - else - aux_params[name] = v - end - end - - return (arch, arg_params, aux_params) -end - -function load_checkpoint(prefix :: AbstractString, epoch :: Int, ::Type{FeedForward}) - arch, arg_params, aux_params = load_checkpoint(prefix, epoch) - model = FeedForward(arch) - model.arg_params = arg_params - model.aux_params = aux_params - return model -end - -function load_checkpoint(self :: FeedForward, prefix :: AbstractString, epoch :: Int; overwrite :: Bool = true) - if 
isdefined(self, :arg_params) && isdefined(self, :aux_params) && !overwrite - info("model weights already exists, skip loading... (call with overwrite=true if needed)") - return self - end - - arch, arg_params, aux_params = load_checkpoint(prefix, epoch) - # TODO: is there better way to compare two symbols - @assert(to_json(self.arch) == to_json(arch), "Cannot load from a checkpoint with different network architecture") - self.arg_params = arg_params - self.aux_params = aux_params - return self -end - diff --git a/src/name.jl-e b/src/name.jl-e deleted file mode 100644 index 5ebf10917ce6..000000000000 --- a/src/name.jl-e +++ /dev/null @@ -1,44 +0,0 @@ -abstract AbstractNameManager -typealias NameType Union{Base.Symbol, AbstractString} -typealias NameCounter Dict{Base.Symbol, Int} - -import Base: get! - -# Default implementation for generating a name for a symbol. -# When a name is specified by the user, it will be used. Otherwise, a name -# is automatically generated based on the hint string. 
-function _default_get_name!(counter :: NameCounter, name :: NameType, hint :: NameType) - if isa(name, Base.Symbol) || !isempty(name) - return symbol(name) - end - - hint = symbol(hint) - if !haskey(counter, hint) - counter[hint] = 0 - end - name = symbol("$hint$(counter[hint])") - counter[hint] += 1 - return name -end - -type BasicNameManager <: AbstractNameManager - counter :: NameCounter -end -BasicNameManager() = BasicNameManager(NameCounter()) - -function get!(manager :: BasicNameManager, name :: NameType, hint :: NameType) - _default_get_name!(manager.counter, name, hint) -end - -type PrefixNameManager <: AbstractNameManager - prefix :: Base.Symbol - counter :: NameCounter -end -PrefixNameManager(prefix :: NameType) = PrefixNameManager(symbol(prefix), NameCounter()) - -function get!(manager :: PrefixNameManager, name :: NameType, hint :: NameType) - name = _default_get_name!(manager.counter, name, hint) - return symbol("$(manager.prefix)$name") -end - -DEFAULT_NAME_MANAGER = BasicNameManager() diff --git a/src/ndarray.jl-e b/src/ndarray.jl-e deleted file mode 100644 index 82293d2fdbe9..000000000000 --- a/src/ndarray.jl-e +++ /dev/null @@ -1,800 +0,0 @@ -#=doc -NDArray API -=========== -=# - -# create a NDArray handle of specific shape -function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) - h_ref = Ref{MX_handle}(0) - shape = flipdim(MX_uint[shape...],1) - @mxcall(:MXNDArrayCreate, (Ptr{MX_uint}, MX_uint, Cint, Cint, Cint, Ref{MX_handle}), - shape, length(shape), ctx.device_type, ctx.device_id, delay_alloc, h_ref) - handle = MX_NDArrayHandle(h_ref[]) - return handle -end - -# create a handle to an empty NDArray, this handle can be used to hold -# results returned by libmx API calls -function _ndarray_alloc() - h_ref = Ref{MX_handle}(0) - @mxcall(:MXNDArrayCreateNone, (Ref{MX_handle},), h_ref) - return MX_NDArrayHandle(h_ref[]) -end - -################################################################################ -# 
NDArray Type -################################################################################ -#=doc -.. class:: NDArray - - Wrapper of the ``NDArray`` type in ``libmxnet``. This is the basic building block - of tensor-based computation. - - .. _ndarray-shape-note: - - .. note:: - - since C/C++ use row-major ordering for arrays while Julia follows a - column-major ordering. To keep things consistent, we keep the underlying data - in their original layout, but use *language-native* convention when we talk - about shapes. For example, a mini-batch of 100 MNIST images is a tensor of - C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory - have shape (28,28,1,100). -=# -type NDArray - handle :: MX_NDArrayHandle - writable :: Bool - - function NDArray(handle, writable=true) - new(handle, writable) - end -end - -function Base.show(io :: IO, arr :: NDArray) - print(io, "mx.NDArray$(size(arr))") -end - -function NDArray{T<:Real}(data :: Array{T}) - copy(data, cpu()) -end - -function Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) - Base.unsafe_convert(MX_handle, obj.handle) -end -Base.convert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) -Base.cconvert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) - -################################################################################ -# NDArray functions exported to the users -################################################################################ -#=doc -.. function:: context(arr :: NDArray) - - Get the context that this :class:`NDArray` lives on. -=# -function context(arr :: NDArray) - ref_typeid = Ref{Cint}(0) - ref_devid = Ref{Cint}(0) - @mxcall(:MXNDArrayGetContext, (MX_handle, Ref{Cint}, Ref{Cint}), - arr, ref_typeid, ref_devid) - return Context(ref_typeid[], ref_devid[]) -end - -#=doc -.. function:: - empty(shape :: Tuple, ctx :: Context) - empty(shape :: Tuple) - empty(dim1, dim2, ...) 
- - Allocate memory for an uninitialized :class:`NDArray` with specific shape. -=# -function empty{N}(shape :: NTuple{N, Int}) - empty(shape, cpu()) -end -function empty{N}(shape :: NTuple{N, Int}, ctx :: Context) - NDArray(_ndarray_alloc(shape, ctx, false)) -end -function empty(shape :: Int...) - empty(shape) -end - -#=doc -Interface functions similar to Julia Arrays -------------------------------------------- -=# - -#=doc -.. function:: - zeros(shape :: Tuple, ctx :: Context) - zeros(shape :: Tuple) - zeros(dim1, dim2, ...) - - Create zero-ed :class:`NDArray` with specific shape. -=# -function zeros{N}(shape :: NTuple{N, Int}) - zeros(shape, cpu()) -end -function zeros{N}(shape :: NTuple{N, Int}, ctx :: Context) - arr = empty(shape, ctx) - arr[:] = 0 - return arr -end -function zeros(shape :: Int...) - zeros(shape) -end - -#=doc -.. function:: - ones(shape :: Tuple, ctx :: Context) - ones(shape :: Tuple) - ones(dim1, dim2, ...) - - Create an :class:`NDArray` with specific shape and initialize with 1. -=# -function ones{N}(shape :: NTuple{N, Int}) - ones(shape, cpu()) -end -function ones{N}(shape :: NTuple{N, Int}, ctx :: Context) - arr = empty(shape, ctx) - arr[:] = 1 - return arr -end -function ones(shape :: Int...) - ones(shape) -end - -import Base: size, length, ndims, eltype - -#=doc -.. function:: - size(arr :: NDArray) - size(arr :: NDArray, dim :: Int) - - Get the shape of an :class:`NDArray`. The shape is in Julia's column-major convention. See - also the :ref:`notes on NDArray shapes `. -=# -function size(arr :: NDArray) - ref_ndim = Ref{MX_uint}(0) - ref_shape = Ref{Ptr{MX_uint}}(0) - @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), - arr, ref_ndim, ref_shape) - tuple(map(Int, flipdim(pointer_to_array(ref_shape[], ref_ndim[]),1))...) -end -function size(arr :: NDArray, dim :: Int) - size(arr)[dim] -end - -#=doc -.. function:: length(arr :: NDArray) - - Get the number of elements in an :class:`NDArray`. 
-=# -function length(arr :: NDArray) - prod(size(arr)) -end - -#=doc -.. function:: ndims(arr :: NDArray) - - Get the number of dimensions of an :class:`NDArray`. Is equivalent to ``length(size(arr))``. -=# -function ndims(arr :: NDArray) - length(size(arr)) -end - -#=doc -.. function:: eltype(arr :: NDArray) - - Get the element type of an :class:`NDArray`. Currently the element type is always ``mx.MX_float``. -=# -function eltype(arr :: NDArray) - MX_float -end - - -import Base: slice -#=doc -.. function:: slice(arr :: NDArray, start:stop) - - Create a view into a sub-slice of an :class:`NDArray`. Note only slicing at the slowest - changing dimension is supported. In Julia's column-major perspective, this is the last - dimension. For example, given an :class:`NDArray` of shape (2,3,4), ``slice(array, 2:3)`` will create - a :class:`NDArray` of shape (2,3,2), sharing the data with the original array. This operation is - used in data parallelization to split mini-batch into sub-batches for different devices. -=# -function slice(arr :: NDArray, ::Colon) - arr -end -function slice(arr :: NDArray, slice::UnitRange{Int}) - dim1 = size(arr)[end] - @assert(1 <= slice.start <= slice.stop <= dim1) - if slice.start == 1 && slice.stop == dim1 - return arr - end - - hdr_ref = Ref{MX_handle}(0) - # note Julia is 1-based, inclusive-inclusive indexing, while C++ is - # 0-based, inclusive-exclusive indexing. So 1:3 in Julia should - # translates into 0:3 in C++. - @mxcall(:MXNDArraySlice, (MX_handle, MX_uint, MX_uint, Ref{MX_handle}), - arr, slice.start-1, slice.stop, hdr_ref) - return NDArray(MX_NDArrayHandle(hdr_ref[]), arr.writable) -end - -import Base: setindex! - -#=doc -.. function:: setindex!(arr :: NDArray, val, idx) - - Assign values to an :class:`NDArray`. 
Elementwise assignment is not implemented, only the following - scenarios are supported - - - ``arr[:] = val``: whole array assignment, ``val`` could be a scalar or an array (Julia ``Array`` - or :class:`NDArray`) of the same shape. - - ``arr[start:stop] = val``: assignment to a *slice*, ``val`` could be a scalar or an array of - the same shape to the slice. See also :func:`slice`. -=# -function setindex!(arr :: NDArray, val :: Real, ::Colon) - @assert(arr.writable) - _set_value(val, arr) - return arr -end -function setindex!{T<:Real}(arr :: NDArray, val :: Array{T}, ::Colon) - copy!(arr, val) -end -function setindex!(arr :: NDArray, val :: NDArray, ::Colon) - copy!(arr, val) -end -function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, idx::UnitRange{Int}) - setindex!(slice(arr, idx), val, Colon()) -end - -#=doc -.. function:: getindex(arr :: NDArray, idx) - - Shortcut for :func:`slice`. A typical use is to write - - .. code-block:: julia - - arr[:] += 5 - - which translates into - - .. code-block:: julia - - arr[:] = arr[:] + 5 - - which furthur translates into - - .. code-block:: julia - - setindex!(getindex(arr, Colon()), 5, Colon()) - - .. note:: - - The behavior is quite different from indexing into Julia's ``Array``. For example, ``arr[2:5]`` - create a **copy** of the sub-array for Julia ``Array``, while for :class:`NDArray`, this is - a *slice* that shares the memory. -=# -import Base: getindex -"""Shortcut for `slice`. **NOTE** the behavior for Julia's built-in index slicing is to create a -copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. -""" -function getindex(arr :: NDArray, ::Colon) - return arr -end -function getindex(arr :: NDArray, idx::UnitRange{Int}) - slice(arr, idx) -end - -#=doc -Copying functions ------------------ -=# -import Base: copy!, copy, convert -#=doc -.. 
function:: - copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) - - Copy contents of ``src`` into ``dst``. -=# -function copy!(dst :: NDArray, src :: NDArray) - @assert(dst.writable) - if dst.handle == src.handle - warn("Copying an NDArray to itself") - return - end - - _copyto(src, dst) - return dst -end - -function copy!(dst :: Array{MX_float}, src :: NDArray) - @assert size(dst) == size(src) - @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{MX_float}, Csize_t), - src, pointer(dst), length(dst)) - return dst -end -function copy!{T<:Real}(dst :: Array{T}, src :: NDArray) - copy!(dst, copy(src)) -end - -function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) - @assert dst.writable - @assert size(dst) == size(src) - src = convert(Array{MX_float}, src) # this might involve copying - @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), - dst.handle, pointer(src), length(src)) - return dst -end - -function copy_ignore_shape!{T<:Real}(dst :: NDArray, src :: Array{T}) - @assert dst.writable - @assert length(dst) == length(src) - src = convert(Array{MX_float}, src) # this might involve copying - @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), - dst.handle, pointer(src), length(src)) - return dst -end - - -#=doc -.. function:: - copy(arr :: NDArray) - copy(arr :: NDArray, ctx :: Context) - copy(arr :: Array, ctx :: Context) - - Create a copy of an array. When no :class:`Context` is given, create a Julia ``Array``. - Otherwise, create an :class:`NDArray` on the specified context. 
-=# -# Create copy: NDArray -> Julia Array -function copy(arr :: NDArray) - j_arr = Array(MX_float, size(arr)) - copy!(j_arr, arr) -end - -# Create copy: NDArray -> NDArray in a given context -function copy(arr :: NDArray, ctx :: Context) - dst = NDArray(_ndarray_alloc(size(arr), ctx, true)) - copy!(dst, arr) -end - -# Create copy: Julia Array -> NDArray in a given context -function copy{T<:Real}(arr :: Array{T}, ctx :: Context) - dst = empty(size(arr), ctx) - copy!(dst, arr) -end - -#=doc -.. function:: convert(::Type{Array{T}}, arr :: NDArray) - - Convert an :class:`NDArray` into a Julia ``Array`` of specific type. -=# -# Convert copy: NDArray -> Julia Array -function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) - convert(t, copy(arr)) -end - - -#=doc -Basic arithmetics ------------------ -=# - -#=doc -.. function:: @inplace - - Julia does not support re-definiton of ``+=`` operator (like ``__iadd__`` in python), - When one write ``a += b``, it gets translated to ``a = a+b``. ``a+b`` will allocate new - memory for the results, and the newly allocated :class:`NDArray` object is then assigned - back to a, while the original contents in a is discarded. This is very inefficient - when we want to do inplace update. - - This macro is a simple utility to implement this behavior. Write - - .. code-block:: julia - - @mx.inplace a += b - - will translate into - - .. code-block:: julia - - mx.add_to!(a, b) - - which will do inplace adding of the contents of ``b`` into ``a``. 
-=# -macro inplace(stmt) - if stmt.head == :+= || stmt.head == :.+= - Expr(:call, :add_to!, esc(stmt.args[1]), esc(stmt.args[2])) - elseif stmt.head == :-= || stmt.head == :.-= - Expr(:call, :sub_from!, esc(stmt.args[1]), esc(stmt.args[2])) - elseif stmt.head == :.*= - Expr(:call, :mul_to!, esc(stmt.args[1]), esc(stmt.args[2])) - elseif stmt.head == :./= - Expr(:call, :div_from!, esc(stmt.args[1]), esc(stmt.args[2])) - else - error("unsupported inplace translation for $stmt") - end -end - -#=doc -.. function:: add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) - - Add a bunch of arguments into ``dst``. Inplace updating. -=# -function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) - @assert dst.writable - for arg in args - if isa(arg, Real) - _plus_scalar(dst, arg, dst) - else - _plus(dst, arg, dst) - end - end - return dst -end - -#=doc -.. function:: - +(args...) - .+(args...) - - Summation. Multiple arguments of either scalar or :class:`NDArray` could be - added together. Note at least the first or second argument needs to be an :class:`NDArray` to - avoid ambiguity of built-in summation. -=# -import Base: +, .+ -function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) - ret = copy(arg0, context(arg0)) - add_to!(ret, args...) -end -function .+(arg0 :: NDArray, args :: Union{Real, NDArray}...) - +(arg0, args...) -end -function +(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) - +(arg1, arg0, args...) -end -function .+(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) - .+(arg1, arg0, args...) -end - -#=doc -.. function:: sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) - - Subtract a bunch of arguments from ``dst``. Inplace updating. -=# -function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) - @assert dst.writable - if isa(arg, Real) - _minus_scalar(dst, arg, dst) - else - _minus(dst, arg, dst) - end -end - -#=doc -.. 
function:: - -(arg0, arg1) - -(arg0) - .-(arg0, arg1) - - Subtraction ``arg0 - arg1``, of scalar types or :class:`NDArray`. Or create - the negative of ``arg0``. -=# -import Base: -, .- -function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) - ret = copy(arg0, context(arg0)) - sub_from!(ret, arg1) -end -function .-(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) - -(arg0, arg1) -end -function -(arg0 :: Real, arg1 :: NDArray) - ret = -arg1 - add_to!(ret, arg0) - return ret -end -function .-(arg0 :: Real, arg1 :: NDArray) - -(arg0, arg1) -end - -function -(arg0 :: NDArray) - _mul_scalar(arg0, -1.0) -end - -#=doc -.. function:: mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) - - Elementwise multiplication into ``dst`` of either a scalar or an :class:`NDArray` of the same shape. - Inplace updating. -=# -function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) - @assert dst.writable - if isa(arg, Real) - _mul_scalar(dst, arg, dst) - else - _mul(dst, arg, dst) - end - return dst -end - -#=doc -.. function:: - .*(arg0, arg1) - - Elementwise multiplication of ``arg0`` and ``arg``, could be either scalar or :class:`NDArray`. -=# -import Base: .*, * -function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) - ret = copy(arg0, context(arg0)) - mul_to!(ret, arg) -end -function .*(arg0 :: Real, arg :: NDArray) - .*(arg, arg0) -end - -#=doc -.. function:: - *(arg0, arg1) - - Currently only multiplication a scalar with an :class:`NDArray` is implemented. Matrix multiplication - is to be added soon. -=# -function *(arg0 :: NDArray, arg :: Real) - ret = copy(arg0, context(arg0)) - mul_to!(ret, arg) -end -function *(arg0 :: Real, arg :: NDArray) - *(arg, arg0) -end - -#=doc -.. function:: div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) - - Elementwise divide a scalar or an :class:`NDArray` of the same shape from ``dst``. Inplace updating. 
-=# -function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) - @assert dst.writable - if isa(arg, Real) - _div_scalar(dst, arg, dst) - else - _div(dst, arg, dst) - end -end - -import Base: ./, / -#=doc -.. function:: ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) - - Elementwise dividing an :class:`NDArray` by a scalar or another :class:`NDArray` of the same shape. -=# -function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) - ret = copy(arg0, context(arg0)) - div_from!(ret, arg) -end - -#=doc -.. function:: /(arg0 :: NDArray, arg :: Real) - - Divide an :class:`NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. -=# -function /(arg0 :: NDArray, arg :: Real) - ./(arg0, arg) -end - -#=doc -IO --- -=# -#=doc -.. function:: load(filename, ::Type{NDArray}) - - Load NDArrays from binary file. - - :param AbstractString filename: the path of the file to load. It could be S3 or HDFS address. - :return: Either ``Dict{Base.Symbol, NDArray}`` or ``Vector{NDArray}``. - - If the ``libmxnet`` is built with the corresponding component enabled. Examples - - * ``s3://my-bucket/path/my-s3-ndarray`` - * ``hdfs://my-bucket/path/my-hdfs-ndarray`` - * ``/path-to/my-local-ndarray`` -=# -function load(filename::AbstractString, ::Type{NDArray}) - out_size = Ref{MX_uint}(0) - out_hdrs = Ref{Ptr{MX_handle}}(0) - out_name_size = Ref{MX_uint}(0) - out_names = Ref{char_pp}(0) - @mxcall(:MXNDArrayLoad, (char_p, Ref{MX_uint}, Ref{Ptr{MX_handle}}, Ref{MX_uint}, Ref{char_pp}), - filename, out_size, out_hdrs, out_name_size, out_names) - out_name_size = out_name_size[] - out_size = out_size[] - if out_name_size == 0 - return [NDArray(MX_NDArrayHandle(hdr)) for hdr in pointer_to_array(out_hdrs[], out_size)] - else - @assert out_size == out_name_size - return Dict([(symbol(bytestring(k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in - zip(pointer_to_array(out_names[], out_size), pointer_to_array(out_hdrs[], out_size))]) - end -end - -#=doc -.. 
function:: save(filename :: AbstractString, data) - - Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built - with corresponding support. - - :param AbstractString filename: path to the binary file to write to. - :param data: data to save to file. - :type data: :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. -=# -function save(filename::AbstractString, data::NDArray) - save(filename, [data]) -end -function save(filename::AbstractString, data::Vector{NDArray}) - @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), - filename, length(data), MX_handle[data...], char_pp(0)) -end -function save(filename::AbstractString, data::Dict{Base.Symbol,NDArray}) - names = [k for k in keys(data)] - arrays = MX_handle[data[k] for k in names] - names = AbstractString[string(k) for k in names] - - @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), - filename, length(names), arrays, names) -end - -#=doc -libmxnet APIs -------------- -=# -################################################################################ -# NDArray functions dynamically imported from libmxnet -################################################################################ -function _invoke_mxfunction(func_handle::MX_handle, use_vars, scalars, mut_vars) - @mxcall(:MXFuncInvoke, - (MX_handle, Ptr{MX_handle}, Ptr{MX_float}, Ptr{MX_handle}), - func_handle, use_vars, scalars, mut_vars) -end - -@enum(LIBMX_FUNC_TYPE_MASK, - NDARRAY_ARG_BEFORE_SCALAR = 1, - ACCEPT_EMPTY_MUTATE_TARGET = (1 << 2) -) - -# Import corresponding math functions from base so the automatically defined libmxnet -# functions can overload them -import Base: sqrt - -#=doc -The libxmnet APIs are automatically imported from ``libmxnet.so``. The functions listed -here operate on :class:`NDArray` objects. The arguments to the functions are typically ordered -as - -.. 
code-block:: julia - - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) - -unless ``NDARRAY_ARG_BEFORE_SCALAR`` is not set. In this case, the scalars are put before the input arguments: - -.. code-block:: julia - - func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) - - -If ``ACCEPT_EMPTY_MUTATE_TARGET`` is set. An overloaded function without the output arguments will also be defined: - -.. code-block:: julia - - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) - -Upon calling, the output arguments will be automatically initialized with empty NDArrays. - -Those functions always return the output arguments. If there is only one output (the typical situation), that -object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. - -**autogen:EMBED:ndarray:EMBED:autogen** -=# -function _import_ndarray_functions(;gen_docs=false) - n_ref = Ref{MX_uint}(0) - h_ref = Ref{Ptr{MX_handle}}(0) - @mxcall(:MXListFunctions, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) - - n_funcs = n_ref[] - h_funcs = pointer_to_array(h_ref[], n_funcs) - - if gen_docs - docs = Dict{Base.Symbol, AbstractString}() - end - - for i = 1:n_funcs - func_handle = h_funcs[i] - - #---------------------------------------- - # get function information (human readable) - ref_name = Ref{char_p}(0) - ref_desc = Ref{char_p}(0) - ref_narg = Ref{MX_uint}(0) - - ref_arg_names = Ref{char_pp}(0) - ref_arg_types = Ref{char_pp}(0) - ref_arg_descs = Ref{char_pp}(0) - - @mxcall(:MXFuncGetInfo, - (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), - func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) - - func_name = symbol(bytestring(ref_name[])) - - if gen_docs - # generate document only - f_desc = bytestring(ref_desc[]) * "\n\n" - f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) - 
docs[func_name] = f_desc - else - #---------------------------------------- - # get function specification - ref_n_use_vars = Ref{MX_uint}(0) - ref_n_scalars = Ref{MX_uint}(0) - ref_n_mut_vars = Ref{MX_uint}(0) - ref_type_mask = Ref{Cint}(0) - @mxcall(:MXFuncDescribe, - (MX_handle, Ref{MX_uint}, Ref{MX_uint}, Ref{MX_uint}, Ref{Cint}), - func_handle, ref_n_use_vars, ref_n_scalars, ref_n_mut_vars, ref_type_mask) - - #---------------------------------------- - # prepare function definition - n_used_vars = ref_n_use_vars[] - n_scalars = ref_n_scalars[] - n_mutate_vars = ref_n_mut_vars[] - type_mask = ref_type_mask[] - accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 - arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 - - # general ndarray function - if arg_before_scalar - args = vcat([Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - else - args = vcat([Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - end - - _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) - _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) - _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) - stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) - if n_mutate_vars == 1 - stmt_ret = :(return out1) - else - stmt_ret = Expr(:return, Expr(:tuple, [symbol("out$i") for i=1:n_mutate_vars]...)) - end - - func_body = Expr(:block, stmt_call, stmt_ret) - func_head = Expr(:call, func_name, args...) 
- - func_def = Expr(:function, func_head, func_body) - eval(func_def) - - if accept_empty_mutate - args0 = args[1:n_used_vars+n_scalars] - func_head0 = Expr(:call, func_name, args0...) - _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] - stmt_call0 = Expr(:call, func_name, args0..., _mut_vars0...) - func_body0 = Expr(:block, stmt_call0) - func_head0 = Expr(:call, func_name, args0...) - - func_def0 = Expr(:function, func_head0, func_body0) - eval(func_def0) - end - end - end - - if gen_docs - return docs - end -end - diff --git a/src/optimizer.jl-e b/src/optimizer.jl-e deleted file mode 100644 index a5f0bfd5ec60..000000000000 --- a/src/optimizer.jl-e +++ /dev/null @@ -1,226 +0,0 @@ -#=doc -Optimizers -========== - -Common interfaces ------------------ -=# - - -#=doc -.. class:: AbstractOptimizer - - Base type for all optimizers. -=# -abstract AbstractOptimizer - -#=doc -.. class:: AbstractLearningRateScheduler - - Base type for all learning rate scheduler. -=# -abstract AbstractLearningRateScheduler - -#=doc -.. class:: AbstractMomentumScheduler - - Base type for all momentum scheduler. -=# -abstract AbstractMomentumScheduler - - - -#=doc -.. class:: OptimizationState - - .. attribute:: batch_size - - The size of the mini-batch used in stochastic training. - - .. attribute:: curr_epoch - - The current epoch count. Epoch 0 means no training yet, during the first - pass through the data, the epoch will be 1; during the second pass, the - epoch count will be 1, and so on. - - .. attribute:: curr_batch - - The current mini-batch count. The batch count is reset during every epoch. - The batch count 0 means the beginning of each epoch, with no mini-batch - seen yet. During the first mini-batch, the mini-batch count will be 1. - - .. attribute:: curr_iter - - The current iteration count. One iteration corresponds to one mini-batch, - but unlike the mini-batch count, the iteration count does **not** reset - in each epoch. 
So it track the *total* number of mini-batches seen so far. -=# -type OptimizationState - batch_size :: Int - curr_epoch :: Int - curr_batch :: Int - curr_iter :: Int -end -OptimizationState(batch_size::Int) = OptimizationState(batch_size, 0, 0, 0) - - -#=doc -.. function:: get_learning_rate(scheduler, state) - - :param AbstractLearningRateScheduler scheduler: a learning rate scheduler. - :param OptimizationState state: the current state about epoch, mini-batch and iteration count. - :return: the current learning rate. -=# -function get_learning_rate -end - -################################################################################ -# The learning rate module -module LearningRate -import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate - -#=doc -.. class:: LearningRate.Fixed - - Fixed learning rate scheduler always return the same learning rate. -=# -type Fixed <: AbstractLearningRateScheduler - learning_rate :: Float64 -end -get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rate - -#=doc -.. class:: LearningRate.Exp - - :math:`\eta_t = \eta_0\gamma^t`. Here :math:`t` is the epoch count, or the iteration - count if ``decay_on_iteration`` is set to true. -=# -type Exp <: AbstractLearningRateScheduler - learning_rate :: Float64 - gamma :: Float64 - on_iteration :: Bool -end -function Exp(base_lr::Real; gamma::Real=0.9, decay_on_iteration::Bool=false) - @assert(0 < gamma < 1) - Exp(Float64(base_lr), Float64(gamma), decay_on_iteration) -end -get_learning_rate(self :: Exp, state :: OptimizationState) = - self.learning_rate * self.gamma ^ (self.on_iteration ? state.curr_iter : state.curr_epoch) - -end # module LearningRate -################################################################################ -function get_lr_scheduler(scheduler :: Any, lr :: Real) - if isa(scheduler, AbstractLearningRateScheduler) - return scheduler - else - return LearningRate.Fixed(lr) - end -end - - -#=doc -.. 
function:: get_momentum(scheduler, state) - - :param AbstractMomentumScheduler scheduler: the momentum scheduler. - :param OptimizationState state: the state about current epoch, mini-batch and iteration count. - :return: the current momentum. -=# -function get_momentum -end - - -################################################################################ -# The Momentum module -module Momentum -import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum - -#=doc -.. class:: Momentum.Null - - The null momentum scheduler always returns 0 for momentum. It is also used to - explicitly indicate momentum should not be used. -=# -type Null <: AbstractMomentumScheduler -end -get_momentum(self :: Null, state :: OptimizationState) = 0.0 - -#=doc -.. class:: Momentum.Fixed - - Fixed momentum scheduler always returns the same value. -=# -type Fixed <: AbstractMomentumScheduler - momentum :: Float64 -end -get_momentum(self :: Fixed, state :: OptimizationState) = self.momentum -end # module Momentum -################################################################################ -function get_momentum_scheduler(scheduler :: Any, momentum :: Real) - if isa(scheduler, AbstractMomentumScheduler) - return scheduler - elseif momentum == 0 - return Momentum.Null() - else - return Momentum.Fixed(momentum) - end -end - - -#=doc -.. function:: get_updater(optimizer) - - :param AbstractOptimizer optimizer: the underlying optimizer. - - A utility function to create an updater function, that uses its closure to - store all the states needed for each weights. 
-=# -function get_updater(optimizer :: AbstractOptimizer) - states = Dict{Int,Any}() - function updater(index :: Int, grad :: NDArray, weight :: NDArray) - if !haskey(states, index) - states[index] = create_state(optimizer, index, weight) - end - update(optimizer, index, weight, grad, states[index]) - end - return updater -end - -################################################################################ -#=doc -Built-in optimizers -------------------- -=# - -#=doc -.. class:: AbstractOptimizerOptions - - Base class for all optimizer options. -=# -abstract AbstractOptimizerOptions - -#=doc -.. function:: normalized_gradient(opts, state, grad) - - :param AbstractOptimizerOptions opts: options for the optimizer, should contain the field - ``grad_scale``, ``grad_clip`` and ``weight_decay``. - :param OptimizationState state: the current optimization state. - :param NDArray weight: the trainable weights. - :param NDArray grad: the original gradient of the weights. - - Get the properly normalized gradient (re-scaled and clipped if necessary). 
-=# -function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, - weight::NDArray, grad::NDArray) - grad_scale = opts.grad_scale / state.batch_size - - grad = grad_scale * grad - if opts.grad_clip > 0 - grad = clip(grad, -opts.grad_clip, opts.grad_clip) - end - @inplace grad += opts.weight_decay * weight - - return grad -end - -include("optimizers/sgd.jl") -include("optimizers/adam.jl") diff --git a/src/random.jl-e b/src/random.jl-e deleted file mode 100644 index 79a8b6e9e20b..000000000000 --- a/src/random.jl-e +++ /dev/null @@ -1,25 +0,0 @@ -function rand!(low::Real, high::Real, out::NDArray) - _random_uniform(low, high, out) -end -function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}) - rand(low, high, shape, cpu()) -end -function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context) - out = empty(shape, ctx) - rand!(low, high, out) -end - -function randn!(mean::Real, stdvar::Real, out::NDArray) - _random_gaussian(mean, stdvar, out) -end -function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}) - randn(mean, stdvar, shape, cpu()) -end -function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context) - out = empty(shape, ctx) - randn!(mean, stdvar, out) -end - -function srand!(seed_state::Int) - @mxcall(:MXRandomSeed, (Cint,), seed_state) -end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 0c3760240844..b33c89c18616 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -279,7 +279,7 @@ end libmxnet APIs ------------- -**autogen:EMBED:SymbolicNode:EMBED:autogen** +**autogen:EMBED:symbolic-node:EMBED:autogen** =# ################################################################################ # Atomic SymbolicNode functions dynamically imported from libmxnet diff --git a/src/symbolic-node.jl-e b/src/symbolic-node.jl-e deleted file mode 100644 index 337d50864684..000000000000 --- a/src/symbolic-node.jl-e +++ /dev/null @@ -1,437 +0,0 @@ -#=doc -Symbolic API 
-============ -=# - -#=doc -.. class:: Node - - Node is the basic building block of the symbolic graph in MXNet.jl. - - .. note:: - - Throughout this documentation, ``Node`` always refer to this :class:`Node` type. - When we refer to the Julia's build-in Node type (e.g. ``typeof(:foo)``), we always - say ``Base.Symbol``. -=# -type Node - handle :: MX_SymbolHandle -end -function Base.unsafe_convert(::Type{MX_handle}, obj::Node) - Base.unsafe_convert(MX_handle, obj.handle) -end -Base.convert(t::Type{MX_handle}, obj::Node) = Base.unsafe_convert(t, obj) -Base.cconvert(t::Type{MX_handle}, obj::Node) = Base.unsafe_convert(t, obj) - -#=doc -.. function:: deepcopy(self :: Node) - - Make a deep copy of a Node. -=# -function Base.deepcopy(self :: Node) - ref_hdr = Ref{MX_handle}(0) - @mxcall(:MXSymbolCopy, (MX_handle, Ref{MX_handle}), self, ref_hdr) - return Node(MX_SymbolHandle(ref_hdr[])) -end - -#=doc -.. function:: copy(self :: Node) - - Make a copy of a Node. The same as making a deep copy. -=# -function Base.copy(self :: Node) - Base.deepcopy(self) -end - -function Base.call(self :: Node, args :: Node...) - s = deepcopy(self) - _compose!(s, args...) -end -function Base.call(self :: Node; kwargs...) - s = deepcopy(self) - _compose!(s; kwargs...) -end - -macro _list_symbol_info(self, func_name) - quote - ref_sz = Ref{MX_uint}(0) - ref_names = Ref{char_pp}(0) - @mxcall($func_name, (MX_handle, Ref{MX_uint}, Ref{char_pp}), - $self, ref_sz, ref_names) - narg = ref_sz[] - names = pointer_to_array(ref_names[], narg) - names = [symbol(bytestring(x)) for x in names] - return names - end -end -function list_arguments(self :: Node) - @_list_symbol_info(self, :MXSymbolListArguments) -end -function list_outputs(self :: Node) - @_list_symbol_info(self, :MXSymbolListOutputs) -end -"""List all auxiliary states in the symbool. - -Auxiliary states are special states of symbols that do not corresponds to an argument, -and do not have gradient. 
But still be useful for the specific operations. -A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. -Most operators do not have Auxiliary states. -""" -function list_auxiliary_states(self :: Node) - @_list_symbol_info(self, :MXSymbolListAuxiliaryStates) -end - -"Get a new grouped Node whose output contains all the internal outputs of this Node." -function get_internals(self :: Node) - ref_hdr = Ref{MX_handle}(0) - @mxcall(:MXSymbolGetInternals, (MX_handle, Ref{MX_handle}), self, ref_hdr) - return Node(MX_SymbolHandle(ref_hdr[])) -end - -"Create a symbolic variable with the given name" -function Variable(name :: Union{Base.Symbol, AbstractString}) - hdr_ref = Ref{MX_handle}(0) - @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) - Node(MX_SymbolHandle(hdr_ref[])) -end - -"Create a Node that groups symbols together" -function Group(symbols :: Node...) - handles = MX_handle[symbols...] - ref_hdr = Ref{MX_handle}(0) - @mxcall(:MXSymbolCreateGroup, (MX_uint, Ptr{MX_handle}, Ref{MX_handle}), - length(handles), handles, ref_hdr) - Node(MX_SymbolHandle(ref_hdr[])) -end - -macro _infer_shape(self, keys, indptr, sdata) - quote - ref_arg_shape_size = Ref{MX_uint}(0) - ref_arg_shape_ndim = Ref{Ptr{MX_uint}}(0) - ref_arg_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) - ref_out_shape_size = Ref{MX_uint}(0) - ref_out_shape_ndim = Ref{Ptr{MX_uint}}(0) - ref_out_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) - ref_aux_shape_size = Ref{MX_uint}(0) - ref_aux_shape_ndim = Ref{Ptr{MX_uint}}(0) - ref_aux_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) - ref_complete = Ref{Cint}(0) - @mxcall(:MXSymbolInferShape, - (MX_handle, MX_uint, char_pp, Ptr{MX_uint}, Ptr{MX_uint}, - Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, - Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, - Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, - Ref{Cint}), - self, length(indptr)-1, keys, indptr, sdata, - ref_arg_shape_size, ref_arg_shape_ndim, 
ref_arg_shape_data, - ref_out_shape_size, ref_out_shape_ndim, ref_out_shape_data, - ref_aux_shape_size, ref_aux_shape_ndim, ref_aux_shape_data, - ref_complete) - if ref_complete[] == 0 - return (nothing, nothing, nothing) - else - function build_shapes(shape_size::MX_uint, shape_ndim::Ptr{MX_uint}, shape_data::Ptr{Ptr{MX_uint}}) - shape_ndim = pointer_to_array(shape_ndim, shape_size) - shape_data = pointer_to_array(shape_data, shape_size) - shapes = map(1:shape_size) do i - my_shape = pointer_to_array(shape_data[i], shape_ndim[i]) - tuple(flipdim(Int[my_shape...],1)...) - end - convert(Vector{Tuple}, shapes) - end - return ( - build_shapes(ref_arg_shape_size[], ref_arg_shape_ndim[], ref_arg_shape_data[]), - build_shapes(ref_out_shape_size[], ref_out_shape_ndim[], ref_out_shape_data[]), - build_shapes(ref_aux_shape_size[], ref_aux_shape_ndim[], ref_aux_shape_data[]) - ) - end - end -end -function infer_shape(self :: Node; kwargs...) - sdata = MX_uint[] - indptr = MX_uint[0] - for (k,v) in kwargs - append!(sdata, flipdim([v...],1)) - push!(indptr, length(sdata)) - end - keys = AbstractString[string(x[1]) for x in kwargs] - @_infer_shape(self, keys, indptr, sdata) -end -function infer_shape(self :: Node, args :: Union{Tuple, Void}...) 
- sdata = MX_uint[] - indptr = MX_uint[0] - for arg in args - if isa(arg, Void); continue; end - append!(sdata, flipdim([arg...],1)) - push!(indptr, length(sdata)) - end - keys = Ptr{char_p}(0) - @_infer_shape(self, keys, indptr, sdata) -end - -function Base.getindex(self :: Node, idx :: Union{Base.Symbol, AbstractString}) - idx = symbol(idx) - i_idx = find(idx .== list_outputs(self)) - @assert(length(i_idx) > 0, "Cannot find output with name '$idx'") - @assert(length(i_idx) < 2, "Found duplicated output with name '$idx'") - Base.getindex(self, i_idx[1]) -end -function Base.getindex(self :: Node, idx :: Int) - ref_hdr = Ref{MX_handle}(0) - # note Julia is 1-based, while MXNet is 0-based - @mxcall(:MXSymbolGetOutput, (MX_handle, MX_uint, Ref{MX_handle}), self, idx-1, ref_hdr) - return Node(MX_SymbolHandle(ref_hdr[])) -end - -import Base: +, .+ -function +(self :: Node, args :: Node...) - ret = self - for arg in args - ret = _Plus(ret, arg) - end - ret -end -function .+(self :: Node, args :: Node...) - +(self, args...) -end - -import Base: -, .- -function -(self :: Node, arg :: Node) - _Minus(self, arg) -end -function .-(self :: Node, arg :: Node) - -(self, arg) -end - -import Base: .* -function .*(self :: Node, args :: Node...) - ret = self - for arg in args - ret = _Mul(ret, arg) - end - ret -end - -import Base: ./ -function ./(self :: Node, arg :: Node) - _Div(self, arg) -end - -function _compose!(sym :: Node; kwargs...) - name = char_p(0) - arg_keys = AbstractString[] - arg_vals = MX_handle[] - - for (k,v) in kwargs - if k == :name - name = string(v) - else - @assert(isa(v, Node), "Compose expect `Node` as arguments") - push!(arg_keys, string(k)) - push!(arg_vals, v) - end - end - - @mxcall(:MXSymbolCompose, - (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), - sym, name, length(arg_keys), arg_keys, arg_vals) - return sym -end -function _compose!(sym :: Node, args::Node...) - _compose!(sym, char_p(0), args...) 
-end -function _compose!(sym :: Node, name :: Union{Base.Symbol, char_p}, args::Node...) - if isa(name, Base.Symbol); name = string(name); end - arg_keys = Ptr{char_p}(0) - arg_vals = MX_handle[args...] - - @mxcall(:MXSymbolCompose, - (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), - sym, name, length(arg_vals), arg_keys, arg_vals) - return sym -end - -"""Save Node into a JSON string""" -function to_json(self :: Node) - ref_json = Ref{char_p}(0) - @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) - return bytestring(ref_json[]) -end - -"""Load Node from a JSON string representation.""" -function from_json(repr :: AbstractString, ::Type{Node}) - ref_hdr = Ref{MX_handle}(0) - @mxcall(:MXSymbolCreateFromJSON, (char_p, Ref{MX_handle}), repr, ref_hdr) - return Node(MX_SymbolHandle(ref_hdr[])) -end - -"""Load Node from a JSON file.""" -function load(filename :: AbstractString, ::Type{Node}) - ref_hdr = Ref{MX_handle}(0) - @mxcall(:MXSymbolCreateFromFile, (char_p, Ref{MX_handle}), filename, ref_hdr) - return Node(MX_SymbolHandle(ref_hdr[])) -end -function save(filename :: AbstractString, sym :: Node) - @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), sym, filename) -end - -#=doc -libmxnet APIs -------------- - -**autogen:EMBED:Node:EMBED:autogen** -=# -################################################################################ -# Atomic Node functions dynamically imported from libmxnet -################################################################################ -function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) - ref_name = Ref{char_p}(0) - ref_desc = Ref{char_p}(0) - ref_kv_nargs = Ref{char_p}(0) - ref_nargs = Ref{MX_uint}(0) - ref_arg_names = Ref{char_pp}(0) - ref_arg_types = Ref{char_pp}(0) - ref_arg_descs = Ref{char_pp}(0) - - @mxcall(:MXSymbolGetAtomicSymbolInfo, - (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, - Ref{char_pp}, Ref{char_p}), - hdr, ref_name, ref_desc, 
ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, ref_kv_nargs) - - func_name = symbol(bytestring(ref_name[])) - kv_nargs_s = bytestring(ref_kv_nargs[]) - kv_nargs = symbol(kv_nargs_s) - - if gen_docs - f_desc = bytestring(ref_desc[]) * "\n\n" - if !isempty(kv_nargs_s) - f_desc *= "This function support variable length positional :class:`Node` inputs.\n\n" - end - f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param Base.Symbol name: The name of the Node. (e.g. `:my_symbol`), optional.\n\n" - f_desc *= ":return: the constructed :class:`Node`.\n\n" - return (func_name, f_desc) - end - - # function $func_name(args...; kwargs...) - func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) - func_body = quote - idx = findfirst(x -> x[1] == :name, kwargs) - if idx > 0 - name = kwargs[idx][2] - else - name = "" - end - - param_keys = AbstractString[] - param_vals = AbstractString[] - symbol_kws = Dict{Base.Symbol, Node}() - - $(if kv_nargs != symbol("") - quote - if !in($kv_nargs_s, param_keys) - push!(param_keys, $kv_nargs_s) - push!(param_vals, string(length(args))) - end - end - end) - - for (k,v) in kwargs - if k == :name; continue; end - if isa(v, Node) - symbol_kws[k] = v - else - push!(param_keys, string(k)) - push!(param_vals, dump_mx_param(v)) - end - end - - if length(args) != 0 && length(symbol_kws) != 0 - @assert(false, "$func_name only accepts Symbols either as positional or keyword arguments, not both.") - end - $(if kv_nargs != symbol("") - quote - if length(symbol_kws) > 0 - @assert(false, "$func_name takes variable number of Node arguments, please pass input Symbols " * - "via positional arguments, instead of keyword arguments.") - end - end - end) - - # create the Node - ref_sym_hdr = Ref{MX_handle}() - @mxcall(:MXSymbolCreateAtomicSymbol, - (MX_handle, MX_uint, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), - $hdr, length(param_keys), param_keys, param_vals, 
ref_sym_hdr) - sym_hdr = ref_sym_hdr[] - - sym = Node(MX_SymbolHandle(sym_hdr)) - hint = lowercase(string($func_name)) - name = get!(DEFAULT_NAME_MANAGER, name, hint) - - if length(args) != 0 - _compose!(sym, name, args...) - else - _compose!(sym; name=name, symbol_kws...) - end - - return sym - end - - func_def = Expr(:function, func_head, Expr(:block, func_body)) - eval(func_def) -end - -function _import_atomic_symbol_creators(;gen_docs=false) - n_ref = Ref{MX_uint}(0) - h_ref = Ref{Ptr{MX_handle}}(0) - @mxcall(:MXSymbolListAtomicSymbolCreators, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) - - n_creators = n_ref[] - h_creators = pointer_to_array(h_ref[], n_creators) - - if gen_docs - docs = Dict{Base.Symbol, AbstractString}() - end - - for i = 1:n_creators - creator_hdr = h_creators[i] - ret = _define_atomic_symbol_creator(creator_hdr, gen_docs=gen_docs) - if gen_docs - docs[ret[1]] = ret[2] - end - end - - if gen_docs - return docs - end -end - -################################################################################ -# Utility macros to chain up symbols -################################################################################ -macro chain(layers) - exprs = [] - last_layer = nothing - function _chain_layer(layer, last_layer) - if isa(last_layer, Void) - esc(layer) - else - @assert(isa(layer, Expr) && layer.head == :call, "Do not know how to chain up $layer") - return Expr(:call, esc(layer.args[1]), last_layer, map(esc, layer.args[2:end])...) - end - end - while true - if layers.head == :(=>) - new_layer = gensym() - push!(exprs, :($new_layer = $(_chain_layer(layers.args[1], last_layer)))) - last_layer = new_layer - layers = layers.args[2] - else - push!(exprs, _chain_layer(layers, last_layer)) - break - end - end - return Expr(:block, exprs...) 
-end - diff --git a/src/util.jl-e b/src/util.jl-e deleted file mode 100644 index 4ca613cbf7d1..000000000000 --- a/src/util.jl-e +++ /dev/null @@ -1,70 +0,0 @@ -################################################################################ -# Dataset related utilities -################################################################################ -function get_data_dir() - data_dir = joinpath(Pkg.dir("MXNet"), "data") - mkpath(data_dir) - data_dir -end - -function get_mnist_ubyte() - data_dir = get_data_dir() - mnist_dir = joinpath(data_dir, "mnist") - mkpath(mnist_dir) - filenames = Dict(:train_data => "train-images-idx3-ubyte", - :train_label => "train-labels-idx1-ubyte", - :test_data => "t10k-images-idx3-ubyte", - :test_label => "t10k-labels-idx1-ubyte") - filenames = [k => joinpath(mnist_dir, v) for (k,v) in filenames] - if !all(isfile, values(filenames)) - cd(mnist_dir) do - run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip`) - run(`unzip -u mnist.zip`) - end - end - return filenames -end - -function get_cifar10() - data_dir = get_data_dir() - cifar10_dir = joinpath(data_dir, "cifar10") - mkpath(cifar10_dir) - filenames = Dict(:train => "cifar/train.rec", :test => "cifar/test.rec") - filenames = [k => joinpath(cifar10_dir, v) for (k,v) in filenames] - if !all(isfile, values(filenames)) - cd(cifar10_dir) do - run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip`) - run(`unzip -u cifar10.zip`) - end - end - - filenames[:mean] = joinpath(cifar10_dir, "cifar/cifar_mean.bin") - return filenames -end - - -################################################################################ -# Internal Utilities -################################################################################ -const DOC_EMBED_ANCHOR = "**autogen:EMBED:{1}:EMBED:autogen**" -function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{char_pp}, arg_descs::Ref{char_pp}, remove_dup::Bool=true) - param_keys = Set{AbstractString}() - - arg_names = 
pointer_to_array(arg_names[], narg) - arg_types = pointer_to_array(arg_types[], narg) - arg_descs = pointer_to_array(arg_descs[], narg) - docstrings = AbstractString[] - - for i = 1:narg - arg_name = bytestring(arg_names[i]) - if arg_name ∈ param_keys && remove_dup - continue - end - push!(param_keys, arg_name) - - arg_type = bytestring(arg_types[i]) - arg_desc = bytestring(arg_descs[i]) - push!(docstrings, ":param $arg_name: $arg_desc\n:type $arg_name: $arg_type\n\n") - end - return join(docstrings, "\n") -end From c13f88f7c04883e721c8d24c20ea2dc02d5765e2 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 15:49:45 -0500 Subject: [PATCH 187/630] add nn-factory --- README.md | 8 ++------ examples/mnist/mlp.jl | 17 ++++++++++------ src/MXNet.jl | 2 ++ src/nn-factory.jl | 46 +++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 61 insertions(+), 12 deletions(-) create mode 100644 src/nn-factory.jl diff --git a/README.md b/README.md index 10862ff8a6a6..7dc99a24a7b4 100644 --- a/README.md +++ b/README.md @@ -15,12 +15,8 @@ Here is an exmple of how training a simple 3-layer MLP on MNIST looks like: ```julia using MXNet -mlp = @mx.chain mx.Variable(:data) => - mx.FullyConnected(name=:fc1, num_hidden=128) => - mx.Activation(name=:relu1, act_type=:relu) => - mx.FullyConnected(name=:fc2, num_hidden=64) => - mx.Activation(name=:relu2, act_type=:relu) => - mx.FullyConnected(name=:fc3, num_hidden=10) => +mlp = @mx.chain mx.Variable(:data) => + mx.MLP([128, 64, 10]) => mx.SoftmaxOutput(name=:softmax) # data provider diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index f6fbbd5c0870..cdb0064da8e5 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -14,12 +14,17 @@ using MXNet # mlp = mx.SoftmaxOutput(data = fc3, name=:softmax) #-- Option 2: using the mx.chain macro -mlp = @mx.chain mx.Variable(:data) => - mx.FullyConnected(name=:fc1, num_hidden=128) => - mx.Activation(name=:relu1, act_type=:relu) => - mx.FullyConnected(name=:fc2, 
num_hidden=64) => - mx.Activation(name=:relu2, act_type=:relu) => - mx.FullyConnected(name=:fc3, num_hidden=10) => +# mlp = @mx.chain mx.Variable(:data) => +# mx.FullyConnected(name=:fc1, num_hidden=128) => +# mx.Activation(name=:relu1, act_type=:relu) => +# mx.FullyConnected(name=:fc2, num_hidden=64) => +# mx.Activation(name=:relu2, act_type=:relu) => +# mx.FullyConnected(name=:fc3, num_hidden=10) => +# mx.SoftmaxOutput(name=:softmax) + +#-- Option 3: using nn-factory +mlp = @mx.chain mx.Variable(:data) => + mx.MLP([128, 64, 10]) => mx.SoftmaxOutput(name=:softmax) # data provider diff --git a/src/MXNet.jl b/src/MXNet.jl index f9f9e8664c4f..a3e280d26a11 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -30,6 +30,8 @@ include("model.jl") include("util.jl") +include("nn-factory.jl") + end # mx end # module MXNet diff --git a/src/nn-factory.jl b/src/nn-factory.jl new file mode 100644 index 000000000000..79aec78a1746 --- /dev/null +++ b/src/nn-factory.jl @@ -0,0 +1,46 @@ +#=doc +Neural Networks Factory +======================= + +Neural network factory provide convenient helper functions to define +common neural networks. +=# + +#=doc +.. function:: MLP(input, spec) + + Construct a multi-layer perceptron. + + :param SymbolicNode input: the input to the mlp. + :param spec: the mlp specification, a list of hidden dimensions. For example, + ``[128, (512, :sigmoid), 10]``. The number in the list indicate the + number of hidden units in each layer. A tuple could be used to specify + the activation of each layer. Otherwise, the default activation will + be used (except for the last layer). + :param Base.Symbol hidden_activation: keyword argument, default ``:relu``, indicating + the default activation for hidden layers. The specification here could be overwritten + by layer-wise specification in the ``spec`` argument. Also activation is not + applied to the last, i.e. the prediction layer. 
+ :param prefix: keyword argument, default ``gensym()``, used as the prefix to + name the constructed layers. +=# +function MLP(input, spec; hidden_activation::Base.Symbol=:relu, prefix=gensym()) + spec = convert(Vector{Union{Int,Tuple}}, spec) + + n_layer = length(spec) + for (i, s) in enumerate(spec) + if isa(s, Tuple) + n_unit, act_type = s + else + n_unit = s + act_type = hidden_activation + end + input = FullyConnected(input, name=symbol(prefix, "fc$i"), num_hidden=n_unit) + if i < n_layer || isa(s, Tuple) + # will not add activation unless the user explicitly specified + input = Activation(input, name=symbol(prefix, "$act_type$i"), act_type=act_type) + end + end + + return input +end From 3bc475a2e16391e7faffe2fd4e809d6f20751c9b Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 15:50:45 -0500 Subject: [PATCH 188/630] add doc for nn factory --- docs/api/nn-factory.rst | 29 +++++++++++++++++++++++++++++ docs/build-api.jl | 2 ++ docs/index.rst | 1 + 3 files changed, 32 insertions(+) create mode 100644 docs/api/nn-factory.rst diff --git a/docs/api/nn-factory.rst b/docs/api/nn-factory.rst new file mode 100644 index 000000000000..02e3d2ca73f1 --- /dev/null +++ b/docs/api/nn-factory.rst @@ -0,0 +1,29 @@ + +Neural Networks Factory +======================= + +Neural network factory provide convenient helper functions to define +common neural networks. + + + + +.. function:: MLP(input, spec) + + Construct a multi-layer perceptron. + + :param SymbolicNode input: the input to the mlp. + :param spec: the mlp specification, a list of hidden dimensions. For example, + ``[128, (512, :sigmoid), 10]``. The number in the list indicate the + number of hidden units in each layer. A tuple could be used to specify + the activation of each layer. Otherwise, the default activation will + be used (except for the last layer). + :param Base.Symbol hidden_activation: keyword argument, default ``:relu``, indicating + the default activation for hidden layers. 
The specification here could be overwritten + by layer-wise specification in the ``spec`` argument. Also activation is not + applied to the last, i.e. the prediction layer. + :param prefix: keyword argument, default ``gensym()``, used as the prefix to + name the constructed layers. + + + diff --git a/docs/build-api.jl b/docs/build-api.jl index 510426ee2004..d49996a24cb3 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -92,3 +92,5 @@ extract_doc("optimizer.rst", "optimizer.jl") extract_doc("io.rst", "io.jl") embed_mxnet_api("io.rst", "io", mx._import_io_iterators) + +extract_doc("nn-factory.rst", "nn-factory.jl") diff --git a/docs/index.rst b/docs/index.rst index 6203ad3a1af4..c10f44780a2b 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -39,6 +39,7 @@ For more details, see documentation below. Please also checkout the `examples api/io api/ndarray api/symbolic-node + api/nn-factory api/executor Indices and tables From 41c20ae0ca7fff41bbc85aa93841b86f231f2dbb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 15:53:13 -0500 Subject: [PATCH 189/630] fix travis CI build error due to refactoring --- examples/mnist/mlp-test.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index 04d917c6543d..feabd1140a89 100644 --- a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -38,9 +38,9 @@ function mnist_fit_and_predict(optimizer, initializer, n_epoch) for i_epoch = 0:n_epoch @test isfile(mx.format("{1}-{2:04d}.params", cp_prefix, i_epoch)) end - mlp_load = mx.load("$cp_prefix-symbol.json", mx.Symbol) + mlp_load = mx.load("$cp_prefix-symbol.json", mx.SymbolicNode) @test mx.to_json(mlp_load) == mx.to_json(mlp) - mlp_load = mx.from_json(readall("$cp_prefix-symbol.json"), mx.Symbol) + mlp_load = mx.from_json(readall("$cp_prefix-symbol.json"), mx.SymbolicNode) @test mx.to_json(mlp_load) == mx.to_json(mlp) 
#-------------------------------------------------------------------------------- From 3e8627d59142e05692c401e06e2b165b2e926832 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 15:57:51 -0500 Subject: [PATCH 190/630] document clean up --- docs/api/nn-factory.rst | 8 ++++++-- docs/api/symbolic-node.rst | 6 ------ src/nn-factory.jl | 8 ++++++-- src/symbolic-node.jl | 6 ------ 4 files changed, 12 insertions(+), 16 deletions(-) diff --git a/docs/api/nn-factory.rst b/docs/api/nn-factory.rst index 02e3d2ca73f1..44569c640da9 100644 --- a/docs/api/nn-factory.rst +++ b/docs/api/nn-factory.rst @@ -10,7 +10,8 @@ common neural networks. .. function:: MLP(input, spec) - Construct a multi-layer perceptron. + Construct a multi-layer perceptron. A MLP is a multi-layer neural network with + fully connected layers. :param SymbolicNode input: the input to the mlp. :param spec: the mlp specification, a list of hidden dimensions. For example, @@ -21,9 +22,12 @@ common neural networks. :param Base.Symbol hidden_activation: keyword argument, default ``:relu``, indicating the default activation for hidden layers. The specification here could be overwritten by layer-wise specification in the ``spec`` argument. Also activation is not - applied to the last, i.e. the prediction layer. + applied to the last, i.e. the prediction layer. See :func:`Activation` for a + list of supported activation types. :param prefix: keyword argument, default ``gensym()``, used as the prefix to name the constructed layers. + :return: the constructed MLP. + diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 9386c10a8614..bead11843799 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -9,12 +9,6 @@ Symbolic API SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. - .. note:: - - Throughout this documentation, ``SymbolicNode`` always refer to this :class:`SymbolicNode` type. 
- When we refer to the Julia's build-in SymbolicNode type (e.g. ``typeof(:foo)``), we always - say ``Base.Symbol``. - diff --git a/src/nn-factory.jl b/src/nn-factory.jl index 79aec78a1746..984f50a5f782 100644 --- a/src/nn-factory.jl +++ b/src/nn-factory.jl @@ -9,7 +9,8 @@ common neural networks. #=doc .. function:: MLP(input, spec) - Construct a multi-layer perceptron. + Construct a multi-layer perceptron. A MLP is a multi-layer neural network with + fully connected layers. :param SymbolicNode input: the input to the mlp. :param spec: the mlp specification, a list of hidden dimensions. For example, @@ -20,9 +21,12 @@ common neural networks. :param Base.Symbol hidden_activation: keyword argument, default ``:relu``, indicating the default activation for hidden layers. The specification here could be overwritten by layer-wise specification in the ``spec`` argument. Also activation is not - applied to the last, i.e. the prediction layer. + applied to the last, i.e. the prediction layer. See :func:`Activation` for a + list of supported activation types. :param prefix: keyword argument, default ``gensym()``, used as the prefix to name the constructed layers. + + :return: the constructed MLP. =# function MLP(input, spec; hidden_activation::Base.Symbol=:relu, prefix=gensym()) spec = convert(Vector{Union{Int,Tuple}}, spec) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index b33c89c18616..6258810b0a34 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -7,12 +7,6 @@ Symbolic API .. class:: SymbolicNode SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. - - .. note:: - - Throughout this documentation, ``SymbolicNode`` always refer to this :class:`SymbolicNode` type. - When we refer to the Julia's build-in SymbolicNode type (e.g. ``typeof(:foo)``), we always - say ``Base.Symbol``. 
=# type SymbolicNode handle :: MX_SymbolHandle From be8cbdace438a9faf7b5a617bba1799574477b1d Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 16:13:19 -0500 Subject: [PATCH 191/630] more test for IO --- test/unittest/io.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 8ef3b57d948c..39e37f1bfe9a 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -51,9 +51,11 @@ function test_arrays_impl(data::Vector, label::Vector, provider::mx.ArrayDataPro for (d1, (_, d2)) in zip(data, mx.provide_data(provider)) @test size(d1)[1:end-1] == d2[1:end-1] + @test batch_size == d2[end] end for (d1, (_, d2)) in zip(label, mx.provide_label(provider)) @test size(d1)[1:end-1] == d2[1:end-1] + @test batch_size == d2[end] end info("IO::Array::#data=$(length(data)),#label=$(length(label)),batch_size=$batch_size") From b49dc626b0bd60953b4cc1c0a78129e6d0b0c342 Mon Sep 17 00:00:00 2001 From: = <=> Date: Mon, 9 Nov 2015 19:32:40 -0500 Subject: [PATCH 192/630] update doc (Symbol -> SymbolicNode) --- docs/api/io.rst | 4 +- docs/api/symbolic-node.rst | 112 ++++++++++++++++++------------------- src/symbolic-node.jl | 2 +- src/util.jl | 5 +- 4 files changed, 63 insertions(+), 60 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index e9d9c04e9f3c..6c9d71836108 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -317,7 +317,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -460,7 +460,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :return: the constructed :class:`MXDataProvider`. 
diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index bead11843799..f811d41aaad5 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -36,13 +36,13 @@ Public APIs Apply activation function to input. :param data: Input data to activation function. - :type data: Symbol + :type data: SymbolicNode :param act_type: Activation function to be applied. :type act_type: {'relu', 'sigmoid', 'tanh'}, required - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -55,7 +55,7 @@ Public APIs Apply batch normalization to input. :param data: Input data to batch normalization - :type data: Symbol + :type data: SymbolicNode :param eps: Epsilon to prevent div 0 @@ -65,7 +65,7 @@ Public APIs :param momentum: Momentum for moving average :type momentum: float, optional, default=0.1 - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -78,9 +78,9 @@ Public APIs Get output from a symbol and pass 0 gradient back :param data: Input data. - :type data: Symbol + :type data: SymbolicNode - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -97,7 +97,7 @@ Public APIs :param num_args: Number of inputs to be concated. :type num_args: int, required - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. 
@@ -110,15 +110,15 @@ Public APIs Apply convolution to input then add a bias. :param data: Input data to the ConvolutionOp. - :type data: Symbol + :type data: SymbolicNode :param weight: Weight matrix. - :type weight: Symbol + :type weight: SymbolicNode :param bias: Bias parameter. - :type bias: Symbol + :type bias: SymbolicNode :param kernel: convolution kernel size: (y, x) @@ -148,7 +148,7 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=False - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -161,15 +161,15 @@ Public APIs Apply deconvolution to input then add a bias. :param data: Input data to the DeconvolutionOp. - :type data: Symbol + :type data: SymbolicNode :param weight: Weight matrix. - :type weight: Symbol + :type weight: SymbolicNode :param bias: Bias parameter. - :type bias: Symbol + :type bias: SymbolicNode :param kernel: deconvolution kernel size: (y, x) @@ -199,7 +199,7 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=True - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -212,13 +212,13 @@ Public APIs Apply dropout to input :param data: Input data to dropout. - :type data: Symbol + :type data: SymbolicNode :param p: Fraction of the input that gets dropped out at training time :type p: float, optional, default=0.5 - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. 
@@ -235,7 +235,7 @@ Public APIs :param num_args: Number of inputs to be sumed. :type num_args: int, required - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -248,9 +248,9 @@ Public APIs Flatten input :param data: Input data to flatten. - :type data: Symbol + :type data: SymbolicNode - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -263,15 +263,15 @@ Public APIs Apply matrix multiplication to input then add a bias. :param data: Input data to the FullyConnectedOp. - :type data: Symbol + :type data: SymbolicNode :param weight: Weight matrix. - :type weight: Symbol + :type weight: SymbolicNode :param bias: Bias parameter. - :type bias: Symbol + :type bias: SymbolicNode :param num_hidden: Number of hidden nodes of the output. @@ -281,7 +281,7 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=False - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -294,7 +294,7 @@ Public APIs Apply convolution to input then add a bias. :param data: Input data to the ConvolutionOp. - :type data: Symbol + :type data: SymbolicNode :param alpha: value of the alpha variance scaling parameter in the normalization formula @@ -312,7 +312,7 @@ Public APIs :param nsize: normalization window width in elements. :type nsize: int (non-negative), required - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. 
+ :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -325,7 +325,7 @@ Public APIs Apply activation function to input. :param data: Input data to activation function. - :type data: Symbol + :type data: SymbolicNode :param act_type: Activation function to be applied. @@ -343,7 +343,7 @@ Public APIs :param upper_bound: Upper bound of random slope. (For rrelu only) :type upper_bound: float, optional, default=0.334 - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -356,13 +356,13 @@ Public APIs Use linear regression for final output, this is used on final output of a net. :param data: Input data to function. - :type data: Symbol + :type data: SymbolicNode :param label: Input label to function. - :type label: Symbol + :type label: SymbolicNode - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -376,13 +376,13 @@ Public APIs Logistic regression is suitable for binary classification or probability prediction tasks. :param data: Input data to function. - :type data: Symbol + :type data: SymbolicNode :param label: Input label to function. - :type label: Symbol + :type label: SymbolicNode - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -395,7 +395,7 @@ Public APIs Perform spatial pooling on inputs. :param data: Input data to the pooling operator. 
- :type data: Symbol + :type data: SymbolicNode :param kernel: pooling kernel size: (y, x) @@ -413,7 +413,7 @@ Public APIs :param pad: pad for pooling: (y, x) :type pad: Shape(tuple), optional, default=(0, 0) - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -426,13 +426,13 @@ Public APIs Reshape input to target shape :param data: Input data to reshape. - :type data: Symbol + :type data: SymbolicNode :param target_shape: Target new shape :type target_shape: Shape(tuple), required - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -447,7 +447,7 @@ Public APIs :param num_outputs: Number of outputs to be sliced. :type num_outputs: int, required - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -460,7 +460,7 @@ Public APIs DEPRECATED: Perform a softmax transformation on input. Please use SoftmaxOutput :param data: Input data to softmax. - :type data: Symbol + :type data: SymbolicNode :param grad_scale: Scale the gradient by a float factor @@ -470,7 +470,7 @@ Public APIs :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. 
@@ -483,7 +483,7 @@ Public APIs Perform a softmax transformation on input, backprop with logloss. :param data: Input data to softmax. - :type data: Symbol + :type data: SymbolicNode :param grad_scale: Scale the gradient by a float factor @@ -493,7 +493,7 @@ Public APIs :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -506,9 +506,9 @@ Public APIs Take exp of the src :param src: Source symbolic input to the function - :type src: Symbol + :type src: SymbolicNode - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -521,9 +521,9 @@ Public APIs Take log of the src :param src: Source symbolic input to the function - :type src: Symbol + :type src: SymbolicNode - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -536,9 +536,9 @@ Public APIs Take sqrt of the src :param src: Source symbolic input to the function - :type src: Symbol + :type src: SymbolicNode - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. 
@@ -551,9 +551,9 @@ Public APIs Take square of the src :param src: Source symbolic input to the function - :type src: Symbol + :type src: SymbolicNode - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -571,7 +571,7 @@ Internal APIs Perform an elementwise div. - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -583,7 +583,7 @@ Internal APIs Perform an elementwise minus. - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -595,7 +595,7 @@ Internal APIs Perform an elementwise mul. - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -614,7 +614,7 @@ Internal APIs :param need_top_grad: Whether this layer needs out grad for backward. Should be false for loss layers. :type need_top_grad: boolean, optional, default=True - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -626,7 +626,7 @@ Internal APIs Perform an elementwise plus. - :param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional. + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. 
diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 6258810b0a34..7f993c7491b5 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -302,7 +302,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) f_desc *= "This function support variable length positional :class:`SymbolicNode` inputs.\n\n" end f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param Base.Symbol name: The name of the SymbolicNode. (e.g. `:my_symbol`), optional.\n\n" + f_desc *= ":param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional.\n\n" f_desc *= ":return: the constructed :class:`SymbolicNode`.\n\n" return (func_name, f_desc) end diff --git a/src/util.jl b/src/util.jl index 4ca613cbf7d1..be27d1e2310c 100644 --- a/src/util.jl +++ b/src/util.jl @@ -47,6 +47,9 @@ end # Internal Utilities ################################################################################ const DOC_EMBED_ANCHOR = "**autogen:EMBED:{1}:EMBED:autogen**" +function _format_typestring(typestr :: AbstractString) + replace(typestr, r"\bSymbol\b", "SymbolicNode") +end function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{char_pp}, arg_descs::Ref{char_pp}, remove_dup::Bool=true) param_keys = Set{AbstractString}() @@ -62,7 +65,7 @@ function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{ch end push!(param_keys, arg_name) - arg_type = bytestring(arg_types[i]) + arg_type = _format_typestring(bytestring(arg_types[i])) arg_desc = bytestring(arg_descs[i]) push!(docstrings, ":param $arg_name: $arg_desc\n:type $arg_name: $arg_type\n\n") end From 591871cd0cbf0e411a7d16f5a05b80f34e92bc7b Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 9 Nov 2015 23:59:20 -0500 Subject: [PATCH 193/630] add doc for metrics --- docs/api/io.rst | 4 ++-- docs/api/metric.rst | 46 +++++++++++++++++++++++++++++++++++++++++++++ docs/build-api.jl | 1 + docs/index.rst | 1 + 
src/metric.jl | 43 ++++++++++++++++++++++++++++++++++++++++++ 5 files changed, 93 insertions(+), 2 deletions(-) create mode 100644 docs/api/metric.rst diff --git a/docs/api/io.rst b/docs/api/io.rst index 6c9d71836108..e9d9c04e9f3c 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -317,7 +317,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -460,7 +460,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :return: the constructed :class:`MXDataProvider`. diff --git a/docs/api/metric.rst b/docs/api/metric.rst new file mode 100644 index 000000000000..db18ae731a59 --- /dev/null +++ b/docs/api/metric.rst @@ -0,0 +1,46 @@ + +Evaluation Metrics +================== + +Evaluation metrics provide a way to evaluate the performance of a learned model. +This is typically used during training to monitor performance on the validation +set. + + + + +.. class:: AbstractEvalMetric + + The base class for all evaluation metrics. The sub-types should implement the following + interfaces. + + .. function:: update!(metric, labels, preds) + + Update and accumulate metrics. + + :param AbstractEvalMetric metric: the metric object. + :param labels: the labels from the data provider. + :type labels: Vector{NDArray} + :param preds: the outputs (predictions) of the network. + :type preds: Vector{NDArray} + + .. function:: reset!(metric) + + Reset the accumulation counter. + + .. function:: get(metric) + + Get the accumulated metrics. + + :return: ``Vector{Tuple{Base.Symbol, Real}}``, a list of name-value pairs. For + example, ``[(:accuracy, 0.9)]``. + + + + +.. 
class:: Accuracy + + Multiclass classification accuracy. + + + diff --git a/docs/build-api.jl b/docs/build-api.jl index d49996a24cb3..99ae5ad1c709 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -89,6 +89,7 @@ extract_doc("callback.rst", "callback.jl") extract_doc("model.rst", "model.jl") extract_doc("optimizer.rst", "optimizer.jl") +extract_doc("metric.rst", "metric.jl") extract_doc("io.rst", "io.jl") embed_mxnet_api("io.rst", "io", mx._import_io_iterators) diff --git a/docs/index.rst b/docs/index.rst index c10f44780a2b..da28e3080440 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -36,6 +36,7 @@ For more details, see documentation below. Please also checkout the `examples api/initializer api/optimizer api/callback + api/metric api/io api/ndarray api/symbolic-node diff --git a/src/metric.jl b/src/metric.jl index c88239ab8cc8..73e4b9a30a7b 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -1,5 +1,46 @@ +#=doc +Evaluation Metrics +================== + +Evaluation metrics provide a way to evaluate the performance of a learned model. +This is typically used during training to monitor performance on the validation +set. +=# + +#=doc +.. class:: AbstractEvalMetric + + The base class for all evaluation metrics. The sub-types should implement the following + interfaces. + + .. function:: update!(metric, labels, preds) + + Update and accumulate metrics. + + :param AbstractEvalMetric metric: the metric object. + :param labels: the labels from the data provider. + :type labels: Vector{NDArray} + :param preds: the outputs (predictions) of the network. + :type preds: Vector{NDArray} + + .. function:: reset!(metric) + + Reset the accumulation counter. + + .. function:: get(metric) + + Get the accumulated metrics. + + :return: ``Vector{Tuple{Base.Symbol, Real}}``, a list of name-value pairs. For + example, ``[(:accuracy, 0.9)]``. +=# abstract AbstractEvalMetric +#=doc +.. class:: Accuracy + + Multiclass classification accuracy. 
+=# type Accuracy <: AbstractEvalMetric acc_sum :: Float64 n_sample :: Int @@ -35,3 +76,5 @@ function reset!(metric :: Accuracy) metric.acc_sum = 0.0 metric.n_sample = 0 end + + From 2b85c9caf5ee5621308fb8c57976fb26baa35418 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 10 Nov 2015 00:29:20 -0500 Subject: [PATCH 194/630] share NDArray data with Julia Array. --- src/ndarray.jl | 53 +++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 52 insertions(+), 1 deletion(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 82293d2fdbe9..eceabb3eb5be 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -368,13 +368,64 @@ end #=doc .. function:: convert(::Type{Array{T}}, arr :: NDArray) - Convert an :class:`NDArray` into a Julia ``Array`` of specific type. + Convert an :class:`NDArray` into a Julia ``Array`` of specific type. Data will be copied. =# # Convert copy: NDArray -> Julia Array function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) convert(t, copy(arr)) end +# NOTE: internal use only. Accessing pointers on a different device (e.g. accessing GPU +# pointers from CPU) leads to undefined behavior. +import Base.pointer +function pointer(arr :: NDArray) + pdata = Ref{Ptr{MX_float}}(0) + @mxcall(:MXNDArrayGetData, (MX_handle, Ref{Ptr{MX_float}}), arr, pdata) + return pdata[] +end +#=doc +.. function:: try_get_shared(arr) + + Try to create a Julia array by sharing the data with the underlying :class:`NDArray`. + + :param NDArray arr: the array to be shared. + + .. warning:: + + The returned array does not guarantee to share data with the underlying :class:`NDArray`. + In particular, data sharing is possible only when the :class:`NDArray` lives on CPU. +=# +function try_get_shared(arr :: NDArray) + if context(arr).device_type == CPU + # try to do data sharing + vec = pointer_to_array(pointer(arr), length(arr)) + return reshape(vec, size(arr)) + else + # impossible to share, just copying + return copy(arr) + end +end + +#=doc +.. 
function:: is_shared(j_arr, arr) + + Test whether ``j_arr`` is sharing data with ``arr``. + + :param Array j_arr: the Julia Array. + :param NDArray arr: the :class:`NDArray`. +=# +function is_shared{T}(j_arr :: Array{T}, arr :: NDArray) + false +end +function is_shared(j_arr :: Array{MX_float}, arr :: NDArray) + if length(j_arr) != length(arr) + return false + end + if context(arr).device_type != CPU + return false + end + return pointer(j_arr) == pointer(arr) +end #=doc Basic arithmetics From 2055727383eb3d135b2619eab719687c4afd83a0 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 10 Nov 2015 01:41:10 -0500 Subject: [PATCH 195/630] nd_as_jl macro --- src/ndarray.jl | 212 +++++++++++++++++++++++++++++++++++++------------ 1 file changed, 160 insertions(+), 52 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index eceabb3eb5be..9fc86002fcad 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -375,58 +375,6 @@ function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) convert(t, copy(arr)) end -# NOTE: internal use only. Accessing pointers on a different device (e.g. accessing GPU -# pointers from CPU) leads to undefined behavior. -import Base.pointer -function pointer(arr :: NDArray) - pdata = Ref{Ptr{MX_float}}(0) - @mxcall(:MXNDArrayGetData, (MX_handle, Ref{Ptr{MX_float}}), arr, pdata) - return pdata[] -end -#=doc -.. function:: try_get_shared(arr) - - Try to create a Julia array by sharing the data with the underlying :class:`NDArray`. - - :param NDArray arr: the array to be shared. - - .. warning:: - - The returned array does not guarantee to share data with the underlying :class:`NDArray`. - In particular, data sharing is possible only when the :class:`NDArray` lives on CPU. 
-=# -function try_get_shared(arr :: NDArray) - if context(arr).device_type == CPU - # try to do data sharing - vec = pointer_to_array(pointer(arr), length(arr)) - return reshape(vec, size(arr)) - else - # impossible to share, just copying - return copy(arr) - end -end - -#=doc -.. function:: is_shared(j_arr, arr) - - Test whether ``j_arr`` is sharing data with ``arr``. - - :param Array j_arr: the Julia Array. - :param NDArray arr: the :class:`NDArray`. -=# -function is_shared{T}(j_arr :: Array{T}, arr :: NDArray) - false -end -function is_shared(j_arr :: Array{MX_float}, arr :: NDArray) - if length(j_arr) != length(arr) - return false - end - if context(arr).device_type != CPU - return false - end - return pointer(j_arr) == pointer(arr) -end - #=doc Basic arithmetics ----------------- @@ -634,6 +582,166 @@ function /(arg0 :: NDArray, arg :: Real) ./(arg0, arg) end + +#=doc +Manipulating as Julia Arrays +---------------------------- + +.. function:: @nd_as_jl(captures..., statement) + + A convenient macro that allows to operate :class:`NDArray` as Julia Arrays. For example, + + .. code-block:: julia + + x = mx.zeros(3,4) + y = mx.ones(3,4) + z = mx.zeros((3,4), mx.gpu()) + + @mx.nd_as_jl ro=(x,y) rw=z begin + # now x, y, z are just ordinary Julia Arrays + z[:,1] = y[:,2] + z[:,2] = 5 + end + + Under the hood, the macro convert all the declared captures from :class:`NDArray` into Julia + Arrays, by using :func:`try_get_shared`. And automatically commit the modifications back into + the :class:`NDArray` that is declared as ``rw``. This is useful for fast prototyping and when + implement non-critical computations, such as :class:`AbstractEvalMetric`. + + .. note:: + + - Multiple ``rw`` and / or ``ro`` capture declaration could be made. + - The macro does **not** check to make sure that ``ro`` captures are not modified. 
If the + original :class:`NDArray` lives in CPU memory, then it is very likely the corresponding + Julia Array shares data with the :class:`NDArray`, so modifying the Julia Array will also + modify the underlying :class:`NDArray`. + - When an :class:`NDArray` is declared to be captured as ``rw``, its contents is always sync + back in the end. + - The execution results of the expanded macro is always ``nothing``. + - The statements are wrapped in a ``let``, thus locally introduced new variables will not be + available after the statements. So you will need to declare the variables before calling the + macro if needed. +=# +macro nd_as_jl(m_args...) + @assert(length(m_args) > 0) + stmts = m_args[end] + @assert(isa(stmts, Expr) && stmts.head == :block, + "The last argument should be a statement block (begin-end); but get $stmts") + stmts = esc(stmts) + + dclrs = m_args[1:end-1] + nd_ro = [] + nd_rw = [] + nd_all = [] + for declr in dclrs + @assert(isa(declr, Expr) && declr.head == :(=) && length(declr.args)==2 && declr.args[1] ∈ (:ro,:rw), + "Invalid declaration, should be rw=(x,y) or ro=z; but get $declr") + + declr_vars = declr.args[2] + if isa(declr_vars, Symbol) + declr_vars = (declr_vars,) + elseif isa(declr_vars, Expr) + @assert(declr_vars.head ∈ (:tuple, :vect), + "Capture declaration should be a variable or a tuple of variables; but got $declr_vars") + declr_vars = declr_vars.args + else + @assert(false, "Capture declaration should be a variable or a tuple of variables; but got $declr_vars") + end + for declr_var in declr_vars + @assert(isa(declr_var, Symbol), + "Captured ndarrays in ro/rw declaration should be variables, but get $(declr_var)") + end + append!(nd_all, [declr_vars...]) + if declr.args[1] == :ro + append!(nd_ro, [declr_vars...]) + else + append!(nd_rw, [declr_vars...]) + end + end + + nd_ro = map(esc, nd_ro) + nd_rw = map(esc, nd_rw) + nd_all = map(esc, nd_all) + rw_origs = [gensym() for _ in nd_rw] + + save_statements = Expr(:block, [:($v_orig 
= $v) for (v_orig, v) in zip(rw_origs, nd_rw)]...) + clear_statements = Expr(:block, [:($v_orig = nothing) for v_orig in rw_origs]...) + let_assignments = [:($v = try_get_shared($v)) for v in nd_all] + sync_statements = map(rw_origs, nd_rw) do v_orig, v + quote + if !is_shared($v, $v_orig) + # copy data back if not or no longer sharing data + copy!($v_orig, $v) + end + end + end + sync_statements = Expr(:block, sync_statements...) + + let_statement = Expr(:let, quote + $sync_statements + end, let_assignments...) + m_body = quote + $save_statements + $let_statement + $clear_statements + nothing # the final results is always nothing + end + + m_body +end + +# NOTE: internal use only. Accessing pointers on a different device (e.g. accessing GPU +# pointers from CPU) leads to undefined behavior. +import Base.pointer +function pointer(arr :: NDArray) + pdata = Ref{Ptr{MX_float}}(0) + @mxcall(:MXNDArrayGetData, (MX_handle, Ref{Ptr{MX_float}}), arr, pdata) + return pdata[] +end +#=doc +.. function:: try_get_shared(arr) + + Try to create a Julia array by sharing the data with the underlying :class:`NDArray`. + + :param NDArray arr: the array to be shared. + + .. warning:: + + The returned array does not guarantee to share data with the underlying :class:`NDArray`. + In particular, data sharing is possible only when the :class:`NDArray` lives on CPU. +=# +function try_get_shared(arr :: NDArray) + if context(arr).device_type == CPU + # try to do data sharing + vec = pointer_to_array(pointer(arr), length(arr)) + return reshape(vec, size(arr)) + else + # impossible to share, just copying + return copy(arr) + end +end + +#=doc +.. function:: is_shared(j_arr, arr) + + Test whether ``j_arr`` is sharing data with ``arr``. + + :param Array j_arr: the Julia Array. + :param NDArray arr: the :class:`NDArray`. 
+=# +function is_shared{T}(j_arr :: Array{T}, arr :: NDArray) + false +end +function is_shared(j_arr :: Array{MX_float}, arr :: NDArray) + if length(j_arr) != length(arr) + return false + end + if context(arr).device_type != CPU + return false + end + return pointer(j_arr) == pointer(arr) +end + #=doc IO -- From ea90b55aef0385134018634473c63434fdffffdb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 10 Nov 2015 02:24:50 -0500 Subject: [PATCH 196/630] use nd_as_jl to re-write accuracy --- docs/api/ndarray.rst | 71 +++++++++++++++++++++++++++++++++++++++++++- src/metric.jl | 15 +++++----- src/ndarray.jl | 15 ++++++++++ 3 files changed, 92 insertions(+), 9 deletions(-) diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index 05a3dccba7dc..270d85bab837 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -178,7 +178,7 @@ Copying functions .. function:: convert(::Type{Array{T}}, arr :: NDArray) - Convert an :class:`NDArray` into a Julia ``Array`` of specific type. + Convert an :class:`NDArray` into a Julia ``Array`` of specific type. Data will be copied. @@ -296,6 +296,75 @@ Basic arithmetics +Manipulating as Julia Arrays +---------------------------- + +.. function:: @nd_as_jl(captures..., statement) + + A convenient macro that allows to operate :class:`NDArray` as Julia Arrays. For example, + + .. code-block:: julia + + x = mx.zeros(3,4) + y = mx.ones(3,4) + z = mx.zeros((3,4), mx.gpu()) + + @mx.nd_as_jl ro=(x,y) rw=z begin + # now x, y, z are just ordinary Julia Arrays + z[:,1] = y[:,2] + z[:,2] = 5 + end + + Under the hood, the macro convert all the declared captures from :class:`NDArray` into Julia + Arrays, by using :func:`try_get_shared`. And automatically commit the modifications back into + the :class:`NDArray` that is declared as ``rw``. This is useful for fast prototyping and when + implement non-critical computations, such as :class:`AbstractEvalMetric`. + + .. 
note:: + + - Multiple ``rw`` and / or ``ro`` capture declaration could be made. + - The macro does **not** check to make sure that ``ro`` captures are not modified. If the + original :class:`NDArray` lives in CPU memory, then it is very likely the corresponding + Julia Array shares data with the :class:`NDArray`, so modifying the Julia Array will also + modify the underlying :class:`NDArray`. + - More importantly, since the :class:`NDArray` is + asynchronized, we will wait for *writing* for ``rw`` variables but wait only for *reading* + in ``ro`` variables. If we write into those ``ro`` variables, **and** if the memory is + shared, racing condition might happen, and the behavior is undefined. + - When an :class:`NDArray` is declared to be captured as ``rw``, its contents is always sync + back in the end. + - The execution results of the expanded macro is always ``nothing``. + - The statements are wrapped in a ``let``, thus locally introduced new variables will not be + available after the statements. So you will need to declare the variables before calling the + macro if needed. + + + + +.. function:: try_get_shared(arr) + + Try to create a Julia array by sharing the data with the underlying :class:`NDArray`. + + :param NDArray arr: the array to be shared. + + .. warning:: + + The returned array does not guarantee to share data with the underlying :class:`NDArray`. + In particular, data sharing is possible only when the :class:`NDArray` lives on CPU. + + + + +.. function:: is_shared(j_arr, arr) + + Test whether ``j_arr`` is sharing data with ``arr``. + + :param Array j_arr: the Julia Array. + :param NDArray arr: the :class:`NDArray`. 
+ + + + IO -- diff --git a/src/metric.jl b/src/metric.jl index 73e4b9a30a7b..297aee384079 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -49,14 +49,13 @@ type Accuracy <: AbstractEvalMetric end function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) - label = copy(label) - pred = copy(pred) - - n_sample = size(pred)[end] - metric.n_sample += n_sample - for i = 1:n_sample - klass = indmax(pred[:,i]) - metric.acc_sum += (klass-1) == label[i] + @nd_as_jl ro=(label,pred) begin + n_sample = size(pred)[end] + metric.n_sample += n_sample + for i = 1:n_sample + klass = indmax(pred[:,i]) + metric.acc_sum += (klass-1) == label[i] + end end end diff --git a/src/ndarray.jl b/src/ndarray.jl index 9fc86002fcad..e477bf6fd2f4 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -615,6 +615,10 @@ Manipulating as Julia Arrays original :class:`NDArray` lives in CPU memory, then it is very likely the corresponding Julia Array shares data with the :class:`NDArray`, so modifying the Julia Array will also modify the underlying :class:`NDArray`. + - More importantly, since the :class:`NDArray` is + asynchronized, we will wait for *writing* for ``rw`` variables but wait only for *reading* + in ``ro`` variables. If we write into those ``ro`` variables, **and** if the memory is + shared, racing condition might happen, and the behavior is undefined. - When an :class:`NDArray` is declared to be captured as ``rw``, its contents is always sync back in the end. - The execution results of the expanded macro is always ``nothing``. @@ -665,6 +669,8 @@ macro nd_as_jl(m_args...) rw_origs = [gensym() for _ in nd_rw] save_statements = Expr(:block, [:($v_orig = $v) for (v_orig, v) in zip(rw_origs, nd_rw)]...) + wait_statements = Expr(:block, [:(_wait_to_read($v)) for v in nd_ro]..., + [:(_wait_to_write($v)) for v in nd_rw]...) clear_statements = Expr(:block, [:($v_orig = nothing) for v_orig in rw_origs]...) 
let_assignments = [:($v = try_get_shared($v)) for v in nd_all] sync_statements = map(rw_origs, nd_rw) do v_orig, v @@ -678,9 +684,11 @@ macro nd_as_jl(m_args...) sync_statements = Expr(:block, sync_statements...) let_statement = Expr(:let, quote + $stmts $sync_statements end, let_assignments...) m_body = quote + $wait_statements $save_statements $let_statement $clear_statements @@ -698,6 +706,13 @@ function pointer(arr :: NDArray) @mxcall(:MXNDArrayGetData, (MX_handle, Ref{Ptr{MX_float}}), arr, pdata) return pdata[] end +function _wait_to_read(arr :: NDArray) + @mxcall(:MXNDArrayWaitToRead, (MX_handle,), arr) +end +function _wait_to_write(arr :: NDArray) + @mxcall(:MXNDArrayWaitToWrite, (MX_handle,), arr) +end + #=doc .. function:: try_get_shared(arr) From b5c0d96af18c9ea341ade0b714a1965c6776fa42 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 11 Nov 2015 13:00:37 +0900 Subject: [PATCH 197/630] reformultate accuracy with multi_output in mind --- src/metric.jl | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 297aee384079..12860a5ae0d9 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -50,11 +50,27 @@ end function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) @nd_as_jl ro=(label,pred) begin - n_sample = size(pred)[end] - metric.n_sample += n_sample - for i = 1:n_sample - klass = indmax(pred[:,i]) - metric.acc_sum += (klass-1) == label[i] + if ndims(label) > 1 # Multidimensional case + # Construct cartesian index + initial = tuple([1 for _ in 1:ndims(label)-1]...) + final = tuple([size(label, i) for i in 1:ndims(label)-1]...) 
+ crange = CartesianRange(CartesianIndex(initial), CartesianIndex(final)) + + for sample in 1:size(label, ndims(label)) + for i in crange + ps = sub(pred, i.I..., :, sample) + klass = indmax(ps) + metric.acc_sum += (klass-1) == label[i.I..., sample] + metric.n_sample += 1 + end + end + else # 1-dimensional case + for sample in 1:size(label, 1) + ps = sub(pred, :, sample) + klass = indmax(ps) + metric.acc_sum += (klass-1) == label[sample] + metric.n_sample += 1 + end end end end From 8322b6337f32dbcfe597f6ba95e0d369490eeb3d Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 11 Nov 2015 14:19:13 +0900 Subject: [PATCH 198/630] Calculate accuracy based on size of pred. The label array is reshaped and misses the information about the size of the first few dimensions. --- src/metric.jl | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 12860a5ae0d9..aed5ca0fede6 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -50,17 +50,18 @@ end function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) @nd_as_jl ro=(label,pred) begin - if ndims(label) > 1 # Multidimensional case + if ndims(pred) > 2 # Multidimensional case # Construct cartesian index - initial = tuple([1 for _ in 1:ndims(label)-1]...) - final = tuple([size(label, i) for i in 1:ndims(label)-1]...) - crange = CartesianRange(CartesianIndex(initial), CartesianIndex(final)) + initial = tuple(fill(1, ndims(pred)-2)...) + dims = size(pred, (1:ndims(pred)-2)...) + crange = CartesianRange(CartesianIndex(initial), CartesianIndex(dims)) for sample in 1:size(label, ndims(label)) for i in crange + l_i = sub2ind(dims, i.I...) 
ps = sub(pred, i.I..., :, sample) klass = indmax(ps) - metric.acc_sum += (klass-1) == label[i.I..., sample] + metric.acc_sum += (klass-1) == label[l_i, sample] metric.n_sample += 1 end end From c30097172a54fffa8d21185bd2c7b7d3ce8bf477 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 11 Nov 2015 14:37:02 +0900 Subject: [PATCH 199/630] remove sub from accuracy --- src/metric.jl | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index aed5ca0fede6..8d5838aff4ba 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -48,27 +48,44 @@ type Accuracy <: AbstractEvalMetric Accuracy() = new(0.0, 0) end +""" +Implementation taken from findmax in Julia base. +Searches for the maximum value in p_dim of a. +I and n are values for the other dimensions. +""" +function _indmax(a, I, p_dim, n) + m = a[I..., 1, n] + mi = 1 + for i in 2:size(a, p_dim) + ai = a[I..., i, n] + if ai > m || m!=m + m = ai + mi = i + end + end + return mi +end + function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) @nd_as_jl ro=(label,pred) begin if ndims(pred) > 2 # Multidimensional case # Construct cartesian index - initial = tuple(fill(1, ndims(pred)-2)...) - dims = size(pred, (1:ndims(pred)-2)...) + p_dim = ndims(pred)-1 + initial = tuple(fill(1,p_dim-1)...) + dims = size(pred, (1:p_dim-1)...) crange = CartesianRange(CartesianIndex(initial), CartesianIndex(dims)) for sample in 1:size(label, ndims(label)) for i in crange l_i = sub2ind(dims, i.I...) 
- ps = sub(pred, i.I..., :, sample) - klass = indmax(ps) + klass = _indmax(pred, i.I, p_dim, sample) metric.acc_sum += (klass-1) == label[l_i, sample] metric.n_sample += 1 end end else # 1-dimensional case for sample in 1:size(label, 1) - ps = sub(pred, :, sample) - klass = indmax(ps) + klass = indmax(pred[:, sample]) metric.acc_sum += (klass-1) == label[sample] metric.n_sample += 1 end From d51d2af73a23ebbed7488a479a9be44261e31972 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 11 Nov 2015 14:44:23 +0900 Subject: [PATCH 200/630] add comments for accuracy and rebuild docs --- docs/api/io.rst | 4 ++-- docs/api/metric.rst | 3 +++ docs/api/symbolic-node.rst | 23 +++++++++++++++++++++++ src/metric.jl | 3 +++ 4 files changed, 31 insertions(+), 2 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index e9d9c04e9f3c..6c9d71836108 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -317,7 +317,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -460,7 +460,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :return: the constructed :class:`MXDataProvider`. diff --git a/docs/api/metric.rst b/docs/api/metric.rst index db18ae731a59..5f13bd7006c3 100644 --- a/docs/api/metric.rst +++ b/docs/api/metric.rst @@ -42,5 +42,8 @@ set. Multiclass classification accuracy. + Calculates the mean accuracy per sample for softmax in one dimension. + For a multi-dimensional softmax the mean accuracy over all dimensions is calculated. 
+ diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index f811d41aaad5..6106b54e6a8f 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -501,6 +501,29 @@ Public APIs +.. function:: SwapAxis(...) + + Apply swapaxis to input. + + :param data: Input data to the SwapAxisOp. + :type data: SymbolicNode + + + :param dim1: the first axis to be swapped. + :type dim1: int (non-negative), optional, default=0 + + + :param dim2: the second axis to be swapped. + :type dim2: int (non-negative), optional, default=0 + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: exp(...) Take exp of the src diff --git a/src/metric.jl b/src/metric.jl index 8d5838aff4ba..3f35e7455ac5 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -40,6 +40,9 @@ abstract AbstractEvalMetric .. class:: Accuracy Multiclass classification accuracy. + + Calculates the mean accuracy per sample for softmax in one dimension. + For a multi-dimensional softmax the mean accuracy over all dimensions is calculated. 
=# type Accuracy <: AbstractEvalMetric acc_sum :: Float64 From 4362525754175dc095a1f6ac9e42a4a4276dee08 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 11 Nov 2015 09:03:36 -0500 Subject: [PATCH 201/630] add nd_as_jl unit test --- test/unittest/ndarray.jl | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index cfab1dea0d86..b7208f532222 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -239,6 +239,27 @@ function test_sqrt() @test reldiff(copy(sqrt_ed), sqrt(j_array)) < 1e-6 end +function test_nd_as_jl() + dims = (2,3) + info("NDArray::nd_as_jl::dims = $dims") + + x = mx.zeros(dims) + 5 + y = mx.ones(dims) + z = mx.zeros(dims) + @mx.nd_as_jl ro=x rw=(y,z) begin + for i = 1:length(z) + z[i] = x[i] + end + + z[:,1] = y[:,1] + y[:] = 0 + end + + @test reldiff(copy(y), 0) < 1e-6 + @test reldiff(copy(z)[:,1], 1) < 1e-6 + @test reldiff(copy(z)[:,2:end], copy(x)[:,2:end]) < 1e-6 +end + ################################################################################ # Run tests @@ -254,5 +275,6 @@ test_gd() test_saveload() test_clip() test_sqrt() +test_nd_as_jl() end From e8846927c3fb0b6585dcd777e5cc0f2b2ae448ad Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 11 Nov 2015 09:50:05 -0500 Subject: [PATCH 202/630] update ipython notebook BGR -> RGB --- .../Prediction with Pre-trained Model.ipynb | 40 +++++++++++++------ .../ijulia-pretrained-predict/imagehelper.py | 4 +- 2 files changed, 31 insertions(+), 13 deletions(-) diff --git a/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb b/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb index 9f4f2b8cb1d3..2d5d0ee7dd97 100644 --- a/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb +++ b/examples/imagenet/ijulia-pretrained-predict/Prediction with Pre-trained Model.ipynb @@ -32,7 +32,7 @@ "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAIAAADTED8xAAAABGdBTUEAALGPC/xhBQAAACBjSFJNAAB6JgAAgIQAAPoAAACA6AAAdTAAAOpgAAA6mAAAF3CculE8AAAABmJLR0QA/wD/AP+gvaeTAACAAElEQVR42kz9WbMlW5Iehn2fu68VEXvvM+Rw761b062qnrvR6AEQ0E0QFKwhoQEZZaIJRsr0Jj3pp0lmepBkMhpkFCROMooQSKoJoNlo9Dygpjtk5hn23hHLBz3EqTblQ+bLSTvn7Fix3P3zb+DP/U9/qoCqqqokACooWUKvKgAAyBIRsCiCirElAEq6+/UyMiUyPSkcFS5Q640ssFTVtInSzJp1VbVGterWtFHEzEzVWmu9NxZEVESaqFes6+rbtl7Wh/PlPDxHXS7rdh1mUBMRoWYyqiqVkIJE+NbUjktvXU0BoKois0pGcBtjG+eQzCoiREwhDBYsM0UUZSwzQtRTQsSEzVAiFE6aEu4+MlE+mLGum29rVWl4RaUqp25tMumhjdKMEpTUkggSHSVjBJJSQKKqSI1yEds/56oiCSCGswBRjwIYESwgSZKNKMnMQkQEsjpbpQ5eqkA0k9Zm2Ak2yWGal/vj6aDWaK1VAUlElqa7RxYA602WzlYAVBWzMPH84cPT0/Pzh3j66vr41Xl93lpYh0DUEResAjW3QFW2TAeTYelRGd2gNMmV1CKSSTKraKomkFJJEgWSWpFVJaZZqhRtUgabOU2qk8mxlt570zZTuxYUWSJ0VG7p11zP5WtcnmI85vXpcn5cc5QEkUUVEckYIEOAEhdXVRYAj0pInW4WA1nlBBRAIVkKFuv/7/STRGaKElWAqO2PSsysNdk2z0ypQAGwIodfqwpFFXELUsnRe7Qu5qLKTVZrS+9wC7PIjHRvrYmUmSWILIWMRHjBBV6+jYrM9AiqvRwXkACYSBYCqo1KL5R7gEKrQlaG59jgGVVUqAmTopBKzUJVAXRPsFiRKCUr2JUiBZFwAshaK9KTYwt3j+DY6IOZWZWUMDURUBIoVQMplQJmIgMsJ01gxUIlBCgWIKUsJIqkCCK8igUBQQBAugupZhAACEkgCFRCTEQks3JdWzbARoZzVaiMRhXv8PUS00JVRlqjNQune7XWpmWSqQl4mOab+xvtssErY13Xeep9Xpo9qzJrOEdewpGwikqLjtLIzGCKE0RaFSlgwN2LyQQ0IGw2mRC10kKkxIRUFoogWRRPFFUoFAkUSBGBEAAiX+5fQMFKRCSSiKqI9GLKGJHuompmbapg1UhEJgFCzbwSqCREpJiQMhUmgNTeLJlEGcnKzNR6uYtIxU8qw/4vQAAiAiAiqgoQkTQzkh6XcCEE2IAkpQr7J0Qqihs8oqypmbWmqKiqXobKykhjlbQGCJCRsX/nikowI0ZERQRJsCICAPny46kZUFCYNey1CclilYCZhQRHRAYLBEWVRFWgEkWiKjNIhoeaRQ4CfTJVMc3wyCHliYoq+tAIjjVHSIxKR0FEoSyiRAvw/eQyq1gsIimwTMmqqkBp5MtXAKGieLlqsgokARawf+iiKYSqFVAGaSJSESEigGSxhLGtsAgXZjLLM1dHDcg57BznjvMlb+5O05TzzJzCrDdrfdJ+dzicjq2pmc3zPM0tWNfr+ny92LamkErpNbjGFNh0nMf1uqXT0EBWk6BnBKAKAYgsgISSUpZ746CSCohBJi0CGAJVahUJUASAA5UVFQAaDACE2pspW9c2qViSBYEkEembxxY5uF0zNmRWpReTpKq4l1eSEuUFkgQpQGaIkAKyuqn1SZuYwiksFLIg2F8Evpz73E+YiJDc/87c+x9BsSrVZK8SHjNLwJFIpJBa5RTJFAIizABQkMwaiUBpMfHSEAgAJQnJKKiwGJUR9Nzf9hij1nWQFCGSJYVKKaEppbSZZ5ClqgCEBsmIZCJLMopQIFQNQEVFQSn58loXySqIYGwXVbbW21z
NUgV0JqMqfdUx0mO459hyeCKRiWIqm5iI1X67qIpw/2OVUimRQtGMyCySBdsvNsCzUCgl9/uFVQQoBWtAZpKt1ARkaGhDlJimKgOxl2dopVcw4AEvcfhWvm5w2DPK8PTueX0zjreHw02b5jje1t3dab5ZptNhXpbj7VEmg1RvWklMkEUvF43a3Le+TYfTIjq5e4zqH7brh3VcAo6XI+GWlSRBQIUCJZRIJRPNxCSSKd0oSqaICclAlQikqhqqIsKLomo2qSUyCTGdu4hIIfeWT9jNLEhLlNPLx/AIVGpGIlnF8qrMzNT9LhFSiUqyur0UFhGA5NRuXt2aFIoAKlAle5kFQSKrivKT1pPcO1SyRAxZ6UFSlShUce7ToA93VAKNVNUCRKWBSWYlPQpSXpE05palgOE6WhNlQ1SWFwVKASNybJGgZ7nnGFtViCigJEypzcTILkCWlKhVlYkWiyQrhVWlGYwIAqIgFcjMYqqY1X4pAyxUzlFn0VwOh76gLdmNFQqiilrmgx61Dd+uW4XtzSsgun9GomoUKQEFRGSJQTS8sgSQqiBE1SpZSBEhMmGJQNV4eWBaSCBbbykFKEhVUWMxAZSUmDJSNKVJAeEukmXVFBWg00dpI1vLNcYYKOSG9z/+6nq9Pq+H+djYT6dX2Q79eHfq82SL6aFRoZSqstWqygfmSdelX7epr1PmWVQ29ZNOvdt6GZeH7fp+SJjEfn2OAtWUlcygSCNpQomQlCZqZKUWQRoJI6GZKC8hBFEAqZOKVGZkJPenqdIELiJAAkhUVonpdLCqovp+OUpZePEntbTJT9p4YaBUFeYpIMVAFrRrO8zt0A1ZQu41JzMDxZeysTcwBJiZJKsIe2k8WFDV2jsMpkgRkfAAWFMmUNm0V1UxySIhJqCJoGTvqDKcaRgjkJuVIaKZqLR0B5CZ7r73Wt26nOR8PqPQu82TWSvtEGMQJeIxRPeXF6IaCCUqMwEzrapAvNSMdMrekxQorEoUS7OGiByPergJO7hMjGBtpFCSvkWW+ojt6j60nBQpUETMhFollawmWqzMl5mWezcPUVX3yEyRBFRIFXF3AAQKUNUqeoQ26b2DfzWAiZimeFUAUJUSqqrDIUlj62jVt/CI0JRsqObcUnQKoXegmm6owMOH81Nsd3KzbP2WiK5hyFYwtKmXFCJV1KIGIjMTJa3sUOZlEnmlDofJbNZP2g9T6Xn7/JIBYYhE0UTSlCihoylLqqqEalDNFJZSKsF9UmKwBAgIzQQmIgqke6ZASmJ4JjUqARGKNFTWy1OVQtJqmtUH070iQVdIsAAECvmCKJAswgQwI9USTdSm1qd56cveEiQAIfbHtbei+5i7P6EMgCgBoyjg3rSyAFQKWa0TDC2RMUcgIpg0aYAnEtgbDKoqkHvDk1HUGptrL0TmFfO8ACaSIVXC/buw0Kz3U0/44TDv3X9rOs2iTZIwulfSKsr5UgNXqBSlPF+6SRCQTHpAoVlIAsXKiChqphTkMs3z8d6mm63aWtCpHaJiOJ08Dx+brmuMrYgGoFCqIgpIiUIFSgIoIlGAkKCQARRRJAyIvxoO8idvCblfelmVfW6ttZLKDCGLqMyqYgmKkGIWeM1GEtoEWlGeBK0MqFGIUMnIitxKxdIk1AFGWsEEvZU2QMrTE0EyIsYYbVoyfd18bO5+TURUrj5GPtOGzqKyhUO2Sgs6zfJG7Dl1/SoqUiiNjeKiIQQQaKpCLQooKFYSBJEZhKhI7mOqVEYVSoVgBVAFgTBr28b5kpOUAkVRKRKQBECkipWISPWmsuioDSEjWFuSEOE+UGXmjjWpGooiYl1Npfc+WWtqpqoREREQeQFVCpm+X2/c625lFVgZSVGoyQ6uECZiSrUesqC1CWp+9XUdvuUYgyosyxBEVRVZKFRSlIBHoRLhUJZwYJV9aiwVUVDLTKT6fq2LiCr3ykCpqOGVhBQuWZVZqAggWWwoug+BkKqVXhpMZUpVVohKpkq
mAxBjnyyIYZMeSm6cc8zzHFv1ZKq/T4WgVIYji2osdzFNAnAK1ERMBRDso1MklEVJJgIl2HtMQHWKIojAqEqKogRklItUM6OwKqSEZPDl164qBiuLxkQqnTqVkvRKd2yiKvvRkOQQwjAhJ0oSm8TVMUltPqmc7o+3t7fTYUlIjG1d16YdqZmX7ToycN02v1yv69O6Xq7ndVwu6WezqNDUDBVVYc9RFZereB7ue5O6PkauKQJRsqgAzRzR2Ix7d5Sg7qAAhAVUMUYos5FrFUoqvFStNzQx04IksKZP0rSrqEoxc6AUACI807dQdtLJ/VqJ/czuvboUCCZZEapSG4iiEV3YTCYrk41pYkpSlBFRlSQpEgOlWQFIZTnVyCpElGQgk9baPiiwsk2iXWXuy8H6rIRl9ss1zk8R7sNRXpnIAWzumWXaxaKECRYQiMqoQVICJmqAcNpPr3aS1DIxqjQxCkHyfD6f/XqJcyYFUVmshghlJiqIxRA73h5SESVV4qYVklQaZS1l+e001dw8x2dv3kh/9ywf2mwfvV7Wy/bjd1/d33/y9fXj7z99HzmAiXCwEhWZJsomaiKSRFCtDGBFUbIoyEwWSbAxan8RVEj3FYDKVAhUVhaR3VTMClCNqih2IVgVEYmiGoRV3kiVOdwRPsqlobFDGFNplqqOdC+nqaSVJ2VoiUCLoovgUHKSaloYHsft4mde5zRcZIuxuY7rdn58l3Fxv665eW1mWkSam7YJiMgRwm1jAQ7bQpCjnFoWohUpWdWAMBqJqsHK2qt5iWQ15RbuIkWpqCpDAdiCZNXm6HNvS0MrTDV1MbOpmVDTq2mG1+aMKqx1vcZ6uawbtuuW2/ryfilMELEfZcgOqUUNiVZa64AIjFkQwdRpmdGamM3uuV63MUZWqIjsu6WsYhaz9uNUMiqVYEIo0kobZMrpMNnSlpP2JtOs2mS91vMZ6/O2nmtbz170bbgkXCszUgAhM2vsM8IOgUhZFkAT0oTa2FszMwgFL7M4QGSaSSdTdYvyIIuJDO4IbAMkib0jqkhT3TGoMho1Spzo5DR3Oc1lnHK7e318Oy9XvGr3h+f1x6/v3jytsWJd5CI05IVxBSycBqZRpdSEmkDt+zsgM0NVqRqZIraDTpkECkKhpgelWBIIVr1AIUro3iXs+EdSsyiRVTvGwKqsGAFPV/W9sVBLZDGoBaBaCpru1ZWIzBiQrWSSLZMT5KT9VkMGiSyOsc6m56fL5SJ1jcv1+fky1qeLjwczg7nzSit3jxpoRWVUUZyoaVqKyNwEbWujq/n7zOdEEGKSVJYKCyPhQmFJJsBwoYhVum/BMkgJIqMchWCgZBGIsstyO+lsU1NrJczM9EwhUVQ1X71QHrVtEYMYJaUFlqdk0TR8kACQSArTQ4XpI5OBEmOfe29qFCuMTK3SaWp9mnyM8/m8rQN0SpFKyIjIrFRjCUhP94hJxSbT2aR7m/KwpDU9LG1aWuucpuxLbXM997w8T+vg0CZYsfnYGFkKahPj5JujKE0CGRFiUhmZSIWxAS9gsYhm7pdvRrkzXhqLGplRBc9wbF6DMUynIFBQCpWEYkf9k1QYKGCbzU7WhJNhkGs8nU5f/3e+9+++v/74P/+9Pz28/uh7X/vuH/7p75+vH1xMcBObm0vKgLBR1EAWhaoiYpWoQTYrorDDbJqQl82JCIDKpEKoGVFJIUHsVXefi2rvE6WJKQKidI/MEWuEu5R4VkqlhhilgV60ohVHVgFt6KLMlunMoSIpPTC6NGmtTapNBF4YEbKurAyVed18fVq3p6fz83N4lYS1i5ont1Kn5sZRXWwipMy6qCWvffZClpSTBmArXffVtppWIau8MliKEkCrEpSCRoQSVRKejuoUJiJkZOgsfTIoRFtTnXubu5mp18ZWmwz3fZuSfZ58G3LeJENSIAgkRCg1xvB9TIgKVkFoYOeIRKSSRmwZFpGUFNVPfvpeRFVVBNZknm2aZlEdlV6Rmaw
iKpFEI6hAwfdFTRLt1KZFpy7LwVrXaenLIststujcVRWQqiIoKM3MyH03kTbptMwwtdZUxZlJINPMBFX0UQHA1EQoUo5AFphUT8kgRo2oEbENHyPC4VtdxFK0QC+qqTSxqn3Kp8DAHacstZpvlpySEjdT//TNx29Oh+v43KbpF77xq995/a3t/VfixTDP+el9Pn+5oug1TFSUQFqbhCWEqApVCEpBFNQkKUbRBNRUVKiyw6ncUZ+fbBeFsu89SSGlXgBoLRJMHzlGjLXKS1KYLArjpbTXDj/L3vaqgsUoAUX0ZWRklRRH79Zn08msydQaqaKsqpHx+HR+fDg/f1ifn87b8/XyeF0337anwqiUjJFYSxwMSIau3aY2taHPxW2HOYXmGfTUeNnmKVpVZWaMQgmhO6q4nxgTKBAjt0hUr2xeESG6tPnQptlsnqmwxj6pCkW0qYqhTa2qSBQcTC0WWKkVzMhwz0BlRWaM3IGgApS2l899gyRqbMKmbWrTcZoW06/99BtRUoJCU2lTm+bWZ21mVA0gMhMgtYqVVbI/XEOIY1AxL733bksdlkObrc+tH+c29ePxZjocI2345uFRXok+zfOhtcX6ZNo7lTaJNJQAkEqwpBhVUVlJipRIhAQlqYAUFQlcx7au13V9Oo/rFr7GmjWSwzoFsCprs1IL+55OTBQUF1FFaNHEDjpbE2Vp/uK3Pvupm49ubj96/Orp177+G//wN/5XP/u1z/RJ3n/+8NUXX3799Te//LcPOxjDQpXb3EmqwlQBAUM15IV9pKJGiojY1Ptk3M/A/pPszXCAVQAIIRNIKVCouq8s6RjlGNuoUVraKIRgX2OWJHci0Y7gkmpUq4gkRUukJJNgkalu0vrU0FALrbE1y0JEDa/reX16OD98uLz/6vHp3dN29XTkdqGAqtpaaUKSksIhfVLth75MU9cJIkNFMxAoL5dIA5zuDg3FgHvVjn/vYyiFrNKsLIZFclQQLSpDRgD92I43S+9zaxM1rZFKIUdEInfwJyLBqsjwACcPJAylBkPBtxprlY/M8kigAMhehcNDoCbaRLpYF+2ija1Tv/Yzp4SrSrPWmomKiLRpmqfJmk29m5mIBpBFiDiRrMoiBRUjwlo7HHpb6nBYjgfTg+nUjnZYltNyuhVtLIwR27Y1ldvb4/G0LKe5d6VpmyxZAmpr6ZXhmVkoIl8IIQK2YlcBqZUYI3PEuKzn6/VyXcd1rCO3TIekyr5LVWPbe25Sda8gYEJUm0qqqQlE9ND6ocntcV6or+rm177zG3/nF3/rtR5uXn1889Ev/cLP/dbX7z79nX/2T3Bdg8u6YZIOIDJ0EiHMSlRJVqU2UpSYiqQKyWnq86GrESwKBUIClZXISO7YfxazdF/A/4QXAFTkXmWTqCY7KF0UiWJUwZGVTIUTkKAXSwVUiJUYqIQIBKKpqmbCpmJqaqwiLTy3NS/n7fp4ffzq8vjVkz9XRVFUskyERmslbV/isfdpmu7ubt/eHu7mabJuRGZlZl19VIV648Zy1CaSlZUFsJixM6Oo1AJFLDw8wLJwZGDfeHlqVLZ5nnoTkzb1rEikmoiJSDVT94iIzIoERUnLSEmBi1eMKvesgVw9ghlpkMlaiUApZuxqTfsySVdtqpO1bv3Q9JOfma213idVVVWKQFTEBGI29W5qYq3PU+8z2VAFYhAlTJLhqKo+83BaWpNlEZ05tfkwn9RmlTa3ZpyQUiWtt8PBlsPSp2Zdp6mpCYVIZFZGjHEN30QSQsjO+NoGPGJ1+Fbna11Xv25jva7n4dvmmeXbGCTNlEqqqU0UE1XdX2coRArKKisYtZSmRnDq0+nY527L4Xhox1/57Fd/7mu/sET//I//8PIXf8CxfuMX/tHf+63/TTxczt//N4wVdiNzt9aHX1sryn7LQATsVrAEqYShTzofJptV9sW1kkURyciIRAkqXwpCCgUqiEImMxOFgkRV7oTQQkBLzZFVWZkK4f7xp4A
ojWK2JrCSRqhQVIzGkiIIVqmayaSlW65ZzBFxqffn9fzhLI9Za25DCGksiqVszbI10IZriPbb0+uP3356e3Naej8cTjfLq9mOkcjKbQMG6G1cKy7ktpcFLyAdmqoGkX0pK9qaSHqGZ9AldzylOIYwxGNrJn1u7dBSUk2lo6tNsxLmkSVIVEKjKob7GJJVXiPcURmFKF93xNlURERKUE10mXqT1rR16820yzS3aZmtm7757L71Zi8rATNrapYFYRKVCCpMm5Im2UTmxt7AQgEqQGF4BKIZtYlMnKw3O7Xp2KyZqnDKgujUDkubmnVbltl6t9Z6pzZ41tgiPce2jm1grCMzMjO3a54v43xdHzZ/utbTsz9u2M7jfNku1/XZaxu1eaT11lqnYOoqpqoiQlOliKgIhWUVIKgsqinMfbATGZ/ev25JL/yNb//GT03f+eov//J3/8W/+PM//u//7H/4gz/7g9/5/l/+N9/57l/79d/8xz/zzW/94Z/+tz98fP/p20+m3j88fdFa3wl5qqCxRBwhmhC1yeZlmqZmvSm5L8hU2v6SR1QGQCECRUIKQVZUFTTTUelBeYG99lUyq3KvDdz3qYXITK/QYId1pVU1UkWYJtLEQCmUlKo2CkV6KQpVoTV43bb12eO81Vp0EfSIjRJijRpqUoKy1DbN0/H1zavXd/c3p9f3t2/nw83hcNt0AjUSsQZ8Y2KLsXllaG2oACAMMTNhAK7K6dR1CZ3mdqCa+tD0LGKNTFDNSgmiH0wnzJOZ0IxspiogMuGxFekjI0eOuoxt3VwCNQpZ6Ts3kdIUxmxkE7VsXczYDjbNRiuZqbPAaLP2Q9PbrzVTmomaalM1pagQKpVSkZVZWQ5m1l4PoUKKqDbwBeEdW2aKmYq1w3w69KO1Wa0rVLSbaVF6M+uAlJiKlXXIJEHZthGxpbtfY13X1YdfuW4edvXYPCN0czurbtJG5QgfHheKgEomTPYL3Vqpqs4KK0tQrUmyoNp3dlTtbWE1MFjq5ajxjdcff/2jj1+Pu9/81r/zoz/6w9/7N//dw/qejuVwfC57/v4Xv//P/uO7Cd/5W//LX/qV3/yd/+z/8OMvf3z38Scfnh9KCW3ampioSOlebITCqdvp5kanpkqRLKKKBc+gj+GelRQEUNiJfxk7w3CfgmonBr60Q9x7YVQRidipQ7KT1UOYAjahlbKhFVrthDQUiVBRbaJNhSSiCpFIZwXGOcfFt4vnOqpk5xgIILYkVhEWA4q+nF7fv31zejXdvDoebk/Hu9vjq6nNUWXaJWpLjNxip+lnRiQieirVVQiCLQGcjoflruuxHQ/9eHvgyfrNJJ1eEYQ1scVsatMJot5677OWBKFlzoK7J0tk3zxlJisKUVIWKVlSUYxkUVSgsI7eWjNr3djEZmuLahfpgqZUQmFdpnnS/pGWkFpiKiKonT8nUSAL5ZWeXhnIqMyI8AytF1o0hEJBFkduAjnO8zTPp5s71gtJgCJmXbUlQJVAUKkqfTr0PvuI9br6GGON63Ud2zo28TG0p3UCbN0OJ5xuTBeqlXZVM20iZqCJqomqgWK01nqqUmy2vl8bO6NVMnbWpzBFcowRsOkwTRlXq/Hzbz/7tW//bTxefvAnv18jTtNNpr66ub+ZGm2Ktf3gX//Xdze33/yFv/vzP/XX/tP/6v/yOCqaVJPWRbrYJGxVpJpRyxqWuS+LNSMkgJ2CK1FIxzZGOlRMwKwEUVUQRYGQRCWquK/kWdh/ft9v/wwKFeBOkIJYgckSlqmRXpqU/XoKSJGKgiiBqkQmPdI907ldwtfL9pR5VUahHFICiVEWZq2KLi3N6rAs96/f3n70tddvbl+/uZ+XuU+tJCGCApC+xnCItVltZktPXy+qEOmKrgghptPST9bmNi3z8dXCLm3ufZkOh2NbejtObZa+aOuTNUqHNTObqAiskcPDRwzPUSg1Q6qkVFR5sXZ9U0XUtjmoUjvcKtZVTMRUm0qT3vW
v+oKCtN5aN1HR6UaQgqyqRAnBysr0BCoBB0t2UUlFREXFyLQdb/KxywxSrTISolNvvfVlXvq0IKGgiE66TDaJ7kKIzHRVsWUy1W29nM+P6xbrJS+Xy/V6HRsPi928apE+zXo89uWGy0G0SZtIlLLpzglFy9zValQznWjNWmvNmrZuKipCFYpWie6SqjSqChmsFvj47g3dfvb4jV/77i//8e/+988Pj6fjrZai2eWymWmbl8Px9n2s1z//V/fdvv43/tEvf+/nf/+f/59+bAjNpReasCElCIaQFdOsx+Pcu4mKqVQlIJAeEZer+xiVOzUILwPEjg/KvuOTAD1dwMJOJdv3GcliRILKyqogZf+CfGmKFFIhxWYkKM62LzuZOVTlJ1wuzagcFQO1jXFFbOBOLWOhFNCAW2NvLTJs0tvbu/tXr9+8/ejVq/uPPnp7Op1UhcqsAiojxrZtEVW1tHbTT01nkusY41rpkBo29cPtPJ3YDq0tvTT71I83J2uqDe3A5bZNBxNNbRSjdCsplaACEpt7VCTTY3h4VmWkghEVm2PEtnlElCIDFSVNQIiBjdDSLjaZTa2ZkKRKiYiamKlJ66KHux5bRuzzVUUiYgBZERVZyUqke4RXZlVmVkZF5nB/Wa8iSkNV19yaNRWByLIcunUmVSeTpgpTBgpC97HPKRG4XJ4+PLw7n9dt5LZu4es0692rRTRb5+EopzvtB1gTGlSIAsUICS+UVgpFKM1aNaXqYjZbExNtNvfWd1puEyFSgG5mJnPXtK2b19g+++Szv/3Z3zj/8PMP7398c3yFKpswH7pvo2xqmqp2Oh6fPqzx8Pmbt8dv/uLfP8j1//m7/6SaHeZZVaCVggKc0ZrOy9Sa9t72q4QgRbNkXdftPCKCxUpkReyvL7DjP1UV6aKiprHXLJQIdhkGwAJRYBUrWWQBoFBAsJigWC+mULSxZIhQAO5XVKIclYgRlYVkrDWq9uVDelbsSwy4ZG/CQiJ00pv7+/v716/vXr356KPTze18OBShyoKI2LatT+uzh7NoEGHvtvQ+S+q2pV+uCrSlT4c2H2U+zjK1ZjYtc5ubTW06NZuEUhRYkzZTm4q11qG9RAAmdYfRXmhRmZUeUvBIonx4RBYlc9fGclclShNrKko1tW7WdIfmEkHVHX5WI1l6840pqjKZkZmentsY4Q7ntm0ea6bvgFdEhkc6KmN/B7JGwaFCbfuwHxEBlHBp07EfkBrYaQIjJYss1hZjHRvK13U9Pz8+Pj9sq2/XQfdlstu7uTURwe19P5x0PkInoYTDKbBm+2/rxc1dyBLQVKR3aWaiJqLWtFlr3SbT1pp23eF1agPN+8SmaFbzXG/6/a9++9eeP/+wXc9N2rx068bMyYSgUddtPTSbb99Ua88/+BeHN7c/9bf+ox/83n/xJz/68/bmtTB1l6gyiGiTzlPrRlP5iSgGlRxrbKtvq1dUvUgGWLs4DXxRzxW02+3NzduP3izLvDMUwz0TpOzdEvZdDKSAQlF2sExUFE13asauhaWESok0gWZ6JQGtQBUqmV7D3auaqLVOokaOSoHQKyOpsMnu39wcTof7t28++vqnN3cHnSjK3ptnhucYvl6vl3X19bpD2qqWkeVpIruWyCBSyY5+WrSbmS7HuSaVSZbTbF21N4jSmnTTbtbZFVNvJRJA2dq6mlK43xE/2SzsAjIC2tq0TK3vC0U2FRVRocK69MlaMxGSLztqElSKSEVVBJH6tZ++EbHMcg8fSM9y+jWuV79exzZ8G+4R23AflSGVEuHukRGZQRFtbV/QCuE+2PQ6VkTcnk4JeqbHdR1nr6DI5sPHdlnPvp193S6X8+W8hlesrqz7u5vDcVap080yHbks0idhQ+QVQlVT1czyRMaL3KeYqQIxU+t9r+ZCa9ZMRERFqU3LxFo3bdm6svF26lOzudtf+/rPnsZxOz8dZCqE9oNEVUZvTZAA29TG2DLW17dvS/CDP/5X3/7F3/z
ZX/qb/9l/8X9+aFs7tEowMOBmOpvNU2tmKFSBFCTdc9tivbhv8XIQE+B+DUntZ7nApje3N6/ub9+8fv36zf3t7W0zZda6bfkizAaq9rEYFN813FUsTtaiZZWrFrRKqAqgZGduF4XKkozKispyTwpPy3SaZjXT1syYmeVAsEjnWG7a/avTzf3N64/evnrzqk+m/QUm2bbr8+Xy9PhwvjzGtkqFaqcZhU2lq0QGtswUH464CgGzNvXDMsvc+qHPh2Wemgj7PLXeYPKT1kVEQGNSSkssiTRTch/TX4QSWbmzT3ufeltYbM0Ox2U5zLd3N8eb+XCYe29m2kx3agF20SbAKoFI7WLu0k+/d9eamJFgRoSXCEXkul4vV79e/fLs2+q+ZXi547qtPnzfxu0diAohWXCS1J3Uko+Xx+V0uLu5iXEZ4zp8eIQIfIx1u66Xp8t1jcz1EtfLlutG+GFph5s2tb4cpM/VZ/TG1ltgZK0glW2/8BKSIQrbr9MkIU6FaiOaKK2Z7tujKpEygZm0KVVi6jpP1lrNE24Ptz/95qdfr6fnz79soM1zmUqVj3WeF0dRtaT7iEoMXt8cPsnzdr385bd+7h+JXP/r//Y/wWES6Tk2UMz0ps9Te1naibRKZiFczpdxPY/wF7k1qvBCUqmdfqvNDofl9eu7m5tDbzZP0+1pvr05zIclkdfrNbN2AGQX6OU+Pe9qVbDAkgBZrBfaxa6ZTtlZAGBVxc7RQcHU2tTuTof7+5vbm9vD6TQvfWqWGU/bRq3jfbNj3bw+fPzJx29ff7QcDtZEm0A1w7dtPD89PT5+OF8e3C+QbNYhCE8pmfvcbWop28gYq7IkSyjTcemHZtO0HA/NtApipl2bSZvVTCBClTLdN3pqIoCYklKoXVWyE0DMzGxuMCmVLBFp3VqzZV5at9774XBYlvlwWI6n4zzNZkpJH7mv0irSrJnaSNdv/PwrsaJ6MLS1w818uJmmQ2uTZXpEbtdtW2NsGQ4fft3W2FiObVwQMJFCkiVKQFlkZbpfI2Se3ry+txojR72gloHy8/n56fxwHWPb3EfmqPChwvl0sK69SW/Z5rSuvZMiWQ54wgGByIixjULt4jSvrF1PXvtpEjWqUJA7zJhCmEhrNc3ZLKVBmfevT0vrb/s3fv7tr8UPr+a+rddpmaVhJ+pN3frp5vH5WXJAatb2fHl+ePb7jz85f/nHxz7/2r/z7//L3/m//fEPf6jz7RbPaq2pHDpNhSgVK7zsttY11mtUFrijPLsEb2cyVElJt+PN6dX97ek498mmrofeWnvBMYp1vlzGdXtxUsELdY4FvpjYSAoBL0rBKKCiCJHSnfikun8vERTKrE3dDofD65vTcry5ubl9/fr+7tXN7eGofaqe02SnV215fXz15vU3Pv309es3fenWSIGq+Iinh8f37z88PDw8Pbx/Hl+hRNiqskqQmsPhUJpy6qyxrRmRlQXhLH0yNopIFkM1idZMBOho1l609ajhnjHKsqJKS4QgVFVopn2xucvcRJXatE299WYqJqIk95ZHKK331ro1m6dpPlhrHbUjB4XdlkKp3/zrbxIxMppNd3enm5vDsszLcjje3t3dLofjrGqbj1gdG2obLzw/L8Su2d5pLxqVIklGZgqlzxM1b2+XeVlSCIEaKJEVz4/P56eHaziifA33KkltOM7LZKYTdKrWlOLSQWvXWD1DTEBW6rgWUhMIKVUVmHtpcO8HACq0oAI1ShOIuhlU2E21iWp2yZL89P7Nr73+u+1hGg+fm85mWlAG4E6T5+u6LLfjmtt2OUqy8nDzpp+mh4cfv3n9cX744enbvzgJ/+v/73/uhyUvQ0QPNs+dBEQ7iCobwAjfhoy4bhwmu/JWSgMIpFAmYC315djfvLq/nZaJ6L2rWWv9cFim3sYY18t62a7uCahJq8idJha7D4OwKcL2iRfWmhjYU3upQdv24megWcVWbdJpnm7e3r89vr69v3l1f7g/HOa
b4/E43x0P8+3dbFNC8s3b+zcfffzq7cfH2xl9mBhlQvl6Pj88Xj+8f3j37sdfPX5xva4o7rp9yWTJ8IqAX5Hb5p6Pl22M4FrhVdpL4NhUW5WkDm2AElaFylpRycK6XkDHfrZq32TvpEMqtIsKKoqpQk5N59a6mZl1qa6tKcW6WbOqSo9MT1SjtiatSeua4PAXKEc/++XXQE3TdLw5LIfDvMzzMh8O7XQ63dzen46nZZmsMTI2v3puCqsQVL3A1rLTYUJRgDgpJssyLcdZDW3G21dHNUCiNAJji3G+Xp/Ol9gGKit2BaSrZV+MAp1VBEyKkGZb+GW7jMoSRNIHq4zQKkiYVQeBJJIZuY0VCcJEd2nqLuFX1Uk5QRy8sPHmcKOoy+XyvU9/tn9JPD6INqpCikgV6G69sY7T6dW2ObRFstLvbw+Uev/+cZnm+9tX3/zFX/vTP/jd3//T37+fjstk0jBLiU6U9mLwQdkix3YFUkqKSBZUMguAmkKyQr1KlHc3N6fTjU6td+2Ttd5EOE2tTW3d1ut6Hdv64h4ie62T2g1YSgqNL3pvh6T2HSj2QihEbEKxChRtqkvr8+lw8+r17enV/f396e3t/f2rpU1ouajpQee5H47LfHN8++r+sCzsOingmhHX89Pjhw/P7x4+vPvyfHnYtufLeo7VCaoaCz62zPQYVeXn8XS+XC5XdWW284j0DFTvs6qQbNNkZqoUCqlII3Yp2ItcNF+cN5BFT0clBVUCNmVlQNmEikKp5E46UlOh2ovXmJpily/uMINpaw1SntvwqyP1Z379ralO83w4LctxmabpcFgOx8MyL/M8t957a603m/YFMXJkZkGlNaESQlGoiBBR5pXzbDe30/H2YItoG/OiOqm0grr78HB3v1xW9x3bBrIqHQIx2txEo2pX5mdWPl/P63ap0lHIkOEFWhVYomUZOYYTWgEIPDLcASGkdnsJqqqaKukFz8xRNeks5RPn++17848l6zwiCDYzs0ZTLxfSr353+2Y6HVqfzfrw6/n5wzQdIvLHn/+44+H19/72dz799D/+z/6P7Lg5vEamTgcKyZ3CjaT4GIiN2CUsufeygCb2Mwkt8VKPrRtu7++Px5ummBdtbWpNs3zzVZTWNHIA0WezDmmUBhWaEUhIUJTI3WMuUaUlVtxVOwQQIkpQgN77YTkux9Pb+7dv7u4Pr47TfLg5zKqkGU3Z23Q7f/TRq2U2MT1MvauOjDG2D199/vju4cPD48Pzh+en9+fL03V7Xq/rdb2OkQAyR2SFR3r4iu2S1/OzX0Ow+MZ1O+/I03JzEIOKqgIKUY0RmVGsQnrmtrqHR8a6XTKyihGxT8FCI00EL8BOJZU0K+yyu13Bmy963r3jyRebNxGKJhXWDKqJ0p/9H30iqq233qd5XuZ5OR4Oy7Is82GarfdF1URKzUynXY1f+6KxqfzErowgRCIIsM+8Oc7LzXK8XabFwLJmVAdjXdf1uoXH2Mb1WvtucoxttwnZ+THkTkPfvLYRcblexhYozRSkEhpZmVCYigHwUXTxQO+TiPjwLKkEIvfxkghwKw5kEbMzmGVX+ah//Zdv/2Y9fDjHemhdjGPLXd2FciYiiiIj4vHDVzenRU2vm/tWonI8HR4fPv/49u7tz/zmD//wX//RX/7rw+tXrU2UTWli1RopmcDqHrn5vteKZClLC0wioypMKSkZOcbww83pozf3SzNrrfcGQ5Rvvl3WazLVjCZs1iadFrNZtO19XkqHWnM4IFUsprRsk5i1nRJGyZ0bTIrZPPfjab55/cnb00e3N69OzUqboRlEVPyK0Q/Tq8M8ebzR6ZTydLnYFtvj+Xo9X87X9PH09O7dw5cj13U9b9uIdK+CSBYyZFt9u/rYhm8R1xpXzy0yM0tX94jRD1OfJ9Yg4xpO3YWwYydkRNbm2xYjM1C1W++YiLAJlLtzw7542FERM1DDMz0SyErPoIC6m/u8vA+qUClRJFK0dZuqysQ
UuU+K2q0dj4fD4dhaE5qIZErvXTtVOv2pIkuGXsXdq7irXXajh0oUMrNiE5X5cDgebw/U1OaJ8Ijy2LZtXSs2KQ9hRWKMsW4XADO6avk2YqCwavOq9MDYMkvgzF3MApDarUkyR8ZIRvMRplrJSgo7ioBE1TqCXLOiRLoYSDIbX1+v+b23n3xt/mw8PAevZrZdvC82tz4i3QMFCm/uby7Xy+F0q6394Ec/7N3u7t6w5Hp+UObc7/7s//Nf/vTH3/kP//3/9e/8wf9dLWkL4SJMYbFQpDh1g4q2jFEtLIKBKBQjK1SojiSrwdZ1/Oirzz/+6P70+lVwXP0JAg+HpM3E5jLXQaYJyiwgo3zbrplIN5RFpG59bJWZtKoa7tBm4aDCoITuPiU+Bsg2ifbox7Y002Ybsjo91vO6vlmWG2+v/fC1ab6Pfj2v26y51uV6/ZHjDy/PD9fHL7768YoV7jEq4Cksf9ahpTcSa1a51C55HRJo6iO9gEGPuvD61Q/e397eDo7xnJgm1a1MRYssaBVDm/Tsa60NbSdBKRvRdi+wQkVGVSJ2f7ytqmUiYjg4qRBoU5vnWVjUbFooZkUBO8lWoM3qZpmsBLsIWK2sYZrazc2h9UVEBOruYswqX3M7rCOWLY/Ei6cNALx4hGTV8HH1AnhvNjf0ztaXJlq9FSXW7bz52Lb0VVaPGiODY91iJFWGr2JaYNaguOdWhTFkHZlCr6HVqwopKJaWJHNgbOruJlrFbd32Kx8imW42i2lRipaQyCatgTrLTT/qF+/O3/ruLa8tfWvVxYwZtJpmg3CMHNulqswO8zxPh9O2XWVc/XppNh0Oh8enr8a66aw//K/+r9/9h//47/yt3/p//+t/MX26AHNl7nAzioiLwPWFFAYa3GO3hJMSowSQFTtlCpJfPnzxx9+frXGeoioKkcyi9FkP6FyxjSHSTRQZXiNyBqQC2zrWdXjMvmVErON5kC/+CLpbldm+PvAYw70QTTsSObwtjYqoTdzvq1WcfgGvvqEnEemBoy05FYRrjYue7zY+5oc/ef9HQp6wfFifUIDIGGPEFVLoypGZGJFWbVQObGu6u0paVaZjexzv4qvT6XD36WlgmBcz+jKLABqAC8fUFRG+iSgydv+yoKhKAzQzlZJkeIytqjwqPSM9vPLZQ6HSbJrOh9maoZtGyRjr7kbT2qQmHpuoG/f5A4Bkm6Qv2uc2z936DKaPls9cR7Sp9blP2zTPs3uoKn5ioEuRQqxrdPbWrbUpXYmumLoetFNkZLpvWwUiar3GuIyxbT7ol9XDtWuojAjAsgLpVZFlmUVY5e6kIJmIUeXjmlvTXslxHbudtGdQCUh6SIc13UmwomI6dZtb782OrSZc5dXXXs2nKda5IlEyVp9n86rIrWezss4pxK+r13YOhx7vrs/Ps0X4dX18b8qPX70J1W199+7dD7724fm3fuN/8Tv/5r+xOVnqDpSjZMt1zS0otGRCiREjIaB6VCZIjeEisF5NOffD4Pjw/NUPP7SP7m5VNWvsWgbrdtSDmNgQlE7dVOeqUpl2n9YxxroOH7WuY4z1fK01W5lHXcjazXeBlEpRwHZiDZj9/HR5nJ8oSpePng+vPh+3l6k5Ro0Pj+8vz89cZhYk89Xd3duPP1mO84/j8XdTJ8zXzT9AMpG78STkvF5R7+HctgxHPsu4VgQ8eN1WHSXQgitsffIf/cWX/TTLzKC7SuCiuheBjIGInfIXuycod0btLprWUrHy0VuXfsCE3fobW5Y2um8eVXE+Xx+fx7zY6bjcHA40e5EkvVjXbECamf70r3+klACs2en2Zl6O1vvUWp9Vm9B6Ro2xDs91g/u2bSMzzJQQUZ3miaYQS7Xe5uN8WKbjNB2X5STaRQyqplqhl/NlXZ98eFzL1+15rNdzhiN3xxZkVe4WCFERUWNoJVUMbkyJTZDMLfw8fEspI6oyhIaq/Yil5+7q27vObRZRUW3W5nmel6W3aeo2lW1
pn96+fbPZPC7nS9U6dgwuSdsJWfkiCuzNhLoss5nSpB9n65K+Ssntq1chxGZ36V//7nf+5Me/9/n5Q5+P59qCW+XY4tmLHlo5alRsGKO2DdcNI8srKsMoU5fp0GWuZlzmhYrEamoQjUJmQWiqZkqRaZ447fQi09YPh6X3pk2Xm8P98Waej6qihj7bfDgUi4SZggTUKGQkC7Cm/e4wTTcHPcwQlQ/bJ9+vj//t4Pvr5d2HL5+ent69f356QrO6buP58nB5+vDuKxY/++wzbfL5ux89rI9PHA/xkBVQBYU0CEs8gteRdN82Oa/wrXAObppOVGpWgVGSTDvyeNNVubGSIyvSMTbdbcK2bYtwoShVQGEjVESsmZn03vvcp95b671rn1Rk75VKyNZEOrUXZada6XJcluVwmBYjwz29ICRFf+bXP9pda+Z5OR6Os7FJibU20cyYNbYxho8ttnG9btvYtoisgjXtTc20TY1ddm7Wcjgel2We5953GoaaECN8retlu1yetmvUZtvq63Vsl2vGgGgpdyMd7GyPEqKhGMF1q0zWoAfGNWJjOjMyPU1apoyxDY9KRURalNbUmmgTk9abcudCtWU6zXZsdtOmvvm2SPvW4d7er8USEsNH7DY9jowSLxmq2tuhJJd5Pp1OBSSaQJsoIsa22tQvl+fhT29/7hfeHN/+zr/451vPzX3lqC1Xr3S619iK14ix+cC4MDfIbnQlsC5tEjHR1tAJRrcWw8HIqoqsGoXISoq0qbW5mU3adqGJUqFN+zRbb9b7PE8UtnmSppPVpBpAwRdrQBUSyooi9GBtWe70/uZU/e49vva5fP0HUe8e3/kHBDQL1nWeu/SojKrZpvL44bsv4lrf/dovvI/HP3r/p1+Oy7kC6ASqPBBQ7Bp2MhIpQ5U9Pcbz8AsqtEYVLMpB2lztKPNhkh4piEpUZUX4KqIRta5rhJkuTCB37jelT33u8zIfl+lwPE3zZM2s67RM09yW49Q7wQgJtNRJWlfrDYouebpd+qEndhK9Ib2JWaSUwkx7M91HDEXKGtVqjEzJzEwvDMHublW9yeCujRRVlkjXiY6LX4haloOJwdU3TJPWagO1rpfLZbs+Y7uUb9f1um2r/2SAJksySMB3t8XdUCoZGzIKyAzsfPoazID77qrjGZGZO89jOenuNiwCAcIzNNGkChkC9KnfdJ29yXE+eeLp4bGezzm8qrQ4NpFwlS5oLHMfpaWMKF639XBzJ2vVOI9thXvF1ppw226Op+fzh6ff/aOf/pW/+7Pf/af/7A//+XR/g6t6RgrGyPXsNWKsOTZZrxHO8paJXZYpzUJ2znhSV6FkkpSHh+friKnrMps6hgnrrvdmNO0ibDnKUyKzKNpEKWPEiLSpT4f5iPl6fn4+Pw3BWJF1qQxP7wmrMGGzg5jx3Tr/6Pwdb5+09pjn1eqeN0PQD8fhyOvWhEXpE7lu6HVAfPnl5996eN58fPnhR96N1vyyiVaABCUqcfUW1cgobxU+0pydQamAoDJRAsmcelPGdr1qn8BKS2eZKAWZvvtkV8AzWMXcmaGqynme52kyXXq3TJeWu/fMSQ6ZOXy5Xk7vHz48Xy+bb6xcepumGVmbb0X0Q6upx2VNwRjDnGMnECgLFaCWWqav6yoSGVy36zYu7usW15Fr5RYRfDGD2vXmAKr3aQzftgFymmdVZcp2HQJS7Xz1x4fz5Rqxruu6Xq+embudfDpZTKYRpbXtcswMKS0yS30LCWQhRlXsPSWqPP7KbhHQriKiWSPHEFftTK7DFaQ5YrXtuU3HNs2zTtKsj4x3V+1t27b1Ovq8bPWU6zZ11T5BxdpMcmT6GB/ev/vqi68Oh8Plcp6mab1cjocufVJp07LczK9+8Kd/9DPf/u5v/83f/qM//ldn9Ofi5sNHrteoDXHVbWQM8ZE+IkdBVK0X6ekqQLmMfVZgVolGIS7XM1Ib5wgBzLdzblNWp2RKFWy9DqBUEGa0EpGit65
m4tlynsBqk52f5HwRH+uGUTDFvMy32jW38bUP/qvt+Dan82V78tVEqWbCy/WqOi2HQ0YsMEFu6ad+aBd73J7Pfp6l9d4m4uGyebqRJppehSyVbbhCUVHCSCKV2ltnbOCg5yBpXbRZkVWVnioB3T2ZTamArGMrAkgWACnJpma9995UlYrpQDIrSmAqEFMz80RDm07WbnCzHq7XLYZPvROa7lHJim4dkowKiqfo9/7GRzvLYmq2TL3PE00LvnPZfYzn89Pz5fl8PT89Pzw9P43LcC+AKoKftKM07uTcEau0dry7FbAIrw0lVXK5rg9PHy7ns4/h7h4Yq1eC+xQVQBCFGFERUkQgI4GWgdgSXrFVDI+xE5R2R0FqMctb47S0QmBkJbPCzChIEC9WI+XhQl36sYs2wRzy6kO0hPt4vmygtkXcfaxXoUMyqyIK1m5Pp+Px6OvapikitYmK+4jVx3pxj6oxTq8OPS+ffOe7lw+X3/3zf/O+1hF5fvb1GnHe3HP3dYvIXedFAoqSrMydqEkUaZSJFI+ozBwDDpPetffWBFAxRUNKelwv23YdgSShJUxCBBAhutoWHo7wYcC0TNIOyLisZy15Nb+N28Mc+Zv5+u+8/bm3vHnWfL8+WdZhPqLoLFXr0ltrxZQIZrTZBOGJ8/Z0/+btyPzdH/wP18rzCC8HKyhMk2QJUhUQZsRgrJZX1lq1BkZg7Efaj7ft+Gqxg7QZras1YVOzJrp7IWAbI4YbOwFVsybWupmpCqS1uauBWm2yeTI1qpJGUWlzp5LK1uywHA7HpXURLaO2ZiroqpN1UfWq6xhmJEQSHFGeY/U1t5pNygo5xubX9fm6ni/X83l9uozn7ZomKjZn7hqFrJLYPTwkrNV5fbxc7qY251jZK4b33guBKt82hO/mmlXcLSKLRdWsQkqSFhiZEE3fXd+QUbFrMr0yMzxRJTvDX9EgS2siO9N9D76p9XKFIMaQLdR7IyrtistTPfbb3TF9ntDSB1jB8fwYco23H71aun7xwx/per053bW2xLZe0O5ev755+/HY4jjG5fnd8c3rD+/eny8P2/X8+vbGx1Yh58vjcrn8w9/6j37n3/y3l8cPn4/YRvnQWadrnN03ZFBEpAEshIAVVDF6ZVK7jnVDWQb3HyrDr7nJ0Ll1CQU0N3hdIVMiLufr6ljH9XRziB7NtJ0mSosUYwmq0hEvROh76jjdLRg3yWlpy8a/PX3v3/v6L63FR+IaMfU+VVuzTGyxHrSuLT1MGmwXVtaIbJbT3GrDx3ef3s+3f7l94caezd0TJSa7kQvJ3TCrdjqK7UA0X1rpsp2p0TqWg2mvYjq004Da29+IyPSIYdLI/pKKArg7lTWGjzYsijJps247jJmoAkIHoTNvs21ZoxDuNZIYE3PkNgQ0a631RJ23s37v197ihTZfu/PULjSKcvdxvl6u1+u2bZfL5Xq5jHUd7jvuny/JMTtHrYAKjwp6ZDGtWdTIyM0vo7aRl/Pl6Xy+RGZERo7IjIjC7h/74huNQkDGyBgIRw7kYA2M1cNrtwzZadw7ERqJZqb7S7z7LEiq7UoB+jbSB1G7qXYiI/18vSimnzp9ejrHNi7n6ybCiM3X7fn5w/2ru88+++kvPv/88vS+CefllCWXy2Xz0N4//dY32nyCTL3PLG+my6zLzd3z2U8HG09Pp0+/e7T5X/7e7//5F19K4O50B/byIMT2IJrdcZ5lKkZTSnJXuhGpY9R6vvrlGmtsZ1yfnyO34+mwLAelya7eGrFdt8saj4/Xzz//4vJ88Rjrul6v5zEiM4RBxVjX2hvMrA2RETcyL/1okH/35jv/4Nu/6tneP39AuFzdZCpaE+t9FuouQgMpQt11VSImlkFkqk2ffP2zf/nF7/3eu78ogSp/ouiskmJjIBGBlEyWW21ZVxln8XPIMCHZMN325Y3ZgWoqJm3exw3drfLc04cDxVJAd2PwXRgnRjB39dVOlntJqlERoyiygNo
tkjJliEC6qjYBxMT6LK0BCYaKZJR++1c+hrB1qrH2wCJG5HCPMWJdV/cxfGzb5sMjImKU76zsLAap/Ctha8I9UBmxQjwR21gvvm3jcl2fz+t127Z13bZtDc/MwT1VIrl7luyy1+tIH4i1asAdPhBRdGzuAKr2XBZUFRUUqvClA5YsiZ0CSRqF7u7uw3P1UYWxjXU9P6zr4tMvvvpsqRiXyzoqU7rAoyjyxY++WJblZ37upz48fPnlu6/W8IiRcTWz4/F+3Twzrk9PmaHC0HZ+fJCSnNp2ftC6HO3u07/2a//0v/wnf/LDP/2Zr30v3R/OH2bI1CbV/cnskTI7ZT/J3Tci13UNr+265rjG8PQUojgCW5+X29P9ZA3ch4HNx0Dq0+P1+z/40edffu7DY8Tz09P1cs64ekQFxhoRY01fwy2q02LqN1f924fPfuun/ka7+sPjB81CVFsOoIhJmw/KllJzm0REu2WuXWVqlukkALtenp+u548+/tp/98N/9Qdf/PnSZgLarJjgQAWhYuLY4wcRTjiwEZtyQIdFrnrC6aObfiu64Hg4TK0lsqC66+yF2xjh1bWTfY/2MYqw2tSsNdMCAzkJe+QAvU9mrTWbREB2M4U6LKUZxZAQMRVSG0srk8isER5MWqYrdU+Ky+zunumwNkvfi05GZaaZTYclCZRuHAikRwRVLgWTsqAURslKMqnP57M0YcVu4pg1Nl/HtqVvGVEBoe7WMXv4ko8kNaqhrpXwUS8FFYjdQ4q6p57uoiDuNqWsERtKUdWoaJoSSIfIiNi2EVFqynWtKyZz9qk0jwe+bXNwc9Ara4SwQ7aKujse/uJP/3Csz5999gsZf1adp9tb7T0Df/pnf7Be/WuffBzjGVmHPmXl4ebN49OHj28n97xcMR6/P+Mb//N/7x+9e3yg1+Xp3LQFRM3nmkNAz/BMYSQiK2vNsecc5jwZUVFok1FE5TrxsMa2ruu6XrrM4urckOkZKh1Abmtero/5rs6DxpIvp8N8/+r1/f39bDaQK8EcXRqUh6G/8eqz//FHP7++H++fLwADukwzVKUZklkQ42yLJqiSLLgqhVSBJsuwAYjVFTj2ubXlON9dYhu+TkKarOFBoqBWYsyV0sAlePHVtBr34ITldmkHFc2pWTMBEmx78KJYj6gMUWm7lxhQTUTVao/RKGtNMzOHr5FrPNuUJO/kqNYp2hSQCrFSTWSObWBNr4LFuDIZEbGNiowx1hzWF7HGRFLUa6inqiLqrKWVexwNibaL3dE0FUlfnY2eOUY25aZOajF3t3wpZIxMzxwuexgL/8oelCVUqQCqSb3A3SxBhKCssAUiZEQZkpF7eUkja6hqoviThDlBD3NksFpZK0mUU1UqRqzukq7lQ8F1DTbk0DL/9Pbt3Xz4y/MPM9jZV1w8nbGbafJ0c/PFVw/r+P7Xvv7Z51+9GyFYyS5vXr0G5HpZD9Ph6el9TBMDHy6PrPH+/fs3r14DuT48zZ9/+Q/+wX/w+Z/92f/+//WfLPfLEvZue5rUUthFa01QVVhrwDnGGKutY520z0u3VpVjOvH8/L5ZqfU+zOtp+DUihoyKRFZUDtmK6NpLrzm2D+/fG60sn85bbNmlnUWa6pWp4Wvyl772zV9+9cnPnD46+/O784Mnux6mLpBWVTG8t0XRFKSD0oDKHFQpwN1VGxjX7TrPLSk+6vZ4e3e6a9NUq4io45nwxkqVMgorfUiJEpsXZBLJwRj0+WTTqXEa1ffhs0pVuyYwUCoY5yEhqg1AjiFmVRARbdPUtDXzKiZ9PJ59Bafc+rNkl6tJa9PchSXoXajIiiHGZOQILW99XFcAqi1jLVRT2v2rWxGMMTLTaJUyooAU2UoEgJmoakZVayJSuUbEqIx9m5cIRBQQ8ZOAVUBRVcnIjM0TKIFERZFsxsiEZiazarfOTyJLVEdWhQCSvmUiaZWoClVFBKClKS+RksUahYumRBU1UYNBCoKbi0qE6ECq+FQ
DA2DEtl7aLF1qrE+X54dMFKPgYiqV7l4lqtObNzeX8/rw9Hh7ezpvYzoeT4cjgMvl8vr1zc3drT3c/PgHf/rR6zcKXM/+8OF5sWm+u7tk8Ud/cvvtn/vN3/if/Kf/4v9hx9s/eTgvN8euMdKleDvPRflwXrM8ISyR9LfzAfNMrLc39zYF+EEXq2rWKM9RYUissfq6IVlpEa4NsZ4zr4FCkWUDWXtUxPrYcUjI/Uf37/zLefW/9a1f/3vf/JU71oenx6frtQdFyhpU2h7e1uYutP22UwUZiGjIopAc40rR1tty9wqP71tEbuv7y6VSFFyWggfScg/DxGCWlngaKjKZKTn2uERpB5tfWbtrs8lMBWzLkBaQMFi6+9gyKcUuarBoxkalmMjxuEjvI7wLPa6UG8ppdwAI5/PDtc+LzekwCE1U5AVelybuFKFRmD01cgcggcrUv/nb3zsc53ma90Cln0RBkihVba2ZmYiovrQkWSWiCt29/EQ1gdidUvfxNFF79m9tufOwXsK0SsA9o49VuhcLFHbzmxIKK2IX7lUWMiOKwB48KCBUxUqNQIo4SW2EaFjBSClrQdscgxIsUigUlVZhLCTyvK7bZf17P/Xr37l7/cMvf7ytgoiqQRqrosrEWAQ5LUeUfO3TT9+8eXtdN79u7z68i9zW7fLh+fHrn3z6/v0Xjw8Pb968bq1XxuV6PRxnsz7W58P93e3bjw9/8W8fzk9fWX00HbqZAnd9/vh0s7mf1zi2KcoJReTtcfn06x+vfLKjf/T2xtqYl364ma64lIzGFFqJuK/bBuxiQR/X6/Xp+cFjCKVErPeTHbaxxbZpVveS2N59+YO//51f+ce//Btvw56fzmsOoiTR20RQpSWEZtPhSGpktKZGbmOTyorBPS9R9hQoZfBpe9Su/f71P/3B73zZL8fpuLuGUyVKIgOMykAplOnuI8pbbahV0qM3uXt7Wu6tzdIm4aTs1EkEqk0i3CNUtVszaKPNfTod+rLc3My3piK7x3SEsmRnDmXueprdTI9MUaPuS7QUpRjJiswKiEik7z4dTLAYMfRX//53em/LMs/TDKAQFIClYi+nX/fE1Z8oLSorA1moyErfnQqqaico7CSm/ctYYO0yfwV3bTOrtFIpBmgJC3uy10v1qGJJRrBQnpWJSmRVhkymzShFSVG3LiKWlDTahNYbCWtXsyBMCTVBsaAjEZUGVNaomKX/xrd/6bM3n74/Xx/eP1dQBGNzVfMYrJynlpWFUrUvvvjSIzz8/eMjhV++++LDVz9+fP/V5eHhZ3/+F/74j/7g8enDPE+UOp8fI2KZD7fTaT0/Hr/3c/N6/W/+4F++uX3zVuxoy5vD628f7046Pa5bbPnZR5/EGB/efXj7+v7q5zd3x4/fHs/PP/zOpx+9evVqUmlTbPUEwZ5pT6WvNa61ByWQuI7xdL4wyyhVWdC3x7vz8zU93t7dzdvl6cc//u1f/rv/27/12zeaW67Xy5qbT9OOuquIOYSi1htKq8oUhErknmkKloiCrMiuqoKiPJ7fz/Ny+Pqnv/Pwh/1mmjAPKAUmgsptbJ6VqJCqciYqVUtlIDaycjnY6eZgx7IDZKFNbLtrlDaPTMDUjDq3PvV2XJa743GZp7mfzDpqt+UUZGUxd6Z/JHeP4mooM7HerXUrRMSg7DmeqqoKicRLdkcmArnlGKv+2t//KRU1UTMz1WYaUT6CL9kL2qyp6R6XDWFEIiQLXhm159NFRXhmZsUewL77VzBVbJd/7wED1D25MiEqBdllj7tyi5WeVVovHoysksoKD6Ban9gT6qTQpHeIajXCoi8UHaKiJuSGgpTuFkUsIOYMSmWtmm4i8fVXH/2dv/Y3bzj76hRet2ts21/ZVBGcp+PNq9eHw02fljZPfZ7vXr+iye39q/tXr8Mr1vH+3YeB+ujNqz/8N7+X8MpQ4HhaQsRUZsTyrW/D/MNf/OFc8/0y3fXDT3/8ta9Px4eHc9n0ts8
fn47ivD5dv/X1T6+xXnH59scf9V7a66Ob+9N9u8RDbdtuGOpp1w104cWfQGBjjW2M67ZZawTCq8KX+fZ8Xln16rjo5fLXvvO9/90/+A9Pjq8uD18+PabHPM3bFkSHCm1iazd3t0L45ktvSs2Ipo1NkkmUirBETaapi9rz9ujbdmjHulv+sj6Mgmoray/Z8CKZtUUM+ChvKUgLbzlQV4VLMz3MCxpkrn5oYlQRUaQhyO26aXHSNtMOvR/btKiqSNdpTyAsVmZt6S41aoQP960q1/VCBqUm03lS5aImYEbtFr80ZRPNRL6IED08cmRFDV/t5WS/pJLTrDeNMLBSoQqt2jNqAcCoXWbnNej14qVeEIZkxQAEyt3Scv9fUblzHKilBkFBBTSERCMcimqq8EgwHUitPdxgB6bCM7NNxknLoDrUjJpiLCVb7GHuAiOzpIKNkgQjogIqXYqWVS6eAlGTZia9HVtrlBSo0oIVoIK9L6fD8XQ6SZuTHMMPx5siYotpmh4/PNzf37/9+NPcBqS+/2d/8au/8ktf/8a3/uzP/uTV/WlqdjgcDkeuNZ6fzncfvn93+9FvfPrt3/vqq4TNN8dvL6dGRI7p4fHuk0+/+PDO3nx0b8ui7fbtcm2Ph4Hbt9/cND599c0P24/ubm98I+uxcSx62C6H94/venAwAb+Ma2SdJhFtj5crvcpxPV9m45brjeB/9u/99m/86l+f1usXDx8uY3UfYv0aQygAQgDTeZ63MVR4mHvbT1lWcrfnTRFhAshp6ptfK0q9eltsOfXeyya/uTl4+HPFaMAwzqZDdesVAqFnOmOEbywPEG1qaj0koXtMTA7ZGxFf60pRVhnQmyrYgcr0csJVSrrJrqiuct/Kr8PhnmRt2xYRIha5rdcuGGXoRxbKPZtpVZm102lSbdvzJSPCScuMQdL2Rcbe+qvqS8QkSq13bc2mrAgEiu7OLEbsXoCevhtTQjMi3Xfs2iAo391opZLJrH3RUyKSKJqRYi5SiMwR+RK5QVYhd+b67g5LSGtNGlJGM9Om2rw0XNlmSc0CSylawkwhTbJQ4Vr07GNVjDS2Lco55i7Hw6tPP/7MNh1tZIPpxHwQQQ70rtJMTG2e5tNh27bb29vwej4/Pj089G7v3n31xY9/+OknX/vkk4/+7ffPl6f1z/7iz7/7Mz/34fHheFimuW2Z16enm/mQotvzOn3yyUff+Obj+Tl0WW6XWy8x+6k3r2/mw7qO+ea1d70uR53mt+sIvnp7e/uhztX0m+3Te2/WkfO7V+3VV59/MbejtNs/G4Nz8uk8RrTWJzOTMp3rGk8tTI+3fZ4n+8bbr/3dn/3rv/3rf2e7Pj6vH9ZxrcLU+suMR5bV0k/ae2amh/ROQUSIVOsS20Dk1GSs2xZ1XA7rukYOZKmallfVdHNjT6eF1c9rX61rhyJqZT0BQO4eCXuitQK1C75pyk4zy+JwbxNKZctUQY4RuU0yU4pKVnkOgVKQtRKtIiClGqxNsUGgFttYGaGGdb1G+XVb5eHh9nQ5xLL4ga1aU2iBGrTjYVZtrs1kQj17rOvmvR2sqqpQCVBVLZC7FSNJ1fZCm1at8q7dywWQFM2dXY5wj0IBpTsTYVfiQhRVyMqEF7RS9rWvGcvAiFYFxUpElYhVZAiE4VUkMxIl5L6ETNGkDSqlhzbZVKoHE6CpRWm57iFQQWqVxRaTHBy5bePsWyZu7+bT4djl/tNXn/WmUUVp07KHIAhjD9uAr56JU8b9/X3EyKj7+5vV/bQsh+P0F3/+bx/evxezpnOb+5fv330jvv2tz757eX68vT2FX5/G5Xv3t11dCuRxPr46dcU0HW0a41wje+DTu9vrJdnl7FeZDjd3tz9697h6fPub3/r+0188X9dv4TZO09MPfxzLN8X73enG0m/vPn18fLys6ze//tH3f/T+9e2rU7dYt5vT6xb4gw8/umvz129u7k3+g7/z9/7mN392uz6u53dXbNE6/39
E/emvbt2al4fd3Rhjzvl0q9v925+eAqooqiibzkAAxzEGCaVBchwl8sf0iSP/HcmXKCJKIsVKHAgmIGOBCZ2hCqgOquqcU6dOf97+3Xvt1T3NnHOMu8mHuQ/5vqS1pDWf+Yxx37/fdUmx6cjuJEWkW212kIqEqVsqBSJMNaG46SKfgbDmYeEOcBpnBGWhYFSdwzxJV6OVvqxEMaBvq6mauXALRGZO6s29LQV8MDdzwmDuAgFAJbETNCA0xKaGQODWmluoSKMmgV1ZgUeEgYM2c0DhjOANmnqtWmdQ18mhmjYzm5s+jIAgJHR/vLmMJ1s8W3Vl6Tu5r6hPZlayQNWSupo0Fn24uUyjNsOu944A1AWg48yJMCigBbBVAAwEWY7x1c0RUKiYQDLTcKvh3ha3eRhRkpLBY0lsYpCaMytGWrqcyBQLsCyciCopELgwzkjqBBaBjGA0cw4jCIoQI5Igcw7O3iFjiFNVbpGYGeNNzKC51YCeqNdwksQi83Rcb8rbz5/uhp2eIuphkknpnGoKVnaojYwcpjlQjnV89emHBLHdbnePHm3PNlJ7rf7y5csPPnj/8Qv/+KNPQDW85pSmafrJx5/8gT/4c//yn/3j81331otnh4fT4bQ/3/Y47uH2+zO03fpsNmytMaVpmodu1a2GdEaqremqXA2qh20uRiWlfHX1VddaSrHo4PznPx/vPjt+/JWrF/uHca/xB65ewNxO1i6u8qPNdtWtjk1Tvz4dxtN0vDrv3x3kj371l96/eFbrwzjvpwZ1DCGweqJlDJJYun6aa/Ioq54qmFYRIVhSJACBzGQVWrMQJ4honooYGDsYykO9e8xJy4YItoRTn+RUclp7PZg0YkYVIknss9UwQg22AGBOIkYQRpQCGVwRKZwNNIysEQGOoeEVciGaRTRRZivIHH6a2iHC5lbnWo82tmjhMzh4DZ1tMWdPGgB5SnNQMEfCXYfZiSE5GExtjgjnoGSZ2aggTGYmt/uxlGaRI7tJEkIWQcRwNlsQKBBuRNKWLJqZ+8L2QARJiRTYY2IAMCPilNLiWoKIpgtT2gAXbC57YDQnAggGAk5QKDU1jEiJ69JYAtc33S5mCczGCVRcCuHCkGFYBFlESBxAysjohJFb03DMlLxinS3cH11dPHv86Ops26VBtsNlN/RMEjOhjq1hSTqNRdLxMNc4WXMCZsbj8TjNp+uXpVlsdtvVavX69etutWrNbJ7M2/XxZpVXt599evf8xR//E3/yB9/5zXVXLtfr7/3kBxmfln6N8Yqbg3Srdbm5uRnB53nadWW7GhAxcAVBGctpzpdDf3NzY3XablbI2cxzN2wuzy7u/KpdPn/05BXf/d7HHz6/uOiG/tvf/d47z9/yUc/PH83ot/vx/fPLZPX5s0dfffSVf+tLf+h4+nz/8IBQZhvDYJzHrusgQrqSV5t4o44GD6BUyJA9St+11kBnsKZoFuARTIwBi0FdXdvi4EMKd8lMq0w2QygIBmoko6CudbOr+aQRPcoEFgScJEsSI3c3FHNXgyCjcFBubuEK5izkwRDGeERImcHFECeYskK1aAa1eWutNWuxSDmrhgMimQY4smPVNjZAOiU5iHTMzGzRQseGpthFzpkyWU9ttpQZg/jya+eBCr6AeNQh3kBEA9zB3RdfERFHuJt7eGvqas3UzCPIws2NEJJIylmYSy591xVJb2zEjEiwqDNj6dFgRhRYAmHEyLi4Z3QZsjqqhaMtmjcUA0FMKMlJjJgdPdiAnAiRnNAZAUFc0YwIuwDWiefZhpxfPH787PLRZrVed8Ou7C4250/KWdKoZsdpGoaVZNY6GS2IyX7o1+uz8361Xu/O12eP3KxONTw+/MlH5+fnb7314vNPPy2pUMCk87Mnlw8PNy+eP7tar159/vFQOiJx1cz88OozbDOmxCl3fX9zc3d3f9t33W63a61N8zz0q8NxbO73D3f399eMMXRlbjWXUnKCOiXCQvz8+Vurspb
ml5tVO5426+Ht7VmPtF1vV5JWq7UkOWd5a/3s/WdfSeUwz7fjqVJO6mzz3K/XbuYWqaym2lprhUVSYiQkZslLwVyIVNVtnq2Z6TIX+WmWqi3pQrBglO36vD5dfR9ehauD1cmqNwBzUwBjAmYQIW/VakRjcMTgePOsglE0cCRXb3MdqzVtS+MNI5asILg5A5vb3CaD1rTOXitYC9WwgCAia+HmbhC2RLbDHTAQQ8ygqaaUh9UgIgQ/9dgukp3m0ziaNq3aZuPteyuRYBQGMKsRQYEU6GFuXutsphACCBHWWlv+Pl943rE4HqJ5I8Yky8WZc84d58LSpYyZ3qQdwgGA2IkWv+iyXECLQCYmcDSQRe4tS+7N3Q2VMiIT5mBySYSCCwUfDJFiwf754iR0YO4gRCeFyTPBbr15dnV1eb4b1kPXDXkom7KCMaaHaViv15vNeDiUUhyW9DwRJwvQ1jhJWQ0o6fzi7PLq6vziEbp/55vf7Nbd2dVlPYwXjx5vdv1Z3++6/PH1Z7lkPO1fH/ab9RnApLU+3D3knIZhUPWSystXXxz2h812uz07n+b6+uUX4zTeHx6Oh7v94dZN3ZyY1W29WR3H48vrmxa6PtuV3c4tusR97tj5ydU5IgX4OufNet13Q0FJDRPnFy9eYAmtGI0VvY61ELbWwKPvh3Gqc9PSdbkrROhzizpLSrlfmVZXBVwuYQSBADFO0ziOqhWX2TNhAJU0PH721su1fnf/Uc7irTWdEEyEELC22bQRspAQlzAHVdAAo4iEjIbL9pSQQEHfLJWW6zUgEYjwEh9AD3VvEbNOLZrhIoc2BweHcDcPRAlHr2S6bIzCGxDy4jEC8Fy6nLtwXOD+rTVtOh4nndp0PE3jOM/Gq7d7phAQgGhePWCpzpupalOt7oZBbq7WWpsw6E0exdzN3iA+GJgwiRRJqUvLHjuJ5JQkCRCbofvCs8c32hbi8EXevswLHAgzJ1yWYhgIGAGORgLExAVZIIQIcwSQEzpEEBiipwAMVPMAZFzI+NYccbVeP3tyfn429F1XhjWkyD2CwenV/mx7cXZ+Fk33+5PkMj3s728OD3ev5+lwvH99uH01nk4Pd6/ub2769S73w9l2e7bd3N8/vPvB+0aw3aw2m441NuseEg190TZ6tdN4Avbx+DBVr00vz3bjWG/vbh4e9mW1ktJbYG0zUty8vp6ORwBLpRyPU8lDq1USB8Krly+Pp8Plo6vVqtNWH27vLs7POGFOJQsdxiN6nO92q76fJ3398nbaj8+/9P5uu57up3k6eSg0dNM6uWTklDQgSelKl/uCCOxBsVhlDD2SsGRp2mozJonwcRy1qbkyLfkAYABzTGV19c5bP8DrHx0/FqGms4IRLs3WQAb1qja7ejM1DQgUFgJkFAABWtxuFGHLvhacWISIBGzJYC/cH7No6oowoxlZsC8M5sQkjAxmwW5hCosvzMxcw41Cl/OdTdNcqwpl+Kl1sDWbx1rH6m461+k01qny8GxgYWECcAdzgDfdNvPW6ptrQKC5am2wnImWNPTCm0P0BXoPmCUJMzPllBPnnLqUJAyXCPByWofAJdztTurmAUiAEe4GQAxEBEgGYBGLFDpQkAVAINjdEDSzMaACkrtEQzD28ADDCFdk5uQcCE4oiZ9eXT66uBhWq1yyMAXPNMNQ+0fnj4Nhs+r7fsVE06woYjadDveEkZmmw1F1AvUFY3q7v099v05DyfLO73///uXLq2F1bIeRlA4Hr9PmydNoBiGTWZ1H0GamJdHpdPz008+s6fnVo3BaItggsF1vbNLgfH27N+L1sPLQhYL/cP/QDd1bz18cD8f9w/7u9o7CAaD03TxPNy+/uDi7MIim8/F4evX6/tnzZ++9/z6oPuxvPRScx2kOB+47FJrMUhn6PCTghRFGzZGQZS0sHM3U2lwB3C3m+dTq1NoSpBUk0lYzp5TEVLlfbV48++bhJx/Nn2G4qs6zhqqqOZhGMxgJ3aL
Oba6TWYMwWvbDi0UGKWAx2nmE4pJ0JvDEgkDgIJQRMci5IDIEARIuzAU3xGWW5Bq+oOAiHGEpmRgACJFEgKpZtVYtAgSQwuaq2rTWVue5TtVVp3Ga55mHpz26pCRMjhBhbuaIvtTSalW1CLBwr61GLKxJ9ABdXDBAvuDuIxaBRRIppXSpS1mW+zAvPisE8wBgJkHkZi3cEJwQF5b4cq0OVCCLIA8MWJbeEajIhp7BxJuhYcTSt4jF/UgYiBQO7iFvtBNMzH1HF2fD+flFWXfUEbH4DH1dfeXiPRHqV90babVCpHQ87fcPt8ICKalC7oZUhpQzOIyng6xKbQ3m2ubjxZPzs8dXL7/1ve2T9dgOp+vbh+Phxdvv3e+PYvAwHsPa4/OzcRprnQDi5u7B3YGw1nZ2fmbgEZ4ln06TAc9qpcsCwSSTzo+urvZ397vdWcnD8XT66OMPIQCR1kPPzJ98/Al4lFwOx+OpzgnlyeOn7331AzLY399WNDDSGqc69UPHqXO1Lg/uHOrgCMxIKUsWzlRKLt0bWrjqXKu5Qtg0TfM8qmpOKcAhsCslmIn4bPdo/d47//LT3/7B7Y9QYxqn6XQ0natOVWdzd53CnJgC0Kq1qvPYdAZXAAvX6gAYpBCqjZALsbkJCqEQACGlyAEBpEtJigGEkTkCnSAiwDzMMXRRoxIahfHCpViMLYhIIIziHmYtwpkFI1zNVOs0W2t1rnOdT9OeV89WdbZElgjA1c1C3XRWsKY2z2C+4OTMLQCX7EOYtQWwHsv7H0FNwyOlVEru0iLmoAVyj7IICx0gwsLUXSFCTZ08CGJx8wFF8JKVEw9WV4vqIYgJWBkENQuKqpoCukTz0AWCqEvVlICF0GxOJJkLZoJ02qzy06unwyrNug8UOnJ3TB9cPnGsU53ncbw/HupJp6k+XH+xv7n18JxXKF2/Gdbrs0NrOaXLR5fnm/XTi7NgI/KbDz998ZW3yvnq+sOf9AGBDCJpKFeXj1+//HhuD13ucuoawPE4e7O5WepkfzogR78ehqG/GLbH+/3D8eTM/bpbd6WkfP36evfoqraZEQjTfn+c6vjDH3zv3Xffefzs8TRPr26uD4cDkHTDalJd+jQXVxcNY26tzlMCqbON89yXgUhO44mpMCVCIuSSulwGlgSOyNwlDmLuViywTDgYgrnUVk+nU9dlEZ7mKUkqqXNJjvBk95SePfrH3/pn33/5fQbRycy0tXGuc23VWniry6sYDBFSBM/TFLXGbGFokJEIQtyDlqABBjKxoxsuXXAKZwYidg8BBDQEUgh3IETwCDNXAzVrjibknSuqerizMKIREgYhMEDEm2o4MeE8z21u4NbmWVVrG9VnXj8dzJsFopMrtKamas28YSguSrb4NxcQx+ZBSGbuSxgIYgnomztEpJS7lBdHOy/CpsQiQkxAqOZmoUstUsEUm4ZahBsyICLEkhJij0CK+qb/brQk0z2shQCCkTV0awzohui4aNTeqMAEnC0xdmQdc0p52Gy260StnG6nK7h4b/O21uk43V9/+noe23g83N/tx/lU5+N0PI2nk3u11lqN1dnZl778rmJ8cf3Fbii79apkRrRWjy8//vSdn/3K/rObdn+IIQfQ7RcvH18+vjvi1dBRspcPD+e7iyxye3c7T6N7AOTd2eMlDnJ5dXV7f+NmrcWwXW36fjod7k/71dBrtYuzJ9e3Nyihcx1Ph3fffw9d2qx3d3f7/UMWIcachZG1VrPJ6+zAYH7a76fakAmYZq3i1FASIEkGhEAzQgEWAARwB/QQBGJOXelKNouqNUJFCIjrXAVh1RUiMTImefb0vbbe/LVf/Ruf375c4cqiQqU6m6mqVg8N9AB3h6azWYAzSwKLmMkNjWfBYujuc7/4zPtOgIhIECMMGRbvkKMGOgUgZURzNABABUByjHAl6wv2TOyOaIqYgoCAAAtiWjDrKJRTlySDWasa0eaparUAN1X3MBt5+3xFSNpaaLi
HNm/NwsgdTMHdECUiTG2pI4YuyGloqgHhvmSagRwglrUx5ZIXlJmISJLFH9PUF5KJq2lzV9W5aXMPDFx0igsaaVFVIRi15tFC4s0j7hGhgY7hmYKtqdYAF4CGCAKZlk4wCKAwRielH0A6Hta7nJOpyUhP8pP3r95DjZe31w/7/fHhOI5Ha7W22sZpOh1bM20GHsPQOTWw+b33P3j+/NnnX3y4P1wvgzJGfH33eXt1942f/bnf/fEPURuB337xUi0uHj8fD6+7JHPz0nVqfndze3vzKufEmLdnW6KUU6YU97cPzFkkW+i6L4f7QxmGUnLfr0/j/Or27uLi7NPPPn3y9Mnl46txPB4OD6fTOI6TgSNhM5Ocd9vtNJ4YKUPZ7/f7w55FRHIzZRFwWHKOiOABpmCGKecIj+bIISzqbG5CVHIBiqajVsiCVauHlS4RiXAB5H7VX7z9zvXh9q/+1//Z3bynCnWeW53m1g7TwawBmLam1qZa1UwrWENcOL1Opt60BjC6siCRZ5EskoVlObIQYgAgquviiAREoAZhizecGQGwNUtAhbehZBYLA6C5IyESAsgb5zimvuu6oeeFkB8B7mFe52nJ2c91Vqs8PC3gYLV5VTIwtyXPsEwhEWihT8ZPZQy+jH/cm9vSy7KF2LgEms0hIom8OfwTMS5hO1yWaBERjmGual7dZw8Nd3MwRIbAf3MRbOrhDE7oCIFuBO4YAC5hblbdmusCjkRZEEWIQsCSGDMiSgZOuCppu1pxyVXH4XhxIeeXu11muX99Z+oIdLo/PLx+XavqVLXNqi0szBoLvnjriQh+9MlPrh5fvvP+u599+tHdzct5HFPqEvhnH334cLp/74N3Pvnoo47F23yc91ePtvOkOa8gzE0P+/v96eawv+/yjjtmofOzc49otbV5Pp0OZ7tdncculcPxYbvbch5OUzVtEF5Kev367urxE49Ipbu9u71++UXXDXOdkpTEaXGSW0RgHO5ur+9uzs/PAek0nQIhKLF0gSkc5jqaVYAoXde8NWs0JMzJKLggeJz2x+NxT4Q5szCD4zSdkE2kL2VVvSYqVxcX3Vtv/c4Pfu+v/8P/8niadLTpOB9Ox6nVh9PDaTwsyJup1VnnVltTs8qhPp+m6aBmDYjA0bURESTKCQpCKalwEHlCcY8Ac2yLP8G9BSghuy15TXMDwcxRfOk+m6tBBKEhUBIoy7kcgIRTyiWnlJjJCeBNTmfJfUKAVaut8erpKmLpRXmYI4C9iXN6BCKRWbjF4po1WyLPoI4QvEDZFv2Ug/sCfY1gokVYnUSInJHDQc0QwZq7RmtqLbxFm1SrmmlYENKbYJ2DGyyHrlj2ZxYUiB7gYIoeFL5cPgqQpmXbxpTSG3IMEjMxZgLGVe6HQrmAa3m3/5nLvKmnh/OzTavuZoBx9/r1/e2dV0Bg9WmapwhAdAAoXTo/v+w6/vCjH7nBk8fP6nSo8zTOIxi0aJ9+/OOLy4v1sH59fb17dLE/3iXudhePtE3aqlCu437S4zzWWXm9W7HkzXp9Op1MPbyN+0PuS05pOo3VWtd14XCa5r4UaxUNh9VmfbFRU0n93f1pmufEeBoPfbfCpbboYO5zq3d3r/vtOsz3xwMnLikzSdOYTlML9KbhQCCubdKTtpkZCTnA3BqS55Ka1qlWRg60w2mc57lI3m4vAmB/ut+dbzabVT5/9J0Pf/Sf//2/s9+f6mT3x4fjaZ7GqlaneZrmmYiAUV3B3S0gsgAnkPnQxuNsRsBk5iLYD5zYS8IksUjdChdw1JgM9E0kMypGmPtyf8QgJiHjCCBAi8UWSIJIPzVnBIGpMgsRI5EIY2CY24LM8cDlg1XNzLUqr66GsGWKgwsNg5wW52AERqBZvFEBOEAwOIUtuQMCCyYiQASAFmaxXDdEJOX85nFkDGBA0kUSoNGqa9VoTdXGaZqrxfJ5X8oEBKoKgAC5zQ0W0pUGLHpnC/MgFqRgJkmceyy9i7BIkYQ
kyszIQYSEzJmplK5IlwvMw9fPvv6lx2/X64dpmrCj2pQd7+5v1Zs3qq151JTSPFVhSl3Xr9aYCT0y+mefflTneTucmdvt7UuhRASS5NMvXj55+gw8SteVYYWxSiXf31wjxGoYxnEaD2OiDplW67PVsIYQInLTOjetEwl3pbhZbTXnTtUO48Fq7buuqvabdcpJiE+n8dXLawwbx33uU1e6aZyYGVEgUKe5255z4OHmzt04pyJJmzZr682mDH1YhDkAjodja1pSEfN6PMz7g05K4VrnnJKQMdHpVF/dfFHyqu92SHY67RH53befM0l68uK/+rVf/i/+yf+XqUBFa3Mdl6NFi5gtFAlyt9TNkCllWmVIEizRE9A0zc3MhVms77HrUuEAaoiQMgsKQDhbs8okQMEUgkyJkRzdQAEVwH0hX7FiYBEgAHRyDjA0QCRkIpYkTPzmCrysd5dJv0I4tKZzNQ+QACMIX9icgK4RuFThZfm2kkQR6F6JISUhFgglAgRHRGsCiAu5P9w8KIzcUSNaLM4nB6gL2oUJhSPhcmx3Yl+gKuHsCorhaCQIiGYaPnsYAQIEEYFBOBMgSiAbi6ckktGZUxJmRoSIQEwighKAQaIRMrdp1u08xwpwEGKHYbs+He9rbVmSsne5a8NmgjY9VDfzqpvVGsg55wBobVbzTLgZ0t3NF17nt99+zmKvr19tt9v5cBLpPv7443fefvvlJ5+9/cF7bKVNbWwaAN40LFqzOs3d0GeCeTy1Nvf9KsKO4wGxdr0wpc9vXzrOK+uF16ZxsMOwWW/WAyDPk7Y6Tqe70NNpf0iC2/Vam+ahbC/PTfF4eNiuhob5NJ4IDVgg6HCacumvLs9JVqeTgiShnCiZs9da0pYTgk7Y6jzttU1mXpi3m/5h3t/ta5d3w2pYrYZpOu73+w/eeZclYwxY0ne+9x2bYlivHBp4nxKxo9UZRLnjqZ1o4lKGQBKiwoTBc3gpadWvam23p70zHE/KOYY1AioGUwQRIBgnI/dEbB6oAQkNfFHvIieBwHCRQpEgQFJCSwhu6IQCFG7N3SUVVUUMFtJmvNDhkTCQUMJbBHoLtEBEQUQIxTBEjIW2SeIQcHJITBwYjIiuTgzgAWzC9IY6HeQWyxweFxwRoDZvs4ZGqJsZIgHqUlnInBrq8qWRcxeBfedew5TAwNlDQUdAljD1mGlBexNSCoAIRSRoCMjRlVRKwkzEvowQkJyZkqSUiMWDDAozL683Ok0j2vT6+vqS1scYMcOg6WFqTWtKiaRbbbjZNO5xnKsk3mzW41TRqxm3cTSKgBCOOh+++PzTy0eXZnZ/d7sZuu36/O7u/mXJj5+/ePnJZ1/+8pfnCZyRcnHI01gp0XRsVNP+8FrDk5RS3p6rRljOeZrGLz67Do0QnVs1r2belQIoktPh8JCoI/ZpfBiPD4icc/FGTNKv18ES7tuzzc3167HpbjVsL85Gg6axXm+kX6vGdDzU5oSdpJxL36/PE8LyWxJvFPcahyR51125tanp/f3RHM83T1JyAJtnBZBh3c/Tcf3o2e3dy9/4rV/lDlOHaMA5g1jzxoAUxTTAYppP6sgCm7QCrKpMRMi4IOfJo83VDQ/iZ1sbMgMSIVprLEAJYQ4mADcQRiYgd7XFFkwUhMEIGVYo6iAUGEiCCKHGGIrNNMwI0MyYnYnMLCG1cNOAJX9h1lpDDEbk9eUKdDlPBoQjcEAsnjpCREQ3C38Drgogxrx4+QA5nCPYFokBwJKZQ0QRGUonjAUpfBmnEjiEoTXSiilJJqFAcK3N5moeGG7RWrSkraEDBRMCI4pQRAC5MAcHCkqm3AEll8JJhHjRdHvuqeskF04ZuAALdrIKUUQpBHrirV4+v7joWI7H0TAE8TTOk2mtR21a6wyAm836NO9ZCMIyy2rbtdpqra1NhEGE43j67PPPt5urdbcyMEF66/lbx/FU51a6bn946ZVX26fuBn4yMA9KkllKgD3sbyyilJW
7QSB6TON0ON6thiJpYC6Hw/1Yp/X6LCdq2sbjuFmtT8eHH//gu1q9Hzopq6FfoYgjqQUh3l6/aq3tNmsz6/r1VNt6NayGoWokLOvhbLvZ5DJkzlOt4zi3cR5Ph3F/mE+HaTrprKDqaqdxvrm5AYX17gzJofnxYX9/dyslXT2+UIv1ixd/75f/yf/p7/3N7dV6kESZSIzFSRCZkDMSMZuTq4bFRER96sEJjdnYnU5tOtW5TSHgQJZdVlIMLTBEEqEHGQjGAoSSCApOkgQRDRctIEVKuUAmBAhiEEJAISSHxQposWBEwmlBNcDSTas6naZpmuvcWtMIA0Bi4d2jTQR4NMRgWOSS9lMkkYUtsR8KBwISQkQkDMQMKD9F9FCARwQGMBGipJRKkS6J4+Jy+ik7zikMIxzcUhJGaurTPJmGVUZlVwMXACGWtPhCKByJIQECJ0aCtID/E3ICEYQUGETsuYPSldSl1EVOESkCIyVFTNGkk1RgeCRP15QBnMgP93ttRsJNdZqbh4KDqSXh8/PzXCgVqjZnQWEKb63NhJiQs3AhvL57terXV7vL+VQD4tnTZ7fXd+fnj4L0dKhXj97JQ7q9edmXToQlFVXf7s5vb27A0zTp5ePzruvqMQIaYDiqSLq/26cub9brVcnW7P5w64rrfvji849efvbJbrNZb7bIpZQ+d3m2hkg2T6HzejXUcSSksdZqrfQbNUwlMXdplQ3NXU/jMdDcbZwmbZYIicLfOIbgdJwOp72Fr/ozKaJVT8fDeDpAxPZsu9l0qRuGyyd/7e/9rV/56LfW6+3KhQo6mBAQL/2pQAZOYuggjTpw88K547UbszF6zCc/TdXqFMxgjKpSMGWiQMAqiQgJIILMMMwjQAlj2SsJQZYkIT2tEWAZFwUALFoVojDXCA9bPORCpLWFujcws7nZm4PJkrFkSkmYkfur5GHmhsGIGZECGCG5xZISAEQCRiIAoKXMzgAEiIyA/wblgAzkwIwkxIlkeSsQAXAEuQliAoDA6mHqLiTMubk1C21u1cOCGAk9clDBVFgSAQAEMGIQASESMSdOiEtERJAAICIlTplTV6RPlBAEmHJQVJyEJPk6z/lJfvpMHmcsHu5Wx8P+4XCIiDaruhNynU8AHt5KH/3AQ58RKwIg6ul0yyIIETCbW+l7YpjGE5I/e/HiVOt4PLz/3pccdD2cE7vqhMQJUzvNw2aQbnWa6/pyo9o6FEYo2/NIXe71tD+Zq0iZx9PhcNysd6XvgfLd64fjwzUXJE0PN1+MdX9x/oTL2m1erdfdcH6apq5Lp9NECLVVN5h1nqztzs7DYZ7GeR7V23E8WCWRTBJRW5aCjDpPbj7Z6GbWFAPH8TTPp+1qq5BYEDhOpwO497vN7uJsyGl19ThdPfrb/+gffOuz3xtWkMmDgBc7dBJzo4SUAkUxGWVwEnVHp4QpKkZVbdWa17GNqmiURUZ0B8iUgCTQIgiFnJZUNLpbTjkFESAGIiCBMAtA0gAMRyBYDiOAulhf3uQ6iIExyNvC9dQIqE1hqZlZQwpO9CanM1ysIt6keiIs3BAwYCH2IDiGIzMREhOFh9CiB4bFDoAAHosCDoRQmEkwZeHETCJSALDOGgEiCQDdFrotIjIye2CtzeamVWPJvXVAGbmQpJClmokIGBBAuEQmAImYg3jZPVA4IQIQYArJIMmRlJPklBg8dCl35C2tz61nYQfb395PpxGT1Glq86ym1uacQMi6TABN20LfR2aBwNNpX/KqdMUhcl4hCXLenW3DfH9fHz95NtXDeDpAyo92T8Y6mXnVVueZQp3AAHeXVyJpHsdSinolgOePnh4Otzd3dynlJNzaNI51e/FIcrm5ufvJh99fiTl1q7Oz/d3NPM/dZpdltbs8W+8ubm9ORD7XUaRTnXSeqlV3Xm0uVWN/f68613YUpjJs+/68mbkqGt6/vj/e3Y/HY6uTNrPmoXWurdaYWwBhPwy73Vqt1rDS96WU7Wq
TS9p+5YPr24d//K9+9aW9ZmopCwBFOAtwYuQABhbAhMEBhIbgCGCGHuSsc1VX4giI6hoa4sREro0JKVviFA4OgUzECQk4EbHQ8j8GJkR3ROIIimBQC0cPCEMPaPbGK+wRYYRBYRHL4ugNnwFwEWaHL1+5JMzCAssAPhDCINrynoYwBEBgAEQAbW42BuScxQPDgSwSuHAYLQ8nhIMLARKhMAou4yCNpt6aikBKCYLdUnggNESEEIIqxClR12OzaOHIb9QDgogEHGTmtow/39xKEBCACTzeGKcdQwM5TBsG5pKRgMgIM9MGyE7z4X48fjLHOnd5ldqRvFnKA5JRpvBIDBmkK11t5KaAwzSfaq1EZN4Ipe9Xkmjo16rd0O9SSh7zMGw4/Ob1yx98/1vPn7/lga3ZXE/b7fnDwwOxPNxc77rkzVNHfc7j7JeP353uP3/56raUMo17n2O9Xs/jKXEAihF16xU6fvLj74ZPuftg2FysdltIHUnfr3brfpty/uTTV+FSUktMx/1DbYcsJVPnmKZpMp+YnKmszs4BRYLmaY/IOtfb6/3h4UGI3QLC1ZwDTFXRJPWZJfdDN/A4n47zaXOxW2237XjCMCk7SP0Xx7vuaniyP5/nwV3b/FDnQAbhCAsPfMO3dG7NCN0wKvjME3FCIWIUi5XTWOOkEeY53JFaQPUQtZyzNUuMmYUQkDEAmgZSICCRqLk1dSAKNyNGAEf3cAtHNPXZF2XoGxSnmmIEkRBgYCwKTSRbsnqLXog3jwZEClu2xwwhZhhgsbTbidwNIFgICSE4yH256UKEmQf+lNZPAcBJck4JBZGRYDEBm/niuUFcPoTmFghEga3WVmtEIKFjAC0ouAXOHZTIAcKc/f+vCnyToGYAYI/QJbDqgESYQgrlTliYMFiyAzVDDleNh2nusb8oF0POrY4elomZWHIuOW/Xa0DLWbq+Tyml0okkD2jVUpJuVRCg1pNkYHZAHfeH0nHpu0dXT5CYCS8uLkvaKkJJCKBmgQ77h3ti2G3OSuqCsC+Dt9Onn/zw4skzLuubmxvC0Ho8POx3u8eb7Vli/vTDD/c3Xzx/+90X73+tH1aDoM3T7nLDiVLqbm5vWTKzt/HgdartlHLpVtvpWFXHuR3dqjXry9Yj3T6cEKUvyU2Px0m4O7+4VHewCE6uDUybOqYEoMK4Wm9O88R9d355gQgm2A/p6ury/NnzH3/+8nc++93vfvaj69N9nzkJuDBlTlQiyBwMQjIBO6YIchJxMEB3h4x9JhQKAaIwiGYeFqliYJeRUsqJCyMHvnEFGzMiomsDZoiflmCamwUaRoA1t+bNMAwNfNK51eZvHhGP8DA3DW8eEYQYjhFY64Tkkog5ByKSyxuYYThy4JK/AIjghYqKuNDf3V0oKBZvSsUAaot+b7EEeLAEkjDz8mPuvgx2lgREM3NFL0H85gjkDm4a5ozMLJxD3D3AXCEANQLB0AEAjMEDwxHJAyPMEWwOJQNCd1JVFgwAymQ15tEBRFgg1B1VAzwpehNQr3Ozgx5WfTfP8zSPZ9udpAwRVSdBsRrMCbm2URkT+Nj1KcJLGjYXq5cvP6+jnm2G1WrVut7MoukX95/studDGp48fu7Exxo6XUcEmq/yMOHRImq0zarzadLWJHecUp+LO4gMzKf7w8PxUJ8+zY+vrn7y4Q9fvvp4vTu7uHzx+NHFaDrf71dl5SVU52k+bbdnEHV/+2ocZwAathsgvr+/BZubz25IOAz9RgPG+/vUD7mU2to01aHb7LaXL1/d3N0fXjx/Olp99ZN71Jb7Iffd6fgQzvu7oxTouCBCs7rBIQWUftiL/JMf/KtvvfzedbuGnnXWXkpIqtUNtc3zQq8DBBExUuQAx8wc0FqzSlPGJIICSdR1SM2iRaoNEQwB3cksJCEJL3dKCIJAQgDzJUxvvqR0IjAEoJlqdQ9llAibbAZzhuIU4UYA6LCUDhYaj/tP0WcLxxAAEQ1M3IIYGQP
RkYAE0EMbhCIygUcENAVkl0SIaK6oVt3CCXnZfDvyYuRlQQpzwyACdSIKRDKrrVrDMPPF8QQAod5aVbdAWFj/uLQQkBZyKARAAwhy89Y8XJfXPhFEUGsaFICsgGqO2MKkdFRHYFZCpiQW5pC8mUARBp+NJRODq2vVi4tLDiR/U/Q3BZGU+26aTmqt1dENkKLknlmOx6NwfvvFB7c3d/PkKfl2+9i1blary8vd9fX19WEadpsvf+P33e3r3cs2ja9rm9jp6upqijmY1MfVsKktDg+f1dG0YS7w7OriBx9fT83OL6+aN2Ca2oRJnrz1/tnFpav3fRlvD/1qfapjneL88uL13e315z/Y9P3l5aOQ7uZ2b/XAUMf5GBFnm8cspZrOY12tt2dnFxEWmDarNQVf37z67NVn24v+ra+89eEPf4wBXDpKMo2neVJadTnJZpDjw81h8rOrR+cXT1lr3/ff29/+4P7zl/bgObbRj4ynNksQd9AUMQsaCCKiRyiiuLoIgQdjyiTRamSgnIQUXFLomvLczFyFVBJFWKvBTCJAwuHRzDInohTWINDNVR0ihcESl9RqVnVuEU4BHugE6D4vCbQIxGUGCmG2UGuxNWVmSUTMHuHg4FWEOUIDnEhIkHhRWQkw1lnjp6944UQQQMYsZB7NNRBTZrGAxdnMKSgilsWwhZMDI1kQYRcxtVYXYihzEBGo1VqnVtUUAJaNLyL8NAuHC0g9zLVaxAK+ckAOIvfm4EG0mJfAXcMroNY0jWYITqYFmSA8xLA1M4JtObvcPO27TY+GWk3nlAatLVQ1NBFoVUBD8DAnFE54dnbZ5sndu6uz4173D9PubJ2kO46nNh5XZ4O6P758EZA//uTHH3/+o8357q23vzwdjoe7a0A+zuOOUPLOw7p+kzC3dqy1fvm9rz168tbDfP9wukmprFeXfc+PHl8wJ4cY1punT19cXV1U8CI50ai1glMZ+tM83d5+kcrq6dtfrqoff/KqcOo6cRPp+kRpHtvhdAfsuazneX51/QnKar0627cZDOaTXV2cvf3B2/M817H2w6pqvb+/m/YPqUsFi0eM0zxr67ddGEDXb7cXgfN4uNvrfbBnlmqVSIQ8UFtTYGLgwGV00yAgzIUJiSQXbC0FFKIkzBiZVTmkoEQwGaSgXhw90Ah4bkrCEogYEuFYkZKQWNg8V18AaxGgNmuzFlatNkMnBDZwgkALLgmBa60AgILo4GoAi53EF1xVmAeEZJau4/4iEQqQcHJiZRZEZEZAanpyg4ycO8kdclJiJJyFiio0nQkgUV6EqZiR0+K4YeEMwACQkiQsauCmphrNogJA8tqOdTxNbZ5mqDPAGxZKM9CGERwYZhoNY0JvWJtnUHIGz76U+x1CGS3CFAzRAJQhUoAhIgJjBBq4NrDelaY4iXbvdM8f7Z70wypLQotaa5gJAmeGwAg3tbmeVBsh5lI4yWq1Es4R+fzRo64r8/G4GoZHjx5Lv97tztfbHZBfPH4seXV9fXP76vU41/Uqq+vd7V1oPbY5qMsl9R0SpYfb22Ykq9L3NJ/G03EsnAHg5vbV2fl2KcJeXT7ZXl6y8Gq9mY+n+8PnNzc3gqtguLu/Pbs4u3r0VI0+f/nZdp2JoAIkWWnANM/H8b5f567fltK7YimZhc0ijCAs9Hj55JKG4faL14fXD69f37g38FnIcjc4lWgTWnDJYI0M3v7q7xveewuq//qPf+2bd58KSKta3awaajQzs2UBasiOC08GnZCInEvOQfJGfohdHrpEnTfV0R1caT9O1QMSkrASuQRxAgxIRGELoc3DgzD+DZokEjppddPwuUEQLmFJDPcApJREWJ/2jcMAAIAASURBVNwhkRABE4lIOOTcEyGqLZ2TCOeBuZNVPwiFc/FADHzDGCQkgxmFUurm2UEsZWLBnDkQwknYTsdqxljQrSIiQ5ihahRJboAdEdnC4kNyEYpcIsBqm+roVQUwrM7zrG7EkBJ7mFp
FpWgKJoAJDdUswiKCgdQYyYEagnMJylybm6K2EmDMbObzNHKS2QlVQ9PMVUIgaVn1gz95zJu3Hz0/79dtPoHaNE2AsV53WWhqNQGBJ2PgJBHOBCklAra2GDHV2ry7vHr61ruvX10/zPHo7CKl3IIxmGD46pevzs+2H/7k029++3uU9Btf+kByvpn2WYY+pHSpKbH41KpI7rp1gM9z63J3c/OaWZ49fU9nRJyfPn1mioykrqpKROG5lF3qu2Mdd7vdlruXd/fYb54/e+vm+rP7h+MwrFvU6XTfWjvbXUmi4/GIMOVcxrERVxGzmHOXNhfbeTrxsWvz+OrVF83bfLI6PgxDl0sPEF6rh/S5G8dmduo2K0hPceekAgBTzK1pKDrj0m30aBAkQguWoQEgkmNtXnvtMAkGUDhHCCiGt5gBXSHe/FsDBQnAQ4CFAKt5gAUxKzkjegNTFpYEEcjaIhq4oxsGZPOGJAThqokppSScMJwCI5CZ3M3d+tIhpimC+9UyFcUU0smw6XJOIpmHYWDx1maIjOiUWSA3nXnKiKhROUnpIGVwCHA2bxbOWMKghaWUwMENTUO19QMTG7IAkFlQmKQkqc85W60Pdw8PD+PokNzNKkSYAAG6a3Vrjj4ahCxAaQ9abvyJGAO9NUrKCYcs0vNx1nnycNeKqkYMesIZ3KraFFaFhkjkrc4G8mTYvn/23uPhCq1N497bydWYubW225wJ5+nhgBhLYyGlVEppTYUzBh11zIxgen/YO+Wrp89zFvAmktt4fPXy0x999+HRbvXWu+984+t/sKwv/u4/+Fuff/bhz37jD5QyIBQu7JHn5lkcEDnharM+Hca3n7/9e9/5prb57OLifPu4ttPx8PD40bOH+5OZrfJQp1kgwPthvQ6rpXTrYTOe2ubiSbde333+yXSc+lLq6eF0OnGJbugDsWkM61WdZrVxnkxyx2Krza7vs7v1XT8djtP+mBAmtXracwDnzWyhh7uSUTmHc171z56+K9IHAuScsK/TeKiza63zOLqiO7TWqiNYFgAHZDQ3QLDw5tG1ikwcQJ5ESIgYzN0CCBmIASAMIAM6uSAxRIQZILovqmQPECNgCg9wIQ9XcI0wMHVGIhJXh8AspevycvSHJf8ZCB4AS25AwKzjCAoA4pxR0EhZQFil9DkLp+KlSGupRcVkbkDIUoAmjGBgzx0xozOAUpgAz17VLTORIzgguoIzYORcmJeCDC4woEE4C0dP1TvFmOv9cX86ztURJAEvn7Jm7ARq40wABqREGAhCFIFuQRERLoybde43BGwgvN508ylO99HmarOFSgNwra1R6DFOKJ1sV0NpleaHpvV0mhgV0FQniFjnM8F888XtelhJovu7o7u36n3fM+eUuqXn2ZXBVes8hR6KlMrQjnb7cH93/er+9au7/c1Umx2O69Xq/Ol7f+xP/zt/+S/+9//q/+3/XNtv/cLP/tKpSffo8ni8S5OtOUfEsCkBmohPh2NgnJ+fHw73wl0SWg+rMC8dE6d5spzBTJC7qR7CmqRhrrp9fLXaXpzuX+ciq8324eZ6qveUZbvadN1wPLRUOqsK4IyRCUWS9CtOyTXmOgmlOkedp+aWWCrN6/XVanv5+u41h20uLoCKElxeXDx+cnaaX/XwPAzv9ncPrx8eXKFWbbW25q1hGGggQ50cSYKawhQLIZNkMk3SennTFFwKvoGdgbJgMEQWd2/g7EGmgAQsyGhN1YFZABEDMUDNIUidKQwI7c14RhEJcTG1YzgTp4AAdHcngYgQplwSYyBBlwQ8jCkPnYNP5mbNkSUX5DLljjjJw2mOSRGDSAOIM7C4G0EgEbB4YFiqhCsR1OZhEOzuoUwdOUYk4QgD1AAA9K7riagkWfUZRGoYsZOF2mgM0RwSS3EOtWaSCDUog1dbdNLhEGbMjCQBFRnLWlbnTKVFxOXZtmlAdV6Tlzwep7mqTnOWrpce0aD60K13Zf3e7uzx7qrLm5PWLqr
PjVG6fqCUCamZvXz5eb9aI0Cd55wzc3Ijlpw4ap3MTShT4dbmNh5eT8dXr2/raTzsxx/+4NNPr1+3cKGyW9Hw8ndeXV//+T/37//lv/hX/trf/M83P/rixbtfuTuMHXGW3FpDpPE4XTy7OOr+7v6hX/f1MG93Z6uhS4m32y0RUWYS4Zwwxc1+bgFzm/b7/cVVvrh4vNvtxvmUxBDs7uZVL7EadtL1QIU5dUPKub+5edX1CR0xOQqENa3Zw8BhgtNhf7zb3589ubCppu7sfPf4eJxtPr54+0ULJYaLR4922/L6eN9/8cXwYsT+8jjh3c1rLVxHtUm1za01ImCBcCJMKBI0veGWBGLj6rUWdskA4GABQEQgQZoIghkpEwIKExEEvpGqZGBERgWPAIS2pNAAHNjM3asruBeEJIvcBw1goTEjEJq28JDF6IIBECKSJbXWaGGWFCHBeVYKUHcwln4jJDNlyIVzszoZAjAFsnsTSaEQAIKIEUrojgZcAxyhiwDwoABAJ8wsiGSAoRZAXsrQ91kCS8lp6CTnNeKu73rJxnW/f7h5vQeunBzRyIE0BafeWtTl5bIsIDTIFucTZNo8KWXbKEOfh0Rl/zBLCkZoHOLkEAK0Kt269IbiqIyRUKKl6VgTHI7JEHpQFU4AhIinw7FOo6ve3d0NuSOiUgosNU43Yu67DADaYJqhaTxcHygXyOcffvj627/7o4fR1Fa1Hcd20pvTxUB395/f3/3NP/fv/tk//kt/8re/8723v/S10/4IfX56vr25f1nn9uLRIzs2CMNM2eiorVTvzlM/ZIW4PL86Hvel61EwIvaH+9B2miqhdF0eNutq7XB/PR5ubz7//Gy3ksyJOyKaDKZqpe9as1RyKSlAZju2aSwcrkZECK4aieStt55O1Sc9Pn787n6v43TYna0piSo8urrIOe3ncZsud7IyvaGcuq4zszbadJqjuc82TxNmGUrmLoLAohEJBSvYMspGTtWjerDP5EAoJMkCESPQIdGS5CQBYTYGIARUd00gQKihECCQVJ0ZA4IIAWPJOgASQkJsy+S15MwC5hMSUhAAmjXiRTpkkvLyXhbBkgIxPEVzI4SgEBmIJSW23EOuBAeNYCdgMpbgBNaWeIUDB2BkThiJaFZoIoDI4BjqgYyZjEdKWd0QmDiInFNZDUPOWXJKKTFuSumd/YtXcKwPk8/eiYgImVTVit4rrQgAWgAgIxcDa2YEsTrv85kPO+q6Qg5sMSWWNBskwaCmCct6fd7lklIaKKmg4vHV9Y0ex0fbzcXpZMceL55upBcSDW/HfT2NdZrcVdsphs12vTuepoAGoUNeMngiIiyZMGFZn46n+7v6/Q+//5PPXr3/la89Xw2nh+mwv73evx5nPe2n3/zRFx/e3u3rf/2n/vQfe/Hes0nnZ0+e13a6q+1UbdcPZgZuFFCkONb1sEopqVvpVobgEdvdpYYG8HRqq+Hs5tVnD/vx8eOr3e5yrqZuOXcqw7NnXwqxKSqYoMca0R8e2lzbPK+7PjxYJDGrNq8n7KRZZEwpZ2QKMIr25OnjTz66PRxazrkbZIx2cXl2dX5uPKlKnyA65ry5//x7/+o7vzLV0AZ6MreGChhUhBnY5iZDODmDMCTFBoIBiGKK0CKFMZl5MDAVQBNd4H+JKTE6mqKyFCRjYfBAR3/D4kRYsGmGphMEmaE6EjmhI7lXRYYiaaH+uHtKmYAjAoBNDQBaa00hSyRmRLUGIpKIGWkZM0nKwcxA1Vmo58gQRhzq7lKcBYgTYlhYEvblu8dl2CAnh2ZhAZAIILiSFMkcFIt+rLW5SWbpUkqlF5FUUp9T1+duhrFZPdnd3bEyIZCUjVC1w8FKIx4wrwmR2+hEDMBzM0+Quxg2uFozUZACgQmHFCFOoJ4ASWNzuepKjwQDClNuUEa+GVbd+frJo67PhFODDgO5AbRax3mcIyKiAfrpdOi6rvn4sL9m15kkUfZQZCDMm4tnz9760uX
T9d/5u//wk89f/+E/8m+V1cCEf+Iv/amyvfx///W//eu/8s/KZd1uH3/7W/96//Ctt99958k7zx4O+69+ddvR5UcvP2RC9JmoN4hoMwbc3x3WXXd+sctdZ74wCeY0rAFJrZeutYeb24fDo2dPn794gdzfP4zry13fb4G73A+n/QPVY+p6aHZ4uCMCrT50GRFrw1XH/VBQnK1FRoQkkSiX0qVVP5Dwq9fjrMd+GAjYrD46O39yuVYfKWKVEkhEIoD+o08+fXVzOB7382nEyq6QUlqvNv1qve1LUz3CQ0DjkE541NkjIxb345v6ObEpKRAAFKGanJVNTQAJRZMxGYKnn7a+AiEmwyBADg9EDA1oZh7NEyATOaIuAf5FM+puP7XFE0RzCwCMQHebJhNRIQlRETEPZTYLRHY3ceAX31hLMhYTRkZqszMxOhGSB7uDNy0FVucpD5QIEIiIiD314mEcOXDKneQhdQP3PQASWlQ91HkOyB33XZ+HYVNKVzruenKEAEHiU9sjz0whCMxNugUjRJsd785tvc5UiFJgZ8N5161LXyD3VnrGiOoNoYDkcHJsjhMw5q7r+7xer/u+6zerPDCkmPSEYJthWIv0sircJYRwdQ/VYACoOnoj6ihIa1Wbx3HvHtNxOp1Ojhpgm7Ord7/2i+fPvvLZ7enXf/d3L148PTs7+7Vf++2PP7/7T/7T/93jxxd/9f/6n/3uj370+ublk0fnX/rKB7/73e9zxJfeev/86slmuxnWHYTa8RB1XO96c5qPx+PhtutXfeklp4vdpRpzwvV2havd5H3u+3FsR5ve/dLbzx8/a6p3d/eqcbq/2+9vMKfjaVznnkKQCqeOCIauL1lASnXNvahqAEpyt4BmXemkrIf1MGy3JW9yXq82w/pip6ba5rPz7ZOnj1u06g0jUubd+VvD+x+gH//h3/+7//DX/sVeT0PqqCEEdauyLv2qrNerbjNc9mk7xRzUaujJWyAKSABlkIQ4iCRo6wSrLgmlCLQgMx5rnsIkYcrCREGBRGAqJgICYQARSOpg4RCEQUzIQkAYAOhMJEgmQhggLCUlouoRhEqh5CkCLKoI9p1IXkDMTmgRoOamYcFC3EQKCzMhZhyGoY6mAQFL1ACkQy4A4Ik6FJBFrQwWzfuBJgdSpmwpkxBLhEILFDc+HbVNp17GeZxg13LuKYkhcOa+H9I4DsO6RdfkABZhKEJpl2CjOUseOkrOtatTqjrnHsCHAE3dkcUcAJq7VHexYnMdVWrqC7NB0jLwqvQMaIzVpzX3hcGpGSW3INIMKcLbNNc6g3liGrCYYmC7vb9jZqIhE1ea1A5G66uLR8/f+32rxx8cVL797W+71idPn7bTw/Xd59/61g//wl/472r4t37v25n4s0+++PTjj/9H/8P/8Gs/80c+/PSj29uH7eUFJxaUvuR7tzaeCHAoopS227P1emD1tF7PbSylpFXPwznIrrAweqX917705en29ru/+3tffPbhw+GLNtf5YSbibt332+6DL33j6bN3jZwzswpmysOK9vceLQKaW2KmtLbTLQGHOQ3OOWXqQ5tGONBuWHdvp/3ZPmWe6wER+7w2r+6eh8Ilf/G973/3u998erH+yuWXPrnd//D2e0li062TkLBzcObgnHtfjebCyibq5EyZAABaM2ftc0ZK5uGkgO4QQJgydW82uMiSISzcF9Il4ptYP2AEKCASSSyldQZwQAdOCBSIyZfoGzROCEhCrr5YcsGnpq1No+uGJSoFI6CHYgB4qlOb55Mwp5SZyJeZY7dKrS6yVGYIyeYBVIwQMSBR13XNAs2FhCtPBuaWOHeClIVEBBCNBBDq6XCcbtf9er3ajOO+36YwCRCIRgApQ9+vTjVbQM4MligEkSHRaigeVIau6/KRTh2sWfrwpuZJOMnCYOynqgETZ6YETEyFc8mZJbEkJI8ghCzIltCNJQKahpuHIWvVphNaTHNTgs2wmvxU34DzsKTu1GK
9Xpc8PH/23qO3PujWjwNgf3vz0SefX148effZs3/+T3/54bBPQ/dP/+k/NaTdbrOf7v/Mn/3z/+P/yX/4q//sX+y2xzre//Czj1+8eAYaKCGAJISp88qpb/2QwHk+nkrZUGROnjqPLpX1Y8au+Xw81reev3N//ek/+gf/1fVnR2vlVHkeeWpGjtu7k8jdpx+9fPvLb/2+b3xte/XWvB0EoR6mnLAxnY6TawvmHF3aXZ7miXNKyB0mV6ttBAug1HXdsOZ+uFD1aZpK7ptGVcurlNYl9vcxPXzta19ZX15+74uXx+MxF4mqhYckRAApc8oYiFmoQRaB2XSCEEJEI1cIsCgWC8AvA1bwpVGl5gpvzLmL5oHdEQkJCHyJkDkAECVekLdgQMpiqABKiwAYgFogUEhCycYIXmPBN89tVG3uboFzqyVhECAGU5jPBOjNrLoQkQghhQcSQc6csrXGjBDhRCHCKVFJQFCJexRAB2AohVmEOZhKphW4IYIRpMiub3Tyx7vT7c2h6/KwxrTqV0OnrTFjrUpsORNLztFnIsSEzoxCKfqUKWPpejcCr+aClDwaumVhITY2MHF2ZBWjlCMhY3ZJlgiQGvOAHpDMWAh5gLyVoda9UuYGYeR1qvOxYFqv18Bw2I8aTWsDgKYzsAzDmnN6/Ohi6C7XF5eb8/PTQdvpYZ7Hx289v351e3d7NHfEcvFoHRGhdnu8+9N//t/7j/7KX3l+9fj/8L//PxrK/WnsV0OHzAHeXPU4pBTQlUGPxz0aIbAn2pxforXT6frsaserjTazSqnvbNLf+OVfvbluH73U0WMCEj5Lq+1pf3j5cNrI5iLi09/7sd492M/Ho3fePt6Pp4e7gOZcUvGUsmo7qfV9GRKrTkBxmg8I3Pc9MnFOscQrASJitRlqrRAwrPrHz9+S3RnUdhhv7vcPv/693/3s+jZttk8v3rq/eV2nfT+cp5Q4UekYAHKl2SEWAgq0wgkJXDXcXZVEhFFQBDGS8+Sg4E2NCAkJPEUgs0YQpohYyljuhhji2R08nAUlIbEjG2ZxNUAkSgs0SNg4jBmBHZuC0zw3NUBOEOgarTGTs+BPtQQjU1AkATDmjJwFMUDZQjLjPAEgGoFTztJ1uRRTmKsCOiF1qQTxLCmI0pA7RjCXlAnAAHCZFaMlqLZ/PW63h/t9KasppQnczMIh1BTJk5BAl0AwMnIGoCScMG0Gwky1zZ3lqgRA8wRETuBmwJAUXDKbUoQxA/cByQiJIyCBC4Qb0CwJgKlDgXBJZIbu3rwxgIh48zrNWCAXoUbBjuE4JAQZhoHAEbHbDdPoz1+s6nTKWTjpfjrsZFO1jcdKMSBQ4CgiEPL/+Zt/4z/5X/zHv/DzP/fo8dPf/u1v/oEvvVtWgxCTYc4duj1+dMEid/cPfbedp5OkfrPbOWEikUjinVFuudnptC79N7/9zdfXpx/8ePz+3YOcd48uH+8uLnR+2Dw/h5Db6/HbP/rx164udvPmd7778VcN18NKcjIgNm+mREQuhiAiXZZx1KZR9bgeVpk4sET4IiZpOrvbdPLA4KHf7Xb9+jI4/eQH3/onv/kvvv3pj8b97aasuv6MwMHn+/kBMZiR0ATRGfrSmfvslTEQ2CoJQ6ItYgVTgoSISFZSLxQOVNDmOrVa1RsgRHWDhsJvdL2OpugGiAQpWBDDS8KcEQkIyV0bmPtiH4UkLGTChN48nDhQEYDcgDHcwYM8FNHB0B0RCSlYghgkyUACxA7BZghvlCsWAB4ZCPsVdQNzhvCsjuhIEcKYBwTAlBm9BYRgYWEOnGutWrGFNqRE4jHu2/39cb09gEffr5pNVScFcPfEIrhCCxQKEMQeAQidiIgZdJFlzOrhgQiCGKgKwYIwYHiAs9GQlutytCKYKYXGTOQUwICApBiRYAXbYjlrSDCAOfFCQPXmCz0+5czdDj1EpI73jx5dcaZC9vTpU6BUGC52q8uLJ7/7/Y/+zB/7E++8+/6PPvr
44f6W+FxSHus4DKuPf/id/+n//H/9pUfP9w83t7ev5und3e6cF/QZ17Pd4wgc2yHlDB7H/f7q8VPBzgmFyBBovUFKYtGv+3rdPvnw81/5rd/7/OS/8Of+6Fe+9tXnu6/+tb/5t/+bf/73dDz9O3/0T37j9399/9aT7//Wt/pKP7d5cjpY1xkR2FxtnnJKgZ5SX8DcdZotycq1CRX0PNUmGToqEGhuWTBy8hbCmTLmoZf1xfT69ld+7b/5V9//nbzpX1w+PjWeIJDTdnNp2A7H1yjrNg2nlFBaYu9Sp1NlzAMxB5hXYCeSAiVDIiIiwrBhGKRPrSLl882k++k0zsdpquYqocBOAeFASJ0kU/BIpTCjZonEkFiYourMkOuErQZ5EkGEeeFUMUoRIkpnkU7HmThAWoAiObGaKYKEpYhCzCmLDKttLmNECyf35dgjKfVmcyBSIilUekAKceEgoQirENylFQp4hlbJLRgdSVF91mbq0WhqFROmHr2maR/393sMiggkG0+jgzccI1A4iaBVVYOUilmLKKE9JRMyxNHdwxxSJO4lEQrY5JRSGDLMKJaZcyokbCDQKEyDKhNITi7mFmABjjq3VWICZgkObpqCNSJ0rjZPAHG6r6XHrs/Mvt70HhpAn378WT7bIb2VpHt0dvELP/MH//k//60vPvv8/S+//+lnH55O33l9+9pKMtPLs+0f+P3vlZh/8OGPv/eDH4LqrtuNp9YuYNP7eJiZ8tR0s05CIT6t+r7O2l/ltNno3f328TNYd2bVZ1fnjz7++Nvf/+yHn++/+od/Vuf27/7p/+Dmlf7eDz6/fj1Op8Nf/xt/89+7/2M//0d+8eu/9As//q1vfmXevP/lLw+DHQ5fGIwAgCQEKkDTNAXGaj2YGaCXMhAlCxd3V8u5uJqHI1DOXFIqfZ+HC9isX/74u+O0f36+PYbL+bbdzovnwdLgsNnr3OJQQ1OIYENsWXKWJC1TRJe8BlfdZxx63vYiffKUEgoSwabr5hKY+o3JbPowne7u9uN4mOZDm1WIE4twzpLQJLjkgsKek0JMDDNTCKUZCBKHQiAjKAOHV8qQpGsK4CxdSkAeaggCkxAmwZRwrjMAIpaUEsHAP/PHH4sAcWHKANaq1hqtqptFMHFaDf2wEV7OzhjcIyEhrlNfmAE8EuSFRkdspnUcqzZtzsc9uNF2u0uFgaN0fZLkWtGjjqO2yWM2VJacJQOQKQinrtslIaRgEghq7cFMl2R/Ji+ZMualNemRxjZV9K6kPq9S6ogEI4OBMJVBkCvIFDELpZVsV9Sxh86u4eBxmmszc/V5ns21X2/WqzN3BKSu64mlzZWIGrQPf/Q9bY79xVyn9x9fNo2//y9/+emTi0ePL1f9ENFyF++98+IXf/5n//if+GPC6V//9ne++Tu/++LZkz/7J/7I491mt94kYp9P4zx2JQ9dSeDH+1sWOb86x9U6DatxfyjrHeQNBpZudXx1951vffO/+Ee//qWv/uzji/L3/sE//r/83//WX/9bf/373/8tUxkPh/v761bbarfaXJzn8mSq9etf/7qftIPJ2r26grbQOp+OjNCv+tJ3p9OJBUVkaqPDBOGBDAgsRCgeABDIPjx5Kz3/wEtq93fXh9cPt188POxXq83Qd12ggEuPkV1xBom+y6UEoxF7otTcJ5gZvc8CZB36qgxM1HW07cuqT/1KUsopJymZ85ClpMS5z9thV/JKkoSjayTMXS5Dzqths+6H9VDW6261KixBCZIQAQLM4W4utc6JISUJbIuCS1AEBZ1K6ggTEUPU1FXJRhQLpKd0HZEgJCkdMRm6c8pE/XgkhEbomHmeFECYObFDkKkvhicWypnIQDJjSXUGQQ0P8zr5UZVMOXQRvPg47Vfbp9v1usfMFQx9cm+Is2mHsk1FzZxQUCY7qHcrzkjM1Ajbgv3NqUGAOxExO4IbcdFaEYOwE80pupxWJJQoAEtUT0IMFkJCjNJCp6pzjdS
ZA2NomqYx5qlWF8acGGkTFA18vV0TRCZh9BC+f319ebV9cn754Y8+jLRaDSUNm//eX/yTiu3Xf/03P/iZL7/1pffXl+dD5rOzs5SH7/3o+jd+7Vu/869+5cXz53/5P/hvX56txtNDqxevr291OpZMQ8fqTSMo5xTYpRVzH+ackkLqV1ubHMZ6eHX3L37zWz/+7OYbv7ht7WY+3f/mN3+jdKuz7W7/+tWf//f/0n/6v/2f/W/+V//L737r++fr59uri4eb16/uH56thttPvjjef9iOY/UxEheW0g1qp9bOLndX0zQdjreATiFAwEmIiBeOd9Nm86orabOGsiHwYRi6Bptu3XVddVfJ945r7maPhnFqanjvXgl6oHuKLOiM7i36Doece0DsMwa4B4uTzEJY+pJFmFJIz569SWuYvG8dStenvl+ttnU/RauALeeUBXOSrmdOE3AwnhEzo47H6l4rGoai8+nYIDR3WOtpWHOfh7A8Q4TDsDrLOTfbtfgi46RxEqlTgAP23XayEBER8UQsJNVyW3XjcQ/E7CpUmDkVZ3F3kQV7whoOBtEJM4BQZzyrhkVVr4ANCQD7VpGQSNo0Hi62m0ePLlWVSUgiGGbXMGBPDOJ4UqsRDEwes8OJIoMXxACeJDFGJgJDAvcIcguLIEpmEQ6ClFLpcy/i3pyoUwRiT50gBkY1AkMNnVqjBwNokTyLzW0+UTALh3lrc8bCaTYPlqQ2a/VQD7fTzcP2rfXTq/6T7/7ryWl19ej3/8wf+o/+0p9963L7G9/65t5cuk4VP/zeh9M0ff75XVL8j/8Hf+EXf/EX+rwZb1+/vH613pSr8y2Bi5CZJc6tnkrJQ9cpONgch7lfbWDoyZUI7z99+cNPX/6z3/idy8vLvksvP39ILo8uzlUKSJrnw9e//sG//W//4p/6M/+t/9f/8//xS3/E3Gyufrs//L63nj5cw9Sm8f56aqe8Xo8OLIdhfXmBZZSHeW7T6WDWdtvLROV0vCOMzfo8563kJI0w9xFuoKh6d3+TxvlqWF3buL95tepwtbs42nQYK7P0Xa4qhOpYE60X8HIS77Mn5JwLUhKcBDszI57cAVIYQVckpQFk06G4Q6u5ap7NEWcAKClrPqK1BdlAgLlLnD1lAWyEgZzRWyq1tmxuWqNVrzUinFKQELiVwhQpTAlWl+dPhGie140KpZP67Wm+A9fWqhTcbLdCBF3qSuqYGWdd9V3botlxnBpCYkYiYxImCq9EQCJN4c2RBwWRchZVrXpEIhJOHVozQOPEoXpxcbY726xWXWvm6jlJhQlBhZCREnJIttBwECGHaqZApKYSlQSSdK4tYE6Srbq7IHL40sQHRGbwzJKldGzLBFM4E1HpyGOGpbvJJMjjNB81BsidSNFAmBrA8TBqm9frLSsTB6ipKwuq+fFwEsbjWG9+cPz6V76y6bNotPn+137zlx9fvfsHv/Hog3f/+PX1w83d/n5/4ovtaiibVX+2G853qy+u9/vDuNusyNNcT3NFBjztD5uhV62qmjkCEUqZpmnoOgOkecbxMJ785ub21c3xZIUI1Pzs/Enq18CvOka2lmT3z3/1tz789AtxeP7isYpNp1ksCQXm03vvfTDf39VjW+H67n7PBNt+fTydjtNn2/v95eW5cIeQ2qzuhwqnruspSN1SKUNaSwDUEK02PdTbL1YX3eEByt4uu3KEada72zodvVHuV8xpylZAYyY9y72n1Oox1tYxUUkdyeRKmQUzeFASaDrR3M2rUlYlSY5ITpxMZA46zWHAkuosmgDNI9B0JAzmYEEWMZ+BlRGbOQMyVkL3AHdyg9ZaQHSFABugdXnd5w3BpkgPWK1Vp57ZNttH6Vhw/xpsnOpnZ9uVABqnIikLJXfFruA2E8zXd7RvjSRSZmJEhEJi6oiaZEXYZUk5ZzVE9NKhIZlra5UTS5dyzpVstVu9897b/TCkIgBgxCyeHDsujM6CyMgLRQIQgABMtVJm89q
sJUZc6DBAjBYMYUaQCIFQuiJTI2+VBTAMgJEzODIVpqHL4B7hE0p3+3Cc6mmV1itOyUAE0SkPHWlExGq1kpI92CMBSmBrVUW4dGmapkxop+P3v/v9r3/jZ590fUn8k89ef/LD755uL59cXD3a5HcePRrWq3nGaZqQ/HS8f/X5nqDbFTkerkPH8dBgl6tCmysPa3e/u7ndPX+03p5NwKrzbCpV11nq/c39nR3n2qXy7PLJd774yf3D9VdePH/3vWff+/DH9aB9H4+fPvrtf/3rv/DzP/feW+/87B/8es6AEcfDfLla9086fH3x9lsf5KyH2/u5unBMMyCHYBBBhBGJKcx6IqTcFeKi7uJuzUrmwIiqYMGtXnTlOpehu5znGOP0cHiYYo/pvAMO0o5ljG4fTUpKNPdp2w0XQQenCaOloigOqWujMUZXCrO6TWQcOGDHWViwgzSMrQmcEiByoXHGmBHcGxIosYJRzpkFiBsDxCIEZorGBJJYE8uxeVgguKsDYspifqLkq3IZmtRORQSx1zolWSFMRCCE69VqHPF4fOA/9GeuSteV0iNJRM68JRB/I4jVnHG9zd3AyBaoQSq8hkiJu6GsA2DW2QIEI6Xi4a0FMRKBOULQ87euHj960nVdSh0CizALaAsIyoU4MTBpzOGKgGZeW4tQphLhJMpJAav5ZF4JBYmW1SHiYtZhD4vw1dB1uQRGYDAV4V5o1efImRTV1GmOHF3S1FVqVb0aB5r5rG3WlvuuW23yUKT0QiKckkjTJimzZKtaurxe94fjw3oznMZ56LrHF1tyRSRtOE9VEm4uzp++8+Vutbs/nObjaVVkmg+vXn5+PN09e/Z4Oo03r292211mWQ9J5ykcN9vz3PXj/c00TZv1tmzL/nD46AefBZfd7lyj/pN//C+H9eWXvnrZZzo9+GfXPxmn1mYEP15e5C9//WuPHr24uHz6+vXNz1xe/uGf+/27ty9immAaOQeXohpQx6HfpMIQARDa5jrVVCRllJxK2jBlTstiE0vp6HzD51dtmuonP5yODwYy+dxscggiFuHt5qIUZgoWrKyO1q140/dZzrpy3q97zgThKEokRBIOdW5M6jEmkdx13WqdVlvKQzd0XdeJYGA1aCktcE4gRAxnnhFaRDCuAA3lJMnCW3gQsKtYDVMIlfFkENQPqXQhuZXSS1JOKsIISaS4IwmmRMRMYhb3QEbEGFLrJIvAHZAQc0rMLIyJ4rIftqthnOa7xGOXABCnSTMn5i1oSpyZk7oHAkZCiG7QEGhNzAxKW211t7l4dPGkT4W5IWgpvcdoBuqNEubcKToyoc0RIwN61LneYWwtn8zCjobcOFWPGUAjxJURGIGBDCFaQ22OCAwoTOqNyDMLAxEEUE8RENxay5wzbwi6nvP5buiCpFrL02y1d7em7lTyAIA6qaoBzgt3z8LKegCjafSut/vDw/bskRsU5ot1Tuv07K13ADtOuV+fl25z98PvTYc7gJNZPT5cH/dffO3rX9+U9OMPPz3brnORgDbup2hVU5lrw3qbCKfj4eHuVeqC1KOCQwyP1/+dP/9n7h+u/87f/5Vf3+x+7pe+8Sf/XMI0/uSjTy3sS+//3Fe/9sHlo6vt2fPXL++2E/2hP/Sl9bMBujPfvp752Ev2kjkl2Q0WPh6rSD6NszmcXW6H1RYAAUJSVzJTpgqwLplIAgp4bdc37XAQ7nfrjarSdNwmmfrd0dpYTxPUU8KZvKbIqy4P3SCPSLCUlPNqvd7eldub/bXHjMF9x1B1GmsujRMndcLsmjTxZAZslFOh3tDn46kvwkglcc3QpjSHIjrGrYODH9iByQBHCyJmlJYLT+K5IGHHjMiV5f9H05/1WtNk+X3YmiIih733mZ7pnWquYnWzmzO7mxQpURzUlCEBvrEF2xIMG4Ig+8rfwveCLwwbsD+CYcCAIEG2IYuQLMqSmiJ7rO6uqnd+xjPsvTMzItbgi108n+Cck5kxrPVfvx8
QNoLs1lt/PSQmeolERKyWglwSs0v2AomyjENxMe9m4U4pc8rFGxAk2t/mFmPSpnPwe5IViRISBBML5RvmpH4cigCPtdagrrax+DhRa+AeJdF+zDNTyoHUER3ZhOj8eGx+TsEAE5OwZNVN9dSsOrTAtrU1D2dErtVo7cnVzMwU3VRRBJKwhdMFUYeRiBDUrAIYBIE4IQpxZhKZClloHgAPfJfzzlfNyTCgtQWEGHjIg2dXdKAkQkmKqdx/OFmtGHZ9tZtGeXqswMO4O7x+c7/0+OzjT7ZzTUK0tHeff3777OX903Jc/sU8jL13jk2m4fx0Oq/Lzc2Vbuu7dRHsxKbbORD6uopI0346H/t2zjkDuLf13S+/enh/6gvu8tSW96TL/+bf/Td//MnNf/Zf/skf/HP+7Pt3/6N/+9/66vMvQPrt4eXdi89Ssj/6w5/Rh/W3f/KDmx+W3fe+4wD9fB441rotT49DpjTdPa1nbBjEverp/kmGcd5dMY2mS7fOHZNwSQSMnoeUD7GuaX0w1GgIfRm1suQK5ZHPvcVSTYQkibpN2K1gKfupTJI540SUxgRTPgDl9+8+BwCkXkrWbdtaB+J5l9QEjAkEgGtXCYegQZKJoHSAKDmXhGdVbSfADYEY0RUdILEBqqMRk2Rz9ZRpGNIGHNEvSB5kstAI7bqBHxOJZO61A3awttkjcs1DuArgWIT4r/zrLwFwyAWBKDilWXAMENSeedhN41QyQCA2STqO05T3g+w4ZRIUYWZr9qjR3AwxE+eIDZF3824slCXJSJS4dQ6MZtvD0zu1bT8/Ixo4cU7Jvao/WTQA783cI+cxJQL0gEokFhcdeQZAgAu1AZlYRBwhEafhVxk3AR7KVIYroqEIJeEe9by8W9v7qqfWV4vN174uZ4Jo64boOeWcSx4GckssHvrw+GgG0zAnwnVZHh9O5+VeUhCyaSzL1qsPw/T2zbfv3rz98P6b9+/evv72q/PDN8vpA0JoM2t9WVcFXL1/8/b14/E47qacWCLAjHJmyUMetu1MFEDQe2NksvSnP/tZ3R6f3e1yWns9Dow//M5H3301pK5v35zvTw/7/UGQlnNrT/H1V1/OZr/9w89+87d+8oPf+nXsev/mz/2bz5f7dw+nh3o6QUKRISKbewQKyzgOZq1bU62SgUEcwANK+JSn/PKjOBzs3bd2eupNH9ejrU8EUDgFwarYVLtwK1zZlCNlR6JOtJv3Q7qa8iyJCWUer6c8Lkt9enoH4YQc0B1WcBzSCOO+jBNPBRNBNIhwA7fN4mz2BO7hqF3dkIBUwV0DmmoPgIjNLzb3cIyLDTITZo9GqXLaSJSFkALJ3EK1d+vHp+PWHljUfFE7Cg25ZLrEJ1H4L/+rLxF9GGZm2dpS0m4cb3p/AsZduc6lDNPMnHv0SD7nfcofEZechTlHYLcHi+UiekR0ZiaGiC5USi6ptMziNrW+mp9Pp/OyHklwmg6SMgGzUFDVvrgpkJpSeBKRYcyOFQAAulm7uCsBiQiYOKdEyO4O0Iix0MDCnECEmTNBCQcnA1b182l7/9jebnZsG5omRCySEyOA1tXO69K1erdee9XWthoKJOKoqis4dusX91suOZc8DeP5tAzT+OKjT57O27DfYSrD7gAiDtwNlnP16qe6dO/eZOth1qY0XM030zjv9nsFaY58mTgOiLb0cz09HllS91rXh7mwJNKUbvbXu7s7s/XlIf/0Oy/HdZGtQYWbJDPZR/Pw1376g9/4m3/pO7/2PWimH748ff2nx29/8eH169O6eGKAYTfuKM2SACAICgut25mQ9ruZMVNO+2mmIjxO8+0LvNpXdd6eCDqYRIDaIhFAWAGVzACcU2RGdEGkNDkl7SdAfn77/ZxLkAmXeRxL3o95cm21Lu5G1DHUXEvBYd5T2achAziBI6oBgNVYzfqxra0rqbbokWjIPAiTo9a2uB2HQZl7RKDPiCAyMhYkcGgRgQweKtIRkyvZxXIabAG
InGRgQQQgKjlzEkJUABRkCITu50TZvD2dv2QYhnyrViNwGgdKlPI1pn5cgWjHRAEJwHNOgLasyhiZseHizhBGrCyKVIFH4mJgtX/YWmXGquvW1pSviM855drcXRIfUrrvfu+GuTBS59TMgYIvbuy1q6qzB1OAsTDknAmlteYuIjKM7FG3tjoGkiEJ+YGhWHSPToRCxJBLnnLkWhuwL82SRdcFHJMMtvXWq3oQY+1L71ZNhfxqvrq+efkcnx0fjvcPp5RW6+urZx9/8cUXp9Ppo48++frrr8c5ffzJK9P2cP+u1/N+Hk6Pp/BtW9fweT/tMu9LnrPsZdx3IO8bY5gFMKPa24djXc5hwAOnotM8t7bdUuyvxvV8f/Xxx9//rb/1xb/479vrb/76X/1EW+K8dwrK6eZ2f3197ULH95+/+/yXT6+/iPOC5kE+5ETlkDh1U/UGADlnGdKHD28tNNBTHnZz6aZb1NtyMx2u8eWnuDvwm2+juYBARrVagSKCGDPSLiWmCttZiAvjqZ+c8YbnJ5S35/a0Pr64/hiVJTkXZh9v5pfz9/cfnt598fXvnc6PiXgoXe2+tw/mz+s6ug1QsnonCMDsAraNXc9JUHhs1N0QIhW+TpxTzKfltfYnlkBKxAxBDp6HMIOdDWVIarxVreeaDqn1SkSE3aAiIUaoasm5Q1OrphlFCIGE+G/8w08DFakzMgBrP4fHbnpmGpJzKUysSAbkEA5wsUQzEQR24tbs0WwDCtNqcVEV/apsWXIWSuGOhK1tW/uA1CGCmeZ5n/OAxA4qnJBa93uHSmRAyJyFMgaZuzGYe9ss1BGJORNxSiKcUkoEgkDCDakG9sDK4swlghyUSAHPtR2tKXuRXkpMU55JPSubhWSG1nXb1NXdwPzx8X453bsqBo1lit6HkknILMKQKYhx62pNv/72Cxb74Q9/sJzO63IsgvMg8zAgAOXMlLvBaV1Titu76+evPrm5+0hZugYjZ5KUZsnlfH7o6xKhTmEEhKXwkAjN9O7lS8oC2vKL5zff/RGYOegG1tvbu9vD9fVsvmzL/Zd//ifvPv+zxzdf6oe3EF1HqRdvltrpdH/ShQnLwIJJtQN6GQphFoiUC4gOeTjs7/KLj9rdC0dOT0eqR6+tbjW0YgQERViADykLAAQ6IkCwQ+KMMmDIuR1Xf9xfXSWZLxrwnImiueYx7+bpFkKRn5iRpYMAiuScOGdHY+oE0NVCrW0BhtE00wiatKPZBQuCwiJUMLJbNF0ByI0CutqGKOE0lIEJhZhwuDB5zD2CESUcIiBcESTCzDfmlNPIlJiZf+fvf4IAjBHechLm1PoGGEO6JmJzD1QiN61NN4Om6oAIKbqdFBaL8wWsTpDCIsAAxQMRW07ZLrNq+VpYAJUpBURAzWkYyh4jIwQyEAdgmJ8CAjEJT0xFrQW5B/au2npKxMI5ZWZCREIWEjcxD6KVUrBkAAM0ohIu4QjQVWt0B0u7fH1TXk3plsKEU6LMgNZ7183aGt6Wuh6fllbrBeVbZOxd12VdHt/X09K2bV3Pj08Pp6fz+3f3VTdiHobx+9/70Yvb/fH4er+fX756DuHvjyfO6Xhq5+rTbpfSsL96fn33aab5cH3IJQNmSmMei7N5Eo6o2xNCSsMdAWyndyUDsu6GabjeP71/z8vy+OaXE/WrNGdC9HpaHj68+7Y+fvj6Fz/fzk9MsNsd8v62ajvdv9mOT8S0KxNCCZKUSkrD0/Hpw+NDV23dMIyt17VRwrv9i/H2JdxeYRapWzy8i3qioN57XY91XbRXAMu5dNettiBA5oiIgO6xMW0SgNKhEZxJAmk80GFkljylNJpp107MZSiAgEnBgj0NeYc5IQIHqyXX8BYUBUzbeqqnBT2D79yJuBHZpbfDgO4twLsqXtDPnpgmZgpQDCNIYQWR1Lw3J8pJBqZMQMzJzIki6HQRjSGM4Yn/xt9/gYBEYXDyqIgU4dtWJRVmsOgskDlaX9e+hPW
uF0r75tHUFgPLaXJDQBMuCEyQmQnQAEl4Kkmy7ETGYZhTGtWbxpHES5qYExEjOxMBRLeViAICnYnIyA2CEOTiqifAC2OehCBBhBu7kSQhuTAwECECHELCS3hCtNC2rrW3mhDJsJ43Ao3wAHA3bWvdzsu2tNbWZdnW1lo7HVcPbmbH8wlLzsJrbU+nczDncVyrrkvL43C4fnVz+1wbjLtp3E0a0oxk3Odxv1TT8A/Hs2k6zPPbt1+//urz+9e/QGvjmHMZpAwkDJww7zbzrTbBPmUJsHV9AID94SZxmvbXfHdLeYr7x9d/9scfPny7H6Wv29Pjh0zU60ZIdzd3hHE6Prx++/X94wcmmuYr4eTuqkaoaZhbxZx5dxgCJYnkBAlTGmk37K5ffJKfPwcIWJ/g/Ih9QzPrTohuW90qYFDExdw8YgJCd+PAAgnMK3ZKpXBCYFvPW/0AyaYyj3zgPLgJBNR+6npiwTKVHoHREUI4B2cItubhGKbRzc0Aums9nU5djYgBAdQll4sKt61HdyVkdQf0CMFIiCSJszCRaI2uzcL+JQz90tbJTEKYEZEYkJp5NTeCIQj5t3/3FUmn1ADcIwAt3MO49T4kQXGgDta6rc2qqfVem1YkRXKzHhZMCYMCTGgsMiQsCQtDQZqmcS+YIhyiiBTOEGi1b246lCkPCdE9Asm7V9UzUvNwIgwIdXRDhnB1dEYphFQSljwiJL+QV6gEAFFH1rgQdkMjOGy0ixwcvWu35q6YYDeXGyzZW7PeKULXpS0bGHjr29oCAYFKmVKZgfFw2M/Xh6WrCef5wGUEZM4Fc8oyHG7vPvr04x/+5CeGjGkkmX/++de9w/Xdi+vbjxzjw9Ppi1+8/eKLPwd9f0gW/e27b79+99XXx3dfPb3/8uHta+sdTLtlwFwSjQmHgajINN7kQsSblKtyuHGyQf3h3fuz99b1uKy7+ZBkVIdAOh2ffvlnf7Rtx6vd9d319TjfCedxHIIuplCD8CIJo3VdGbCUJMKMhRMdDtfjzS1e3Zl6Wo7ezuBMAdu2uVcENwtzC9NcpKAQwOodwUtgIAKhAAzN2DgMG56a36s1KTPKCO5mUdvZLDwq8vliL/fwFpeVboxI2BSbhnqY18sksy/a160uTVdmxAggYFnDj9p720AhnLZQJEyAcQkYjGUOJ1NVg9ZVZGAamdM4zEyFIBMJoF/iLh7d3QGB0Pl3/vFLkibJIghjANKuFTx1RQ8thZHATavVrjXCuvVuG3DzaOEaGOAsyMyZQYrMwskMEMckU5biBqq1pCmlpL7Vfg5qxMw4QUCwawc1DWgePbwSJiIhEQwBJ74IkSAhk2AIRUqZqXTrCACREAKjE5qBhYeHAiTwITy5t96amqpuFGVML5lkiGHMEzVHdeEUCOdlpYD9za2kknLK0wgQ0ziot9PxFERZOMDX1k7bZs1ePPvo5Scfvfj4o08//v60m3b73dXN7e3dy+9//4coMB5uFGhtrchwWvub1+9O70+Pr5d5mqYxH+/fPb193c7vc3YEOz18/e7DNzwcDoeDhALC4Xr37O6WhzSl5GTDfJUOd4tDRuGAZV2n3d2rT18hBLg/PH6IaPM87cY74UtOPjVda12QM6eMgK21arWHIaRhGBHImhHC7dX17nAYd3u+vo40sBlsmy2raSfTtp7XbRGmJIyhROhq5F6ICKH1FcATyQjDnHaI4gJPvoQsUZ+6kvXStlOtdWs1oHXd3NeIaq5o/qv914m5IETT1cLDGxCeT0/aFvCmtjV92vQRsQMtCCugujez0K7mJwaKYCZm5lJG4nRBSWMgCSMREw6lTPNMxOEBgAAkicxbwBbh4c6c+e/+41eAxgScCELcKLDH5cXCSClSEnNYtyNAA7TeoHs32OjCNwlMWBInRmESwEzEAa7q6CZEdJkGYkiZPHBrR8lngN47cCoAEKDqm2oldkJB4EASSeDAgAHg6AQoBEIm5ECIlHpvph0tuStGcDI
Pc/NfmWlsYGSzbt7UzmEqMGfcJ0k5ZTBAN2ZGYDQfE2qCp2WtW3N0xw4ArXVkHOZ5LPtAWratdZ33N/PVx5znT7736a//9Nd249W6rvePj93gtGzN/O7Vx9N8vb+5SWm4nq7TMH7+5TdPb8+/93t/sD0dX7w6mC15GvO8P2347ddPtS6nitP1bV9Oz/bz4bAzqxxGBIwy757T7oZePKMXr4bnn2YRW5at33/55S8fPrxJ0m+udnpS6NG8Gu9cQgCG6Sbvb+arq/nw/HD7Ypxv0jjP13fXNy8DUmvNrKmeOdHNdDPmwa2DCPCA22qn16316BXDh1wi3HVLyA4klGgo+zKo9arnAcM1EMQzBcGKa8fwUKfH1pqbb1tvXVvTbVtrVe09omtvGMRB2swxM47hXXELdVfQqL1aPS3WVoAmKcKt+SlAk4QwIaKjMQL0FmEA4AFEOSKllAMtwJhHgAuSl1PKQxmI0V0DVGSIAPdGrB4GEEyToASTICsCE5tqQBCAImaSpTYkEggB7EQBQIYerq49JLvncMHEUqj2hpCzHLquhFVYVHXZzvO4B3SLWpsF8TiOJLdrnFZ9WDuJ7ZgZjN2NsDogIbNEhIuk5s2thzkzMwv/SgfTzZ66mikpPxBlFwAFQGytA5pIiiCNCOeI0A6Oq8iHpQvDrfdtxikn1taXukRvKedEfJWvxqEIjwiyvx6n3fVcrnuvmyEK7w8HABkPU57G88NpzAFYOsF4c6uPp69f30/T7t3PvpBScs4/+tFP97e30xV/xvkv/7W/+vl0Z325HuHd43k3wLOrK5Kb/+o//73Iw1/4td/44Y9/M+3pruhHn73II58edqftJI7D7av06V9EHrVmV8yYx+c/LF++rcejNtbA9+/7vA+7ugv3rGs9b4/3xyzpxcuDjDsDhHBEKfsD66SqLSjtbmZCqAXhFWXaikw3e0wD9g30bNtaO2y1JrAs7IAeJCIIho3QA9wWcHQ88NgimGyzh165M1I0VsuSTfKC9yeXHPVhe02R2QftweLTNObky/rYnIJFe+3ba6ALaErQ0UJCm8YKURNnDCmjRqwYzaJ2W5kzS4XI4pPVioDmFgBgW86AZGArkJeBseWIztmcuzUwM2QMaBDiEImh5Nwrbe3Ef/N3b4hdmBEpgsxAtbsjkTAzov1q/EohoLtBtyByDxPOANIaYBAxmoIbI0HOORw9OmCvm6r27rXb6rgAakrDfvx0zLfmfVsfzXtc5K4BZo3ZEQGDL6AvCAIAJCeCxMBMiAbYI0DVPaK1djHcBziE974GUHhBGMLIbAmP5keHDaGBMToTEJMQDvvrm93V/urm43F+Pu93u3mc8762bfXz49Pp+PDheHp/2tbeWvemoKfl+M03X7358ltQu9pPuaRxmJKUrgHIz5+9lJS/84Pv3756+fS4fliXtVph/LW/+NPa+5/+wb+4HWger+ra1WN/9+Inf/nv/vbf+bdvnr8cZhn28NOffjZdX+N8fVo7ycHoCspOrl7E9BzlgIBuK+aQnOpywsBWq+R0Pj4e3351fP82DKI/9m3RcPXqrZ4ePjw9Pbb15Fqtt7o8RTu5LghxXk5KbXcYd8OdOWIhdY0eyUkQQGjMiQDUDNzAm5kacIRBmLo7mGrDCDA3V1TN3gydAZlayBIIahxRu3ltZ60NAJGMQJgHD6tb7RaE3b25aqhGN1c323o7WmscQGHhxFxEsgggbkDdvImEqoZTIG3diDMiIrGaRqzmzUIhSJgup2dB8ubWagCadSTnFIA95axmtT4KhJgqgjAzBDIrc1I3wN5auBPiqZCoqoe6g0e/IMdqreEXBXHDmoVmAFrW99mGy44BAMJD9zV0gW7EY8lDyTvBw5Chj1hXX+sWHCIumBBHiMYJwj0CARUJPLpZzSVdNN1ABEbWUa212k2lYwNKnEi9Qwh5sghOatEcKwtlhNN5Q5wOu0KQym7/6bMfFp3O52NE6Hk7Pnz7+P716fT
+dGxlGG4/fn5ze+edHh8f/Xh8Oj4s520+XAHLIPPNzVXA06Kn73/8ats2ijTNh8++930Kurp7UXbD9fUL+BH/6S9+Vtfzbn/Vev8Lv/4X3/xrf//3/8n/Iy25n5bMw/nUvvfZ87IbH89/zoV+8NGnGYina7y9vdrfFIXltCENCBBhXAYAWe+f6sOb5eHtn379FfQj1vv3n7/JnKfDtZQhYZSrZ1e3ubmqNkLfT8NpOUePeg6lOEwzKGzrmtNw2O05yX7a764mxcKRrW29nsAszEcZ23Jat9W9QxgHMWWLBoEBoL1dWD3gARDknIUi8ZUEoKtRb4gl7QlO2iIYjBgZ8YJfMW2AkNW9aUtFwSHMzQxBiIv7ol7DEyKHeYBG4LQbmLtjp6LoSDSWSAnByLFTKLPIMO21L70/WWj46OFCyGxmPYwAovceYSTsGOgKkVTZUVlMupIwBw6AIpQCKyGKuGuHCEaptfb24MZA1cJ6uEeYATj0vjEgp8HAPJ0AyKN77SKC6B5+ERZFJEDCGKb8CQaZn5jHnGbmK7WOpAIpECRJgJutSI5I4Qk8MBQ8rCtnIEZiCcBAN3d1RZJLmLV3QzKiMGMhUFsMqrsjcUS8evbZYfzufrzNctXP6e0XD+LvT/UhPCWDhFGmm0bluy/k+e3dw3H7+hdv1uXhm2+/Tnn86W/85l/49e/vr1+WgRnw7bvXMuCPf/zruyH//h/886e3T88/+mT4apzSDJJU2q4c9lc3h+sD5vHdh5O1fn24/d3/yb9DGf/k//tPvNOOrnl4oYA9Hm+flWfPXoRIa/z0xVt4fy7jeDTN034/32EacL6KIATMaTo9rf3+A67Lu/fvrq7m+cWnx/t3uj6FQynz4/Jk3pkxp2nBhBSJRdK42x02V3efpskWfHh8O5Xhez/4i9cvPrOZY0xRaYgSLG1do8yn01OYa9+0nURyyVeIQbYFUjNFiiJJW1iClHMxqM1WtAYdQZOnwW82b5q6oPf+RDgiZmIx8w6W02RhJMXaB6sbM4ODdndfAx5AyYEDL+I6UG3gjXPkIohT+JmTISycc6LAMuQ0bcdoG/kG87Q7bab9aLYyiUVYXy54PFNs0bwHxwwUiZypAGhKGDHzX/67d4jENIoMgiMAh2MEmTojqru6LdtJwbsv1Rd3gMv3HOiGrmQGHtBiUTMMNKuALcLNe4S3rqZKGO42DKWUnXtHbA64rUvXSiiEmYgQMQLUnyK6u6mG28UFDwEhyYio5IGQVdUtulrXTShLskvcBdAIi9BU27b2N/M4vHjx2ctnn+3GGwi/f3dejmqrZ5N92edhTCJZhlYD8vjq4++V3fAnf/onP/vjP3x4/9U0yr/69/6Nv/Lb/9rV7avWjn/4L/7b3//n/93jun36w79wfXvXq37+5VdLXX7tp79OXJpZq7b1Fmhd9enx2Gp/fHza1vX49FSrffv2/uruxdPj47evP7z40Q9/8Jd+4+7jZ4frstsVbJrHbN2++urb1+/evX3zzfnpbCetFbwjJwYG4iBJCVnWZb4ba22tWt5dyXQdMiNnZFYki+CQ6AyY1OJ4PLp5HvI0HUJ7zokEXT0P4/721XC4CUgQyk/v7Xg8nc+hDaxZNEqUy+SewAysNt245CJCSDmlxEIkgWTmrbfACOsIRgAOqVIGJHCv3mvdwAgiEWVgEkRCDoRetfezaQMwIlRf1Z/M1DsFJI+LVhoiolsHB2YSYSDP2VOycJfYcRoL7TEyU7ZuARwk3Ra1k6oChEV1hwDstjXfupMZM6FIMEVgY0bwgf/S33mFUBiLUEJkQkB00w4AW+2m3HytvVY9dm1+0QCGQqi6moU5Vq3qtXcFgG5q0YDNwdRMLVTRlRDdzdS3eb4Bl96bW9feEaTwjoIxzB3MIeBkdlEOoimYIYAjQk7AhImLO/Wm7nAhvAediYzFiC9NMz+e3uci3/3or3zy8ffC28Pju9fvvm7tacxjonHeHcpYuioEhve
tt2l3c/v8+f37t3/6sz/Lkj/79JPf+Kt/9ePPfno8t5/94e/9f/6T/9vP/vQPvvuj7/723/7buZT7h69/9rPf/+//6R/M+8Pf+Jt/nTmP81yGaZx2eRg++vhjkRIBb9+++fDhAxIvT6eHNx/evnmXUqpray0++t5n560JD0AulEeBp/MSrmXi3aFczTvKQ5qmy58zjyWset0iA4/TKCVI8+46zdM4DuM4XF0/nw+3424/39yVsusa3WpdTolpHAaG4IQIQChAmJjneff8+avd9RVQMAHU1Y5Pdl5tWawu2s5b3ay2koZxvoph7EwEKCAo7OZhFghIDKamGyEUgsTi6ApuQE7chFrT1jYHMq11a0S7LBMAIlAYoZMQWDMCl0uNMoIwmXmYa7RamzuycFe1jm7EQqlwTrnkjJC8s1rGyAgDEopI+AQktZ6bLrU1QGBGM2IqRNQ7qv4qxpOSEF38uMbM/Jt/+7tJJsYEECwM4A6rea/NtTnhEOREol7dFSEiCJmsq5rWrq1uFqpWL34o5AD0rrWrmtvFZox4gXFlj2rRmbm3pfcNwhnSkK4I2b2qh1mobu4OqAHmHhYG6ICepRCSX8zg7q5gHZjS9c11ySOLEjUzbX37+OOPXr78zBq/ef3F4/EL03Z9/XzMzy4LQK2ttXNEgCOhzPublPLnn//Cev3pT37zuz/8sSX8sz/92Tef/+z3//l/czyf/9bf/Xv/2t//t0oZ/8k/+c+/+vyrd+/v3797+Nt/5+/98Aff++WXX7Lww8OjcHo8HcdhXJft6fGx1m05na8O1/f3xzB4/vzatFdt33z55TyOZb9/OLfe7Onxvml/PD6pwXnZ1nUtwzzvn427w3zY3bz4NOfROSCLULa19naMZA14nPe78XBBsmrv63IUSaGGgcLk0bVtHlbyMM27cF/Ws0MAQKjN+3G+PlStsGwYCzWNNONQJHEqmRFdN1Dr27nVI4TN02F/9SwPV2aEVJBEDWozCBBhYFT3Y11PrV1GyCNoQ+rgXrm1trYPVU+9gdABgcM0LIQk5UlkTDJDsBsQXbqlsrXe2qa9uRshEYzWCWEIUAgU3pWcmcO8qrFpCM1uRDhSzBgpydhaILC7uTdGyjkDuTbAEAYeypBKQvQAw7AkyH/xtz8FF8RgJklERIDV3U0RfMAL6Fc40d5jQeyc5VdWU/cIAw8zxbi8+gHYzRe1LQC21WurFs0M3QHRAajpav4gTBFGmBGIQgiyB29Nu2q4BawBG5IjB4BFeIBjKFMGQEd3t8sNJedUhmEcx5LJY2MaP3v1V8N2X3z15x+e/rhkmFLJXLIcgDnCgpxTHuUw0K1QGcacaXj7+v3HH7189el3Wt9++cufvX3zDaCty/qDH/3k7/3r/zCo/Df/9X/1//tv/ot5zEMZj0/tf/6/+Pe/+6NP/p//yX/81/7aX33z7v08lvN67q7LaXl6PFetfavI5OHeNM3D/elxW9rp/uGf/df/tJ1Pzz7+7u75c8SutTKntZpkPp4eT8fltOqx+vFhHdK+pNw1elumDMGA5uKKuSAzBsgwKicNqMvZ+7asjxYA5LvdNJRRHVLOTIEIWrWudatPErAb9/PhbvfyVZqe+TDZplByKiMPgwy5nZ4+fPjQzqfWKlAPb9gU3Hpft75SygZACKWMJU0gbMweUqNphDEjgprXLmSDWTOjrfbT0rd67K2P+brI5I6URFIKz+adqYhMAbhpRQhHuAw/RWDApVqXEUZTAKyqFk4izgShohZmGWxIuFfN7sYhEcy0QyCEIDZEyakEAMKEnjMlSoQphiEBNABnFv61v/USLDML86Xw0pG8tUaYDZI6uy+JxsIj4lmhCQ0p4WXG1DTMwMyQUCQcDNADAJDX3k29bbqt3bxBEDgIFCaPaEBJ8Fp4KHnUQHfW7hC8LEvtJ8CNxQKBiCToV+JuV1cCSuESwY7EUkZhoqVIJ+RnNz9+fv39r17/4qs3f0ggiWb0ISc
HN0d3FOKx8CjM1hvGtrUNQwjo40/uzOznf/7n7799OByuhjwQjj/46W9+9J3v/P7v/Ys//Gf/NIN++uknzeLc+T/4D/93kvn//H/6P/5P/53/2R//0Z+7Yu99W5fz8emwv1LtLELCvbV337xe6vr6m2+++uXnT+/fvf72XUIkkd5drd/d3pacAWgcBxSap/H67plTmcZ95rF3b72uy0IZSx59aw5KJTOodxcIyQUScsrT7iDgvZ4OeQbTQN4fDlMqGNSsaTfDdSpjTlKmfJiv99M15QnmKe0nyxPwQGj9fKpP5/V4Xk8PBJaE2dC6GgZ6t7ZoPUN4WAVw9dVAgYmTKDR1SzQwpe6dUBKOKKVVPOqHZnrarG/BkBAySw6QkkfhjGS9b6odMcLN1SMaIpgugeHhiJhoAG9qbhYGOg7F1d0aEXRXVW0LmObehTlTcEpjBEXo5T5CDCCLeSAUvnSXgDEkJZTkgOFuAMF/6e986gBJikV1vCdyADbzAAMcTZEuXyNbQHMARGHO4eTu2rtqIIJIkoSEgoERoN3dQFXXpbVqYYxIzBhIhIEiAMA4SgATCu/MIcARvVntdrIwZCMOBEISIA7HgOyOSEAMWnsAD3kWcskGYM9uPy55+PKbPzot70u+CcsBCLAi10AJCIOGthvSDsMZU6vdFD/9+EdjGd69+erbL792TB89f9VqH/b7V59+dHw6/tnPfnZelrvb2/3d4dzIY/h3/71//3xc/6P/w//+P/jf/od/+Ps/W07Lbp4eHz8cj4/X11fzPCzLKXH66suv/tl//3tbbXXTra59q9rq4TA/++jjMh+O51Ndz9ZrTmk/j2br4+MjJy5lGubd/up6dziUaRymqUyDe2cmTgjWxem81uhba74tj7YcW20qfHjx6Xz9nfv3X9fH9x/uX58e7nVZgCPct20jb5nwcHf73e/95u1nP4qbaxCJh3fL/c/LuvpyOp4ezw/HdjxqPwfYthy1NaJILITILEOZh2HPwsKcEiMABlrvdVusbRzAgO4dKcDBICr0Vbe1H5f2dF5PCkQ0sucAbFYROKUSzu54qaWsa22tBVmAdt/gUmGNIKK4nISthA9judpNV+CpdY/w3tQ0tgUiEtOQ0x4wAYBrRJiDdasWG6KJJMLEXNwRGVMGYuu6AToh8l//+z9wR4bU7YzpQSQQGbx5BNJMOKSEEYpI5hUCiNnNPfyidXWDgB7YEDMipVQSiZpHmKr21rUzhBCTMF56FkRsoOZbTkk4hRESRARgCJG7mjdHTZexB0AEAnQA9Eulx8PDqmp4jCVHyKtXnxD1Nx/+mfOS4Ht1jW4bsmdid1dszSuCiw9oqeRkmxW8+8kPf305n37xiz9C8v3+2c3t84e3j9dXd1d3t1998/XpeJqGcZqnPMyPp226evb3/vV/+8vPv/q//l/+o//Vv/+//PKr13/8B3/04x//cFlO7r4s57u7uy+++KLW9vDw4Y//8A9N7e765vmzu/28S5LmcSx5SOP+6u6ZpMwE5+Px8fHBPZACkRWI0tiaqnYZMoBLYhQch9x6G4aZQIjZQB/WLfry4f4dRpC2h/u3X3zxRR7GH//G3+RyhajL04enpyfrJ0aXPCELRJpTZia53o83L4QPGJ3WM9SWw/p5zZzA9fj4YX168tCcUiJyd0lClx4TEiJFgF8kvAGqXXsDDw+tfTmuT9U2Q93Cjl6f2qZ66qDnuoZF4qJGZpCY3TsCmMblbTCLbavrVg2bgxl0pCAUpAhojAUxCc9ZClOax0Muk/BAmLXbtp29W0DyYEC6fBWq5t4QtcfZooowUyl5FhYiMddcOKAiBSETMv/OP/gBgQC6+SLZWIBICU3dMYYiE0RtvXmQRQ1yBEIi91CtANy7mVWIgEgRwEzCbNrMzUIRJQwBmIVyYmZuql2NGCw2i0AQEoYQASQgsEAOIHBvRJqEL7dqRnSDRICBiAAQVauausPHrz5xjPf3Px+H0W3XtVk
nD3BSpM3sbEAQ7E3Fpix+Oj0VHj/7+LP7d/dff/WL26vDfvosHL76+uu725ci8vnP/6yUoQzFwK5u7tZz3c2H3/iNv/L2/Zv/+D/9v/+b/9Y/VLXf++/++a//2q8/Pj4y49u37z755NMvv/xKNbatdqvzNLx69gzdReT25vbZ82eXrPaQ5epq3u1KyjKOo5q9u/9wPm23z17wMDnykDOAJxaIMGtzHlkSE67rtjs829RSlnmarR7XZXn75sun89Pdfj/p8c/+4L/96svX+7sXu/1QpjLsD9Zb73U3TLvbV7cvXx2ef7K7ep6JsFcL9M1FcpU4L+eZcwA0a2HKmQkcAUUYL1c9QDM1cBZmuTyAS+hKmXEYc0KGgDwUESk8UC7AQ9u02hLATQGDEEU9hSN6Z0lde2tba5uqr8u69eO6HTucUTZiADTAxhKIGmCX13eeZ0Q0tVwA0JiyRW3tCRkDSbu6N0R3g6ZHB+u9YTobLADANAonZAIUCGJhZiZCohJO/Nv/8BMi5ARBhmIkjpepe0hmyd1a62vryAbsDk6CRKzqCAl8qisiAIALZQRABPCLOi8AEJzcIIIIsORhHEcPU6seGoDmm4Uxc6bMiMTWbVW3ALeoEZoSiwBiMBCjuCtjYsoe0Fqcjk8vX90iLa8//HPizZTDBzU0jUBk7mpL1y0AiZLAzOTn0+NeDj/94W+9/ubbZVlvrp4xlG2t796/efX844j48z/7o5LFAVnk5uZ2XSpz+cmPfv30dPwv/+l/8dt/67devfrsP/9//1c//Qs/OZ+3/X7385//+Xe+893Hx6O7MdM8TykXAiKI3TzO+zlAEb3p9vzu+sWL27qekuB+f7i+vhmmUVUvedXpao8IQ87TWERoSGWahmnaOVAuxbSZ9/lwUKPzu68ZAgOsHtvp/ZuvvyXA/W63Pn7z5pufA3QZh2G/313deKRxLM9evry7fnF7+2J68Um6+rirAi02X2kZYn2q9+/v3795fHp3Ph/dsACHtd7qZavWcARMSVLJIokIiQiJJHMuCYlE2AGZk1xkss3UCDCZQyATswZ4UCAJZ+Hc7YxIEWjWa11No/dW+7HWs+GKvEgKZoGwX7WFcGMacs4AmHMK7EyA5ABntdrVERmRwwLMwTnAa5yarj2a4xNxIPIF+otIhIUoESEzAhAiBzr/7d/9fgA4avdqWJGMwJNkRjGn7dRqbwbKCZghIFggvFBMQplx5JBEucihpMyEGIAQzOQeXVut6iaImVByKuM4SObEgBitrRGOhGZRchlyBu7NNzOtfjLTABNB5iyQiZAp/yp+ghUCn+77Jy++l3n89u0v5pkDajgHCMHQtREj4KbePCKnnPgKUNf29VX5yQ+/+6988dX/4K3f7l+15Xh8fGzr8uL2Y639m2++fPHiuSEAwfX1DQaa20effqTW/+TPfvbi5Ytf//Xf+M/+0//XT37yo9779fXhl5///Nmzu3VdT6fTfj+nlAFoW8/b+TRP0zCN3ToTEvjVbuemZZrGaUJVDM9jub07DIUIA8gp4TiU68Nuv58lldu7W0Y8bmcIkDIMQ16Oj0lQhhJhD2++GnZFys1EeTeItp5Ynz1/Nk55e3qigDDdD7tPv/djuXo25XF/feDnL2wqIEnKtaSpguKYOAmeTc716f6N+UbaODSlAQkBADGQMbGMZUglu3lr1VQBCQJUVdXMAQFbq9u6ACiZYQASs1AYb6Bn6xrdobEgJVHdTGuSjEiqql2JAUiJyehIrES/antRAHMggakSUYAxh0iuVYdcAKy2c2uKIgxDkaS9tt4UevPVo1lsDiujmF0y08I4EuzdBUEQhHEgzIDG/8q/+d0AtFC1DbEjOYEziTlBpLbBVjcLJUZmEE5MJLhHGLwjeWYcGG0sOQubmxsQATM3t9Zrb6BNKAbGQSRLQqaUkggXQOoNXRNCMrNcRsloCBZqvrXew5GFEybChAiIGIThFuFPj+tHtz+8mq7fvX1
9uBkzEYJFuMDOwdW6e/OoSB6gCCmnIdx35cWPvv8X/+SL/245P0y8Qw1dTrbqy5vvgsObt+8++fijnDKQSCnCUrf67PnL1tdt60+nh7/4a3/jX/wPf1gK7Pe3HriujwCGGI+PT/M0sZC7927LeqRwIjfvta1a28vnz0XScnycr+Z5Nxchs21/NQb0KadxHm/urqd52u/maZzHcdrtdimlXIowRQBGlDQlSa5LQst330XGxw8/vztcld0tsA8cEC0Q9oe7/bgHwJubu3DM43y4+6jcPKP9deCcJak3ZKzrVuaZpquMJXuAbg5mmw6JuBj4EOjMxEyAzEjE3Ky5O2IQ/ao3jwhEpBaIhB7uqh7gyJJdZG1+//h035cYM6WMocgISHU51daImClHRK1r1wXZWBTSxskuZq7EiuREDNQDVjMvZQAgJHZTBEuclu2x2VHYRZglzBfDBtRapaqPASe8yNtpQmrESFgYd0IZIhDIDAmTx8Z/6x9/jChNt6YLQECPhCEogNkUwmPdNABRNHESSVkO5JM1ImAwCo+ciMgtFhIgZkCwwAjyAIzilhmHnKZ5HMY5lzIjIhohT4SiHREFIhF5Hkg4HAIsWl89IuHEEAhKUQgpoJnZ02m5uf7senr5xec/m698nke0GcEjQAQtVN2YLtCUysCCo0eHTj/+/u98/fWfrE9fPZu+X/yAsFlP0/Bsf331+u23z+6emeL5tGrT/eFmWU7TNKhhEv766y9evfx4OS/39x8++853Hx/vd4fd+XwiotPpnHNBQvfGTACgauC2GzOCeuhuHHNJqh0sANS83dzc1N6JoOSyGw7z1dXVs9vb27vrw+10uDrsDyUnREzznMs4lHHalc22vDugIPaNest3zx3i6asvOPH4/KWHAqya5gQpX92WUvIwzy8+CUPtLY9X44tP3AnUINy0cxI8L/3+vdBEwGiVAFRbD+2dWl85cxaJQJGcpaBwmaZShpRzGcaUS85Z1ZpqTqOhm65Bdrn/ba0eq64OD72dXSGNgEXSkNPsBuZPta0AgZDDyWPbtgUpSLpFc++5OJERXjD+gWABTsSIHEBdl4i+nM/gm5OZniC6ZIZwJGnW1lZbde0VEHO+jA0i8gYQEAwxhDlxmLkpunnrR/6d3/1OhF1CAbX2S0eMkdXJVHu07hGISTyLCE7JiwMgi+BgtiGypMv8QB+GVPIEaD0aBCGgG7QKETKUcR7H/TBe7w9JcoAz8yAlAFrzEZlgkySAkCGEfbO6ViUcCAnZETJglsTrep7H/fXu2ZdffMGJy1CEx5wHQAzSwAhAMCcJChIcxvRsGK7r0j/75Cfe2sP918/2Lz3YYDPQteLN3ScP7++F87atj6fHWuvN7bX1BsgAqW56Pt/33r/73e98+dUvP/30k/v7D4fDrre6rsu6bkSX2hZGYIQTAWFJDGOWbd2K5K3VaRxFsqtSipRKGcZt3Vqrz57f5rJLpUzzVZ7n3dVhnEZJiCIy5GmcOI1BzCOVLBCYxsk0fH3E1qcX343E/f5b5OnwyQ+9OkIM482w2+2vX+bDQcYx0zCnibaGuvHhQOOVbrEdn/h8wu3Jv/ni/bc/R2J2WO6/9djO50foOQ/s5s26Y3BOJImQE/+qzHKh6XdTNQtEi+h1C+8hQCyhfKzrB12dBVAggEsBcowAHlzM+qraulK31i/FcmyqGyKEb4AOYCJJDT0gABwQYQRMampe+7q12rd6WuojEYoY0EZoKY8KDh7bspl2dwJkpDCvHhtzQnB1RWQMEmSMar333npb+W/+o+8G4NbWblvvzUOHPFxKre7QVd2BGAINach5T5AghiwDErmh0MDiIh4YLIgM6tXBAdjdEZIqhWPJCTGYZZznaTgIFzWxpkQAUdxgyDnCJBWiDVEMUL27ucDEOJk7SEO3kuZ5d/ji9S+B+LB7ISjClEsiAkJqzQhKytMFvpJ4P+WXCa6f3373Zn/31c//bM6oGF0X26rA/Pz
Zp7syH98/6La23pB5d31gSWvdbu7u3r//IJLevf/2xz/+8bfffptSGcex1rrb7d+9e2/eEImIcs6qegElEHFOOUm0ugxlrL3VWg/7Q5hv2zLv52EYVTuAm7V5nsZxRJJpng9XV3WrwpIk55QBIOUxJSE2sMycApXSnnmE4s0r6zBe7zJI0s0tdq9+GJIkglnGYS/THvMeHdLNjONYTwv4RtPA82HIWbeVXUqZ1vtffPPH/4wlV/SH+9dZLkV+6L0zcsoJkdwjAIIiSb4ogGqt27ohoVAiRETOqRDCtlUkSiVzGloQInhYB3O2ZluYETijI2XT3NpS29K8pRQIqTdDbuGAmMMREdwrEREMZtw7uIf10AZaL+HgFl6HYcjMDoDozG5u2mHbqroGBAIzpfBMREk4wNyAKZtW88W8dq0Owb/9D76jEFuvvfW6NVWVLEwIQeEQAF2bRQcAAM5lSFIYRowS7utaT+dzxMbsgebRHHqABZKaqXaIFP4rbMuQR8IRg8ZhLjypX9YREJJwNW855SyBABisXjVWiCxxQzghOTIg4tXV7v7hfe0t5zlRKTymlDgBUQrMtZ0Bfcw3CW+YZiIWzkKHFy8/ev/hW8JgnIrRjscDfDrxxy9uv/vtL7/t9YThAVByHsbpw/3Dq1evzsvSu14frolhnndfffX1d77znaenp3mej8ejuzMLXaZ2EMwMEUXEXYcykAATofCybleHA0SER+udU7q7u1uXc4RFRB7K/nBIw5xyIWYASCkxYCoZIpDlV+V4kEvFInCIEE8tsdhRSQSHgwD1vjmk/c0nQTkAmjYnT3mKklwJ5is+vMLTKZ6+jaTRKVSTLu38lOYpoZw/vNde3fogWQQtkBBzzlkyIDIJCwJGuPuvRk49pZQkJWEipAvkHmkcSkk5IYGjGEb0p7o+1WM3Q0oKttUTJsiyYxx6b23zZTUHA9faFo8Ix0CPMEQKMHcHyNrBDF2TKYaBh1/mgHtriEycicFju1z2XEs947qFXzR4NBEOl/AeUTIDN9PYIFTV3QMZ+Lf+4WcWbh6u0LZWW2WOaR6yJKTGHNpdmwJwGDFhykOigVwC2DzqdoLoRAbY1RqQEUMEtW5mHUEYEwQgyDTvE48SidlzTixea3UPDUPo2htioKzgnGVydPUONmQ+CBdCU++7/bTZg+maeRdKiJGHgsHBJkk80MMQKPEV+TzKPObJml3Pz7EPfTnOAyecB9qP6bZtueRkbXv/+ptMoSGENI7T8bSMw7Sfd1999eXt3W2r6zCWh4eHYRj2+/35fL4gWi7JbQBEhEuHKOUUEaVkJiEWFnYLYpIklzqGCElKRFxbZUkaMY5zSoVSGeZZRNRtKAMx11pTyaYhQh4BHOYunN06AEJTDKQioKsnNpgG5rrdQ4Dsb8lWZFxOJ44Ybl9EpNiOlEBuP4b71/r2TVCHtp7efbm8/UVr/fbVqwTt8cNrFiEEAAqCVLIkuQRmiVC7AkTORYT9X/4ggmo3M4r4FbcSgQ3V0SKSelfrII4QBGv0sz2JsNAotMucBIQhbbWdlpN7D7S1KpAICxG21lvrqu7G4bQtDjCAEyBwIotwZxEhSimRu7pbQISzdupb3tbonQEyogA4ElwGDM3C7eKiSESDuTET//Y/+sHF2gth7r1ry6WMQ8mJiBUpwsHU3YCRhRJiEZxKFklFJAE0tJBkw4BIFG4GvVtsW3UHdAlTc289IGI/7gk4vAZ44rJ533q99MnCcKurwZaEheac9iSFMYhIpDAFp2a4BnZm5LjQhzIxQXSklGQEJAIGLwIjhAxlf9i9ZMzovj0dRx4ERSh55G2JDvX5sxfQiD0VHkNwGGYHBI/rm5s3b96o2f6w//qbr6ZpcvfdbnepfvTea61mhhgifHFslVIAkIhLHiwMiQkTpzyMORCQkCUNQyplsHBAyKUM076MO8mFUt62KjmVUrQrIvbePZyFwa33noekreu2EXbvSCmDNyAiQgr
HNECvqcBmRwTEstenczwdt6fF6rncXQMjPL6L5Hb3Ez+/L++/9ixOQwNYv/mz+vBV3h+Wta3HJ2Y0YBYys63WICLhYZiGcUqp+MU3EtFVkQiJWERSiktpiED7VlvfuhkFE5ljJ6qwPG33j/Vk3vbDnvIuuoBpgpy4NO+nZenaHYkQ3ZEwh7N27I3cWNV7t3Bn8pQxMFQ1AACh8BQuzJlJ3ClMVAk8MYg5blWBMMLcnZAI2AzCHSKIEoREXK40mX/nH/3QwGpbPdTRkEiYcsYsxOyXloGZqYY7Co/MI8eUWKQQAjg0M2eOlAECWt88uoFvtZpBRNpq7d17h/PpNOScZXAHRFRTlBRoECGIgFKbq/dhnBPtJe/zkEsxMzdNqSAnW+qxFBEWt2CahIbMlBJ7SDhcVNVojFAy7zxiv79Jab8sZ7NzQJhht1M/v4UtbuePr9LL+/f3Dr2vm1OogjsAU87ldD7nUoAJ6eK/5JyLSEpJHh8fL0tgKQMiMctlKxDhYRhEBDlKGQOglMxMnHgYxqGMYxmkjCx5mveSp/lwLWUCRBGJgLrVknNcLl4AddtYgAi1GwExUj0fySI4eUYBhQpEaM05hznp+UwY9d3byNepJKFtOuyPH97I5mW+9ccv8f1D5CiHcfvwjW0nlwLz8+38+OaXf0xIn33n++fjk7oyYdduqoA4zXPJY0oJidbWa2sByJLKMLKkYZwkZSR2BEI0bapVLRjYzDtG7fbN+d1Xxy/vlwckJiAi5lzASFt3BVOqvW51UWN3FCaiBMAQYhrhbObmFsApE1IDMARwx5xmpkSWTLltkdOOI2lzj4AA62ER3Rwuaw+yO6gBuACgeyOGPAxmv6rk8u/87vc8LpuIeygjY/iYIJcQNoJAQDVRZe3gRomFhYmJGQEdcEMId0/ZIkLNAdFRa93CmaCYkyskEnAM8N1+RyCS2L1qWM4SvRPJPO3DyZwIr+Z5GOeSeJfz6HHuujGKee2xkLSSM/FgkZBERJh3bm6+sQThQDSWtE+yJ0reOaccsJk1COMwqCEIQmXCu1BUWyIgD5fp/h4QwzhdYFCUpDdDoOvra1UdxxEA7u8/+EU0TpTzoGq9t4hARGYaxtK1J0ngMA4jAgpnIkl5LOPIaeRU8jDmYQZMZSgA7BHgUEq5nKyGYfiVxRPZwMI859GdulYB1NohyVoDVEE30A2Y+7ISynpc/Wlzb+v6/vrVp7ZWPZ6vP/kuUqy6Yhm41dAjEAkOx8+/rsuHkQPy2KKtT2/LmGWaTuum2pIUFiZmU7tEfj1MKHHinLNIRqQhlZQHRGitUbhuzdSIWVIBREU4te39+nh2M+CU2QWPfW1VC48Yg3kN16Zt2U5Vt7DLqEokTu5w4eIENYsegSkHoDMjuiNEkSHhQFGsU3heFzUDRo4wjHC18KTu7mgO4EFMgAGggI5AzA7ozJxSBuiIC//tf/xDDEcIEujaKbAwzIWGTIjg7ojSN+pVAqK1ypzSBCklRo5wJHPwiAa4RUAEIrFC61oRMuMAIRQoAEXGrptkvDncUbRAVN8wdJBdREkpjePEUFjKVHb73ZDyIdG41futPqi22lcqR+RtkDHRGMgRjJiFCxIBmUXzAIJB8o54TDKFuelqvvZWQ7ekNiiHixnfzXemjYUZpEOEd+vh7k27RxiAG0DAfrdj4Tdv3rx48aLW+vBwPwyDuxMRgNW6MlNKOSJ2u/n+/gEAkyQz2+12iMychceLAA2ZOQknaVWXdZEshNTadsEYppQuy3/O2ezy5on3DSXlYdrqmSCW86lBL3nfzls/vfdwiXw+Pq3nUxpH1Q6xWa01dvPV1dvPf4kW4+FKMsR+H7tDd/DPf5mR0m54+PrPCsP1Yd5MEeD04UFSBkQiBkYWDPDMOSdhiFIKESIgE4G7sIBHb6u2RgDEMAwTj6ML17WqmXFsW21cSpkR+djOj9tT7X05b4lGxNztXPvJvFVtEJ6yloxCA+Il6FWIimM
l6cQUaMKEhokL4+WNymRiuuudI3LrGoGuFhHhKYIiSI0IBEkAPBdhgZQREYg6kqlXlobY3Rv/zj/4EZInEQjUejaPJCUlZCJ1NxB3UvNaPcBDOrBkygMTXfqUCGpbt5qYk4iZI0vigsYBIVkI0MOSDIUHdGf3w04C1aEPAmrGZZiGK4yym6/KsGeSVPI43EoiIN96X+r9cXlE8cxVQ51gGmfCHGDk5AYgCsjeSXsluLxOY+KBAnvrrmfEJWzrrVkwWAxUxjRua+tu1UybtfW8tQUAEQmIkNjQEWmc53fv3+eUxnH88OFDShIRy7LM81xKbk2naSainAUATqfl+fNXiMychmFAjJQYEVQVkYWZkCGw1uruvel+v+9dtXcS1t5TSrVWImqtCaI7n44fgmMYOAI3w9PDBzyfaRwYCUxbU5CShPu2hathn4adLP3161/w/sVut+v3v8iSeBxtKPh0HqzBLp9Oj0kwmb/7cE+w7lO2poFs3QQCgNxUWFLKampu9KuLb5CgQlcDQgEkYMqp5CQsREkCMDwIiVM6914dCiUQOtn2/nj/0I7mFh5P/YSurdq6ndWrRtv8kcVTlpSQizLzIKOIIxFxIWxCVggzFsSCVCKy9qEbmrJpIGBoa7p2JHVBQgVwV0aQNDsAseWcipQxR2Zz1e6r+gOSQVDtlf/mP/iRQhMhBN16d4oIZQ8PdzWwMLO19mYGyACM4CUnZAFuiIqI5q3pKjkN4wCQASOXnGWgIII08RxODJQTlyQlYWYTpgikJGadYtjtnu131/v5bhrnQWaEHAg5UUSufevxoHZeN3PcSmZ2IHBJhWKnG/TmddMwj94cAEncQ60L5ySjasVUUbTbEtDdacAdqIMTS2ERvAz26KJaXTWAUp5609rqkIf9/up4OkpK7t5ay7nUuhHRfr9HJBEhIjM7HPYfPnyYpvnq6hoASimIMAyFiC5WD3edpt3lsNRaG8cxIsxsv9+vy8rEl06Cd3UIZl5OZ2E6nY8s0tV103Eoy/Gp1QqCZRxZiAFVa+ZgZGTazJ/efH17OPjDl8f7D8Pzu+LNLDznHDlG7B8+ZE88j29/+efQz7shn46t5IRg6iuXACKP8EsdFFHkgjlnulRbUiKSIpkJkiTGIA5khAA3Iw9BRiaFcIQOttX6fn16WD6sdtKuusWq9rAct74p9q37povh4mEIncUNUNIwlpk5mIlYHVYmSJQIOfOesWBINLKWtELT7m7m2ltr3QOJ5fLvlkScOTNBzll4KGVADGIADo/VdPWoQbXpUtvKv/27P8oCAC0wulvrHRVAqYeCOwZpj7W1ap0olZwDGogTk4VHmDC7Qbgie0rZ3EhkLDkRWVfBkoUuAWaikJwHEaGeWZCpA3iAbrq/ut3tbwoNu3EqMhho7cdhGIR2VbfaHxPNblJdhaxA6g4YkWDoXeumrZr3retZQznNzKluhsZI7rg4tN6dwXaShthrjykN14cXW4ckLIim1qu2doLAcdpDkJmzlKvdrgz58elJRJKIqhIRsyDSfr8joog4Hs9XV4cIf/fu3SeffDqOk2pLSZh5GMqlO5ZzvpQRe+9mJsSSZLfbmZmIJEki4u7oceHq5ySmflmJc5lSHr22kmkYB6u9ZJGyezzeD0zWq9YTuW+95v01Ht99ePNt2k345ptl6LFlbtvIaA/vdC487tvPP883+1x2H95+ftjN3WNZjtc3NwF4eZRIKJyGofxLgTwjIjMjsuSBIRG7e7PeERAgHAhQiNPlGSMRIarqw+OHt08fvj5++14/bLqa0uasxuh5tbXqsesJXcG7UOBF6sY85KssO0kSEQ4a3glmpnALMEEvZBNEAgPgZuEB5t7Dw4MgQCQYgzmNhRgICKVcZAWMiO5EAizozWqt3Wv3Tbvz7/zDHwomhB5oFmCthyo6CDEReXjvpuZh4U7EJIndgBgRs7VISIIcoECasjADURJKhNANHIzYNBQQSTgUzC1lzzkZhYY
BodmZU3p2911GFlLmBMxrPSHpkEZwbluPLomzQpidL52yrhaW3ei89Nas96qxOgULM4t2XdfVvVqv6F4kZQ7T6j3v8t1cdixz75EIwKP1BtBYeOC9RQKAYR5yHnpXIF+3mlOapqn3flm/d7udCPduqpZS2u3mDx/uI+Dly5cASITTtCNCZgEIZimlpCQ5l2VZtmXd7XZJUi55nmdVLbkMwwAAjMSCl12lDDnlZB1KljxOicCtYy67obgjUAlb0BoRL6eHpw/v6uMHaHW6/SRsTR5NH+/fvc7jmERH8Loey+sHubleJI5ffXn13Vd63J4eHsapfHj/zfHxEQHq1olS72HWzP5lmh1Ata/rwpeKh3tvBkwimUmAspSJxhE5QS6Yc/TWW23akkiRGTidI87VHur2FCsQFRyZ2BTMGkYkolL4UlAdhyKIAoiILKn1jgLAwBdPkCJBUWUECSA1VauqrtrMlZCZA7ELQSLJiS4uk8CQBIxIQIgmpAC9d2/NmqmbIyT+V/7B95MUhW7RrUFXYKLAgMAIUutdO4SBsakQSUkjYwkXZMcAbxhUWywQDojECdwhhPCASN22qjXAARFJENCsB6uhKvRASlm4YN3as5tPyzC5BTIT8+l8Oi8PJbFAasumtlJgD1ULM0BAU1iru+JS+7JufnFziHMihCCM8HANCp6HkcApaOBxkBm7gVM4123VttXaFMJtcwuMjJLzkCTLpq03HccdC43jmBK3Vs0853xzc2OmZn45/avqtm3DMJRSUsrTNJYy9K4RTsS73e5ScxiG4Xw8EdE4jtNuHoYh53zZWzgJiwxZEkvKKaUEALv5ICzoiilR2FjkXDvUhom72cAgGF07Bq6PH/rTGzu+jd3LQ4aRqXKG+6d4fr3bX8XDIxSw9uhPj/l7L8+vv22vvxKOx/s3hfmjFx8/PjycjkdCIioi1HoFABFhFne/HNvcI8KAgtOMMhNLAORSUJKDmZp2beeln1fCQCYSHsqwm+a97AfaEyZbbauGEV0BILLMiRMYZM45E3IwIQUAhAcCQgCodYuVeROEUHC/bKjq0c1bV9UOgB5hlzkpoShFABAikMHAgBwpEg+JGQLAMRw6WNVaa4NAROS/9Y9+nAubm4c3dd3CQTSag2l473bpZNUOZjTkMVEpskuUELq7Ne0aW4OtawdElEAXwcmdAzElbq7dNnNFIhGMsO4QAZicnAKRU67rypwOh49aw+6Lu671eDo9ONgg8/l8qv0IYV1rdwdM7MVDLMIsmnptysyXVSkJZE4USYIQkKMkSmNJIxdR5kbW1TZKNDIEhUlOCIGuHoyElAhFgNmDpmGaD1fC0lpV7cMwlDLs9/vLDKS75ZyGYbhcakspwzAdDrvLuXnbNoBIKQ3DUMookoahHJ+ORHQ4HFLOpZTLNxMe5lZKudrvmYgQc8kpJWZhiKGkIHLt4D0AtvMTWQXGAGrLKYEyYHR7On/blnfnp7eA0zQU4sIUHGbTTV/bmFrVDksf2jbtdl6DqKVBHt68H+fh1ac/bBoMxpyATVhyLsxsZoickkQAkaQkKac0jJIGJAAwQFDv0Lr31tZFuwWTmiMhEJgiEoHj2rs5OOWl1WM9btaJ05B3QxmHRJIwgLr1ZhvCGI50GUkn3GpFNDdIiATg0QFR1SAIg+3iQEJzMiZixgAnhjyIR+26mAKgCiliiBAhEVymc8Ad0Ar41BryX/9HP0FAIq6q5obOtVrz6qGMGB5u3rp3pfAiNHAw4jAOQ8K8bbrZ2REutCz1xmyZZvLsph7QPQCgtzWoBbqrAiDSYKHMAkiIHdxVodaYhxui1HXdWm3tuCyvz8s7yUVdj8u3hisJWHRmFJoJxwiESK5MlEsp865IRgC/+LETiwQPPA4pZ0HVc+srsKAhhOzGnRDkPIIjYVhv5kQMIJDSUJsjpaGUlFJvbd2Wq6sDIpo5AGzbduHS7/f73vvpdBqGaZ53t7e30zSWUtxDVUvJ0zS
IpGEYc87upr2LyDBcOmPDUIacc5LkZh66n2cmJMKSyziMAFh7C/c0DOBmrQ7D4NZsW4PYu4tuy9Ob9elhvxt5TNZOgvkU49tv/vT27hpwtad7GdnaxsfFiPJUsNn9h8frVwdEgeApyePpzXh1s58O1hYAAqRhGB28toYoklIEAFISGccppWzee9tMGyK6O4aJcAByTsN+x2W4kJ6X9XzS5am2p7r1dq5eGztLskAPoMhSOGci4ZzmnGYh2nqs2xbhBGzOapgywwX/pysLAMHWcFvVLNQUCBDRw4Mh5UA0j96jpoE92rK1CEQyZu29mfWUMhI4WngwjAyTdeyN+C//vc8wiMUjFGEEyqqtLu6uKWF4mPauEZYJi/BENFIumQ6ZJiI496VbTUAXlB0SFRqEBg1em9bmrTW11uzsEZc5N3VijkyFEzNiYmeSeokSDkM4qS1rfbtsXy3t9dP61mnb6octFmIgFOEJQC6QCQYsaWSZCHEYZRhGQgfvRJwgCdLAKQOzIxglZHRzgFF2o5TWtoeHh/P5EaxLylIyoUzTlRoAYxlGbTrvDogUYUjQmhJRTsN+vzPTUtK2rcfj0zCM87zb7/fTNF3OQdu2EXEpebeb53mnavM89962dZvnOeeMhLv9TliGYRAQScJMEI7u67KM08ycUmIA7O6pDENO2hYRyUXujyfziGVd3n8rYu/ffft4/+bV7fem4aZ5uXr+6jDA1tab3f7+7bvFlut5evv1F0o85MFKNqtPb78eMB/ffygDItnT+w/iPO4m5CC41DSj9W4el0mqcLhEvtethXehECkBlJNkZr/8WmVCZHFIGNqaMJGwUUTUp3r6UOv7p6fH06OCEQliF7YswyDXTIVF8vB8Hp4D+nq206OaJUqZmRhTkFftGu7EYalr1KYGARoOUWsAQR7RY4MwZHK/8AgTSAD3i87RFIgAIBCCPEGdVFPrzTX4L/zWzNJKnjBKhJJkpNKb9nYOxAi15hiFhb13hGmcXzLtBDonwJy003JeAS8jyUhBtS/EjJj61qxHbdu2roiVpBFlAGFxJAFsGefEBdxzHrPsEo9TGV09wNft/v3jL1s8LfW12tlxNezMIjRkmVmc2RCTQ5XsKQsFJJFhSiKOCIguhoJEjEOapnxIktU00LFbrxrGy3lJA0+7mfM4lIKUmPm0nQEpp11KZZqGcbc/L+v5fAbwaZpvb2/3h2ldN7MWwWY2TfNud5inPRIg4jhOqn48nqZpOhyucr7EJXi/359O5659t98P43DpYIvIbrcLUCaZpx2S5JK3rTbtwzA+PNxzOJHrVplimMv5vIRaInAg257WD1+dTvdJRI+LLieLk5Kv7bwbJgjfuj2/vf328QOW3WHY1bpp7+F2uL0Rzh8+fIjQ++M9ggy5dOvNXCiNOefE3exSN0Ezd1dXYTJyBxvzMMxXwkOSBJwoJ6YMLKpqtTFYuHZVYZqmcZ8n83iq7dzW07Y9tO20vlv9LFkST0nK9eGm5DnnYTfeTOOrZ4dPrufPslxttbuvKcNcriMAwps264FcyHN4AkaA1Fa80BeGMXEOAwMwuygDMFpvSA3IgC66504AFhie3YsphuPWjf/R//hvrNsjEhAWjSd1QhgI8bSttQYgdGOAYWBKkfpSp+GqlMngTIhCYq0tW9OoSMjMgNyttthQRE2X87FrN1egyikAIqfEJKlIpgyBhAxAETTNhyIThgRVD+xen+ovt/aBmQM6ogP2JFlSYuLMBcFE3APMex6QJTA4cREyht6tkjFgJmYIdtC1PRKBWsfAkoYp71IeUx73N3eHq2eOhBHLsgDzYXc1lHGa5mkcP9zfn5d1GIZpmuf5sNvtlmU5nU7unnO5u7sdhmG/P5Q8HJ9OImma5giKiHEabm5uELHWmnMexxExVFVEcs4550uKTkTGYUJkkTSOQxIuQ2FKiHh8Wk7Hh/P5AUDPx4dWK7ltpw/Wtvnq8PjuK9RTVwv1/VzOy2M
pu3Ger1+8fP/mGwlwgKf7+xeHG9jNhDKTnNbT6eGRAOerQ183dA2wbdsQaBhnYoYIYprG4cITZ8KUxN2RUASFRVIiogDgVESSX7LQNGIghFFYoAZFyilTOS/LYh0I1GmtlyBmXdu5a2XMIgVQUpnH4Woqh914dXd4tp9eHua7Vy8/en73ijiFgVsgCwPWCy8RnVyYMKwhJgIJU0B3i5TpcqYmAuEiPElQdPNuSSS8rbY5kHruiuHglWxTEON/73/97wCmHgtGd7AL9CQa9h7mK4KE58RpSEOhtK1bbTVPsyMDWaARmQauG1hUEWHirqt6M7WutepJu0eEqgEIkucCIgNEHqd9SQXchf3Cqcs5S04UjBgkQ7d1bQ+AwCSAW1xi6BnNNvJEII4r5U1tQYGSBV0ZVchV116bBQGQBIM7sQiKNZ0lH3YfT2U/l4mBx3Eqw6hq6t5byyntrq4R0zCMkuV0Oq/rsr+6FpF5nsZx/v/T9GdNk21Jeh7m0xr2EBHfkJlnqKru6gYagEAMhJoADQNJWNMISqIZdYWfLZmJkCBA3eipTp06JzO/IYY9rMHddRGFv7Atduy13N/3efZ9LWXdtp2Zvv32u2kaRSSGYd+rSDgeT4jk7syUcsw5ulsIUUTcPcZwj1XfS1XMDAC930d4bGZEyIRENIw5hsTsRM7kIuJdy7pA6307f/npx9KtLS/nl98c5zkQASJFUZK6bTHFw+nw429/ezo9aG/t/Tp9+wmI4LbnOW/b/v7yCu7DEIcpsbCgnI6PzpRDZKTWaqnNXN0dARGAmWMIbqCmpsrIQASAzCwxIqLDbl7cOiMCumoHIHRet23TWnqtDbZi13q7tuvuTaGTZyIoe221Hw6neXh6OD0/nj48P/5qnmdVQ+AQgkGv/SIchJJDUHXwFhhIAAkQmAXdgVmI0FpH10ghpDilIVE+TQ+Rs5CQASqqYVd0jgzBGnhHIOAA/L//+z+TkK2DhHsYtYIXcN72brAFziMNgSiwBHSwuq5rsZbT2HVDRIlBKEY5QC9qHdCIm1nT3mqvQPBfCSmInjhAjB7lCJ4BKPFMBA4lRLFu5pjyKEwOFmQ+TB9a68vyZt6RCSG6EwCYWms9hhQitP5u2NxKIkBtap0YALS1VpXcjJEZQpQxSB4oJoj7an1X7wiuHLiUsu3bvYudUkAOiOIEtXXt+vTx0zBmRAghrOtK7Nt2a01//es/yjmr9pyzdmemjx8/qN5Hn0gMx+PxjtWJMfXeQwjMtG0bEd0/Aojo7uM4rstNtSMCmhOjdSVEkWC1lr04eG+l1xvovpy/Wr1aa7UD1EuwXvcWmY055GxkTU0bELqwbMv27Tefat+/vH55PD7pXta6nk6Pw5C77jHIly+fswgRO7irIlJM6f16bqWaOoDXUsyciO5TUXBAQJFIEuBekHHt2gA6gpu5dUVAUwXXrpUDVTNDC0Oubu+3y/tyvu3XvW9mLeXgxstaUsofHr89TqfD6fHh8fHD84eHw3Erl62e97p0uzbd3JRjFhlMDVwphiApYUAAoVBb4aAiId9slwAAgABJREFU6NBDkHGYgnGCIJxSGLIMoNDUDcSdwSn4gJYdPGeex4H/t3//Z3WvtW0d1NEZFaAYlFZu++5TimNOBJxFoKl3VW3X2wZkOQiQiIRDenoYD0JQajEyTm6oxE4A5DFIZoAYAiA5KhiijzEGQXRlQFZ0iSMBtboR0TQ/W8OUwnF8SOFxWc5L/1tkAku9mzsiBqYYYxinEQnMdu0VujKKGzkqMbTetk4OCAwkqfXe+g2d9rWrbjlNQgmIem/v75cgQqR130SYSAB43XcJ/Pj4nPJwvwDGGM18L7fb7fqL7//wdHwqeyEiRIoxPT4+5pzVmrm2th8Ox8Ph0Kq5YWtdhEXI7Pep6TtXOEoo+x5DSCnd89Xgjo5m3rsSM3u9vH9dry9vn3+oyyvUvZe9rzcSnubDcvkSSFKcSy0VHAhGSdP8KDy
sy3lMfBiPe+vT6Wi9X94vrtVNS6nglgIiYM5pvd2G05GH4LXWvaR55hjbvph3bX1dl33fWmuqnYgkxiAxpimliUV6V3MLUVhGkYxOpVXtnYljzByEZGCPW+ubY3G5aXnfb9fr+rrttdwIacgzgO7lMqZxyk9jfjydxo9Pp+f5cUy5+H7d3i+Xt27VvXRrCIBE2tycIqDwnVxBQZwcJSBy66aDTBln70wKA+bk2bzfbGlQxIZgA2JAvD/4EKPzv/v3/9zcu16uy0V9RW8OW7UOagx5iFMIIgECduhbbXuzvpS+tjaP4zQdiGKkIUlOaUQgVTWjEEYHRPckg4RM9/AU5t7NzFQB/L6uiojkzGDEoGpLqZcwhnl4DhyHHAMnh7Du6215B3R3VO0hhDEfSIQJogRB6t3cOcqEzoCKBB0IQEqDtezmHb0hGDjJkA4h9opCyb1v+zWGTECff/7d8XhsrZTSatMhj8+fPmozJ3cnkZBSPJ8vpezDMDycPoQQAbDsJYQ4z0dmYSZTN3WzHmMa8qTqIqGUql3neW6t9d6naSbCVur9GrCuq7vHGO8dAGFW1W3bRKj3kpigl7effqbetBezuu1nIfr47XfWmhoM0yGP2cDBoe0rK44xA3RhEQi9b4D7h6dvnXG7nuuy7vs2D+FyPmsHYh7GcWv1NB+tNQcszRhD10buzKKqpRQRSSmbubkRSUgDS0QOEmJOWVLGIOik2s3VEB2DGtaqpXZwr+Rvpf10ff+yvb7dzufl/dJuqrs2SDwJUSlv6/6epzDGYwgyxDGFSUL0YMXq+XrbzhfD3nFT3ZmJgb1bQCIiRCCmKFE4BWb33bRCS2M6oTMaUhdwAgJwt0bWGTyypJxyShEIWDr/d//u+94rArn51r42fTfbTTuFPE9TkANjiFHY1fqm3dbSb62ZI1r98PwhpSetLcUxpti81LohkKkxIYpRIARC81ZVVbqi+33chxIyubgTuBMqQmt9WfsZID49fj/ECVEQrRUWeKh63vcLeFCzGCXFkYGZLFBEH/dWCIR9RIjIDbmTiOAwDYfIA5sFSJkOh3RgEF2McZjHUy1rDBzTeL2uf/THf4wS3l7PccgPD8+S8743ZlT31g3ALpeLqqnqw8PD8fgQY7her5fLeRxHAByGQfWOEbBxGkWk1hZjQsTWCqAdj0dVvd1up9MpxrBv+z0zl3O+XC6llHEchXjf93uGdFlu6/KuvaYkOY99L+6KiEKyti2GLJymwyMIc8zWe9tWpEi93a6vQMVBrXchf395u+3bPE1JJARa3l7AdZgP5mZmXe00HPdlp8AoASjspSD4EAdCBIRpmuKd0ZLi6XSMId0P4BwC0P2OQCQECIggEiXEIEJBILCISAwwpA7x1nRtbVtW091oI2xmDszAxoLrfltul5glpqH0HREdDJRa6bf9cr1+vWxvQDWAo8MQslBiFu5CLr17q87MiCZRgHpf0aylGFS1NwDCLGH02Wos4EZIFNlxGEJMjA78T/+nD70tvRfE5t5r20pZhnx8mD/k9CwUAwhjdPDrdt21GLKqcHRsHYEOx2dvkFLOYVDbq3aFTtiYiCi6sVlza4gMgObgxqaGQMQYOQEwoHJwclBrRd8QUpTTaf4oPCDavldtNqS5lrbsV/NOCFN8iJgQu3DsaMva0XKSkYUUNhFjAYnHIY6Z8il+GughuEx0TDocH4/TdDJDVR2nyYB+9etfk9Cf/8XfDMM4H45qaCZ5GM+X166ecl7W6/l8UfXT6fDhw3POo3n94YcfWFCEp3G6X3HuRZnj8QAAX1++HA5TrRXQ3N2sE8v1ckH0lNK6ba3W+yColIKISB6E3ay1GiKHSHXf93V7O19YsNWr6g5OkdL0+GFbVzfLOQJzqe7dWtnQ+vH5cS2lLKu1qlZTCOPhuF7O7rDdbtM4HIa0tQ4A1/0WOByengLHulZ1ZwmRowFoL73U3uq6LXvdzZqDiQQCDByZ2RyIAAiZxQj
JENwMgEMiEASgECilINDAi4JhoHFSycu6LOWt6Vr2pauFKDEkJGEO1+VtWa/j+JiGvO83gQYFtaXr8vnn60/n5TNBO4aZOQDCHT9oJqokRGDOzoxOiCzRUFuvte5EQiAJwyA5UEyc7jEk5EghhkBxCDHM/C/+7QO67vXWdHeHtvOYHz59/JOH+QOSMAm4uffd9mq9maq3NNA9uVT6mlMIw9QVjWvtCzMAVNMaMBKRm3lvzIJAxDsYWwcAQwBvHUDTEFgcvCEioG3tZgLEs1g4pBNHvJZl2Xd3yFPet9teXyl0wjjiya0bKJhpY4GILgiBY+YAjBbClCDnNKIF7Y4EA+bT+Nhqvdze67L1bmuvp9Op1/p//H//42Hgj59+OQ4TCwDRtuzIkMekpp8//9x7/fD84dtvfpXTZNbfz1/3vT4+PuU89N7v0MzfK8uJzHzdlhjF/PeRodfXlyEP7r6uSyu1t3b/IEzTpOrmjRzQybVVvS2XV2g2D2Nv6+12JicGL9u7EBLIcJqsW9mWvS+MJJAaELm3voPzMIwhiLauit1szoM5MFKvvdc6HaaUs6TY3ZZtPYikcRw/PNVq2ot5C0RqXlrR5tpt226ILUape1+3fa+3fpehEKQ0IBNhAxDnQMPkxE7ghK5WLpdSFjI3kI56vS1vSzm39/P2ee1vN9+hO7rcUxPiLMnPl/O6bM8ffjHmk9aOoCFCgnHr+8vttWyXcZwPh6eQxRHckXTf2qK/P5gJQFMzZIqRkOy6rg5yGg6RSd2ZQgox5SFwIIopHyNHIY6c+V/82fdE1LS32upev/3wq1//6h8c5g/z8BDjUGtb972Ucl2vwI3EqxdEjdHiMAAPFVqKubfS+tV6c1NANd8DgwC7VoOm955bc4WNuAkmJEM0EpfkAA0cGBwJVGnf/Xh6aqYkPI7PrfXl8uK+h0ApHvay7bUgAGhzs657LRujppjRYu8sHIOEEGIQxWhEvG19aztoe0rTuly/vL8zmSAxx2k+iMhvfvPXTDYfTqp2vrzebhfrlsYx55FCKqVeLudf/eoPv/vu+2EYhjy+vr1cb5echtPpJCLbtuWcEfF4fAghmHlr9Xq9PD09Et07Q1jKjsQp597q+XI21ZTSHYEfc6x7RcdIoZbdrNayvr+9ZLR1Obu3KJHAXXcCzykrkjYjkd7qerlEkTxmJy7abS9tW6Z5QBYHjDF2L4CEhL3X1gsT5pzVIechBIE79ElCDEFrdTAHFkEmWq9Xd88x3E1AvRsjEyOLUAgO3I3uUAIFBAIE876DKaGDdlDvddfenOJe6q3sq9X3/a1CNSitdq3YHIX87gULlETC+fa5m3/7/Ms5PqtayJzzccYHdVrrrdRynD8+nX7xMH1giM2bArj3ZkuHW8jEYuDgiB2qB1i2mmQc8xyYhV1EwpQDp0BDiGNOMTC3WvlP/6dfpTgihd41xekX3//y0/M3x8PjN88fH45Pt7V8eX+53i5lu3RtFgRZiJUEjYOkubbdVQFpaytoBW0GCq7iiKbdugNgTGNKCKywU6RACVFCwhAdoBOhqYEDIrKk1nYReXr8tncNIQZJ1+Va+iqIhOOQTqa83TazRaIiiurVVKE5SyAkIknpFOUpRjLSrWzM0hsc0kO0bB2Px2lI0zQ8pzwSwrKc9/VGRAiwLMvXrz8z8fPztzkNW92WZXl7f//22+8+ffx2GAZE6tre3t5CCNM03bNApZSc8zQd7jm5lNL7+/l3v/vdhw9P4zi6g7vX2og5pdR6bbUSMxOJyL7v45hrbYwYic3vXWplNCjV3az3GEJvFdR7tZyTAW7XGwaZUtZaL7d3sHZ8mEOe1+s5MCDA4XASSV1biDzkIaW47VspxRGfnz611rd1AYeiRkRk0EohATPT1mvdt+Xc6s5IRBhiEAmIICGkNM7zY0gBSZGRRRDZrC/XS1tW2Hcsm9WC6JKCmK/Lcr5er7elaOtMHaw3QPFe277V2pUBHMjczAG
R3cvL6+840tPjN2M6lbZSwpQGwhQ4akP0aZ6ejqePx9O3QziSCbi7r6WsXV1CyJmdkjoYgJrV3mNIQ0gJo3DGFByQAIYx5BhNO5rxf/tvf+nOMRzGOT89fDPmecjDPDw+PX1LELzjXvafXn4L0NDFG6mZmhOPjujegLqDE+WqveuGBrXsjM4EHQsw39kVh3kec0ZBswTgLCABhRHvbVkhwogoRN5pNaMPp2+DpF49Cnf16+1q5hwdIY7hWThtZe3Wc5QYhB1aOxu2GB+FnnI85GkOOat69bWWGi1FDtApUYqIMQ6AwQH25Xa7vDn0+XR4fvzW3GLkT58+xTj89NPPr6+vps4xPj1+yDkTkbuVsrtbSsPpdOy9L8siEmNM8zzfuXGqqqqXyzmlcDqdav1919Hdh5yZ8Hq9EmBK6R79t95M+74tIkbkdzaNMCECU3DVXtZWVkJijqU3xK51cWjklnJW93K7bteLpDCN47aXnAYCHIZ85zuQYJQYJOU89tJba9M8CEIej3kYt20H8A79Ll0jwrZvdd8BwFyXdeldgUCCBImECODWOxgysZp2M0aJLHQnH6WEIWKI4EBhDDFurZ5v58t6fd+WatrcOm7kfSt72SopUUiErG5uGimWtn99/Xkej8fjE1EstTJwDIKRhvEBIArLMEz5MGd8GFJqdlO1FKdutbp2UODIzqQoJN1q63WQYZABOhRoft+KgXpwdet153/zf/lHIgNTRu4hhDGcTvM3D6cnc399ebtcl252vX5l4kFyQCbKIpkAQxhjCA5eq7l762vpq4MTKklXbJuuEifhEbQ4Uog53mN7VAIDYVQzUwLnzHHOxxynlCLjYPWaMuf4TWvYekXU5fpStaWcmCn4kIcjS1rX6r7GQMIRiUt19OOQnplzylOICSiykzeFpq7NwQl6q/u+lm1rtWznty+t78Nhfv7w7XJd12WfpgMh//T5589fPo/TPEyD8PD49JxSBsCU4h0pllJura7rmtKAiPM831+AWquqqfZlucUkOWezOzOC/ytRC0IIX798CSEMw5BS0tqh91q3UhcE1961921bQDClWLel1mLa9n1JKYUk1gHQGKy1jv+1fMzo18vrPB85TkV1msfWyuFwbLW10sFRm+aUDoeDqu5lC4mFhjTmaZrdvXXdarFu2o0lMouIsFBMkUW0N3dPISOSAzAlQHLoTgqAIUQOgQJjCJACCDPLfcokwimnIUZAPm9lKU1JK7SuDV0DJnRG9ERM5OBKEvLA5fb+8+evHKdhGAJz7RoDAkDK03E+1VrRgIWGmM2rmtZWGXvIDoTdcGs7A6OiKSJT7armEkJ3Ne/OhIGbtr01Qyh15//5f//nh/HbIGOru1kPgd1xzMf3r2+v18tW/Ho777evOVIIkZhjIIkokYc0TjmkPCLYtl3I3YCa7SGCozbX7qbWCYVczAmQg0TEBqwpRCI2gG7OGBPkWXKUxJRjCK7m5vPwaLo4aWt7qV8JnGkgCOZsyjFFoqK9IVCKh5ifrY+lQB6mGCaRUTipO8FAQA5uJowUEIu6gx+GKTBFGT9++O50fHh/v1Urp8OjVliuy8vrlzTG08Mjc5im08Ppobae8+AO1+uFma/X27ou8zzf8aCn0+k+y9+2rbV2L1IN4+8rkffMDwAQkWqPMZrb5XwZxzHlYOr3zO2yXIYcrVUW6Fq2fQlEKUvZKrhq38w0hAjKLDHGSZ2WfXOrriUwRuFt7w/PH0lAXTEECpIkEEop1VRrLSFIzLFbr00HCZf1hgCn42mcRkNels3Vr+t6vV3N7c44IaAc0r0bGLKMh2MaD44MQIhJQiK5D7IR3a1UaB29qxarBg6ltWVZt65LL2tbO2wQkoGliDFEAhIKAyZCQCFiDpRj5Pfzl69vP0vEIT0KsxbLMTpBHNKYx/W6QtkNzYmL1tKvpb47eAqI1pzE3UHB3RScYyrFFu09EiE5uCIoem2tlaKu/K//1384xuM0jN1
g3W7q+9vbT5frzTy9vV0IrdSX6/qZkhsoYUSshCB8Ly/fy5xOHAUzc1JTJDcHRSPW0m/qznAAYKAeJAI4OiJAswZACAFUBFgQcooOaIAOfW9nFhAPvWwOa12uXXsQjnJEk96aeSeuMU7CA9NxyB9jnNd1a17zMFkFJrhDjtU8xCCctTdomvIwDkfq4kqHh8dhmrblpk4fP368nM/vb197v1XbWrU8HJmjxICAxBxCeHt7CZFvy/Wnn36UwCHEEORwOA7DQEREdLlcRGSapvf3t2ka7pm5e3vYzFRbCKGVejeduRohUGRCtN5qK4geyBEd0bHbtty01RhiV0dAVyBkxKYOzDHE7IzaWttLqz2nQWJS05xjktDB3ZzV7+1eNVXTtaxIEDj22jHwmIbL+aKqIcYogk4dtJfSu27rptpDEEJMKY55TNMwHz+MD99KjE6dI+U8hzRgCNZar13NSAQcdK/3Qf+tbi/X959fXn/7+vXrcl1LWVstuIJ4YhDBGCKhuDJSVA+mFZ2Ec0x5vZ3fXl+c0hAfY5zUGzP0fU8SDf318vV92zEaRzOvVZujdS3NKrv03pHcsXdoIbCIlFIMCQCAEaD5vcGy171X/rP/679A9BgjMrzfvp6v75/f/ubLyw+1rZfL19fzb7pdqlZH4wCOYI6BiV2JDSl0a2Yagzize59SRgYi6r0ZFKag5oQODqpujoImKN1s74vVFnEGT2qVArCwCGJbq+2b1q4bge/l1mqNHkrdumoQZOTesPembjHNh8NTiE/EUUIE5Je3171c3FrRhVBUfd13RnKqTffYYZLxTtI7HE+U4vvb6+38Mh9PZd9//vEH082pd/WPH3855vn15adpnubj/Pj08PnzZ2Iw67/73Q/btjCHp+fHeT4cDsfee865tXa7XaZpBoD397cYBRHvZ4xt2wAMEcu2D8PAgOuygGurDRkDsQiVtr++fmZ0cqh7jUICuC8XRw1BwB2cABxFEciaqtYYJaU45wFJWrd7E1+Yg7AAgrp1J4GYYspZ0d8v7633eZxN/bKt4ziHEMuylHXbtg0RCEH3LcdwmOdtX9Z9kSD35loaZ8kjcwAC8GZGxAkZACJ0N21uiu6g2lrpqmpk4AbQOxTXrW/XZVuWssmVgyRMLMDixGTsW3etVdEIXTuBMVO63m4/nX+LDvP0hMi1d0Ta9gUiuOr78tb7mgNEjsN0ZKSyF7PG2glUSCmaU++9poBRDJu5EEdOARGpF8XaDYn/l//7PyVEYu9I1+X69va72ta1nd+vv7vW162/VKs557rZtl7dnYh73zuqAqGptuKAYCBeQyDmFigH4SEiaDWLQTDGLkLYxEHMUMgR3LtrB2tgYF219ptD7b4qdu9dqbR+035rrVvrFChEKXW7j357sd5VUgBHIhnGMXMix0wTOb9evuz1uq/rXhVAxEyrumCWNNpoFGMYUore7P3t/f36hdymYbq+X1sr8zy20o+HD998892yLfPD4zefvmtWl8v5+v7+8dM3v/3px9oqAR6Ox9PpIYR8p1m11s7nc0o5RjHT/yoSh/tB/3K5NG05DG9v7yQyzVMrtbVuDlXXOYe6b0kCdXh7e2H0ul62dYlswojuvW4izMJuXtueh0yECArQ0SCl8TAfXVLTIoTgXlujGJ3QXD0yB2EK4zgDeNlube/ffPMNgf785QuRnOZZRPbe97Jp11abOUzHYxiSILCTd6dAIY0BRXVxU8RkzRA6jgOmDCzh955pJBEAdMDebS+rMU55rqXf6t7qulvtWN0xxwi0FDIDBGtt37ujMWnvpni3jBLS7f39d28/A+PD/ETGdwLVXtZdW/BWt8Wqo3vkIBKBOjESAzgwIZO7kzcBVAIMEilIjiFLQvW2b2AeEPhf/d/+BIHMwE1aXc+3L3u5llp6NcTOvOQgH46/+PbDn6yrf33/21t/681q9bWs67ZWrWrVoSDgPIwhC4CHEEOKIBgYY4AQNDEmyYiBgHv
fHbv74Aa1+l6LAmzlupRLtdKhOSEgEJKZV6sGAI6M0dHW7bwt27puwJ0DGXirNTIKRVcH8BAzM19u59fttu8F3VBIHUzllE9jmomiqtay7OutbI1RDtPctN2ub6eHOQ8DUfzuF38IANu25ihvL2/X263s7ePHj137+/u7m6eQjqejdjDze8vx9fV139fHx4c73PN+iWTm//pxKMttE6YQZFlvKSZCXJarBHb1UrdeayAGNK271g7MWnvZbkSACNaVGO89AVe33gE6IQ95NHc1l5DjNBKCIwQeEFFNk4T7IqL3JhLieHj6xfdEvF2u3fo8zinl3uq2rRJDDJJZmE17B2t13yLh8TjP8zGGMUoackQmJ2II9zg0iGDIJBEYMCaI2ZAdPcYMzda+ddRlL7elLrW8X8+17xgJgZ3AxEBAW7fuRTc1rkW1KnTovZupNQGQana93S7XN+SeI63rtbayl91KIQJT751AIhJ1rd53Ao4uaKJGTdWVHLgrFCVAHoYxxSGIeK91rYDetfM/+tefANi0k3PVer6+b3VprVqnyGEapueHb56Pv/7m6e//wS//vsj4tz/+dlvVAdZlXday77X31axMw3Ec8jTNxEQEKc05MeAlZ8pCQpwju6tabWV1R4XQO3XV++xIode2GRiImJv1FmN0tGq3pqqV1F1ha7WW1bd2a31VYwO1jr00h2LWyZ2AMx8UwqVsbW9au5oNwzFBJgjT45BCWLZL2RdrLUpCDNo6hfDx44fj46dS2+HhUTj95of/crl86dUI0d2n+UhB3r6+CBIATIcjswDgPB/MrPcK4ETEcld5o1o3s3EcS9n2feu9A7ianh5O27btpYzThOiXyyXHqNb3dW1lzZmu54t1Y4HTMGzredsWZGbmcRz1vlNhUa1gigwIlPJMJLXVqi3FZN2b1pSzSFjWjQnRXVvXDta7apmGh8PhQ9W97C0Fmae5aFv3rSzrvq1p4CmPwFRa7a062OFwPDw+K4C25oQhT8hBzUkCs3gQdHVUFcHh4NPA4wgUQDhkMYd1Wb+eX7+u7+/rtfTeuxclJeu0mxijeSuAwB5KpX3feuuGUDvUUrV3RYwSSlk/v30uViTIUq4AOhAByzAdJWW1zd1due8GBlpqLe4au9LetKnvrTUl4cBR7q+4qoEzGDRQ/vWf0hBnBAAK7rzt+7betBuqhNC+/fiL7z7+n+fhOaUUZDgNv6jVrssbEYBL175uRc1OUwiDRJlyyuMYHaM2FypIK8uas4TgQi0EUqdW1cG6YTeqvZiBu7EUAPeGaswkKQiQAZkQuKkbF7feFR1rhW2p+1q8gyuo3lrfl/1LEJrjHIm9OxNAw/f3q6MSq1o9zo/CGWkcs3jX9bIRyLbut9t1GKd5OpHkz19+XrdFJLy+fH0/v03T4dM334P77Xo7HI/LvpStpBifHh/VVdXGcRrHqda99/7w8FBrbX0XCczs7veYZ+9tXdfW2t1/CIAhhF61lGpdwQ1MxyG03i5vb2OKh4fjy+uL7gu0LQm13hEDIA3DWGptVYdpkBgMTM2I4zjOIhERet17sxCk9c1dx/k05GlZlyHnGKetbkHU1tK7TqfTMB3NoZdVcjw8PkHX1tpWys8//bjt2zjN4zha17qXptXBHRqGKY0TRsHAEAK4IxGlZB3AgJFUO4cASL0XxCqIiBSJOtq5rud1PW/NfVBmEajNt34LmQNH8OBoiNDatm513dq2bdZabbU1MDNADEFK2XuvMTEH1K4dXCQe5gOg7evqquhe16ZudW+9Wzcs1Xv31sydooQQQwzeWqsKbKTNqjf+o3/eA2WmAZQRqZuu663W4mpqu7D/wTd/+uHhV7313rzVXrflWq6t92mcAXDdW4rjlA9EIi5THCRmQGJU03dOilTVFiYmZAohp5PwtJVWStduQNR7b22V5Mi9Vo0sUUaBZAACjaGIBBVxj73W3tyctnXXBr1Bb9tW3679vWPzXhPEo8wctNe
yrXuxpmgpCgWOY/j0/Inq5N1BWSi+X25d23fffneYHxDw7f3ldz//5TQl4cGBHh8/DcNJ1d8vbzkNdd/2bXf0HEckWMrycDox853s0LuJBNWeUrrrM+4IrbvgFhFKKcuyxpjuEFxCcLd9v7l3tBruUo0ol8sNwQk69Gratm0nCikOQx616zCk3nrvjYRCSIQxxdx7uwewc8r7vjv0+TBve12W2zCOKaZaIaQA1pbLNQ9DJHt/+ULEpVe3VkohksM0xTQQSy/1tm/LtnpX79paRQIEE6Dp+SPFaNCJgCgAIQVxEiSu627rldpGrUAtDEruvTfsJgY55xAHMtkVlSKnEMYjYbrtK7MchiMhO3UG7Qqtw+XyUvYdNJIHrb2VbRzGwzxI9qWtrbbH41Mafq9VZqQgEJnXZa2tIIFqVG1Fb62rAbsGcGTBmHhK6X7tdnPr/Q7s4T/5l6EUiyFbd/BurqU2haIGWrnua87y3adfmxKAKbT315/flp9QLBARmrMSMuOYOU4BwMBAwBWhOHrvlVCRqyq5RUTI+WHKn0Smy+1ctLXeiToLtGqIXRBBmSU4GFp3q2hGQTqDoGIPptLNvRqqhWCC0Mw2uxERIVDrYmjurWpR3LR2aHGQnGnfvgaSx/kX1jQPGdEV2nEYEw5drbT1888/ENLj48fn50+IsK63rs3Nh0N2s23dhjGv6y4cQ5RhGmJMAPj49LAum0iotR6PhxBkWa4hRGautd6Rb6odAEppiDCOo7u7ubadUBl1vd28ahYJkZzg9ecvCVCx5mlyQ+EAgAQowq3veYj0e99JAEQEIAbz36sriNkMJMR5PrjD++vLhw/PhvR2/t3heBSZl+2Wj4fhcLhdv/Zal2XrpS/nt679eHjqTbf9dno8AfK6rWoNAYeUCIkpcYgiotq9dQQEYSAGjoDACKAFW7WygzdHIJl4PgiGttzW9epqCXjMydCrgfIAAXov6DDkwzRPgZMDA/XubV1tWwAxMrv1Ns6n54dPQ4oiHhNrbQjy8OH7ORwRhAF6W5mRUbZ9rbq5WbW9WUcSxkCGQBwCDOPIAYTykMcUHIHMXdH5H/+bb2t3BCInJkOCqsW0Oba97L15LQuJMvPb5a3Ubd0vS/spBmRHxc2hkAEhJ84i5IhNm3ZruqsVQgWoCEYcjvNTSiewMcfjfHhgHr++f9nLdchZkGtTAmVkYDdrwkGQiJ0luhMJCHVwri1t247uQ5DTGIaBhzHHELU7Wxhj3rez1lb7vumytaVaG4YcEUj7XmtIKecEoO5gpsv7pVx2ILrdLkzhD3716198/3e+fHn52x/+c+/bYT4+PpyI6Yff/PDdd99fb9eX97df/+Gv3ZEEm+rjw0PvWsqeU+bfu31Kt55TBMDee4zRzEorEkTVtbd79jVIKLdr2a4piYCcz6+380sv+8NxLtva9r3UbduXw3gaxhyitNZ774AmIikl4uh2RxX2+5KhaUHAex4bwGPMUaJ530uNUVB1ud0Ox8PT4enr+bK3/SGP1rT3nkMIQQx9r9vzw2Ngvl6vzBEIUQIhuXcHkDimEChGksDI5g4EiAxhcARk4pARmYTMtLcOABYycQbzy37d9r1rkUAxJnUpW9v6tZNO6TDwIU/HYTgEiW7afKGg7uCdYjx8fH7+xfe/mqfHECLHOA5DTulyeSvbNuYJzFrRXnTbrs1uLHttu+reejcHwoB+r8kbUouBOPGYD4MIsoE7Q5CU+J/+D98BcYpjDDOzhoRuVtoGSKBTLbqX/nb5adlvL+8/vLz+5aZfwHugFsSZUEKX0NHZkXdrxKitLst+P76BrWBmOiJE4eE4/eEQPxCZBBrkgwNt9WJK1hXIVF2C5wEoYpAh84CEmDgFEVQmagplt66NUQ4yTYI5hxCGIRxjmAljcAHv23Ir3XZfSm0sPATOAhLy1vrebnl67N0EhlKs7wVVS605D3/wB384jvMPv/mbv/mr/9/pMH3z8RNRCnH+L3/
xn2IYH56e//Jv/urv/b0/QeCvL69DyoeH03yYf/rd747HE5PcSfGt7eYeQzBTdxjH0dxVe601xqC1uzsSDWGAbtu6btuWJRwO09vb15eff5gCzPOpmtfthr0ScoghjNGZDJyQAYA4IiIw3JNFrezMTsSgGoQcWq21V2XBlHLZTdVSFgdflm2YhsBYr4vt4ADaGiIic23NrO3bcpxP5rDf1hhSTgNxADBhCndgyjhCmpyIBAgQnFwiSjcHxwCSlCjkg0gCq32/kSSZj+r+fr2e1+vSq5IYBWttraVIPw6fnvK3jNN8nAK7ILe+o/BxGhjaGPP33//y9Pgw5jFPMUqOPKaIHNvl7efz5WsIXsve2q3r2ut+VzwAORMAmJupAyGaq4RK0lmYxJkpBUFEAEY1/mf/+o8SDZFhCDlyTBHvTxuMUhiwn+q+rWu9refb8mWr54575E4CzQpRmQaexznlANq3fSsdiAfv1HsD2l21OeV8BOO9bCzhOH9jls2s9x4k9MaX24UQBAI6SMA09nkcAFibAyd3YvEooXpTU+2m3QxkEhqhRGIJE2GMfHyYfxEpg1lpftsWA2CnIBhHEhFGZPbrctt1Oc4nbMMQSHtRbYcUjsfnvdXbZb+efx6Oh48fv/vpdz9spV0ut7Vcv/vu43Ldpjx9+vDNf/6Lvzg9zPN8Oj0+ff7pcwrhu2+/e3t7vyfkCPl6W+fDwTq01o+nQ9kqKBLBHZ8/xKHt+zwls91V9+s1IA45RJay3y7Xt0D4cDoZ9o48TKN5F2ACigKE97OOBg58b2QhAcI9jddbL6XmNLvjsp5DkKenZ+3aWnOAIQ+B8fb+RubjNKKAoqOE0ntOgxAul6XUvdUSxzQdDnZfJiCGHCWEQVg40DAgCpGokFojA5fsYbLegJ3jZAbYC2AlTwxgpRACQ9gNd8aX9fq+ree6fqlLB5x4jDGN80MMAwGxUBDpdTO75cnSMOfxFIaQh5xTZnGhDI7b/k5oLNBaKbV13S+3l1L37lbaSggOoOZqWNuuauauWBp1RwCsLBJjgN97nV3V+L/9l98/PXyXU1KrEpkZw8BBInQa8/BweHRtt21pzWtlQ2FAMyMGiTykGITMAQCjBKG4llq6InFX3fbSAMCMkXM61GJ72URClIdt1WZFu+5lVe+AHHk8jBOSjSkQIRNFieYEICwMQF2LQzcNa+l73QIxE1GkNJ6CnHI8jnkaUgoYaqtbWXv1KMLEMQcWUi8o2qGJ6pRPHGP3xkJBAqott01VDSoTjePxr/7mL17fXp6evs1xHHPSrrfr7fvvf/n5y5dhTPPhMEynUvd1Xf/JP/4ntfTL9XY8nlIKtVZTnca5tSYi43Tcty0EykO2rtZ6TNysIbqrai+kfVlfwNV7SYG9t95bSBJl4iDkEqMQ3OvgfifP1b2W2kIId9oKk7gjYUgplVaut/M8jyGEZVnQ8XQ6bevaWyt1jSG4w21dzREluAoRifC6bYiQUt62ZSvrbV176zEOrRXEPuRDHmZhdcrhcEBhJHJkdEZkYIDMIvleE0Oo3hT3zbbFTKG3fnkz7U06kRPm3Vop+Hq5IsGUx240juOUp2s9e3cWQvBtfTe8xgmBwNFSTIRRKxJi13WvL01LSmlMc21Q9lpbXdZraQUwAKhp36u2pq1U1YoEgOTdOlYKioTgbA26OqC0zvwP//vvnp+//+7T32Ecm14BIGAcc0qJU5pP0+NxOvZme2m9Y6mVICEIIuUhzVMSQVWo1RHZQUgA3XpTA29OTokRu3ZAYcl72dZ1Fw61luV2U3W3Rl5CYGLmSGngyEPOIswIEuJo0M2K9tbca1mYFaCVvSKgiKtpiqNQTjEHZuwWxNXrtlT1LkxBJESiyGYVEKbhgTo23VKaycfAgtZKLQRGALXvIcTPn39a1ssf/OpPvvn0i9vl68vXr8zh06dv1q0EkdPpuJV9PAza7O/88d8Bpx9/+p1I+PTp075vZV8R6XA4xZg
AnFjADcCH4YBO2ru7phBa61HEVRH99fWn3sttvQzDEFIuvQGQGR7mIUjqvZhqCJGQmcM0zEyh1WZu94AdIpnZtq1Ny2Geay2t6eFwtG7n81sIHGNclgWJ1r0cTw8xpt6qtZKGbKbLtrZeLu/nWkoMSWvby62tW287MaSUnCzlkIYDxyRhhnE07bhXSBkPB/MOgJRnbH5fvxo5CiAjWF+Wde/lfb9ebq/b7QocYxqqw3LrxRViIiPmMAxj29Z934OICHXTps15c145KgD01oW1tovBBXkPkXIWd2UkR2Hmve57NeJQa2v3BmrXbd/dBCAQkAM5AiOjM/Pv/997VSDkf/5v//6cnp8fvxvTg4Pv+2K+Cevp9Bz5lOM0DhlwvK2L4bVDI4eUJAQhInQHYJHEnrypO3dsRCKMHIwkRg8oU++4t6Xaba/7dVlu29l8K+2ybmfTxurMEVkUNAlJYAN2S109SCIMbkZs1rt7BTQUUsPeEcCwt9pbCAkBhsTuvZat176um0LNOaU8kIhECSGZA8cZvKr1OKaY5oGnMSdE2ta6rmcE/Pr1xbT+4hd/+OnjL76+/vib3/7l6fHw8PhhWZfe/HA4AuAwHvI0PB6OxPTl5evlcv7lL39Zyr4sy5AGIpCQRAQAt30nhBjDneQE6K1UYc4puakQ7vt+u1xykt46Y5hPx23rSDEE79qmaSaEUndwiOH3shY3jDkiQu2tqxEiMyF566XsdZ5O7qjdUhqEw+VyuSNvm5qEhMA5JjMz7QomgWttrZQgsi7L7XoLMZt1LesdRt261roS0eHwHIYRRJCR1L1Xgo5Z4PGjtXAHN5kZRSR3VO8kkmYM46a+up7Xfeu0IijHW63v26pOQgLObpCyNK1b2bU7C8hATSt4Z7Km4K051G6r2d61AzozOythRfYYxiC5m7k7gDtQ0Q1V0bxV3HfrZiQIhEwI3khEQmaIfa+q3RT5f/x3fzLnj+hExL336/7F4KrexzgN44TKKU4hjute1vaGoaMZmcYg6NIL9YagHFFcBQ061Y4QKTIRAQkiGFsPvffWb6XXve9bv5a296Zlv2nfkQVAmCRxIAOUEVwAyaB222MM5g2AhymmMDKmJFkwLGXftQFJa4tZVbNmXrWo9a64lc1aHcdDSqecMhEadJOGQUWCG3dHThIo9NL6vgH01vv57S1y/Oab72IYz+f3l5cfP3z8dp4e397Pl9v1OD+cjk8GPabpcHj67W//Wl1fX78Ow3A6Hd7f30/HB0DqratXBnKwbV0ZKYXYeydxb92tpyCmpdWt7HuK8e3r15zoYX7Y1xYjg4GrT2NsTcHZUYXZze/CbQeVFFgYhM3d7iJR7yKUU+rN3UFE9n3btj3nIaWhNYsxWO/buiIAknQFRdlrV/PT8WBVa6kxZiTc2y4xCYuZmyET5DgmyQIUUobH5x6jgyqzoFgzm0+cJ/SOAdrbhdqGblqb3a61lPT0YTycRkUzK5F35ZROzsPaLk13gmjKtS9OhYOVsnYHI1C91fZmoNrYK/Ve3FvvaiaAsavV1gBsHFKMYwiht/sIuCEC3V3Oe4VqURk7ABAKEYJJkQghYIgRzFqtXdt1u/F/8y8Px/nZVYLkrZ1f3n8A2IccDXoek+mIkEkcHLsWAGASVwMjBHJHcyKIosKQgwQIqlZRzQGBoys4ILowSUzJAYgIzM3AuiJUBhVmBwA0CQFMWlcAdVMzV+tESqwGLhiCDCnmlNKYhiABgUBVva7boobd09771lZz32sx6k8fPohEcMsRiLz7RtxjDAh+u65gkHnAJpnjvm/b2jLzmAcwLLXvpUzjLHG4Xc/LuhLSN998L0HWbR2GfD6/E+Pp8eHl5eXh4bQst2+++SaG/PXrSwyB5K64bgg+5XFIedmvDAZmoIbk2lurO5o9TMe3t6+1Ls9Pj6W2urdhnACs9eruDr7v650AVcoWUyZiCoJMKSVCBkRwW9frul2HYch5KqXe1w4Avm0
7AEiUFOPxeGitvb6+Xm/XPORtXeu6aO+APh0PXXXblpiiEJdamQIRAWMMIhJiDMwQY7Zh4ocHYvFqDTsFoS6/t3UCooG9vXnrHEGw7ev2/vK57WfQ5uSNmHFea40SxyHc1tfee7Nlh7cGi8ECvCpsDq21um+tN7cK662sa6lFe6XWqfVmZq2b0JDCyDIigHkh0tqLWQNDBkdA7GpamHDIiQIobRRsGFKOLIiotK71fF325vztP5Dj/MgcWq/d19fzZ8BLiqDutbFwvp4vtRWiFrgKUBAMLGZdrahWRJ/inChHBE6hWDPviK1qJRQoYdM9pzHnKcdjknGIM/W70J0JiEECMkA1qkjd2N2w1lb21nTvtXmnMR845N4bEYkEQYoMMcTEQQK4S+APKT2Yc9dQum5tVwALUVLMeWR3ogbUzDa3zcndQatqU6aUOYHWUppAADN3kBgdcZwO0zBdrq+Xy1cR+uNf/0mM6Xc/fc7jaRhHM/vjX//6/HZhljwM83z4+Pzh5y+fhSUPg7buvW/LMqQ0jxOg3g+s+7oQuhC69bsOSHttWpil9q613sF3IswxCBOzANjtuozD6GpddZ4Prtp6124pJQIA8CHHVmutNcXxHlMjohhTjKHU7Xo9l1K76jRN7v7+9lL3JQki2bYu19tSyjaOiQCW6xUMcs4IIFGIHTFyEkcDAw5IHAnI1xtrhXsHEioqdmY9LxwDJQR3K404h8dHInp7e3lfrte+N+ghHXTXaleVfbm+gC81vBhfEFvRc+lvAJu1tZTetlBvvK5627BsUDaoBddbqXu/3WqS4zg8WwdCQ1Lmilxbr6VuagCEYiCIAOrYIXQIFaQLGVOLRK6+1X7b1rXYPHzPv/qH2W2PnJf1fanvTfdWX4VTTuOyrdt63bey1ws6uLF7D2EgJiC/b30BMQUZAgPLrovZZqoOJAhaSzV3yDmNYx5D4BRzwJyHgcAQegByt04dmMjM1IkQWLR7a7psl77vgUOQMORhiHkrqtgpKQUwVQREiMfD8yl/CwbdXD0Urc1WkVygI7RBEiiYmXprvqurW1VoyLHZXrUEOblKZMGq4H46PUVJKad8SOtyfv38ZRiffvmLPzgen19e31Hk46dPaUp5mLdlqbUfT08p5I8fPvzudz+q6dPz0205awNh1t6GManWXss4HZftFpgIupZdiIi8tWVdLmYlxel2XtwrgWrbAT3FcDw8tdZDYHQEw3GMe9l/327p3bQb3BcOtq0lp0lirqW5G7GN09i7AWDOsbV2PZ9vt4WIjsfTOEzLuoDjOMxEpHW32pfzdbku67Yq9Noa3AscQCych4OEPMQU8kGiOAJ37V35eDQWW6/etxCOOAxwvXEzEPYhGmeSKR7mIaf3y+uyb7XV635tDEi92OvW3jd9W/yl+KJta+Vca9cOvWBbWyvtei2v7/u6t7JrV+3VrJo5l821Y4qJKQAW85tDq2Up/QJAhKwtkylZh8pAySOgdOgFDa1Lb1y7VrOOHOXh+0//gP/eP3u2vpPUYm2vizASBwWTmJj9dlmX29p0V23oPaYpygCMTuTIHEBSFBkEySV2atve72tjdOulmpJCTDHMQ84hMkYiijikMNa+VVtAqmkMcZAwmLlaQ+jAaF5bJYacZDSlO5UJWLTvrV+VOiMIEIVT5mN3dLhvKBeFKwVF1kgVqQGjw77Xt+q1mjqZ0252A9mL73UvI41RMphHl9M8T6cnRGKi3vjLzz9KkO++/T7E4XK5NW3PH5/GaQIIAFa2NY7TfHwY5/z25Yt1/e4X3++Xa9nqaZ5f317n+ZBTeD+/IPE8H1W3yIjm1/ev6C2GQZj2968GcJwftuWmvZBQszpNQytWyz5N47osDw8P3fpt3XLKhE5AXbuqau+MnFOuta63JUq82+TMVEjyMPTet213N0DU3rZtJYJhzDlkdHxbLm2vcxpqq4AeQ9jLzsTu2mtliQElpcxEIeec5xwDjTOKuDDyiDS
RsEMkpq4dchDtsCyASIFZxExpLzKOx8OpNV2aOfcXfd/bbd9fipaX7fPtcvZWtlLeL8u2mLUDaNbuvWvr/bzv+2rgdMfoI0VmCpFNu7uhooMirF1r7Zvb1YmrUwKraqVJ54AhNrKlGvRWCUwGg+DKASamUXgcwyP/o3/2S1BF0jyF1joAjPnR3XZbQ3zozdfl0rqhEzGFcGQ+9g4kSATGRBLHNImFZiWkrJj2ukEAdDTFrtEpklCWiADE0QyYAvhoTqW/qoLIgBBTeEhpROQGbq6MrooOnOOYRKKm1rx39gbey152N4gclAJYZxJERsSOV6ebmgLuFB3dyOJSz7f91a113dR7CAy0OhTgTpa4jwIYIRzG45w/dUO32ns7XxYWzynelr13jTE8Pj4lmW636zxFwdBaH3JOY9jXFR0eP33Q1i/v7w/zuNd1XW5Pzw9IsO/b08PjNB+W9V1bBevL5SzsKWU0ur7/gJHTkLfblVRDGs287DtGKWUNgZmp1sLErVdwDxLNDIlSSuiwrkspZZrGYRy6OjOKBHBat33br8OQGMnUsiQm9N72/ebWp8MMTExyWdaltUjUe/PAmaMbdmvunmPOQ+Ygd4JLSiOHiDkjshlRDk6dJKII5InC5O83Lxe/o+LKBl2hN3Iry9LAG0BptbEv29vn9x8dKnNsFdet3q79cm2vr+deOcZHkeSq6KhQb/XCKAIsgvdpz12pACDm5ozdSrd9rbdWqzkWBzMD1w66gzdGpBagB+E4DnNOmVJiEUlugTEMkZmd/7t/9QcpsHAOSSTGIGMMLAHNmqsC+V57bUg0CiUEiVFEotVG1CUEcxBEQjSlbp5zcrR1PROouzok5AAu2q3U8506D+4GYLD2rog0jiPjg3ZiCkRJeGAiVe/WSn8H0ofpFGBC462Vva5dda+91R3Q3IEZXMNWXtby1g0Ox+nx8SQS9l7ExQ1L8+aO1AC7mhk4UycyIBBO3obMHx6nD1OetPfz+afr8rovN7cK0LdtT3l+OD0dT4co+Xo5jzk+Hj/u21tvdZxnA87MIlK1bpcruqaceitBJMbYe0FvHz98WJfry5efj4exlW1d3g6HKJK3tWzrl9u+Mcm2LndVLiC9n18P0+xutZRpmlSViYc8xJTMlJiPx4Pq3WYXr9fr9XqNMcYofv8FAIiIKexlzylLDHdJ831wva+7de1FyeHx4WBt124I4Xa+dW+UWDrGnB2USUKMd8FrGDLliYXNzFEgToys1EwYgTAFar3XG0RgCQaAyCjsqtf19ecvvzuv7xi4mP98ffv59W8YcEgH11g2//zj8vq2dQ05PQ/8kGIQwsip9bJuZ1UNAZGIBWJmEWGkwBBIAqNBbeWirZhj62KIjlBKJUTANoXwcc7fPz1+/3T67mF+nE5TjoKMxGrc0RS2Us78p//TL4fhUSg4IIkhakwSQyDvpd1UoTZuWoko8EAwppiQqPfVbRdhhlRb67gjRnQ2pZRGQa+lGGg1IwfhwVyX7e22LSwBlEq5qu4ORkTT8HgYPuidt23CADmNiGOtvteLt2bmDkyARFjB1ma91e5r1RVaQ4zbuuzbAgRjPk3D06en7x5PnzKe3LC2JUgmj9frrbYeOLh3AiAMgCZBMx8Tfcox1tLO71/3/cwcCLC2DZw+ffr+8fmDGrr55XI+noaPHz9+/fn98+ff5GGI82HMg+3b2/ur9pZDiJEAgADd++12US3H+XC5vC/LNYV4PKTPP/8EVgA8pbnUzawTMBqZK5CLCCIMQxJARFI1+L28kkiYmCTGfV1rrSGEbd8khOPpVGp9+fp1bYUBg2CrxRyGYRSJiCQcQhREUHdTHMa57j2FgL1vt/fjYYrj3BW96bZdt+Ua3BGN3Ls2U43pXrKPPE4s7BAo3gGdAFPi8ZNxaOsbIYiM6IDCLpl5RNaqbbutL5e3L7evm5bOoTuetxu6xBAIad3aT68v13IpBofpwykdIzIJITE4tFqaqUs
gTshCjEw4hDykaYiHO6SXQFvve28da3dV7dHTINMvnk//6Ntf/oOP3/zhN0/ff5yP0yASgFid99bX0q7r7XJ525ad/9m/+nUOcRgZCNUdMTBDiIzs+9721bQFV+u9pRyGdDQdCQjctSsRO4ZSaq2ViGMY3RiMcjowTbVp8xokOZM5tNrWpSEAUyi11taIPVASpjGNkU97W4yaQDYLzIfj/CFL7rXXfqXYHaF521rprboV7V2VGFLZilltzdxknh+n9Aw6aOspTqfphEraNvC2lq3WIk5qzhQYg/kegqV4UEUGrL20CsfjKcRxW/fA6cPHb2IaX1/fe28pj6fj8cPHD+/vL3/11/9Je/j2V3+c53x9/XJ5/ZqHeJynGNhUJVIQup7PrdXDfAgs18s1Rnl6fti329evX4JEV09xaLUGRlNjCSFKYCbB6/X8eHogROIghMys2vdtM7daNwBjhNvtaqaBAyDGPB5PD0B8ud5u13eEfphPUdK27+7uiExs6mYQ01BaX7bt+ePTMEYl37dy2RcCR1cOGJkRqLWtls21IiGjhJxTTAOlOAZDonjQmEDcGIwD0YgYuFZkUgciNEQKSdEMMUqMks+3y4+XL1fXDoKQx/TL4/AQjFCDg3x5e72Va1VHpG9PzwjQrKurUem9IqbIc+SQIkX2QCHRmDAPlEgoyRzDwQDW/ebehHn09BAffjE+/tHp+MvHh4eHw3QISdy7X+u2btuylrfl9na77Ldr39bSnP/7/+E7JiBKDaw7pxSQgFkcuDUvtZs5ALoCow05BQpjyJFj2ataATNyol57vUaRlAbiCKgxpphGVKxWJAAAaTdTQpd5PCny5fZlb7dpOsYQGTnKw9rflvqaQ4hyDJKYaBpP8/xk3BVa861502bbWm7rufWdMTpBK7s238t1vW1g6TA9gQsDpzCcxoenw1MMtK5ft/1MAOjQq+yLIpaUmGmIYQAiayh8mPMhYOjdjvP04fFD6/b15XOK8fH0+PD4cDg+qPoPP/zlXtY/+ZN/FObxtz/+ELQfxowBh5x7rdYqEZa9rNtyt2Yw8Xa9TkOYhvibv/7L7bY+PnwwMwcve2l9WcoSQ5imad82Znp7f+vWRcK2rUG4tcKEiLCvV0bw1nIIzMHccx4ohLVVdZumaRzmVvfr9eKAKecUUtd+3ZbuykympmrDMH79+vV2vRALx5jG47bst5cvpVzdIQ/DMI4h5ZCjs1CMUx6DRAnCQjGzoSF0SCPPHyHPXr2vV/IGIk4s7N6679X3K0fEOJt2IXo+HNMwdyA3vu1UK378MM95aJsW7V/f367bTpK0tUABQZdyM7BitWgVDkNAIQtSY9RAjK7ujZljHEKYI08xBWGWhgeWE8ePFB9zGLMQE1O3fb+8rS+X6+u1/PT6+npdbktZ161vJSLMY+I//TefiCOAGmJt4NCFA6ALC7mXUtZ9CeIpZXDLIYQYokmIVvray4bYEYl1CdS6vXMKIkMKkRNNeR5Ydl3VvWt3MMZEPqR0EMnrfn6//MQSDuM3BCFGZ8FSbm59yg9RsjsBcBrmcRrVWutn1JaBvcCyXl19ig8SUozkTqq91r6sm5kJp3mcT8NzrRvQFgR7uem27qtfa0ULrbnEPg3DEOc05CAHwhT9lOh++ovotrzfLpfbw8PDhw8fmIQZUh7/5m/+i1n7g1/9XWH+6etPD6eHbz588+Xrz4BAjgRQtW3bdjufp2F0MHez3pbL++PDVMt2fj0z8zDk3krMUnv78vKzg8/j3A3cvZey7ZshWq3q1sp9Xa/DOA7DqGoI1NViTikOyBxidKJa9/V2S4KfPnzK41S1uXmOGQkVrPd6T/C32pblNk9TL21d1nZbiCCgr/vSzUHd3ADVMAzDOD88xukgMQ05pyGnw0TxxJysLiSDD0dglCAAytet9xWtowTPAxGAVlgMvZnV25evDeDDd7+Sji/vZ+BxrxvYLUkA9b3vr+eX99uCxO593zezvuz
vVVvrXlthVpZdqRmQIxtvHWtzNuJ5OpEkr0joYxoPIU9QHxJGh+7QGmz7dlvO19v6u9fyV1/Ov/n68rvl/bwt+7qw+8MQn2Z6PgT+J//jJwdClK6goNbdYZUooJWMBSeySMDCwcmISThrgaVeFd16V9uMLAgFEqXSnSXkEJhjIBw/HB6m0+m22m29qnXhw5QPMQ5I2Frdii3rMk+TQGLyIBQT1r7XfjO3yA8s3H0PRGOeUFO/XEO9Jtgj0hieh/RxGKaUc+u91BUAW8HLednrxbQAgekSya2X8/nrttu2Y9nNPQCASApBUsZxPHA8MQZUhh60921f12Uh89PTcRgm7a6qh8N8vV7B++Fw3Ld+uVxyzH/yd//4z//yPyHR88NT2/fb5brUcr68JAnTmK7n92FI1/N1nkYRvG2rdX94eFRYupaUhpTp/L4ETixB1efDQa3X1pLEthdmRsQQooRoiCEOKY85DxBE3U3VEQGAmbRVUrte37Z9m44P43QspfRazL1U9WL7urVWgQCJVDUwai+9ln29FC9I0vcKZCEKUuCcBZGR5vlRjqccx2E8pvmIRGbG89Hi7CmZdacsMrgMTgZ1pbpRmPvxieYjNW9vXy/vb+fLzz+cX3+6XN5vb6+X7X3Hbd1TDDlMt9v72tdLebuuF0MFMMLe2n7bt3W9oGnX5l6dSu/dzen3mR5256baYZ+HmTg4igCK7UwlEwLO696v1/Xrevntefndef3hdX9Z9lttG7SOHbwPSY6Z52TTgfmf/MtPtdXmoL17Y6POdBGyphEZj+lw4Ec23Pve4WJQGFIDqR1dVSL3uAO2CQnRAb17RVJwDzCN6XmYnj89fBckfz1/WcuWcBYaOVII0ZRrvS37167hOE91WxiRCQDOdX8rZdEOxE6o2inw/Dh9nyLfzj+XyxJgmA5P4/x8mB7TkJf+ftk3LZ2qtKplf7vcLqMEDqnp2nQv/bq1i7aoxVx3EgpIg5ziOKC0QCzUte1dQ9MGrhmG4+mIUfatuLcPH79Bx7fXV1N4ef/aepnG09/9+/+n3/zmN9u2/vrXf+TqX19+eru8EMjjw/M4ZLUaorhbDPzwcKx76a3GmHJO21YIhBDNTIKcTh+3bTEvzCBpWPcGyNNhIufulqaBhFNOpVZiliBxHIlnBUhZtGsvpd6rwyED07rfwH0axt765fze1jOpMiAilb0exymm2LyrOQfspr1pihGZmwLFFIIMEkOKTVutS2bO8zGMQ8wTMNZ9VxZ5+pYxkFfsHWjUFCUJhUmJ8PaOAJ4e4ZCY8/L1d//vz3/ZHP7ib/7qz3/716/L21/9+Nd/+/oXKBpBz7ev13q7XM7n7SsFZzG30ro5MBMBsbm0rm7sqODGEAyidhT2kPauG6cgMZSylnYTKOKbk+3At2LvdXtZy9vuG7KRABBFHwKTdnAg53nCeYQhMv83//xbU8VO4qh+T5VVC5VRxzQO6TjmmMf5srxv9Yyubd96NQPaew2RUuhgHchqp2JguBlQs1ChPZ2eRzliG0Kc1/6+rD+jUsBBREKUIAOAubVtvRCLQyzt5rSXfgYCbf769kNv13F6DDy5o3h4HJ9zPl5v+9uXzxLz/OHT4XAMKRbdlvXSqt4uTZSQgyORYZPrz9e/ei9/C7wBiZsBAAUA3gNhTk8cCHExqG5dMWN3dkuSx/xgqutyCxIAadn31/P7vm2tlzxO4zR9/PBcavvy5cuvfvWH6Nha+fL5s5l9+PBpmkcOzATuEEI4no5RQqu1lDqO47YviO6gx9OhVAtxSjnt+x5DMndDckcJnEJUI4qxW2fh1nt3W5faDaBDtyaB0KE37aW0XhARYzpMxxwHNwXiFAe7m+7au2uFXgnU3CXGGAckZqAYsoQQQkg5xTh00xAI0FPOOQ+glVynYYhpRLJumKZjVyBkGh68me8XFAIJpAqBKH3bsMnrjwRNc+DhMH76fnl9+e3rb0Mefvrp5998+e1r/W3d32p
7b/t2Wc5rub29f73c3kGcTFk6kg1hGNLASOQsgRMnwCgysiRVtI4SO+Y1hB2gmLdl20urGN3YKvFt86XjTfuuCiQxhZCkgxo0EXcopntKMk7iYh4D/+N/8YduLapnlpBBWTfdPTaC18D5MHxzPDyNw4OBXK8vte297rWqORoZoBLsUdyid2/doHl1ogZY2sLuh/y8lV513cq72i1KJ0IHtk7mIIRCZrCu+6KGe7uZNrW27XvrtdVyuX4ptqfxFPmBANFTnj/k6fl6296W6/j0MByGIGHZlsv5a9l9X0y7pjTEey9fnERab7V0EmExSZApm1fmFIeHICMjGtduzU3ERChFnrXq7fI+5gHA13VrvYWYkOneN398fF62xQxExMzmabhe3n766beHw+njxw8AkFLc9v308OiAEiIgvLy9DimYOQLV1pB8mo+qqN3NKhEJyzgdendViyG0XiUPMWWkYAbmmOKQ8mBG75dbt45ErcG2NjBnohDEQ9rXmiTkgbvVUpsgxxjM97JuwjyOsyMUbcgMDqrIRHdLOXOkwIjgShQCUhjzlPMhpIkopRCpd2tKw4mPs3mDOFJ6Aqb99iUQY5x63x1ziJ/awLSf+esZasEkz9NweV0/v5x/u/z44+vfWq/TIIrtvN7Ot+22l+uyNzAkNPQ4DnkcU47ESCDCFFIQdCYchjFLtAZW3LlBXKkbgbnXrqKGCtUZO4S941ZrV+u9a1NzMMDWratJQCYlghAFAyhhA+R/9Wd/ShAzg9sC4pJduRgoWjez58dvDsM3gfOQvlnWy/n8BTEChiBjYEEw1cVRMTCG2m1X6BZAQoCqddubwqVd9/altjfvhg7V1ByhgzsgV6DCqFX3ZSutNu3mTtqg9WaOzeH1+vNW9uPx05BH5KE3VROZnov15ls+TUnCvtzOb1+u5259aBWixxyHGMZEhzEdcpQGa/MVEDmwhd7J8vwhpw+P8u0kc6VVcQdXLwI4gXJdyjQMwrSui1l/eHqUkNyRkB8eTj/++OMvf/HrZb28vL18++03puU//+f/lzB/8+n7cZ5TSrdlzcOU0nC53ESkq7pDENr2nUVUjTmqqgjv+8KBwDHEGGMsZY8x1FIcIMfMHIRjkJTSIHlAiSjRTEhoXfcYBgNs6o6kbs7NzPZtN1dC1tprK+otxjANxxiGNJ5kGIjE1AJ609JbBfDr9WZdY0zo7AoKFoUIgTiGPKaUgohpR2SQSMQcR8uT0cwyRrF++9kghvTB/IpmXSaQYD9/1vef/fZWCRzSbVk/v/30+v7iyCLoDNV8r9CaEM/Hw8d5+HCcnw/D83F+fjh+OA0fxvBhGr57PH47TkdBygCBwLu1rntrtbv15tar7yA4pDGERDyaQ6t1WW51a702VayGACgcRJjIJXJKEdEBPOUA7vxn/+u/BpcxZiQ1aiiMiATaHQDKmI7Pxz9AEubBoJxvn1VVJJFHMC9l630lQgQ0JCBrvjJjDpM32kp9W5Z1vSy3rwhNOPXGW3GSNA4TsQPt3crp9OHh+KFs2qqiG7MQRG1UezcXM9n2t729UeQUDqyx7Nvelig9Mk2nRw6xb+16ud7ed2tkzcraneV0OOQwubUOm4bWSQEcoVa7AfJ4+MUUjyOJxNCtVH9nbGyzawyOSRJz2tfltlzm+SAxb9seghzm429++NvH54fT8eE//H/+H3/06z9097/567/odfvu2+8fTk9BpPW2rus4jr1rCOHe0wgxqakBkAiQxJi1N/Bquud0J/R3IEdwInZ3lsQckPnuj+AYQhqaQjdCkWmY91vpvc+ncdlWjpGIXZvW5mbMpK25KREoWuSU8swxdwdidgdC6GZm3u8fx17W5X25vqEX9B1doWtgAm9gNRCxREoZ5lmGERF7iMIjEwMycjbX/fYlRCbeHQsbUQgwpde3L9fX143hdbue9+u6rUutnAJjJgzIMcpxiM/T9GHKz6f0YYyn0/R0mk5zfs78IYXnYXg+jN8
c5uPTeJiD9FpbK61ZbdaaBXK1FehGHHIYxziJZyt+u71ty3Zb6tasEdbeA8OYJWbi6DFyIARTJhBG08b/7n/7X2qtpTQnvHfHHIDdOnaEbs2n+SHG3PqquC7lrfRLDAEsOsSm1bomyQDRkAGwt5o4z/kJOXbH3g1rDSIx5t4bIBgwIE3jYFa6bQ8Pj9N4nIf5+fh8u12ulxUhCAZG3mtpamM+Bkmt19t20d9vZxjqDfp+fHj+7le/TjTtCp9//nJ5vUYUM1vavu015XCYM7F2K7VqL0qoDfba1LErxtPwgQmr72jv0G4iMdIj+uCmAw+t9rusSUI6n7cYJUZ6+fruSH/3T/74P/wf/89ffvcNOPzmb/82CI3DeBgPLBERz5fznYw7zwciRMR+J8BRRMJhnNxs2/dxHAE7mOZ46F1bb2YuEoSjcEQiY1SkPM4KwJSEY+/uZsM4aOu9tt/+8Jv5OAKTto7ovVHf9hgIAUWktaKq83RC5NJ7SMmg7/uaYxAJtfW611Jul8trWXZTc2/7dt3XHXtHU2u1tgaAzOTmLBLnkzNhGPowIrGX1fsNDTHPkgSXi2OAEBgNsIE8yTB8ffmq3JYK/+Ev/vw//vY/llKZghEG5ma9NxAaY44MoNpJGCgys6nV7iGmkGitq+01YbCua7nWWra9llaQOwfmcIVUKYxEo3qr1cruy/V827frdueSODowQxwoJk8DBSEhZAADM7faKv/P/+7fgNvb7XzdLmAOrECLNiPMCK6wAGgacu+11OW6vtS+BmSwXCu6iTVUo8Cju7eKASN7FpqIU2BmIFITlphj78W9IgMFT7kiNbV6Ok3CAR2mMTmW1hS8l9IMST1oxzyknKcpJfR+u7wsdXFA6rvr/ukXv/746VeoVNz+/G///Pz2HjmpWzcEAO1VuAEUkW7atmVXr+aB4JDioLoS7sBYvZZ+q17Qx3E4ARAiCzF06K0Ipe6OBimIaVvW7dtvv7+cz2o1x/DbH368S2uYxIEQgUVaa/X/z9N/NcmyLfmdmKslQmRmib2PuqovWgEgAcOMkWM0Gh/4dfkZSL6QD4MZ4wxtBmADaHSjrzxnq9pVKUIs4e58yIPJx6gKs6rIFS7//vNah2EMISBiirGrhhBVLeXBzZZ1a11Pjw/uRkwSsgPc1/HGmENMZmCuABjzEOLgjkCCLObOQqWU3vuyrR8+fko5PR6Prvb2dkaiGNjV8jCXbvM4mjkzA6AjgrsQufVSas4DBX55+Xo9nwmBELZ1DyGO42hu3hW9mSuQEIeUUkoBEAhA2E1FwhPEaH2x6wu0jZOhBEzB3cwadqXe0Ip0Lm3788ePX67Ll+XL6/VLX9tNb0CAoKWv2hEM1apa7163ui9LtW5mdds3U5O7qTZLgPt+vW0vZjs4mKvRHqPGoMzGgsRoFVrFrdRay76VVorV7l0JAZgo0pCRBIksEPdWt720Dq2p7O0LYGm2XJZXS8xONIi3CsUoBJ76ZTvT199lft62q0OlAAi76+ZgqMlNWte9VisF4K6U8ltfxvzo7gxgWRCRwMGwNW28hAgGs5MiwuX653fPf82aartOE/72L78D4x9//PK2XA22XWtq+TilyBxlvur5x0//+Bq/vE+nSDHHzE6IzN0UFnn01gqhj4oYOae6rF9ZjhakmTl5b5BocnLmYT7E3m9f93+sq0Pj4TAxx618CThyeOjaGFHB3WoHIceyeu/Gwuvttm3bcUqfP30NKYlQ0/10+tbMzH3ZVgR8fHza993df/3LX+37zsjjOJWyM1Hv7kAxDU27AhJHJ08pEGJXpxj3vRqoCCNEkaRmIQQiaVXByd161XXf19o8xG3Xst/nhFKxPk9TXZcOziLb3oc8GDiaEYAw916RRUv5859+nB+Oh8Ph+vZxuV6fju+/eX+63M4O9Pj0jhyYmZmRhBEFwBGW1ritwWYI1L98xIdJjrG30Za3tpZw+MZowLBT2dUWMKBL+fTx4w31dPi
LP37+d2+3Px/HyCbrbVtvW4Wi2NR6t50jIcS917013ywnyCNbI0DJOU1xysJNovaVfLvPD8bIZqi9m0bs0G0zfQueTMHQETlwJAdXN3YAB9VtW7aYlSQFdC291ta0NiUC/m/+L39V+/p6+XS5fnHdGR3doPVuCqT3CoF7b1ZKW1vba9vQQRC1YzclZiQwNgjqrZddA2ZzUlwJSc3ZmSw5cScofV/K6628OanCXsvtdquqalrcmgjMYX5/+GGYD01bM20G27aehnEaDpE5Dw/tVi4vH8DteHo/P30T0qjd/vz573/8+PdDxCEGGTAdKYgngOnwNBzfCUTt2lvH4o5J8sA0Hg6nnFLZLut2a020JUJVKgoVnVkzKEUkU9a+m6mpbfs6jad12cYxrdsFUY7HAxGDQ8qBKI7joezrOI6t6fV6eXx8PJyeXq8XIwsxGUBzdUJ1U7UUM7ijuzA1tdo1jSNz6L2auyMDCoXAIiEEpFCbOqCZXW9bLdvl7ZXcD/Pw9fUrkyADuqtpyEkBBbFrA8IUo5qBA9jdOylLePny8qff/ZfjFN+/+3UpVfV2mA+Pp8dpGFhSiMM4nPI4gTUh55SJAqeU0yNzUGiotb58Vtfw+GyOdtkwDCxHcCB28M7Ebdk+fvrxzy/nOurab3/48R+Wdr71Zd1u2q97QzPRWrsjS7JOWnfQprq1vtW6KxTHfdvflu0z1cVq6bCikDkYFBftgL3uqj1IBuhWjeHAksGFi3TtpRYGYxIjDMFJCFzvTIF9r2bcetm2XTXz//H/+ou9rKXrsq3sVRgQUDtW64DoAFGYkEyRmR0VTK11MGIeqkEIgwO7K7MxEyKYmbk6NAQgk2aKHoCgQUOmWvdedVu6VQ3h+brVHz/8eD2fmW2ehtPxcZqeTo/TmMfbckHiHCK6TvMh8kQojn1dLoCcp1NOxzSkrd/+9NN/vFz+OAwShhAnnw8QogcKD88/pHACQ0Gvt80N1XCeH5McUVPinMJslrZVS6kGzVAUHQXJA3vMFLG7Qnf3ZV2tQx6HYUyt7722aRpSTL21eTog8sPDw7IuxIwGl+uVWb797nsD3vYqzMzSmiESAJha03Zv9MY4kHBtxiHGNJoTsRAwOGMIIUTiwBxr0VZbCFG73a631tbr9XycZxb+/OWTSBynQy3bvt1ECBF7qUzuYEwUmAF823eRkFJ091bbx59+v12vT0+H+XTM+aSA97GE6XjENFIYKETOWeaZh0MaT+N0DPkIFGur3ZTBcN+xUxgnHtgZKEyAJ0PWbrDeJGZS/8cPf/inD3+3rl8NfIe+7C/eb0X3ujN6JokxzMLxPvXvQA7gPbROhJFIhDkPs2BGRIrshE27gzmxmZS6tlaYo1AinEKYiHMrWPfS2n5f6A3kOVNOMiQMjEMSVevN3F04lE1bBf5X/9370pZlWwAMwcHJiN0JkAFiVzfXnLJIdCNwHBLnFHuvwpJxQCVArFhBlYMIMwV/eDiMOfTWrO+lminFHJjBTQXhGJ+TPwT4PsdvUzgu1/3l7atjiSEdpl88vnuach7j2LXe9mXKp3kYc44hDtq99ELEHFByOh0em21b7XtttVTzPQ0cggvaMMbp+Hw6vs8hiYSmutUlxsgQmkKgjC2SC2pCe6zN962V7oDMbMEC9SRGbGLKW7nVWgBgng7jNCzrpfeWYh6GEZEBsLV+mI85xU+fP6gquIeQpnmWPIHwsm2EzBR6U1NwhxgjuKcYGDmE6MAikUgAkFh6N0cMIZKMBsySzLGrxyBEdFuuvev58uZuMabL7bys23x4lJCul+u2rYd5imHc1+KgZoYokXiYkqnXWg38elmYaBpD36uDUghxOEqMgO7khIRxnKaJI8eQTg/v59NTylOe5lZ7J045CxO7Bqu9XVyET+86m99eSZRoQs7UFlwaY/yyv/z4+ufb7RKFSVOUFOiqDgjZ3SVADDmEgICEbIa9ce9qXogIAUk8yjjw1LopUSXroEr
Q3VtVbebN2Ikgs0xOESC2pn07m7o1tQ4ing4+3PWDkQABOZJk5sSUiARQ+Zd/c/zZ77gxsyHeZ70QIkZsjtdtK/2G96VraNBbzBwimyLDEPiUxnEaI3FXWw9Ten5+nqdTJtTeStnRopOwUBKJkVMIQdNJ3j/mH6yFIRwzTdvyirAfxveZv314OE1DRHI1XMvabT/MxyGOLNys17obdUo+jeN0fED3uhlTZErrsre25oAxUu815Hc5jswQmaC2QJ5CJMzrtte932FsZlwbEAlQdxdwDsAZ00DjIAeCVFsDa4iQcxLkr6+f9307nZ4kxBgPOY/bvseQHk7PX758LHUfhjHEmNIwHo5IYoa32zLkFCRs2waA9ySbGFMMIVDX5gqE1FsPMZqjWidmCQOHyCwxJzNwdxLatqWUbW99ud0QoGvftg2cjseH1ouVAqY5DUOIrezgigrCIcZMRN26ez+/vvbW19vtMM/TPKm6xJBSTPdNjyRMZLWD7TkSEzBLFHZvagVDNANteyD2OCA7mToSDiO40P6C66vvX2kcME+wvb28vpy3y8vlx8vyqspsU+YJxZU2B0QMMUZ3k+jEzMSM5IqGG1JnQQkQY8pxYpbqvncvfe1WCSITWWvQhRDdgWCI4eRO3dys9nLxamBmrYREcYYQHQCYOcQoMsY4xCDjNAZJCMCPv46qtSmCCxIaGCEHnIHJoBp4TKNrLFsXj0mGmI4h5CD53fHd99/95W/+6l/89re//fb5YZySegtMd2EwQe/a+l6GMDQFIOSAzh5iEI1imKdDCKdIYwysYbPQT9P7d48/HA7f5cCl7K2V5fZy25c4TXOanFh9RzAkI/YQYghhzCc32a43V9j3dbm9uYNgDoDdhQmLm2knWw7TMA/PINxBrtd2Ob+ZmzuZU1djDvOYhMVVgsQ5jwEnBEKvrqCmbS+363lbbymPeZwN5OHxm9tyu62377777vPnl/Plkod8fHja9n2c5mk8mqEwt7Yf54MIq2pKqbW27Qsi1NZEJOVhXTcHyMOATNqBY2AJZsSCEhiQ9rKZamvttiyAWGvdS933FdH3dQ9hSIEul5/YLIYYSKxXgs7o3q31SiJ5mK7nV+0bKvTSJFBrfZjGYRiFJQpFxhACM7t2dq3bWtYNkDhIN3O1xCGO74JM2+Wr1xKPz55HUrReEFeKo02/JCjt80+Xjx8YUR0/fnn7009/+unjf/56u1zbYJASh87keFa7hZiZMqBLcOHkQFEkJCA0gw5gIiGGQ5Sho6l7LVV7dTPmgIjmql0xMImQZ/Lobu5dba+1aa9CSliHSfIcmRsxk4CbpTAc5inHOMQcJLZWBJVr6QrK4m7J3buppRYDa7Xv3n/3L/7637x79z4EqWW73W44+mFMDKXW/f27X/zw/W+J5fVyTn+Ia9k//fST1zOhF/F93yNQtHaiuSqt6+oRj2nMIZnupb8O03Rbb80vEZHDMck0Ds+EiSBo36zdWJdedd2305AQIiFM03POx6YXIK/1soUZceQg3ro6Bhx4D3ULkkY+tM5Xpcg9RfQU45AGG/BmnnZu2r9etg4cY5EYCEJvIAE5ZATWJhgCoSEhiDhoqY0AUgpmVvZ2fDxcb18/f/n47ft3f/rpD7fbOk2HkKblVkhijLm1DkAgpK2rKiOEJABmXt31dtuZmSgisTIjirMgiXojl1o6gBNAGJOqWXdVba2llHq3vb6YIUJcbrdPnz799td/WbZ9vW3z4xhzaqa6b4g6hNhrK620to9DqmW7vn0+TQ/WaxAeD5MpSACRaIbdjRz0zngKkRVVATCoIZrxOEl+Z+qUwvj8i+v5T3I9h8ODzpl3s7JTUopi0z9P3z++/v1//2//n//3Hg5A/vHt03W5tVZ6WC+35vnQWcEmDj+F0L033JlpRLTARm0mIh9cfa91ZYiMpNCss5mZNTVXYqyd0IkTR+RAzEw+3GeITa2bIUMaiB3HaaAUKAlxZElNvffOkUl
kHh7m4WHI02F+keP0VO0CYGtZEYLQUHvrbRXiX3x/+Ntf/cu//u5f/vVf/ebp/enr2+2yfDlvX8qycLAdKgfBXoTYLbhJL33Zl13dHVvfcNOHdMiJa9NipfSNKBZooAKt93Y1/HGttwbr3i+Izgyq2ra1pkwojOk4PfHLn6+XVyadczqM7wI9u1q3eW0fHPatf2E+eALtjYMchocBxau33ZrXohuLi9PII1ZUWkOS+ciEEsSu50vdKmANcRJMBt4qxJECsXZ0NgY2TPfOTAihO9W2JQIEu769XrfL+3ffqPrnzy/DlMd5UINm7RCn3sy0pzxeble498LMJMRtW8zAzPa9nh6etPvb2xsJS2aW3Hs389Y6IjCwo6jCvtfWmllXVQBalqXsTbtvtb18fVUHJ78uNzdiFlU9v70chtjrXiSQ9WZNvH7+6Y8SqDc1MxGqtQ7TGIOoNkQlEiQmJAdFhGbKOQ4hIQgzzePUEL7czkF86Ft694tx+B5ezv52g+cA04H6qOXCaWxhpum7b377L//x91/+X//T/6f6h3Cwi112W/bbjj66r0BwWZdKPXOLCYLPYIpQhccg2I0Ng7Tcm4MHBzKzZqCtlb7XboxDQEE0JI55QFREZCJ33Vst2ok4RwyYyWGcBooZhAVJna0WgKbAEqbnd7/85vFXbjHLr/h/93/6rVAijAZa29oVxKN3KH2PzL/87tffPX///PAcUljKpftm2m/bdr6dL2+3shUFP1/Xz1/Ov//Dnz59+NO2r021FOwFk0sW9g6X5bLouoOSBEK2ihmz1Wut54a19BXVy20TjEN6QGtGO6EGGgFlWc7bdnVmRiQ+HoZvJA5EVtprw53EWrXeW6s3wS4sAhIkQCAgBCbEtKuvoRujew1sIWdiDyw5ivWGVJHWQMgiLAO4qvYUco6BKTAMrr3bZgBNCQFiDGp2vZxDCI+n5/N5W9dyOM4IzJRSHE07ALau13U1tDwOrmCmKY2uVmu73dZxmMdhPJ/PgAZOKaaccm/dDJgpxkgsiG7m+77v+84siNSqXi6LOS7L65eXn1rZH06nspdWd/ciStq29fxlCIIAt7evkbzutykPe63jNPVWEf3h9IjMe6kxkjpqaYjauwNgIGEUpChhYggcMaQcwoySwbs2hd48eIoTqyNURPYYKDAaY1BgAHTI4ymnH18+/Kcf/+5Wfty1XPbb29t5021p58vb2+evn/JwOhzfCWMQUlNTDvQtw4TUHVqv7IDIxiIG2Hvdlr3ue1XrKgjuAJtWQCUJIUQkKl23UreyASoxEEx5GlI+SXgSHgWDmWlfHXpKASU9HH6Z80NZt9Z2IU4cg+k1YGpQzBWJSaSbffxy+ft/+I9zmgrUp/NxL8vL+Vq0mrfrUj7+9J8a7qc/PpVGn18uL18/132dpuM0JXSA2gUVwGqxq/ZqRZFhJUqstQfsR/IuobiTSQcni+W6vX791Oq8w/jt89MxZJweTqeny7Z4qxvneSLJCVEMNgDobXMTMGi9dt2ARXutWIQSUiEmjtlZetOy99Wrj0Jg5CqRY8YdOHDG7ga+9xLYmcCsEgY1a9CFzHtXqK17b91bZ7RtW2NUdhhienu77Hudc9yvW8wHJ3bXum1A+Ho5D3n6xXff96q3cp3ykMeh976umzscpnHbNyFiFwDovbfWeu+IaGZEpKq1dhG5N9SYWVWX9YrU3OptXbuWmES73/bLwzxj1yZopaJZ1RpjNuuAgmC9dxbet348PZf1dd8Wotzb9XarTBnUwDtiBQvGgk4yxgGHwzhwpobWe5/inMdwWzdEg69vtioeEw6sW/NlxXnCw+zubisTQtfb56/ocDq+++nlp2q+bLbXGlrYy3q+vD4dv3uX/2bEAOED8623M3FDe+3wjmBCck49wWDeDLU31L1XrZuqmWG7XgrGGImoNHxAwZaMVdvebsW6labYYziy09ScWy1mRthB16Y39Wtpodr6+58c8JJ42KxJq2v
kSILaDXggD4FjFBwptN7/y58+fPjp/0GRZeJhTPP0MI0Pw0ja63lZ9vJ64XNf5fxyu8E6DAOym/dpyHGIvW4AptiDhtYhCTvVZltbAWAfn9LANnFcfLx5hylpbZevf+z9WwmhjyaPNMT0zcP3y+v5S/nJdtbWTYvD0vXWjbRLb0uAiCa12ra9MgeJ0ryrggBEASecKILkbl5qU28xojqadcWKyWIe1bqXpr11uLAQAKx2pabAOKAzAUMzUEJQdUVbl9s8jGiw7mcJaVmWmAciqm1db3WW4eXDl8u6zL+cVXW9rtfbWd6/3/e97PvtdpumqamGEFS1ahckkajqZqDaQwjueL0uAODu95V7ANBaQ8Te7e187r0jsaqt17dxSDlna3qa5uu+tq4AkFKoWWKMTLG1HnMCc+ueUzq/ve3FUhRgwaE26+bCSL32lFIIod+2xXoMLcoBOCCV2q7WMyMYBLcd6icrDxRmEdGMphUkG2ap1f3Wv5x//+MfPy9b2fq+9G3v2w7FYKN9L2uk+Z/94m9P+VtpN5qOu50prBSI+mvfCXAKIYQQGLoC3cVIVVpb1tYWNxQUc7zUQhJjlWTYArlab/u+l9rWzlpsDCkyqbckIiB2azeD5rAArtsGLNtWP7f109PxVyTIf/Gvx24NndBib8YMOY8xpSgp5QFBXl7f/vzpx58+fPjpx4/7vk9TCALs0UOs1i1wjNmgMzIjDIGTSDdDIslJxZ3ZjIlxSEk4iWQkxACSaIwxpcmInME6gJqCgRjnNMR0GMYcR4CwbOvr9TOgOYFDC9IR9+W26B5QgTxYVUYOHMfx8XD8Po+PRhjJp5QyDENIknJ30F73tS5laXst2o1ciBnQCRmwe0MCQFdTd0fiyGMUxtaxaW+FWjEtzQw7sNxXJUCphYVEwjBOW9lbqYb48vpVRIact20rpVxv1xijEJ/P51JKSomIWqtmpqop5Xmee++lFHdPKd1ut1LK/dyv66qq9xfAzC6Xy3K7OXjZt1b2Xus3794nEW3bYZq39cIMKcVxSAhdQgickTRFZiYws07MsZbL9e1T228OlQDBqJZ1vb4hqDBarwiKCAGFQUIe9q5r6YKAgRAK9CpOXs3QZIyeknUhGchBX77+l3/4L//h4+9frp/run19e7leqzUWjoGEKfzmF7/95vExBUkhNV2u+wtgu8MrwEnVAJBZEAMidu2m1mut267NTMG698Ztd92c3E3NVaE3773utTYl9hAJ0TgAcnXeSr9s/exYiAuQAUJre11uaJlgZHZp/VV8Aj9YFwYEa2qNaQAA7TpNU0ppvh2+fPpyeTu/fHz5i1+/j5ZSHErDtrtapRin4zS2m1cI0CPmGnCzXWpCHgkgjpzBAIxIFLyLkTURsDjvDMQtOmkGIAegSl7aeq7LsRzfUZopvz89ff46frq+gHDKPsR34B5FZQoRvu29m7QgOeZjHg+OcW83YbTF3CggkchSN+172ffr+a3WSkRhGA+neQ5kqkS0NaulS+yRYpI5cOjFNq0hcTQ1J1CutatXZGYetHnrZ+IY88zubn25ncven56ezuu56C6G5HZ9e+1uIYQ7mbmUcj/05+vlXnZUtxDCPc4xsxjjvu/X65XvxGyAZVnu2lJE3Pf9drsxM3fsvatqCJJSIjMWL9slZjbN1p0gMEVzBDLiuG5N+tcYY05HRIpp2MrXUs+wVGs9ShOhTnpb3kpdTsOU87Mb194kR+RRCLa2vJ3fhilPw0jxBGoghUh0VxoCy+xl8XVvt7p0fXvd/vxPf6x9zcORtxYBh5yQ+PHx4XA4Irrj/nndXi8/9ngbDjQyOK0cxvtmEOLpDhYkBtTie3dFRmGCqmrduSMrOGpxiwicU+/dDUNKMaFwwegqtruSVuPCyTLFxGNvbNiikPk0pCnnsZfIf/mvBnQWDEzRrdZatSMzBM78c/U6jsNwOJ5CjG4Fvc5DTimJkDYtS2PJMaHzlmJMIGWzYowgZBE
7AQBQIg4MkFNmCtbB1dypgu+6NwcFqYaNnSKNeUIkZz+MYUDqve1929fXRc8Gt2kK8zBlOcX4IDLn9JAPeZpOMc6nx+PDw3PKI0c1retlJYBACtSLbuC27+v5cml7BTD3Fmh7HnhmNtJtK+t56WULOBzy42E8THEUj9hItQMYA4EHQDdswmLNu1pOkZB6t4fHx8v1bRyjq5XlWmt9mI/M8k9/+sM8H6dxctNlWXrvIrHWptZPp9PtdhvH0R2u12sI4Z4M1NZqa2ZWSqm1Xi4XZgYARFyW5Xw+p5wvt9dat259yPnh+Ljvt1auoO308ACILCGG2NSHcZqnATrcrlcRiUEAK7EQRkEjFABmQhYPIaY8ioQoghwxxDQO8/Exz+8wz3HKrtrKJhBCyHFISEhBaJwgTxAHpRMznX//n//zn37fIL18vf3hT//04e0PQJpiIFB3f/fu/bvnR0F0wmW7/enj717f3gyAOQbIBs1wY45mDI7u3ru22vrem3ZtDUsTQAosDsGQAqUQhyiSRFiQqIMiOUYEVgidWJmgWyeWwJRQhMKQxkAJXVxDiOEwH8fhIGChVVXcohAxEqC2vi17lKTNJAR3jxJSHOf5eLs+1uvn27XkbGMc359+Gfj6slxIte6xmB7CBJKFRBgSexyVGa26gSlWtYVIpsR7rGbayBlRUUQimllDJwXzTrYsy9vL13h0iHlnD6fj94dv1nJLaQAsFC4RD9TJfeUg6N0MKIKydOPr7fXl5XNrW3RtHJNQCtb6BrAeT0MpMAwcmHKwGJWhUtsDrmTaam6IMFnynPMskdrOvRNzRVQRWdYSNAQJ/WeAOJiX+fDQekWHGPLb21stxQCZ+cuXL8IxpeEeu3/68vnx9DAM07ZtT+8eiWgaD+Mwv7299d5zzvfl8qp6j39qrf/blWVZcs7btt1joVoKIjKziLSytbJo7xBDCMmNydRMY0zgLHEMIITeG4QwupdSWo4pnt6Ped+bK1QkM0DvDmgSUxyOMQ1pnJCFSGJKe9+Z+TCfWi+9d4boQREZiCEKYCBgwEjj/D//x//4hw9fK602LENhtaJue22Hw+Hx4SgiTfttXT99+LHdSpaD3qwC32YNg3FEZkcyt90hIN49a6GukdgluDsiKamLhxhiHmKkEEHB71tRHQiQIDA56x42VqIh+cDaOgFHoMFyHnM72txbt+v2IQ0qXnVfAUhKqN4dEYYcAwYAEOJ1WXqOmDAlJvfjfJT5aF61uRIIhYfpueywX7+uuzL5PJzycGj95n42WTBo5DGDd8o785flz9BLDocU3HEHJ0YROaIbDpRW2LZ6A+NOqvgTejONaehYwzSF8E3us0QFbnu/an8Bp5xOpQSSvdZ9+xophtr87evtdi2hsBJYAks1UXHuDvFSJExI1qKkFLERI3NCTNHzQOUCr9fbOIXH+TuiUYIDB66IHYkVWht8IDxYdwlqXnvteRwcatnOh+m9K/TWmsN4nGtp5PR4enp9ff3++2/33i6XyzRNAIboUZJwlDFer1cRuRd/SinDMLy9vRFRCGHbthgjIt5/h5m3bXOAfVsCcTeMLGB+PZ/HxIGmNBxUtZbNahnHYRjGbdtSD6otTkNEVrMUDmRVArJnppAPUk1dO+GdCY0o4fnx+eH5u3iYOMZhethK37eVu8kwJozUwZpSHjwwULjvYzH7s6+ax+cffv23/+9/93/7+PIf8sBzHkrT8+W8l/aLX3xPqIjcQb98+bRe1tFjCGnvrV6gaR9NQm84XlIChWiFVNGNiEi835GZ5s7uXdzQUbpTG/Ipk69t66ZQeyePmCMyK5ZCTjgPAmuj0CWn3ny9lR41x3HM2UBUfd+/ipKXxq1uOWPk0QBVeh5TlDiOMwh++fJplevhdBzHPHBQDt6JiFRw3/ecc/J43ohhikEBoxM7Wmub0jmYduyWhHsXhDHGDV66vOaUQDACiAg5OjzV5lLWAGfD2hVWCqWe2+scwxSnYQg
YKA5xwLBwJFD0vX99+ezX1yGLWFyW6163qr11IDsQ5gFOQmmIXKww2zxFigEvdV2jOvUulXEzQGakEGQeh9pZl03fyjqXK8TBWYRJRCJBKQXQp/HU3fbr0ryBtXCHKLinFKPgst5UNVFIILfz5f3T+5dlkRjMbN0XEem911r1XtED2EtxRzNtrc3zfK9+llJijMxsZszcar3dbvM8l1L2fV+2G96JhyQG3k0D8zgNZe+BwrbcyrYys8QgwoSwnL8C+G7w8PRAgGg+RgYgRKLhJMEyQC2diVLOACCM6XDI0zHPDxQHawTWRATcW6/TfOLozdRReBhhTA6ETanU1z99/Mcvrwb+m1/+5u32Yb2da1u99uulvP/uYTwmMyXAclu3ax/4MGdW1QOn12277u4exlmJV2YI9GgCvWxGa0jdVEi1qFVwcDfELuBkjwGp96YdmpICQx2Zg/Ts0IGDD9OQpLuvpcZSm6EoAOiglm/bqixRhFAS/+ZfpL05Anpr7IxMqjrPw5gyh3w4HvZ9vazXqktptQMnSSHAdb8WNe3bupZKe23Ltn4e5nw4vsvxVNv+dv3p7fK2FW3qqUeoobTirgCVhUVSVDAXaiSYG0jRmnsXahx7TWZerWylrkVbrYbmbsCMIgn7OMrD0/zPTse/Wc7X8/mLtqV3ut28rL1rMG0z+qOMhzx2bU7daAWuQUb20Tz11kgxcZQ0dEM1AGZni1OfDxwJtlqAo6AouiOCI/TOYMjUenFvSZIwA5B1zykPIe/rNbAHQEbYt5u6YoSt7t9++12rdXt7Q+jTdIgxv75+zWM+Hh/Kvocg9wmveZ5vt1vvVkqNEs10XRYiMoVtX4kIAF5eXnpr7t5bcXcEU22Px8ccE1JTr2XfokiOaZymUouD3i4XLTWxu26mpdbFFYUiR+YoFHIMMQzJmFhwPBym+TAcHymMBqyqX96+nq+vhIWFGZmQRBJLJBGbB+UIEN0CO5L2/+8//N3/+vd/dyuXyLz2cllvr+e3h/HxN7/8PkUbx3kt+6fX11ZpzIccpKtXtx3rVnYUTTEwIkmn7A5KgA669mqldbdGuDeoqxm7IiYMJ54CoO4NDBxUsKURQ6QRB5QxTVPwHDqx2r7DukJdzEpjjKrYCry+3s7nbb81/vW/nAwpZOJI6oag05TGFJ6fT4giPJ8eTqq3razbWq7Xs+kqQntp5+vr+fr1ulzn+fj09ODIh+n5N7/858f5edvq+frl8+cf3160nKWuu1XAgrXurrTa3KpoCWGfoJA2vNayrRuaEhNqBFTTpg5GaAy1ay+we61VE85DPjwcfzPz979895u/+c3/WeLpp09/PN8+ag3rdXBSImGaZuaBRoFktDauu4IgRg85ppBHBTcCcCxVeycDoOBjwiHGKQ1qrs6RM6FnZwFLFCRILbWVOqSRmdbb0vcaGQPhul5QG6hb63EQtTqMMUYZc3p+et6WZduvOeeH4zdq/nb++v79t+7Ye7uXfQhRe9+2DZFEpGu9H3Qi2rb17jR677XuvTd0VW1uFd3nKQsbARzm+Xp5O83HyCzChFxLJXAEZfTWtjFO7lD2EkJMw0ghAHMKEmKIwpJC7Q1qG2IeDqeUBzUwMFdbbq/bdstpOh2fkRADe4wuAZghJMaM6/ny8Y+fL+efXs7/7p/+w+cvPzboBrq3PkT5zS9/GAeOEaz6x59ebtddIHIglLj22qAgkymACwdndqQi4kJEQO6szWvbew9gDdpa94qFZ8pjiNlJgNi9W0dWCIiRMSVFoRCZkBWomhW11vbbm7GGYUKMvdt1K7dLvXzW5VL5b/6bB46CzDFGQHMo4yDoJWaaDw+9yzxNMaRtrdq32i7nr0utrfX+tn66XF6HcRzTu0N+/NUv/uZp/uZhfhznqZT1erlt+9Us7a96W+t2a7pVJBKA5nG7wX6DbCNhhi5FpXavzYNDig9Cw3pr1o3QGI0x9qbEGEh8t23rKT384rt/9u7hu2+
f/+rXv/pX33/3l+fz8vL1z83WVgmcXSGgJslMsVFbdd97r73HkBMPKU4kBIDadN9r3YE1ZM5zPAYIFAanESwSgHRiwhRTDqmXbq3N4wgA58vXwPL+/VOKYdsX8J4Dr/stRZnnA6EKyzROzw/PkXhZryw9ShjT9Pr6EgeZp/l8uUiWbl1rN7N1XYloyAnclmXpqkPORPTx40cAa6323hChlMJoqs1NpzwGZtV6mGZB1ro/HKavL1+CUK0NkY7HubcdrRJi4HDXV4sIilCIQQTRmYUQEJyRwExSng5Hd9trd8AYk1pbtxtamB8eghAE9uMJY/LakRhJQG//4e/+3f/4H/7X3//0D79/+d1lvSyt7htMQ/jF99M4YJ6Cu376/PrTT5/3wkBiYMakAMV2dUNiROxeACsxETFzA2imgE6tQa8U1cndPcTOI2DMxHQvL3kHpQgUGZg8CMDASAkg1I7dQFUSh5HyHCimvVettl19eXPdQ7fO/+y/mYaIOQszInXkHjMcH0Pprzk9BD61Wphx38uyvmnzbl62bV22favrtZPnb9//IoXpcfr21z/89uHwnbus2/Xzl991veY0nd/2L1/q7dJquY5pOIWjeOxKdV8UyPKA4ZHlhBD6ZhmzoQR5FJr31hxgkFGQwHczH8IxxXnf2svLpzSN73/4zfvHwzzwfHicx++bruflT3U371ZrEcgpUMxBwlyrbWXbizVjIWEeDdD6rr1pE+8hGEQeA43esVTTFgYeJIQkU8iDste9Yu2DyLLezue3eZ4eH08xBWbu1oXF0cdxODwcXQWJAofT4RDQe9vRivVbIkbv27aMYyZ07erurnZnB93NfM55WRY32+uGQLXWWvd1XXrvzAQAXSu6mzlLYEQ3n/KUA3rbrJXz25dal3EY13Wd58NhHrZ1gdanIQuHbsaBgsTeLQgPKd4XOgNgTsOUBgzBWJiEQyC6V3jd1GMQAYlBZIxGiTDhmDEObS2EhWj++unt3/2nf//3H/7LbVvrtmMrx8Pw3btpnjTnZAhfXt5++nR+eV3Khg7IdCdpmoIDGrMKazcFHQgzMwQmZlftraq1jrtp68Q05CkxCWGKFAG9dlBDNhZkRAoBA5GN4i692V4ZTEYOM8QxItPaam29LE3fWAtt2nff+Lf/6uDYHQ2wi0BKItE4V8S+7fsQnlvpZb+6oxuCoUHrtda1aZvA4vVyzVN8fv4m0/Du+YfD6fF6O396+fHL6z8OWQ9j7sDLddfdhkSH6IcxTDIWh7fm67Y5QMiPw/AsMup2rXpWTHdaAnLI+fQwv5/GUcgRRMHdIthwPr99Pv+5tnbID2k4nJfzui9qrrYVPdcduyFTmIcg5CKRYlr2basFlczdgZraVjatvVVEwBAiU1DtrXpvoLUzB44phFCs9d572am3upd93+Z5fnp4ioRkbmoI6OAi4TCfmEVY0D3FfJgm7bvbjr5TXwlo229ECEZdtZROIKVqraX1sqwLsxDitm29964NgVprtZa9rKqdGB2stdZbJ0IAQEYRHwOhbvt6aXXTvhEYIY3ThOQOmofBqgKZROlaVXtO2bq79SDBiViCSCIKgHg4PHAUBCSOHIe97OuymSGSD4linpkTxICM6ghpkCR+eVs+nl/229v5djmXdr7t69d5pPfPeUguEhzH27q8nc+v594q9U1DiDEkkdytAAlTRtpTlBAj+oAYlJuDuaO772VppXnFdd3UDMkQOYQYQ2DvaGbeHRtId9AOqA6gdO8LU+AwICcgNkLcdN1KvV379lb3t46sOK35QKKFnONmXSIl0hQ8RGjaAgTV68vbP4ofazXA9PD4zGLLy+pGtVvduwgh4T/+4z/keBr+8nC+vRJRLev59rlBeRznhPjd+6C38gY1SmJHx33k+mD0xfC2d7by8IAhjhHI4u3z+VMKSqreVVUPcjiFBwoQQIhue4d938Ecgr+9nv+n//nffvrpw9/85d88nJ5LvXlv7x++F7b
f1a/XdZWgte4u015rmE7fPf2VfvkHYp1iru3qHrQBURon7FXdoRHU3vtewA2JVBd3Q20EgToMhM4ecnoeRiIiTCn45fxKBJKkao0pD9MMTrVfsTcENF3cm4AJA3LaSxVWBTi/fZlODxzGVmoFu13PTds0TeM43m6X221NKfXWGOV2u5SyM7Nq6//1Q0wA2HVjiyiAsEYJmEZwvZyXVjYEPh6PpW5MMD9+Iw+8bueuysy91LItEsfe6/nyMuoDEQVJzdQQvWoQnh8eMA23W/1ZhxdlzANaIWCXkRwAK+dRW0XzVuE///i7//zH313OX4JvcYD30+M4BUJFK97C6/r6tr3tvXEWKs5oMVFIkQSxu9be3DEgSiBSTurMZQ+qdcyZcRQcR5EywBGRXFiU3PrWrJtaiikkxu7VSYkDSOzOkkYOIYQwjZiEhjgEyWZwqvVyW9dY7GjwqxAixEkxR5EWsTb0UHondnYnJndxcjS+3T5EvIX4DpBRrZlFFgvzjkW9ecPDMcVIv//j33PEOMj1ujTdr8ur2q6aXOaI9XGa8EGsKOLcal/wMiT5DvEfil8g/Da+e//0PQG+rMtyPZVlq9BrU0ocaNi5zsc58FHMToN3xLdbk/iznOZ/+Y//9t//03//7em7d+8P0zzEYOPh6fiNty+YbEHvADTEoamlcPru3V8v28cYXDgxhUjezAOGQvte1s16jEQOWJuzK7fmFaEHzRNHUAJmQvHmBIRuy7KrWwjxfHnjEE6n0zzPX7582Zfz8/Nzq5v16xAzUSSSFI9Dr8XK63Wp9cY7JfcCvTnst5t6F8Z9z8t16b0DHEopAHBvBpt3d7/LRVU1xaFqzTGeDsdMOiUnhMv5Ukphjs6uvV8vr2Y9IGmpFNNIB+01EhJRaU2iEZG2fru+AXhASsMQODt0wqSq7J7Gwd2vt8telhSGMQ6uigwmrChRTcpmRT/99PKPv//Dv/1f/ofP2z+lsOVjGokVi1k/v/p2ey2Ehd2UI5FmceHjw3ycD1FQm/TmEsN4muKQVFtkUfC9ljHy0+MxholgCj4hUgoZNfa2l/3r+fVr60WNDodpGIZe27pvZkYSRSLFMAzjcZpTZEYMaWSJbmxmZblps9qLEROiIFAGeUiZXHCtlwUrhbqVuFkAKKBJAqEt24soGYyIuO/A4DlGmwOHtRUlsGmYFervfvx7MD0dnoHref1z25eVVxXGygn5IQ89ejVfFlezCT1wT+I4zO+PP/zyu79FhH67Xb67BNGOAABgS0lEQVT+E5S3rrrfFt0ihmAqb5dlPE3T1MwuFDgmWrYtppBGlnD48PLh7z/9j19exl/86vt3D78iHh7mR1LfzgvIXvorQwoZ976H+G4OgPpVJLqVkGIv1ZuGzBYI+9KtUYgBsYt1hPVWtl6ZUk/TAQfBjOzIyMzEuC17EHn7+uoIv/n1Lxzs86efzudzoppj2Jer5HGIJyIKg6iTN9tLMYp7aWa6b0uxrTb1os2txHI+vy7XdRjG8/l8l76ZWe9drREBEbmhu3fdmfH58fn9w4FV0evXt4/Lvnzz7j3a075e2ragtxSE3fq+ME8cYwixt8rR1ay0mkLUqs127AZ1f//DD301fB6Oz0/rbb2+vSkkraX3KkFEMmEGVqhXPnyPcdCy4Nevnz98+fH169flRr1PdEjDQNRduzeoLaxr1RLikMbp4Na1sM9TCvx0fDodpiiBgYVzGsd5nnIeOiBoL3VHoCnlwzSnlERiCCHHlHIwZ93s9fJ6vr0ty5mFpnkOIUKTdV2v25UF53EaUg6Rh3wQlG4G7CyGQOjEHUopi66ttQhDkDlPJt9MgwCOXPpbX9TcfOttJzXAQkuSrI3W8sUwtY6MlAMNh5lG5cV8qxyAsz2MJ07Hpa5ff3x1XBgqIBbdE788y+PeFTxkpN6vYGI03GqP0Z8OPL5/fvfdt+/mZ63bS5pQiUwD4PvDg/LgRjkcO9TWNA5DqRq1HEiLkXd
TKTnQdw/HV6uvX88GfwSgIR+j8DE3WwV7NNzL+mdM33SUbnXOT3urHV7zwN6n1vZqb0FO38xDh1xu5fV6u6KySqTQwPetTzGo8HR8OnLSuo1pDCFeL69VOzFRgF/88BdJ5r//x/8foDPC8fQEhjGmlBIykcQYo7sbQRpiHHKE9PL69WVZ9+prre2mwCoD91styx4Q9roBA0ojYEBV7+jBWm+9IQNzejo9nA6nyJG4rLdL3X2eZ3eY56NDM28EnEnQDRHZLXkCBjN1RyJqffPSjsc5Vu/1usr19bM/Pv2q92pmKQ29mxjV+7pOMyAOORBnCGhxcHqmjA0//Pj69eN5fRjnf/m3//q6vqoWB+UwEIay6/68EZEwe6u9F3WQu6B+HlOIpVQJ6XA4HKZ5HsYghOZIgCDIIUS+d8cBLOccY8w5M3Mf+zD4YeRtG0XC8XgMIQDQXUvbVVNKQ45EVGvt3QAYAO6Tk4zi7rwL71y5EtEwUB4mYdRo29NsG4TrpwYUI4mrAql7uW6FenJQ82qKTqQwCvmQJFDkzEbogb/77i9++5t/4dB/96c//u4P/2ldtxhDNAaXLXYiGQ8HYdbFtO+3a8GIA1uY0zCBYNWit5u+fN3Pb1sm7AQt1dPTI0B0tMN4XPFq2sWjEx6OMsxBDW7tuvaLKU/TgQHbevn65eXdczTpgnEYpnLukQlDX8pHJEUPFh6m/Mttxb28DQMGTaDJveR8jHnWrOpc3m5OHDlGh5CBcMghO0JrLVJQ8Ov5lbodj0dv9eH9Y2D53e//YVuXb755yjHGJGaW05ziJEGQIefcWnfuTpKAjkdsap1Yz1vd+62sQF7WpqoEuO1LqTWNgyspOHESv58GJZQgPAxpnufW2qfzJ2EQkTiMgfB2/hJFhLPEptYVgRkUStRAwZFloAl52vRCiEDdwJ+evvny+imiR0Ltt4ynvl0gTGPApexEg5GA7m3bPA8+jBjFVBk2LM2UptPDQ6U15DFPQxrVyjRNz0/v5/lUS7+9nUutKNxaO1/fVDWIJBFg8q7koGDay15Q2y4k8zCmlJxcBImg96aqd73TOI6qervd7uKo4/E4TVNrnTnEmHvvRJRzFqG7oPDecTeDGGMIQTgQAziaWQhBRGJriJhzFhFZvaOo0BLjYAZ7aRpMQhpg5BBXrWsBM1Q3RATG4ma2JJQgQuwsJU/Hv/5n//u//ct/FbCf5ue3t7c/vt4cJEYBZ115eJzmMMaGOhy/fLXrstlowC7BHNt6e/3Mf/50Pf/uw3/+8PEP748Pcci1L7uc56e89gtVdOxuu1uVEJ6evpnyadvrT68/gbeIumxvOWQ1KPt5ucLxeDDGMSU+ZuwWmLReq73mPPV9Ps3fH8f55fqf9rKinrz7OLVxGMZpfis3chYKBhghHY9Ty9bWHjxoR0cjx2XdUhoPT0evKxK7448//f58/TLPT6fT8yGl18tHbTqNj4gcY0QmVYsxqoI6MUNKaZ7nDl671VpD5GVdL5dLCCml0Lq5IxqDUxoGQ5CUEBHU0IHAx3EMQnW5fn39mOJwOJyQYdmKo3R0IgQnpNAc2AG0dt23AllG4QAA0+FYezTr2K2a/vCL31yWC8RxOjzOp0cP47Z5NVi6RCGJOTEyuDFwTB4HqpvjDTt0nHKGp0O367IUzyFN09Pj4+Pz0/vj8aGU8iZJRCSGrdmyba4K3oBcVdfbsm1ba9VAl8t5W3Z0T8N4PB7HcRjGdG+BxxiPx+PxeMx53LblfvpTyncBee+uqvu+E1GMUuvP8xWltFK2WquIIHrOERG3dSe+k1GEiObDiMAAQETyejU7DdD34tc04vULeh8zKnAMPjymmcq21gKI7uimxWo3NCfLu0h1s8d5+v6HX75/+t7r7XFY3p2++RD+0NumOnYmBgxIQH7z2tp+W7dtb9Rh1TaRt3l5ffv4urRP1w9fyx9popuW3CTkcS8Ay2pha60hU9+uSPXx9BxtOMQppeHz9Us
rVai9e39Sq73Svu2O19LYhBM/5DF4927eq75evmD48MN339ayhCSPj7/6+voTEaIeXRc3qRug876vt+XrMAyCMXKWKCOKhHGUOXZAwHenIQjclusYcm310+cPEuDd8w8ppWkarKs7xjwwM5OEEIEAwBGBQAAdAptZSimUPY8p7iWX1LRqr0QEnlVVFcBxGMZxOnAMAGCtIyIhklvKUVtzLYwEhtu25YSBcHz/Q2Qo61licGsssW51CuzQLucr0gOPMxgDYErZDafH0bxwmn7x/rvI8fj0DPOInUOwvSNA6/sS5mG6q9NhcENHIAHfVDWQjBhacUcrwpDilPOAiLflspe1lNKaniKnFMZDOulkqswYQmhNt21b13Xf91r3Wuu6ruu63geAkNy9qRIzs2CMMcbIjHdb3nsXkfu5N3NV7b0y876buztC711bR8SUooi4+77vjrspBOS7yjCEEFjcvaszs9wWrQBpyBVbOrSpxHJV0hASCyUhHgYtVgDIHcH/K4a+165LHt0tHudv37375jgdb9y+nD9HgtMhvl02V2zcQyQAi0OqGZa+rXItgZMPrfN29Tj11/GCUpfzn9r2JUxIpE3XEI4yTo5A1i7bTzmMu+v58qKdjuHJ2mrsDj2MIAYi5NBT0DQdum7qO9C06J7RDWEvbS9Yy3j9ch3C6+P0bLVDGKbh/Xa7kotrsjXdoPpOiZ8fx+fjcDwlGSQhJo8poGTgQCnlyfvt9etPU0611s9fX8djen76vt5qTE2tlKqARJiG8XBXVWntEmAcQu1uvSO5eUMEIspxGEc7v93cFZEQubbSWhMOIpKGfDgccs6muq4rEsUQ0FStq/rlckkhhzhNhwGhN9t779Bwyo9L+RwE2QkhOZEpmOr17TWyME8sgcgBvHkfYnQmkfj0/j2Ms4KyKbFybVya1rKca35+DodMZtCuRNUkosZyWy/ruW67t2rqMaZ5npnDtu8vLy+qSkQxxhAZC5M2dxeReTid5gMw7Pu+rnvv3V3dvZT29vZWyobIKQmS39P9UvaXl5f7VvC7LBzAW7tjMvT+YebW2p0ZY44ANI4zoiOiuwFg750phEgphRDCkDIiqmqt1R0xRln3dtvq8YHDNCeuT4/96q1ssK0hHrMHlvGIu6Ov4GpOKcRuaoplj13b4ci/+e4vvp1/IMC3df3zy+92+/L49PB6vuzrxnbAKQNGQp5Sqo9Do9MBMhTGtmMaFh3i3hHeynomVhBVZu+q1JEBkYMoWb3t2+60d/nzh5dtKU/HMTHVSGkYkwTCHjEYdCICMFUnOPZ2RA/S+cAyH4fHoW/jhUrWhSJN3EycYp8VwLlOMqkTpDq+V/OYOQkrMlrFpTVoGzHREPdW2u2KGLa1XW5fhykdT/Oy7gQY49wUz9cPrV5Px29yzq3t7r6XdQ6jK7l37R3uwQzYGNnMB4nTPF4ut3UrLFm11bpDGty91rqWHRGtNkHq7rU1b52lb/suMT49ProTOJStEMD29mU+jHl4RBvDwMioe40cJBBCqmXbtx5yS0EYwdFSICJ6eDrGcbqtZRwShaAVFNC9B5SUufRrr2vkI46hobGjrjVYULB1b3UzZg55CCGM4+zut9ttX4sIIVLvpWyblrJvpZky8+n02L75fpyiu/VeWi8p3aetnEXKvm5bMTNwJMLea9nb5fzp69eXcZzGccw59d6XZbmLw+8OAQBU7wBtAqB7iM/MITARmUHv/WfYNbqIIHKt+72+TCSIKF+/3oSCNXrWOCY2Rj7Ap7psm5+O0zAcRPKS6PVtA1AkRG8hCEqsTcr+9d379HB48N7Odfnppw8fv/6hlJ8CTyK4raubOMq3D9+ElMOI3+UQJS2tRAi66w5COXatqIUjxzAjJet98/1r/wKND/iw1627Vey4DSd61NDdGfQphDDEFPMI4G47AHCQGIVIoCFUMg7mOEwTWyqtb/uZx6M10V1jGpMHAcfsi5emIWicYoz5cZiyiFj
xqrVp3Wwt66LdMYy9eauVw7htpaw3hkIctAc3F2GgBNDPt9fHwywpGOg9c+29u4K7r+ta655SQgcCS0wFnall4efnR/vytiwLQicC09Z6bVVvt9tyuRLiHetXSkkscwxMNs7zbbnkmNzRwXLObTk77Ov+GiklnFwAIjCEGIJ7GIZRrbr3XrfpeBQeHLQq7Ht5/u5YGpBHcClWbudbMw/dgTRnQmvUAR4fMYiVgpev17eXt1b22rWDmiE6gLVWAMCsE/1MV1e3fdkcbN0LAKSUzueve13ncULErvV+kXABQOKwbdv5fN73GkMexgRgRHwP692993a70X00dBzHlFJK6R7kpJRSynfY8J2e9HPu+1/15/+VrIH3AeumvWz7PXUuZZNitSnYBvKCj3PqcdutHA68ewHCObxrBBd5W88b9BYTVg4se6LsCoDmhJ/PL398/bM4f/7zn14+fLR4zqGE1LddO657sY+XkMfv3g0PyLGM7e364bYv7EgMU0SOWrcaDNiFQtjbHhV2XdiQIQHLCZ9OCBJikDmO05yOGUYQcEJqTd0cn2JiJoFijJR5CgkULXFMcVIs27btAQAscm7WTEHYAyGiZHAsSuaRMOXAbtw78x2ZCGbGRBQYyAlCHvh2fdOqxMw+MIzo6NaYcxzkxw//BEDj+E47uHsIYd1uYF7rbp3dW+urBEBMRARMwCqCrbV1W1Lm2nbr5g73OTBGIRJwAzAR6W7zNEXCvrcQ6e31SyQ9HcdWVVtxQyC3xkvbeGZ1jS4pDa21pj1OSYTdpJtr11J7OEpOg1Iuzd4+/enwza+MB2pbasvNO7ZqvTrAEA8UUf0C9Ez4nmIvsP50+dOHjx9fthWMmCnGxMzbtgLAVnYgdIR1K04oAZ2AhpDTdEiDlbbdlvW2AICIDMNAKL0XVSMiMyNCRHdoSMwUUjrknFury7LcbjdVnedZREopd2BMjOneLXE35gCE3Xrdt1K2EALiz6Okvddta73fl5F77/1+VytVRMQJuxk2PveuCnFUHphStL65Ve27NUagteyo3Sx1LhLIpLHZu6fHh+F4uXz4w4e/Iw1/+vwPvfu69oW+hpBMem/FXT+9WMJEFnmA7ky7UPGYD2M6HNMhi/i8gZfMiTntqfsBAY0kCk/iEpHFAzpwkGGYpvFwHzNf9qW4JZN5nmOM1+t12+pSa0klpMiMNCLaGqPkHO/qYhahBvu+d0VAAatmvauLoCIty3JvwcYoImIGAPchhJGZCbn2HkKgw6ydJ+YU4r5up4fj48PjH/74T58/vx4OP/9t6ECIl8tlGjIjllKjUJMMLuYWQrhpAegiMkyRXrVthZGqdgEUYkJ3oceH5yhUywZgZjbEvK9ba6vberu9ffvu27Lr7fJlSnlZrsQRSYWE0Ep5pXhC4Bxjc2s/V/3moEaZeu/7esvj+4EBYk5xZhKI7P0AAz2IlkUBViCMh6MksYho4E5AE53ep+my+uvr5dN+23POz09PIUBrjZlFQkrZECR54GkOiQUL9DTkKEFjhUp3KME4jilERARAZr+jfg6HQ7c71Bp7bSmlaZpqrSnle7h/r4qqain1HiUCwJ2dEXMKIdRaS6m9NiLKMczzPM6Hw+GbdSvL7WJmzAiEZnovQy3bIqiCFPdWFN0WeCAZJ+/UgowIvGuJMv3w9P7wr/4PRBQpFEchUOgk/PR4eHyaD1PibXu77K7jafx1KyTc3h0ffzmnunuKMVBIQQaZchi7tcdv3iFADvk4n4Y8hhBcobUmSIgMdyEUtKq9FDCDLCwYuxZVRYfeCrirNtSeOYyHMedcay2t1t7U7e1yzjkPQ8op3QNBRBaRbduwNUJnvqPCupqVuoOjMGuvvffbenP31Ia7CalNa3O1moeo1s01plFZmkUnPS/L82nOafiHf/wP19vrPL4fhyhC4BEwnS+v23ZlalEoyKmZMgckQXPt7go/I/BjSimdL0vrQAwhxTTkcTiOKQ5xyINIgHV
dg2QD6r1fb1/39TKmlPP49nY5jBOAu7aH00kClK3GNJay9VLTNKkbmbdW91qt1jjkYZy5sbr1fQshxjTEYYY42vGEzrTl1oUHF6kckMIMMUMgw0TIDhZippSPD08AcM1vEjmn8Y60IAzMEmMUEWBi8UFCongAMiETwsBD41RbjDHH4b+eYGIGEUkSOAYiaq25OwDcI/gQMIQUQsg5hxBKKdfrFZC113sefMe+030fGyIRioi3XgmN8HA6DMfjsXvbH9d1ZUF3u5u5Xpu2LthBe0tZGA3Jd9+t2Gl4GvLzw+H5af7+ND4+jPNdph0kCRGA7V3VIceQssjA3drTCN8//eLt+Yd1+6s4xEA8SFZA7hgCd9+GYTpMRwBwkMQkZswYkoSQwKW1VrWqNWu91o4oCrZib60JA3l3ImRw6LVrbatZJ6IhD5Jk39fr9brsi4FL4GOe71ie+5DhnS1lZtu2qSqLCLOqruu6lwJM8zC6u2BwRgoCiBikuXnvjo6C6t5MhWOIA5i3ps5h3RdC22v9+PFT12U+HrZrTZ3ccZ6GUspPP/0UEwNxM22mgBxiAAD17uYA4L2Bee8WYxymqV5vYOCueZgen57nw9S1Lsvdd0uMqdzWZb+2pozycDxZNxGmIFrL48NTCrKs5yhDzLOq1m6PMSCA1m6miKja1msFtXE+EJAieCbS4q3dSZKI2YZjDqNbdUdnAQ4GiBAQmnvHsq3ns2kZs6Tnd6dpvn9rrXUJnHJAYDC3rnXdqrdbpBxi37ojz09THoYYs1EgBySy3kspROTA61Y0erCfIQD3cdA7MM/cY4wp5+PpFEO4g5VUVb3fc+JlWa7X6+vrq5mJSM7pdDrFwzSOY8ihu/VtJ6IgcphHdSvlDqHpIclDeJQA1r2Cg0xBskOAx9P3vzj++vn03cPpaR4Pc5pQJISQUoo5pRA5iXdkRSIzMHcovfVeetifhNze033LBnmzXTzkHCVTpBg4Eom6oVvXqqqGwDGIiChll/u+SoXNHYRkgBBYVNW7c5REd1ha27ata53nWUKqtVprvTUzA8IQwmGcmPl++q/XK4uklO5tlNvttpRd9d6J9JBiDtkJjdBZ1BURkURCIhJwZca7LhcACAWAat1ZvLXarYfg1/Wm3nIet7U0KzEeA8fa9rLchiGP4xgiC4dtr+M45pz3fW+tEjGi36PSXmopW4hy/17TECXFZn69rUTdEUtpzGwK3ru7O8BhnpmCmQHA2/n8fDoG5tY6o+SYt3Wv3VIKpdQcE6cIvTNhCPFuI+8JIrKgDFmiJ4Ixa2ltPYd8cI4YBoBEAO5AaHD/7wHfPn/58NNPy7rWVureHEqpuzsw0ziO0zTdQ3kzU2tYHCFuxX789JPX/iv7dX6XICqadTN3b6YYBMxbrcyMRIC4bdvlclnX1czuBdDD08P8eBrm6R4mEVFkcWJKQ2ttGIZ5nu+H8x4O5ZyHYRjmnxPlZVlur5ckAYVFpFu/J+uSIgUREZmOKWNQ6pJxHPPzwzffH3/77eGHbx6exmkCBmA01OZuW1NvKTwBgHkDckysBVophB3dU8g5Juu9bLt7U7UkEdEl4pxHcjDrjJ6TlGZOZO6AqGYB75s9PaWBg4zT5Nb3fe8CNIKqtqp7q621Uja15mZMwRVulwXIg8g0Tc5UWh3HERHvdkJCMP/ZOZq7A/Ter9erqo7zdO+oC7G67bWodyLKw0AkQcYYE9HPG5/umdO9zQ4AMcZWOcbo1m/LBzQ3TWZ2PD5Mh2PvvSzLYRopZ1UFcyJCIO3VId59kaohBgrRtjWItNa2UoU4yDgfH2Mar7e1NJ/nGchfPn8BoKenhynlXoxFa+vb5ofD8e3tNScZUj6/vrHE58dj2+tW95CAQ+putdZ5Pq6+qdr9yTDz3ZBpb0ce4mH2p3cQjpJG62C6kXfz7AgCho6A4OCICB5EwvXt/KfPH1NKgeTTx4/X88t4mE+nRzNY1/U+zp9SGlMmAgamEH7
13S9L03E8IgdVdfuZCRCIJEittbu7+zAM4zjGGO8v9rZt7t7dcs7H45GDvF7Oddu1d3fPOQeUu1Fj5ufn5+fn5/sbYgpqLZEAeFv3223Zeo0sd/Cjuw0pE+Kch/vt8m/+zX+r1Q3AuZjVh/z+2+dfzfNRhEGsE5h1uROh3K3obUG+WW0NkuQ2E4i5jSlljobm2upevBftlljUAAG09X2vDI4ELIRukQMRCBkAiBCimxmim2kKmZnd3QwILcYIALXtcIVtW15fX2svp9NpmqbWWt1biAwACt5aE0YH3Xp1VVYhZhEhortjJaJxmn49TYoaQkhxYOZSynK91d7yMB2Px+PhMOQJkRH/t7sQwBHpji8nYFNww969rtu2bTlEZBaRw/w05MnKyszDMLRSHbS1EqOkYXSFXhURQ4jbWhS7iDAFRooxXtaCAEMeYphYJgBwVEBZlsvL61dtZmb4+DAOoTZar+fjMC23cwj27uGBwHtvvXfBB0rptp3ruqXIyLFzaNqnabpc3mptIQgi1n0jgCApJMHHRxueencIzA/vvRbVS/ANBJQFCBkQAADQEYb56Ye/+Kvp6VHb/vblM6AZyOvr6735lVLKOQNASjHGSErDGPM4sIwpSKnn5fNLN5zykFLqqkjOyvu+q2rwVMumvZZSgsgP33/PzK33Uso4juiwLevXr1/X642ZY4x4nxl2v8vgAODeLAMAB7Xed29EUGu33sQBQdEdeq+lLOe3+2G4R1ny3/6L/447N7OPbx+uy9scxyEkJlPRogUNzawphhQBbd2X3is6uHu0IVjJA4ZJpukYYyxlaVWZEpDXxgQIbmDo7tfbjRBzjhhC3TVQAAZ3c/dScNvKfR0ii4BTV1XVvZR1Xd1dAsUY0jDOalupsG15msfDoe0lBQOwtZU77VOAtr2KCJLsrW61iEhKiZmBKY2DpEjws8wY0Hr32+12N11Dmsd8GIfT3VL2Wksp1vvdaqq2u4KFCFGb1t6qNtMhnxJLTtM4DsfjwbT1Zt379Xp9OJ62zRU6IhKRSCi13p/7MAx763vbkHycwrzN161qh8Ph4MRvb5emVUQGd5Ewj8fb7WbmvVeLUNYyxNHMtu0yDTEKg+vp9PDy8qVqJ2Qk76VYbZ6k6M47Pzw8mM2IyCz3KALACVr3HqcnGL4FxW6KvHBg6qRvZwpXPj05j3f8BDgDAk6nH/76+Av0fn75u3//b0/lgDO//NS/fPrUWnv37pkhbNtW3oq7PuTjkz9d1mXdF2E2s+v1HGP87a/+ZjxNt22t2kII5JgkTNE+fFjWdQOAw+EgQiGMhI5g67L01pCIAccp3wEZ276UHe8J3r0NDOZl2xFBYiSAvW69NQBAogiUQmIiALDeLm/ndV2BcLhLrb779ld979u+bLW06szQbLdi0pE5oAMiOmHblNDd3UvpgGY29ZoQLYsb1H0Db9AbIqaUYoyqQ+8dFDqqK8CyaHfhjIgkAQkcoRbV3pm51Oru83gIIRTd7xCEfa91b9u2EcPp8RiDDCke58M9XGTUNA3etda+9K1pFaIkwQmaVjC/t59qrXcByc8sTtV930opYM4h7LXvtdy9thCnkMZhzinXthu4mQPgvR2zLLfWKjOtt8u+XRmcg0RPSeR0mMHA3dZlQcSAdl9PrWq1tiFnAGkVMFopJQ1ZtasZgAOauzoUDjROMxhO07g2//Tl815u29pzzv/8b/7mm2++6c3WdRFqtfgcaZrGWqsp5JBr0ZzCOA61rNu6S0wp52kQ2xUDVO37tpQckcDU8jiSMAvHIaeQRszaKuAN+BhwgL7orV8/v9a2BPa0run5EYcHAEYABEMkQHUQOb3/9d/+67fbdj7/0xiGX3zzAxENQ4ox9vFwu11LKRZpb8Waln3fEUnYHZdl+fz15T3DttzWdb2b851546Bgd4XPuq632+1eDOUgYC4id1UIupdtiyKltX1dt1IAYJ5n93BPbdUa7QKEZV/P1ze3nyW
ftMA4zCklBRum8S4vvdeOpNfFwAA0h/h8enTvCtW8t0altLtzERGzn8vJpFq1Q1cbhiBpiAmaNm7WooQUUrrbGLNYS1c3cf2ZeW+WUwIndzT1VjZXCyljFHJQVQjsCOu69t6JwKFxhGM6MLMIrcv1/iMWZ7e27h323vuupq0zOJIreCtdWxckF3I1EWmt3TWD9+b53lavHR37tjUHbSZCgOZoElIIgRiggTuo9t47EPTavKtIUFcQdiD3XmsjkGGcat17be5OIOOUxpxrXZv127qAYZIMjKXeBKNpabuKRFUls8zBUHdkBAXUYZolstdWSllK7cUur58fjsd3796XVvdtQa3hMfEUwKjXfu/5I2JX9K3mPNaiijsTjeORkrvDGGZHa92YGd1UPR9y7725f/P4vj8+0eGgDWz9kmICnlHb7376fevLLw9P0BwZhjgZBUQAJ8eOIAgGQKd3v/nV9x/Wl8vKhQgAIIowM4AdhlFVb3vZthszz8dHpiABWquX69tWVtWHh8OJgVqv5r3stUpPIYqEbdve3t4ul9sdFnY6HXrv1+sC9JOZ7fvmrofDQUR6tZTS4XDovX/9+nXbF3eVGIQCorempTYiarVuy66tbPM6TYck4W4KSbW11kxl33cAQIKHhwPzg0Lvvar6uq73ROSuGr2Tye6tBEbqvevtdpoPQjwMAwcJLCFFjgER75psBwVwQAREEQmIDHjdbuuy30ni8/Ewp9CWVWsbhiGEIEj35B0RYQUEzTkjOpK3qq1305+vrGUvpZRSkCmEkEOs5kAoziIYIhMgRkQS894cDKHWvpW1tyJIROjIQUjI3B2JDofD4TilHO5p7n9dWqEhRTPr7oTYuxLwmIey3DrYlGTbL4SIADmkw5DmKZ+vX5dl++bxW9Btq5cHOfbW2rZiTvM4bqUAYcq5lMqmElKyLlIINnc3RfCAIGwcx9HdP335omoiIY8DoFp3SUl7k0SIGNMhp3y5vqUAgN1Ybbc85KZ9HIdeO3FgIqLQexOhprVu9Xg8RuHqLR6/NfkmQGr9y/WnjzKtnYwwbKuepXhOoWhfdzlkd0REdHQEdAIEAHv67hfHP/w+W9v3vfUiSQLhVpsDDOOIMRFzTiHGaI7ujmVNZQjjUFrV+0JZpBjiNIYY872Mc7m8tab3+k8IIcYYY17X/evXr9freV3XEKRs9R5lHA5zzkmtff7y8cOHD7W14/H4q1/+8jiNSdJxmmOMwLSuq3tS1XW97SRDTPN8TGlIw2RmAoSMFKOkNIjI/W1uTS+3a2vtTvS+TxJs2wLgIeQc0050O18+f30Zx1FiCPdMSYM3A7x3p52ZBVHNwF0BWmvXfS+lqGmt+7qtHEhB18s1SQCwrW5jTDEld9y2bV33OyCWCLrWvfb7Q7u/Xa0VZMIooEoAMeeErKrMGAOHIOiOQFU7gKRg5kgk45jrtiNiHGOQVEq5XC6llBDSfBhzTogownciuZnfZ5FcLWe9j+T23sk9xijuWlZJwIBR0rdP76LQut6W29VNBGNt5+PpAITLcu5tSfTe3T3GZd+ZQmDpiijONSYJOcq27Yh13z3HFCWoA5AT/py+EyFAH/LEBq31eZ6IiFNetttWbnk49G1FxBxD2ZacH10dmfa+jGFkzgSo2ogQTLWXFDNxUKioBZDD83eKQ1le+rq9ff26llWnSZDSEJnxLihwAARGb45813fn6WmtbVtec857LV+Xi4gQ8TQdnt99gw6X61trRVVrb0wy5qmVXrZtcRjyRCQphofTUwwB4Wfx8/F4/BkdiQgA7ioip9Pp/fv3y7K0dhex0TzP83FGRED0jjHkPEzX2+evX98O8xxjfH54PJ1OMcZSyuFwqHV/fX39+vWNiFpSdTydHqYp0/+/qPfosS3L0sO2d8deFz6ey8yq7OousruglghoJHDCgQBJ1IASQOj36B9IP0AjzTXhWKCaZFNsVBcrsyrtM/FeRFx3/Nl+a3Bele4oJhG
Be+8+a629PocQkVwso9ifsDS0nOWiKDDGWuvz+bz8V8ZYCEFgKhn3MSSGD81ZG/Pw9FgWhRBiWWNBCFNKEH6OslkeBuf9OI4xxkwpIQSE0DnXtW1smuWeOg5DPwycsbKqQghN08zzjDG2VsfkjdFG+xQjBtAyCuHy3iFCKEWfIAAxLobCAENMMYCRMoExifMcgk8pCq5W5QrC5GIAAECYnI8dhDGCokLr9brIKwBACCHGoLW21lBKsyyjmEwpEYIjSIRgnZLWmiDgrAcQSkZRSHVdAhxn72KCIQTOVQAGYCBYNU/eOo0AcNYSSgEA1lqYfKbyBUANPhKES6WsGeZxmExMKMQUrTUQBO/jOA7e+xj9zS6nFHutpRAUU0Rw1zcpWoRQDHicjLPm/ua2t3YeR5SQBw4Br6MD0NX5BhDqjKvKkjA8+bhmBWA0JWemTy7SbH3LVgSeOgD++PaHP4oY6rIANEGOQUIAAggAgCAaHSnAMAcR8EzdvHn59psxJDhbs98/x+RX9UZlRUqJM17XZYxRa30+t9776Lw3emw7AuusYllZcC7/PGAvdS3LMiklQkhr/XlIiREhtFpV63Xtve/7LiVwdXW1Xm+dc8PQTdOEEMaUMsabpvn46Xl/OGy326urq6qqCCFCKC7yskyMKmPMcj07n8/jOFJK8b/+V//94kBvFx+m5TJlDEJoGUWW5BIA4pI2zCkDAPgYGOeMUARhjHGZKbz/rHVYLAy01sYYp00EKcWojXHOBe+Xi2mI0Wnjg0eMAgSddYs2YhxHa60PFiKAMPQueBcggowSzgXjXAixcLtxgpQQhLEQgmDCKKGMEkpiAnoy1vsYgfXWOw8AJBB775wzgolM8hSTNtpZFyCoytWq3mKMUgII4a7r+raBAFDKlJLWOmud90EbgxBOi4sbgJRyxomitFY5hXCc+hQdih4QWJUb7yZKUgLwsD8SmBhBEOCEIEQwBO+c41ImTCZtXLTWOO9cirAd527s+qlv+2GeBmuNMa7t2qHvnJk3q7ws5KpSKucgBTsMBIOpHwqZgZiGoacI5plCmIzzzCVnjIMACYDB+9navMyLLDM2VNW63t2n4gIqDiCnUEJrm6f3uj9zjBGCv/vH32o7397fCs4RYkioBCBIKYKUzBz0SIRMEAMIpKD68xFESkmQ4DTNep6MnvTcB+czmZd16b1rT80wdJggkMA4DIxSyYUSAqTkgvPRp+i1nkPwjNGU4pKgAyFeBAB5XmaZWti1hGApBeeSMUYIFUKqLM+yvK5X9XoDEZ7N3PbtOE0+OgAJCEgJtV5vVqs1QjSlSCldun0Iniz2G84ZY2eEMSHMubCIFRBIxpjo3cKlXhoFZSylxBEAMXGZYwAhhD5FCKFzRmuNEKnrEgAwDAMAgFJMvKGUOmv7vvfeE0qllIiSoiiMd3Nwbd9lTCxS16HvYwhS8sU3HEK8dENCPrO9Q4ohBAQAAgBj/HnkipFyRilNIRrjCKIARAghSMRFyBhDGHZD571fvi09zwBCyEiMEGMKInAuUIqMMfM4zXpkVCjFlncUQpjnuR96jLHg/OriEqYgc5kLti4EieDcdGK1A95BrzfC29lGhI3W1p4JcSkRRktI2Ww0YwxDGKwzxmAuGSPaBkJRmoCLwTnjnIsBhBBCdCBC5wIAERNYZCpTilKKuYwgTeOppHwaGuCjUmQYBgyByCShlGBgrbFuZoyxbEtRyAQ30bfDuaxfKlHNk6vqgKpNhAVIIDBF2VaM3x+eP3hp19vNF1/86qd3f+z7rl5tXDxVQiJWBRAwRIFnqO0Ca5DcAICKYgMIPTXnGKNgclVfxmStnZ3Xk/HjrGcTykrN8wQJXK1WXLBPT8/N0COCGKPeuxjj7LV1DqS0gPfe+xgDpawsSymzheIWY5xno7VZkOxhGPb7w8KfK+uqKLPtdpsQtNa+fvHy6fh4PO5DCKt1tanWCBAAACFswYmNmRfU4vO5WrhHnwVpICG
EMI4xguB87/ulKlPOlhGIMSaUJIRMw2jHGWOcrDfW6uAgTH9S7rhlXTOOI4SQc7lwxJc7K8Z02U5yggEAGECBKZWiyPLFPUEu9oAJAACkzMqitk637ZlQmWUZpTikCGKilCwxchhjPc7DrINPjEFMiPdRCJHnihCMEJ5GO00TxGAtuDW+77pp6ud5RAhRzjBXLvhJzwCkLMuMsc65vu+D7y4u0J96cWi6xlsnpSQIbDZbySmiKJdKCkoIu/oyz/McpPT0+HD68I9Qu2EyhKuyFGM/AIi5Wtk4W+ud8UqpRcfkAeKEZlnhwwCI9WFGGDDGMMVlXiTgjPMgLg6KblMVnFNBGUxgnmyp6jD3wdl6te66bhhHgpNNWAOKAeSyphilyIz1BiVIU6HKCFXbT7fFRVELHwwODpICAW/sCUOMUQSYdEMrlPz1P/2nNsxD11jvCMJuHjkrEEQAIISpA3D+8FjcSShzgJiQpQ0+ej32A8b05vaiqgoAUtM00zTs9/sYw2az4VyEGGKMSvDtdsOlJIwaa51zhJOMYms9xphgaowex3HZvixMh77vFyp/jB5jzBhfLLUXXnRRFF3TGrdfHhVvraTkxc2t1ppzzikLIabk53lwzo/jiBDCeGFfY845CclzIWHiCwS9gFwAROssBJ8JqM65xZ2CMSa5cMHP8zzPE8OEIYwpkZxCABYGzqJzwxhLKUKIKSUIyZL0lmXZAhZGkKLzISRCIMeEc55xASAM0ReFkpIjhCjleZ7nWbk/PC2DoBBCMg4JhBAstTykSBCgglWMTv3UnFrCsHPOGTvPI+cMY5Ii7ftRm6GuS4IFJgRSkvEyuiV9Ohozp5AwxZRSrWdrbdu2erZZVhKCtZ76qTN2EowLhhBIKbiyqCHA3qTOaZv6KngpRF1s1BdFWa3Gw6d6miGN54ePMEGm0qF5gpRiH0GCzoaUIMEYADgHDyFOCANCI0II48269AESnAghs7M+GK9nDHiRiyLLizz3fYcISgEAiPJqM8wBRuAjJQQyVAi+M9ErIQlmjCMXhtibfhggIOt1LZk0g1VlLqq1RxDAHgORUpjak8Rse3H1dHqe53nq+5SCC+n923eXt3crSgCIEJCYAALQcDB8f8zKHRISQCxFmWUZhpRTOY7zNE1VVUmVH/b9h4e3KTmCVV1vYozn81kJUciMYjSPfcMYQUhrjQPOCsU5R5Awxsuy2m63wzCE4Pu+59xaazCGUqpFHtD3g/c+y4qyrJVSzrn949Onx4+L3ohJ8frF/eXughDWdc25a/thoAwzwmOMIKE/cSKA9x4hQhhVkvOUgvWfn4Foo/cxhARhIIxmRb6E9kCMEgTzOHVD3zYnTigBmDOSZyXGeLAaI4AxijFRylGKzltrdQSIQgwSghgs9COECMTAuxi8TSlhABNMs5mjDzF6JqSgnHDGGAMgnJtna3We51IIjCHAn+Et5xylFMe4EMcjx72e+n2TUmKCW2vPbTOOo/GOcRlCmuex6stVWSUEF2abyFVKKYTkrIYgIrSUf2+9I5DgpOexsTHpoQ0hRZBoLhDjCCWfYtO2DOOIQF6uFM2jB31vm/5DVpBcZfz21bWARs+M58350B6fu7EjYEAY5Kqw3iEErDeIgeSB8wl4nHwKKSKSxYRDHCFRyHlvLMCYqZwCmGWScdzr1rgZTgHlWYSIICZLWZWbFFE7t0RmF3cvmFKYC0xIsAYO2heTKGtGeZg6KDFVNLgYCCYAhkgiBBxJkMXxcM6Lzf2ONe2h1Wejpx7Bh3cfs1wS9jUAJKWEIEzRM5eK7TrigCEGIMY4vbi8TCnEiKLbj6YbBpCp4s3L62CHh4d3CIZJD1V9t15jAEJiJKVAIArOYMaUUiEBijKU4mhn5xyndV5WWVZO0xSjX4ovpVQphRBqmvM4NcPQFfl2t9tBiIZ+djERxiKMhDPGyPPxQIV8/fq1KpR2tu/GeXATdARhznnf90opKRUhLKZEuKDL7JV
l2RJFuHQWLpkzflmBSS5CCM77GEHwQU9znpVVXgTvg7MhecH5rtpxRhYQYJomq2eEkJSZFCqTOQCAS+GTX5g5PoYUgLPBB01QCiHp2RCGEOILuG2D7/seY7SQecqyRIiM43g6naSUIYRp+qwNNcbYo11o4mnJ33WYEEIICcGbacYJMykiZVrrnpA8zxeu0cJsSynFmKbJBp+WXJalLQxjk4Azzs7Or/OSIHQ87evV5mpdM0y0mZHMynKd5aWUUnLBMGv6pu/GwTdDd4zBrKriZntxud7OL984Hd+++yGMDQgaUaIEG/vGzTEADBFJMAYIMaLGaG2GepUrIYO3JRcRAs45CongaJ2PAKSEZJ5BKZOP9eXN5eU9hKQuNofzk1Tq+sV9lmWE0JjSPM8+11QqoThNEBvTjGcMgKQcJJEwQkjDlIdAXDNG67778Q9SleucJ+8Aoce+l5IqIaINkEQAEADg9O57f37cvbiFZQESiDAIBnfrDSFomCdAU2VLijGlgRD+9ddfF2X57R++Oe6PgmerumSEhwQiCZ9rrrWcQ208xnS7rjCj4ziOgw4JMUYgTBFEQUWe55QS59zzfn86nfp+SAkKoYqiWk4shGm9XnNBjTGn8/l5f7QurFarsswXWMm5EYRAM75QW2OMxhkELIieSCmtNiklyujC5rPWQggJw4xQ6x1G2Fs39UOMsdxuaYYzJRYe8mjtZLQNLqJwXUglhY8pJdi2rffuxd1dCKEu66pcjePIBMdsgVGT0ZMxRkoMYQZi8N4SglKCBFPrDGNMW3c+n7NMFUURY7TWLQ+G1to5xxhrmmbh8AxmHoYB+kgQopwb5+IwUM5D8CCmqiiyLIeIMMbGeWr7DmO8BM4tuqSFQ+ucC6HDBGLEnXP9OB/2zfHQSikjjQNAECaqKko5kyoTTKoqLzdFWVPKMcacEmt9Xa+bLrXtyWvTNof+ePQXw/b6sq533mFRqfPzwc4NJhADRvlm6JquPTkAPUwhImujtZYxjggzekIIWOcgCpxCxiWngFBKCMmqVbWqI4ibzebuxQtM+DiOCcLd9jrPFSY8RSRFZlOIEVAhFWcIQlyUmMv80BIYADIxGezmhDIAIURumob19W0P4tvffQN/8cJaP8+zHqfrV6+FUtaMQq0ARHbuTvsPcTJk0JuNSgkgQK8vrz45bXSPcZCcI1VknGlnnDHVqsiKryCkP7376fn5wzSpqtzGGDlnlFJGxGKIYq03xoDosqJgjJl5nsyUYAApWW8Ywev1dolI6/t+no01oapWQogYfQgBwqSUEpJyTodhmrWmlA/D8G/+zb+5urp68+YN5/xwOCIEsXUAAUqQNkPQkRDkrSOMEQCi1QtfGEEICSHaGmstI1RK6X3UZtJmyqQCwQcQMMYgBQhiWeZK8Wkemra180Qpp4JPo+667urquq5WwXshOJes7Zt235SrWuVZroqyKE7753HqIMQgYcJkSUiMIKXEOPUx4hA4Z8sqSWs9zzMhtK5rKWXbtm3bLpeKEIJPkTCqMo4hmo023iUfdPBOm2kYAABt2zIhLi8vMclOR6e1Xha1RVEsm/iUgDFG6ymExKjo+3EYBm1d33ZFUWGByIqXZV7k6vryoqzy5IKSlcoyxkhV1SklM09lmWvr8qoWJOiGEwTN2DndHh4aWbQQ5RZa5yLiO6FICEYgElGcw9g186RHY0zw0Vr9dDzkQ353dQ0A6vtGZcxbmyhDiMtMMiZysUKMKsWLqpYyN9oF79uhizGOs7hTL0peaK0hBiC4GANkggmRqIxIqIs8wWT1AScHmIRQQoB9ikxlU99hABOkv//D908PH+ahjy7G6G0MThuhW4CJOZ9TQIkJbb01mjKZdI9BMtZ++PSMExRZhjPqYnDBBxCyskAJvf4SUkHfvv1x6CfBNQDQe5dCKLJlGRDGcTTOd117fXvz6tUrEFPTHpwLKYF5tu1op2lKCRpj2rY1xuVZWZbF8fj87t1PKYG
qqsqyBCAdDoflYEgp5yl9+PDhdDoN42itnWdTlmUKIQWPGRBCUMIpFpBR/L/863+ppIQIhRBccHrW8zxbp1MAGGIIkTWaQEQpwZ99wXmKcRx6yuh2u6mqinPBKEsAzdYem1PXnRgT69Xau2CNxpSootjudkKKEMI4jSkFQYkx9nj6pM1IuRBCIIxDjLPWC1ywVHfO+TzPznnn/CIzt9Za+/+HhFJK86IoiyKTijOeKAYQKqHKqipUhhDS1kAEi6IoioIzJjiLCWitF2mItdYY0w/D+XwOLnnn+7E/ng6H4/5wPBzOx3GenYsRQCnV7e2L+/s7LgjChBAOUYAIIAgQRCoTRV1KKUOMnFIpKUiRoECAGZpzfzozwoObIAxC5C4CSDBm1KdovdeTG3Q/DLO3YRrHj09PWZZdXVxaF5wznDMEIYSIC1nXmwQRADHLFCVEShlCHLsphoAwVrmKKYUUs6KYjNbOcalUmVORQ8khoTCBhAgAjGAFSQlQAQGKMRKaM0zmw6dPD49YZc/Pp6eHjykmjKhUuVR5JgSK4bR/ev/u7aCnBJwe+v2nt8gZOx7a06lpe0xEkRceeAAjRfj5ee+dl1IlGPM8V0qmhLwPC9YrpciUUpmkhC07cSElItjFcHV5+eb1myyTMYZl+0mISCl1Xde27TzPUqrVqp7m/t27nx8+PhBM7u5ebjZbrfXxeDTGUooIoQjBPC9ijE9PT9OkCSEpWmuMcwYkEBwIHoKIhciIcw7E9GcobqEAAZAoISiBrmkhSquyAlF67+syz/PSWlsUxZJjLoSs69X1xU2WZe04/PT+p8PTE+fi3A+fDs+//OrV9uJi8Wa5ubn58PDu8enDYf9JMMKoiIFYq7mcrUNa6+DTNM1/NglbEDsA4LKyTSn2fbeMZ4tIYHlBHwkhIQbG2CaTWhpOKKNCaw0QhBgJIZTKjDHRByHEss5a1C0LSk0QVSoXjKeUjLNGT0137oZ+ms00u3meJ62VyhhXEOJp6FJKhGEwoXkc+7bbbi9UduGtbbuhbdvdqsScRR9wtM3z4cP7B1Vs797Us+0ZJVxZChTEufeeIM+xpXQAMYEUjNEAo+vra6XUfr/vxy7GKPguq9ZCiLKsAcQEI8Y55xIlwJjAhPlkvHeyVJLxxJKUKsToQ0gJ8iznRQYQTwAAyCGAACQIXAIeAgwiThAiBEACow8PHx9//vGHD49P524euzNnKC+K4/nkv/12GjpZ5NM8DF2XUrrY7DCX89R+evc9BPr9/pSx7P7+PsFweu5Lurm/e0UYP52Op/MBgMgYQ5C9eHHHGDmdTkIIITjFOMuyoesZ5S/uX5aran88/PY//+6Hn38WTGg9D4MGAYSQ5kkPYzcM3fKnQghd1y1XOCl5URSLCcp2u0MInc/nae6aplk2/atVVRSF4IpzEWNYOEiIMp+AEFLk2W63IxiieZycX8wPE0iBIYgxwxhFH6K30zTRAHYX2+riwlr7/PzMOb+6unLOfffDDyHE6+vrdVWDFFfbzXq9bs7HWduP+6fJDC4GSilFaR6Hfmg+fHjX971kPIQQoKeUTtP09u3bLMu22y1CRAjRNGcI4WazWQBjpTIAgLVmIVkopRbV3LJRnabJGTPHCCGMUiqa51J575+enqZp8sEyxrIsQwgv3NoEgTc2QvBnF9UYAaW0ltU4T4zQPM+llNvtxTjMbdsunzVjAsLYNvtpWlkzMyaChy4GYycAJiFUWZZLjHtKybkACaUyqzab/ePP3/7w7r/6r39J1dpnqqCZjTNhHFEVjI0uG8bZRoAQWYYxAFAmMgKR1noa9eJdSQnHiCPIMCQ31zdc0Oj8MEyH/Wmz20aQICYMC4QRhJBgSCjaZpvkA0fETxoKAAjDgACAQEoJxJgSRHGJBgMxARS5Kh7a4f/5+79r2pZnJSHeTSBAP5lmGovn4ycs0c16t15v87Ku1yuCyHq9HvT822/+w/fff/83X//aeu29X5Wbqlw
LwW7urs7N4fnpOM0DhGG7vdhtbzabDUIQQtQ0TXAOAVgU5V/86ldVVXVjL7n48suvunPz7bffci6VEkIxRFmmEmMcIzqMXdd1WmulVJ4rjCklHEI0DIP3HsJkrZ1nbXQQPBdCABhTSlJkkgkps7yuACbTNMx6RAmUqxolgCEi0YdlFzlpHWNMPgTnQUzWWgRgJpXkgmMiuSSYdlPnvZdSLj4tUkqZScYoZaTpuqDnEALFAHJ0s92ussL18/7pYbPapgS7rplnI6jgXGRZBiIEThOCpMwgxNbElEyIRmViMVRbnDBCtEIIqbg1flGdhhCKoqzr2jm7pOp6a12IxjnftIhgbb33nnEiMSuyPMaorcWMYkTbvpu6cbGnhRCDEF0Mfd8DACAGCCQAWJlnWSZtaa6v1yklo522JqWgGHXGRgABpNCDiCAieJ5niNBC/aiLcpoGhBCmSHJpkQJQff1X/+z+q18BICSXNOMZu7DWhmAtBIEQyGmIyIeEEDROa22VzIO3WltKUJ6XhKBZd4uIpaqLlKDTjhKe4ry8ECU4ApqSc44Scj6cCeX8IoMhDl3jncmqGmVV4h4kDCGOACEoFkZnAAgDlADAhPzmn/03bdv942//Lsuyal0DF4ZhcH4WimsbKlJsdtdXV1cARuc8ZFBxsaLldnPLaH55fesxBhiXmRqn5pvvTpQJClmeF2VZ5Xnuvev7Tqns4uK6aU6CMV4UZtb39/e77fY///73H58eb6+uV2VJATTGYExijMMwYYzKKtvttl3Xf/z4mCIS3FJKjbZKZXd3d3VdL35YjBGlcoJFrli5qpepaRmbIYRCKEIpSCkSDCnP87wuau/jME1k7NuFvhadn61JPiAICUSUcs75xcV2tVotMYaHw8EEt95uOaXDMFBK7+7uLi4uy7Lsum7xXNDa7Pf7GGNd10WhJgzGXlv9adE4C0ypoNbaaRh9TN5bgFHOynPbaPMMAVi+aWvtMrcxtnhZu2nUznlCKBO8KEop/TzPhBDKOYaIUkacSwlqraMNpcrU7iKAlIIzRkefIMGLNxuI6eJyjTEGCUkmQVVYa5e6IqVcfi7L0ns/DIMQQggJEmKMIQyM/0z0CMhqBDml3gbBuFTch4ASADFRygmFEAFOM4fM+u6FWF0ASOUqTyilCDnLKJHjPPmhsdZDEFVWhqenczM23RBjGlNIPvgUEVoU4jjGkGVlnuecSQAixMQnD2C0szZmppALJYQkMUGACCowSClaF7T1zkgoQGIYwpRcAsmHESaEIAfQQYAwpAvBOQK4vtj9d//dv/ri7vYffvf3AKRqt95stiklQjDnYru9kJJzroyZj/vnLMtjHiklF1V9f3WjlOr7PqXkXdTatm07DF1d7f4szl6IN2VZSs45Tp9iOJyONzc3EeGHjx8JhZvNGpA0Dh0AcGnsXddhjDEWXTsac/IxyEzc37/UWhNCCEF5nl9eXmZZZoxBiCyeA4t7SgihbduFzbmstlNK09gv3zLjMkRgY6CMlDQnbpwJIYxzmuUQQp10URRZllHMpOQXFxd5WTw9PQ3TGBBQVSmE6NsuL8v7+3vnXNs2CMHDYX8+n8uyjDFcXOyUyvq+b5oWgDSamVJaFMWyWULWLFhBCAEhgDHt+iYGxzlHCDAmKKXjMENAl9h0re0CXwuZjePgrSGY2uCfD/tMKkEZYSQl0Pc9QkipZbM2EIb7cbZ64pwhhMau74Zps9msNzUntGkaCCEXFGAEES1AlhcKQdL3/eKx8edrRtd1C1l12SQYY2I0GGNKozEGgrRoOFb1pm2atm13uw0AwLuAIKYsv7x5/f79e8wFJDjhBCIctEEI+GRjskulJ1j4APbHZpoMACC6ycWAMKYEQUistc/PU13thFB/5tgSQo7H4/F83lxeEIiCi9o4kQmM8cV6l3Ext/1kp+3mAjGZOIuMgkAQhAgg70IIPZIUIQEBWGjOCMB5nN/9+EfE4XZ3qc00aa1kfnV
1JTgFABAsGMPJhyovIATWOhjT2PcAIZRA9A7EMI39OI4JQQgho58F+E6bZ/1U13WWZfM4mnmepllSThI6Pe+bppG5LJWEKFirhRAxgqWrr9frEMIy3xtjQghKZYSwGCNjTEq+HPQ/Cb7TYjW32EEvLotLrM7CiCaECCGWi+s0TVpPQ99+tpvGFMcQz+ezKkoAAMVECVnmxUKDOR6Px+PxcDhEkCjG29UagGStzbKMMTYMw7fffnt1cbHdbkdCjvu9c+bu5Yv1emWtmSacUkKURAhGPUcIRKa890tvwgDKPEMI6ZOllKYID6fjdnNxsbvibFr0KMbOAEQpF/W62+/3Dx+PlPCyLAXjbdv6IpcoaW36vu+6brvdMsaarvUxLJEK3kVCSNudtZ4JAjCuNQZL1MKpbSjheaGWDwVBsnziS41RSi1SoaUbhBDW63U/DnleLncJlGACQGsdfcAIEUI+Pj2mlC4vd8sH7RJgUnGVVav1oTnvT8c3L18RAubZxhBghNbacdLtNNjgrbUhuBDSZ/cyGJ0HCKHDYb/42TPGlFIppXGesiyz3jHBy7KECYx9n8u86wZrbV3k5OZKj/bt739fVznLKaQCAAExBcE7PVIKAZUBALCo3VNKIECInR4ePvzQj8eccs7KmcwIkU8fP1xdXb1+/ToluES4ntqOMQJAGPU8z3Py4XH6IKXM8xJCHEKYhznLMsmptTbLVFkV57btui7G2LatHue6rqUQf/O3/6Wex3cf3s1Ge21mrcd+8t6v12vOxSL1WsaYruuW944xNmamlPfD0A/DxeW2KkrnXFnWIYTFkM9ajRBBCF1eXsboj8fj6XRaBAZFVSKElntalmUuuH7srbWkWK3mcWrHAVsbosMQYQC9sZRSTiiBqB36eZ6VyqP3epqttd55a93hcJznWTB2Op2WmFtrbTt09scfT6fT0E/ehxACwmlxcuScLxTrBdOGiGRSpZR2O5RSenx8BAlTyoyx1jqAoPe+awdMYF2vhZSKO+9iAkgpVWQFRfjT81PTtX3fa62ttYvk13uPIRr7iQkqpQw0EUKyXC4T84eHd7Nxi1TCWi9VbqwfxxEAQHFaBNeU0hjjQmVdjMpCCFkuIYTWaee4MTNjBQawn+cY/fF4vr6+vb65OR6Pz8/PlxfbGHyCQCp2PreEkCzLQorfffPHY5ZfX19+Tv4KIcaofTiPbT8MDGGGSaAAALA4qXCWOWecN3/5l39RVYVzBgDVNM3Hx0+//OoXUsqiKKZp8tqsViuZFb/9d78Tkn399dcgoWBdwDDGCGOKkGAAPAAYxuTnRDkgGEWSQPq8a4PQOffbf/h3BPvt5kI3TXD+anfVT+PT06eiKBDBBDPnDESICWq1maapWpWU0/3THlFifXQhAAA3u8vFjBZhgDxYJIRXV1dKKWut1iaE6GGqNuu7F68+PTzM9jtjTCnL3famKuYPHz4cDserq6u6ruu6JoTkefb09BxCmGc9zxpjbF2ggmNKECSztvM0lWUJIey6bskfoBQv/E5r9QLsLsJGZ6wxJsXonJsBBADkQpYXl2S1WimRSZkNw6BtTCGeu0Y6yblACE3Hw+Xl5cXFxUI51lqH4I/Htu+7hfuZFcWfHiwghKgJhBAOw6C1ZYynlAAIf2YUhxAWkjNjzHn/8OFdnpVKZRDCu7u7xeFwmiZjDKXUWjcMY0ppnt6WZVnlWSYUWWFIsJRy6WspREzJbrcz2nHOg7dd1ymVY4zLMld5tjyZKaWljR4Oh1PTLVbaXKplEbTciZdiv0Dry2th4BGElVII4nEctbbTqBdWnwtOa+29r6owz3MV49XV1dT24zgAEDAlAPClBfdtU69Wr1++6trWrNfOeD311plJj1rrtmn2+z1CiDHhU0QEwoQowjLPtAbXNxdlWU+TjjGemuPT/pMQijFGMRFSppRkljHGxnF0Znzx+lZKCZx1uttdbWVZA5gDTGKaEOQAJuCSxwExiCBGAIK
YAMIJAEqRkuK7n34fAUgBXF9fMykqSv7iV3/FGGvO3cJALvKcUjZHQxlDAEKEV+ttkechBOdM27bTNG42G+/DAuOEEGMMwzAopRDC1to8z0OKXIpvf/uPv/v2G54zKVVW1S9v71fb6p/8k79++/at1lqpvKpWizvi7Y0cx/HcHJcaKpUoiiIr8mjcNE2C82Uzbq1dCPMLURTCtAjBl7lxKY4LyDtN+uPHt0KIXKoff/iJ9N1IMFZchBCEYJTSEHyC4OLiwhgzz/Oihxzarutb7321rtab+ng4d12nlIoxJgi0NSFElatClTEE55ySudbae6eUVEotDJzz+dw0TZ7nWZadTodxSF3feGullBfbzdN+v1hmWz0HpyWXVVU65yHE7el8Pp+zLNPGjONY1zXjBFPy1S9/seiVp2laRGSccwAi52KzWS82G1mWcUqXmQohtFnv9vv94XDYXOx2my0hxNgZQjh10zzPSqlK5YVQy5xGCIkQLM2NS+Wcm/T8tH+GGEmI+rETkiGE5nl8evyIEKKcHE9nJnhI3vtIKWWUHw4HBPHtq2v9k/7x/U+KyvPhqJ3Vxr37+e2P3//Qtu3idCmE+BPoAa2dCSEgoa6dry5kgkDrkXP++tWbPxtCSSkhhBGAaeiKorjYXkYXASOsyuPJQkYiwSAaiAAABkIaGY8pKgAjDDBSgFJKCzwQfv31X/723//f7x8f/uqvfr2Y61OKCQRSCklZil4KwTmP0VtjCCEQAiHkapUpmQMAHj6+x5gqRRa3/rquzTR777fb7TRN799/IBD5FC8uru6ub56Ph/dv33315Zt6t5KEDdpop7//fo8Qub+/x4haa6fRAgBAIlJSrXUMYL3aXl1fSCnPTUMShJw7axcDdIxxlmXTNJ1Op4U8towbxhjn3DLoIYKtd58+PmGMVZZxzl2MEWHy/dsfLjdbmBDGWAhOCAkBj+PIOb+/v99sdj/++OPH9x+stULyoshOp9OsNUCJMLpMz1wKKniatfdecqHtDABggviAFBbr9XqhHy2VeAGuCSEx1kVRaG1BQimBh0+f3r59CwCs1zVBeFOVMQWIESGcAoTB4nkEmrF/enqa53m7Wa02a6Zkcm5xrFimAgwRAKAsy3nWTXO+fXFV5JUzC5YMqqJwzoO0loLNRkdvq1V1PtuUUpHnlJAsk8vuoiwyznmMsdcTjpAQAjESghln26YZho4i6L3fbFerzU4NXTf0kvN+GsZRq6IkBC0bW4RQAPD50GwQCBD8+MPPiko96kN7bprmj9/8/ng+QJRiipBhwgjGCCFkvfMuQBjmea7rDaXYmNk6m2elEOJx/zzOI8s4mhesDwAYPYBaa+MNd7Ys67ltp2mUNUHWA4wAFClFXubQ+jCdEM8AJn9y69A4OgTgzc3dvjnWZbnAroLRBajB6AwAQARhDFJKueJCqdkOzfNBkKxabyileVYyKkJ0C3lz8cl7UxRcCIxx2zSLGMU5t386fPPHP9zeXRdlOTaDqKvorfX+cDhored5XHLvlnuwMXaahyzLrq6unDeLxbfgXM/z4neyNOGlVC2/vuiJEULLKLFMLkVRLNj/zfU1hDDLsqqqFlUGWW3WH58eGcT39/d/3mddXl6GkN6/f3h6fu66jnBKCYQEOeCtXmhqheR0aXYxJsaog8ZbN6Th46cHKeWyPPXeNk2zaM8XGIsQsmwYl8slSMgY632ECO0urgEALMtISv04+hipkpiQ6BMGOKUACX51d7ur109PT8fjMaWkp0koSSnFGH12qJRqIXuaeX71+gWlWM8jpXxXrrthCCGPMd3c3MQIjqfTfr//+PHjer1mjLVNX1T5okoTSgghYkxN00hPq6oCABpnF7M+htG7d+8O58Zb1/cX46Qfn6+qTF1st8659+9/xhjnWRlCQggAGEOMIYRxHAftnSGnpun79g8//vHp8BSMLooqRh+TR5QQgiFIMQVOmRCSMaYUhzg9Hz8ltEspESx
Oh6MLPiF4OB0lppeXO87pPI8xOO99fzyH5Kv1quDy/HzEDkICAaQIzkQob7A+nzMGvdZkLRFiMXoYRxBN2x1/8Ytf9G46nc5XVzeLroNzLmRmrYcE5kLGCKRUKsu6rrNzmEbbBX/uBynler0mFOnBSCkZE1prxnlZVUzJRep1Pp36vp+1di784utfrtf1MHaUcBsihigk9Nd//ZsY436/r+t6oTB4bxGOw9B1ff/y5cuiuDqdTu/e/axUnqkiBLcc5XmeT6fTPM9SyhjBMAzLjL0ojJfDsNAfCWHL3ZpzPk0TQCnPc/h//O//6zxNL27vKKUAAy4FxYRzfjyfvHVLgWdCNd0ZghTcHFys61pw1XWdc345fCE6jgVEcdTj4ia0uFwsexhjLGOMUqIy4V1cCo/VOkGkjdNa51WplKpWNWPs8Hw8HvcAgDdv3gghuq6zerLaweQppRATH0OK0Do9DL2UcnmWYowBpDzPJaOcMYYRwMjH0LVD17b3N/cvXrw4Na1xNoW4MD5SStaF9+/fx+RXqzqlBABcrWopOUJk0R+FEObZYggxhIRx51xMxnubAPm0Pz58eD9N0zhP9WpVl2uFhR51N/eXF9tNtUsJztZEECJOLthCFj6kj+/e2qkHGD4+P5vZUk4IZwJThFAggGcKxgQBIIQQKjBKMNgQAkacURkhyIS8urqDDJ3PR5jAbr25v7shhDx8+ng8Hr768uub62tKcV1mMYJ+1IwxygSVmEEMEQEA/fDd95MehWS3r766uP1Vgs52D9DZtu2AS4TR47lXSsTkrXPeOYJFAqhar7eXF8/vPyjOAMWHwzPHqCgKXuYgxNPpNE9TURQLu9Zpl+d5hP7Tp0+MMW/84XzyIXAlOedvvvwKU/r9t99QhIs8l1JKlVsfCaPRp+60pwwTzMZ56tojIaSq123beu/v724wxtZ6RsXh+LhI2hlj06TP5zMhZJ7HJfkLIZRlcpGwOxcuLi6urq7atu37NsaY56Vzbhz7cRwZVSTG+PLlS4ZJ13UAxIUpCSGQUjJCPgPDJITZeW8BSOPULzL+WY/W+N1ut1pXi5uXt55STij33gvOvfcQYGNm78MXX3wBQOq6Tkqx5J9BjKWQQqXnJ6uU2mw2IYTT4Xg4PCOEXr58KaV83j+G4ClljCNGJec8AuBsKOrKWsvO5/P5bEOnuMrzvKoqznkITnAeQtB68j4yKYidDu2+/c/9OI5ZLglm0zRlueRChjRf31w2zfn5+XG73XIunTOMo8VMTirOmXz69Pzp4eEXv/hS5rJpGkLyPFeEyqurq1999UUMoOnOj/vnaZr2zb5t++jTZKaPn56UyMaxV7lElDztH6EHu5ubpu8wSEKK1W7rjKeUGj+vqvrV/QtA8Tzp5Ly11iUAkvfeR4gTAs770/7Z6okCBCJEBD49fizKNcJ8tm/tOCefemvef/yklOKCeu+HYTqdDpvdBkTw2V/V2tOpsS51XXfujq8+ffzn/6IKID38/D7jbF0XJniG1ctcLTuDyWjnqJ6tECoFe3h6IgQwyTARRb5KfgIgHp/3nDFrTIx+Qa+qvEgh/vz0SAk/NAefYp6VZbHGGDJOhBBumnCWEYiOhwOIkFAJbRQqAykhONar3LnQNA1lDAA09JrhgWPy03ff/6f/9+9vbi/LohYiCzGeT4dFpL6M/hDCPM/ruq6qylrLmFBKee/fvXv3008//fDDd0VR3d7eCimbppnGkXNalqXzEf5f/+f/VubV0HZLroG1emFs/3lzYoyVMpv6IcIoBJ/1OI7jYkK9ENfKspRSLpZGMcHlGno+HaZJr4o1UxRCWNdlSjBFuAATn43vMG77vm3by8vLoiisj4sYlBCSEgQAFKXkksMEAIAUoXEcV6s1Ffzhw6e+7znnEKVx7oPzGMAU4+IhgwlxzmVKjZP2MC3OYafH0zybu9vr+/ub7W7NGIkJnk6npSE+Pj6+f/9+s9lBmDjndb0WgmEExnH
eP+5/+P7HX/zyi9sX1zHGlCCltOu6GP3d9Z1zoesaY+eUkvUREC6E6Jvz8XhmhAnBjTGIkHme3717F0C6vb3Ns3KYeoAgTCBGoE3/9OlRyuyLL77AETptjA8WRD13zkVrIkhoMuM4tkbrqR/KYj3PY3M+AkghI9M0rIvVy5sXeVUbM+aFWK1WgucQpYeH91xyQmgmc5VLENPQayboX3z9iwQIDP7i9nJV1cZ6qx0jpHeDs0HiQDGTUo6T8SlKmWVZFoLzIXHGFn7KPM8hOADAMoKXZYkJ++abb9qm+eLV64vN9tOHB0ppwujQHqty8+LuDkLYjc27d++M9X/7t3+LMf7+u+8QAq9fv44xHs8nN2nC6Wq1zrLi229///33P3LOIUzrqtbaYoxVkT8/nbr+lGVivd1QzDlfAoP94XDIsuz29nZJ/qrrumkaABCjYtmOvH//Pldie3FVlmWCccGni6LQWhOC8OPjx7yoFnC4qMr903NwFhHadX1e5lmWDUMPMKirIsFIab0wxvq+F1KOw/Dw8HB1ecOYgNBhjPM8996mWD8+/rHI67vd1axH7yNn0kXfNO2frB45AMDHuKxpp2naH89FUSxglnPBOVdXZQSh7RpKKSdCKeWcdc5yQRivpeRKqcPp+Pz41DWtd+7u7o4LMU3TZx2Q9/PQE0Yvd1sQU7COcyqlLMryeDwuFhV5oQgh9/cvfbB69qvVijE2z/M0DQTDxc5S5lm93i62Akv3y7Lsw4d3x/3+xf0r5w2lhGKylQohggisJK6rrCgqzun53KYEY4JXV5dN01htSyU3qyqACBEOIYytiiY45+ysX7142bdtRJAoMevRzt7q4L2nDAIIrbXjOHdd1w+TqorNZuVjOOwbmNAQbBpPPlifDBGyGUYpJc0y66PkUsiMS1ll6s3renuxATBW5SZbF/3+5LTjghCQrI25yBFLMZl5nLpuWO12HNO6rgmGwZKEQN8Pfd8TsiiKbEhRcA4AmCYd4nSx20kh2radxwkjxBjaXl7sbq9/+P77v/u7f7verRFC1oWqzNvmVFWrlJL34Xw+zfM8dN2p6T9+/Hh9c/OXv/paKSWlZFQQhg0AJMsk45vNpizWjCNCUN+PCYDwmU+Abm5uECIfPnxcGKBt2z4+Pp7P59vb++3mIiXMeTZNQ0r4cDhRBtfr9RK+9PHhPVmCloRkx+ZsgplHeD6fOKVVVTGCk/NEkOZ0CCBxgdfrGiT2/v37cZh3FxsAIef86dOz1npbXhCiy0wlEAhk5dU1xlQbN00ToTjGiADghAIlrbVt37VtJISEEGECjDEp5aRtURSccx+sVFxBNk2TykRVVNM0WmullBjDvu8hTFVVEkJmPXLOf/nLX57P53mec6kW4wlOhbF+ITgZZ8qy3G0KrXX0vhtaemQhJIQQRCkEFyLhjF1d3vzww08QJgjT4fC8QOvH/WkYhvV2u6RHLitnjDFFuKpWzsyEomp73U+j9x4Q7GMYjx3EUDBmzNwPTQghz4rjufHOX+8u9s+H9x9+/vLLL8uiOh6PTXNys729vJJZlkBAGMhcDuOox2FVr8mKpJRc8J81ay6kCBknUuVSZhgCF7y3ASUwzINPtiiKGJIxzlnddR0EuK62eUaUzAXnUnKZSQSy3dXV4fT+4aendV5CgBAAsMhxgoohwrkZdC4tQsiBCBGy1psQEYgpRBhhmZXjOEYXyrJs2v756SQkgRC3TX9zc3N9cdk0DSHkeG7bab4W3BmbZdI4RRi9v3s9j3bs9z/88NOs//D69WsA4t//x3/48ccfv/76V7/5zX/xq1//jZ4GZ9OL+zdCKETgbMLbdx9fvXlxOh3+7b/7t4KRqihvrl+8un9NJB6nfhrqpmk2m83vf//7//Sf/uNvfvMbrXnXdZTS1Wr19PTp7vbFxcXFzz+/c95jhKQs908PwDu43X769Ekogf+n/+FfQAjHYcQQ6XGyxux2W4SxsxpCsPD4irqqqoo
xCiHSWp/Ox3GYVpuNtQ4hKJWKKTEqEIRGTyF6a103ToRSQujxdAjeO23GcUgpGGeGaeBcCM4JIYyxBMES8pHlBabEhxiBxwh471KKi1MnADDBqI0OKQolMcYY02EY+27U8+ScWwqztmbSxltHCR2neRo0ggiEwAW3JsQAIkjeO85kUZScs6JQGGPOBUhgGObj8SilKop86UhSZqdTSymWRRaSq8uCc5Zg8sFTKsqyuL66JARP0wwwoZRp57x3MUHKGKH83LbBu7Is58l454TgYz8xKZgS09SXRTGOg5lGVeQxxa5rGWMAQkKpMUZxgUDq2maeRh88RmRo2hRjXRQv7u4EZxQiBCEi0OmBQF9klBKZQtTThGJSXFxud1+9enV1dbnb7m5ubwmAxhlKqJstwvjdz999/933QmWEc5Vv8vUN4xJqPespesg551Is1c17Z4wGEBIMVZY7F6w1znkXwnq9rqpSqkwptd3scqWM0fv9ngq+2e2c1+/ffmibgZKMimK7u3bWztNAs2x3cQMwffPlFwEkY83LV68ur65Fxsdx1kZ75wCEKhdt25yaI8bs6uo6k0JlYl3tplFPcxcTeP/4vm87JSVjrOu6sq4udhfffffd+/cPZVkJwa+vrzHG7z+8naa+KGS9qpw3xkyvX704N80333zDuaRc4P/5X/4LQsjz8z46LzjfrFf1amWtDQC4EPtxIozXdbVer2dtplkDEDNVZlnOOF+In8vKCRPMOAspGRcSJM64GKIU0oc4jxomgjDabjdCSUxYtV6rImOUZnk+z/NnMM95PetFtey9D+GzH7X3njGKKbbGzvPsvbc+xBSXhBvGeN938zwv9KSyrILzmGBKCQQgBY8xIowUea1kZrQGELx+9cVqtQIwMEaXBCRCYd9PznmEYAi+LGuEYAgRQrDe1FW9zvMcweisSwBCQgCGMMUUY9/34zRlMsMQUYQzlas8QxilhJTMBBcIQghJluUYI6uNdf7q9noe52UMhSlZ71NKS7wkJSQB4KzlnC/BBUKI7Xrz4u52s1lTgvNSLRQsPc+zHjvdO2+naZqGXtKCULbdbHa7XV3Xr169Wa03ZZkb67z329324uISA+TcPPTHeTQJorvLl9vrFzQvojbeGQ9QcH4YO0ww43QchxC8lMI5q3KFKNYmcJmtN2UEi8u8ZoxACLqub9tmmodvvvmdc6Yoy4eHTyn4q6uLFy9uIYofP77d7OrZ6m++/cP28oIJdnl1acw0jv3lxUWel9b4T48PQztO8zhOQz93b99/eNofvLN39y+uL1+em/N2u9qud+vNKiSfEtlsNymk/f5AKQshIoSlkATiV29eU0rbtoMQ3dxcCyEeHh6Op713PstUva7HSf/dv/8P/TDUq02R1/h//G//uTEmpKgyxaWIKVlrHx4evAdMSMIFgKA7t3aeEYYRxGmc1qstoTzGVFU1QjDGRAidph5gzLhMiAopGKVKZjDCrMxTQkqoVy9fSCUgQogxlatZaxs8odTM+nw+M8YSTARTjBElrG06a4IUEiAoM0kJjSFmWV4U+RKws92s67qSUhBGBOec87IsKaWE4PVqvTj0M0rKqsqKzDmfl2q9rut6hRDpuj6EYOxorQWJ+mAhCgn4YeiVUplSxujgbT+c61X1T3/9ay4zIRWBCULMVQYRDMkB65Y8MZVnUklGGaWUcZliooQjgBnlPljnLOc0xnhum9N+DzEq65U1ev/8nABo22Gchvv7+2X5HUIw1iKMtTFK5lfXV1VVzfPsnNluN5yzx6fH/f4RRDB2PYiWML6ur4ps++L+i1/9+i8vLq7KqqrXdVWuOJOciZRSij7jHBHSns6ntjHB7vcHmZe//OKrgACXNKagTZtxhDCnnAvJMcLOh5SAFBJCZJ2PGMm8QpjGFLph4IpTSjDCIYCmOZ/Pp9nMGEEp2Ha7QZjEGDGCGDNn435/fP/uHSK0XFWAsFzKjw8PlDDGaN+2IYQIwLt
375XI/vpv/snN9Yuy2hAq6npVlavd5ur+xRuEMSEYE+KDk1zFEMtK1fUFwTQFYK1/8+bLerUpsvLu5h4itN1uXr9+rZQoiurm/kWe5d9//5MZ/dd/8avLm+vf/+GP//Ef/qEoCwQBF+r/AwHU3/3CQv3GAAAAAElFTkSuQmCC", "text/plain": [ "RGB4 Images.Image with:\n", - " data: 256x256 Array{ColorTypes.RGB4{FixedPointNumbers.UfixedBase{UInt8,8}},2}\n", + " data: 256x256 Array{ColorTypes.RGB4{FixedPointNumbers.UFixed{UInt8,8}},2}\n", " properties:\n", " imagedescription: \n", " spatialorder: x y\n", @@ -45,8 +45,8 @@ } ], "source": [ - "using Images, Colors\n", - "img = imread(\"cat.png\")" + "using Images, Colors, ImageMagick\n", + "img = load(\"cat.png\")" ] }, { @@ -127,11 +127,18 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 7, "metadata": { "collapsed": false }, "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "[09:48:53] src/operator/./softmax_output-inl.h:187: Softmax symbol is renamed to SoftmaxOutput. This API will be deprecated in Dec, 2015\n" + ] + }, { "name": "stdout", "output_type": "stream", @@ -156,7 +163,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 8, "metadata": { "collapsed": false }, @@ -165,7 +172,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "lynx, catamount\n" + "tiger cat\n" ] } ], @@ -186,7 +193,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -195,11 +202,11 @@ "name": "stdout", "output_type": "stream", "text": [ - " lynx, catamount w.p. 0.552236\n", - " tabby, tabby cat w.p. 0.118180\n", - " Persian cat w.p. 0.114004\n", - " Egyptian cat w.p. 0.090389\n", - " tiger cat w.p. 0.086328\n" + " tiger cat w.p. 0.415807\n", + " tabby, tabby cat w.p. 0.235859\n", + " Egyptian cat w.p. 0.161553\n", + " lynx, catamount w.p. 0.136078\n", + " Persian cat w.p. 0.007109\n" ] } ], @@ -213,6 +220,15 @@ " println(mx.format(\"{1:>18} w.p. 
{2:4f}\", l, p))\n", "end" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] } ], "metadata": { diff --git a/examples/imagenet/ijulia-pretrained-predict/imagehelper.py b/examples/imagenet/ijulia-pretrained-predict/imagehelper.py index bdaade19ff28..7584b45d9313 100644 --- a/examples/imagenet/ijulia-pretrained-predict/imagehelper.py +++ b/examples/imagenet/ijulia-pretrained-predict/imagehelper.py @@ -20,7 +20,9 @@ def PreprocessImage(img): # WRONG channel will lead to WRONG result #------------------------------------------------------------------- # swap channel from RGB to BGR - sample = sample[:, :, [2,1,0]] + # sample = sample[:, :, [2,1,0]] + sample = sample[:, :, [0,1,2]] # actually, in this pre-trained model RGB is used + # swap axes to make image from (224, 224, 4) to (3, 224, 224) sample = np.swapaxes(sample, 0, 2) sample = np.swapaxes(sample, 1, 2) From 32267131f83095a34676102ebd1fcf58b73d6d02 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 00:10:09 -0500 Subject: [PATCH 203/630] complete doc for symbolic-node --- docs/api/io.rst | 4 +- docs/api/symbolic-node.rst | 123 +++++++++++++++++++++++++++++++ src/symbolic-node.jl | 146 ++++++++++++++++++++++++++++++------- 3 files changed, 243 insertions(+), 30 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index 6c9d71836108..e9d9c04e9f3c 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -317,7 +317,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -460,7 +460,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, 
default=4 :return: the constructed :class:`MXDataProvider`. diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 6106b54e6a8f..2868bf27681e 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -26,6 +26,129 @@ Symbolic API +.. function:: + call(self :: SymbolicNode, args :: SymbolicNode...) + call(self :: SymbolicNode; kwargs...) + + Make a new node by composing ``self`` with ``args``. Or the arguments + can be specified using keyword arguments. + + + + +.. function:: list_arguments(self :: SymbolicNode) + + List all the arguments of this node. The argument for a node contains both + the inputs and parameters. For example, a :class:`FullyConnected` node will + have both data and weights in its arguments. A composed node (e.g. a MLP) will + list all the arguments for intermediate nodes. + + :return: A list of symbols indicating the names of the arguments. + + + + +.. function:: list_outputs(self :: SymbolicNode) + + List all the outputs of this node. + + :return: A list of symbols indicating the names of the outputs. + + + + +.. function:: list_auxiliary_states(self :: SymbolicNode) + + + List all auxiliary states in the symbool. + + Auxiliary states are special states of symbols that do not corresponds to an argument, + and do not have gradient. But still be useful for the specific operations. + A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. + Most operators do not have Auxiliary states. + + :return: A list of symbols indicating the names of the auxiliary states. + + + + +.. function:: get_internals(self :: SymbolicNode) + + Get a new grouped :class:`SymbolicNode` whose output contains all the internal outputs of + this :class:`SymbolicNode`. + + + + +.. function:: Variable(name :: Union{Base.Symbol, AbstractString}) + + Create a symbolic variable with the given name. This is typically used as a placeholder. 
+ For example, the data node, acting as the starting point of a network architecture. + + + + +.. function:: Group(nodes :: SymbolicNode...) + + Create a :class:`SymbolicNode` by grouping nodes together. + + + + +.. function:: + infer_shape(self :: SymbolicNode; args...) + infer_shape(self :: SymbolicNode; kwargs...) + + Do shape inference according to the input shapes. The input shapes could be provided + as a list of shapes, which should specify the shapes of inputs in the same order as + the arguments returned by :func:`list_arguments`. Alternatively, the shape information + could be specified via keyword arguments. + + :return: A 3-tuple containing shapes of all the arguments, shapes of all the outputs and + shapes of all the auxiliary variables. If shape inference failed due to incomplete + or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. + + + + +.. function:: + getindex(self :: SymbolicNode, idx :: Union{Int, Base.Symbol, AbstractString}) + + Get a node representing the specified output of this node. The index could be + a symbol or string indicating the name of the output, or a 1-based integer + indicating the index, as in the list of :func:`list_outputs`. + + + + +.. function:: to_json(self :: SymbolicNode) + + Convert a :class:`SymbolicNode` into a JSON string. + + + + +.. function:: from_json(repr :: AbstractString, ::Type{SymbolicNode}) + + Load a :class:`SymbolicNode` from a JSON string representation. + + + + +.. function:: load(filename :: AbstractString, ::Type{SymbolicNode}) + + Load a :class:`SymbolicNode` from a JSON file. + + + + +.. function:: save(filename :: AbstractString, node :: SymbolicNode) + + Save a :class:`SymbolicNode` to a JSON file. 
+ + + + libmxnet APIs ------------- diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 7f993c7491b5..8b0a5b4fad5f 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -37,6 +37,14 @@ function Base.copy(self :: SymbolicNode) Base.deepcopy(self) end +#=doc +.. function:: + call(self :: SymbolicNode, args :: SymbolicNode...) + call(self :: SymbolicNode; kwargs...) + + Make a new node by composing ``self`` with ``args``. Or the arguments + can be specified using keyword arguments. +=# function Base.call(self :: SymbolicNode, args :: SymbolicNode...) s = deepcopy(self) _compose!(s, args...) @@ -58,40 +66,81 @@ macro _list_symbol_info(self, func_name) return names end end + +#=doc +.. function:: list_arguments(self :: SymbolicNode) + + List all the arguments of this node. The argument for a node contains both + the inputs and parameters. For example, a :class:`FullyConnected` node will + have both data and weights in its arguments. A composed node (e.g. a MLP) will + list all the arguments for intermediate nodes. + + :return: A list of symbols indicating the names of the arguments. +=# function list_arguments(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListArguments) end + +#=doc +.. function:: list_outputs(self :: SymbolicNode) + + List all the outputs of this node. + + :return: A list of symbols indicating the names of the outputs. +=# function list_outputs(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListOutputs) end -"""List all auxiliary states in the symbool. -Auxiliary states are special states of symbols that do not corresponds to an argument, -and do not have gradient. But still be useful for the specific operations. -A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. -Most operators do not have Auxiliary states. -""" + +#=doc +.. function:: list_auxiliary_states(self :: SymbolicNode) + + + List all auxiliary states in the symbool. 
+ + Auxiliary states are special states of symbols that do not corresponds to an argument, + and do not have gradient. But still be useful for the specific operations. + A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. + Most operators do not have Auxiliary states. + + :return: A list of symbols indicating the names of the auxiliary states. +=# function list_auxiliary_states(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListAuxiliaryStates) end -"Get a new grouped SymbolicNode whose output contains all the internal outputs of this SymbolicNode." +#=doc +.. function:: get_internals(self :: SymbolicNode) + + Get a new grouped :class:`SymbolicNode` whose output contains all the internal outputs of + this :class:`SymbolicNode`. +=# function get_internals(self :: SymbolicNode) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolGetInternals, (MX_handle, Ref{MX_handle}), self, ref_hdr) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -"Create a symbolic variable with the given name" +#=doc +.. function:: Variable(name :: Union{Base.Symbol, AbstractString}) + + Create a symbolic variable with the given name. This is typically used as a placeholder. + For example, the data node, acting as the starting point of a network architecture. +=# function Variable(name :: Union{Base.Symbol, AbstractString}) hdr_ref = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) SymbolicNode(MX_SymbolHandle(hdr_ref[])) end -"Create a SymbolicNode that groups symbols together" -function Group(symbols :: SymbolicNode...) - handles = MX_handle[symbols...] +#=doc +.. function:: Group(nodes :: SymbolicNode...) + + Create a :class:`SymbolicNode` by grouping nodes together. +=# +function Group(nodes :: SymbolicNode...) + handles = MX_handle[nodes...] 
ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateGroup, (MX_uint, Ptr{MX_handle}, Ref{MX_handle}), length(handles), handles, ref_hdr) @@ -141,6 +190,21 @@ macro _infer_shape(self, keys, indptr, sdata) end end end + +#=doc +.. function:: + infer_shape(self :: SymbolicNode; args...) + infer_shape(self :: SymbolicNode; kwargs...) + + Do shape inference according to the input shapes. The input shapes could be provided + as a list of shapes, which should specify the shapes of inputs in the same order as + the arguments returned by :func:`list_arguments`. Alternatively, the shape information + could be specified via keyword arguments. + + :return: A 3-tuple containing shapes of all the arguments, shapes of all the outputs and + shapes of all the auxiliary variables. If shape inference failed due to incomplete + or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. +=# function infer_shape(self :: SymbolicNode; kwargs...) sdata = MX_uint[] indptr = MX_uint[0] @@ -163,6 +227,14 @@ function infer_shape(self :: SymbolicNode, args :: Union{Tuple, Void}...) @_infer_shape(self, keys, indptr, sdata) end +#=doc +.. function:: + getindex(self :: SymbolicNode, idx :: Union{Int, Base.Symbol, AbstractString}) + + Get a node representing the specified output of this node. The index could be + a symbol or string indicating the name of the output, or a 1-based integer + indicating the index, as in the list of :func:`list_outputs`. +=# function Base.getindex(self :: SymbolicNode, idx :: Union{Base.Symbol, AbstractString}) idx = symbol(idx) i_idx = find(idx .== list_outputs(self)) @@ -211,7 +283,7 @@ function ./(self :: SymbolicNode, arg :: SymbolicNode) _Div(self, arg) end -function _compose!(sym :: SymbolicNode; kwargs...) +function _compose!(node :: SymbolicNode; kwargs...) name = char_p(0) arg_keys = AbstractString[] arg_vals = MX_handle[] @@ -228,45 +300,63 @@ function _compose!(sym :: SymbolicNode; kwargs...) 
@mxcall(:MXSymbolCompose, (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), - sym, name, length(arg_keys), arg_keys, arg_vals) - return sym + node, name, length(arg_keys), arg_keys, arg_vals) + return node end -function _compose!(sym :: SymbolicNode, args::SymbolicNode...) - _compose!(sym, char_p(0), args...) +function _compose!(node :: SymbolicNode, args::SymbolicNode...) + _compose!(node, char_p(0), args...) end -function _compose!(sym :: SymbolicNode, name :: Union{Base.Symbol, char_p}, args::SymbolicNode...) +function _compose!(node :: SymbolicNode, name :: Union{Base.Symbol, char_p}, args::SymbolicNode...) if isa(name, Base.Symbol); name = string(name); end arg_keys = Ptr{char_p}(0) arg_vals = MX_handle[args...] @mxcall(:MXSymbolCompose, (MX_handle, char_p, MX_uint, Ptr{char_p}, Ptr{MX_handle}), - sym, name, length(arg_vals), arg_keys, arg_vals) - return sym + node, name, length(arg_vals), arg_keys, arg_vals) + return node end -"""Save SymbolicNode into a JSON string""" +#=doc +.. function:: to_json(self :: SymbolicNode) + + Convert a :class:`SymbolicNode` into a JSON string. +=# function to_json(self :: SymbolicNode) ref_json = Ref{char_p}(0) @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) return bytestring(ref_json[]) end -"""Load SymbolicNode from a JSON string representation.""" +#=doc +.. function:: from_json(repr :: AbstractString, ::Type{SymbolicNode}) + + Load a :class:`SymbolicNode` from a JSON string representation. +=# function from_json(repr :: AbstractString, ::Type{SymbolicNode}) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateFromJSON, (char_p, Ref{MX_handle}), repr, ref_hdr) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -"""Load SymbolicNode from a JSON file.""" +#=doc +.. function:: load(filename :: AbstractString, ::Type{SymbolicNode}) + + Load a :class:`SymbolicNode` from a JSON file. 
+=# function load(filename :: AbstractString, ::Type{SymbolicNode}) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateFromFile, (char_p, Ref{MX_handle}), filename, ref_hdr) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -function save(filename :: AbstractString, sym :: SymbolicNode) - @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), sym, filename) + +#=doc +.. function:: save(filename :: AbstractString, node :: SymbolicNode) + + Save a :class:`SymbolicNode` to a JSON file. +=# +function save(filename :: AbstractString, node :: SymbolicNode) + @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), node, filename) end #=doc @@ -359,17 +449,17 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) $hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) sym_hdr = ref_sym_hdr[] - sym = SymbolicNode(MX_SymbolHandle(sym_hdr)) + node = SymbolicNode(MX_SymbolHandle(sym_hdr)) hint = lowercase(string($func_name)) name = get!(DEFAULT_NAME_MANAGER, name, hint) if length(args) != 0 - _compose!(sym, name, args...) + _compose!(node, name, args...) else - _compose!(sym; name=name, symbol_kws...) + _compose!(node; name=name, symbol_kws...) end - return sym + return node end func_def = Expr(:function, func_head, Expr(:block, func_body)) From f5f27779649779b8c49e36c3d160186dfc85e003 Mon Sep 17 00:00:00 2001 From: Andre Pemmelaar Date: Thu, 12 Nov 2015 19:28:18 +0900 Subject: [PATCH 204/630] Adds Mean Squared Error evaluation metric --- src/metric.jl | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/src/metric.jl b/src/metric.jl index 3f35e7455ac5..a9e4db2dea4e 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -113,4 +113,45 @@ function reset!(metric :: Accuracy) metric.n_sample = 0 end +#=doc +.. class:: MSE + + Mean Squared Error. + + Calculates the mean squared error regression loss in one dimension. 
+=# + +type MSE <: AbstractEvalMetric + mse_sum :: Float64 + n_sample :: Int + MSE() = new(0.0, 0) +end + +function _update_single_output(metric :: MSE, label :: NDArray, pred :: NDArray) + label = copy(label) + pred = copy(pred) + + n_sample = size(pred)[end] + metric.n_sample += n_sample + + for i = 1:n_sample + metric.mse_sum += (label[i] - pred[i])^2 + end +end + +function update!(metric :: MSE, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + @assert length(labels) == length(preds) + for i = 1:length(labels) + _update_single_output(metric, labels[i], preds[i]) + end +end + +function get(metric :: MSE) + return [(:MSE, metric.mse_sum / metric.n_sample)] +end + +function reset!(metric :: MSE) + metric.mse_sum = 0.0 + metric.n_sample = 0 +end From 0b538d1400bbbebb49b6e52810687726b4ae1672 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 12:37:55 -0500 Subject: [PATCH 205/630] make notes on MSE metrics --- docs/api/metric.rst | 9 +++++++++ src/metric.jl | 2 +- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/docs/api/metric.rst b/docs/api/metric.rst index 5f13bd7006c3..614058d18294 100644 --- a/docs/api/metric.rst +++ b/docs/api/metric.rst @@ -47,3 +47,12 @@ set. + +.. class:: MSE + + Mean Squared Error. Todo: add support for multi-dimensional outputs. + + Calculates the mean squared error regression loss in one dimension. + + + diff --git a/src/metric.jl b/src/metric.jl index a9e4db2dea4e..5bf14e52a840 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -116,7 +116,7 @@ end #=doc .. class:: MSE - Mean Squared Error. + Mean Squared Error. TODO: add support for multi-dimensional outputs. Calculates the mean squared error regression loss in one dimension. 
=# From 645091c2a9a2a039f779cc55179745048241ee69 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 16:26:20 -0500 Subject: [PATCH 206/630] char-ltsm temp commit --- examples/char-lstm/.gitignore | 1 + examples/char-lstm/lstm.jl | 98 ++++++++++++++++++++++++++++++++++ examples/char-lstm/seq-data.jl | 37 +++++++++++++ 3 files changed, 136 insertions(+) create mode 100644 examples/char-lstm/.gitignore create mode 100644 examples/char-lstm/lstm.jl create mode 100644 examples/char-lstm/seq-data.jl diff --git a/examples/char-lstm/.gitignore b/examples/char-lstm/.gitignore new file mode 100644 index 000000000000..06c798bc3232 --- /dev/null +++ b/examples/char-lstm/.gitignore @@ -0,0 +1 @@ +input.txt diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl new file mode 100644 index 000000000000..f429313dc723 --- /dev/null +++ b/examples/char-lstm/lstm.jl @@ -0,0 +1,98 @@ +# An explicitly unrolled LSTM with fixed sequence length. +using MXNet + +immutable LSTMState + c :: mx.SymbolicNode + h :: mx.SymbolicNode +end + +immutable LSTMParam + i2h_W :: mx.SymbolicNode + h2h_W :: mx.SymbolicNode + i2h_b :: mx.SymbolicNode + h2h_b :: mx.SymbolicNode +end + +function ltsm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMParam; + num_hidden::Int=512, dropout::Real=0, name::Symbol=gensym()) + + if dropout > 0 + data = mx.Dropout(data, p=dropout) + end + + i2h = mx.FullyConnected(data, weight=param.i2h_W, bias=param.i2h_b, + num_hidden=4num_hidden, name=symbol(name, "_i2h")) + h2h = mx.FullyConnected(prev_state.h, weight=param.h2h_W, bias=param.h2h_b, + num_hidden=4num_hidden, name=symbol(name, "_h2h")) + + gates = mx.SliceChannel(i2h + h2h, num_outputs=4, name=symbol(name, "_gates")) + + in_gate = mx.Activation(gates[1], act_type=:sigmoid) + in_trans = mx.Activation(gates[2], act_type=:tanh) + forget_gate = mx.Activation(gates[3], act_type=:sigmoid) + out_gate = mx.Activation(gates[4], act_type=:sigmoid) + + next_c = (forget_gate .* 
prev_state.c) + (in_gate .* in_trans) + next_h = out_gate .* mx.Activation(next_c, act_type=:tanh) + + return LTSMState(next_c, next_h) +end + +function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_class::Int; + dropout::Real=0, name::Symbol=gensym()) + + # placeholder nodes for all parameters + embed_W = mx.Variable(symbol(name, "_embed_weight")) + pred_W = mx.Variable(symbol(name, "_pred_weight")) + pred_b = mx.Variable(symbol(name, "_pred_bias")) + + layer_param_states = map(1:n_layer) do i + param = LSTMParam(mx.Variable(symbol(name, "_l$(i)_i2h_weight")), + mx.Variable(symbol(name, "_l$(i)_h2h_weight")), + mx.Variable(symbol(name, "_l$(i)_i2h_bias")), + mx.Variable(symbol(name, "_l$(i)_h2h_bias"))) + state = LTSMState(mx.Variable(symbol(name, "_l$(i)_init_c")), + mx.Variable(symbol(name, "_l$(i)_init_h"))) + (param, state) + end + + # now unroll over time + outputs = mx.SymbolicNode[] + for t = 1:seq_len + data = mx.Variable(symbol(name, "_data_$t")) + label = mx.Variable(symbol(name, "_label_$t")) + hidden = mx.FullyConnected(data, weight=embed_W, num_hidden=dim_embed, + no_bias=true, name=symbol(name, "_embed_$t")) + + + # stack LTSM cells + for i = 1:n_layer + l_param, l_state = layer_param_states[i] + dp = i == 1 ? 
0 : dropout # don't do dropout for data + next_state = ltsm_cell(hidden, l_state, l_param, num_hidden=dim_hidden, dropout=dp, + name=symbol(name, "_lstm_$t")) + hidden = next_state.h + layer_param_states[i] = (l_param, next_state) + end + + # prediction / decoder + if dropout > 0 + hidden = mx.Dropout(hidden, p=dropout) + end + pred = mx.FullyConnected(hidden, weight=pred_W, bias=pred_b, num_hidden=n_class, + name=symbol(name, "_pred_$t")) + smax = mx.SoftmaxOutput(pred, label, name=symbol(name, "_softmax_$t")) + push!(outputs, smax) + end + + # append block-gradient nodes to the final states + for i = 1:n_layer + l_param, l_state = layer_param_states[i] + final_state = LTSMState(mx.BlockGrad(l_state.c, name=symbol(name, "_l$(i)_last_c")), + mx.BlockGrad(l_state.h, name=symbol(name, "_l$(i)_last_h"))) + layer_param_states[i] = (l_param, final_state) + end + + # now group all outputs together + return mx.Group(outputs...) +end diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl new file mode 100644 index 000000000000..ea6c390581bb --- /dev/null +++ b/examples/char-lstm/seq-data.jl @@ -0,0 +1,37 @@ +# Simple data provider that load text +using MXNet + +const UNKNOWN_CHAR = Char(0) + +function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; max_vocab=10000) + if isfile(vocab_fn) + info("Vocabulary already exists, reusing $vocab_fn...") + vocab = open(corpus_fn) do io + Dict([w[1] => i for (i,w) in enumerate(eachline(io))]) + end + else + # count symbol frequency + dict = Dict{Char,Int}() + open(corpus_fn) do io + for line in eachline(io) + for c in line + dict[c] = get(dict, c, 0) + 1 + end + end + end + + vocab = sort(collect(dict), by=x->-x.second) + vocab = vocab[1:min(max_vocab,length(vocab))] + open(vocab_fn, "w") do io + for x in vocab + println(io, x.first) + end + end + + vocab = Dict([x.first => i for (i,x) in enumerate(vocab)]) + end + vocab[UNKNOWN_CHAR] = 0 + return vocab +end + +build_vocabulary("input.txt", 
"vocab.txt") From 703d78fd97df64373729d6b182a181d1f2874892 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 19:08:23 -0500 Subject: [PATCH 207/630] char-ltsm example, not tested --- examples/char-lstm/.gitignore | 1 + examples/char-lstm/config.jl | 19 ++++++++ examples/char-lstm/seq-data.jl | 87 ++++++++++++++++++++++++++++++---- examples/char-lstm/train.jl | 34 +++++++++++++ plugins/io/svmlight.jl | 1 + 5 files changed, 134 insertions(+), 8 deletions(-) create mode 100644 examples/char-lstm/config.jl create mode 100644 examples/char-lstm/train.jl diff --git a/examples/char-lstm/.gitignore b/examples/char-lstm/.gitignore index 06c798bc3232..4b87bf4070f6 100644 --- a/examples/char-lstm/.gitignore +++ b/examples/char-lstm/.gitignore @@ -1 +1,2 @@ input.txt +vocab.dat diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl new file mode 100644 index 000000000000..d94d6e995cbf --- /dev/null +++ b/examples/char-lstm/config.jl @@ -0,0 +1,19 @@ +const DROPOUT = 0 +const BATCH_SIZE = 32 +const SEQ_LENGTH = 32 +const DIM_HIDDEN = 256 +const DIM_EMBED = 256 +const LTSM_N_LAYER = 2 +const N_EPOCH = 21 +const LEARNING_RATE = 1 +const WEIGHT_DECAY = 0.00001 +const MOMENTUM = 0.0 +const CLIP_GRADIENT = 1 +const NAME = :ptb +const N_GPU = 4 +const USE_GPU = true +const DATA_TR_RATIO = 0.9 + +const UNKNOWN_CHAR = Char(0) +const INPUT_FILE = joinpath(dirname(@__FILE__), "input.txt") +const VOCAB_FILE = joinpath(dirname(@__FILE__), "vocab.dat") diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index ea6c390581bb..acf5f9394a5b 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -1,14 +1,11 @@ # Simple data provider that load text +using Iterators using MXNet -const UNKNOWN_CHAR = Char(0) - function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; max_vocab=10000) if isfile(vocab_fn) info("Vocabulary already exists, reusing $vocab_fn...") - vocab = open(corpus_fn) do io 
- Dict([w[1] => i for (i,w) in enumerate(eachline(io))]) - end + vocab = Dict{Char,Int}([w => i for (i,w) in enumerate(readall(vocab_fn))]) else # count symbol frequency dict = Dict{Char,Int}() @@ -24,14 +21,88 @@ function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; m vocab = vocab[1:min(max_vocab,length(vocab))] open(vocab_fn, "w") do io for x in vocab - println(io, x.first) + print(io, x.first) end end vocab = Dict([x.first => i for (i,x) in enumerate(vocab)]) end - vocab[UNKNOWN_CHAR] = 0 + vocab[UNKNOWN_CHAR] = length(vocab) return vocab end -build_vocabulary("input.txt", "vocab.txt") +function char_idx(vocab :: Dict{Char,Int}, c :: Char) + if haskey(vocab, c) + vocab[c] + else + vocab[UNKNOWN_CHAR] + end +end + +type CharSeqProvider <: mx.AbstractDataProvider + text :: AbstractString + batch_size :: Int + seq_len :: Int + vocab :: Dict{Char,Int} + + prefix :: Symbol + n_layer :: Int + dim_hidden :: Int +end + +function mx.get_batch_size(p :: CharSeqProvider) + p.batch_size +end +function mx.provide_data(p :: CharSeqProvider) + [(symbol(p.prefix, "_data_$t"), (length(p.vocab), p.batch_size)) for t = 1:p.seq_len] ∪ + [(symbol(p.prefix, "_l$(l)_init_c"), (p.dim_hidden, p.batch_size)) for l=1:p.n_layer] ∪ + [(symbol(p.prefix, "_l$(l)_init_h"), (p.dim_hidden, p.batch_size)) for l=1:p.n_layer] +end +function mx.provide_label(p :: CharSeqProvider) + [(symbol(p.prefix, "_label_$t"), (p.batch_size),) for t = 1:p.seq_len] +end + +function mx.eachbatch(p :: CharSeqProvider) + data_all = [mx.zeros(shape) for (name, shape) in mx.provide_data(p)] + label_all = [mx.zeros(shape) for (name, shape) in mx.provide_label(p)] + + data_jl = [copy(x) for x in data_all] + label_jl= [copy(x) for x in label_all] + + batch = mx.DataBatch(data_all, label_all, p.batch_size) + + function _text_iter() + text = p.text + + n_batch = floor(Int, length(text) / p.batch_size / p.seq_len) + text = text[1:n_batch*p.batch_size*p.seq_len] # discard tailing + idx_all = 
1:length(text) + + for idx_batch in partition(idx_all, p.batch_size*p.seq_len) + for i = 1:p.seq_len + data_jl[i][:] = 0 + label_jl[i][:] = 0 + end + + for (i, idx_seq) in enumerate(partition(idx_batch, p.seq_len)) + println("i = $i, idx_seq = $idx_seq") + for (j, idx) in enumerate(idx_seq) + c_this = text[idx] + c_next = idx == length(text) ? UNKNOWN_CHAR : text[idx+1] + data_jl[j][char_idx(vocab,c_this),i] = 1 + label_jl[j][i] = char_idx(vocab,c_next)-1 + end + end + + for i = 1:p.seq_len + copy!(data_all[i], data_jl[i]) + copy!(label_all[i], label_jl[i]) + end + + produce(batch) + end + end + + return Task(_text_iter) +end + diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl new file mode 100644 index 000000000000..1d391255a8ab --- /dev/null +++ b/examples/char-lstm/train.jl @@ -0,0 +1,34 @@ +include(joinpath(dirname(@__FILE__), "config.jl")) +include(joinpath(dirname(@__FILE__), "lstm.jl")) +include(joinpath(dirname(@__FILE__), "seq-data.jl")) + +# build vocabulary +vocab = build_vocabulary(INPUT_FILE, VOCAB_FILE) +n_class = length(vocab) + +# define LTSM +ltsm = LTSM(LTSM_N_LAYER, SEQ_LENGTH, DIM_HIDDEN, DIM_EMBED, n_class, dropout=DROPOUT, name=NAME) + +# load data +text_all = readall(INPUT_FILE) +len_train = round(Int, length(text_all)*DATA_TR_RATIO) +text_tr = text_all[1:len_train] +text_val = text_all[len_train+1:end] + +data_tr = CharSeqProvider(text_tr, BATCH_SIZE, SEQ_LENGTH, vocab, NAME, + LSTM_N_LAYER, DIM_HIDDEN) +data_val = CharSeqProvider(text_val, BATCH_SIZE, SEQ_LENGTH, vocab, NAME, + LSTM_N_LAYER, DIM_HIDDEN) + +# set up training +if USE_GPU + context = [mx.gpu(i) for i = 0:N_GPU-1] +else + context = [mx.cpu()] +end + +model = mx.FeedForward(lstm, context=context) +optimizer = mx.SGD(lr=LEARNING_RATE, momentum=MOMENTUM, + weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) + +mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH) diff --git a/plugins/io/svmlight.jl b/plugins/io/svmlight.jl index 
94563357ee6e..500c040394ee 100644 --- a/plugins/io/svmlight.jl +++ b/plugins/io/svmlight.jl @@ -44,6 +44,7 @@ function mx.eachbatch(provider :: SVMLightProvider) function _svmlight_iter() f = SVMLightFile(provider.filename) while true + error("This is actually buggy and needs fixing") raw = collect(take(f, provider.batch_size)) cnt = length(raw) if cnt == 0 From eea7d2c1c2739f5592a82726b753511f82af0576 Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 12 Nov 2015 19:24:09 -0500 Subject: [PATCH 208/630] typo ltsm -> lstm --- examples/char-lstm/config.jl | 2 +- examples/char-lstm/lstm.jl | 12 ++++++------ examples/char-lstm/train.jl | 4 ++-- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index d94d6e995cbf..b6de43c440d8 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -3,7 +3,7 @@ const BATCH_SIZE = 32 const SEQ_LENGTH = 32 const DIM_HIDDEN = 256 const DIM_EMBED = 256 -const LTSM_N_LAYER = 2 +const LSTM_N_LAYER = 2 const N_EPOCH = 21 const LEARNING_RATE = 1 const WEIGHT_DECAY = 0.00001 diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index f429313dc723..81805b95da95 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -13,7 +13,7 @@ immutable LSTMParam h2h_b :: mx.SymbolicNode end -function ltsm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMParam; +function lstm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMParam; num_hidden::Int=512, dropout::Real=0, name::Symbol=gensym()) if dropout > 0 @@ -35,7 +35,7 @@ function ltsm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMPara next_c = (forget_gate .* prev_state.c) + (in_gate .* in_trans) next_h = out_gate .* mx.Activation(next_c, act_type=:tanh) - return LTSMState(next_c, next_h) + return LSTMState(next_c, next_h) end function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_class::Int; @@ -51,7 +51,7 @@ 
function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla mx.Variable(symbol(name, "_l$(i)_h2h_weight")), mx.Variable(symbol(name, "_l$(i)_i2h_bias")), mx.Variable(symbol(name, "_l$(i)_h2h_bias"))) - state = LTSMState(mx.Variable(symbol(name, "_l$(i)_init_c")), + state = LSTMState(mx.Variable(symbol(name, "_l$(i)_init_c")), mx.Variable(symbol(name, "_l$(i)_init_h"))) (param, state) end @@ -65,11 +65,11 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla no_bias=true, name=symbol(name, "_embed_$t")) - # stack LTSM cells + # stack LSTM cells for i = 1:n_layer l_param, l_state = layer_param_states[i] dp = i == 1 ? 0 : dropout # don't do dropout for data - next_state = ltsm_cell(hidden, l_state, l_param, num_hidden=dim_hidden, dropout=dp, + next_state = lstm_cell(hidden, l_state, l_param, num_hidden=dim_hidden, dropout=dp, name=symbol(name, "_lstm_$t")) hidden = next_state.h layer_param_states[i] = (l_param, next_state) @@ -88,7 +88,7 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla # append block-gradient nodes to the final states for i = 1:n_layer l_param, l_state = layer_param_states[i] - final_state = LTSMState(mx.BlockGrad(l_state.c, name=symbol(name, "_l$(i)_last_c")), + final_state = LSTMState(mx.BlockGrad(l_state.c, name=symbol(name, "_l$(i)_last_c")), mx.BlockGrad(l_state.h, name=symbol(name, "_l$(i)_last_h"))) layer_param_states[i] = (l_param, final_state) end diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index 1d391255a8ab..03631c2672bf 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -6,8 +6,8 @@ include(joinpath(dirname(@__FILE__), "seq-data.jl")) vocab = build_vocabulary(INPUT_FILE, VOCAB_FILE) n_class = length(vocab) -# define LTSM -ltsm = LTSM(LTSM_N_LAYER, SEQ_LENGTH, DIM_HIDDEN, DIM_EMBED, n_class, dropout=DROPOUT, name=NAME) +# define LSTM +lstm = LSTM(LSTM_N_LAYER, SEQ_LENGTH, DIM_HIDDEN, DIM_EMBED, n_class, 
dropout=DROPOUT, name=NAME) # load data text_all = readall(INPUT_FILE) From e35b92941c262f976774367f9f8e5a03aa4e822e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 19:25:22 -0500 Subject: [PATCH 209/630] fix macro error reporting --- src/symbolic-node.jl | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 8b0a5b4fad5f..0b7385ffa1a7 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -382,7 +382,8 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) Ref{char_pp}, Ref{char_p}), hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, ref_kv_nargs) - func_name = symbol(bytestring(ref_name[])) + func_name_s= bytestring(ref_name[]) + func_name = symbol(func_name_s) kv_nargs_s = bytestring(ref_kv_nargs[]) kv_nargs = symbol(kv_nargs_s) @@ -431,12 +432,12 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) end if length(args) != 0 && length(symbol_kws) != 0 - @assert(false, "$func_name only accepts Symbols either as positional or keyword arguments, not both.") + @assert(false, $func_name_s * " only accepts Symbols either as positional or keyword arguments, not both.") end $(if kv_nargs != symbol("") quote if length(symbol_kws) > 0 - @assert(false, "$func_name takes variable number of SymbolicNode arguments, please pass input Symbols " * + @assert(false, $func_name * " takes variable number of SymbolicNode arguments, please pass input Symbols " * "via positional arguments, instead of keyword arguments.") end end @@ -450,7 +451,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) sym_hdr = ref_sym_hdr[] node = SymbolicNode(MX_SymbolHandle(sym_hdr)) - hint = lowercase(string($func_name)) + hint = lowercase($func_name_s) name = get!(DEFAULT_NAME_MANAGER, name, hint) if length(args) != 0 From 5a9d56af303e38298edbfb1c809f41a22bc87aae Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang 
Date: Thu, 12 Nov 2015 19:29:42 -0500 Subject: [PATCH 210/630] fix some errors --- examples/char-lstm/lstm.jl | 8 ++++---- examples/char-lstm/seq-data.jl | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index 81805b95da95..aca4d7660e1b 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -20,9 +20,9 @@ function lstm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMPara data = mx.Dropout(data, p=dropout) end - i2h = mx.FullyConnected(data, weight=param.i2h_W, bias=param.i2h_b, + i2h = mx.FullyConnected(data=data, weight=param.i2h_W, bias=param.i2h_b, num_hidden=4num_hidden, name=symbol(name, "_i2h")) - h2h = mx.FullyConnected(prev_state.h, weight=param.h2h_W, bias=param.h2h_b, + h2h = mx.FullyConnected(data=prev_state.h, weight=param.h2h_W, bias=param.h2h_b, num_hidden=4num_hidden, name=symbol(name, "_h2h")) gates = mx.SliceChannel(i2h + h2h, num_outputs=4, name=symbol(name, "_gates")) @@ -61,7 +61,7 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla for t = 1:seq_len data = mx.Variable(symbol(name, "_data_$t")) label = mx.Variable(symbol(name, "_label_$t")) - hidden = mx.FullyConnected(data, weight=embed_W, num_hidden=dim_embed, + hidden = mx.FullyConnected(data=data, weight=embed_W, num_hidden=dim_embed, no_bias=true, name=symbol(name, "_embed_$t")) @@ -79,7 +79,7 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla if dropout > 0 hidden = mx.Dropout(hidden, p=dropout) end - pred = mx.FullyConnected(hidden, weight=pred_W, bias=pred_b, num_hidden=n_class, + pred = mx.FullyConnected(data=hidden, weight=pred_W, bias=pred_b, num_hidden=n_class, name=symbol(name, "_pred_$t")) smax = mx.SoftmaxOutput(pred, label, name=symbol(name, "_softmax_$t")) push!(outputs, smax) diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index acf5f9394a5b..bd7c04576d90 100644 --- 
a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -59,7 +59,7 @@ function mx.provide_data(p :: CharSeqProvider) [(symbol(p.prefix, "_l$(l)_init_h"), (p.dim_hidden, p.batch_size)) for l=1:p.n_layer] end function mx.provide_label(p :: CharSeqProvider) - [(symbol(p.prefix, "_label_$t"), (p.batch_size),) for t = 1:p.seq_len] + [(symbol(p.prefix, "_label_$t"), (p.batch_size,)) for t = 1:p.seq_len] end function mx.eachbatch(p :: CharSeqProvider) From 487d5e85300125799fb6bad3fcf421668d80578d Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 12 Nov 2015 19:36:48 -0500 Subject: [PATCH 211/630] fix kvstore refactoring bug --- examples/char-lstm/seq-data.jl | 1 - src/kvstore.jl | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index bd7c04576d90..3f545671facd 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -85,7 +85,6 @@ function mx.eachbatch(p :: CharSeqProvider) end for (i, idx_seq) in enumerate(partition(idx_batch, p.seq_len)) - println("i = $i, idx_seq = $idx_seq") for (j, idx) in enumerate(idx_seq) c_this = text[idx] c_next = idx == length(text) ? 
UNKNOWN_CHAR : text[idx+1] diff --git a/src/kvstore.jl b/src/kvstore.jl index 1d8c98935768..d52433f567b2 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -121,7 +121,7 @@ end function set_optimizer(self :: KVStore, optimizer :: AbstractOptimizer) ref_is_worker = Ref{Cint}(0) - @mxcall(:MXKVStoreIsWorkerSymbolicNode, (Ref{Cint},), ref_is_worker) + @mxcall(:MXKVStoreIsWorkerNode, (Ref{Cint},), ref_is_worker) is_worker = ref_is_worker[] if ismatch(r"dist", string(get_type(self))) && is_worker From e4b6306e3e1e0807e3a21eb6175b33230f917632 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 19:44:31 -0500 Subject: [PATCH 212/630] fix databatch --- src/io.jl | 4 ++-- src/model.jl | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/io.jl b/src/io.jl index 71bbc38a7f50..1b3ee3b091a9 100644 --- a/src/io.jl +++ b/src/io.jl @@ -204,8 +204,8 @@ type DataBatch <: AbstractDataBatch count :: Int end count_samples(batch :: DataBatch) = batch.count -get_data(batch :: DataBatch) = batch.data -get_label(batch :: DataBatch) = batch.label +get_data{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batch.data +get_label{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batch.label #=doc .. class:: SlicedNDArray diff --git a/src/model.jl b/src/model.jl index 1a9d11facbe2..6d77306597dc 100644 --- a/src/model.jl +++ b/src/model.jl @@ -391,7 +391,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # invoke callbacks on epoch 0 _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) - # now start training... 
+ info("Start training...") for i_epoch = 1:opts.n_epoch time_start = time() reset!(opts.eval_metric) From 6ddbecdd3e72b7ca8051ce9bbfe3eea12e50f745 Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 12 Nov 2015 20:11:22 -0500 Subject: [PATCH 213/630] add speedometer --- examples/char-lstm/train.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index 03631c2672bf..de81e3691739 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -31,4 +31,5 @@ model = mx.FeedForward(lstm, context=context) optimizer = mx.SGD(lr=LEARNING_RATE, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) -mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH) +mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, + callbacks=[mx.speedometer()]) From 00c7bff664e8cee9cece4cc2464223e69b1fe711 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 20:12:14 -0500 Subject: [PATCH 214/630] neg-log-likilihood eval metric --- examples/char-lstm/lstm.jl | 29 +++++++++++++++++++++++++++++ examples/char-lstm/train.jl | 4 ++-- 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index aca4d7660e1b..05be419596c0 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -96,3 +96,32 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla # now group all outputs together return mx.Group(outputs...) 
end + + +# Negative Log-likelihood +type NLL <: mx.AbstractEvalMetric + nll_sum :: Float64 + n_sample :: Int + + NLL() = new(0.0, 0) +end + +function mx.update!(metric :: NLL, labels :: Vector{mx.NDArray}, preds :: Vector{NDArray}) + @assert length(labels) == length(preds) + nll = 0.0 + for (label, pred) in zip(labels, preds) + @nd_as_jl ro=(label, pred) begin + nll -= sum(log(broadcast_getindex(pred, label+1, 1:length(label)))) + end + end + + nll = nll / length(labels) + metric.nll_sum += nll + metric.n_sample += length(labels[1]) +end + +function mx.get(metric :: NLL) + nll = metric.nll / metric.n_sample + perp = exp(nll) + return [(symbol("neg-log-likelihood"), nll), (:perplexity, perp)] +end diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index de81e3691739..d66b2b4a88eb 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -31,5 +31,5 @@ model = mx.FeedForward(lstm, context=context) optimizer = mx.SGD(lr=LEARNING_RATE, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) -mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, - callbacks=[mx.speedometer()]) +mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, + callbacks=[mx.speedometer()], eval_metric=NLL()) From d685f77b4e936cd247f50c5dbc075127555c5620 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 20:15:17 -0500 Subject: [PATCH 215/630] tweak training log formatting --- examples/char-lstm/lstm.jl | 2 +- src/model.jl | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index 05be419596c0..9305b119ee06 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -123,5 +123,5 @@ end function mx.get(metric :: NLL) nll = metric.nll / metric.n_sample perp = exp(nll) - return [(symbol("neg-log-likelihood"), nll), (:perplexity, perp)] + return [(:NLL, nll), (:perplexity, perp)] end diff --git 
a/src/model.jl b/src/model.jl index 6d77306597dc..015d9c09af05 100644 --- a/src/model.jl +++ b/src/model.jl @@ -461,9 +461,9 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra info(format("== Epoch {1:0>3d} ==========", i_epoch)) info("## Training summary") for (name, value) in get(opts.eval_metric) - info(format("{1:>15s} = {2:.4f}", name, value)) + info(format("{1:>18s} = {2:.4f}", string(name), value)) end - info(format("{1:>15s} = {2:.4f} seconds", "time", time_stop-time_start)) + info(format("{1:>18s} = {2:.4f} seconds", "time", time_stop-time_start)) # evaluation on validation set if !isa(opts.eval_data, Void) @@ -491,7 +491,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra info("## Validation summary") for (name, value) in get(opts.eval_metric) - info(format("{1:>15s} = {2:.4f}", name, value)) + info(format("{1:>18s} = {2:.4f}", string(name), value)) end end From 69cc430743fb5c43aecf3804c058513072c1401c Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 12 Nov 2015 20:27:47 -0500 Subject: [PATCH 216/630] reset! 
for NLL calculation --- examples/char-lstm/lstm.jl | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index 9305b119ee06..538fe0497676 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -106,12 +106,12 @@ type NLL <: mx.AbstractEvalMetric NLL() = new(0.0, 0) end -function mx.update!(metric :: NLL, labels :: Vector{mx.NDArray}, preds :: Vector{NDArray}) +function mx.update!(metric :: NLL, labels :: Vector{mx.NDArray}, preds :: Vector{mx.NDArray}) @assert length(labels) == length(preds) nll = 0.0 for (label, pred) in zip(labels, preds) - @nd_as_jl ro=(label, pred) begin - nll -= sum(log(broadcast_getindex(pred, label+1, 1:length(label)))) + @mx.nd_as_jl ro=(label, pred) begin + nll -= sum(log(max(broadcast_getindex(pred, round(Int,label+1), 1:length(label)), 1e-20))) end end @@ -121,7 +121,12 @@ function mx.update!(metric :: NLL, labels :: Vector{mx.NDArray}, preds :: Vector end function mx.get(metric :: NLL) - nll = metric.nll / metric.n_sample + nll = metric.nll_sum / metric.n_sample perp = exp(nll) return [(:NLL, nll), (:perplexity, perp)] end + +function mx.reset!(metric :: NLL) + metric.nll_sum = 0.0 + metric.n_sample = 0 +end From c5c1e057c7f45de3e8c7e3f2922f7bff3eb4082b Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 22:03:59 -0500 Subject: [PATCH 217/630] basic network visualization --- REQUIRE | 1 + src/MXNet.jl | 1 + src/visualize.jl | 154 +++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 156 insertions(+) create mode 100644 src/visualize.jl diff --git a/REQUIRE b/REQUIRE index a0f5c9865565..fea260919fbc 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,3 +1,4 @@ julia 0.4 Formatting BinDeps +JSON diff --git a/src/MXNet.jl b/src/MXNet.jl index a3e280d26a11..42c9e45f477e 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -29,6 +29,7 @@ include("callback.jl") include("model.jl") include("util.jl") +include("visualize.jl") 
include("nn-factory.jl") diff --git a/src/visualize.jl b/src/visualize.jl new file mode 100644 index 000000000000..982a155ad990 --- /dev/null +++ b/src/visualize.jl @@ -0,0 +1,154 @@ +import JSON + +#=doc +Network Visualization +===================== +=# + +#=doc +.. function:: to_graphviz(network) + + :param SymbolicNode network: the network to visualize. + :param AbstractString title: keyword argument, default "Network Visualization", + the title of the GraphViz graph. + :param input_shapes: keyword argument, default ``nothing``. If provided, + will run shape inference and plot with the shape information. Should + be either a dictionary of name-shape mapping or an array of shapes. + :return: the graph description in GraphViz ``dot`` language. +=# +function to_graphviz(network :: SymbolicNode; title="Network Visualization", input_shapes=nothing) + if !isa(input_shapes, Void) + internals = get_internals(network) + if isa(input_shapes, Dict) + _, out_shapes, _ = infer_shape(; input_shapes...) + else + _, out_shapes, _ = infer_shape(input_shapes...) 
+ end + @assert(!isa(out_shapes, Void), "Failed to do shape inference, input shapes are incomplete") + shapes = Dict(zip(list_outputs(internals), out_shapes)) + draw_shape = true + else + draw_shape = false + end + + conf = JSON.parse(to_json(network)) + nodes = conf["nodes"] + heads = unique(conf["heads"][1]+1) + node_attr = Dict(:shape => :box, :fixedsize => true, :width => 1.3, + :height => 0.8034, :style => :filled) + io = IOBuffer() + println(io, "digraph $(_simple_escape(title)) {") + println(io, "node [fontsize=10];") + + # color map + cm = ("#8dd3c7", "#fb8072", "#ffffb3", "#bebada", "#80b1d3", + "#fdb462", "#b3de69", "#fccde5") + + # make nodes + for i = 1:length(nodes) + node = nodes[i] + op = node["op"] + name = node["name"] + attr = deepcopy(node_attr) + label = op + + if op == "null" + if i ∈ heads + label = node["name"] + attr[:fillcolor] = cm[1] + else + continue + end + elseif op == "Convolution" + label = format("Convolution\nkernel={1},stride={2},n-filter={3}", + _extract_shape(node["param"]["kernel"]), + _extract_shape(node["param"]["stride"]), + node["param"]["num_filter"]) + attr[:fillcolor] = cm[2] + elseif op == "FullyConnected" + label = format("FullyConnected\nnum-hidden={1}", node["param"]["num_hidden"]) + attr[:fillcolor] = cm[2] + # TODO: add more + else + attr[:fillcolor] = cm[8] + end + + attr[:label] = label + _format_graphviz_node(io, name, attr) + end + + # add edges + for i = 1:length(nodes) + node = nodes[i] + op = node["op"] + name = node["name"] + if op == "null" + continue + end + inputs = node["inputs"] + for item in inputs + input_node = nodes[item[1]+1] + input_name = input_node["name"] + if input_node["op"] != "null" || (item[1]+1) ∈ heads + attr = Dict(:dir => :back, :arrowtail => :open) + if draw_shape + if input_node["op"] != "null" + key = symbol(input_name, "_output") + shape = shape_dict[key][1:end-1] + else + key = symbol(input_name) + shape = shape_dict[key][1:end-1] + end + label = "(" * join([string(x) for x in 
shape], ",") * ")" + attr[:label] = label + end + _format_graphviz_edge(io, input_name, name, attr) + end + end + end + println(io, "}") + + return takebuf_string(io) +end + +function _format_graphviz_attr(io::IOBuffer, attrs) + label = get(attrs, :label, nothing) + if isa(label, Void) + print(io, " [") + else + print(io, " [label=$(_simple_escape(label)),") + end + first_attr = true + for (k,v) in attrs + if k != :label + if !first_attr + print(io, ",") + end + first_attr = false + + if isa(v, AbstractString) && v[1] == '#' + # color + v = _simple_escape(v) + end + print(io, "$k=$v") + end + end + println(io, "];") +end +function _simple_escape(str :: AbstractString) + str = replace(str, r"\n", "\\n") + return "\"$str\"" +end +function _format_graphviz_node(io::IOBuffer, name::AbstractString, attrs) + print(io, "$(_simple_escape(name)) ") + _format_graphviz_attr(io, attrs) +end +function _format_graphviz_edge(io::IOBuffer, head, tail, attrs) + print(io, """$(_simple_escape(head)) -> $(_simple_escape(tail)) """) + _format_graphviz_attr(io, attrs) +end +function _extract_shape(str :: AbstractString) + shape = matchall(r"\d+", str) + shape = reverse(shape) # JSON in libmxnet has reversed shape (column vs row majoring) + return "(" * join(shape, ",") * ")" +end From e0857a2b53dd370e677644837503704a56bf67e6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 22:14:06 -0500 Subject: [PATCH 218/630] add visualization doc --- docs/api/metric.rst | 2 +- docs/api/symbolic-node.rst | 92 ++++++++++++++++++++++++++++++++++++++ docs/api/visualize.rst | 19 ++++++++ docs/build-api.jl | 1 + docs/index.rst | 1 + src/visualize.jl | 18 +++++++- 6 files changed, 130 insertions(+), 3 deletions(-) create mode 100644 docs/api/visualize.rst diff --git a/docs/api/metric.rst b/docs/api/metric.rst index 614058d18294..db503d5e6849 100644 --- a/docs/api/metric.rst +++ b/docs/api/metric.rst @@ -50,7 +50,7 @@ set. .. class:: MSE - Mean Squared Error. 
Todo: add support for multi-dimensional outputs. + Mean Squared Error. TODO: add support for multi-dimensional outputs. Calculates the mean squared error regression loss in one dimension. diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 2868bf27681e..78e359537ec3 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -725,6 +725,29 @@ Internal APIs +.. function:: _DivScalar(...) + + Perform an elementwise div. + + :param array: Input array operand to the operation. + :type array: SymbolicNode + + + :param scalar: scalar value. + :type scalar: float, required + + + :param scalar_on_right: scalar operand is on the right. + :type scalar_on_right: boolean, optional, default=False + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: _Minus(...) Perform an elementwise minus. @@ -737,6 +760,29 @@ Internal APIs +.. function:: _MinusScalar(...) + + Perform an elementwise minus. + + :param array: Input array operand to the operation. + :type array: SymbolicNode + + + :param scalar: scalar value. + :type scalar: float, required + + + :param scalar_on_right: scalar operand is on the right. + :type scalar_on_right: boolean, optional, default=False + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: _Mul(...) Perform an elementwise mul. @@ -749,6 +795,29 @@ Internal APIs +.. function:: _MulScalar(...) + + Perform an elementwise mul. + + :param array: Input array operand to the operation. + :type array: SymbolicNode + + + :param scalar: scalar value. + :type scalar: float, required + + + :param scalar_on_right: scalar operand is on the right. + :type scalar_on_right: boolean, optional, default=False + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. 
`:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: _Native(...) Stub for implementing an operator implemented in native frontend language. @@ -780,6 +849,29 @@ Internal APIs +.. function:: _PlusScalar(...) + + Perform an elementwise plus. + + :param array: Input array operand to the operation. + :type array: SymbolicNode + + + :param scalar: scalar value. + :type scalar: float, required + + + :param scalar_on_right: scalar operand is on the right. + :type scalar_on_right: boolean, optional, default=False + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + diff --git a/docs/api/visualize.rst b/docs/api/visualize.rst new file mode 100644 index 000000000000..42857c5f4ff0 --- /dev/null +++ b/docs/api/visualize.rst @@ -0,0 +1,19 @@ + +Network Visualization +===================== + + + + +.. function:: to_graphviz(network) + + :param SymbolicNode network: the network to visualize. + :param AbstractString title: keyword argument, default "Network Visualization", + the title of the GraphViz graph. + :param input_shapes: keyword argument, default ``nothing``. If provided, + will run shape inference and plot with the shape information. Should + be either a dictionary of name-shape mapping or an array of shapes. + :return: the graph description in GraphViz ``dot`` language. 
+ + + diff --git a/docs/build-api.jl b/docs/build-api.jl index 99ae5ad1c709..53ca9f6f7ca1 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -94,4 +94,5 @@ extract_doc("metric.rst", "metric.jl") extract_doc("io.rst", "io.jl") embed_mxnet_api("io.rst", "io", mx._import_io_iterators) +extract_doc("visualize.rst", "visualize.jl") extract_doc("nn-factory.rst", "nn-factory.jl") diff --git a/docs/index.rst b/docs/index.rst index da28e3080440..4f718e1861b9 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -42,6 +42,7 @@ For more details, see documentation below. Please also checkout the `examples api/symbolic-node api/nn-factory api/executor + api/visualize Indices and tables ================== diff --git a/src/visualize.jl b/src/visualize.jl index 982a155ad990..e668ba37ecd9 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -60,7 +60,7 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp continue end elseif op == "Convolution" - label = format("Convolution\nkernel={1},stride={2},n-filter={3}", + label = format("Convolution\nkernel={1}\nstride={2}\nn-filter={3}", _extract_shape(node["param"]["kernel"]), _extract_shape(node["param"]["stride"]), node["param"]["num_filter"]) @@ -68,7 +68,21 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp elseif op == "FullyConnected" label = format("FullyConnected\nnum-hidden={1}", node["param"]["num_hidden"]) attr[:fillcolor] = cm[2] - # TODO: add more + elseif op == "Activation" + label = format("Activation\nact-type={1}", node["param"]["act_type"]) + attr[:fillcolor] = cm[3] + elseif op == "BatchNorm" + attr[:fillcolor] = cm[4] + elseif op == "Pooling" + label = format("Pooling\ntype={1}\nkernel={2}\nstride={3}", + node["param"]["pool_type"], + _extract_shape(node["param"]["kernel"]), + _extract_shape(node["param"]["stride"])) + attr[:fillcolor] = cm[5] + elseif op ∈ ("Concat", "Flatten", "Reshape") + attr[:fillcolor] = cm[6] + elseif endswith(op, "Output") + 
attr[:fillcolor] = cm[7] else attr[:fillcolor] = cm[8] end From c4abb973bc4033d6a1c34246678bd2410b152435 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 22:17:11 -0500 Subject: [PATCH 219/630] fix visualization bugs --- src/visualize.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/visualize.jl b/src/visualize.jl index e668ba37ecd9..e6ada19f179c 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -20,12 +20,12 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp if !isa(input_shapes, Void) internals = get_internals(network) if isa(input_shapes, Dict) - _, out_shapes, _ = infer_shape(; input_shapes...) + _, out_shapes, _ = infer_shape(internals; input_shapes...) else - _, out_shapes, _ = infer_shape(input_shapes...) + _, out_shapes, _ = infer_shape(internals, input_shapes...) end @assert(!isa(out_shapes, Void), "Failed to do shape inference, input shapes are incomplete") - shapes = Dict(zip(list_outputs(internals), out_shapes)) + shape_dict = Dict(zip(list_outputs(internals), out_shapes)) draw_shape = true else draw_shape = false @@ -149,8 +149,8 @@ function _format_graphviz_attr(io::IOBuffer, attrs) end println(io, "];") end -function _simple_escape(str :: AbstractString) - str = replace(str, r"\n", "\\n") +function _simple_escape(str) + str = replace(string(str), r"\n", "\\n") return "\"$str\"" end function _format_graphviz_node(io::IOBuffer, name::AbstractString, attrs) From 0e1515d3df7aa05debac9f478412a938146b2f55 Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 12 Nov 2015 22:18:20 -0500 Subject: [PATCH 220/630] fix lstm learning rate --- examples/char-lstm/config.jl | 2 +- examples/char-lstm/train.jl | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index b6de43c440d8..fb07d3368f4d 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -5,7 +5,7 @@ const DIM_HIDDEN = 256 
const DIM_EMBED = 256 const LSTM_N_LAYER = 2 const N_EPOCH = 21 -const LEARNING_RATE = 1 +const LEARNING_RATE = 0.1 const WEIGHT_DECAY = 0.00001 const MOMENTUM = 0.0 const CLIP_GRADIENT = 1 diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index d66b2b4a88eb..b23880f589ff 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -32,4 +32,5 @@ optimizer = mx.SGD(lr=LEARNING_RATE, momentum=MOMENTUM, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, + initializer=mx.UniformInitializer(0.1), callbacks=[mx.speedometer()], eval_metric=NLL()) From 6193ef4a3f6e6e71fbc66d9d859c63ecfada0cfc Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 12 Nov 2015 22:31:48 -0500 Subject: [PATCH 221/630] add lr decay to lstm training --- examples/char-lstm/config.jl | 3 ++- examples/char-lstm/train.jl | 7 ++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index fb07d3368f4d..e4d4f0c180b8 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -5,7 +5,8 @@ const DIM_HIDDEN = 256 const DIM_EMBED = 256 const LSTM_N_LAYER = 2 const N_EPOCH = 21 -const LEARNING_RATE = 0.1 +const BASE_LR = 0.1 +const LR_DECAY = 0.7 const WEIGHT_DECAY = 0.00001 const MOMENTUM = 0.0 const CLIP_GRADIENT = 1 diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index b23880f589ff..997ba674f8c5 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -28,9 +28,10 @@ else end model = mx.FeedForward(lstm, context=context) -optimizer = mx.SGD(lr=LEARNING_RATE, momentum=MOMENTUM, - weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) +optimizer = mx.SGD(lr_scheduler=mx.LearningRate.Exp(BASE_LR, gamma=LR_DECAY), + momentum=MOMENTUM, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) +ckpoint_prefix = joinpath(dirname(@__FILE__), "checkpoints/$NAME") mx.fit(model, 
optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, initializer=mx.UniformInitializer(0.1), - callbacks=[mx.speedometer()], eval_metric=NLL()) + callbacks=[mx.speedometer(), mx.do_checkpoint(ckpoint_prefix)], eval_metric=NLL()) From b078fecd59fb0414a1825c643136d0705005a965 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 12 Nov 2015 23:22:17 -0500 Subject: [PATCH 222/630] lstm sampler (not tested) --- examples/char-lstm/config.jl | 15 +++++++++ examples/char-lstm/sampler.jl | 56 ++++++++++++++++++++++++++++++++++ examples/char-lstm/seq-data.jl | 8 ----- examples/char-lstm/train.jl | 3 +- src/io.jl | 3 ++ 5 files changed, 75 insertions(+), 10 deletions(-) create mode 100644 examples/char-lstm/sampler.jl diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index e4d4f0c180b8..ff982950132e 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -14,7 +14,22 @@ const NAME = :ptb const N_GPU = 4 const USE_GPU = true const DATA_TR_RATIO = 0.9 +const CKPOINT_PREFIX = joinpath(dirname(@__FILE__), "checkpoints/$NAME") + +const BATCH_SIZE_SMP= 10 +const SAMPLE_LENGTH = 100 +const SAMPLE_START = 'a' const UNKNOWN_CHAR = Char(0) const INPUT_FILE = joinpath(dirname(@__FILE__), "input.txt") const VOCAB_FILE = joinpath(dirname(@__FILE__), "vocab.dat") + +# helper function to convert a char into index in vocabulary +function char_idx(vocab :: Dict{Char,Int}, c :: Char) + if haskey(vocab, c) + vocab[c] + else + vocab[UNKNOWN_CHAR] + end +end + diff --git a/examples/char-lstm/sampler.jl b/examples/char-lstm/sampler.jl new file mode 100644 index 000000000000..c481da1d1950 --- /dev/null +++ b/examples/char-lstm/sampler.jl @@ -0,0 +1,56 @@ +include(joinpath(dirname(@__FILE__), "config.jl")) + +using StatsBase +using MXNet + +# load vocabulary +vocab = build_vocabulary(INPUT_FILE, VOCAB_FILE) + +# prepare data provider +jl_data = [(symbol(NAME, "_data_$t"), zeros(mx.MX_float, (length(vocab), BATCH_SIZE_SMP))) + for t = 
1:SEQ_LENGTH] +jl_c = [(symbol(NAME, "_l$(l)_init_c"), zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) + for l = 1:LSTM_N_LAYER] +jl_h = [(symbol(NAME, "_l$(l)_init_h"), zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) + for l = 1:LSTM_N_LAYER] + +# the first input in the sequence +jl_data_start = jl_data[1] +jl_data_start[char_idx(vocab, SAMPLE_START),:] = 1 + +data = mx.ArrayDataProvider(nd_data ∪ nd_c ∪ nd_h) + +# load model +model = mx.load_checkpoint(CKPOINT_PREFIX, N_EPOCH, mx.FeedForward) + +# prepare outputs +output_samples = zeros(Char, (SAMPLE_LENGTH, BATCH_SIZE_SMP)) +output_samples[1, :] = SAMPLE_START + +# build inverse vocabulary for convenience +inv_vocab = Dict([v => k for (k,v) in vocab]) + +# do prediction and sampling step by step +for t = 2:SAMPLE_LENGTH-1 + outputs = mx.predict(model, data) + + # we will only use the first output to do sampling + outputs = outputs[1] + + jl_data_start[:] = 0 + for i = 1:BATCH_SIZE_SMP + prob = WeightVec(outputs[:, i]) + k = sample(prob) + output_samples[t, k] = inv_vocab[k] + jl_data_start[k, i] = 1 + end +end + +output_texts = [join(output_samples[:,i]) for i = 1:BATCH_SIZE_SMP] +output_texts = [replace(x, UNKNOWN_CHAR, '?') for x in output_texts] + +for (i, text) in enumerate(output_texts) + println("## Sample $i") + println(text) + println() +end diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index 3f545671facd..8bbcfece56c1 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -31,14 +31,6 @@ function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; m return vocab end -function char_idx(vocab :: Dict{Char,Int}, c :: Char) - if haskey(vocab, c) - vocab[c] - else - vocab[UNKNOWN_CHAR] - end -end - type CharSeqProvider <: mx.AbstractDataProvider text :: AbstractString batch_size :: Int diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index 997ba674f8c5..ed4e4403c402 100644 --- a/examples/char-lstm/train.jl 
+++ b/examples/char-lstm/train.jl @@ -31,7 +31,6 @@ model = mx.FeedForward(lstm, context=context) optimizer = mx.SGD(lr_scheduler=mx.LearningRate.Exp(BASE_LR, gamma=LR_DECAY), momentum=MOMENTUM, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) -ckpoint_prefix = joinpath(dirname(@__FILE__), "checkpoints/$NAME") mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, initializer=mx.UniformInitializer(0.1), - callbacks=[mx.speedometer(), mx.do_checkpoint(ckpoint_prefix)], eval_metric=NLL()) + callbacks=[mx.speedometer(), mx.do_checkpoint(CKPOINT_PREFIX)], eval_metric=NLL()) diff --git a/src/io.jl b/src/io.jl index 1b3ee3b091a9..d6ba2fd3d8ce 100644 --- a/src/io.jl +++ b/src/io.jl @@ -307,6 +307,9 @@ end be less samples to include than a mini-batch. This value specify a scalar to pad the contents of all the missing data points. :param Real label_padding: the same as ``data_padding``, except for the labels. + + TODO: remove ``data_padding`` and ``label_padding``, and implement rollover that copies + the last or first several training samples to feed the padding. =# # Julia's type system is sometimes very frustrating. 
You cannot specify a function # with argument Vector{Pair} to expect to be matched when calling with the parameter From 0c3f32de0ad787086b5721b4963dd0d352918233 Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 12 Nov 2015 23:22:58 -0500 Subject: [PATCH 223/630] tweak learning rate --- examples/char-lstm/.gitignore | 1 + examples/char-lstm/config.jl | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/char-lstm/.gitignore b/examples/char-lstm/.gitignore index 4b87bf4070f6..7588feca6bfe 100644 --- a/examples/char-lstm/.gitignore +++ b/examples/char-lstm/.gitignore @@ -1,2 +1,3 @@ input.txt vocab.dat +checkpoints diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index e4d4f0c180b8..79a2040aad8e 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -5,7 +5,7 @@ const DIM_HIDDEN = 256 const DIM_EMBED = 256 const LSTM_N_LAYER = 2 const N_EPOCH = 21 -const BASE_LR = 0.1 +const BASE_LR = 0.5 const LR_DECAY = 0.7 const WEIGHT_DECAY = 0.00001 const MOMENTUM = 0.0 From 0f1fbaa7b2148e40541144d740d5dfa7296b1e56 Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 12 Nov 2015 23:30:35 -0500 Subject: [PATCH 224/630] fix sampler --- examples/char-lstm/config.jl | 9 --------- examples/char-lstm/sampler.jl | 21 +++++++++++---------- examples/char-lstm/seq-data.jl | 9 +++++++++ 3 files changed, 20 insertions(+), 19 deletions(-) diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index e2abe8bb70b5..dbe972899666 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -24,12 +24,3 @@ const UNKNOWN_CHAR = Char(0) const INPUT_FILE = joinpath(dirname(@__FILE__), "input.txt") const VOCAB_FILE = joinpath(dirname(@__FILE__), "vocab.dat") -# helper function to convert a char into index in vocabulary -function char_idx(vocab :: Dict{Char,Int}, c :: Char) - if haskey(vocab, c) - vocab[c] - else - vocab[UNKNOWN_CHAR] - end -end - diff --git a/examples/char-lstm/sampler.jl 
b/examples/char-lstm/sampler.jl index c481da1d1950..2df8de8c0302 100644 --- a/examples/char-lstm/sampler.jl +++ b/examples/char-lstm/sampler.jl @@ -1,4 +1,5 @@ include(joinpath(dirname(@__FILE__), "config.jl")) +include(joinpath(dirname(@__FILE__), "seq-data.jl")) using StatsBase using MXNet @@ -7,23 +8,22 @@ using MXNet vocab = build_vocabulary(INPUT_FILE, VOCAB_FILE) # prepare data provider -jl_data = [(symbol(NAME, "_data_$t"), zeros(mx.MX_float, (length(vocab), BATCH_SIZE_SMP))) - for t = 1:SEQ_LENGTH] -jl_c = [(symbol(NAME, "_l$(l)_init_c"), zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) - for l = 1:LSTM_N_LAYER] -jl_h = [(symbol(NAME, "_l$(l)_init_h"), zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) - for l = 1:LSTM_N_LAYER] +jl_data = Pair[(symbol(NAME, "_data_$t") => zeros(mx.MX_float, (length(vocab), BATCH_SIZE_SMP))) + for t = 1:SEQ_LENGTH] +jl_c = Pair[(symbol(NAME, "_l$(l)_init_c") => zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) + for l = 1:LSTM_N_LAYER] +jl_h = Pair[(symbol(NAME, "_l$(l)_init_h") => zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) + for l = 1:LSTM_N_LAYER] # the first input in the sequence -jl_data_start = jl_data[1] +jl_data_start = jl_data[1].second jl_data_start[char_idx(vocab, SAMPLE_START),:] = 1 -data = mx.ArrayDataProvider(nd_data ∪ nd_c ∪ nd_h) - # load model model = mx.load_checkpoint(CKPOINT_PREFIX, N_EPOCH, mx.FeedForward) # prepare outputs +Base.zero(::Type{Char}) = Char(0) output_samples = zeros(Char, (SAMPLE_LENGTH, BATCH_SIZE_SMP)) output_samples[1, :] = SAMPLE_START @@ -32,6 +32,7 @@ inv_vocab = Dict([v => k for (k,v) in vocab]) # do prediction and sampling step by step for t = 2:SAMPLE_LENGTH-1 + data = mx.ArrayDataProvider(jl_data ∪ jl_c ∪ jl_h) outputs = mx.predict(model, data) # we will only use the first output to do sampling @@ -41,7 +42,7 @@ for t = 2:SAMPLE_LENGTH-1 for i = 1:BATCH_SIZE_SMP prob = WeightVec(outputs[:, i]) k = sample(prob) - output_samples[t, k] = inv_vocab[k] + output_samples[t, i] = 
inv_vocab[k] jl_data_start[k, i] = 1 end end diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index 8bbcfece56c1..2837ab7d8b55 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -97,3 +97,12 @@ function mx.eachbatch(p :: CharSeqProvider) return Task(_text_iter) end +# helper function to convert a char into index in vocabulary +function char_idx(vocab :: Dict{Char,Int}, c :: Char) + if haskey(vocab, c) + vocab[c] + else + vocab[UNKNOWN_CHAR] + end +end + From a6886c2280edccf41bcafb0aa4af39acc6b8b6f6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 13 Nov 2015 00:09:19 -0500 Subject: [PATCH 225/630] copy states over for sampler --- examples/char-lstm/lstm.jl | 5 ++++- examples/char-lstm/sampler.jl | 23 ++++++++++++++++++----- src/model.jl | 9 ++++++--- 3 files changed, 28 insertions(+), 9 deletions(-) diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index 538fe0497676..7ad9fa91b8d9 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -39,7 +39,7 @@ function lstm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMPara end function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_class::Int; - dropout::Real=0, name::Symbol=gensym()) + dropout::Real=0, name::Symbol=gensym(), output_states::Bool=false) # placeholder nodes for all parameters embed_W = mx.Variable(symbol(name, "_embed_weight")) @@ -94,6 +94,9 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla end # now group all outputs together + if output_states + outputs = outputs ∪ [x[2].c for x in layer_param_states] ∪ [x[2].h for x in layer_param_states] + end return mx.Group(outputs...) 
end diff --git a/examples/char-lstm/sampler.jl b/examples/char-lstm/sampler.jl index 2df8de8c0302..02b157b2f539 100644 --- a/examples/char-lstm/sampler.jl +++ b/examples/char-lstm/sampler.jl @@ -19,8 +19,14 @@ jl_h = Pair[(symbol(NAME, "_l$(l)_init_h") => zeros(mx.MX_float, (DIM_HIDDEN, jl_data_start = jl_data[1].second jl_data_start[char_idx(vocab, SAMPLE_START),:] = 1 -# load model -model = mx.load_checkpoint(CKPOINT_PREFIX, N_EPOCH, mx.FeedForward) +# define a LSTM with sequence length 1, also output states so that we could manually copy the states +# when sampling the next char +lstm = LSTM(LSTM_N_LAYER, 1, DIM_HIDDEN, DIM_EMBED, n_class, dropout=DROPOUT, name=NAME, output_states=true) +model = mx.FeedForward(lstm, context=cpu()) + +# load parameters from traind LSTM, though the sequence length is different, since the weights are shared +# over time, this should be compatible. +model = mx.load_checkpoint(model, CKPOINT_PREFIX, N_EPOCH, allow_different_arch=true) # prepare outputs Base.zero(::Type{Char}) = Char(0) @@ -33,11 +39,12 @@ inv_vocab = Dict([v => k for (k,v) in vocab]) # do prediction and sampling step by step for t = 2:SAMPLE_LENGTH-1 data = mx.ArrayDataProvider(jl_data ∪ jl_c ∪ jl_h) - outputs = mx.predict(model, data) + preds = mx.predict(model, data) - # we will only use the first output to do sampling - outputs = outputs[1] + # the first output is prediction + outputs = preds[1] + # do sampling and init the next inputs jl_data_start[:] = 0 for i = 1:BATCH_SIZE_SMP prob = WeightVec(outputs[:, i]) @@ -45,6 +52,12 @@ for t = 2:SAMPLE_LENGTH-1 output_samples[t, i] = inv_vocab[k] jl_data_start[k, i] = 1 end + + # copy the states over + for l = 1:LSTM_N_LAYER + copy!(jl_c[l][2], preds[1+l]) + copy!(jl_h[l][2], preds[1+LSTM_N_LAYER+l]) + end end output_texts = [join(output_samples[:,i]) for i = 1:BATCH_SIZE_SMP] diff --git a/src/model.jl b/src/model.jl index 015d9c09af05..b232ea28bd9d 100644 --- a/src/model.jl +++ b/src/model.jl @@ -550,15 +550,18 @@ 
function load_checkpoint(prefix :: AbstractString, epoch :: Int, ::Type{FeedForw return model end -function load_checkpoint(self :: FeedForward, prefix :: AbstractString, epoch :: Int; overwrite :: Bool = true) +function load_checkpoint(self :: FeedForward, prefix :: AbstractString, epoch :: Int; + overwrite :: Bool = true, allow_different_arch :: Bool = false) if isdefined(self, :arg_params) && isdefined(self, :aux_params) && !overwrite info("model weights already exists, skip loading... (call with overwrite=true if needed)") return self end arch, arg_params, aux_params = load_checkpoint(prefix, epoch) - # TODO: is there better way to compare two symbols - @assert(to_json(self.arch) == to_json(arch), "Cannot load from a checkpoint with different network architecture") + if !allow_different_arch + # TODO: is there better way to compare two symbols + @assert(to_json(self.arch) == to_json(arch), "Cannot load from a checkpoint with different network architecture") + end self.arg_params = arg_params self.aux_params = aux_params return self From f915865753002f88d4fe7f08b316ffd4e494c528 Mon Sep 17 00:00:00 2001 From: pluskid Date: Fri, 13 Nov 2015 00:10:04 -0500 Subject: [PATCH 226/630] use adam optimizer --- examples/char-lstm/config.jl | 4 +--- examples/char-lstm/train.jl | 3 +-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index dbe972899666..b8aa6df49247 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -5,10 +5,8 @@ const DIM_HIDDEN = 256 const DIM_EMBED = 256 const LSTM_N_LAYER = 2 const N_EPOCH = 21 -const BASE_LR = 0.5 -const LR_DECAY = 0.7 +const BASE_LR = 0.05 const WEIGHT_DECAY = 0.00001 -const MOMENTUM = 0.0 const CLIP_GRADIENT = 1 const NAME = :ptb const N_GPU = 4 diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index ed4e4403c402..cb19891f03b1 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -28,8 
+28,7 @@ else end model = mx.FeedForward(lstm, context=context) -optimizer = mx.SGD(lr_scheduler=mx.LearningRate.Exp(BASE_LR, gamma=LR_DECAY), - momentum=MOMENTUM, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) +optimizer = mx.ADAM(lr=BASE_LR, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, initializer=mx.UniformInitializer(0.1), From db7ae38008c5ca294ec41c0262a82c35aec55430 Mon Sep 17 00:00:00 2001 From: pluskid Date: Fri, 13 Nov 2015 00:14:58 -0500 Subject: [PATCH 227/630] fix LSTM sampler --- examples/char-lstm/sampler.jl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/examples/char-lstm/sampler.jl b/examples/char-lstm/sampler.jl index 02b157b2f539..a641ccb39d69 100644 --- a/examples/char-lstm/sampler.jl +++ b/examples/char-lstm/sampler.jl @@ -1,4 +1,5 @@ include(joinpath(dirname(@__FILE__), "config.jl")) +include(joinpath(dirname(@__FILE__), "lstm.jl")) include(joinpath(dirname(@__FILE__), "seq-data.jl")) using StatsBase @@ -6,10 +7,11 @@ using MXNet # load vocabulary vocab = build_vocabulary(INPUT_FILE, VOCAB_FILE) +n_class = length(vocab) # prepare data provider jl_data = Pair[(symbol(NAME, "_data_$t") => zeros(mx.MX_float, (length(vocab), BATCH_SIZE_SMP))) - for t = 1:SEQ_LENGTH] + for t = 1:1] jl_c = Pair[(symbol(NAME, "_l$(l)_init_c") => zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) for l = 1:LSTM_N_LAYER] jl_h = Pair[(symbol(NAME, "_l$(l)_init_h") => zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) @@ -22,7 +24,7 @@ jl_data_start[char_idx(vocab, SAMPLE_START),:] = 1 # define a LSTM with sequence length 1, also output states so that we could manually copy the states # when sampling the next char lstm = LSTM(LSTM_N_LAYER, 1, DIM_HIDDEN, DIM_EMBED, n_class, dropout=DROPOUT, name=NAME, output_states=true) -model = mx.FeedForward(lstm, context=cpu()) +model = mx.FeedForward(lstm, context=mx.cpu()) # load parameters from traind LSTM, though the sequence 
length is different, since the weights are shared # over time, this should be compatible. From 88299124eae5870282fecef3492ae96739fdb708 Mon Sep 17 00:00:00 2001 From: pluskid Date: Fri, 13 Nov 2015 00:26:59 -0500 Subject: [PATCH 228/630] add basic readme for lstm example --- examples/char-lstm/README.md | 80 ++++++++++++++++++++++++++++++++++++ 1 file changed, 80 insertions(+) create mode 100644 examples/char-lstm/README.md diff --git a/examples/char-lstm/README.md b/examples/char-lstm/README.md new file mode 100644 index 000000000000..6e122a96d945 --- /dev/null +++ b/examples/char-lstm/README.md @@ -0,0 +1,80 @@ +# LSTM char-rnn + +Because we explicitly unroll the LSTM/RNN over time for a fixed sequence length, +it is easy to fit this model into the existing FeedForward model and re-use everything. +To get a more flexible LSTM/RNN implementation that avoids explicit unrolling and +deals with variable-length sequences, we still need to implement another model +beside the existing FeedForward. + +To run this example, you will need to install two extra Julia packages: `Iterators.jl` +and `StatsBase.jl`. + +## Training + +This example is adapted from the +[example in Python binding](https://github.com/dmlc/mxnet/blob/master/example/rnn/char_lstm.ipynb) of +MXNet. The data `input.txt` can be downloaded [here](https://github.com/dmlc/web-data/tree/master/mxnet/tinyshakespeare). + +Modify parameters in [config.jl](config.jl) and then run [train.jl](train.jl). An example output +of training looks like this: +``` +... 
+INFO: Speed: 355.18 samples/sec +INFO: == Epoch 020 ========== +INFO: ## Training summary +INFO: NLL = 1.9670 +INFO: perplexity = 7.1494 +INFO: time = 88.0757 seconds +INFO: ## Validation summary +INFO: NLL = 2.0452 +INFO: perplexity = 7.7307 +INFO: Saved checkpoint to '/cbcl/cbcl01/chiyuan/mxnet/julia/examples/char-lstm/checkpoints/ptb-0020.params' +INFO: Speed: 366.23 samples/sec +INFO: Speed: 360.19 samples/sec +INFO: Speed: 355.77 samples/sec +INFO: Speed: 356.83 samples/sec +INFO: Speed: 354.80 samples/sec +INFO: Speed: 349.89 samples/sec +INFO: Speed: 352.00 samples/sec +INFO: Speed: 358.46 samples/sec +INFO: Speed: 356.58 samples/sec +INFO: Speed: 353.03 samples/sec +INFO: Speed: 351.98 samples/sec +INFO: Speed: 365.54 samples/sec +INFO: Speed: 359.14 samples/sec +INFO: Speed: 355.60 samples/sec +INFO: Speed: 362.44 samples/sec +INFO: Speed: 359.01 samples/sec +INFO: Speed: 357.99 samples/sec +INFO: Speed: 350.07 samples/sec +INFO: Speed: 358.03 samples/sec +INFO: == Epoch 021 ========== +INFO: ## Training summary +INFO: NLL = 1.9698 +INFO: perplexity = 7.1695 +INFO: time = 87.9392 seconds +INFO: ## Validation summary +INFO: NLL = 2.0458 +INFO: perplexity = 7.7353 +INFO: Saved checkpoint to '/cbcl/cbcl01/chiyuan/mxnet/julia/examples/char-lstm/checkpoints/ptb-0021.params' +``` + +## Sampling + +Run [sampler.jl](sampler.jl) to generate sample sentences from the trained model. Some example sentences are +``` +... +## Sample 8 +a, good. Baps, +To she tur in his God twerian: well Resice hestle, the a I here's a not as I lign? +H? + +## Sample 9 +ame. +What high sisiss itle by mard have of on sol I cound: +And pruch you betsts; you god eie hearry? + +## Sample 10 +and oar +Serens Iffall as a we of tere geling pover your nive relly lers; is here whill cheadaplee k? 
+``` From 5da45f39b38244d33316b8ef9e2d181d0942d8df Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 13 Nov 2015 00:51:43 -0500 Subject: [PATCH 229/630] use a more sensible default argument in favor of #24 --- src/model.jl | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/src/model.jl b/src/model.jl index b232ea28bd9d..80f0a9644778 100644 --- a/src/model.jl +++ b/src/model.jl @@ -158,9 +158,9 @@ end :param Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory allocation of the :class:`Executor` depends on the mini-batch size of the test data provider. If you call predict twice with data provider of the same batch-size, - then the executor can be re-used. Otherwise, if ``overwrite`` is false (default), - an error will be raised; if ``overwrite`` is set to true, a new :class:`Executor` - will be created to replace the old one. + then the executor can be potentially be re-used. So, if ``overwrite`` is false, + we will try to re-use, and raise an error if batch-size changed. If ``overwrite`` + is true (the default), a new :class:`Executor` will be created to replace the old one. .. note:: @@ -172,12 +172,19 @@ end For the same reason, currently prediction will only use the first device even if multiple devices are provided to construct the model. + .. note:: + + If you perform further after prediction. The weights are not automatically synchronized if ``overwrite`` + is set to false and the old predictor is re-used. In this case + setting ``overwrite`` to true (the default) will re-initialize the predictor the next time you call + predict and synchronize the weights again. 
+ :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` =# -function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = false) +function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = true) predict(self, data; overwrite = overwrite, callback=callback) end -function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=false, callback::Union{Function,Void}=nothing) +function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=true, callback::Union{Function,Void}=nothing) data_shapes = provide_data(data) data_names = [x[1] for x in data_shapes] _setup_predictor(self, overwrite; data_shapes...) From 3bc4fb5c0f83d6b43654ed26063c70e1d246efa3 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 13 Nov 2015 00:55:29 -0500 Subject: [PATCH 230/630] fix path in char-lstm example outputs --- examples/char-lstm/README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/char-lstm/README.md b/examples/char-lstm/README.md index 6e122a96d945..7d625d090c72 100644 --- a/examples/char-lstm/README.md +++ b/examples/char-lstm/README.md @@ -11,8 +11,8 @@ and `StatsBase.jl`. ## Training -This example is adapted from the -[example in Python binding](https://github.com/dmlc/mxnet/blob/master/example/rnn/char_lstm.ipynb) of +This example is adapted from the +[example in Python binding](https://github.com/dmlc/mxnet/blob/master/example/rnn/char_lstm.ipynb) of MXNet. The data `input.txt` can be downloaded [here](https://github.com/dmlc/web-data/tree/master/mxnet/tinyshakespeare). Modify parameters in [config.jl](config.jl) and then run [train.jl](train.jl). 
An example output @@ -28,7 +28,7 @@ INFO: time = 88.0757 seconds INFO: ## Validation summary INFO: NLL = 2.0452 INFO: perplexity = 7.7307 -INFO: Saved checkpoint to '/cbcl/cbcl01/chiyuan/mxnet/julia/examples/char-lstm/checkpoints/ptb-0020.params' +INFO: Saved checkpoint to 'examples/char-lstm/checkpoints/ptb-0020.params' INFO: Speed: 366.23 samples/sec INFO: Speed: 360.19 samples/sec INFO: Speed: 355.77 samples/sec @@ -56,7 +56,7 @@ INFO: time = 87.9392 seconds INFO: ## Validation summary INFO: NLL = 2.0458 INFO: perplexity = 7.7353 -INFO: Saved checkpoint to '/cbcl/cbcl01/chiyuan/mxnet/julia/examples/char-lstm/checkpoints/ptb-0021.params' +INFO: Saved checkpoint to 'examples/char-lstm/checkpoints/ptb-0021.params' ``` ## Sampling From 0e6402df6c76abff674e417ee7c464394f6ff56e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 13 Nov 2015 01:03:52 -0500 Subject: [PATCH 231/630] add simple visualization example of char-lstm --- examples/char-lstm/.gitignore | 2 ++ examples/char-lstm/sampler.jl | 2 +- examples/char-lstm/visualize.jl | 15 +++++++++++++++ 3 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 examples/char-lstm/visualize.jl diff --git a/examples/char-lstm/.gitignore b/examples/char-lstm/.gitignore index 7588feca6bfe..d8923cc201b1 100644 --- a/examples/char-lstm/.gitignore +++ b/examples/char-lstm/.gitignore @@ -1,3 +1,5 @@ input.txt vocab.dat checkpoints +visualize.dot +visualize.svg diff --git a/examples/char-lstm/sampler.jl b/examples/char-lstm/sampler.jl index a641ccb39d69..22166b7700c9 100644 --- a/examples/char-lstm/sampler.jl +++ b/examples/char-lstm/sampler.jl @@ -23,7 +23,7 @@ jl_data_start[char_idx(vocab, SAMPLE_START),:] = 1 # define a LSTM with sequence length 1, also output states so that we could manually copy the states # when sampling the next char -lstm = LSTM(LSTM_N_LAYER, 1, DIM_HIDDEN, DIM_EMBED, n_class, dropout=DROPOUT, name=NAME, output_states=true) +lstm = LSTM(LSTM_N_LAYER, 1, DIM_HIDDEN, DIM_EMBED, n_class, 
name=NAME, output_states=true) model = mx.FeedForward(lstm, context=mx.cpu()) # load parameters from traind LSTM, though the sequence length is different, since the weights are shared diff --git a/examples/char-lstm/visualize.jl b/examples/char-lstm/visualize.jl new file mode 100644 index 000000000000..336afea30705 --- /dev/null +++ b/examples/char-lstm/visualize.jl @@ -0,0 +1,15 @@ +include(joinpath(dirname(@__FILE__), "config.jl")) +include(joinpath(dirname(@__FILE__), "lstm.jl")) + +using MXNet + +vis_n_layer = 2 +vis_seq_len = 3 +vis_n_class = 128 + +lstm = LSTM(vis_n_layer, vis_seq_len, DIM_HIDDEN, DIM_EMBED, vis_n_class, name=NAME, output_states=true) + +open("visualize.dot", "w") do io + println(io, mx.to_graphviz(lstm)) +end +run(pipeline(`dot -Tsvg visualize.dot`, stdout="visualize.svg")) From a392e042216199b3923a9663ca81f632c7ae9ac7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 13 Nov 2015 01:31:00 -0500 Subject: [PATCH 232/630] tweak visualization --- src/visualize.jl | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/visualize.jl b/src/visualize.jl index e6ada19f179c..0b5c0c3d8e90 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -33,12 +33,13 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp conf = JSON.parse(to_json(network)) nodes = conf["nodes"] - heads = unique(conf["heads"][1]+1) + heads = unique([x[1]+1 for x in conf["heads"]]) node_attr = Dict(:shape => :box, :fixedsize => true, :width => 1.3, :height => 0.8034, :style => :filled) io = IOBuffer() println(io, "digraph $(_simple_escape(title)) {") println(io, "node [fontsize=10];") + println(io, "edge [fontsize=10];") # color map cm = ("#8dd3c7", "#fb8072", "#ffffb3", "#bebada", "#80b1d3", @@ -54,9 +55,11 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp if op == "null" if i ∈ heads + # heads are output nodes label = node["name"] attr[:fillcolor] = cm[1] else + # otherwise, input 
nodes, might be data, label or parameters continue end elseif op == "Convolution" @@ -81,12 +84,15 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp attr[:fillcolor] = cm[5] elseif op ∈ ("Concat", "Flatten", "Reshape") attr[:fillcolor] = cm[6] - elseif endswith(op, "Output") + elseif endswith(op, "Output") || op == "BlockGrad" attr[:fillcolor] = cm[7] else attr[:fillcolor] = cm[8] end + if op != "null" + label = "$name\n$label" + end attr[:label] = label _format_graphviz_node(io, name, attr) end @@ -116,7 +122,7 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp label = "(" * join([string(x) for x in shape], ",") * ")" attr[:label] = label end - _format_graphviz_edge(io, input_name, name, attr) + _format_graphviz_edge(io, name, input_name, attr) end end end From 75c32cd7b14db86044b906ae8ce191a3d8385c90 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 13 Nov 2015 09:09:51 -0500 Subject: [PATCH 233/630] Add gitter link to dmlc/mxnet --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 7dc99a24a7b4..a6ae2d5cebe8 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,8 @@ [![codecov.io](https://codecov.io/github/dmlc/MXNet.jl/coverage.svg?branch=master)](https://codecov.io/github/dmlc/MXNet.jl?branch=master) [![Documentation Status](https://readthedocs.org/projects/mxnetjl/badge/?version=latest)](http://mxnetjl.readthedocs.org/en/latest/?badge=latest) [![License](http://dmlc.github.io/img/apache2.svg)](LICENSE.md) +[![Join the chat at https://gitter.im/dmlc/mxnet](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/dmlc/mxnet?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. 
Some highlight of features include: From 5b9df89d68ae1847b0cc1e47d20f21e3b0196883 Mon Sep 17 00:00:00 2001 From: pluskid Date: Fri, 13 Nov 2015 09:58:35 -0500 Subject: [PATCH 234/630] update README for LSTM example. --- examples/char-lstm/README.md | 124 +++++++++++++++++++++++------------ examples/char-lstm/config.jl | 2 +- 2 files changed, 84 insertions(+), 42 deletions(-) diff --git a/examples/char-lstm/README.md b/examples/char-lstm/README.md index 6e122a96d945..6e250ba5788c 100644 --- a/examples/char-lstm/README.md +++ b/examples/char-lstm/README.md @@ -19,62 +19,104 @@ Modify parameters in [config.jl](config.jl) and then run [train.jl](train.jl). A of training looks like this: ``` ... -INFO: Speed: 355.18 samples/sec +INFO: Speed: 356.66 samples/sec +INFO: Speed: 357.72 samples/sec INFO: == Epoch 020 ========== INFO: ## Training summary -INFO: NLL = 1.9670 -INFO: perplexity = 7.1494 -INFO: time = 88.0757 seconds +INFO: NLL = 1.4672 +INFO: perplexity = 4.3373 +INFO: time = 87.2631 seconds INFO: ## Validation summary -INFO: NLL = 2.0452 -INFO: perplexity = 7.7307 -INFO: Saved checkpoint to '/cbcl/cbcl01/chiyuan/mxnet/julia/examples/char-lstm/checkpoints/ptb-0020.params' -INFO: Speed: 366.23 samples/sec -INFO: Speed: 360.19 samples/sec -INFO: Speed: 355.77 samples/sec -INFO: Speed: 356.83 samples/sec -INFO: Speed: 354.80 samples/sec -INFO: Speed: 349.89 samples/sec -INFO: Speed: 352.00 samples/sec -INFO: Speed: 358.46 samples/sec -INFO: Speed: 356.58 samples/sec -INFO: Speed: 353.03 samples/sec -INFO: Speed: 351.98 samples/sec -INFO: Speed: 365.54 samples/sec -INFO: Speed: 359.14 samples/sec -INFO: Speed: 355.60 samples/sec -INFO: Speed: 362.44 samples/sec -INFO: Speed: 359.01 samples/sec -INFO: Speed: 357.99 samples/sec -INFO: Speed: 350.07 samples/sec -INFO: Speed: 358.03 samples/sec +INFO: NLL = 1.6374 +INFO: perplexity = 5.1418 +INFO: Saved checkpoint to 'examples/char-lstm/checkpoints/ptb-0020.params' +INFO: Speed: 368.74 samples/sec +INFO: Speed: 361.04 
samples/sec +INFO: Speed: 360.02 samples/sec +INFO: Speed: 362.34 samples/sec +INFO: Speed: 360.80 samples/sec +INFO: Speed: 362.77 samples/sec +INFO: Speed: 357.18 samples/sec +INFO: Speed: 355.30 samples/sec +INFO: Speed: 362.33 samples/sec +INFO: Speed: 359.23 samples/sec +INFO: Speed: 358.09 samples/sec +INFO: Speed: 356.89 samples/sec +INFO: Speed: 371.91 samples/sec +INFO: Speed: 372.24 samples/sec +INFO: Speed: 356.59 samples/sec +INFO: Speed: 356.64 samples/sec +INFO: Speed: 360.24 samples/sec +INFO: Speed: 360.32 samples/sec +INFO: Speed: 362.38 samples/sec INFO: == Epoch 021 ========== INFO: ## Training summary -INFO: NLL = 1.9698 -INFO: perplexity = 7.1695 -INFO: time = 87.9392 seconds +INFO: NLL = 1.4655 +INFO: perplexity = 4.3297 +INFO: time = 86.9243 seconds INFO: ## Validation summary -INFO: NLL = 2.0458 -INFO: perplexity = 7.7353 -INFO: Saved checkpoint to '/cbcl/cbcl01/chiyuan/mxnet/julia/examples/char-lstm/checkpoints/ptb-0021.params' +INFO: NLL = 1.6366 +INFO: perplexity = 5.1378 +INFO: Saved checkpoint to 'examples/char-lstm/checkpoints/ptb-0021.params' ``` ## Sampling Run [sampler.jl](sampler.jl) to generate sample sentences from the trained model. Some example sentences are ``` -... +## Sample 1 +all have sir, +Away will fill'd in His time, I'll keep her, do not madam, if they here? Some more ha? + +## Sample 2 +am. + +CLAUDIO: +Hone here, let her, the remedge, and I know not slept a likely, thou some soully free? + +## Sample 3 +arrel which noble thing +The exchnachsureding worns: I ne'er drunken Biancas, fairer, than the lawfu? + +## Sample 4 +augh assalu, you'ld tell me corn; +Farew. First, for me of a loved. Has thereat I knock you presents? + +## Sample 5 +ame the first answer. + +MARIZARINIO: +Door of Angelo as her lord, shrield liken Here fellow the fool ? + +## Sample 6 +ad well. + +CLAUDIO: +Soon him a fellows here; for her fine edge in a bogms' lord's wife. + +LUCENTIO: +I? + +## Sample 7 +adrezilian measure. 
+ +LUCENTIO: +So, help'd you hath nes have a than dream's corn, beautio, I perchas? + ## Sample 8 -a, good. Baps, -To she tur in his God twerian: well Resice hestle, the a I here's a not as I lign? -H? +as eatter me; +The girlly: and no other conciolation! + +BISTRUMIO: +I have be rest girl. O, that I a h? ## Sample 9 -ame. -What high sisiss itle by mard have of on sol I cound: -And pruch you betsts; you god eie hearry? +and is intend you sort: +What held her all 'clama's for maffice. Some servant.' what I say me the cu? ## Sample 10 -and oar -Serens Iffall as a we of tere geling pover your nive relly lers; is here whill cheadaplee k? +an thoughts will said in our pleasue, +Not scanin on him that you live; believaries she. + +ISABELLLLL? ``` diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index b8aa6df49247..974989167ad1 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -5,7 +5,7 @@ const DIM_HIDDEN = 256 const DIM_EMBED = 256 const LSTM_N_LAYER = 2 const N_EPOCH = 21 -const BASE_LR = 0.05 +const BASE_LR = 0.01 const WEIGHT_DECAY = 0.00001 const CLIP_GRADIENT = 1 const NAME = :ptb From 379de2bf669f3de4fda05dfae52861a1b9ebb376 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 14 Nov 2015 21:09:56 -0500 Subject: [PATCH 235/630] part of LSTM tutorial --- docs/index.rst | 1 + docs/tutorial/char-lstm.rst | 75 +++++++++++++++++++++++++++++++++++++ examples/char-lstm/lstm.jl | 10 ++++- 3 files changed, 85 insertions(+), 1 deletion(-) create mode 100644 docs/tutorial/char-lstm.rst diff --git a/docs/index.rst b/docs/index.rst index 4f718e1861b9..05077bed6904 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -19,6 +19,7 @@ For more details, see documentation below. Please also checkout the `examples :caption: Tutorials tutorial/mnist + tutorial/char-lstm .. 
toctree:: :maxdepth: 2 diff --git a/docs/tutorial/char-lstm.rst b/docs/tutorial/char-lstm.rst new file mode 100644 index 000000000000..4cb9c1c814ae --- /dev/null +++ b/docs/tutorial/char-lstm.rst @@ -0,0 +1,75 @@ +Generating Random Sentence with LSTM RNN +======================================== + +This tutorial shows how to train a LSTM (Long short-term memory) RNN (recurrent +neural network) to perform character-level sequence training and prediction. The +original model, usually called ``char-rnn`` is described in `Andrej Karpathy's +blog `_, with +a reference implementation in Torch available `here +`_. + +Because MXNet.jl does not have a specialized model for recurrent neural networks +yet, the example shown here is an implementation of LSTM by using the default +:class:`FeedForward` model via explicitly unfolding over time. We will be using +fixed-length input sequence for training. The code is adapted from the `char-rnn +example for MXNet's Python binding +`_, which +demonstrates how to use low-level :doc:`symbolic APIs ` to +build customized neural network models directly. + +LSTM Cells +---------- + +Christopher Olah has a `great blog post about LSTM +`_ with beautiful and +clear illustrations. So we will not repeat the definition and explanation of +what an LSTM cell is here. Basically, an LSTM cell takes input ``x``, as well as +previous states (including ``c`` and ``h``), and produce the next states. +We define a helper type to bundle the two state variables together: + +.. literalinclude:: ../../examples/char-lstm/lstm.jl + :language: julia + :start-after: #--LSTMState + :end-before: #--/LSTMState + +Because LSTM weights are shared at every time when we do explicit unfolding, so +we also define a helper type to hold all the weights (and bias) for an LSTM cell +for convenience. + +.. 
literalinclude:: ../../examples/char-lstm/lstm.jl + :language: julia + :start-after: #--LSTMParam + :end-before: #--/LSTMParam + +Note all the variables are of type :class:`SymbolicNode`. We will construct the +LSTM network as a symbolic computation graph, which is then instantiated with +:class:`NDArray` for actual computation. + +.. literalinclude:: ../../examples/char-lstm/lstm.jl + :language: julia + :start-after: #--lstm_cell + :end-before: #--/lstm_cell + +The following figure is stolen from +`Christopher Olah's blog +`_, which illustrate +exactly what the code snippet above is doing. + +.. image:: http://colah.github.io/posts/2015-08-Understanding-LSTMs/img/LSTM3-chain.png + +In particular, instead of defining the four gates independently, we do the +computation together and then use :class:`SliceChannel` to split them into four +outputs. The computation of gates are all done with the symbolic API. The return +value is a LSTM state containing the output of a LSTM cell. + +Unfolding LSTM +-------------- +Using the LSTM cell defined above, we are now ready to define a function to +unfold a LSTM network with L layers and T time steps. The first part of the +function is just defining all the symbolic variables for the shared weights and +states. + +.. literalinclude:: ../../examples/char-lstm/lstm.jl + :language: julia + :start-after: #--LSTM-part1 + :end-before: #--/LSTM-part1 diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index 7ad9fa91b8d9..ab0c678f581d 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -1,18 +1,23 @@ # An explicitly unrolled LSTM with fixed sequence length. 
using MXNet +#--LSTMState immutable LSTMState c :: mx.SymbolicNode h :: mx.SymbolicNode end +#--/LSTMState +#--LSTMParam immutable LSTMParam i2h_W :: mx.SymbolicNode h2h_W :: mx.SymbolicNode i2h_b :: mx.SymbolicNode h2h_b :: mx.SymbolicNode end +#--/LSTMParam +#--lstm_cell function lstm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMParam; num_hidden::Int=512, dropout::Real=0, name::Symbol=gensym()) @@ -37,7 +42,9 @@ function lstm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMPara return LSTMState(next_c, next_h) end +#--/lstm_cell +#--LSTM-part1 function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_class::Int; dropout::Real=0, name::Symbol=gensym(), output_states::Bool=false) @@ -55,6 +62,8 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla mx.Variable(symbol(name, "_l$(i)_init_h"))) (param, state) end + #... + #--/LSTM-part1 # now unroll over time outputs = mx.SymbolicNode[] @@ -64,7 +73,6 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla hidden = mx.FullyConnected(data=data, weight=embed_W, num_hidden=dim_embed, no_bias=true, name=symbol(name, "_embed_$t")) - # stack LSTM cells for i = 1:n_layer l_param, l_state = layer_param_states[i] From c804be9df6168edae462ba5d6b5424c020ca4d16 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 14 Nov 2015 23:36:46 -0500 Subject: [PATCH 236/630] char-LSTM tutorial --- docs/api/io.rst | 87 ++--- docs/api/model.rst | 13 +- docs/tutorial/char-lstm.rst | 297 ++++++++++++++++- docs/tutorial/images/LSTM3-chain.png | Bin 0 -> 229688 bytes docs/tutorial/images/char-lstm-vis.svg | 435 +++++++++++++++++++++++++ examples/char-lstm/lstm.jl | 8 +- examples/char-lstm/seq-data.jl | 11 + examples/char-lstm/train.jl | 9 +- src/io.jl | 84 ++--- 9 files changed, 857 insertions(+), 87 deletions(-) create mode 100644 docs/tutorial/images/LSTM3-chain.png create mode 100644 docs/tutorial/images/char-lstm-vis.svg diff --git 
a/docs/api/io.rst b/docs/api/io.rst index e9d9c04e9f3c..f1ab959be6f2 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -59,62 +59,66 @@ and split it into mini-batches so that the model can consume the data in a unifo By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface is implemented on the provider type itself. But the extra layer of abstraction allows us to - implement a data provider easily via a Julia ``Task`` coroutine. - The detailed interface function is listed below: + implement a data provider easily via a Julia ``Task`` coroutine. See the + data provider defined in :doc:`the char-lstm example + ` for an example of using coroutine to define data + providers. - .. function:: Base.eltype(provider) -> AbstractDataBatch +The detailed interface functions for the iterator API is listed below: - :param AbstractDataProvider provider: the data provider. - :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. +.. function:: Base.eltype(provider) -> AbstractDataBatch - .. function:: Base.start(provider) -> AbstractDataProviderState + :param AbstractDataProvider provider: the data provider. + :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. - :param AbstractDataProvider provider: the data provider. +.. function:: Base.start(provider) -> AbstractDataProviderState - This function is always called before iterating into the dataset. It should initialize - the iterator, reset the index, and do data shuffling if needed. + :param AbstractDataProvider provider: the data provider. - .. function:: Base.done(provider, state) -> Bool + This function is always called before iterating into the dataset. It should initialize + the iterator, reset the index, and do data shuffling if needed. - :param AbstractDataProvider provider: the data provider. - :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. 
- :return: true if there is no more data to iterate in this dataset. +.. function:: Base.done(provider, state) -> Bool - .. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) + :param AbstractDataProvider provider: the data provider. + :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. + :return: true if there is no more data to iterate in this dataset. - :param AbstractDataProvider provider: the data provider. - :return: the current data batch, and the state for the next iteration. +.. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) - Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that - is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this - case, you can safely assume that + :param AbstractDataProvider provider: the data provider. + :return: the current data batch, and the state for the next iteration. - * :func:`Base.start` will always be called, and called only once before the iteration starts. - * :func:`Base.done` will always be called at the beginning of every iteration and always be called once. - * If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with - a call to :func:`Base.start`. - * :func:`Base.next` will always be called only once in each iteration. It will always be called after - one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will - not be called. +Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that +is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this +case, you can safely assume that - With those assumptions, it will be relatively easy to adapt any existing iterator. 
See the implementation - of the built-in :class:`MXDataProvider` for example. +* :func:`Base.start` will always be called, and called only once before the iteration starts. +* :func:`Base.done` will always be called at the beginning of every iteration and always be called once. +* If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with + a call to :func:`Base.start`. +* :func:`Base.next` will always be called only once in each iteration. It will always be called after + one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will + not be called. - .. caution:: +With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation +of the built-in :class:`MXDataProvider` for example. - Please do not use the one data provider simultaneously in two different places, either in parallel, - or in a nested loop. For example, the behavior for the following code is undefined +.. caution:: - .. code-block:: julia + Please do not use the one data provider simultaneously in two different places, either in parallel, + or in a nested loop. For example, the behavior for the following code is undefined - for batch in data - # updating the parameters + .. code-block:: julia + + for batch in data + # updating the parameters - # now let's test the performance on the training set - for b2 in data - # ... - end - end + # now let's test the performance on the training set + for b2 in data + # ... + end + end @@ -163,7 +167,7 @@ and split it into mini-batches so that the model can consume the data in a unifo :param AbstractDataBatch batch: the data batch object. :param Base.Symbol name: the name of the data to get, should be one of the names provided in either :func:`provide_data() ` - or :func:`provide_label() `. + or :func:`provide_label() `. :return: the corresponding data array corresponding to that name. .. 
function:: load_data!(provider, batch, targets) @@ -243,6 +247,9 @@ Built-in data providers contents of all the missing data points. :param Real label_padding: the same as ``data_padding``, except for the labels. + TODO: remove ``data_padding`` and ``label_padding``, and implement rollover that copies + the last or first several training samples to feed the padding. + diff --git a/docs/api/model.rst b/docs/api/model.rst index d5245614061b..764d3e9329a8 100644 --- a/docs/api/model.rst +++ b/docs/api/model.rst @@ -71,9 +71,9 @@ a network described using the symbolic API. :param Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory allocation of the :class:`Executor` depends on the mini-batch size of the test data provider. If you call predict twice with data provider of the same batch-size, - then the executor can be re-used. Otherwise, if ``overwrite`` is false (default), - an error will be raised; if ``overwrite`` is set to true, a new :class:`Executor` - will be created to replace the old one. + then the executor can be potentially be re-used. So, if ``overwrite`` is false, + we will try to re-use, and raise an error if batch-size changed. If ``overwrite`` + is true (the default), a new :class:`Executor` will be created to replace the old one. .. note:: @@ -85,6 +85,13 @@ a network described using the symbolic API. For the same reason, currently prediction will only use the first device even if multiple devices are provided to construct the model. + .. note:: + + If you perform further after prediction. The weights are not automatically synchronized if ``overwrite`` + is set to false and the old predictor is re-used. In this case + setting ``overwrite`` to true (the default) will re-initialize the predictor the next time you call + predict and synchronize the weights again. 
+ :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` diff --git a/docs/tutorial/char-lstm.rst b/docs/tutorial/char-lstm.rst index 4cb9c1c814ae..5b1c348e5568 100644 --- a/docs/tutorial/char-lstm.rst +++ b/docs/tutorial/char-lstm.rst @@ -17,6 +17,13 @@ example for MXNet's Python binding demonstrates how to use low-level :doc:`symbolic APIs ` to build customized neural network models directly. +The most important code snippets of this example is shown and explained here. +To see and run the complete code, please refer to the `examples/char-lstm +`_ directory. +You will need to install `Iterators.jl +`_ and `StatsBase.jl +`_ to run this example. + LSTM Cells ---------- @@ -50,12 +57,12 @@ LSTM network as a symbolic computation graph, which is then instantiated with :start-after: #--lstm_cell :end-before: #--/lstm_cell -The following figure is stolen from +The following figure is stolen (permission requested) from `Christopher Olah's blog `_, which illustrate exactly what the code snippet above is doing. -.. image:: http://colah.github.io/posts/2015-08-Understanding-LSTMs/img/LSTM3-chain.png +.. image:: images/LSTM3-chain.png In particular, instead of defining the four gates independently, we do the computation together and then use :class:`SliceChannel` to split them into four @@ -69,7 +76,293 @@ unfold a LSTM network with L layers and T time steps. The first part of the function is just defining all the symbolic variables for the shared weights and states. +The ``embed_W`` is the weights used for character embedding --- i.e. mapping the +one-hot encoded characters into real vectors. The ``pred_W`` and ``pred_b`` are +weights and bias for the final prediction at each time step. + +Then we define the weights for each LSTM cell. Note there is one cell for each +layer, and it will be replicated (unrolled) over time. The states are, however, +*not* shared over time. 
Instead, here we define the initial states here at the +beginning of a sequence, and we will update them with the output states at each +time step as we explicitly unroll the LSTM. + .. literalinclude:: ../../examples/char-lstm/lstm.jl :language: julia :start-after: #--LSTM-part1 :end-before: #--/LSTM-part1 + +Unrolling over time is a straightforward procedure of stacking the embedding +layer, and then LSTM cells, on top of which the prediction layer. During +unrolling, we update the states and collect all the outputs. Note each time step +takes data and label as inputs. If the LSTM is named as ``:ptb``, the data and +label at step ``t`` will be named ``:ptb_data_$t`` and ``:ptb_label_$t``. Late +on when we prepare the data, we will define the data provider to match those +names. + +.. literalinclude:: ../../examples/char-lstm/lstm.jl + :language: julia + :start-after: #--LSTM-part2 + :end-before: #--/LSTM-part2 + +Note at each time step, the prediction is connected to a :class:`SoftmaxOutput` +operator, which could back propagate when corresponding labels are provided. The +states are then connected to the next time step, which allows back propagate +through time. However, at the end of the sequence, the final states are not +connected to anything. This dangling outputs is problematic, so we explicitly +connect each of them to a :class:`BlockGrad` operator, which simply back +propagates 0-gradient and closes the computation graph. + +In the end, we just group all the prediction outputs at each time step as +a single :class:`SymbolicNode` and return. Optionally we will also group the +final states, this is used when we use the trained LSTM to sample sentences. + +.. 
literalinclude:: ../../examples/char-lstm/lstm.jl + :language: julia + :start-after: #--LSTM-part3 + :end-before: #--/LSTM-part3 + +Data Provider for Text Sequences +-------------------------------- + +Now we need to construct a data provider that takes a text file, divide the text +into mini-batches of fixed-length character-sequences, and provide them as +one-hot encoded vectors. + +Note the is no fancy feature extraction at all. Each character is simply encoded +as a one-hot vector: a 0-1 vector of the size given by the vocabulary. Here we +just construct the vocabulary by collecting all the unique characters in the +training text -- there are not too many of them (including punctuations and +whitespace) for English text. Each input character is then encoded as a vector +of 0s on all coordinates, and 1 on the coordinate corresponding to that +character. The character-to-coordinate mapping is giving by the vocabulary. + +The text sequence data provider implement the :doc:`data provider API +`. We define the ``CharSeqProvider`` as below: + +.. literalinclude:: ../../examples/char-lstm/seq-data.jl + :language: julia + :start-after: #--CharSeqProvider + :end-before: #--/CharSeqProvider + +The provided data and labels follow the naming convention of inputs used when +unrolling the LSTM. Note in the code below, apart from ``$name_data_$t`` and +``$name_label_$t``, we also provides the initial ``c`` and ``h`` states for each +layer. This is because we are using the high-level :class:`FeedForward` API, +which has no idea about time and states. So we will feed the initial states for +each sequence from the data provider. Since the initial states is always zero, +we just need to always provide constant zero blobs. + +.. literalinclude:: ../../examples/char-lstm/seq-data.jl + :language: julia + :start-after: #--provide + :end-before: #--/provide + +Next we implement the :func:`AbstractDataProvider.eachbatch` interface for the provider. 
+We start by defining the data and label arrays, and the ``DataBatch`` object we +will provide in each iteration. + +.. literalinclude:: ../../examples/char-lstm/seq-data.jl + :language: julia + :start-after: #--eachbatch-part1 + :end-before: #--/eachbatch-part1 + +The actual data providing iteration is implemented as a Julia **coroutine**. In this +way, we can write the data loading logic as a simple coherent ``for`` loop, and +do not need to implement the interface functions like :func:`Base.start`, +:func:`Base.next`, etc. + +Basically, we partition the text into +batches, each batch containing several contiguous text sequences. Note at each +time step, the LSTM is trained to predict the next character, so the label is +the same as the data, but shifted ahead by one index. + +.. literalinclude:: ../../examples/char-lstm/seq-data.jl + :language: julia + :start-after: #--eachbatch-part2 + :end-before: #--/eachbatch-part2 + + +Training the LSTM +----------------- + +Now we have implemented all the supporting infrastructures for our char-lstm. +To train the model, we just follow the standard high-level API. Firstly, we +construct a LSTM symbolic architecture: + +.. literalinclude:: ../../examples/char-lstm/train.jl + :language: julia + :start-after: #--LSTM + :end-before: #--/LSTM + +Note all the parameters are defined in `examples/char-lstm/config.jl +`_. +Now we load the text file and define the data provider. The data ``input.txt`` +we used in this example is `a tiny Shakespeare dataset +`_. But you +can try with other text files. + +.. literalinclude:: ../../examples/char-lstm/train.jl + :language: julia + :start-after: #--data + :end-before: #--/data + +The last step is to construct a model, an optimizer and fit the mode to the +data. We are using the :class:`ADAM` optimizer [Adam]_ in this example. + +.. 
literalinclude:: ../../examples/char-lstm/train.jl + :language: julia + :start-after: #--train + :end-before: #--/train + +Note we are also using a customized ``NLL`` evaluation metric, which calculate +the negative log-likelihood during training. Here is an output sample at the end of +the training process. + +.. code-block:: text + + ... + INFO: Speed: 357.72 samples/sec + INFO: == Epoch 020 ========== + INFO: ## Training summary + INFO: NLL = 1.4672 + INFO: perplexity = 4.3373 + INFO: time = 87.2631 seconds + INFO: ## Validation summary + INFO: NLL = 1.6374 + INFO: perplexity = 5.1418 + INFO: Saved checkpoint to 'char-lstm/checkpoints/ptb-0020.params' + INFO: Speed: 368.74 samples/sec + INFO: Speed: 361.04 samples/sec + INFO: Speed: 360.02 samples/sec + INFO: Speed: 362.34 samples/sec + INFO: Speed: 360.80 samples/sec + INFO: Speed: 362.77 samples/sec + INFO: Speed: 357.18 samples/sec + INFO: Speed: 355.30 samples/sec + INFO: Speed: 362.33 samples/sec + INFO: Speed: 359.23 samples/sec + INFO: Speed: 358.09 samples/sec + INFO: Speed: 356.89 samples/sec + INFO: Speed: 371.91 samples/sec + INFO: Speed: 372.24 samples/sec + INFO: Speed: 356.59 samples/sec + INFO: Speed: 356.64 samples/sec + INFO: Speed: 360.24 samples/sec + INFO: Speed: 360.32 samples/sec + INFO: Speed: 362.38 samples/sec + INFO: == Epoch 021 ========== + INFO: ## Training summary + INFO: NLL = 1.4655 + INFO: perplexity = 4.3297 + INFO: time = 86.9243 seconds + INFO: ## Validation summary + INFO: NLL = 1.6366 + INFO: perplexity = 5.1378 + INFO: Saved checkpoint to 'examples/char-lstm/checkpoints/ptb-0021.params' + + +.. [Adam] Diederik Kingma and Jimmy Ba: *Adam: A Method for Stochastic + Optimization*. `arXiv:1412.6980 `_ + [cs.LG]. + + +Sampling Random Sentences +------------------------- + +After training the LSTM, we can now sample random sentences from the trained +model. 
The sampler works in the following way:
+
+- Starting from some fixed character, take ``a`` for example, and feed it as input to the LSTM.
+- The LSTM will produce an output distribution over the vocabulary and a state
+ in the first time step. We sample a character from the output distribution,
+ fix it as the second character.
+- In the next time step, we feed the previously sampled character as input and
+ continue running the LSTM by also taking the previous states (instead of the
+ 0 initial states).
+- Continue running until we have sampled enough characters.
+
+Note we are running with mini-batches, so several sentences could be sampled
+simultaneously. Here are some sampled outputs from a network I trained for
+around half an hour on the Shakespeare dataset. Note all the line-breaks,
+punctuation and upper-lower case letters are produced by the sampler itself.
+I did not do any post-processing.
+
+.. code-block:: text
+
+ ## Sample 1
+ all have sir,
+ Away will fill'd in His time, I'll keep her, do not madam, if they here? Some more ha?
+
+ ## Sample 2
+ am.
+
+ CLAUDIO:
+ Hone here, let her, the remedge, and I know not slept a likely, thou some soully free?
+
+ ## Sample 3
+ arrel which noble thing
+ The exchnachsureding worns: I ne'er drunken Biancas, fairer, than the lawfu?
+
+ ## Sample 4
+ augh assalu, you'ld tell me corn;
+ Farew. First, for me of a loved. Has thereat I knock you presents?
+
+ ## Sample 5
+ ame the first answer.
+
+ MARIZARINIO:
+ Door of Angelo as her lord, shrield liken Here fellow the fool ?
+
+ ## Sample 6
+ ad well.
+
+ CLAUDIO:
+ Soon him a fellows here; for her fine edge in a bogms' lord's wife.
+
+ LUCENTIO:
+ I?
+
+ ## Sample 7
+ adrezilian measure.
+
+ LUCENTIO:
+ So, help'd you hath nes have a than dream's corn, beautio, I perchas?
+
+ ## Sample 8
+ as eatter me;
+ The girlly: and no other conciolation!
+
+ BISTRUMIO:
+ I have be rest girl. O, that I a h? 
+
+
+ ## Sample 9
+ and is intend you sort:
+ What held her all 'clama's for maffice. Some servant.' what I say me the cu?
+
+ ## Sample 10
+ an thoughts will said in our pleasue,
+ Not scanin on him that you live; believaries she.
+
+ ISABELLLLL?
+
+See `Andrej Karpathy's blog post
+`_ for more examples and
+links including Linux source code, Algebraic Geometry Theorems, and even
+cooking recipes. The code for sampling can be found in
+`examples/char-lstm/sampler.jl
+`_.
+
+Visualizing the LSTM
+--------------------
+
+Finally, you could visualize the LSTM by calling :func:`to_graphviz` on the
+constructed LSTM symbolic architecture. We only show an example of 1-layer and
+2-time-step LSTM below. The automatic layout produced by GraphViz is definitely
+much less clear than `Christopher Olah's illustrations
+`_, but could
+otherwise be very useful for debugging. As we can see, the LSTM unfolded over
+time is just a (very) deep neural network. The complete code for producing this
+visualization can be found in `examples/char-lstm/visualize.jl
+`_.
+
+.. 
image:: images/char-lstm-vis.svg diff --git a/docs/tutorial/images/LSTM3-chain.png b/docs/tutorial/images/LSTM3-chain.png new file mode 100644 index 0000000000000000000000000000000000000000..e962a3c720781e37949a0d654e11dffff1b6803a GIT binary patch literal 229688 zcmYg&1yq#V_x8}25L8r=R6t4^X@gF|p-X|GTL$UwZif7y z7yNzeo3&W?Ixz1!XZL>gb0+A8((@}sR75Zs?24?6qzVl7Uj+BnsZV?I@+`sA_BG=wgI0g}J!6uvyx` z9gL0aOxbJ^=85aVR4~{bn5^VeHP@uoF>}Y815~K3bEUiAvHCo0Rir*KsjS_@zV&?( z8T=NnnuzFuGj5}P(U-W4udi*h93}D6Wo`S?C9XefYptb!`(JC))yOCT#%|mE$0l_i zW^~uZR=M2F!+)UfK`%G+kRaSQ*n3KcIYf;(_ROA5*ev4rKH05yOB(VO&Ho*IW0;TR z?H%)Z9|&GDn*yb`3OifjTMtjF@XyT^>2PS~CU_3T461yQCItl6QZTrJ?G$#Z6%@6Rcgne->LcRq+L64YFSJY`{T=~V6Z~V z5+^DeHjI<(8zz4L&hO?p3~@a132w(vw9Wn&MoJRA-iC2zF!cEO3{IO*k08tOcyUVn zKARLYl^t!KKy=G{5G2;Fnq`%pS%n`Lq(I)m}+}NyP9ylu!#>QhOO!=@QP3EVgoP^ zR3ny%Z-EJKY4m{v32`3G42wQuX!k3R$A799rwF7##Xk|ZofJ%apE{FdLCtYH-Xj0B zf5Hgbzocg;r7G#7#JqO(XXv-Y96=q(!#6$w(4MZ@kNnX1pii=k$Hu%fkmhGX{Fy9R z=?9u^$AxlBgr@@1f{9UxC5f=yJoeaN;oWUyQ+*VAcT`hK-k95 zCgjVrY&o?;&EUu;IdTeGWnD)ad(JJ%=S`83&e8Ag2gnLX0vlrXGUuS|?sGiFW zzl@9|2Tv1!jld3wBrP@_WGY=9PMSa-(mWZQuABY$G`|)~axxNZe4M5ohk};Cduk6= z-@pFR6d24Vt*anx-@DZ)-5jwl%giY-b?7d#C4+rTwqbhUr+1i^X~ek6u^FUqdgJ8J zEVQ%tDV!Pn95>1d0=w4ir;Mh!^M<0)pWZrfPgTtQ zu$HB>r;wW8G-$2YY5Rzx$YP+IA(J6>Jv#%7Obb4k>ii$1?g*H1NDz}lvgo`1(TXNY ziM6&p3;jACj?Q~tS8m&#~6dEjXZhz>lQ{gK@ z;F5}V3&YS-0!0h56UZCBxG=#R+Nk(@^~v>ihzgm)7nRp3epZL@hK}^-7zs@qf4r$F z8qS=YzT&g6Fjw+<@0cVuPcM2zqTjE~F9JQRhj;4*bTEeS5Kbw-DqjLWE`)by(`Lrf9(D;JTUaP#^4yXq7WE$S>a5a*uqwO>P~!}6)yq#erneg zv9A$&SPL&oi6}Bb7i{Hw4=0K;jqmfwmqGr4lk!DZHk65c#bssP5u{qYCH0cakIsxBIREjc(`*t zl>8QU%o<9Oo`MTFqN!Y=v>s)1^Pg}HxjDMDJ-!?E2SKNpP32_5F2|yL%7$Ks4YUo; z7CVb9x!Wi&t7RK}+Ct23edn?rdR&`uK*;5N`mu5L-6-7=4u@_b-4(WrU=#7IiBc8s z@T!GZPQ{$utI2hJiHfhMJd#rL?hB3@C)J*$#>`@E#d|)pW=Cw3?(nq`>@dcQjmL0i z;D3g1-O5bRDKNoXU)sAPBDjC=o^bJG!(1dX7O&lm%Vv3|LNC~s;An~PC1g~DCa8Gn zdIE8O-HtivoY^BVs=+s}Hm8(S<+O>oD7>0;>BZV}I@=vQ{(96Wzj;AM`frmBl>PYe 
zq|xn``nrz7WqaiT)K%lMz4xTX2;&IQ+O-%AS4#3WzmBuD;D93;$6xK^uf)BOrAR8h z4zVQjB$+KYck@)>v{~^c<8!RJKRGyUzNl6 z)PW)cqe`9ZukALB{uGX9P5RIEi^2CPL|s2B3L%hOM^DlMkj}srndI}$q`X+^|ost#ucrmf3$hqd2Pf#XNka96g_OK7s%xH zorwa{gC#J!Clt%)okpZ_rE6W2;|K6DLfJJll68@{4vEf=j2chn6y=S!T{bp{n`e{;(o(R5NrX)Y z6s~=`42V#^B#Fn?owrunIm&lk{E%s(BD6V9o1o!AdeTWuFWgBWcEX-%ndobS9=^%e zbR8Tm;#ni5D)!;k^7@ga*ehLgU#L^kj5;&c@g5oaY)rJ4XDe^FcCZL2>O7y<_C(76 zfD|AZ#)bL`6UEALO;oKyh~V$hq2A|LR#J2@f}pB!xUD{ZbYcp!0XWueyBg$vk$f*h z+`lchQmPUk(pXGdckRWEqa@DV-fnLGcks4iF&EQ+`>4$~DTP51cBBojv*V0~MEH*1 zSV3=+IIW;epv;lF<#aHVq2pZzMTbTV*ZJwY;|EtEA zbW3zt!{giB?EDrO@?ct@L+lVfD2*Rdo5G{>YM$HrN>77!Ra?4dB=M!gd@>HPR5oLF zXUTMzbY-}2ODj>hQ|Q7MPoMX<{GCwBQe1zPG?y>P6`X9S3$a}_4OznW#ErJgp$H0@ z4m#`Z{nmh7$KUa1gj3irEPAgBl#h(uxOA=H3#om-D!Bi!H_lh6x-;lw$m`jBG5Fd7TT`U!W&D44!&pJpQ1t4G8oO2=cj4$c9c!1> zj17m_u!9{Rjc>k+HutQ#z1x5Ik5`V^{t9xKNZMsARd+4xD9e zxXd4%rTJs1JR-q47rD<`_5|_^nJMECw=0Vq`T9Q`P4(R-eYkVnVuQ1>o2v|JQCUIZ zlUv_C5+g}F62hvq4(|ty})783#p)DqF(xwPz$G!JJ!r*qj#!ezNGVqiUO+Era+YLLBL?+D@S zHCdmWE|-!Ybk^A3O=f@jGjIl<{XGhbtvefH@8gbtc=$TxA$=-nOqyI4pv+(25F+;57jZ>l9{QF{$wW?Z1qFGJGB_`%xKW=@d2pQ(}>mi)BSbRbQ zM+Y>GAbxP7>}d@aJ1{fP@k1)YKnzH3gSY5@r$-1}m_06wafjNZcII%+plFM4?z)S> z%gKbT6%7*i^;;@^DoJhfexL}JWoy1PMw1ZBD<`;;=J`_3n>hB@2g{2F=GF;_?JSqF zh4=Tbj;C&=l(7W+k!qJ6E63O2K#nyV$2o6P&0VhBKPR}p!u9M58?(hk9&tB`=YUQH z-KHO>bpA0UAcPz$-pJL`Zpov!^Nn5W^xN3Dtv|`^?vMEJ&dhhGE(+;w;ML@ZGy(W= zn?-i|G$&zHy4Aq#e$SMNJ?~cXs#dflRp3Nr4gOZ%4F&NuBF!wM0!F!@2Ddk(rmL~e zYY%$ckZxVYwYf}C9m9^j8%hpECYsyE?yzv7fnjc&?!d$pXMvY?i_YT%qv%sQ+qsl_ znS(y)95Pv)s3a`UE7A5jWOYlHg1mJ-V=taAwgQi=>~$M&8}nVz4Y|+Uv+@zBj=|!# z@iMFqW^R(N4hm1@6ifuDi>v4#%V@$YR)(k#ZKf#3Ge%$MdyRA|K&QV>6T+$JizjY{ z)<$mgU|j!@d~7z=$!O=i&Fg>m?dXZamWz@p@oq6%$>E>k1Rx9aNb^@cMHRSg_FPe} zq<$Dkt0dUS)4=u`R43r^@O-adoeEzsQ;P~J-gQzV%^R)ls769Bx8PXKPC~YMlb9l*gkEcRHbq4)$wW`Ukgd$k7S;pbv}Xr zD%M)ZBS9#xaGS4=&207g>tA>6nv;2-a2Yx&MV`*H-Dj@&Y3+iq>r_WSjayf;f}GtN zX?A-g_9i6AnJXMJZQQ&}KP%eXB0_sp^ENmqSD6oymZK@x_(az7Pu_7djhNE|6>p%FGv+ohG^wv-w_u6{lg@L 
zWy6{7LaMsW=%p|+CQ|TuIn$i-q^!+yP#$RWw(OI@#5oO%+U<#GFZz3#qH-#W2&W?F zIrrT~*L?cvcLg8QA32;dmI=mugeh0jJclHEvcWXjk>{`YhHoa4SL5)m(qhibAGb7I z))&-1;?X$^gk2a|>9}}JoMQ<>uLnbS2F ze6w1iY>1t}nQ^a&amDr6OwZzF9g_a~O**N2-^yp?niX|k_h3T>5yR|l;~bo!Qh3Gg zDBYiY_ly-cqNv08*Q4%DdP?c9#rfi9ApOX|fVtrmTifebSn;e)PZN|KlsD@vG4VOv zV7jbZ#Rmxjc-jY&3wI=#O&f+g&fuEw8c|zPPSn6Pczv|qEtFs4-XF_ z4Y)n{UBvHv`p<>M!mTmM)dlqFFrI8L-n|@NyN6EPs6UQE%0_9I+IJkXnYB=Tl>JZ*Q#Xl5}%9{ag*wbqZV^W@G2WNVo!f+xkZ_LLv zkJ94v&2cLSuqR7UHm)TmE63f6dr>L*8M||qUE^on2V3v6$18S}J+!Vvr;?DeReC}= zLwzf?nyX5yo=m2YD#|I%=Hm5ZJiEAgH%9Bn{j0W2T$K{YZ@+vuzo*mtWeDRauyFm2 zTHadb&&*7ZX87mRW-_gS`@X@xLF>%W9K0*$P; z`x~?h-=vY4B$4Gm1)_&6i$>fuZvMrKE70j^rw=t+rx!D;;!IHoK-$FlHY4<-EG^Qa z7)Lf+y6!@*%3WH&jgbf_>CV)&vL|JxZPQ+Ucac{lpT*4Fx~DgCb5#4pac#6JnL?DZ zX^=Q06_MHWb64$FL45n~n+~@{EE&c|4qn@(PDQh;_a+y)Ii3l`U8lV{v%9epa`_;l zZ^TLX9?sjRY;2&=Y0AA-Y`K6BdgEUOUL!*;7VKM`O%JXaM5;8&_g8yAN*IJoxw=l- z2tn`dbXPPk8TB%u;t}ZKy3>e9YB$%d(f%|GSHPNO1gemW=HTZT`db=Zs0oi;8zA! z^)S+wz>B^%X&_PK@PVG8MSposzn$zI4)690bY0Xqgb+v2gw}Q}i`EELS4Pfdh2O2< z`EEpd57b5NL17tma|PzMv!FlriY_qxY2%pEZ+FvJfwrs!3K4u-G2U_|Yn(C-QSzb5xfX`zWf3|+p;oyXK0$gUiV(2%Ji@-sdtZ5B)3xvOVQ1p#ZsPz0BlEnFY7vWrw1BNpwNUMA zxoxGwN5H!x#GMNI(+U%HQA+O77RV%yFdJhUopwpgnE z9?*?8%xnm^lpKT{SnB&^1Ju$~KGqzB3@v$UzlD>lmqwtZllhC++XjKzU+8vQi?R_q6|+7Il53cWivVqY77d~KuKb>sUF4sz)O(warD zsZj20htk2dzZy2$sK2e@oGz`(`rYRMJk=elP!<>7S|&Chu>5sU=lR)*f28~3q;K_0 zdGtYI{gb8HPOk;i+!>V9PvK=6ab1yKuy4QV0<}!~wrLw6b_`~UH6KsZt?Xt0*&9J` zq8Hd57Ge%C+-2;b0`AFM7wFIteh(SBd0KKA`o+7V9OwClUO?wXO>?ZRkK zX~WrFcYZ^nz~fKm!WBnW>Yr0Jg*3-K)UY>*t!tP0IIXR;&>-}q(|=^d2Sa$pyybs# zhkeaAy!_qOOT1pSWIPRJs^&VmKYqH+lj7+TKj;=!=f!N2p!?=k&J4Hi2qn3hHQhnc z4PcKE;?dTu>!z%W3Ec}yLQ5Nr)ACQuCUYIFyq5<@G(fHY+?KYzDZk48I7^fvlg2)Y zZ||6JcSE2B{;f)EAX{Z3yEVN)toXvQCQRUnu5OW?5E3%~)Vi2XAwH8_p6sy@fO39y zn%R#QFe+>GCp^}wGQC-H%cpz6A=93FFKci|`EXjD&C&r+@l9%SrsuCLrJIFbi{{v= z6_kwM@y3~Xp=~M@jBDDCEedCdZaEcD=_)x&3<{NJ4!$VdOTFdQUMu0TP0fDC6?66+ z3lx!%VT` zkb7RM%T9FyyqZ7W??ZFV7?5#pVB zGsvAEhTXDzL`X_8Mhm=v2C1+y91r- 
zdQu4I5AnIY89d}IF}>cGz14d?1a96=DpeWDi{oG729GFqMBCs$v11wz3@%OWSv{pH z)`JwM>Q|z4Bx4Sf8&PND8`C=R`@07}t?XkzC+LCHa zo0>CvIVN%Nzr7(wRnm1^iA3vq05;NJWY+JQy6_tvM}G76oaV7Zf1hWn*wsd;gGX9o~p-ti@swnw`LZ$T}NQaj{?w8kx124M<-7VEkrjy8}wCD$F#lb929_kl$7rMfq5T>1mn}8ql0-*gZr9| z?mpt;2fbdBmrfC=JJg!4RTC>JVgO*H8uaz2b=OP(#{~$Z*H(hi8fzow_Ho1OoO|uT?{!@vINAu7OQuh3-)Rb=RIkCzTyv1d*oY>)^?c> z5;_W;C^*uOJ--Ck{aRv}C=Ax8v9HqWjKWzt1%71?Zd0%;Z@#g=fFK2n^Vh!Ky8y() zA2;(9IK?V=Q-r`>*p-)ZE?SrGb}gmr#9T&TJUoke_cG!ocYeRH!51a9Dmkx0WW#e( z%&MRtTL1y>|F`CYQA>`;9rpa)ap7l50_jCJp;`ly5il4M9SppIxkwPg_ z&$WDpsz)763-^~WZ|&FU)=%7I{^gImBbD9n-W!Fq?hsNBzM1bg+XpSTYd+pGRn;9= ztulY~y6Csdjzlu-(KvvxQuV*P>Jp}=us^xi2k(rU+PV;F_np=?8Yr-+7=p7#s)l_s z;yQ_#Yf4H>0w-Wv~2vj+h)^N=-)0+Tig>mOO z-2yFC%N9R7y);ob^F==DiL4*l&n+LW2a!MaDY!LR|2u_oZBQN~Ad|jA$ zYOu{$mCp}FMa~C_MNPMs4K!=dTmVGa)%)ig^T7GY!IUa*g=@tRx?wR&5Fa9bH@U6U z&PvwtR?M1H@4qbq(`V7dox8B)1Ou61u8|vuJOlr;JH84A<`)mc71^4+4#io-eOZT2 znF2JpmP9MV|L1Ry3Lg=16Kix#t#unc1@)di~RKAfb{V` z*g<0_0vAI4onw8G_Q!(bhr4(%U^8oA9_XwN35%sM$+pAYHj6(!aI;02{?Rz2>qM&* za4Lq52`tEnX!r}T$!NwGoWnk;F+BMQJluW_@BE9?w;hC+uD1I_FmV=&+1<^8RJ&-W zJF+HP)V@Z8cnccU@88q;V=lc!g`dWeYP8locSK z6)J@82QCiYz1?8rnUE0kMm1CJbVlz840lnWhjMT_F{t95YTe|I;l;3B1kuAG>E>Uw zD^PhqqBR>IIx^_?NWk0q0-yghQ?4}e)3|0t;8`EgOWtsG(oO1EAbUBLeif|pl>~w; z-O^#pWcat8?#8VJX19N~&W|1j!%ot)>7W@fT;Egu+G96VA+qh{2@=czM(_t@K)59E zHGSg_@3h&V83AssKw`w<8JrdJcNKmsY!%-ua)KIeHQaRbOyMpH2eGeeTTeWNmkBfq zDoobyeW>$hUn$GC_pC43Xevv36%~w(?d%eZQg>5k(aOG*#{g9^cZJp8TM3G6wmxS> z6DKDuLi=aOSLZheHyC*Z*T1`^%SX?F`OjFG%c%Rph7W$)ge$bOcd?%TO`^zM2nbP z)ZSC|Kb~*&{MF&0yN{fgby!QNB3m`-jE!kNw+XqMVeuFY9o+;cy1YgN65Rh}17wc= za>R_A!y}$aPQd%F=k2NFi1T?9Xh)Q(9F7>DVN`CGgjTe+_kYvLLx$x#Lz$CMdniHo zXOw&f4&iZe{+P?^n6pa5kyu}!my5%Ad%;KQJmivS6?CpZ+{ytHOGK}rh|kC7|O zdQ#sK%zLw5sZ#y1_}yqbZZaCpyzMBuR=;@3$dLgB?xN7AqVPIr>+jl_gtHb^!%CM$ zsTpnat%0{o&=3f=z9Hb%wlUP>e%C42~4zSLJn2V zTF5!N3I-$1@vA$wugJ&muIY%+!=H|%4ls_4zW%}J={zQeKAGpvLzY9Io{F^Q`y?St zpjGC?Q8WlrFNzO1YobB z)teJ_1(Uq6I#u)W%PU8yf^XIwB&7N>)c?>4(9cG^JDFr 
ztG@5PON3C09ex)uGBOf&U(xhw6k;Wg?L<(CVxKJ8MD}D{<^r`k>>%;cAb${n(g!hv z$ILJ9mfAJZ*rtEBnZKyiG#*j;1YCca%iuCN8!1}E)$yY%mkkHt-S0@x>rUO?U>mqNvj3ijC z+KDzHpK)~Erw`eE+-rV4!n63bJ*3pYVseHJ1uV&Dfk@sV${pgKv&JpF_8MMMd8{MT zCf&@b1`B40SOC~0+KGlCTK^^OdM6v4Cn;AlhX!uSh%vUOIx7loji!|yZU&rh?Q9DG z$op5zh?0#3XS`~Pc@v6pm&%!tPtTkAVDI+J7xpzrd)*jU;U%T?jj^ul%m3PL(n*pjAe=gteYQR2e%%^KpVC`F5@YbztMDxi6*^>5Ia7ON5CuxiVml0tx4vjVR@PTXk+KELROb5) zkqNQSDjmMb`XyHVS&NCURbyg+DnjEP7VsPxll{TY2F`HT#?Xylhwj|01EwowpuQ4UY=59I2_^xl8 zO%^GgJE94mj?7*s>Jzc_6xblo?Bj*6JuKUKC-^UM>fo>9K1||2Tv{a~_KIphAg-G& z)T6FJAL}84yAyOp6S+EdmWWez#UZs}_jcEdru_->Pwm9KP-J6E0BDkJy6srU1!99W zqhZMRV!!d1ErmDgepv_;uV9y0HcKV(6;hQs0HH8JB3T+fZZ zBtvW~h8{)i^>)F-){bmIkO~8XrVm>ZC3|N#0s(aLV-SOsiP9y!=)}&r2`N80ZI%eo zBYf(fpIptf`wYg-3{1@YF+XoXrphwP1oJN@^?u?&1i`Wa{OiT&V&iVK)r<$}D)ybN z7NgRQ@yL8{=^rYMH$wN^QuL0UIRGS~l#WCXyNTAN{i_)gkR2LZdRn3f&yVM?JhA$qxi!eyVF{#?`nyoCB}?h)<88g+fhhMx6GPWF-$ z0DN|OAUt*M*SYXirp%efehApF5AT8m+DdClEdg--MiwY8N<~91zsHJfFm`YLk1ij4 zW`TeKj5J&9;9){77Vqi{6!x-2g|;hi{DDr5T*VvF?&6IeIwr533~=m|ZtxKC%ie>A z3D)3uNKTO1ozT`fhW{c{&g=a9_K%OI59!Z0K-qx+XzvkgFmppgyllApqq=3Yi_UQ? 
z*Tpvgsuq~Nj(G$cL9k(Y0`*H<&S0Z1D{BZq(r$KkW|Sg@=-{_db#P1&02Os1GJ zLatcVG6CtF`RFduztmG-f?nrDg<#PM*$X$AaX4=qhtLZ)DYtsAth%!Xg@)Y+RI4&E zpP=ZU-~JS$HxE6Y1>XvquPzZ>z?{?-Bz3P&__XluhYR?ev`l@MfaRU7C_|-(V}g{p zg&5I=9W`sg%WfO%5BB) zgImWN8(oEIowt^Ds-Ribj)5J|~H`tBd-k}VOJFB-*Ec;~D&-;D_rZpD~G7cU02|NXl z{iBw5#t(PCYeddV|Bh&=-f!j4jKupeE071N_1#wrUk^VnC7PtFS>}26 zJ3$xTKbusPC|{~4AzjTGUx-TH_CxvhWj9IP2xjd9wlONQF1@S)hd2aR!B~gG7*OSy ziDxUzgx#(Xkdx7zAFtz7g`FSx7m_jvKAb-U)7)HQh%^w13_dhOc_?M}v$3&pxL?%( z`hWW#5K1)(|Fmj*c(s^RCOHHhe|gs7_s+|)cE{>^6nSy%6W@|NWc%sW3-ZtjU_#x{ zBvu2Z1IM&6H2?C>x>~YOKjC=WmA93pRZHGd90$JUF4lO_Il#<9Sn#c|<;?QO5KVfz zy=PQxbsNIX#v3bKZ_h*sFpnXy(89|>q?eB^!cwCi7Gn)^K#BsTG`rf=L{%14f*gdIGoRH?-ab=t=Cxe_F;ws6SrwG zveck}oz3m*WgY?Hv_F)y!I!KA4MsL0VV&#@fb>Mx`mDLVA0^0B4pUI`znIuhndXbU z32~#0|AgdqpO)#e?z;y3IP!fMg_tKQ`xoA%A`euR1_qMD*Nlui{Ob?%kc0-+ZElJ} z4ud>hwl!A0X`#v`PzrI+e|4q1b3lO2dL$X5mToP-Uj@Ll2!V4%%GBM*6sbIIae4K6 zgCt+vYF=f=90SB|TBa>Rw=>@$AIsKjzqkxP?tcAde`Wb>kTaB7BhrjRFgN*`R}Ont z=ocVL#is|{dE?b@=_D#Yt;@D+3ljZ7qHXph9$#ePtAO-rNZ7*7dn&PoK!uf8&qfGby0xFxAJEadozy;%JY&8%)Aa7!6SZu@{j{Qr>!F0VN%H zM!SlL{lMOw^(=*%S$G!lwFmKF3sxrVbcNvhzCesfeLh;->3YRKtSoOGkJOx2ILR?^ zj$RrE8@fR0-=a$oV^LnlFefjB|=W5pkTv5!iB z=qy!;d#rzI@z|gjwPurbm18%i>d<-) z+jcJ8_Zl1O+GUOYv_t!tk)tD<*LF!E#akKPp46dzB4W%Gw)-mO%IKUv)rI_-(8JmF zs!B*`I#3ct57>Zqn6o%`w=$*Pf4+C}iGUDd2{!y6>2iU$}=m}y!Jl6NFm z3kt!Kh;~W`fzR+gt3-W(YsOjvQX0Ui04ecaYDYrbr#J%FN4B`yI`D&XO4+bpr3-+P z$OiwGz)d>0ojQPTeQ3AG9to=%Dfd#Nx4ps^B+hr~5G~m>9c%aR+^oxC_lVPeN>p-1$qd_{}*u*s*wBAtok8qOu5NEr{(`f)+fB zyhykVkKoq#J_}Wtz$29u<)<>33(S@e$VbU;`GR4)eD{}y60h!8nZ*R+cJzr&2ux}Tr>iuzReSX2z?ER zX?7P-`l;gaBn1Wvmq+88Z9HPZ?U{mkq8pGnAtH2a%CdDE2YgM{#_S?>^caRL!8%~~ zl9x}}YTr-siGmsZJHXFv{IkKlxi}&LrmZ!Kw@vcB#{W?rk3u+&>Y3|rD(wMUUQG@C zSD)~wCbgkIHw6K7;n6&X$QgT3Sb|nUJc{DKB$WYfa)*zvOU-~z($EKub)RsEOi9t! 
z{lp3%SnzCqw?UWXuo~Q&{vXeHkrOpaTD_h>UTROEf3ku5u{qX`GseeFcu5$u&cWop zW2V&4qzShLD?omHgAEl=X~^62dnAci$H^4rCr}1DuxrA!e%KQN>T@@+4n)F!5pv%i zEZ}k(*Z`CJHoovwzJ9iqI*Kg$SxwFrU4=RNpjJYNJ8O0-dOy>00%t5A+4_$F518C; zB$VI-VDvTaYWI-m(SSYJ6pfr<%UJh#%Ue?cQNkleJJ80gI0r)-yoWIjVR=5^6&zaA znGG%)6^d|VzsMU<-UUQ?;S-b>z#3<{*QDTUA~vRxNtY5QVG0m{2JW+(I1}&x2QPXCv$cG=Hz<(I$>ZH ziD70EZj9a-Z?)7W;;7d?E|vemKyev@z_o#Xn!_7et7>FiMx$;6J{4e61u-bc?C@5P z4QeO946e_;n8vbI?ei7NN7`0UUQ{wfgvJVwz}2p~oQbgS@;G1JncMK`J2iRB_G-*|?Km#a4ycd&JM7?b^bQnaC>dQ6?t82S`MBsXUg zMdVP=Rr%7Phu>e?!?jkxfy6qz`e4Oc2{3Z`bkV60!x7w38za{6eRaOa49&B}&h|UA zGtU0gh3Aj0JbNsl_|N#jEllijSV6a?{#cGV=%gbYg^#ga-eP*@lc_0%Fr2kIhfUcT zY&ex#@N>jv;cx?iino}gog7Y@=gI1QcXv=kZiI<;lyF`GAhSlhjS(HXCu86<_?0^F zvWm+plHqxCLZ2AE_8fO1-~idI^M0acTTxc%)pYrd`3shEo$`MPUJdqtW|W)!pJtp{7m^1)6BKwv9}Q4dl0T+*-k50Gp%-ofyqW1%{<+f) zGGA)Qza$|V_qA3)R=GAQHMy_a$qG0cryitVyQTQ9-DD`F8LMy+Vppc_Y%a-|pFeZE zXXlUHH>UV1D|(3)5CP6AW4+p)=F^*@oVvrMLf`^T<{J#v`a7X3V3O&&5Uk<4QxYd5 zrzAY!NGyyc{2F<9v7{I$N*7B3ciJI=!t6JgBU2pcA78yGr-5J|wUpsU#Pf-HMGzX) zwgD_YGXdP9vC8-A`*)qm?R7{6p)1}4cW}K+UfrZ#{d}%X$qSI#RW_%#4M83WNj(!* ztxE;B9OP_BwIt@@QW`hyxVNw_v4St)?&UMH{^w#!KXT>c3BM^u?eDJsO(fQKW)yrl z9226HhkUGG^C{e1X(};QtX_SxZvYE| zrx~p)?J;f|_FbQ?Ip?DTZe)QWmf6YCO@of(KxnjUws6b00o<`G8O9QE{U>);V+C@< z8+Nib^Uc!D_OgkO%CnsKw0SL7+K!)#+@?DlEf3ME^Xbkk5i?wwNObU6qe*?+RRe`*fM=#e-hlr0Ng9&qs|3DPip5q`K26OI|sm~Vuy|WQM$_O zu07hvZdj;`&$C=@MlXls#Lex;Z>K***e5=+=wB4p1}WzC4*{F+>s%u)wo~AuA?I;8 z{h?)VS;dg$7X5SIINx6M@F`y03!=CG44n(*h%1e=FuNZQCdgem{Yku`LOqo)g_bpJ zB>@Qr(}e)HPZFK;jC*t5LpS&8P6WamWCN5IZdCb3)Q_Ntb&)wxQxkX?%sG$!BvCH` zYzbst0Hu*yC;rR?-t#BhJ);{v4epgrmBkQi7J-KpB&Cjwk)Qvdb;WfZ)Lv7y3d{TYsn9kpFFWQx|LU7?Vf7aeBR^04cm&5R@ zmr&vMw)F?n)%J;P+F(n&fZR=XiF`RHD9sI=YXv`Vrak`{^bvw18}Ac>+l=1`8R%pU zM^f^^ZPwT;BZG71tlohgwD$~(&`b++xR6QT^J}B*|J;FePpnS4FD(4F%``{ln~F8W zCGk|SpME_JxZm9;B^ESGx0e{`TC2^^x>KP`zx`oXti~%+Y6uQy7g8fgtBHke^2Z`m z$%D=>Q|c@(U?xxU8sfg_>b6JRze%Y)KP_yK>vB`N+L%Q}|V*5W~Rg;DML>EBUvaekDa3DViN)kr{Hi$!kJ$0rS-Kk=D 
zk7G!?s{MbrzMCk?adz)Gu#Ug4*9ksX0{7xRBAv_X8X!vvEMNKy4{^sCyjo87@r+af zZ@dg`13jDd^4UpmIWx>9LoENt1)%vHygx2Ze5XVCB`+2|7I{7Z3ZkDvYG~v^#S@9s zPij9&rQ93T_7piUXwo=h@rM?Xspmkw!%1$fwjHPCZ>S5>RGcqhRn14jPm`pGK3!Ue zl0Rn#Zcju;+ezlh=abCvwG8a)i`*z9&V`D`6^y7L-5~k%sJ?v-9O;aF z^BdAx9!vis!!u;vHvT<%zB=aSsb}uReAex-5o6LCX3YG9R>(fd%al-7=Gn7nw|-My zFU`19=_06SAtlep)lo>&|4ix&YdKfl5=UK^V`Y4oqhLBU*H2pXtq1Klw6$Zes@(Cr z{%rNCj1^p@_i?(6;6lZMXMSVXx96!lUfYKZd|mD%qf;Yi#*qh% zZnlvJ>gs!Qq!ZzycW1F-MI`Cb{ZYCp^Vr9$Wn5uXPso@t9UUW?=HurQ>guUBwn!Mv zZu&vGmHc)i1I0p1#Ua|byd#AnwR6ht&>W0+qEcb7BH{FC;(kw=-QSGYUg2DKE zf5r!=Mw>9S)^>FKGG-%%!9+N@3=9%6@P#PZ+L-$btKsU{S3015U%tdAa`!6#{Xh6T z)m;jhFB?mG-iFf^u&d$EaX2t>AC~l_KPyz40`GKTu*$S(gma`zj0%+q8y*byDqp2- zo?(MMo#deM77RA|i7!S)e|Y7d=Ix6=o(YvLP}ttqrvU5a{#}U_zT4WpTX_{29!t6~ zOKY9XHcF*!SbtFB{1K38B|glTT$^PM#juf#akPyj&doW}CX>g8`Ko6|Bitgx(=n41 z#EhCmf(9p(n`D^=Hsx*maI%4xcmT8)!dU~F=xaqNqwlUh*sHnC6f185V8bu zmbtGCKB*>#5k>|w2zzz)<4V<=FjyX2dNfy*CsS+7y@uM@*|#}4bn8m$KnRsFD&Fj^ zW%Q)7i-;C`Ylyz9N9ZVi&Gx2~S3HSFyAGVWNe>EWW9< zsXFZX%154fvY1L_t{BbrLSpWeV*wPrFh)`z?p7&v^|8TUkuaFy086@V+-zYSzvsQy znVg&>D}oCR=0~f@Z$~g_JlvdF)$ybf)81N+6~5 zTA^0;=EH=A4o6dur1T(qZcf>zH1-Bd*7prSz67bXu3te&?_RGOcCJQ zR@znM1Xw!VFiXu_+se#t&(zh)*ZyZ+>_F@@o)`zf3#?5hDU%9UrCt7bTlH?g}NH~kyeABONf5if~=fna_dSuU`2xI(bAFO%oTZx zaVO#G>ZdOf3z~@1!Rqx-qE*lgZpPH?0@3sR&<3u@s4UBlkD}z!!=55Q6QSM@)`-g) zTFIhSQvRY=eqa+qt9L6i3RHCtRy)kIsDWiIz4_Plgq0%`%_@0;$eOdF5pt1(rI_O1 zi+>CLgH5`|sC3C}e`bIS*l6&v~j z$AQ%oW=Hoe?X7{xg?8LL+T~0JVJ=L9Wp0>(UPeXGH8CJk_l5d^1Z_2NjXX`R;DuBf zm5oX3+L+m|K!zKW;KY;fUdmy17|NUuc?*GY@6py0c*#JnWvnL0BI}%9f))22!~Le} z;4vNuajl8{Ly)zM^N&w?0PzuiiAFpZgw*AuE;odJf81jZSqr~(dh~|UcA{S8m|7(B zEA>4xy-Iqph5yo{g`%o!kzzwOdVS)YViU;PbGv zg~1ke(xWG2w!K6uPhCJT9TBPI(}V22zV8&_P7Zlo_L*e(I!H~YvlRV_kPMd(m+8iU z_K#Fp<~}k^^fz2b$+N&cTzx7pkC0}WJC#ito_V6Lp{`z5CjXV-LJc2Pg1g42>{!ertUH=ki>9=i-a~%mwqX_S(Ph zwTB%Lz|mDE5$)8(^QPmlPqGORs!)m3T(H}%spM@3sS;=HUOn(eD>)iHjxdqE`Y0M3 zSYtU-e$Wf&S#+Oqn!klL#H3EG>suT@(b7U5{#zZd(ena^U=gcCR`>08vn1$f35~7G 
z0BMi2&lw8c{#R;Jiy&9Wu8C$iFkHPqPSk0sRe?0765UMQtbJhTBa5 zJ84errlNl?(yi)myvAp`-&a*|l1mt?!K1qSi6znqIsDU4k*Ax~Y0ZTZjk0LxX(b96*bi3&qdW90w`g1n*f(#HVPxq9M zl>!3UJm}c7Y-N4t?&qso8V=06{lgr?j0>E2@dClv!rg~+3_z;3UhFC~l`L{Fs8*)* z`Ze(Yvo8KUcDIUUd)19zfiDRSoSt7UG6b+Buiut%Z-GVUpgun9Xsn>XXmXv&N(QIP z%N#jmY{zj%$nq=9iXDJA0M?&{J^5?HtGZ z#J6nd3cI8Py$m#^0*#A0ERPo}Jp!KTl!OF;H)|@r%Z96BoKZfpSYu`9eg5rNfb=2e zMnK9=j5y6*k||7hIuSWE@p^p}Y{yvhSg68r*n?cKaO#sJlbFa1B?sv;@WA5d| zURS;={7P%AVhh~OUQkfLR@vv}1&h^)5;p5;qyv6Ax1fMH-X~I{N8>#^MPlB_XPi&_ z?^^MZ1>601?MZ1xnegR7z^HbPHhp z-}P-9XSM&k1`l!J+<({q(UbnW^~oIN|9jhe=dHN`+ycGC0RQz?-SCPG{|`v^{{ck) zZ?fT=M}ZMe91?M{d_tTNb!!hR;Qjyq5cv<{|0MT+2mPMf*g6h3EC?G{Du&nbb>aUH zN^YbP^&r>V#jF%&Gy%l5OVz`u`a&vTZ%Dp#bgjB$;%H@60X|C5;J%e^Z)`qj+|i++ z-uv*P`3hvENrP*j6&$>rsdhOu)4BGZftHq$Ua_JX=>|+e$@hh}ujkS9x~_fs25Pg8 zv0f)xV3UjIK3s2EIe@{b53RXN%ZN%w@3ExB*y^Frb6E(a9#TSdSr4`)zSK4!P*Bmk zti{I&oGc>5rgFB_cZXPjqfZLn_Sl>`e<`u{!9C?_j6?q)m)(8=hv0v=%KL9>IT-An z)yF98Hc4ztPMKebbt~}OsHLMvz{CB^=!In2`#)$8sH}?}&%9+n{GkFdx#P9X7$6vM z*w#QzkDb|We1I|XUVbCT7URp|GCmM9!}ZPojY0W?grw2&k(e1_tiOQ+aCM?Zkq;6= zBomq0w*z9(rEY%Tr!B5$H$hiv$rsrIY3$&MnwCH z3>O=kKSsEPFN}RTdfN{8bJ7&i$6sYmiw}9lEzUct;Y;$!Dr}9h>TZ~#zf89eAd=MB zK)4(%A~L!}(bFmsn8bABxVy3f*vrv{_ z{~opKeZx7)P8}wr1ePU$-K7WfY0K;TTy5Y2-7dqac@F9y5Tlr|J1;0-9V z!EBw1J5h?~@yhW40Zw3_OtNUrVZL0#hq!7melWC)1s;VQKMH^l8l5YZ$#04>4rEZch}=ZxDxm`;N%S z-v>E>GG5=iuGidr4GxGp`vt9~n2!3io}dA8u{}QE&0lN9DYP6fD|ew9&rI6uz58ZLDtczZtCfIZPH zGB9*!jII6Qe#-h1YYq!*duTS~i30tE$9+o5a71ZXXxOmTqegpCDXBDJOZo%$V-LlP zO|VDH&;aR(k>5>c8d^p_`{eKyB;&Raz`(kC>igBBwkOJ1>>D+5h^v;C)?@=_0oaB0 z{UVv;mFaAC9j$@_G1JoALKVg{PcLkpM=t!wMQtyfggw4|XUJeAHDS(Mvb1!}deZx( z#&*)#dpxSBzpJZD=%fQUecMP*)hYclU>MW!s z2Av(R`^A43lB1!!+t)h(EleISFv@e!Alm%0x!q&e&|~-o5fQbqvajd%EM21Lm(|r@ zHY>z3MxmXswS7geML-g22c(+%-Xk=>u(ZCivkst%y689(FuD{OO;!tvF`wK+4XGLx z3gl2mxw*MfdF#q?=tJ1qI#j|V;G&MyR34Z@?sx9CURp4A#`Sd(`-wz)iGhJh%e0uv znvA%%w(m7a5=a@{CJFqJ#hW)w6q{ZghXKdUz6V?Cm|8I|2N+1JKl?Y9!eB;Yt;qv?N>oT&um!KE$4StXwy1=J 
zdumJ!GGhjgj|Z8Ww|bCZXPKT(F?-%J?cU##>+PyK&Sc@@XQ%uF2bTiQwpSlZ2f-L$ktgkGnW&=Q8U z^1ajPGUq!z*zTQe@~N;yr2DR3@GfBw{Yz+k#SU6~6O`l1Ya6W(8R*%+Op6 z8VM^h=sD6aj(?u@)qAt+yhf=wzSy9~;`Crs%zQDQh9UiG6;S#4d6QF8Vu&bFzZy@t z!?a#0GHR-OX-J_ZPRi%dY!?^(<$Xzze*SGyNc8vU$JQVW~3XR7Kk7FwbS~T$#LaLwT1} z(WnL3&Dm`sbtEui_Nu$aF*7x>JH!oXBKIO=1#2HnHF_!6WO!*0O!;fq*h-^j=pif0 zC=z&|kdROuum37I)SupfJ@(n|ej6?2T@;j{9ygkP9-r2N4XL@)?zF0p^9Zh>h^fB2 zsZ29RR!f#GZF8&HX_0fX`MfCAtwV3J#?F9<=z|_4@M5&&ORp@@Am=$h;~rE-i^So@ zh?H1am&lb7t+;ORPCQjr?)}}C4D>1B-g*Dx!>DGQR0wyM2u`OnjvWZpV%7>v`o%0W zrYC=bJS#f^V9wx$raf*AyAaPf>ufPzlRc4u`E~-WK6I@UJZS}10}DIDu;kI(*>bW* zdNc1omttk56}Ih*;b~#Qr1|()#)yIAVe9f;<~8~G`M#^oyvth%R2stXVoUO5r8Zt% zA%mlv3%ug#Fkt0{=_5rQS)BV3VL47?v81=#XhVAykMfHOCRQTE*{2bbT+@h|^lF=g z(KAqn!-&&6CvCo*UrKLq*`}r$u4x>n#AP94)5p#&B;LmTH$EFO)Z^4;rq9Ax0<-6 zrm9T2`|?FxgR9X-WK<|l-)R;7dAeJaG>mO+1-xco6vsvsK~s7&9KCoN&^t8@_z>-E z`%ps*8@9ogSOf|?IpeY|m6V#8C}ZI*9uuQ{tw9oIp^$JnG|seI54?gt3`B8QMXIxb zGVQ{pVP&4)7$CcO>CV>3H3G;=wm&JHZf~h{CgZUAQtF=u+pWA+^fv?Uw`KCUU?msq z!Or4k08^%>)#0j6a#VKtbRHp4gXv+1CT&_W>?dh}BzIK(K|(MvI(gFA=;K!6%ZEOU zvkkEcGX{m~#;3~}7frMo-bxLrmmXkzabB9?tlv&tB?m_XitaP7Q@#bh5k!5rWZK z9$pa%V7*K#S|{dFeam2L>yvx?&td3Pmx2B`b#Lby+u7leYj_axJVrSlVpTP@0WDbgjoo-Ou}`XD1`4tkz!q!%rD@h~cmsq>X%jFES|Aed-;2cel{_ z!i(CfkOY!?`_=nwS^fNYg_GtH{LpHb#puWZ`R~Jwo z(CARx{oURH1>&?@l{d>`MGi^*aw3I>3AO|r{Z+VR3)l}jr04!{vB1JR1>)CN(=CH7 zK#l^Kx=SAk=f=X?+S*2yxM@YwVU{x|eE^&5W(@v<+-EuZaIZz zrhMm~Uctkq>n$}{596`oDeZ=uFq}-Pb9NIKqJI)P=qmw^RqpSNxa`LvEjptrr}aU` z2u}QUEq#;nVdx2y-fi=}z#jHV)c!8v$xwAZ2V7#@cpX`V0Jzk%1SFx8LppE)SY^ge zYO2ct9K{7xtXEqWY=E;{3 z7mD4BJU+f90X^5>3EqPi+<<(;{@cUBAN#d&F)bh-x0wR%NnYMo%gO+ftv#sg_qQi7y$YCr8xT)og31T>NDWwHExIK?&^AhTmzk- zkht5!+tIA{!t|Uf_w7rv*--`BAq6>pM8dtgoo+{e68Y zn@}WrJjK3uQet4${wwG*mBD0r6kVDw&jZezeUGvj#is^a&Z^TKe4fXV+vfdxU`L7z zkfX9Ed9W2B9b{x=7K@?Ry-v6heKp0?Q_jYaK>tw$(9eqt#tR;t4a-TsW1SCEQK9P^ z9UXncdmh8WI{_poGk7HuXnPxrgZl1u zqjPaUdFfGZx~;g|2ORUVaEWbCOvZ9+5TQ*eY&umR|#V 
z!_xy2a5rTXBG=*j%hS6vmQuU~k%X9K5!T~Uz_uKpI44Q4J|hX;VjdKz++>6poPW8t z|GdNp^s+FUyf+;2AS-g143DSHi#c;?BWkFs!7s4ab7BYKS(h~_ZpFgAXp zFh21^71;>sxp(CVvag8nxcZ+SRPuWxJsutK(m(@rvp6Cc9r6E zu*_iJ(G|cM{LY@$TcZ$5%=qv}91Pr}H|aDU^$*GlsK@3$+;>6Emw3KJ6lOs19(OGb2tX*(fQ^G=;RtoVak`RM4v&QMY*>zq zftu~3encMwFp+G>hoosOOSyR%rv$0y@H=>r!K(>|zVC3#N1Dc?=nov6tLEXuKOFvS zioFNR#6U4vYiE5^qCjDs>oq6%dvDLs@gjuuI!%dnwM@X$`qtD`f4!P(=N;$c1ra~w zJI>>9!GIrph*g7{t=5bX*LrS#LH=}Nox_Xg_rX|`l%0jQ$YXC8(R5;}gP9azM2#^57rQrx_a*mjzG z_w=<*L6SEB3|{w$eckcLw*&;i`D0Y6(iamGM^?Rgw^vgE!FppFGsoba(1RmGpM5td zXgLL{HQ>m0hUL*+**lDij5b2`dt0r_u;>XKnsBnp1%zpwc!qBFL+B1KbxR6ayQ`gvM+RDPpgvV^=^ZLPm1NHLsYh>wXC%1I-OftXUUFw!Eh%5Mv!_GRZR!T(q`C%#cjX9_CfQ3#uu`?GN z5*4PuI^y$U&Kdv$@iFe=4nQ8w71L+e46}w_aI&g5Y-cYzM$H9<+qRO;E|%p!Wd z>9xI=iu<{YzMJ&FP$lcnTvZ+<=v=HpZIram6Iig`V-yq=m}w(N9W{Lqjp@$>kZ-jm zUc1$B^dDskKO+tP!MnOvN5w#zdAF2IL5r3B-Er(zzsPK|kQ|nQ4z&BO3|R>}_m0>@ zSN39Jd;_R^O>vdS4wjbTHN#?U<*eDRlbCVLl#Q$56^Q6fyNNPBkC}_J(RJ*f+ltNy z^Mmy|?hUz=Lwq5IY==VreBCwLm~?F4v`Y^s_w+XCAH7GpE^<$nFK)BlZmiIoS56os^$J`q5FZL7H_eIbmpwefZAKhRTGx6?JQO}1xQn^_bpfraIARLx$T(RqEkJOko$zbTPHZNt2`XQM$Hr=m^#r`&7d)Iv{w@Sx4K>i{xprZH0~7eJ$+T~xc@)%WQ z8F!jr^-(2GW7tX%m-f(>w3HK@13c}Rf7!U4Yua166=>&3dTdAu^n96Kz)}U%44a816Ztkvv}Y-8CiBVy51%|dBIeo0UuIC zloM;{-EEGmLTTqAvwL`(52{6Yk8X2@OIUX;5zjdyPacjTSZPvy?5D@=ynZg~Z?O}C z9`{N|-mL4&B5axR6KP!`?S7F4cfd=hU;84t+6KG!*$_zTeGq#He!6FcBw*{5gA+opp65ifIO-FTy2Wa@!5ud_k|1`Hszu0@3TtYQ)ur~ zvx#xLq*E1-S;LRt)#|?>hUm%*J50fpd7AeIF_9DkYtz{ zmx1-Cpb)$l43-JfirCt^t^EO7&F9J_Nyw1;nJvZLzbaZmxWPuef$^~L1xK{HC^o{L6xyof<3(0WOXhi9}8Vb;W znN^L!CsCl6pqs{1|FSDb!3ajRzOnBOvUO^x<0}d!e8@M(SdEIUY&WsD(==E$*2y}5 ztGN-Mm*byntw2CgK%G0;zVy)vlkQ?fL`+0&tx5rSx&;g8QEeI`lLVnn$a07}2zr{z z3PaK+N|c=kW`fyEWyC-!sFHXIbeYxFB=)OP-Phg~tEGv6>aatv@{~nT@#i(p9Q03Y z2xu>AS9%wxV-YrR6=~g^gEM=i3zM!ST4Sj_X{4Q1k*9wCPy5s`0mSh=EgfB7RHZxJ z$xOUFbjZ0d6ELl48elh==jyUr0uT<5G2}b+It3X^1+%86^9_oM~P5{5}rIjd6R%f$gWzyL;t)^^ZJ0edsaH- z%%TCkB(T->P%z};XaChC#LMfnKP6A3_F=(NDm})`(XnjI2FQ>3TDA9p6sA$zfNHD& 
z9e<1dd6azbmtHRa9abF{F=kMH3&WYU6k$92{dQn<@6^vXl2Z6JzQ}74Xwc~Uw0X6d z+_9iiTOZ~H<)ZL6cHI)=2f8R2r%{2cR89yxC>g0&9E8p8 zrA@gp2FJgPD~Rv4l$K;dJ1_JxWY#>yhul|mPjz#@N6^0BIHI40C&P1Q;1^I*Tnv z2xw~H8iy}ng7+fjRd*0=bUBSV$k)0&(nr0}z77Hg9wwIJY zq61Co@pbetH@87gf<`ZAUSc5-^K1>6n~lN1m<@o34J*q7`VepyvE5fQ`|QPt*7^CF zUto&IdHsW7z3KT3flX+r_p5)rGckEASB&|*ytFA)iu4^mTWAm6=T^sqlzooVpjxZf zRR(k;TMjWa+Br!`k2ixZXX~j+?N==m^Me6`z6yKP0#!Ka73iZ}-e15by@a*FJBf*L z#I)MX9uLQWeaFPnFi?5$8u0*13%BSppLLg4)ZiNGUefI&g{#EL#bBBJWu1OpZ;A!e z1(ihxKBHfH$AS8IBd>|?^sWDa{r(A>@TB_lSBKm{RKV`h5KpPQ?#|7sCr|}=02Fy! ziBs$ykZ_xo>R6?&rMrD|B|nTLjv@zc8l&X6dCUPgiRK6RC-@H%ykV-S0?d_B=N(^@ zC%C~tda6lxmv;ScG0HI8O$&PVy6R$z1hOgp+mJvpB~DJzPYP35PA|x*fDV->Qak4t zSO>AotM-9u4~NaJg=&<%|C6#DuJ+VKefKu89sk=vu^K>~?tXx&(X1M&T6b7c4gut` zy32{@@ELcwwW8X#yAao8;}_+E)p%~Muxw!1t-cRgfj1U{vFy;O(6bcM17@et-Q z?>2srE0{NWO3zK`*Y668f|6e{%!Di>Q=lS7`^*jT3DhGvg6w1d?v zw{YVeD>AIC%*;4BP+ViKRz;OsnJ2eEhw!DA3k%kKg@7!)ARfWsCWiuN4Qv{Kby<IvBmG?H7v#fa`u@_ABZWsKkYUC#k^}i;4T7?O+ zy`UfoEW%By^wxGgJR!rX0q|iJj`8idxfgG%pfAdtNmR~yOmm|+pcD{wo^Qa8C;x^y z_Oe0_`QDHlbAT+>?khV8(7_eRqB)|;9j&8f{?+djPLp+|M;!i54Sf5B`}*JYznx_< z%0d$U&zfFMEiDc50HY{TgJChP@MzfZkPm*DJ|GX*_{`PQr=8+EC_g zKeWD?FS)(oth^9A4es%lfOM*K5w5`FWA>U;`f5H-_9cI0ys%X12x2wxBE1F3uq#M( z4-|(D|N1r=ZNCu$gI>(YUY$4JmULsq);BB1r-~~H$^!MlgpYPYL4gI^D)Ah-+K96A zie1jTalaMh_fH;fE7WBtu^`%f)U0b{&9$>F2?(WU@4u`GI*6Nn+|Yt6kY$O+&>8Ed zM%BX~1#&i6yK^BI)In19xRDb11>M4YQy^t$>wN1gFyfF&R<+zh_AM)ltXeQ&rJ;g= zyEC)zB$VZ*chH!nm6cRy&`%Y2$zpF=kzTwt~2LLS2X#*rzX0Iz?rj zMEjSV6RpN8nk>>VMG?6s6elj)r_H~MhB8Z$!(ACiFDWK^zJ>OFq zrqSV08(x^OI_)f#QdVsz{ hXHnDZWaQb+Po{i5&H7(SGyzKYdaPGE^ih^J7A?Wc zbupscXH0+b*SUu+W)6?p8%G*W)#adatBzLYgT!KX=(eBf&l9Sd1JqlBOMN}O{U=oW1)p~-c9Nc`fw_hH3*?#f?$%D#Uq3zjJ) z&Q2y3rt*_Ly2mL#4@il!A60A&_O_MoE!Y6Y_Pi_{bG2K2n-efWIG*x7J>Xh!>URBtRlHk)o?U9kN?V&$*7u%)n2GzP&7XG9i@J;Gy z)LDEu#C6ml#urTU$(54giQe2qVI414(|MO@DP)G%H6SYvEvabWW?(N`Kf8K1HP9ia zj%TV5+sAt`B{A$twa}p%?(pRGnXuZUr1bQjy}i+y$Pe)sg#{Ml{nV2s9AU@>ObMLv 
z-GB{+6q$F?^i6uWnu8s^$z+N8ZZw!gew0(R?#4&FiXN8N)Wr3ry7oq2=jIivsH^8C zm+(Wi^5bBjt*K$tT#8($xoexP9V3@1L$@*IEV+t-r|HMTBr{UPWtV8YTni=eQ$Y(5X` zD57ELSsoEF?;Th5`gtc|nFi4yrUO097QDC5piTaS5zf*rq0AWoL$ena)Hfgrhv>Dm z%$?ux$Hc^hjSoNTELi;ikYkE6%fZ_BoqIXA8cG|4zzHhG|-+r9w|UPme6QK-X-3*VjD)7^HT z?Op}xzqQAa0f!BfWjpAZv!c9ZLS8@nboXgDXu-We>BB}A#G{-IrCltTa|Ld2$R9B3 zjfI^9Ap4L%spiN?x2v-wp5I*aIS}EXw&z4d!~%de6m$-C=A3QulQzp?KJY%hv~)@< zu}qtuJ=cS6n+scOXX}SpBWd<;{BZ?vcWJ4(b4p?uOQg3HK`;R*6B_!``=})@4YtYI zV8$AE`t3y&qM)9Ue(<0K=ap)$ho;nli?u@C4s2dtoJ)5AWMw)biN|h0sazR+Y>mn3 z5)(a2q+^6-Mw@bCUytp5|8O$8=bCHzQ#~d|GbuH-r@8aM+_B#zGz@u^!Aq~B1%Adt z@AT?nLk>p-oe7z|HvQ+w*0$6cpah3$vUFtGM%|v&;zz) z%S)aURM*nVJy18hy!b>$PG;t5rfR((to}ti%weJ+e?gXWkDtAIHsnfu8R?Vh>+Ht? zFkfTNCCiwNEjRwOMUPAB7MMuW!5n6@}_wn?R2A9Ub?Sy zbB(2X^I0pw@h$-cTKVz}ah!L$tq$|uUCiiVm$}GqJa%hIZ)iR}KHkx}d|6|Q8V#Sj zwHF!fw?gT4-f}YLyBEwT^XF)4VPVE`OiRpX8Z4>eG!U`t$)^=Yw>9}-_u9QS{byLX!`}0AuqeVmHj;w9?S$SDe?_dH_>`@qQh{c?o3YJKSZfjYTF7 z-2kS|)pj!q3-pgTWwm3^PbtjivQEj1gBe{N`_BeX_3bQ_W0SUc##}*LToWAJTDeH7 zZt`x5U-)WjN?rfQ^C!2m*SEcoZdj`I^_7kb)kTiiWqcy~>>(Yz*-7zTV3OVSlU2Q*iqzIPw< zUDUeubz`Z%$1h(w|2r2`%(ki-qv^FV)XC2$q#wv-%|MDY=ZI8r15~nB^nhLWhcHA zE4iJUvfG&AFu;v8j1f|J6j*yOadCbu!+2(Lfktf)WHe)>yd1Y@eOJyId(%XG71FWq zr*}&`jgtfhrlz{*u0gndEwfyLsfkz(e_+uL_v^o1W;7i}#pFE1UdpIzVbbA;x8m-v zXx(((;VR*gVeTmiHw}WuL0>(&%JuR6$NRT#cPn0f$aJmn3T3F4StOUD{Eb(9rn0Z_ zo?h8XTb+R-wxh+%>&4X&dwora6r^KAgIXFkRZ~hxT?*rp&K#j$IVszuFA}rz&vff{>Fy$)dOYLi zEO0?8ItUs3&@OoH#}N3kt9{Ms5NY$n{NP!aMJ4!f@MVo#eVes~JbCoU0QuZ2H^NqU zPgO~|&?*AykgD-k46R0=m1}UjJp4Y%&pH<2f%QL^1b+#b>Tk6+Bpo;B@G0L-Xz;%< zpxaAk-bwKJqkM-5?_kb`H`R5*RPL&QVS3qSjp$k+f#WD1@qGH})-O(K;|O)A2fg)} zxU}z<`O&zW$b1FD_~Ry6`NmcLMm)WU-zgQ{Md;gde-s_WWWSFrN^IqRL99W_hp(QA;3F$DPkOP)Xc}XbG$YQ+|vs7Cqi{ zC&sR0UOl?=l^4x6jc1x={`3zO=JH}EvH54bed(w$=W>CqU3#mgS*4LS{` zfmH8bTIT$wA64#VUimg@pEYIm4Ym4>l5d$y2t7?7s}!=uz*>=!XW_<2d2Q9n)q~G! 
z2FtMH_8gh|o|c)Br-sML?dgpV5ALF~5+IEftxuo1J`xCUziRuQgN5x`<390(Tx)91`qKM$f8){T5J~)DX}(h#IoT_*yIw_{ zi2E7cO!0+}A7ZgqKO$64I{6SBm`{-Hi-9sJ?Q5>*#`@7T?*iyw(_qdX_+(i#EYUr^ zaRu+!JC>`h7Z88dXPuO<_>-9*Q8qhm?jIdeGn!oCxjcX+%y$}}G4Y5UqrVdJDAvq* z+Zcken7Yl%jP~C2LrF-hga5J5**exJdK((B{@#xI!_t5`WBomcnN!CiW0A1OB#uYT zr+*TV(6G1Ul8$CU$}o-Zfs2>xEq@D3{qH!;S~6aF8mPcv6eiNR_R`R66~i`4001*h z60m)|SPj`?%GCVyqcM@_YD`cPU#pMyz?>|T2 zjagqG>w$5zQ3=27_?)qRR6|K!yMIi2pSZmr^-VBIwC;lC;uBNH*^H!qvF<$Vs$9{- zI6lpX%p`JLdnO{os7v?t_#O*>T`14n+}HdrP+$vOT(A$Zq$yEfuO)g^w{HCIr6)}l zZLpyh<8!BcAi`yhnmE2uzW;ss8cPWe$@{@DQ-_FYyd$!kD3xcIb%{iHOwLIp)zI`XL1#eeeg$&c6TKalr~ z{PZVat{qSXRMQ-#;VWf_HqvQpx_SgXxl<3}(;Kon*?-N>6M+BSGBpL+8my((XN+M}4Pr(Rq>!S1Ii*G4$@O)xZ>tL`&X!dSiY_Vchgm zYVLiZw0r-v>6;&xeqU8cjna$~qfS%%psHQ`6StP4aUDA0+n@ZYrm{UZ=gE!C8ofRYpTR>tE$^IA_d=-^!KaYHxydc9S@llQsNF^h1Lfr4 zk~G)ihoxGyR#j6bH~5ntyJrU9xQf&nv!ZwBN}}2AbgFQfe54&`8T&BpRUvYys8hDW z-jXirZ@bL0bb+fFdaH|#Zr7Et{?6|RX7`)lamM~h2mF;ZLo4Rf3K5fc$A`_E9rL6* z#^D0$b&$S@eq>P_lBenR>2@_Tv*vh!fZ{hbPvecsfoEs6&ECHAdAL2alIYr9ApRKSMi3cd$W!sP;al zts_Q0aK8voY6xcqw>{M5ZhmwpoP#=?(>?p@%V8h>m9URx1+Sk!pbj6+x z#qiK|=?l9+6ItF9;^B%0mQ%5U3Hge8;Lpg+9*A_IAZH&xN-I4xQ0cjED!5g*uSVx_ zln)8)w5z_;?6W`Sc_EMiXR&Mf6T!LTWv)`e$;8>ifyy1!cGtKGjuwwecX~bKAGyL| zA8V3gpZ@&$qfcw-72moB_(iUHYDT7?1^-qbB0=OD?sJxq{hFhIo|q)~ymd^szL-*U^1<@m^D^;)oAD5`=2ls!fz{&7$;QbiTxgkH01byggy!Yp| zbUnV*SCl#b8*0UB4wCm}AFn_zNXz-pt(f#@s{G$~o*zmF${mPZ>j%-FFGEJlJiesq zv|YkzXrA=Wr+p5)dK1q0+Lx7t|l;_CK$4ruxV!dEqsO!z~ zz5$;6>;JwPdOiT(GErOe+@&%^0Y|8e_)(&N#_kDZ7BY`~=E`*QPOjZxhYE;o3PHz? 
z7wyckj8(RmgTI#5=2j|3rdSClE87Q`@&Bd%NY2qKOcfP-1V0yiVE=S`!i(25+@P>Q zw^F~DY3(`;g}e z*r%_sMwNS*dwu^sk^6f*GS1$%DSE9hk5J6f z5DK_!*<)Ubz-Ywun#H~mo}H`8kZG{gXzTr zmw~5uujn4Gc+*|hjO4xvyANbyMWu(Amq7GREA-jk6nI#~lftp*a#xiqOh0OrgU7hq z@{#@N(y!Ktv0ag+{>5`+e{rE{5d+mj49U2s&wM7?;;P_tq4%*DPO%a)fc%NLY&V?U zlk03pCD!tFZ&y)7E1SC*H zIy!Y~&PD$=)(c%xe!c(s2#04#WXh|`S~BN>k#=t8hX^P}sQoGN8}_D=)j%4KJEUD# z+)2xfqUzlqEwW$96rI+{nw0%@?5FzX^<0VL%&V;6(O_BW*@q=Vwppmvi5NRXs*ZNh zK5ZiyNq(#IX3as7F3iDzA z;{%Aa!ZVRuLp+ov1}nj{_qzIHCAo1C;~(1Ib4+GHz1gFrEc&TteX zv%b?azIs*s$5}NGm?8VtD54oR4&!j|&{=-PnA?qYnm5&Kq2bWf%;4WVR3a%SWlnB- zPaXn+?#y=Zeh~QUhNZj=>|@6*{f}MdWY*Q921-vp$bR=2S0;MtAl$E>1>SlWsok)d zHcj*GG4Gwv&)W+&DTURZyA9asb&6r62jJ}XdWpMqpBBlqDUv@tCi&8f@f#C+_kcuW zbQuqy=}Hf`U5);aQETu48B#N4#?jRq8Przt_3L{C98|lxDE;=imfGnZw;4z0(vPzx0)-Z`{wBW@wBX~k*rny(+U&Ql-X2=?Z;HNKOvAnTr{B=Mn7I@ z*f7$zWPFyHWYw}ZWfNwyT(r%)^>X{}(hokz>jFL)A>!XC0aL3Qt#_gaS7~`K z+~xdw-0W1dWf;|G-kWV0a$no0QqTR}F9b~dtJ^_IuO8msxROCtl~^YgOA=C~+Ya66 z-jUR()>g>L<)?#p`kd)5gxgb+{Dd6)l1GS!Yc`{Sor) zRi{c7LI=Do{S+ep>H}P?q(k!{#*BD`I===3vC-^f!~zGnK%0zLXUP-k2e6ZNpk zhL%}j2H#@w2CUpfcgahn--7hjZHcg)H*Jy-SFzP}WWlBqN1qgN>aFcG##Ra5{O3s7 zWR33-CfPjH)56L3d9vye#WnqI&QQYol!cI6lhC4I`_9`hs2e5_8;;C5t^q8CAdJLk zD}C_(9wn*=NNwShz`AzAx z5T`?KyQqFR7abtA@z`ZH4E#)|ZSDTJA7b*&h-&2h`3?s^Mwzu_)lUT<$u)fR=eHaMA>?drU~axQ<9sTX`xan2Ti=_LCA`4ko27UC*l#XT zl@~`+*N{F&dAjkwzyGA#NWkeYT*>@LyKa5s1x;usWpAlYn1sT_-?H+p2X9tZ(wcG{ z&J5$6+qs*6FyE9AVWO!uw<-N(s&Mn(z9{7@HnOk1OSVOZgrAz~Jdr*Jy$w4~=Ph_L zY=RC{xgCtRUBpK2q;P)67wT_b)dy=8rxv2QVexbJykcJ->Qc$8B>fg6KL(+e zlB#>K(C)uKQTWszlFx57eA!GM86H#<`^`yx#dOhv)80P?|LDe_J|14Z_XonBC3R>4 z5R^JpUMKh3v>}d{*_sdD`2+P3+E%Zd$DcW<%-DZ4XI;A!`Zl^f%f$uqhg(+0Z3`}^ zdcf$`iU+d2dyrOk-s6qd-S=fPM;q*y#E`PZscKP^(xzP7NW{}pf1z>dEhk7(8qQ=B z{n9vEDukqx&Qm|D!rZE=eKlm=PC60TM*1vBmqbAGQPLr(nVAPda=vyY8g(sw(bUr? zs@8gG0248=>n5qa`KPn}GZ=L7aTq~jV>u2TTQ&{ zkgx0bfj`Q$B(c3P8FH=YMF`#-?$P_qR5W{8y=!=Us)P@ej`2SRxi?T+nrqhzHi;jBz?CJRNEzEJCTKBh%Oo!V*wfirn!by_8Nw@g4s=#uQj2r!) 
z+`oTPZC+^@3nT%*FUrb!|F*N2dd`;EufF@j>6|LqTxM~~aJ68P$H(@$7mYsmi?TL8 zt5iLdOz7{(*PNz*EGD#OI!t47*DrO4WlQs;opN-c-*rU!0e|(6nnmRfBfI6#F{h5> z(^Sy)$L~2!34fksu>MsP-8V<5cjSB2O?6lmA~=I>XfvB`s@!Yl@VSiW>B`F?!6Zjg z-RrmTm2ptW9dEff-qt-l^pf1I-B-`^7n>X9GKouhsx2$^HbEQN?BCk51g zuyc3}BI>5@FBO+YKG*MzlD~)OqqG;KtoT4>yUii{(waP z>0!${)N?8(T`<;9Am^4*WhVm=+>EyCb7wsF_K9{w&b~BJepuZd3=Zg2_djxhv^Iky zJgr5U%BpN+I~lwIVgq%18fSlfJWNno>*e141@BT=!BKv;^gk-!FD(B?Ib}+EGX!gP z(8=wdZ5_;D72YAzk>@md2M=;kH^I3Wl>>*iw?ZE_7ma8GA~ZX`BX!%PN)yt~PXixJ zrDSAumTnydV;Teq))|xL{x6!YIx4Cz+7FF@bazN8jndsncXtXycXv0aG)RMVcSv_g zH!?IsN%y;c?>!dGTC*1a+QxT#FN5**t3x8E?VB+1|Gu9jPjd%n zj!J!cx7T9kvQ|iZpOZR~O|RUDogKf;3;=?LJ zQ?|Pv(RC<$42IS$W>NUZFCWLepm}8X4TiU>lI`nKC1gb>qsiUJLjk^2SclrzAD^9X zukHQ#KkC$h;$dqju(FZ2AmdpXg60tX)^ls_pO$^^`yLs z&>c)i3fIbOw@#AlkF_j_VoT}hAA?$c3g+!2xDKHy;5f{j4_+a0bR=YlrCE=%S`_}c z`lgm~mn?!LH0W&ZVZ*nVYF^|0zEKQUL!hLFtb}#*O7R~s9h5^1RVNY_r-sTeEsKe@ z;?afz+ceC}Ssgl7sHSKc=;~oc>&2BK+Bq))|ARvdKz|JvCop-?D-Y{A9|nUGK-BB# zL}uD=^`WGzLap&lekUyldn<)JyWr~lrZQnL`FliMl3@wb-rA=e$&1iITL&2Nn&Hz1 zO8HouqUa;v#^cM}%?JLcy^>o{&)9qdW<7nNVB~=qsD%CU;S0&fFAoaXZc6^k0gnpU zhapDKPuw$_7{NYWcLU<94ZD_^e^RWng0x7z8S)u=1#j0Q#5XR<^<7@>4O^=>eK}cL zwoxwqa|LgDfJS^heFEy)Xg`aW6S^c)|L{rQWgzt;Ydxp(VP?I%3nx~4NjvU0Ij3s1 zLU+{o1wEk~LCQY!f;sp>yGV%*-M)HJ+buidw5@H5#Jt^MvBm5YwDEp^%HK?r>#X(h zR68CDeDXkk%K-ql<5o9C(2$^iW0`LXO&>d7Y0H-WcomwmHb z*2{@ki{Svm0~cv#U7-9Pf9BM69Cr3|Ww<;Yg!#oBbZKTOvKT5F$c8)}Mg1{T!MqMN zHXDP-!qHB=fxA$)K%gSR03ly_D~TLH*nnVK*=$W`_%u&Zbf2sR`{LwRzc)Nz z=pj;pdISeYas|(iXH1>~foc@tkDe~&h9FdO?YyPNbGSi-ZKF8_^+WdV9PG0M{|+Yp z8Jxv=J1Dq)YPr_p^(4fL3WQiKudzPMMwMaQJt)b`v;RFY4|S(^uGXHOa1M2cH?aV? 
zq#2v06pGZd?tKRb*z;n>w1%}M(jHpShM0td)!uOm0-|>&0L{AB$9Y#qNi{|xermVh zS?5lC`)K*?O-Pbf{ae+-p?j zi)*ETDKt~MClu%9`tVF+iPsTrpDrW16YX2C^%G{igiT8lXh$D9P4*=)(hRg?@WPh_ zTor%!-%gZLLXD1e)mLL(_2$1?^d^ zZ)Ypl7u_9h_9?(EfH7&5I)+`9hRVfrVTVSgOpCAIszqyz<&6SD({))Ok;Ed;L$p$7H+E+airx=6-7t>@ylDjGnxDICapu7-JD$^ z{+9TfDpoYk0|{nAD-HfXk=dpnyf4U{ElW#K?=VV&F6!S-XZtq7tnlVcjlC}k^@nQSI7%Tr#r}hm^+jxom|Ap)x=+K*&Qcm z_57v1pY3>%*K#i{+^f($q(|QQ_;h)ZWhDSNwe*D^8nEW%c`zdl)35i{&~K7AjX5EQ zR~RS!E_Gj+BDd?btrm$r!tJ>eQ$zkT>N!7+_2oFSX+J~Fo!}#reKDe0608~ z!lp`~ts78|f<(Z*-C!XPNK>KdIyBUL_Jr*?7`9VTMQjU}#AV^)NS+H4r7K^NWk+&x zQqr!lHym}DY?A<)`jTipzHz*!T}xz($YFvgp)lmxV|4TdfuX8rfTi@U@2?n~YNf%2 zMjrYu#CX>M^57f;N63+P8gh*(EuCBEHcv}7uTyQU^M^=TanmE9eq(yN;XG-v3_x41 zmN|NcnYPuKgDR15x*e*{uWP1Ns!JW)A}z&BQ$ZDXtS%c;zrRJg9c+6{?(vmMcEkDaX9QGEY$0O z4yOuZekt8fv#M74mEJbs=t@va!G)F*M%*<^vNy)c~|6B4I?CZ5Z#bqyL&o`^wIA*S8c+fPsvF40@Xlv`4mbaWdx~iEaua zLJrmz5m!W&IuaYv^i2=9&0`#=^M=Ms-X<`cSR%{E60@yKW5T~*fDWBP?plfEe#Zzl ze(fJzGDu3qUw>D>a`CAW_w{4s*M7q8X01SJZwNW5*z8&qtIi5ZST`GzmX<~N)4`AF z4XO?vUtKxPVC1(uUF+Jlt&JY)YdC1M`t+a)%|G_= zX~tD-Enfh18<_ZUZlEsIt)MEJeCI>c;~@b{s0&5={oY-Fx>d%!RzSzCq3^*_`NT!1 z`zD!#*ZMKO<>q%imIGhWG&((p+JOdzgE7sqHKG4yE=8fKoBVd-n#8Da3aQbO=3;u% zjT>w#A7I(k$XVgq!mah+#MfXSG+!q&_Bp99#pJE_(@Z;^?c7Kdj<%voOxy@Y-L>L5 zX}3l_>%ZY4{S(eH(w{Vu{~0Gb?l{Km_)T84A@QNPq{q~u|ByjZ)(@9W3Ra(34!%IU zFb0t=Lm2ntMl0JH)MR#d>GN$%ArFESIyBF5$bAeNM>;s6u&PmI zsKJ!-QMs@!c2^xiu))SA_BEP??c4a2_&s0cAm!7aP#cxB4Xvq$MS|oq#0<(au>yp^ zHjy59Ra|{1TOE^?$u>5U(nd@+P6-Qlf=Hutep#6%EH+ za#Dzm<9}IUv`TSN=~M)p%yG|C7ZvG}IzLhq7iR$l!iI7KtphUiii7?DC{(1tm4dc{KP4G0+a5vKK4#OZ|M{X7%Rlp;F83uV2mOn;R|( zUe8;77@2mx4*>c9CtbUWhh_i@eVPe8<;Cyr- z%tB-l5GyF+zGY#kUl)w8`mv8K6z99&MeQ%iO&h}eraEdI3;HtKFT$B&!=2q1|a4#ZzQoo|p*l(1UAqJ%}slPdKJnWQuM z<^R7Hz{|L)`%r5TS*vZVRnhi0@2&ZVLbs)ZoqUqIQLglm*peVO|hyjNkmX5zs-`g zytg~~UJ_;4hQL}|2CiSS-lTYO`{<{H)o#t6*!nXh%lU8i-#eGTql~*TmkNjUKZ%?E zTW{D+Bu9A8*HQ<@H{q`AQSvwrYd#KHKKHwkS(LYr8#i1P>DdO3>l|-bE6mC3mo5Dg z0`6vW2LTAOEa|A@WkJvBxw09Z={A 
z)tIIeE~;82k?i5_YF#p0SF>>(A${=59OQNdukb(mu=dJ7v1)=V>;G7)wR;O2RlmjC zWlsG(9GZtc-s1GsqS{%cO7BdEvczn0vS?x_sO&vNpCA`~%$wJ)nwdC4HQ4x<7t!Bb3 z$Pzw-()=v@M2uRcx5!!ne)<~qJmzNMRog|@P;a$vb~oAjekz`Gk8l5!Cb)6T3yrPL zj!-3Po%O50ZtDiYh=TY5@4<@gM|6|e6|dQql1|M=E6gP9oPIcyX6#^M0_HB0lG%!E zRXs!gKgwm)ja6Y}GbrpI5O{ysQhGqI()w=Z*B4|;%FKtpAc|Y)MV-_?M)Y1yb=Gi+ z$GJDCHLX)qfc{-{wy9W$}%Qf=&@&es7591crQ;<{G;YSYwV>1}0QOZnoznPz` zkvx2CiWoo6={A17IW`@M`|ar_@j~s91JLkhJ5C_?lK7<+`|`j zaW89v`n9jJ(~;6tN~Vc33$rg2hN=+rR8wx@%RI)rokGr7vY`g`GD9Jfc&#YyPmS)@ z+2forR5otvuggiW19q!fA0Nc7RDXy^8P3@5G+|EuYW#N zY)m`#Pe`7LQ+PQqqz9KG|l zTlW-x`_nxLzsqXpd=93Aq9=V^;ECxZ+@(C5@K*2~^zS?dy;N#DpkDLd&o;)gCI6h#5xCa7kTKm~~ETEUxx(4fRg^kM^S~i)TCqzrd zHk6ePU);3yAkGN_R}3%SMz5Pul@zzy;QO=*wH4Ayd$Yx~MRVm$!DF#*TaK*f(?+7# z476K8wyx(NXF-fj=VzkTT(*tx{hjv{o*!Vw1$FQLusU=<>dJ2-DLq2bK$)h8)-ZRJ zfof@9>)1{0qMQthY>|q%d8*|_;%l9$$elwqoULDrN6pMw<=u`sE zdd0K3EQk2BUjiJEFSv|RztD6Ve)eZmQ7tcc!_85%@}_ZLm_xp+@~a$x?j(+c4x6`| z6E{wfx9{14{9{KRmr)sCr$C0Q1rad=c9EP+H}Xgw#z|wUYtw`FW(PP(%>%{~D!vK@ z46uW^tsixn9WwL9Sdb74W@0rCt}Sr@omnf`z2X5Tkc?h1}1!RR#el zGnS^R3DiugQWHrel3EHABysVQZ;7F(VI-6)E>unG`~1VCE?1krVFR3BrRlNHp@WMW z_H~8}krxBUeSE~tdc>W~7#<#1PLg#PNt=H{96k%_Z3_uO8BHO%?l`@&DV`N2oR5O> zV7iGJ;^aWgJdY*mT4h$270fDy6w^vjuBbG@T-7UOassk{NKD8VyQEbtZ?)=qmGbMU zd}XG+q%kGB?3$VmMqE?|d{m&##1%gq3S4|y-HuXP#Rqi-EGE1<9?hXa;jpsfif~$e zv3mH^?=cE;(mKutJ_KUpaGEI!{9;VuB3i1Bzv5CGb!&{OnhH<%jLt{CU3(?~_4oxm zuCj@zjxIWM5$y;3P1$?4ecd&xng6c0o&TX49;E*~U%frhej5FJv*rTjI`x3AUL#D2 z5e^3R<R6K`d!4ubM7+k2Ra=*kwLu7kpPrqs$2R}F zb!0uO>Gz8%)fuXW%%vGYNJBPOg(--)jM3C=_;i#epv!O3+**Qa?@hXNHE^Ds!ml(q zIZ=uA$jnefMC=rDO6__BeyW!16?R8-mS-#b>^yVcH-&fLuCrgACL6H3Izu4lpO-ma zz6;sw<4L^u+|mvSp&!Gw2?a;xKBAy;50)qBJg%QOUC~cilAI{wzWOIs{WgFSvi>zk z0pxXD^XY`$%@a!C$F_T~gKxFFsw&K?E=@4M@lzUgJO*ykv$b8ovV~O&EUDsLB_))V zKcrYyX-m$kJo~S5*N^um=?TZ=*Ktd`4u$BW?@m^jIBp)6ZN(?cMtE_l_nm(_5wR5K zxMN)7ip3x@i;{?85PI$;y%6i7AlH{$eIKSYX`KdhC5?mu72SZ?XeIE0kn#DXyn`JM)2m7zriu>yrpqHPkKe(@ena7e0D@FFLZi5i 
zC~EbrhW&I(kuq)g9zYmF*DhD2DNctrw9)<1IqPa!!`Ve&$F*HzomLf|WzWF$Y&x`* zmZhSTeF#U3_Nnr{UlQuD)3*A0mF+3t^$0nPu9rQT@GQx$^1|^&sC5L5d7~N>dLmoQ zDtpV#%Qs10F)1F2wNZ%1>LchhMCiXt`e7{!CKRS_S-xVBTd~#O*s&T}K(4*2#;Kz6 zZE>vo*X00fPQ_NA7{^a6@oH75UW%>n&-iww)W0@N#ST#_rA_q30i+^|xxl{tM}PD! z8OxW+7CdmhyIjY~_+t9)tHZw4R?+JdYf6osoy75jJy92TE4tg`K>`5 zFMKv*OobB8Hl2{{Uk6x0An6<@#FsDo*%x2w1xza3Pc?4r>JDvuPctbf%F#u&Z8m(a zmI1lh-ebFd>unGaHUO+o&8l23zlH6;iTO<}uK&FQ4C%o5qQcK!-(N;cDh#-_BgM8g zBgN(P^Zn63bjtu=Jd4bZ>OYWjk`)+qE$RU^XPVZs!Adr|%{AZWXZ(h1D(5CG*5TeP zyb)zR+_#Z=J{G@C4-DUZVhpd@#-fQ1!uCs(XWI}~lpx{S%V9W()=lQ_V=!&tC##7U z4guj!&HGK;J=W?!R^t!8GZG(4*7(t4tG*m%n;8N|>L-*F!KK1ifHUEeNPE+H!Ld7@ zuc34U`Fbrq*~BN2wF1OU1{j+zLIp$46D*XuH5#8%%%mtoX-oErFIc zeDi}un};4IvalYoZ~_R~&s4JIT9$opZ$rPM#uZb}jWg`&8MBAza<jP8=ewEPY1b%c3 z!3PEqW$MKC&-$rBKq(4rl3GB@jpny8km<#J*AS^$U0>X&(rFuzc64&H8tVoC;vw{B zgbpz7eRjOkE>0EN=x$$y;}8Gr0Y_Y$?Sr%BG=;8y`5YYI_Ro1 zN=Ydlgr9iUZo|85*-U6i^Zxz2q13N#?gs~OWn+N@T`GQfJ+o=|27M^{AVBpIxAvCjROLTU_&Uvgz#qt3%hQT(0di!ba#$cgfl5xCM@w23cg|92>g~(rG`nk zb21JQ-u?K#;+n1t%KbI0<{Iqg8Y8{W`AU+BG6RIKDHEZT)D5e4kexPkzg}98hbhfw zI70gM6=Ug%*O^h$p%F{2pT@HX=AawdDn?}D2q2}S zHEGV%N$Ww_A=Q9cFh^StZEHNB!g_2H!*2Rx1Sd%vBW<`2GI*FXYjigq-9!Jnn)o&n^4SU}$1qGhej$8XwxpbK1*5_Ut8mTj~Xy z)CH^xn-~hFB^+zl_)d6vW90BFnPL~`Nr{bPG9(pWYw?nx#>oq(N=Z*-w29fHwkE#J zrywV`%p)dF;s5}erc>KN9a-tG-_irPk4C7SiGuW5+~;U48~FS9k0wG<1al{-bB`56 zv}IHZo27gy9T9j|3{AW^8G~Znpx#ijqn&k^BATvH2?{Hk!J8}>O2vtx_?Ch4F}_r; zkSK^Uonb30XSzI;fjBQ_(`XiNSAjUJu^vwaS}1)Jr;*{xyVrHvkD^pDt8^43Qoc&W z{0$qM(!4H@7-vZ5y79vXULDzC*TT;HHni>NO*9lvB15gpcDm2BLdIouUT=!gYRTU^ z^WT;94)zr~=q|!7g{5Oy?M2Y*H-n551U-;2tv>%lUm}Fg9`qarmMNN75j&lm*Uy`{ zJMzlcYz=QXNS`tm({3pqpY|{A9sFJ1kyN)|NYMywnpLk$^J!rgkH{I}#Yh1=$IEST zW-(ZSZ*jvhJ<+KtdtHSSBvDa`h#EevqvrGZ%rn6NEetweH} zQxjun z(`FLi6Sh@y!inCBCwI3wczRZR7Q4J+u-p1Qv^^R9`B5^Gf~)ZooTY2lHCY4O7{D0Y zP1NmPmvImh&uN>b>nC%oQ#Ri?OA%wWgyYu0e{rTohlUxN$HA<6Z9UcE0 zh~M6i62NRj#XT$E%eZKk>^j;x#CGaFNT#5aIz51_iOq#h<4Sbi2*^3L?d3;RUTFds 
z*)C7VpXvdvb>X?v)T=ZYm`Ff9sok7wEXo)!&u2VacMyf6D?9Q_LSN`tHSh1jXG>-m z7)t(iUI9xZ^bm}I`>|=Rm#NohqqRwQy+0H=wX@UvZ+aV+y;4g|%k9YFKzG~CU7sv$ zf8TF6cfSQK`JX^)`qXlH{%@$^VC;Sp0zJZbl5@T~in*bP|EO~Q^;c!?r{Br@ffU*hqW>d(r5%aTGq!Z>uFs2jh!$kZJD}TA5^r84NU?P)m2xs)_Lwh(F_6InMYk z>XKI5D&7}vdn)q|QFzk-G=aM>sfW#N z!fEMt5>{2C=yqa;tO$nPO=BUhti(!IX)@E5Q=SNz*%;Z&GS%R>ARu?+y~}j3t4bGA z2?TaniO`DCcvXjwh|x-fpTnx6u`tUgW_M~7NerT|jvS+}+)F~fds8Mdv5Zk1dt$sd z(?DDCpdCt*!)r+kXw*8G`}4HPcmI=lsLTbA>N)GuH3|LK>$PJ)LwrrxTvy?W-lQcb zXoxyhafc_p=BLXE)CMcT-_ub6X;?O|bm;fXG4F2VE$65Gww!90e|F2+C^gx^&sU{- z6{hf-9esE=F#K|IG# zkRogi4rhP~AWlXz2N==fx>nz#wqwULxMzYrDV0#Tcwur3fh){FZBPH*Wh=A#PqyAU zYUBG(*Sz~_ln)P|5gd4Kmhv=WCOuHIP(kexin3<}hGX@By618o1~Z$VbDo+z9~1Id=|BQxw@q_T>9F~1N^qcRdr z8=%|^I1^=tS$qW_tkC6q{>rZ5AZ$?nXh%Y$v}I~K?2ooSsOm=Aq=dF!D5~P52H=Q( zN;`<**Np2;$C)XHgf^!0=8!r*focN3y}G=pqGny@C3V_(q0sh%S1!Z~@WK(+DwSH> zB7;va0m`@)*Wd9A&0hFWTpqokB;Lu$3YoztiT8uh21Pcag-#fn_HQZK3{LwpSuJhl zgDQB0&lp%82!)p4KMJa~@mD9tmZflFK8LaNfei{rhjUqgJ1pOYw6#3$iwqDhQ7Do- z9MoZADA(*j#LmKXsZf$sJAqShpZ2``%V&zkj$)T5Su?`fHn3wX1vL(`Q9CJSlSCprrB1Wi55>&omXhT1hrF9jHl6TU4GsDidCXn476C0?8!n~W6TtY z2i;fP6><3Dpv)FE{ zFHBYJu%v>X@zVqI?bcz!NsYZigji{9h&|$q?ACff{e&`d#uxY_1=6J5`=oG55ym$v zZIC~b)^H2CEfYBIYIOzE+^%9M`TR~%9V7P`rs6qG6d$zN5A3`hgz-5ObiLQ}|BK@* zX_t33*k>3qWZ$`~eS*^aartj;CS;4|rcct!Fq*`^9gw|l+@9C%3p%V={;}oW1W@@a z8c>gkboZM~`vrayU@d}cr~$X;D?(s1=Vdd!T`U{&Hx{wi&N|pExV;6 zDLIH*BI*na;Lg1eO-GVch=_JoRR7L>D4r~*Ls7F#f@d+%;Z-McI8!dMtG838 zZlor{igzek7aRJS2?J0&t##aY=HsnfT-q zpJ9-Nd~Sa{*_X_33B}(koF16n>6uze&Y6+?&4dzMlXO8DD2`d8g&Q>X)$8$1M85Jk zO*A#B_ummG?;S=Cqsr}bH6;X4g|1*sGpWnge?BV51OAQ3iKZo7kmH<=)t{v6;}V;x zO>!@I=`4Rf&{nOULsZCS(#@3ZTibLd6 z>DZOaV?dSAg8ib0>Gr38JSem$>=9L8o{5a@!cMQUQg&!s$3YrC%^-N3w_@!6hLA?1 zTWUi3UhNSul*)PxYUduYw+Zva|bX^I< zgY%tvPIYYu-dFEnf0;v_Ea*l&JMZauVF{dt!#nNyx=wSB$(6)83Q>b=dhe+Nv(xSi z2e4Z}z)>rP5-m|pELpQ%&@FGwx6vl+%2YP%YInTnDq6;A;}KcEp|l@G&o?MWQ;go` zYV)1yhl~c_c*ve0ljkZy?Rp(F%`Kz4%Qi;22$2B?9d3xX!$r+#XM#IbM0MOKH%4XgZ{6z}%aX-@Kt=V|?3qcFebP 
z+85!WGoBKQhx0`*b}7u@xrMyC^jNo_qNNO9Ez4O=;9|IhgqqnEMn9xtI&3zKW%8P2 z=WD@O5+G;Mc1OeE1xO*#Kk5)t;V|B-?lrEdw>2T%i3M_8o$?QF&T(Iv1DjZqdiK7# z8#wbv0(lO}Mwnb@Txb@f^g91QW}XDHmnlKA5~At?YQ0V+uKE({m%ux?8t`t{d86G@2_RMqJFThXx<*;CQT71ydEG7+PdS67t3S#Py^`yxJ1G)#`r-jFz4o$ zFO-~0Mxs=%SVN_dMSG|>al?w8`tikNoeAME?98`d6|};X*m{pQ`lJJ*89had)G8%T z+<{=zgNH00d14-`C1n4MMmqmfcnv!@T?K5~hr+N==EpEpxntC^8eBjL9^J}G56s5~ z(u|taPhjiK$b-Jh(T;zYOUq_VCr{(yZKbm8EJ{2fhcl*b;J5p@V#h`(YTf+d67u_t zc)ndN&4QrL7ZMl9j5k^cWsQ?ZxJv#3nD?RGcu;SO<6V{Lk0bNj#5Y#-RmiI=Jhh&4 z{>`MB*~kH^?+W%gz1a_10I(3713GcA4|+*YH<}m^(3kK50XPKsa}hNVLnBflle_S9 z=5wXx-1)sP%f^|q_IEV->NIM{+T>u@pqPu(;pd*5_G3?Js@g~g!t7}PT;Y5 zrkh}+cMBG0e|gjt)m}&znIv?E$Um>t{|r?hOzBe7Q^X%$W!S>* zx`7TghDxas=z;+JnFo)10 z8ZRlWt9JJ18(#jfuj_4stf=0f!ELjLKW4x%XrZ`2l*b=Y^%|AK*SJfv8QhP#Mt)cT#5`u8(}JA~B<{k0X`|fLC8P(D{AcErV>(##wkw z=;MG$_OD<$5P6+2;5nkc#bOu$18haXyzcLt@@u}M&-y7Np$y+< zZ@KY`r+!>Te;G26_GAc(X-_Jl(RMtJ^J6jsC4JXr7bmE>|}29xd9F1IU|RF;Kl0arOMa;hB8)=b2X@yH`ALgRF8N<>~kwiBfddCTN0X4 zNr8cO?44}bgxuj(1 z(cpX!M(bvr#xWy)5{q%k|g9mR+6e2RQJv;?+D}^8xf-lxKDPG2%udTQH?WmvZFeFOf<^ zX>*Psy*E*-TFpWR-IHsTsL38(-}07DvbUA^RFLAN;N&k%r+ld#_F&d3wy-#<1>-?7 zQ4S)?y;+`yG}AgxOYyWc>_LYI(#f0z8yhl~X>d#BW}dO4kNb9t1O=bkgo1= zFx${Z6Im{RG==Z%WFP{XN#7R}OLD>IGuUA!3o{a8Tue*Lu{1qyjPTE$X;TI23`{od zZI#8?^YEkSCPZfah=S9&=MQ0n00TdZY#P#Scphlx*6EOFYBKS zMtp?jN+@-ODh{(; z^=3xt77)rEFG|42Q-35@XedAjiP0e|4_jgJ!jg&A!QJp3LRIs=hfOCV;-^0su zhxU^MpVJr4$@VAbr=#2MA^Ir+oci4XA9O!gd-Q7IJwo5d?|L8*zrc0W-87!n*v?l< zWSYxz{(umqmxtyqix;QG*NEkLPC4@=HdqbNe@S5_?M5AJ_`prfRzw0^ya29ryblD% zu(1A2K=0SXozN=ZOYhKrzhtK#YN61LDVxzFiKIqP>o^l5p#W7M_|r+)h9kWgBLX>z z)n=|h`_W9y;g;d9@uY8srLv7h=Fu#Mt(QtbvtUDD3LqlR_e{DI_=K^#^+93_mNd;Y zTTGsF?2i50xS98ZIZ3_Cca0h?W6F*n{@Dfi*uxHcStYv7b64KPoZv>rz-)wa-cw%tVtJP@8kWIJb7FEx0ur zRHTp|5~q*D!IiG!!xHTqbR$7u%9Od50H%01>*#Bykk8`qOa}c&UIR$dd+T4uc`>sI zzr^sP@g9R~uz>j68Jp`(a{u`wozT3ew?;nR6t9p$@m$gXUtK*jM zP6#aSww`rCss=o?;r95vKkHzq47U#Zr8?IGUFFbl0=R6|Iyvk^bL?>1SSM?JSt2MY 
zKOQl&1$x&9>2t+^>B=oAYOt|CV6vwV&vyjl9DsciuAot`b7@u`YbVj<-bHRVTbRK zgU-zNiWCa6gPDRdKy=Ge9-lCtU}Jp zGS`?rQR21uH5_c7+|}0UjGSo4kTqQj3>MWk5QAf2hfzEvBPX}b-}c{X--m;L0TIPKVdIlhI zV+SAf+CPpI-D9y^H#_Um)42JFoAbP800J3x-&;cOqP4chPy+R^2E6(}A97!#LKFqC z0=j@O)`p8ApR<9NA+m`5)Ankt7O2m3xzl_j#jgwnO{K!O{}`d=OEVCw@!n!ok40ae zI!nI#`?R|`0ONJ}hQgOUV$lesL~3&jTOqLES_u;bjg1-`7e|NoNRA;wTbq_BgJLK& z=6ShXF}cs-3ikv^#62{y=nQ{K(!BSrs1KA-%vwu;B%38fzirPz#lz()Cdo$1bh)M; zm8T04V=qbWz|5H70%Q&=C-FEgP3)d1uJVQ?I8!9nIP!OEQBKV(Wa`JCT&df)IE=<= z(8Nm6(~=q>;LYWv8i?G>t2~wm0VSv5hD8*##mjA#J(jg78;9M=T`*_kQ=?t24bSw2 zCwGp0@peV%a8_2GTT)zgxz^4&T+c}YV6QP#?l1tZf2Iu96C;o;HKYdtNLT7d4hC_S zj}GtFfk;(GAFfNeGtJJpSE8r^Cy4-G^9IfYZcLaq0_*661!_CWb_3)MOzZ@jOq5II zgYOb^^mtkP)NodphxJik$ln_1f)uornRLG+)uBkh1oFy@fjI~sika=EkXR61oIkL< z3?r>;MQgBHw{KRdT%56P-oDi6lpy6lvP1*eI{vc@r6(esZP&6&{D0U&HZt<|_se@y z=hg~{`NLz>_I%uOyzj zkUEKh_x(ceMzyv>v<$+!1)t8fuV3z89xq;AUmT!2k(QCj5oFJF`Y&NI@A^V(R1zmF zoHmL|whCP~qD;#I8KNEM8yB~~_?$)+O!A>_zK$qiO3ui*x#uHqc0$leogNOCysA6T zm#h8Wj>rC$e^Zk4YM*tVmy(!tf&h&CpuYaeLA2F3Df`x-Dbu|(#Z^p`zdNX2QfBi5Qp^*R$h@&*|D+<2;hO&~qgz=^i1nhy47`4F%9wn#I1$}Wb zeCg9k*t7B@TVm!2h=#r#c1fjC&&i9$Q9CJdtC^JnGSa;o{i$wx3rd#jU8>*VaS3Z9 zqO#!>vvgLshVJHCHA^rfvB# zXu!ooN6*3S+N0>(V^I22ukMoh$oDK}GeA6F^y1%| zN$zNZluY*2(p!b>%sy`KDqvEJc@>BHmdEKbYmQ2d|8Jhna5xo^Psovu7EI~*La9e> zDWMXoSsrFt9_CIhk~U_k%z$S>@%LLGgn?GJSj&8$1-!NG|5c z{!VMEv)RF83azgS_Ps{QHvyXEa-VZF->e)ZN?Kjnc zpl04CW;{XoZ;s>VyrI9SK=GzL!dgxl=7K$wty0`hRe?AWlL!&X)q4mMSZWkl`)n3} z6sU5@H8iOdiP5XMxA#jHPYR4~L{*@_k5e6ye_m&N5YwYh`y&~GgcUZWt&@Lcn2N_f zXVhsZ?pHyjq9umsq?o*$qC?^Iny+T-r%vtOAPdjNAp||(&w+fbiy?F0=KmFTK~2$z zQ&+&^Hy5TcEcn^+t1BJP;>-b?$4~uQPL{G5&L%l)axMObwJnbyk=y)226{{!Trt@)LP+-YaK#Aakz z`{eFX>j^K%WsTG6XjM4WMd%*Ewaviy07xB;5PH-hZFqh0yh+Ik74}|(oNtL^3{U|E zpb!im(6#Q{=eqtTAT-L(V?B(-p%c&O@uvA=7bx+yPQUg!gX+a|f`D7!r`hffTqJVe z9+Vpn8sNvQRaC-A_X`Qfcl>K9vPWq9n-fi>Y z6;u0aqwl@BdhTV8RMf66zF;DsZ?z?9pA?di^>+w!1eD-eqA~(i_hyjwjjNQz9HfKf zVDN(^n$~e;i)b}zdo{Gb6O45KQCX3W@1P#%L{)wdp-xYM9*U2ZAnl^tOq%%ml3ED+ 
z3jGgP-yEIS7qlBRjT*eMoraC=#%gTaNn_i#8#K0)#dw(Z>0-?zSX*S&ka=dZI~ zojrTznVDy1Lcy*HGHs9$wdX8Y(L@kL(7Xes?&J!f!L#Jt37Z`1*!MDowjlU*m)fav zA_BgXL1e)_%uI#7;{rU6S6JDTcR-LjrwS1B^LEwH;Ko_T0tDk5OCP9X== zAby-E+nyWIyerh>l%ebFF+nyo$e7@lQqvOR2Tn&kqb0q zPx*1h@S#+Btk7I8QSiDqw*V)L+V1iVsSlRW6aGEh#}g#8eog$knDT+UCWGm+=0VTr zz3%q-=yo9SH=(OU?eI(MEvcP_Bn%CMDOe~B9`$Z)_S{F%$im(1?7h-HG_G-fBKv~Jij zZ%(Z5JpROA8CLGzKR0+}{wSUk@oJtVgt?zJ`_Tn1LMuX&5bF(7g!g1e!Sl0?D=V1g z)U~JEV@4>tTFVm6ERfGl?hChqB^u@NkJ}gW5pP|N)7o1~DI~sgtbreY^)R8shoXO6 z?%_7z!2SJYPl5;j`(zr)YA{R=ca6{+h%wdr}4AuUx<<4vkOAujVsW9K95B}!t)KmxL zQd=lmm*=XWQCj?vQ%XwlLb{OBZmOOj;9R^-JCTdX`N*B%8T)%OnRQrUPd19poj49`i+{qiw=|y3K4jJ zlHq?Lla~FlGZWFcZK?bE8E81%SnV*Fn0U3m(~-#yUAgx2W88H)n+GJzzfbiTP{fOo zfcrIh_BJxI9WL*ai3ikv&FX5|AO)#mkQ^E!aF2nJGg_xAMyHWtngWY&7^Kj=&bhUL z%lU0Oy-$m$1!eW<=1El0^qIMN)jp~B6I~_DDg{gsr;*y;6eNvg50a)PN;AZht1Bzd zx6Q;(y8L9bI&lXwg0O@?X&&Ep<;nnJW=~p7cmR#>fEs>O4OcN)qH$O)Eo%1{u}7JE zI1)^4G*+|*hz5;s*uBD*27Kw!wl;kJ zRmyVM~ z;JIDo%D;=^(VYsz)=W5x_Rx=!9W-Xai&pFDEs+U?J#@kL`}~~Mdc?!Qg5zORr4HlQ zb@uB&btcmi9U%UP0sw7==X&@P&oAX;U}I;m>dj--ZYA@vf6}Jw&&ov~aP|DV?t!LC zACM2dZv?i;h#en(O&gGiJ-t3BIh@9lq60kD`m>*TPGmn|CZA{d1v_=OlYiMr@#%7( zulcqKxR!cW<-q9UhtaMy-1pUHTk#58sPLZQ zeUAa0L4ViVOW(Bpz&sWR%%~3Q^xDV+JcyLr*y8p++e-9cmZma%Mq?F%<$hoga#~XTKAJM^8y71 zp3XnqkWuj64?Y-P3(hQaHPEh2?N6+d!8|_3_N)1IrUiXM$aV&%ElYVD;}DO+9%q`n zfo>^ci#fZtj%^>hHHBvzUU(A&_TIj7=?{|JP)nKb+Et~l;nB@U>f5mnsRQ9lAz&QY zl>DxI!d888>IT&mIt*X9_T+fO@z+0cmRToDMa7#f*_uE3YT%${X1j zOiyKwUPKewv|~Mp3c;hQupgM$K_zqd`0ncF zS0ZnP!W*M7n}ZO}XqC^0Y8j*;5ruEP;yu?xR(H^>gH4D}g&zP{geZv^GlB(Jc`S7J zy5~<6hIws#*wYauLs}&d4^|mZrzEM7z-k9qw+8-PXsT1hy0T<-m&nXC$2wbWqiNU-wCu%b)8o8J=+Y+4JLIbuhjHW@Ds#n&i>r8EBTYTo?F5}k`V@$I25_pc@^hZYw`sk%nq5ppO z|6Tpvo-&*Vjw1iWY&7EUJ5wuM3Kx;BKAdLBsjxJYq-dmLU+K;Asu=9hj@Zf*_Nh zx!ZJ=!t4SBKM#0TE%QQb7kS5SMFp6q8V~)x#_My}-pa2z&I*=P(%NgrK(1U}$0a?1 z?Tiq7A$71h$379G%tl-rl{f4aLRHQ>(2Xn=7C+!6ijqlKCrp!_1PnS>6s0MV|W zv+>U2qU~1IKUG*0#+OFzR3T0Y3tBsqW2ZnNckp{HN_CtzUc6;Ip-QoXj{pe?$+_h= 
z=`l%u?e27OQ0{Z+g=mqM7#TW3$xyJgPvl|0nQOehJ|mp#l@UCLQ|aF{8Talum3(tj z$1u$0tHgffnXBSJ_4;Pp`KLcwe8D`jjO=IsoT&eA)g&`w#sE%Y$G#qg(-wL@JC5FflG6nr`CM{+*V80YQ4TBPLui?@xWJb1vWZ2 z=7NG04Ah{eCN|48=nOI(fI}o4^?TiI6WC${MaK;ai5K z_n_(>OA6CCXE?U!ya{j-PIQ15LBnKV%+(7~2|M=ly)ooOR1ka}%LTO-lgf~|$EiYF z$ynkbi^V3OotCmEu$bc(#xtx_D<=i7J{NfKUYS4 zz=lXY5pVLAY}P(#;yaHx`wm2|5qE;1cJ<7fCZx235n~##s6g>FMe|b%w!1VhN1Lf5&acPgdE$8!Ysk;|5g$0usV6st>Pt4XZrlZ)V=Tm+~ zkq0$OVkv{mMiCM_ZXqC=W=qACX7K#%;P^`~eq8gAzO~TbBu|(=Rs^^xr~N}DDUp#P zU8`|F|4Lw!*b-lbP$P12H!GEpJ6V+5yB6c4k=7$`YULVzx8idur(u&IcGiNYa9Xx3 zH$0*#Mbb8lrDNNc5~&rm6~}WNgij!TGeA484jyff7IGB8hnkk`rizRNr_(V^B0nBQ z)!Op^T3G7No^`_Dm(cffOLw9hy?gz#R_NI0*^djQC?h+8Ys5EYx(d!(OTXtwe5QSS z&opHY{OP9xUZxX_ntqOhBLhYJXQjjVRrDk~LsPzgJ~^qO+t%Bri`_qK1K68oF&WiF z;0)C)Gpuavsk=H-T4OtcU!}=4*qD(y(%|WxGsTW52hW6~vcS26o@}XvETXQn@~7uB z19FVFitXDHNRw&M1_t<4SW`wzV9&B!Ok8?N# z&k742{6H)#CWa`|j1MSclILNYeiwI-zz8ne5h~)znxG1xGo;8P9E-ezTg11X>up6p zk|@}y1Uvl^7iuA`AMfMJFC_VkY5qlvoottnoI>g>gljhMDs`~xNkm-LU#x{ z+b6}L!UyS!Xlhs*OL_LW;X52Catr&&OENleK_}5S`Ou2v@|1#<=OwCSN8!~DyVJkF zm6~Oe{^VE-;oD~^LZf@C9q9CavnOR(FJgT}J|!V2!_pr}^N?=4 z2%i#ZwV;5yXKU~>nQj)}-VrW{U_J3f3RcI9>K0M^pKY<8HaKOE&%@WZ_B19V;nR#G>lU&I!IJIRzSu3Bw2WBR2D%J~5x4eOg=S40g@4Qq zsBrQq@w&lY7Ykm3Itd)-t0=5-l8}qtqSBLh=v0vPt~D$+I_4<1CGrJAEWvKW13k4T z7ohOQJ>CDwp}0}pC|vI1SP^g=e} z^5P8)UZz?bO|-@8Jd*|%g&ypKG1bUu%%#}q92LVjspsE_zt4n49B6;a-#Qoud=-o! 
z4Ubb(sEN*hW$eMg#Do+U_)EVfl`Srgr=;8Stw^28ZLA-%aiI`I1|DXmC~iTD#1Yan zhx4kr5jN6#Jod+u z0$RRYyS{M6tdWcJc$=$R(9Q8HYm$u;0WW%^FkIq1vM7&!Kfzd?W&W9DITivbsg1!sAwAz>wwXD6toYn6L9Uy27q|5@n8h=Ol{wj{+U^6s&o*Yo&v z=^QaT%PTGNF4R-kbj+XDUj2K$iGNeU%6TqrU0w$Uqb3^W$=(=yTW*vd$|uwe{<#J0 zQPDvv5DiB%abGCS020Y{w;vigr|_qpaY4a1o)hG{rM|Sm)jyS3Je04>CB-Awrx?yW z33f~qoaBp8{HmliOBXCooM)K$(Dt-1UdS*750^=1v;?BAkTj=$>Z`7XtsSE>#i1%g zMR2!GOUU4ej&b_dCZdeVs&TF{<>A7XWDbX!84(k0L$*m!K&%U` zsf9$hBJotS6ZEvGLsw_?(=sGA*UZ!MyY;2)DhX?%qr!nAUZ2y7`a&Q~tQGT0x{Y9R zID5I8#AtdJg4E{5L5cI^AfK48fR5#@)AFS^M#|EPJw@3(GLvEQf|GeYXm905W~B!A zVoqaJ3bs+0)SeD$LPzn`a;oCu8>&LbGCC0vdP^#&4B{O`4v#uUrz9DBH zJaZUbv*)^}``0;UCZ>C)e}^___WXAw-0fy@qsqeX$Td-A{j4_V9^Y8VULZ7y$W@|< z=ZG`_aw>sTB=S1nG$>6cuF(kc0YA{NnEI!PsdAE$J(Ri%T)vlCQvVX3TqGXjyw;{=*qB#KV9@rYEeqXcy}hw60gq=_5V}M1CpDob#z@Z zRqs^3mk(zZRh};I+(~gY;!WG!synX^9S}r59ZXCZxNqKJ+T2BUi+@_}Hh5PKXnl6* zy@Sd+K1g40(?7bS7B?cT8#jVV@LQ)Y?ozs!{|VC?B#68x*`_hXoByL-V>C5WtO|pL zr)3MjS98RLi5)w}V;}{VY+5QYMI{P+l&F{lu|#-7uM!8#h!SB9fBk0W&$nGl{mP(4 zW1VFJp1n#&UD~*Ac+)^^hBJ=1_t3Y(Z+t`M&X{`Ks?SV3)Fk}Yh;%I+S9!(tf3EtV~%BPB=Nc4}$Dxg!5op7oL^SyC((W;_Wn1)zqa1 zpM&TE&_~x14UC9abQzs*yX8~wQ-dZwpO-(wx4(M&{V~#_F-K@SET? 
z0+LR#xzygbQ(GuM_z(d{YArp;$XJm;r06wD(**?cO@0dHVzp1ecsG(R5EO?l=2>_S z3_d*hws;sEZfVzjZ5M+~!xwcdVJ7Ru_N+EBVM^ZaB!Wsg?QD)^IyiGJV`3p>F;$N& zI@t1G#4@JkAqKcw0nnSdVB}Q2+vw)Qr0V6v9Eaf((X9@?z{q!;>3*&rF8=*yscenn zrmth2Fu^FwDjB0o1Zi%DAzFD|eV|?UF@vsEb4cgN+ePMNG|%{64?+>Dqn$OUs+BXu z<+F*Y2RuQWom`c`jeWZc${xXgxXciik(V_Jb>sSdx1o%#^Yek%z0~7ur|UhOIv`+t z0>Z8*Wirw_tcfu5 zxI+C4I{D9jDYbh6gP|w?6=??_foBjW&++5Ap}w5e_>mfo)TFWWoC79FM}CO3(}%Dy zkp^B0xPn_&zM4(_`zK@Ox(H~!4< z0MIW0fIhkapuhEf4m6n00Q*H+45yBwHnpG4Bx|e2ish(>Qj}gg1J5=kq=-{P zTH&7>%Wf~;Bs@4_;;IlkzNa=1uIO06`jed731CgdL5g`0)0tbyi4IwROKdPs+7i!+hHq*f%Za(zcX?z zkYS5kLN0tDaiB)Bb*VNYDW0is7fKVs1j@c-NN?;d2^PHI41aKUlGzqL1x59VU7WfjNu4F{Z6$)dlp1VOKfB*7*N#VRB)HucpOwOG?ZoJgim*ect9U!?N zG`N4`z5p;xMDRoArTl8c*}31Mtm~|~>wrU6I07z(YFBAwG=!s3=IQx2d+R6 zPFNla4;%Y4IjuYE9;j$AE|eK@%4{v?-IihQ4+S;CEJ9I+J|7O?0+lBQd92hV+;Iw^}3EvY?L z1bLJtxj(Cl2DA(Dcw>=#VuH`(1$nAN7<1mf^>F?dV@Cis1=t#qZxnXJOWd&{bfb~A zw9`k9A)N=j%V&rKgTF?tlm~3`wtbmD_tDeZN+=ysOB07J97oFy@NPuPtOGsKb^oRd8;IFw1@c+yIVjs?i8UKlOK@hhS# z5}5ReXi1k0_kk|$Mfvsn5Kx{+wGp6zLlqv>(CH7j`9fC+#f1P2Z>!((`bTr4V` z_C&iQqbrll_Knm&dme28=9iyqZ<;ZCl-$KvSXtkRW7}&?knZ5N?ss+$7-GfH0}Fp) zQ#x7kb!Ofm84rYDOJ42mVH(46LwdMQ?M@aTKXR+CZ{r*{klEPJ>;J;&Au`WcI(2({ zz_St^c<#ELV_DY@i|-rCu8M5OZ(Mc6tpq53wtElWfi#^jLRv3**^_BZ#hh%x(7c=A z9NX2swtYDv+wVYPeE7-^7_6=r@KnM__>*n=?2dt#Ip9bIezpR`WcxyU->qzIAoF28 z;$OO)ebpY`rtMB;tezFZu4t|Mj=vLp@#FQg=iBJUC*%cKB#%BJ+qvnOzKhrQyi0B| z!mtpY*GpP)AcC@FSYN0~`UJ;Emj1C&jz5OqA2eF>*!yW%+|`kZemTH}mb!!mx%s>} zp1~ctl6lVZ6P;1HQ!+)=G|#5MWlvC;pU{%ypEOdBIv62+Vd^m&Su(#6VwN3L>WU_FsTmqBWf zWJzrz)PK62BHx>3Ern0pXjAPL0krTE5lP}3%9}UUQNTW_*_JrXShnUm9y;P$2LUdE zPr{cF8#jk=&lX{ALKLb2)NZj>Zv2K447VFkGZsT$(g>|&Y=_J~^TuVf=%_t3gfgr; zYW*l8QoEqY9mC8cmiUM`N?UN)ke&$+?zrg~awnWEEm~tVLZ}{oNZkRlXaXAfQQ6q~IY#wX#wnvTCet8IO8b!Sl8Am20*kKg70mlBpjyPA&stmA=+vo*Y#=kmL8-um zp}G}`P-i9`&nekY9aa}ZJWG?Qc-JvOx^X^0oC2JHVh zY1+fkdWq28JkEam((X;T?(up$xVUA?-+`F@wsZgv_F}ajNWpW*=z0#H9ahA%18Y_p zFQj#&o=kg_vkHJ-(BZ@L;RMO+4KOHzD>W5QSG0`j{uAlPCryI2>!{Zj-0r+z4=fb1 
zlB)3mxp9;Yu$F=Wa+QC-3*!=_z#Ao`VOwG}*{b`JmNBqwAv+w7U6nctvNvSXB$`q{ zz(LDib(KoIB%FWEhd|zwHhgx^+54tx1CqJ{mpAN;h7S_TJ$1n94qC{4wZEezolLNQ z{>l_VK8BA(G^iRwl@=SW+T*Dq^n;F)m2&I(PO|U$j;OjJ;en;c(j80{VXwI?pFiFj zhv1h*4^_JsNj~>saPZPdTMO$FoYu^Hl(c5z(=D(61HqaNQo9V&8z@e}cZhe(M`N%|l1{hSgwa7b!h@z1u?-BDZjDD&=ff4(gD+hovv z@i+Jj=cNRh7z{edz z>5MIu7q^xHh|bpI!eVDc1bv(10u#XMxNvT`0E~8 z9e328SF2UqquHHU5361q0%>0C);-f=1Yv_b$MUYHc?3Zl9=oJ9lfm-|?+2TZh+W;# zc)lC_*F(E^K-UJs*IkPLMz^-|KZ80P?&h0L);++%xLu^TMh~9buJ*{qg*i}+uL zIrb;B{3zbH4C}CX0ZgxxnwG|`tO<+?tK&@0YpDbuV-JUSnCLN-5;hx~$Q$haEon*} zQi2W^lpoL7*3>}rM>p2Kq8Ezp*&Agp;N=s=v|0uwR_R#~WQ_kot*raxO55TGx5d{ac3=%!|hv8Gw(T-(;=mN1|gJ4<{4N#_NF;=5DnaMw?N&@$Oo-F$5@w6^Q~Lj4P8MeH^9X7*Tg1 z+>i~b+(`o35%ILJ#^;;8{Wto8RpbLQuJHMKp@mwxNE9FbPy#ECzPTI|=xCwqpxSLg z3XLz?m}aSMa`4>Aa!DdiJZ4RXdnlSPdV8D(hSOW~mJ%PV+h`%NaMJkvI={ZO#AQ)( z6`yt}p3cT*l&M@P9QkoH$Uu?(F=YZL`VnL#6&LBP#t{p%^aIyDD)at0KDvUY>q%>; zy~hC^iXIV>vZqHxa3d^%zygsi4NXYF)N{iwwZFb>o*{zCTSNH7alcMUIXmV#(s9nrQI*|Tabep3yxX2V3DWG=H^uK;&kz%p9u$OB-*3u%ZMW()Xz*Aq)Zw5 zTQ6G5xfAuv__#RG^hL_jbvo*S@_p`2XLLo1H(`v&^|aJ~h^1RgJ{}7*^TWyWpp$;k zF#Gn>D$ilEpA2}Xa&N5+*mF-zi1D8yqwh`W!>ddY{7QEw7QP{lX8yjP-Swq zZ2Ey;KX~4Y^DZCrqT?6;X>rek5dOZ#sx}l%&mG*Rn=B6oz#F~cdm+zw_)D*U)A6#2 zJ?TaLk2-kuScI23{4eC5b6DR>DaCKfDTv%_wOa2U=!YGEg=w<9jofSoi;h*fhnh`*q^q8D@sZEs5a%|Q?BqcNb5pF z5wjl%8Z^cu9krI0sA8c)=sfH53*TWc9Qa)mVPEYI>ZJwH>aom!m<0rp%Yk#>YW(6< z8eo$V2S;oka(+ZUkN1s*qYBM+1r0oxkX3%!(7QVHgYCjP+wF(z7g1#ODOW<1jB}$x zw8-6NH$Ec`xxJ$n-g@ueWdNW0P>@#15 z#CXvT;G8h04}(JRgGmv&zxZ60n{Iji^d#+Y>PP2zDAVo7d_U>5eYihcs_~+jc-qCZ zouKIS)%_UP?d9*-BD_33;P?95M|YzkhR7=iWX5W=fZlMLXKpDMch3M?c@h}42nN-v zjd9~VYw>WI*PEG3xK2qvr>Y5Rgl$y~gKmHS=B30rMUHL~9KGrOegq4y#9^z1^5raV z&*08YV}>E0BoJ19Y0sCqAcI<-qzF0e^B4>=%=(MK25uI|E8Dav9@uQOVXX!i!Mpr- zBzV!8`rBU4Rkg}oQ7SD^Kf7J|&Q~a91+bC|l~a@#Dn+iLLfpEYbwA~W;#6uz$^GwV zn>xp6OMIo;p7fky1ZW2$f7miulKQF`cY5M1_Q#8W_$KN1ETBYd3Mle>Cz-QkWx+*= zWL@~lf_iW}btueSXnG8=bIskqVzkwp0N6Svhm$WlSB=+2_I`@lr1$xCx01>AV%o;Z 
z`6S)DQ`m)|XoD#ksiZZv2Z2Z54p#UjPK%eMjYOB8_yNprVvAwPbC14lhd%qaD?{`g zP-k0FbgTrD`?6nqyWZlKM4ix*TTNOcgzqwKg?lEsCEYOzhlfDu1JRvQ(BwM9RG{q$ z1Nyead0SqpZ>r{quixY9*VTYwyN^_eDvv0zEjD7tt>aN}UQenG8ulOiqTi%qZG_%Y zZ}_|ohwsyxQ75nJKh;kc{W%jNm*ra$zPdS0H27ct;cdEbeaf-2^(s$tG9KevHT>%| zCKr7uo6sl^dO)HJolkozwVAhLxxmInA%A_PsM+BARbyv>66Hi|Thaq=Hg~dw_0nnH zOO_rLeA`LO>wK2pJ%O)y!=?St?c}rDXy>*)UXk6Xu=Kv)IkCM@`=}Ad0H6ussf-Va z#_a!c0TRguWUHPCJ?{-an*ZIW<)v%g^?0l+dP8`>R;Nez8x^h#JK%pidVEln>HfCy z_?hp$>LZ-rZ5ws5qN^ZWLw<7@=of(0@ig9a6fb4FbyT!NK1dMpcK;U|%yv!ICrq{N z@y_-6;tiXQ;~}^DC>i*5i+S@FNWz7g@HZ0Hi8c1AnK25WU@0AJPcJ%xc7A8I;bQB3P#y4!ua_c!moTXpsFAoa(9!Ym*V)d7givL2wEf7Rq&pH#L>^z zVX+wxRLK6p9uc&HK6tGyBU)g(6Et`GL;Tb5b@z8Yg8{lf8dxvz3H}uL?1tnIN#YN% zlzi0TSlT9fpK@HnGigZmzO(j3S_lZ!^sG9r`eVP2_WCD%*M~(8p+{8P9BT*Ot%nqC z-vZ{9@C;8HmFA$p$T$0f{)Y`z+ZDxLnp*ohelb z%R*zkpvZZ+6E!k(wkMfKJ#Xyv#mw==d(UaT@1TNF% zkR{Zz7y-u_X>|!jq3w~uI-Ol>N#Ma2C&P_wZ=zQn;9#z=aqhD6z(lKs>Jj#b9)ziE79VwIX2nth~9>HN3# z=TT&&@nFf%7b0h?jpmib%vxjpMPsAWACF^Vll>pByU%MK=ZW74`vCEm(gS$2Gy17b zd%xcv24R82Wz&4Ckl{5W;kr)vPCW2yoYO*syXk5{^XdlIrpt+ajpf4fxB=(=&@QG| z?2lD<6;NR!k@CUpJe(tnt_t%Z%ZKO((nqu6u(Jb`ukQ6^BetDE zx9J=?DMCT9QS!k~Fe#V}rLP#Hy2u;S9z?+C7Tt$~-g)N${T(uk!cDl~<VR4Kbk}|qZjIZ{S80}C#TBIb z@E};14(@lW3QC4)D}$Q>qDTGj-WaV$<>ckrve(oiog2Eg!Wb8@nfK=+zQbxb$m;rn zcTSLYyEB*Jv`-j|WN=Xa*^=A&D%6Xsxz`csZLiZ6TvthRz*xD{u)@ALry960#bSSd zCG?62M7#Qef^NLjLK@^jRb4Q4;i;C-gR_4skFSy68H-1RihEM^ncYEN`|6F*&HzUgBd7%4~K1cIXbR=V|rj<-BfEu4c5dSR zp7JG*rgzTy8_^IkMa&j9qPX!R6iu0SYQ!s2ZiQk{d6N*@y~4wtg4<5+}P5mqv3 z*l!SV8oSo6W!gr4Zd&-Nf3~YZ$DDl?T5hxhVmdK?azF&zwpc^ttv09HdkRNT%vLU} zI7t+wPN)hJuog6l+MLGjpqatGs%E%G5?$s*g0MRxRaQOIO`>&iRH=X zZO`$Js`5i$A6a-v@H~(5W+ig(ZS@-0!)|y_p}P-4w9w#R%=`{SV5oB_GSU5&GQ_xb zuiE`FWnu1`dxrgK+oOL~cYmXjQKgRFDrT5oM6l)}dN&*xT7Vljrjcp3ORT7i!ZL{q z4pdJBFBf)Bt}+IBcXrl(2Dm!3oPXN($rAT^-j3_|t~s?HW_9Uu|C7d|jyFfE z&Yy$+RUdO9ANWB0H&~<|NWEQMB@n*hO?tn642T@~;Dgz2cD!t7e%#7FJ^`J~R6BOb zmlzlsZO8jH8vzIUNS{eHtucarV+?||sO@R=x~oGizX57&Bb-b(9Wwt& ze_G*Dl7e_< 
zLTTf^e)jXkLoIR&3nfqcgzKj5ARW_U)64QIY;?B2sL8!7(Y8hZPl(JWc!Pg0%f6zXpSn!y(V$3=M~MJGCw zW_+2}3W}%ea^X&sMW_tSYdyD>*w`^9d468(PV)<8HHGSr|*Y_s)UBG!x6{{7$Uu0Dwdb_7X*EX582g*Zg0UQ>olyFRV z+6oUF13&++Mibf)K3~gI2#<6=^|C*xrmr|Z2a&|8-Q+K_AXzODiHd>}CONHmO+*xB zsUApX{vj$_+p%K4W-llrz&&O9JB7pAWwzWvf|G&4C#*a{Oz`1NBdNcAIx|q8Bjg@+ z7%S`UPR2R<{uuYU%Ez~qw{g-})gXAf|E?W!fN55UnxCf?3X}9C8p}Q7=H6pCs!*`Z zCzM!RqLa}i`+EFFUo}eJ+g8{v=7k8B=?vZKd>&uzOFM)0bAq7RxMjYlG7bCGVw^T6 z#}WpSD;ky2v+W@epe0OCOwR%&m2Y%hA1D6;zYoRH-kp6bIvz75>Oj$j_a#4nGeI`r zP6lr&sb`0b4jDjc!63|j>2u?_m!Ka2*b864T!(80w-0om;B?*^eYllYdt7@x)&nrx z&z)SSPjni!3M;Vz$D)laTzlQ~^@=ha0*Jd2>Nt*FxlVHPwKYL^*tg_HMk+`2Z??B) z(v9%$ZZcya<-Kns?oC7I=4hU;RF}lbQ7D3R)xVfCfR@SH*oF0p<4QR-*hbkhL=VLg zNw0}Tal4^tmBJ9Z<#I}QvH0j#2}zm+t>@>i<9dT?53jXF?i>FbLR|VS)rFJJl@C0+jYs_F*ZF@$#q>TtF8c zdaQXkaDBRp-+f1K#38g#muBSY;Smp{V%;ta1S#u6ef0L(K_3S&7qxY{hH_^Ke{xrw z?o>}jCy@Dg*LWEkqQduUW!~^Ecnh7+1&dG>7SSjEA<}=aOM{WiUaE5gY4vIBi~F0W zcs(3OtJQjwyaG4iDNCQNCIv7Vl~V&08x(y;<=r(K`fLVWiLv9fI$Ry9!uLW@T2uH$ zk0ikmN03n;HiL1KdQv6FPCsUnZ6qyM%{GK)L|qID2r|VWC(=uzN)nhoX7_!`-#^pK zUc$0gql9IcDnvl2gDex9A0sxAaXz+~Wyud|i$uhi31yl(#z-EV2wTjCfX9>$&f3bS zk1*;Y0oy0FKT7#@6o) z5O)RgYJkZ}jM1|!%$&spPOcD;`fb~}E25Ls-gvFL*xZJv*G_B!Bp4phw0*qc@qJCV zmKy`)x=#xvYJdQ{0&cqN9e*2C6|Xlu*BRp1^dFwL)<6HjOZ7(cuw>jQcl@93e@8D1 zW@QJzzg!?)@UFj1Ank|@;>F;vxK!4=0Vx!zQ0LY%>eyx~I&t^Mx=w zRa)+xzZ7$~n%wfcbU4W$PIM_@g1>3tN$-sO_{UE2FsK@G-f86(CNchQHt&-jf=LS| zFB~M%c*XS|$c6ZW_Y~8IhG@71a1y3U?T|frf=za*HHyEexe-T70uEAkFN>W6rEWZc ziY(g3Noego6w;QNw8eltjk2Q{YmIxa$Bt;j@9FLl+?-uSiX%lUyxHwxau;{*#9S_D zoYC_qOKs+}EkC5{QHmEWmfGw{2acV<1Uh#^Z`838Yq)?+vs9pS8R^qA1|;)rg-98V z;RD0zi;tRGv#__pD=bL1I$K;wm~lY9%Xt1E8hS>QqXr)6HY=j#IIJL*4DA>f%=B(VwZZP(-(E9+#nnvM4wz^60NjNZv%7Xc4u7ZIfJ}|R zz&h^xr7&3=hE{Z4n4UdiYl%n=Yp(t*W=1+pwt#_MR`((e8fQO!(Rp1xsH|2#L6!9y zGqio;JlMPv!-oQEI*q^B+M{>Z#`tY&RIV6HQLh<6A$@fB{8vo3-xg9Dn)DQ^dn{lRywWKhGTQ85N}@Qz+1_#moGt0wPVU zB>vvqw)lsd8wu}eB{JFmK**U2hS;J8#zQR-h4a$ZT3=zD#%}^NW1a6Ph%>zs4v>T+ 
z!}jBa7sK!v(>})b6n@S7DeYv1hLS~lfi4L5tD0k|GSUbn2}aGnK42Vy&r7lp9)-4x zfi7zMWaZbjbs00bi5q5w_hH%f6Bk0`Ek9-J{PnHv#UJ_dEEC`|yez7ooSKy$N1OpT z<%I~i3LwRS+%C?LAGmCK_MUhGP0&R}Cib5iB9;9BvJJ%JOkUj(71GFz)KEp_UN7Zc zGn`m3jLMv8txi)YxS%5+%7Jir^Y^VtRP+`uotBR(nYLx&V>!uIit)&K!nh6t zc1mQWIU(9)58{;%gVHQP?O5PL3{dWrCs9ixP~UT}O}MF|QA0E$-&8_qOV3!1et2Sy zNGYMqyAHBtl;j-?M5tEXSm1l8i}!GC9~}$tM^iuOGj*8A503w3jMbkN7g+etp4Y6J zvvLG9cPZBy%Zm5K z>a|Mu;|~3gh1U)^Kw7BEbo;7fe_8`3_^AOY%WxIG$D9`7^>Z#I1T(fPQ7ys7#UYkJ94sErac$)87G)n{BNu64e4@W_GY}6eD zqm^10l{pP`!|t@BlgLKZVGThGf_xas6Llp{j9QbSEf1w=I~41|64j}p9D_MR-}kIk zD^oeq9|4aW2_3YLoo^%h8$Lvy!q_Ooh#4O9=YdF-P3K~pBrBWSobU2# zE9muxheMuAWc&T_m=BjtT=F@xpcja%d9hi=uH_OjX%Idr4J?ZQ^hC9o)HL&N!;2yw zg;6y%18o=w*bxCa6HEv$?$&6Suv8Q}uE!j8;W#3td@u!SPqFc|8Q_Vzt|U>;TSACG z${cJ3;+p70izYr&J+p!KugJbV62cf!w7Kv|t+uuzIrp`Ohmit7JTQp!JSA<2)lckY zY+=@jjA&fL2Legsy}_WBviH`SD->g2zWVIS;=^CwP#T_5B~arHjxA1ZyU^Z zZ=`k9@LYj*m0qzKodxTa%Q%OL?*Bv6H%C?4et$Rl z$*#$^n{3zAWaBhxnlRa#Y}dGMO)aIVolq1sb@c!_^lQnm1x3* zRMJ*?{Zxj0iqcD@x73oJgx$1+y=w)e9u_N*M+;$Rm`$OD?&{Rt)0UzDWcHQINkzfi z#z~D9dIbq$F-rXAAw_^A`GtK!;A>n=%k#OJW2a}g7h4oUm59hk|zYBU{eXqaY+HUNvR$|e15n}rM4lpsTu~z+3e#Te_ zk-V|%?RqV}lrA95hjlwmB{Gj-=C1!Iieb@!pLR8ETD&M;yc!toRWAAN*}lAXE@((D z)&|g0i9J;-wgPXpj*sulH{$1;ydI$cqVxlREXHlj1|UJ zyJfZEXpJ+jZaWL53@Jqzy|!@O$U=70^^zsGb4l@s1>vlg#p6+R4JE&_;4`v;EaNb- z>!pQW-k3$SL;+BsBR6%?Y}oFa=o|vCG2&Dw_7LeL-;{pFcy&8PM;rZnCW85_qV z-TST}b*ErzoB}5GmC6v`*7L>B@lIEe2PwJyr3&`*;K<0j^Y_)vFY3f(F?;F}oF}2f zg{!BAi{sVhXJ#@U z=v@{4aajcl2juli07P<(so3xPa?z&i^t$@{6|S=FxM;5-UYH9{`%E_4+mYP<>BKa7 zJFv=gUy0;B3dp!|sqDmuuw4OCzUzNK;l5sxy!OZtFmJSCn`L+~1L1y(ZU_MTNV4=J&o+yP7=9nM%sYvV1(|0 z=(UITZRE#vAXR>{HOLSEfR|~qBk~QvkU$7YF9Yb+UC;5UWeJFaKrFNIj@83aa{``t zNLG$IM>@aN3TK`iRYB9_nms(O9e+rK7m)!}dK0^AMy2xDF?n*cevHX_GBGqVXXI#W zf@)Za&h<~d66~d?9rf6c8rkfL;|BP?K;R`e0W9MqTBQ~@L_i%8YsF|7Kek{2x+ZIP z_>HTY)YoRiZTHkBVsbFqXH`p#%U?gQdRm;G`L`FD!X(z9{k%W;_>qU`)ptf)E5d|NU{y=`r-C3CUMt)*uWGV1-$7@xUQF*PM#8a~!8MN!s~ 
zL;@9?S`iI*d1qr&p`LyXTBPJiUEVglJk%JI=Wr85uH#tUj@frl9f)VE1N3A!yoR&< zx7^D&5ndo3>>CM??zua`CeJk_{hTd9$@OLT)3pBFTyGc0*UOpnhneHMYCC&-g{z4B zn&{HwRFi0*UuU=OM|bW^th_B-^9sAh#z)d^t?R@mnapVufrw^+mjzmTuXgw+Q*Qh} zECttig4&`Xqt&RC?L_f)>oh;VS#8ZRbRIq9!f`IUZ-*C88FG>7y(U(Ury~e^Qk_D) zL-~!-$(6!!x}GEOKg6G&->dxxn6;I7hM4Wci2O=Yp#Jn}I3<+k`9A^Ewl&XD?=W{l9 zBVuL0ye`LAh95aSMa)#-mZC?XzkTAX#4(P94S`<=DPD{lQaUwQD?Trlfy#4f5wIj5 zDHAq;`@T=w%=Pg0%l&#YdV|~2;<+;$X6C(n1@-xT{xA$Yc}5=XK?AL!Z#a}v)uM7Z zdcl$qWVPiF*t>6tidSk32mp!HU0EV?SS*ikkP^?CF_;D(b})Wb%3A`u$;dUeObBcY->~ zdynF3I9L!#b#AWtY?R>&_*qirb3J1l&l-eJ)xkhY=xNgdn~}dOm@If}^WSE-W#JEw zi{^6$PjElSRS_nrB?kaF*fmgr8hw7VzwkqZR+b6;(n6NaXrM6uQGLXyte1Brks>zK zi|%9f#j*ApR*bA>e1c4Yw88AVw#;i+ZRe}NbqN%q#cY);E4$PyEhCN9qg3L z^YzwlfdH}XLSK@yp0O3Kb@nYQ{Y{IMC8iVMBzT4IS?o@Zn>ICHxjMm$+)?)H6=R%$ zX=OHjRldDGMne`08X25~{0^FgEa5TemY&hpFf6NVf`>CBkohWrbxL`y7M zo%#@)7rq?7%$}DQhZHLN8vhcb+M{FGm4(#Ru}an4r?1-gcW|@yj^mpb%odWq0FpcF zv0Zf9u%blV(u&)Y7fUWBY52b8!Sc!rQO~a*Lt~YZ@!}C8l9HAs)x}u2p;Hkwn|yTl zObTu7y*Q4jphvL{ zpx7ti`e~pR;v=zTlmsW^T?$7B#cG8bn#_ukG=+{ZWFFb< zE5<@>D-wCFmu9p}OXOP3!6$quU`~~oOM`i80&UgW3^x1!S^yiX^)m=%cvE-|sX;ny zgI0DvVq!_Xv}(1`eDD!oN)NhkgtLCoT@DfqhwSb{5s>j)h34BIOB|7kiy}LX11;)KN+7az~k61i8x-wXF^~GQf-Lp5JGb#&{ytEtM<+DhS9x zZBI0xe=bKBZoQ`AdH92vRF4cmZ_j0zc9R2U|9h_woSof*_CM?-J9*0(Pr?1J{1Zf% z%bFd)0m2*-3VH2>I)GfJ-(PmWv3PubKX9AsGkoa!=zamz$0PzTR_zxFst^!Wf=?!% zH-G95BGj%YqdlP?gux>J*t@Lv$03?aujg=s{>`qZ2~zTm>#OKp5)jx=+v^xOCgO$e z*#r)hqkB^?UEzqIbKt1%JFWvTDgDM*6@-Zne{VrK+b9hY#N|Q%z7!PNzOCp_>x-JRG7p9_rb)0O z^$CAWBTo9FM^D(!J+sDOL?ARA8t~#Yq9nUo_l)h#-G|&>Z$P0Z^8_PJc znyIuz-owu?Ug!ISGkPXM!8Zq%sgjrX?$_F@tZX$o!>a&>3VZ<5UK=^fI-w=N%U*nO ziIfs2uL2akj5U@&q{xdRlJ_%$wc6Mih98&ksA&e{pRPBIEq>s?v4{NRTffNgfCcVx z6hqH4Q;#>HqM~9^_|jPFb^|b(@*yGcw0=LlttV_IBGA<`xr>Mp6a{@|#t4D0`1_q_ zxv*QMNMbZ3W$Gc{4hOcQ^P0%<&t4^QfsdUg1(5F2ktsIO4qOva`7UM*&xq5 zO4V|vt^7XBWN&`j14m+?EDi634u5T-*XoH z8<`v1m`2f) z9AfL@eAVviZ-v8OC-5h@t=>6BAjHvX6X=nVQC4h(A-W|upK*XG+W~YR*=s=LfCRBM 
z$$v4pfQVH+X@;M?d#MAYuthnyQ7ES+4GIn;)yk=bDEEz(r0Ww;)5{zxAU9 z=f7vj5mkohSx9=uU z-^Zmfz=*d8%AB`(F?_ODNgscK_${o${+d4?`|zJnadRYEeq>D2Y~7z97y5iSe4OrB zOJVu}E{(l7`QHqA!-!x6RR}+v6ej<&V>3O-ns$kv)ysFTTZS}JLE?3euBnQ<*y#O9 zB!x5iB_|=}W6Z)a<9F66-CL|{>>4v(9bx3=!h-OABFa)kN1s7J_eJ}w6`_5|Tpp<0 zaBi<4!lQgWOS5A_isE|ZI09SJcf>AIWA5DfP<@lt7ZL6C_Npk%?Lc>v_RERWvfNe+ zfZtgIR1tYDAY1wiV`s_7e4M|i(H|xF71~FJ(6-~$`R97 zeAphDyiVNU_)G<-Y4prnrDBvU2bhqhfxkcb{=&b2WKQEJixlSxjv=MJRq0)0F$SAV zYY@juOr2qI=!#J6A~F%wWKLm8Tg1+Hir{T19jtK;q6BYD?0%$w3sh;`wBNKp8rl<* zTl?KHcu(WLPAAGZnr`AX;u2M~!_LXf($)S4EG1DB^F?wDhM_w(8l2DjH$@#?=*P)T zpM9DJE87O48v)PZG;3+XBgx9{_UIZq6KHBK1tiuj)NL~Y0{WP!c0!PU)m;d2vZNpEngp$P( zqE%cU^+l+7FV@D4_TEsCn6$i|NvX$8VOz&=jmA zDwEejc`u_|(3n0I$Kl+&VqUF5=X_wMDF!5Q&l|OpLEH$D^6?7s`88w5CERSaT1v8= zep)Hoo;+0D)cgJ};BfevArL_Op#fQ!g4vXsP1}0W!_Ov;-*%Z^6Y>`Dm zT%sS1SQq2W>BIg|`qZf>;kMMTd&ywzZboM>4|;n#gpS?&`pSQ@jlL@DW*8E$P3^Ij zgYzcqzF>v-rP@XS=End`6<3wN-7aez-A=2lhdy7v?E!Dpxa{8RHjV7UMTHdAt~Hyz zIKP4qg@=G4N6hs_-etu9LVaeXuM?yV11P>py>I^jBi_XIHE1`qEVoaYYj z|FgenQSRBam`|Mmp`Tj3fChMMa-;r-4)51F_V0KG$zj4}D{)VfcH6fMdd9t6d@t1) z=iK%&Ne^5>R9|5ZI)Bx=o0^F0Z(upI1w3q#sWtL;s^D=1C%`xyRl*y=mMcyB`qDGP zJ~`A1*fLh-_m8u|4&d6->ct}HH2;Fz{-_wFLMI#f>xXT~V_cicuOg+Ne)v>&I}*9$ zN>@{38>l8~8(Y>m&K?N$M>z%>f>x=49Kl!qZtq(GUZ3?)Pki6-;Ex@zkgxsr=bDV> z7sk5OZ!cdSWzz)H!iy&3&N0}W9W(2?I$L{Q&n&hl|MQ)?NPG(rKI{-ybhy-+4>Ov4 z2fy4r38l+Hn5MvR^L`xQAKe%;%g>hN=CKZWNS~iYLD}m?opB5Z?68 zLvrBykGc?l0ZBtZJRJ#;JB{;xDhV@e)^Z}1KeZf;z` zh9Qbo1`%UpEo<$K(p?zCRhlbtv(_eVO=~m%*WM2PQtkTkX;;U{b}fm|-?L zudnRV%Zpe5n`NNcr#JEwC7ALSV+yg{JrXLLm(k$-UvQ zo2W_{d)*W#8>$D8ml9uQDj3S_P+IHE=d!CNW*a_zoIx<6}Z`SAOTGx)A6Jyz2I(UP_`)+m*O%I8P{YM6pTqbfnnWl=D zY2rpqB(4q)+j|kE<@3zw>VL;_R|ZiEakdvx*c78(4him^ny!RFM0kn7zAQ?+SjE2; zFp)VsU&cLa$$C7U#nyri!3%Ggm0?fUpDZ1lq#hk5w|CZfF`U#+zNH98kiFpgv$|lD zMHDZ-A$d$DP+WH-hv`dQ8ZRfo^4_bXX0>78E(1u|g(HBI$MxmpZ*^pIjy_pkvZY*7 zLw7Z&?Au)Et+kx(LWIW&*@D3iQiInd#PBQ|K4#R}biSd#Gv>!i)AY#&2y6Heys*Z1iyJa=>vEizGc 
zkzQbM2%w#*cd&^K7}*Vks?v+AB^JO~ON+|eHJsr@k;X6_VhW=|z$)WO_P%zFd9Bv( znJL&4{s#E0KmNai_hQUq&I?u6Wi z-msRLS;=)J6qkDhU&0E_AT_QqU0d%YCPdukg5(&tS&)*0=aDSTShs`Z5mpA8d9ZW2 z_~#uL&7gVu_72P@jY2NE67&y{1SRN?|F2cW8mh)ad8PhLQevX&avw zZVqX(G@D4SSU2&x9ug?%$2M}XCqXr>d%_7_XDR-XS*=Lr+PE9sk0gz+dDxkTv$##t zZDi^yv3!}w^1f`|1s_(qV*@ePPUN=cfP0|bF8`HPr=kTFbZ$I~>aQO?-q2{lg%4=e z&6whC2|b!M+J3EaX;TOug@_;0W&$!({2CQF3E!>aL(BuY5OlSPqi@b;N22?t@$mSQ z|22hvjBTV3X=|uI>}+~nD5-M$4}L=T@8cLn+(N^K$xuQH!ABmX8SF2i6#ZrdGRg~; zCCpPqo}mou#2doBEA}gyHpH@p_)PwlKPL~8ok5&?opbH#Y?H*J%J9x#B?}X3)U|;&xWPU*L-_x+i0{9W!`B?vbRI z02^#8YP^FcSO}b$$s`X98Q`WswGISMt}}hTJ5)Qco386(;P0RILPCm%wojkwi%r-C zGO;bPA&)uqgg4TM@q6xEo-@pPi)ozZr%aPQ{{-A0?39YD@fDA$ao;D!NDd%)AZs5v z*Eqd6JGyrObUfDY&ZFE({c)En;FV7MDc)a_@%XXHZm1guM)qT8kCc?3`7?ySbvCNM zM5t8%1sO_$cR+ePm@HF1LShm6IL^(a?zW@TBUw)!iYZ>$(i50gGwUr zx{TEsV`e7!3IS7k?4QTq`E7o7o~Cv_w-syc2P(ce-Tcd1d{}fiGz$pi>df=m&7c<> z#v@%e(l(4gZRyeKD2ULXL4Ih^akwjn6T2Y2BkgqSD)v=Rz(&c3@>Jl5+mG6Uc=u9K zu)<1g`BZ87h*lMzmE{b(CJ^@iihOkU@{@Vzlc{DeR7z&Laen>6&+kTgksA5`X_}Dr zk7DRH+H4v->B`=|*pFJVMSp?yH{53;%G4IT_g*;FoWzy<*u4(h#HNYSpT33yvCvq+>T=%9O8iY0gY>2*6W_2hO>6k z;~RI!f5MN)ik~2)3B7qhSG~A9DHKuNUciE14pr3*=hZdBi-&~bvzpcl%1+N3cr8@k zGi3%XO#hy0LdP~iq+kXb_A7Y5?Rt*qTpzd9^_M+ zhvugY#n)upQeR$m;&BSE%h0xh>kewd<6nCt4fvvVujc%mnuyzFfz8l0eddTz%NHHe z%0H!+Y90$usQ51px6OzkF;7F=%%+&8`w!xxVeaqpLK zGz$o8iUmED=TEf2YrG;>wt$x>%H`%&rVxk0W0uh`Ftir>D3%5QC&i`wg-YX*qdZm^-M< zGBy|he5ZIwK}V>t9_<$$JrFmU)wpL6Kx9SExX3UKOBL$IX~!d4)vN`AWsRXdMgxdUUN70kMM(_%TJjWf-&|EK4fHgfN%!S+@5t0#LAm}T3& zf7OJtc?x%_a=Y{jS^yX)S%Jf+DQF1Ki#3NDCsc&?s^^Ud+YRo{PY^uUHPHhwBv0_~ zTL4!mj>1EV-VD(_(X`n)bkY!qhZOuc{29Q5Q9RbjcpkUmm>5V9HBDImAOW81KaebY z9iVLP9i%oC>d+thdT0z0`cE{mm@u4+aMyg!sVwazVZ*!sBEMHF4~&tMy`u!j5UhAenv|8t8zD1N&EDBzTxAz0 zd9poOdI{({H_|L@z|nwKDq6gOb4%9R&yet`hzibaP-sk(g`AQQ8EvlzZCCzqPkgN_ zCcl{gtF0<#IzN>(PiWHE=+}rJnO2&S&vDe>ZTBrLrGm&iB0ny0Cb49RYx*duV#Jjn zu4=AO>5{2c&HRr_$WpUz(>+*oo#x4Kh6Kg~$M1CLc?CC;ECpyUK4p;lRRyp;sxi%` 
zSjnmGSE&t2f?Lg=C@REu8Rc?F^PP*Di+psv=#f3M9^>btS39<}WDnFU=QVYWdOdTR z9vC->h-c2T5svFQntk7wOlik3fXaOMA>o6ED!wZT`FiQqTw#DJ zH(fMm+P-{0-vsxaR>47J+?HW#Hgf@Iz4VOsk(wCaA1s^$4Y=qo&D;aUjQ8}GB}3(B zQaH(dB^rk~Z9|l(&{G(a6TwCC zM_0IN4rJVp2FqTx=MYaH)yst0L9Nci=M(QpNK|qq?Cg_I2NQSO;{nEsZC?&#ag*+a zC%!)D$cXMxc=aYdtBg8_8Mt_QtIN|YtB$+`UMPRCu}rNKwHL^x=L?f!$eKqV$(ldc zt;BEf>mU`XhCQUp*e$L^-+n1%D#1>ycl6|H3~q0fu94#dSrAAsNTZ)Edz2(tB2N5( zij+EPcnVa&u<&;26@R>u%Jp5ob zFSp7B3P3iJPaf?|I{aECPZ2heKTi2y8s@42X^DD^wTSI$@&nmt&DyZ7FE!uhiAM=q zU7K~F^dA)pqCWZUh$fr#gVD8!WYXf@s|2Z<}3U#dH2o=wpOs8!r8K{p`Z`_Ls|dWQWy5 zjaK}i4njq~lPlMSQxGh~((OXkj#u{UUhU0_&8`%L;WrL`VPURwMelb9?~iIYVOOga zj;`xOpiC=79^U4%9kAXPN!o3f-BM|A{SBv#c^GHT&z9rwcUkdJW=<=!p3jg=-X>1w ziFQ;meS^FepfR&}1{1Vw###BT%z?jQ7y{ZS21bdA3%Q8p;qJ% z0a^ApYiJY8)UU$1^K__epnKwB2RS-89e?g*+BCeU2P;C+%8@s_k|N{AbWSol z8xZTzWpih{qDTf8x8kAP`5XszqJxjMDn4o$6jl@Fu%&|cZPJMhzZh1n%=Hfq{@j7W zOD#?88(JfhpF;XgQV#~`3p`og8lCdXFf6aa4#!`b+HZB5&nz8En*#n4rwY{>rkVBJ zx3oM9`ca9j28E~kn9{xIM;g5*Jp}rZ$Uog2Zlv zAPpJYu!aIWQUbZxCnzeBWpu2s5KSrJ{bHXFcSlcAVUN!DFT8^HV>*t$gh9@6sD=v| zvf^pr3qNh&J|yfyMS`_H!Nmr`-O7(6pRX-pBQUT;cZ~w2T#Ys zaziaR*ExPrbRV$`ywBFRz1mApGwmmVJs+sYw{PJAc<#ecB*T{tn#qKlPur+h%MO#; z8eV@9{w06`ksdx!7r_r+wGzC|vC_5e*e`@nCQKrvL8==bWEw;m#OfaN->v5n(*t1KGsf zNn(E`hQ#NY@C|N6|9r9ib5jIAfRPmYt5SB7n98XaqQtf;mwNmgAt^C@5*zf-?0%{@ zESFEc{D&r}QXXVwZ`hc+7rP~KE{-AG$tO*+ZVtKYRFy2)XU)j%UcGY?_e5&p7GJe> zyD$kNVT;HO_!;HKnHZ-CS`Yeg5qUe*E(qa0-#4OKo?!0tI5;^Pj&imA4PAOAn8&(% z*G^s~7qpLq{S;@5c zQ?CYope2~9jd0)@nG5=Fkplb}salYcl^&W_^OvUnK+@#nfpgMuC(^$LpBe|*NB+~4 zF-rX!Cb?MLvMD()EI0&Yq7NjJ5S*{sm>;SiXhm`-7o4i$?15s7#EfcB-HyCw zv-r&T{TfJWhj}%X3s|I&@*>9q6SZ*Rbk_b3hTfBAEXP7(A96@@YAX`JD@CC&#>O_K0NVUHr=MaYUcYz zw|}TM9|!6F->F*Rj7ai0iii~6sSTeo7vAb(UA?MsK4j) zAK&=j-!`vqR;|kmo`5xT+#qo4V#Slw6p!9ZUG{bWkVNqJe0An0)?<@mC*niPr8k2< z6VNC_9X-GuJ!o!{rOtXd;+(}CRf?SdXtC!wU*OD*z@DoJfRhcOIOO;hvay5vt}b2RjHG)!(%+EYC$Q}Vtycz%BtR8GR8@(jv_cB6qmCh zH%z|0LuDuUYoDGwU|T~Bo8&`PYB@`6{XrgdmXh%!G8700? 
z0!gcH^v1Oi;!UD}o8)7EDy0W!ehp6QRA?xd$Hv9|bBgYgowBL&u#i;MCdpVAe+XQ% zp|(-3?i>G(7ub5%;J|mG0Rr{Awc)4rqvMWs;NCQHbXE5^RP2^u<_`fiHJ!!ZwGj}D z##%4g%?HvOQ4p4*f2Ecjpj}LZza|<&jma(GKEAo$x$BkeThlx#?4Y79vwf!LM;BI; zU)HmV={S7M6_VS{vlP}(DCUDW^OB&$>vzgYj#7rz9N>vALsz_t8}6nYup_rBt!l$N z;XJvn`KY^XNYbY@co2SE2FGarB4goikmB{+2^_HdEgL+hLUyZ(*1Bg{d8GdNN>6N< z>aR}OS;^nln?iCmMEo7U`aKvxx`mxjL#7=Qvq+vjPbIM9gl+`pubP!n6lnr|`)Jk5 z^DrJJCsi*<-Y{N0W8~u;yy1~osBdB5>mY6flst`At3eg3O~$A9=b@t=>lQWBrdsY+ z>y5|`h=^K8&TN~<-+?b{oSFc%v4%5K2Bg>zjs$Zq}DRU}0cc)Mc?z4uH3N4CYx={-35_uJI}b-YzVXQb^f{vQyB zgcobIFhg%MB-6ea!#;2-p6AnccnlI@Yz-Ec{IIdH{qRc-t-NyY?{{v{(|f zeFYzXOSj#VrLj9I68?Qd$Fp-Zbm`bBhf#Go&`SmsgK+~f8jKH+^k@GwSi%L7zRO}R z()m7ssVpvr9#OS`S7H0VK$Z#dPRk??6UB{jikmTFCV}E9^Fg(w?JadYGGzss&+>*q zS!lENFy;l0k02~c3wW8WaV%tuVGexQ=inr+VO(Sk(Ad**eM|R>_{elH{y42S=du@N zn?d`*C*H0P4(7qD*G)~p6ek`DP*zlT7BTVkJQCtjX@ky6d%Wns3Ku7gbVt2=5(r)J zV+cE%7$`^65(TSZrQs@~7E*8MWw<&h6P0YGhNIp&yZg(&Ejm?%Ib1)4>+7GVWThHy z|98{MzXg$Fi-LJhI!)U#1PmQO$N>Pn-D^NX%&Eq|W<6d%DZnAFGyga|{0GS0K6?Q- z^tpxuZiD?@4&zw0^bDfm$2e50VZ*@i|L?=`FD-rp%rjxgBSx)|rQ2dSe9JtPjv+tdne!|_afY(9&IJd^h>Z2! 
zR2-8x_t3_QqcrrGM$_$sW#B*hZaH zO_$>MG>kh%~_p!K>K@XCE+ER zQUxeyHO`&ZaTjClV`zda4=Q->MWPeMKX8tG>n6sw4LwjP*#;ft84Z5ujMn>JIj<+E;neLd ze2nQkZVH;}oHqYu*?mS*yds&DJ{K zwKok$hg|li5sR-WqSUY$e1{gYY>Wiw-hAdUBV#%g=!FsZMe;Ruc4_}$HXA$K151Gn zcAi8*P6Zv58(UC?Wm)P9`pII#yLEOO<~8snOLxB1#ZcX8V}El)Z#z+Lz!oT9)oV9@ z*PB7z$ym9dKg77c-Y%Wo_diJl`|;`}6l2Yv)Tdu(=5$b)prZkcy*de|!~EiUY(qTE z%w`A}y^#4ic|$`r!i6m_JhhT!%^K60Wu%F`d&zO*;>LZSC?SWu))yK`xC*#a`o(jmg-Wj!O#p^E?TVB zE#H5#;$pZMs3F?&gZ8&`JBcAM-wkVK4+b5n!UG?& zDp$^N(jp^MGBf*Zwel7(cQ>}t02ufuC+^41x^=^}MFTMBDJ2n`YF``CMJF!PBAm6^ zQM9fz17Kee+b?CTpIrDUKbvN(zHB9OCo5lZtybU+(d&-`i@E)7ETg~~?7VEsWN=cs z%T{I2k&9RNa8Tn+GDtI0`Q76|sgl`1=Tr_w;`UJtzB#o49a}JlHGSNb@?3u|j(#OF zWBnuLdFMA~JHu;W@W1yuw*`3~MyFOE#;RAZZVw3q9V#CQ6?l(X>?WEzqY;_gkQ6;f zLo9#sp(Wi;(DB$de-ZTMi;^-h zre)~2twd^%Mg#WnJD#Ysca5K^rXY`0tP^sG~>}H zo?&iYO?@{vGIAkwPg?TLtDpP{Uw)MB=xN+y?{=Ur*BwYUKUdqR4io0GFA*RS4^Ku;NV5|u-JW=*W12_&Gj-BK2PVz+`4lW z&k0433GSoLQmALEE>XO_?Ftp7GC`3pVx3f0sAEM!0w75vgI|;ZL(>Cc? zypH3pA$|IOp9A|Mt0Hkr_3k<@w||kmk}|&*xeD@MG-U;-D8Xvz^*k{8-nmLX@U0$=`WUGnK`hmDRksRGxS*}O%uFwzfdwT0xWv`XM zBgu46x-lKD1`}IsMg~JpgPH^FW|wPEbSj1*LZnlu!$-TLB4fQkxnlv9t!wKxBC4$D z@rikaqOrPjPG)+m8Pnnfa#~cXM97v8^5f)A~Sg=md2vmdh->a$JbYj^0j-+zY$m_>)N4 zL+`3;4;yCHwZxj`wdruF?TPK#lDmu<<;ErhQO#|Pq~%-F_TD^Y+648%k3(fCL#g7$ z0X2T`njCMyGDrd}3NU6%teisfQ$}l#Dv!32p&E#I@tgl%4KzDrGyzRaRuG_=UuKOg z3Kd5#N8+MzfoKz3KgZ$IvJx*}u;{_Id{Z1->B&pqGohq~(ndm0UJ}VS4ndGvuc!b3 zzd4!4($N5kPpLV6$QLC_vJqU3nH`l;_3TNqQn~jK6ey7SbbEP^J;@L<-g;&!r`*S4 zFd?vbO`A1_V~Vlf;`@E4kW|Y)xW}-FVG_&A$G4-cV!7_TdUUROUJIm(5<-bt->zjN z?Lq0KH3+a)pOG#3^wAXH5^COhB$)3QnbaIcjL|gf>|x0^9ISjOI%yW%I-7F>b+rgj zc*}9!+?7dhHj;aE8K6cr_+1I;(4!dXm@@~aPA?D=y}wMg_N~#|Kqu4+=LOQWMxC)q zUd`~Pd_+H(LwL=ao`6dZQAz>`^SqVd0x&-g+e!QjHUx%hsmr9Kxr9~L3dBN33A}#l z6xs4=3hLouY_PAwJABchRu?sZmtj z%xD2B1QFt{rJIWHd-T9s9KFYFLIcO}abE{RM@;AR*lcty-> z#dm%iyXQ0b6?5p9+`n#=xddaCP^UvtV=DeK{F9mt0WrF}(_O@{FfTO%gNkLRf5J%1 z2pfgg>py2RStn_|aS6#5)%E=af;5ATQqF)h+=Q&dDu*0PdetdEEMZ!ESjc)tc6HszyvyLWmLP4LOzmMc|`9g4PKi)F51C4 
zB&4RHVP#uxgaII0fKUVouVC;Rt{}mbH?Z70HQM`A@!OpA)foRpuR35FSK!&l5_n>Z z?#b?W-RHSD${%u<(X+uzqUvH^6I{euVHqTKP(>fwCXw|I~A!OzcC%muMH-jNaVBh({y8< zYQ#FqHCc%zEg)6kbM_$|x%ENKFKID#2d*ND)gfD*0C5L7H)k``NDIsbwn=Uj%fxR# znf`2hc1&>qxgz~l!T!UJA$Xm8y#43YsG@7nC*0+YDIGqiH;SM4bijGKF`@eHtwW}KrdrhHQ9K}G<#{NaPoY-~bxfQ;KH>9LL2H0_kg1Vq zCOBxqg)g@d;i`}vvJB?+er@?jIJIK^5X~LzyrsvaxhvFh2Sv!fD&uQ)3}T8VJ1ds) zgVX~oRhqgt5dYjDF-`(~m*=iXRR(TG?l;0zN(8eXP5N z!as{0b#YNsf9tUMUzq}gg3t=)ul5~ImVtj1ujdJc*_L>*H=)b>4zr&31n0l(foel{ zd>VnbW1cGj=Q@j(Hp=UG7l2DL4Ykqp;#3g66!1LG3%5N?V)C^4v5N$HN=x3^Tm0C_ z`tP_2dUbO#mIT(n=evv&0eQQ%Ll&AE)Vr~Q+LCclzs(d39LSi{aSb?Y+ zIcnLXLtg4>v%`7m#*o=0-vMWd-QyWeR|sy_t=>S@v44KO8TolmM?lyTm7S_M z6e$H|ur+pZ)|oWVeucoDvZ5)JvS;4ot%OKIh?S>R_-e6jmD& z(DR2a6|N3=HiZLrMd$KBphpWc2MOlA& z{G+#DNB>X}LSA^TL;$qsg?qw1^g^=g#i#XA(NAMRZcZ6r77u}E){CDtU3r{cYDe5M zn^DLYEpW1L4oR>0^RTXlQ+#U|F49J9H>Kny%^P+-E7D&2^oHaNBPJ8C+QF)2^v|1Z zsUIsa!)qHc@%WhSO{ADoC&=qLO0>3OdRI-;7l3MLsAN4{EK;u(vH7n6@Iu*}jCkwi zM2e&j)ZLy_Tr714pMk`Q)gkhKyD7*7fPktzS4PfjHL}BA0H*pm`O72ZzVF1%*wvQ| z!oNr2eVgS3WUKF9E93RPj)zRH-7u<2yOTDG&j4g)w)TKN-nb5z<*^Zbzq8^-dv~|! 
zM&*0A4cfwZdA_T8@wwH@(b&^3 z#W+(yo#xqTcChyvJf(rDc=|qX^-}}GPrq%paO>EGZC|+^_)st(Z@4G!LOLG=0oyoX z`JSch39S$vu6{;11&vb;gr^`fJFkD_l3xy?T;-@oUQC1sTTlgyNMt@_85 zt~d3Ui}k+9ez z*Fgc9X4+0f--k?043R*_ep^F~d$}vGxezhio+rI6pX)Opi_e+At83E3^k58GNsNJ4 z`;2hiRHs;3@GTGKYhdaxa}EYG`-khiv&Q+XgBJ2p>fe4{JZw#8!V|Uqg>hE&P-a^q z^+v$8lc^=V%;h+F2@&1~lDX0t7MM$(iDJV(!9r5`&6XbWfXX%>OIUp9DCue2O$N^>r|s6LzsANkW->p&ei`*#bL80X=W%VG z_griPf?qfoyzidQ>Ja(Tk3Rq{&hlU8RQ@e?ZZ0=q+fz%}3xaG}Rw_)Yh>jBv|}f44#l{DBACL=S>P93pX) z+G=$l_;5NM*cu)r5)PHqlo;2>#n-mpEbQ&{P+i@w2&|R4U$zfLzlp!6TpqsHp*0S@ zfCDMpQd7n?mUI$9HQUc;_1_RiMBl)GdG3zVAemK!eLEY#9fIOf=cw%Tk&5J>kQMYM zdcm{s>5ajM6+VZuRSD+!v`+9P=c4HBM@nDV6xCrjY};4ZBNR1Uw!Fe=Eq!SxxL2sy z7`7b-DI)R`9>XMES8(`1R_M`SW>e*CbLW*a0#$cLNYXlC_H0Fx`yE9mPS#D`WOh**mjDp@}BI(Z3odE~koEyI@ z;0%zA<3p!eVaWo<5Ve0Kq~%)~K}B(**~hV>`3}AKMbJCP1Ut|cp6~9gBpe1Do|xU% z4;epiAuIYuHOts0T$bfU_P!sD9v!x-NO1ZZ8&fHQ_JEbvqStt6O zRJd$TtZc3q;J`lq1-vl@fbUmuxYI~0V3=G0wiNFgvEp1Fq9Yf}_f|4Vf;Y zY-KC9kS65sk|=q^t4V?+-|ol%gm7`Ffb(3X?JH41x5k4Vyaq(A-D=dCo}1o@^yE+p zGT#*^Bx3Jn-03`Ok0g4iBpk1};`M*K@=tdts>kkQLa}lBEMd0+$E>JMBh?`GY+Zjm>~MmM%!g1FoFhFS7}e z~GJmN#J1MSbCN@A-v#5m1~x!1>=O9sQVtLuQ0t;vm=PmSKK=d_lW0V;LJ( zw(Fjg$O&4iGg@6n4b47H3j-0I3awzo=zL(_o7Ae+P@m3E7R{d>c&NwH%--Dt1C?Z2 zR#xZswisrs^AjK`LVylG!N(-~4%leu+nnM#zvV7!u7pb5e4p%kkL2d4xECamFVwOk z;bpuN%v@H-iDr%b!SQ1`5XeodbbHx;W^Goq6IUB2Wm7y7`c=P7cO$Uo_NE=g?8t7ZYOv<7u21JOJ=Q2bq z!==h?3co$Xj&fonD*4{5hOvIk?pk5wdrP>77>^oVVwXc3sH>gSsACHM%POfz1ynPN zvw;+;BO-xLErX2}_WqN6ePhtIJjpMlO(;9TvZo3@c4Zj84M*_0>Njv6sxc3WVBsHZ zzUK3DNA*YM`*oM4uUFQ^q?7zrBx!an|m4GE+O>7j7r zCp>)gTdc~6K`SJTY|Gl~IO>KVz1EBmD zWf-2A=mO_Le8*iF{auJ;VWFi9KAbkcZC!G7G&`dkRwMpzbH1=DXInd?yxS%y-T9y> zS--09$36t%~j3Js$Xc@jI!yTUrv#j)aid3N>QD?KyTFm`<+0dv2oklY2B zH0tWnaemsEFKca< zR()HstA(PYRCfTc0n-_)?eUIL&Lu%jYE!HeJlHG+NL|IYfx)qUIF#-;?<3G#Svxbq zd~m`J1$1CI3353JV3Z$KxHLu}Ge@Ulq~_z^k4VC(b4^n=xrRrcFas3Ku{k5V- zmXkaJt#&Almr=r0rY5s_9}p3-{~l*|8#ROILeefPvawIv7hIvp%~W%M+VVjSYh%-? 
z<9mna5k!Jbfp9y$OXh8(zDPkp+9=&#RMw&9$IY3%WN}#~M@CV+g8()pHDhv>CeJAx zUIjQ51f$4Gy;GVHF;%PVWOq=4=b#OGq^d*k(3bjJ6TK92)4@nDDg9%yEeIb?yo|uf z-A}SeqSd1zHgecK67Au#@H?`eE>^l`#FC9utY8+Z)`B-w0^n_uU`i)G1#r$_-cuMQ zkKz?uisHh)uhwHti2G{JSN0HHtIv}bmHG$(rRV>YX+jqDI$XQ=MVo0`5EPhw*XH}v zhN`|f4V(Uks^3xH^lAtJ5|3iz!qG-E@wZ+V&Q;CKg?~@Sfrx_V-+#06N0$sqkIKm# zc{;K^7u~6DL;@B)KyT=SZJ&Y-2jycc%k_q@l7ZF0RWze#1%TOsdm( z#A+eyCd8Be4@zEfe)LFgX@N6`_zFboT96d#h+PYh8P~`LYfQ8? z3-J}2XYtezQK7Zq)GmxVe4UZAUW{!)b(gZOX2S66@GvJ{|i6*ujC>%sMou=n}n&|nG4x1C9-N~t| z`B4-xj$CgpqEuTSxX?+3k~!~w4p5%8-9?hYjr|c__NZGFGlJsx=)VInhk53dQh^1X z3$RC#2%d11c2y4Q;R!avlbAzzo9LI!#_IgqO{@2h&hdiI%4+L$!KHrXQo_W8=QhRF zZ4+pi8&cU_I}{s<*Y$BlviOYQ7itGn7LE=x)4poACRSNxJPF*XT91vF#2QU$XVGEUMg)a2-FrKOM04Ww~F;Pa|A5<_4f0NKDre%x;RoXlA zH|kMr&OMaUM0>)kx_rb&2hYX**oERHOsxSR0<435v+qo<+qL=KX4!+NAiM9rLQx*4 zsSNW)S^{L_+#LA>IPVEgdyK+24BzXkg={O+kUkjpvdm?zaswh4ENs#DOEwwP%l{4D zYK_+hzM}iE$wg2RUrN=Aa}RSPAVx4S;pfV3*L?A1`Slk7@yvdpqC>1CXkX`-ZBL5r zA6GBZY^TN-cs~S0Qt$Ulx6hm2&VKyiYN zoZHV`c@eFw*ZNmQgO>HQ;J_8lSR zTF*t4u+y;04-g54Q1ya&f|P@04FV>E?`Yj03s;AQcjzPpO%LYq<6^Lt6q0{yEK z3E~q1+eGwe^w6FpUh*H^I2ciUe9dJW#{4*^twx9Eg(ykDDaxa}v%Bi4NgKv9p^rJvujm&Ez)1P4CZ8 zl%AD}q8^puKw|Lft21!smi->!%D85Cz!v;e1p)zyfvqS$$i_UIs~@vX&rz10sT)k< zkq_|hYDLqc)Rq)JbG;phEmZWrqyDsYH)SoM+{jfm{ZNtU2#3N9felLm&4s>__Aqwsz^fn;!ag{sXc;FT>&E^X!kZ ztp6bX)@RGP_VM$}K|1}h-`ZbRfE89!LN6z;XzckFB~djIP>1UE@iX(Q>=8yJ4+}*1 z#r&`t{^dU@GQ*^4{aa)>|I+*3g-`~{DdFKedJGzBR^(DVy6#UA?LInZ}sqF{%y;w1veen+m5l}K=$CR&%GbuL9 zmeyL~2_>09gq+77fv4<_R&$8(ZZElw>@5^(yE!7pbEs&OpC@e-raWL zNq0JA*2J6nuT_=}!a?2??&=eYpF*n#!ROen+`}6hN#Pv*G)|4Pa6t_yK29ycJ6Is3 z2q+WKE^VB$8}`fa%L|mO0Qq?eyR$|TF=B>o!XP>sX}P<$af7y6%+APe{`m@}w^+yo zy3Zz3hfZx`hN9ESHgMJ^X-DOc#rFPg!|Tq5c_`O7pWr~+UQtdJnzVLU`TW}sXDPP# zkHM)L{YDz=PYchOoX^J}NS@CF)vTc(OVxHb`5~}of&`ZaXEC>S#^hXGJxQ8508~;-I5(6;|10aeSjm9K2tZ@V zHI2^L$|t|)SiKGLBg&k3`PZTfAo71#D=Sws{(B1QeV^)(bvn(^ZP(hi-QJ)&c{kv3 z6bPNIr1T_`ZZz_Z81m`Mv(5|LG=F%ITTa+0U%@R6t3HTAJwFtDSc&)hCK;h8WGjae 
z_l3*F$DIT4D?Gw>`Vn*4p5$ro*hYnoL2hpHC0kJX?eansCn#18Ccns0nVn27y-1b) z{CqoHFWlvlo5>3)XMoRJkkU<2UU7N1_nG7dT4h;g#BDzga9Ut2qU7p!a=Zq1m2uIZ zP8%k`m5Zx^k84(MMg1TCa+aGk#*^dpw2C;tF_SFi`e63s@ocC%e)$s3gRMVHLH)u| znVFx~U_ZlLxeArRm@U93{TIClEgZ2*TB5QRy$-U9Gyc{aaMy>MVUnwGha6O0=Q5H7 z+8cML)l?Rdjy3WXY2NTU0YAGFmov+f_c>qg$u^-8yP^8e@AdeUA=^cSbf5Sk27A@& z(o3^ak~`XFyM2+)3ed03O+QIwFcb|CJyY;)rg1?p<&)4aE&I6CYq6&>Q15m%d-A^UKwhlSa}#X7kz%A@p&FKtxuiYR#5TyE3``!C{q#?2CJG% zhnuQd0h!=Zc^J z!VO&|ALB^$D$;5;u;d%o91$zqjjx~whwd&Jlo>dnn6`09Lnyi}b&_MVavry78`|*d zms!3y91Un-&P8eZ?!EV4rOlDf(u>12&F*gC4UOcd*|%Y-cZ-g4i?E^9!C@Xe+v(mI z#ZCkx?nC(9 z$+mNU^igq?PZ@PRaGsy-e^bLv9-HH3uwV$v>dE6HQJe7y8O+t6V}7Gr4Qz{?z1+9z z5em(beu{6LD4&o((G3zwUh2a&`4^V$H{aKCQ%$!~J^>l1*Op_5ciP<`V(uuMI1Bs?Djq#3PG*NJQrSXWR|f}YUX zgs;)!BI5Hn0!gq-0ifSY{)`JuuuNE;oiHprE#T|^n|rDJME7a1E>cQ!)5Ra{hC|-0 z0vZIi{H7*iUV@;A8RB)Sp_H?I5Dd~f$SSTm^r(E#(AUP|QBMC-z!X+<+6u1=t1(IW zK0E(Y-;g58Q?HX!vthxb?w%5_pq!80f7?v|dplOJ=;v&UzjAS?6TIA!Gm=!u;!E5M zMV-as!Mz~!XtJlHd_kvqBMnKkR%t(fR!~kOe92LVJb0X?Q-Z*%p5O0atoVL<(bi*6 z(LkuLz3{qg(V{ci7>#_ zwMdcirPh6ggnONfEsK$;8|5<-suKH@#;HVGdL)UxeYk{0zEP7jIr@VuOm8Rn)HKTz z^SQsAznRwemvRz24Rn^OFwMDV8>gC^mvd3b3s>3~Pj+W;H2wsS%R5{0qV;V`@nkeO z=`qiJKri9oYeJR6&XL*!5P8+1MkAE}JNS%tjEM1H!4ypai7RVl!9B5}u0K~)e9)2a z?Lz+w|J+|PnU%wjTWqGgi(d+$wTtT(Z5O4vH;Ir~^vf2F z>2AmCg0ET~!9>Pvul4LtH>)c6*rfTT~0F~<)hNLpN4!t3zFRDC?lmt=6 z9PP^a+H*jhvKms+i?mc5DU{Xg8z)A6>L^N-6(0?619$_@o z>TOT%__GDs#D??Rim4&pi1XzWXWRW&t#11ITRLs9uUcWF-#1CN-7)hGDfQ0}ieLp~ z+(ClrU8RaR(xqV)nRp^2p@f91h;Agd0JRiTC}8BXPt@eDcBdL2{y9noz6LCaJzqek z8%MBKU^Ah|2=3zoJYKoZ$|EIyEG9QzCotCRoL0c(K3WV!UE0f!PKpPEbL36%;|ln( z9U0W{sDlxe&WWUfG*QZY8I{Z% zkX$eL*!BbcpE91Pv{Z>*qAy7JJFmOkf3ja-4y=#K;J7my*fz>FqPFIK=Zm?DM7LIA z_DP02#qJBc#nqOqE|74Veh)bJm!XAo2br?A>rW>HQCw2DOKD?6Fr~w9H=$upePas= ze-FHt*!_>`-_L89GO0<^jdRI-#O?|z+psPpEqwS_cNYf&b^l;5-6=Fq^{fUZq7+pI zgVCENfq{;Dt2BLq>2XhmUDz-cfwo+4zNgDr^GI&HcL%3z8*4IeZe%=)QE(|Ag70!TtV+-tX<@b~X9%s}CHPPcKw68UV)ZM6at 
z8HlFdLZh~!xtsEZ-3itD;bfIkT!eEGa%yq2;!Pe&``N~<5gR9oa;^CigqD*keoq*5oM-<2RUM!X2fN3joQY!8tc zW+Y-DuazvP=$#^Ijsp0|oUOa8X)1%Ve8ZwLNc;mfyude)zU|5a8SAP!wx5&g_)V%3 zpa1M?J!Sin!r}np3I9bP|NF)pL!>`O^DPLHcm!TF0dj~OaVoj*k4Y@93_%#76&H@~ zDv;-W)i8*;wjW6>8FANv^g7&9s=YLIej$=B zm$)|_34WWq>Ep`xGlMItPJgND{2RoQs#_6ivblhM>Wj{k8YSbviS>|n(3EFEK>(v(>|q|X6K_7Zz?cGTIw{Aah&4+Wlr3i^U9%7;Wz zcj9E21~tjevLc^DrjJVi2}b3-czpI@?r1Vb-=Zz6QeVQYugb);miqZ!RE!9{S<}Jl z*s^MK>W3-Fr9$k=ifY9**Tv744jxTvo>rp^B@^_{%Ks2ZR{oVfF28g7-=_B849{qw z##FyzKAPLMl{XxHiv+W+-XeBCzZ)J|l4J&RLE77NTvej|v5U+{{*h9w4 zN|B^_ALEb=Sxv~cABl{gjv#2u48<*|v+v+`TdJ)A^LE=vIl6A5cyH#rCxtAHe8nV9 zz@3e^jMu`+-6VwpKJnbdj(x@9m^f%JVUI>GKF13kd^{&KVFtp84)Gz&Ya?Ap$;HID zs|magiJJYWAW?Mr;UfWg>yN)E*kQ_h)lzboI?GOxH3vX<+7=jTdD}zKn4u_R4^Byv zv|_rSJ;$B$#~!iYy|CV|cMzNK80rkCKu8wxjR3dKvm2{Ab#I6P)?Gr}+Y*fTIa#gg79Yb# z26>q&4bAbX#OYnks5a0^QDA~8*AG6>$ip{9t~ro5Y4!;}bl6f9FsDG1TZavDYDlnK8z(cKHOXqT4wb2Y2B!YHT=_-;2DY{g5VPZC( zSn5=ZAnswN_FB(Wk$+)kiR$v9FH;MAEJ(Am#Z)?`*eKz1D@+Q?Z|f?p#JAULC#EIl zl`TYPj>3|YzNu@)9Q&>q?{>7?!w%P$3sh%*RPaH!$ZuI?HOQDOJpB5R$foyzmeRx$K0qcV=2kbBdXL~ z;IaRyljk`Jpx~i=Fy-dF?B4WwXVCNPIT!e|@`D!!MTk0&zk$zX)b8VsJ^jMe&J#@Y zs3$rpjIYPQI4M5={RW2CZQb2+JwmFU;~exLrN5-iE1r**BALul8C!fxni--%7)#Xg zK2>D9HaIecNZnN~t^dOm?yM-K0E1NsgS)>?qIQfW8qA(%J$%}kl(?|OD%yl33DWyHxz)_c1E4R%)fy-j9QM~YrY`Or z(@7V5v*sz3+Y*V4&@pTld`bbM;^4Oi)BGFIA^Ept6=J^;AZ%BIow`3d3s2kc$THjQ zy9W1MX~A4a*w@W%HI}Wvs3n)HK+SaFadM&6??=sO9d=Ug6!ndpb!S{#RGN=m491=n z@m-qUeGo{CX!h1EkjWDm`Iy8~F<*&C&x~BTUqh|WVpgQlrPY^sc9iA|(5?T!$4e6D zo}No`Cvs?L)LLX~nWo}WWh`0cgjx1x)wRfHL(Yz}Y}K1hU;x-q^Mo}%(ulM>`XPxh7Cfw) zG?&%rBl~%!Pm0z-ax)w{Z;x#j_`4Z8OcM#I60^>NZ;!Leu6V(GivPr+^gxg2lda?* z{JB|?kIRmRaq7mZM;6%_TG)+ddBU~TBH&85!K>ke(eV8I3u^)#lOOAGSqoWdx5ZBd zW?`^ZV_oYtS)3{I5V1FqTV;-M8J*pInn#bNwGUUGi*E!3^=a21J2yiLCwWZ@i6jlffz zN)->ffA7JMJ_`x@xC7U=Jhfq2Y&#bS&Br)q{$PKx%5s?QO+x0fPj#^`N*->`e`h%| zzBMgd*_+bR0H(La4k$itKU7?VI`Bc>|6!Cl{}`Ys`H!19wDrxD<`H*cfHtzr3CY!- z)%14{yLwBI4{l=%?TUNJ#|!RQSG{ADV|7kPb>lPo#KIT8sET9?AI)&6F^VGKmR-B_X`IG4cBU~ 
zxSp%3_yc3GN5s&twv6BAZYw2x|!u$Md$1}`B z37V!|UYuCG!}l~!`Ah;ZaG+CMr!+FBNbV0tJ}_gMBQ97;`+P3oTH5!LkGP78Szb17 z{A+3Sh)IW(7hyR+sduJaw)!vjBawCevjIW;Wsm@G>%ZHYk#Q3)g3T-zU$dbD5Ki;H z7Zc7&%0_UdgCL(T(xo77o)y6{&eMB=2|zc2IR;j*~#CU(3RQ`;mj`H1XY9 za99^b*L2nA)s6+Acr3Z_Niz-z`hJ`ips(fSbfKlN)YF!Pj9iCD@3{z}sML1&{r&@) zkT9ayPA;7@@-U9z0_UI7it7LkC(_6F%)xM?DyhBYSbYjKZtQ+Q-lHAznpqR!-F<%p zXx@hjBxQ0+zO6xrP1@aRBWR!+(ddR@i|a+YFy<)n#GT5S>Ia3YiC}hE&^STA?s6oU zRDeJZIVBtLk6j=q1SHeJVq6 z8ui%7j%Y8O0J+fKvV>K23hB|IC@aP|fI zEmYF=tE2|msv+y^)(w6XjlCvuFRHtby3w~{TiexSs8rs#qexEz5t8KchG6-1w$~UN ze|Ur*NNmt6dQskXOte2g+=G}QiXH!&i7x`j1efy6f8Ifc`3vq2@11jW>Q{Be*40v5 zR(B>Tf=ow3f1jqUiiYRmrdAD4+~u@-_B?xK(Iua${v#a}0ZR{S4g@KNx?d@}H6~%w z{UfN)ehn{cnE&)XbZ-HYK$xbpHX9MifIf9kaFjh3C8nb)$Bi*`j1+suepI44pWi}> zU+6~!DBbS*5{gFrkQG|~eOP&Q2atJWrzxGx;W%7jjKP0k5?_Zq)-8rUt)2-01O00- zFnou8|Nf(4sOPE=qz?bkA;(1YP!R?WsG9L0<9GF1CAbN+LRCctX0@Z-QYBT=s$HFO zt;_N&4oF`<<^;V~LrKU~tS2Z(e@-0L;%AAxQHnbSv}|C}e1wQ5>;IQ3v=BaW5Wgor z!9xB=e2>7#u>W*m=l?;b!k02|Hwh{QR%Fc=eW64bAE(g>E3FlE!q zxf|Lsh7+oM+gB%RQ`pY+va}4^8z;jb!w46+3!c#LmHq3^G*C90fDr@q+ONDQr#!xi ziAk--!ysfSo}kAFI3Lgy=3qF(WL(;BBVQ%hBSC5orJ~}+> z(-^bmvK3_wPXG_$_ALO{7%CMLB=F==NVPG7*ZGJsVVYR4!^4y8dNsz_Q&ChliHBni zOk8oHgTmcJ2$_Qj58OI)*cru8kwHB^k+Lv;ktms{+f5OPCe_=N#9QXYwoUA)5@Mj? 
z+39e*QR3KMSiWHKysf=zKH259zro9NyKm}SVN&Tn7iDqmwvV`wi_DMv zMkK2i1oot$HAxwVeVFJ62X}1FQ8NGGiZ8|CWXiINdut!CJR{>Nco06kSB)MXDT6&7 zS1sX35r0?Cgr3^*%y#VfyLsp}zCv6O4KiQ>%;xjE$H3&u?g%4#VrTqHK>LaSKU6@H zGX)YT`_L-2cVRSj=pbvc18Zt;bZZI?RfP+asmMlGFJR2KoBbGd_zfjg zfD+7pp$U(rW3OOY{2{wFN0NMxrY$0yo2hxi_i4S@oGf0~Yf>1accbItXk`2GF6bnX{iy6Uv5=9>N{c5c|Loo;>Zg+Kye3im#&$?bz58zf;vzjPE_RKK zgoBeK#rln#sMsfGi&>9UzP(~ZDJfC1U}OB}mAQpPhV7?M{guS;Lgp}DksPJWJ(+R} zCBMu10L~wBZPsC(G^X6+1ek&SWwGZrFLA+)k+Aqg7H43YG@*;H#%P1AlzAfJJ^4#N zQr{J%zR!!S9c_9D2c?qQS2fPw=XH+v0m>J9U!f^mzDHUBUDIr@!IJy%N_>T>|Kf5; zPBu#Mau@oOW~$hQftK*07Y$`);jITx77R)b30v^&^Y!45_9`vD7&6r}>r>ToZG$VH zEpFxj2G8Evy)t-U+BQ2F+`O_1wcOhSRofo9GL5>i5|kSlzcx|cg|rc_dU@adS=gLy z$O&2V-aK3HXm{U$PPG0@_KQ?q^CWtmkg;`IG}QcHcYh#%f8ODZ4Y^-`X|Pywd-?n1 za7{tGZu{+?R`=rO8=xX}`km-MRC8t%=|9U<$Boa6y(XUG@AC%*-_NubXU*#5jU4#r zw4cqvr^7%KIyS*n2h28wiXJN20%Z7wR@VR2-rz>8Mcq)~yIJx*(p|{n@p@@q|HjFQ z`BJ9U&I~`AQl!@9FF8x(kvVqHmB&lJ>!3D z697gcLGn5&WIAU}pt?VMlSjA5one5c&ud1n*Q34~r7$Fd&Qhw$EQ004UEro{Aym10 zDJXCJTZ7+|i!LwR7lNcN7L*%|kt`liHtd?Pmp>=}QEc%%{GsshaY|pe_Cu6&Rij#u zHy6of7CVS`Kyo8Kln*Y}eYPjTy2J71XhNxn^P_tkjXjlXA)7|-Wk3(N!XAhzq06g;e&nt>_MUirzHA*~a<%8cbLeeX@hs}iL~Z3MVDc!)3HFxvlO?3Kxm>-vF0qL$ zzFQ4FknASRC@YYZ{`HlkNgiY1o#jM`l_X*UW0Y6L9#P)|tufIcyNMwpmMaM$B3t(d z{)L~HH&(x@qhOEE-`CH-sR_Ymr5Wu{!x;Dh(DmZaXBgukxHXPiJB(Gz+=cGm8n-4; zob+YMyu*>{-|)@)O>g?spATX&!dU@FPavWNP9e_6_PCK#Vy{*bLZvue*aBZXeul_~< zOHOt$`m1=*^ZW~sZzv?WUBN4N$&V)(dw^OJRt-x1hYNK!h@^00J`$V*yGo%&a7e^m zFA&X{&A7R#oPMMWvC^rxr{Rze)lEQA&Iym#e~6j&bF)IxO(sAOp(E;3=&N5z+;{Z^ z(ln*7k(vLQ5ekP|0OB{|%0ZZ4Rsm5DGc72SEFjPTyxd_5@Ah52_FVvVX@FEb+naF^ zx&o^M2KJJkM=?OtWw*@Ff6e{UcX|&h0zJr04>w%TDWFfo2Zc0pu6^yz{Zml z-uq@J^w)89l9$<{O)Lx#Br<2f|JMI$1>>&c%%{f3HKoI?Vc7u@+;qqQSXu6*ojF&# zjI`Y^)3%4P3)UgCy!5PfL0T_Nm-pJADhyHG$Wv;Ut9^jjF(kn731b0S7wBi{B`2z) z;GN07Nk;zmXFw-H!}QnBoT4a4!rdk;1k|i-kaD0CIt{tLx(yfPL-LkemvzWv;Ci9@ zPx8*Ew}hdGT(T3b7Q_1UUJitP`DaM+?RBBzem6#l8w=!D#Y$R4* zb^tzQjeqA2*8Snht9vGIL)*C@2s$ORY69U-m#9vuCc^k@GCwTt0nuY7auzfv} 
zo1cBR5J~3mEGOVWxxE)aArFn`+0xoew36I3(`FJ=cgOvVr!Zu?`Anxq_LJ4QMz?AY zm2e2CJgoG&K&|f8m`uAHd{tbI^}d=M_q~6VV#K1pxEc5fejxzd+}ak&eiRi=S~YxW z=Ai;IU{=AF&bOZdkq5M&e{6s1Fym73EOUbtKJ}g(CgR|9$!{6@1JieQ9T*t7GY8Ox z6C*26?33`@({FMAX%&>csjIu)?CJO6EO$;$iK=>DSUBGm^rE%v zxB+>ea7&T~r;@ml#P}Vm*1QNj%?dOCLuh1=>xfr-FMlI63`c(1a_t=%4D5~bgZ2zO z=>bL4NVm2JgNq>yAN-g5D!;&2^roeaKjbD_mZcqzoabiiK2-Gug~ffyfl)6z{~xBu_1%ExXEjre>WQ6NsR&{(#p}NN`N0_WwpF;5vLUSLq9+)7YoK&16Z#o=MWb;3xw8<~a2b&(Qu#{b!811{^ih{a(33W}tarMM6~ z8Ow@+Ctq#}@n_Iq__y zD8skJ>w|uNC`H+pfP_hKG`7loKytkRks6ptUYdEIEL_RrAXk@g*J@3h>ya&T^gF{r z(uO!)+um>A@J@uE1WLw8X1O)yAT#RAEw!~JK&#!WkJ;^8pINGI)AluY4A&#_Z@*sT zpW#E{*_0z4FWKaq{eY-kx%ohomh-1GOD5IKWPF)TdMW%!%TwyUQbwwep6Y-1@ri$L zyd~~~<|PPEbkNSZ4*yO(|DX;Tc%I$-#4$6}jJe)Kj}~_(-Q3xE7~3%fAi~4FmaY;=E1{!NN4GJ)*Wg_Eil$t^JMHTYVcSY+}?N z*mAZMAn4PVdH%%W&TRMaV9Tgii1Iqn+j_56W%_-DitG~88!$yCT)UqwIv-&17?S!v zLI=oTtp^nel9GE3@SbP0PxM|v9sua(_!l>zzP=Xt^7pVqR1xdu2w^v24anwG;N`3L zNPd>JN?8z$CCm)Tg)Nw$POLr{ydQpcYf7!n+Wd1g?Ug)Fb?(}Z{o@aEs=`v3 zoby(R?NUpNu@{~!PXJ4m<3l7s(MTmP9pnz(GT_qRzU-wU4}IcaZyy1h%n*W@p3aVA z&JXn8(!7fYA~vvt5(arG#cGgJ5}K+T8O*boN?DcX;cCx+T`;qvME*lYwyPJB2ibAo zOYKkQi=fvYDXX}{%gf#I3jab5q7rPE-KDGzmNB z*4oa(cX>)k7Z$kuI@)njeT^?|N|RIbOEFOW@;w?l+#`OOZRoy$+Jk&b6pBMYHl*PM z3m-3-cu(zj-4u!!-RP>$fK>Q}a1iIF*Vim39O-vvh(p*3QC6&MS<< zNQXqRakx%{f9G6+C>C_fCSAu9uX1@sTIRYNdl-p#P0EUA%CJmhRV^o!1{_o_nUSmJ zbFvfOEFXGp!o;0)&@=X(Xv-M6pzLv6aT&RYu-D08J9vbBY4@Ia=Ik@@E*Pi^*Gu5) z+@=z#*;aAUBpeR&EZsh|=sg+&IsdG*76t!tiMo+n!p*K;T_TCi*N{cc@Fv)^3lgQc zmijP7l`b}@lDp^V@w1^Y$w@0ui4J!qz=mgJ?WNHMK|$~lF3WS$0SGKqn(Slp%-1*{HOt3ax^?oZ~c^*63 zD6%Ab-(MOl?>;44Gc6=&dz)B)sp{!|_~xl_W;&_gVm$NDhvCPR2oX91&ZiaBk@hFp zv-XGYXV2&Z9|y)JPP1e;P$deBZ+ma^M4dHwKLiG4Y-^miGetUF=t+Mb9jkA|riBPe5rsMLR2CPk)<0iU7y&-*F+ z1@t7JgXu5a$52IA-m_BW-&t7-mk&kx+7aN|R-6p(p?)9Hp!~YG0XQN(Y#3_#1%=#Vv&wMaQL=0YD?I0oom+P2iIGVLG zKJTp%T3z%sQjnH{YcG*LYt4wEda^qCf&~eJGNj?!qQ9N$wleK&>Wbg@v4+F5mKbwL zTBz|d;G1@6ljQk$EFnyeYxtzGGf4e${%88{ZbCC<$Q1d`lHdL0xv9Uz9tF#F2{DQ%s#YOaNPW3;`rt;J?G{+ 
z#lYs)o#0Wym-Oe;Bw2@%Pkyj4Hth#LGyx&i^>Sn!?+TQPegSyI&AF$ufkt?A7?{?p z{-^u7IFpN>=^d9U+1I6WgnH`Bfgjzf!f=sx0Ln!jzoEh_@ow< zLIHj;5wKfap?e}Hbs+*KEm{~B{lo?-N2!FaE1D%J>B;*FQ!MsjSogv~aLo9mIy4ce zif;FGYc4RI3Dm@ph zjmULD$h5|8;9YXs`7f|ZN~`)$8Mae+XPK>fh3$^FBoPPA40aR3dAa#6w#2pBeBfCZ zQrn{?BhC1n%_%zVpx~^VAj|+$zL;IH0JjA-FYP5V>Ad}My7)*avGNdWRo1X=cXEl( zile;?Q*0b%s)CsH5|jMD#oZ9Zr}a2Wr9Fx!H1zO*2Wg8yTk0C&H%7ZS#a&UfO?Eov zA`f1cT&lD^`-vF;0;}*@F0%c$8{sEsaZA!dFathr{UzQl`%>P2#L{-c>sM$2rh%h5 z3ZxUkY6&V3XeigMWG=cLM&f0nbx9dKGAzZjTpwciKH?uhGZplV`9-2L|1oQc`@!5d zy7#f8qIhFMV~Eswy05|oA{d+9EsRRJzIh0*I@gF4362WYEea{U57U$1#KRd-yt7 zm_WpLox~V?{@;0zCntF_agg~uXXMkoFNa|K&pPx%RQ-Hb-(pD*$x?%8erf<06q}U( zvjGEHfwx^8a|3n7hCJ_Nc~^O?LJWGgbnreg^%zS(v@u}()}$pIS(hc~NomY8om1Vp z?N7VLDZ~W{`S<4&HAmg0Zam@f^$2psub3B7d{{k`Q%{~UPl*uQz!Sd<_BEF=zf<*s zrJ44tmj8>UtBh)^Yu2H-mSCY2cemp1P@u&r?(XjH5Zv907I$|mUfiL$6?ebk{nq`P z>@zcu%${-oW{3c`fHiw4SajGAzyw;!BQb*da8( zp|Id*(^8`lvlTWMOcc}dngBQ$(rQOUU{$xJ6n|HR(s&$Uh$CYX*eAo!t z6Z;UZR=#;7K?b}wv)NNHU@es;w$Bi$$;%aEL<%-TNMacq08r9rQS-;iEr)RD)a>Kb zSOHMD ziIIn0FuA2CZ-MKpjPG9hvkMqdd*sM9yWh{Tir6)AbmZugS?JhStUafoUaF0?mkg*l zUgnuq8{fawLL@>A{u3`dPtl$aLhv)Zc4aZZl|hI{oG^~U8suxr15?nSelxssb4iu$pN2v{_3J7L~u^6e?=@N|`03qsdo zu_rCccEJPu?Yk2gD42QsL*h&a+2IEKL5oZj;OW#wp>w7$zPMM=%RZuvEa;S1uyPa) zli{zcNEx4c4)5rr;7IRy~ddzF;qnB zKm@A3skp5lrwmElH2-~%h0QR7biP3cEarbC0U9UVC(<;8>qR1OK{G$4AUH3vCt=Sj z_PsP8!*cn|J`f8hcB8NBP=@Kn3CQ}R?HV8v4pxdGGTEx^EGOet3i{u9>9_NtQ)> zjSiL=1s;jncS>Gk^n}PE_xl?y#wrme+V^K0NU>e39f#SGyM2578m-Vqa$im*BS~(#riB@qZ!Km zaGFubwfU!x2_JFsa<-|f=cu+Sb;r4GYBQDZ2$LkkpW`YN7`}Ovk zY;tqb_gONuN%%cCW=S%)Krc;%I1rNI`yn)Xy2#MVgV zNO$C12Jkgy=8sMbNVWt{I~`&&cIUw{Sd=pSzZU>IKcJZwL2G&Y6hv+u{UgAd>L|sY z-A0)0=&wG4@Y;?7q{7eiAY17N@0dR~H~fL>t4Tx5}%KZ;HZI$UZfAO4N$UfZD~H`)!I}3yqT# z^Xb5fS=hQS%uZjNdn&@S9!D_bAz&W_w_IdYK19tJse$Sa?8r5QzzMH?_z}S)3d&;R zI2mXm{Q!UDVnDS$2s=ub_LiFDNakw`A9qz&g(~{oeyu(|nq(FF@y2Tl_9hgB%n` zq5JxAD3Y51l<8qk9Ouk{bqTBGus+9(0`1MQjvKS?q!b#Y??MIyZ%$>{h~z|ZhUnuq 
z@3faGnB{609O$?KGg-1d+|WW!9B>I=hZ0aNCwCL7M0t{g$?*pRA-IC!>CRS7JBc{x zZ)I6!uir@{G~oGSg!f{*7CUP?*(0}+PLUcb;!(l=SEgJga@=zzFad~8M3+TMpx>RFTnxP?b!DGc;QZJPD-4lo6Pa^i|uB( zS$GdBc?VYSb^eL{m$9`*SO-j?T6jf4uW}dmaPTLE#MV#s%B(u?LndJ^Py{E+J@dw^ zBfTi;iC33tKf#<&mu6*SQyse@bm1tpE1h=2Qv4>&4?>R=uoC;`9ul)eG?nTbXf)r4 zs+yy@pQU+IeG-z&^%($@#+~|@xw0eMPac+L6x{qkJEGy|z!P~HLK@t6fHSxMFU~w+cZ{F1YB;7gHY0wGZ`7g~qZ8v$;=PCv_WTvij z9>CuKOu7EpHJ38Whlcpbp^KDJMrab4G$$P2_R9&#_5Wq$3|0a5y$y0k^I#C6$|_GU zj(@s<=}0h35$~GQ2c8bo)f`H0cm`W?&HBTLzKw~dN}3zvbRWCkV6Gz1cn3~y7@{(^ zP`yP1#HNG&B%FE-mvMGN6#@k2byMNOAGN+|Wl5V;` z8z(cpdsdt1>G6yOTtpTlqv`uUq$01cZENOd06{XEC$&!$o8IwZ5Bf}aNiQ3WuKiO` zbnA5zvX`?^g83Qm^oDZ~}FWi6N!acDfDx)!4s9Rx; zv8Rnc^1_fahCp#n^b}n(Ko5;|@9+1Z5(>kJwlEkbQT^_Q9jmV`BpK?cj&dztVha9V zaLXbbYJ-em1P-UuXjZWKWZ)G`vQD(HqBc=LH(7h;)6Z+$uB${C=ii0z_o{haMk80t z?f)WqvfVbp1Y+tjC1SYE>iVaXKSo~^y)qVM`y^;?45RNIQaVY3AfXqt0I0cI?yZQ| z>Bso3TUR%p={C9Z9Le5MjkY`x$PnnK(YDz4-G#$Y`1{>=v^_ zTI(^Vk?A@;aNlKb`gf(OtIV-XR?(Vfm1El$;HP`q{$((Ro`|8w43}%?UtP^^wf>^R zBJ*=sAX@Jhg$&=E<59gi-^P4j=yuc{T@37C41W0gMS#DJpL|7W{gia`^zOZJHoSHT z@SBIK?2j9KgLTeLFnFd*oCxAc6bln)WaO1i* zaAnPHg?s8=>!6$t0R(e9tjd7X!YXx61$R)Op)q7Fyj4Dg^Y*;1xzipJb$6De#4uKa`^mt3Hfd{l_VDnJ_y+ zG2JS+;ode6aWqe!zgwd<+b`iNAt)=D{2n*w+GllgW1=-`*I7Bd!)s{ZoK$8pSz@nP z7)C1~^Ie@1qtVs|e=5=Xt>e{nIiJgpPpQdqYMrZMUQXSs;W)B<+t*mRs|k8>CXC1Y z1G0Uv_JDwfLscuaKn|w#H=w7cCGUm?HT8p*&T>)7^veubd>DQ``Mc+@K4z41Ys>8i zjg^(MiOL$OGOX7jHl%^a|8_8iO4M+?E^Vlag*u8Alcaf!gn*!&J2^kC$d|^V&B8{MnxvTz&qhoRd)q_WsTs z6nXrsufz$fab^NE4m_7bG%o#De>PXb!wd!*so1ipQ6*)5SO%e zH%_|*Z*fn(k7($18<0kSRP?fJ59(KGmxaoho~SoCphARUUBW7?x~+}nxmRJ^Rg!!MP4`(5cwr;7o# zRxkGfzA6TdAoHg6k({J*txqE{-#Gcq@_-q#CGiJO2uSwrJ!?!tQ3>)8)O-)MS>J1DduEV^DPXhe6I{W1?x||4@IDmgdkH#Q#EuY}qUo z%&Lt9+_PV_cV9=S9*s+m5i-qq3R1~sPW)XWM)$$hss{n5kf`6Gz4zF7`rwZ zF-}_7l&4_jNkQVAd67U;++VwwVVVWapZ4+z$no~X*r=(Me7}$&DWxD=|LkYA&@w`a zzvvC3d9FyBNS*|!l7D0js2RtlEs5!gqMTw*IZH8O4o}fZUPg74rZExZX=;@HP~(Qu zCwX>EclMVg@K4(rW#@7B)AffP=%8*#8$*=uOic}~{SzY~pC 
zl%bG#h`4>5=J#>^r2wsh)%_@s8{n502yeI{9Qdo~Z8%wTYvOR`VLt|kX%O~{Y4W$X zVUX_MfKOlU_j{n>{zimj@;6vRe#`81u2-6cB#!=UMX+adQLJn5{0GzS(EZ2Hjyc^R zl198X-y8w1n}x2*^iGPje&_}5A?y1j%Z06GFyqz-7pgM+ea_<*kZ;$ewA@xq#pB&j z`1TMqV2}SvTKhEZy*g;+N%A3TfAV=qK8~h7+~d|~%x)oog$ktp!u*k$Der^?9S-E0KPyg*<{dzba?Z$9oj?H3&uaH`zK4jRQw_pw5rW<>3Ha+c6(28{&lvMb$z=n}`=7F#+0L;OVM4Hg-ANex_iZQKhEt5cI&iVWJAWWBrS+8QGQaYe zxxB(}OLZsFqCdDI2tb@LE>eZw@!MSh7(<4IwVJKyJ;!O0In~Uk%uuK)CDUyKCW_%q%nsengQB$!Rh#+0z)it^-onw-V>tq zBPsN#vh$#$rD-dU(0S(*>RDXMEX5JgGSBex-msuPK73L=Neg??$ZHV*8x=-2{vpad zV@(Cp`xdl9fQi~6y<7?f+aLH0=|2X4iL3k-_@qMHK{9S-2JmB5{{*>5c z(>HjvjS+|l&7q5OOC2$?8!Py_>atiO`Egayk+PRC%gvze{hDO22f@h)EF?}=DAJZN zd1?Ez9HPN<|IkfHJY7g^ie>s+2-1NE;PW|^n67xeW9~6z#=kirNWTqZ+l$LWW0u!2 z#r>XCg{xPi^USPRk-&e>o4p^1%ibh|B zI4}IL)>eTv;@2_(Oy@rXCbhI;;ZN@xH!Yz3SvEa;nLPvl+Qyak4lBiKx$bC8#FksTE%PzScdOYIPPBZ*XpzqbzL*4BVmwFF_Br z?qU}hu$bMwD8Qlqh_#fHbqet*&@t?$f^L79$e1J3M1|88MVf_HWV9|1 zHVomhJ`rUJsTp)Mb5+d+$vBxGr$G@nP!tXd)G)N^5&@KcD=a!Fj>FWX)16U4g{E-r zCARjz0vY-;K27B-YO^||#u;IMEx*2nRS$_R(QCSlP{4Y?VU5lp)p6}W=nOSuzInjS z@r-3P5mfwm<6hcx=rQS}&*r6M(s~U{A4Xdy04iV(TJ%=?P0Rmfmg7&$>`3h` z0r>OIvGc4-fSopldmC3&n1~@R-13Sd1mp0h=u#g%+iluy&D_t9Mw;_fp1)U?{~Qo(vpc#!5+_&9TP1jZ zl=gC>xT|rI+k=3H7~_o>c}Ic4SpC2$$Df3qS6>EU&WDa+2^_OC_j!FA7{HmNsfvfl zm>FDN$3fxg#5b5zDU1`PeqD>m66(2*Bag(#ZCHwws-bFd$H(n%_Joq>g@L+)yKjNo z_-AYXbep84v2C?Mxj;*G#}+<~g~mB@IQ`cHDK4#<-fvB22Pe^H2NQ-3VnQHuqzoT# zZKle*sWa>d4FyPAz6;{g872O-HzCVt+>EyuPdl#&gE@lgsNm1IeDO~*;j>V3+;=O~ zfZSE%!3e!-wGfP5)r7_GiytysL%7RURBteBL)@d1<=>+mUsp9GAA57G!E~(_bHHaM zPO1^nJuA^$H6TXC^jw8a!|y!a)=aDhw-zaWnKs;N8r zaJcikbpx!H+g!i78%|LZHlOmGj#TvY#yZh`QF)&2I1es(IxlZ>30HQV5yb*fNYAxgx z$qhAXT0wQ=gI-LIl%K1oy?SH%RFV~#;~FAv>`l5i9I?~9 z)0xoX4noO~TVh?=1z(8s&!W%6-8=igsU4dQnLO93WfdkbP^jP7Q6krtfS%z44LsYf0FLN^r|td9>|Vl361hAddSi7 zYGnjVtN}rnBi=vjbGUR<=VA3QFs=xWJ-oDwc0`(fzSW%${H#LoPVzfHrQ}F-|MB2% z?_ZV_kfd$i`K$dtw7K(Sgl%NORzBkPmJJdgR8E!*ri5zDib|dRsrAG~W0qgf+bhZF z?nhT(b$NtOfPdWiMONk~%^-BNFNEUBEAa#MWh$=VCZRYcCw>w#$V2PP9A8U;ajJ>L 
zaXgbZ;lo_PF_pkjTjUWwk|Cy59^2A`RE~V(pJ_ht)mH3{GvD>Csw4A#`<-HVokx~y z-cwOW4A-fT>$b=8rcWAJHmM-&Z3A%xl};}G7!Hj=y8x3HIsBTe5E#2=^ZHAzwtefS zfD1$*hzm9=c-B>)B#qF^Ji2mF2$eolS;(CVz?Y^`EJaz?RAP?Ujk?LlHl|P%EfEKD zihIyzF)HL)2IX>}+VZm0c^Bo`;10sknk=JNeSME8b0rHnG`nuM;UH$K1<{MiE`E)f zv;B7#8BjfG)AsP|&Q#MjYK5sI(f9T5)g;Sd>v6T)Fh-{1ZqoC8H-Sqxihf3thW?4+ zi3;^|to6C$;(B^-;l6fMU+4>5j_;-TlGD5H0MOf5bG_RYI$K(e^qt5YzN%E~z#Gzb z0;*PMgAJ*^i3oEJY0etD-y&a`L*$Wi%gej|124mgihwA=2k2N38G2 z#%lbR{tF`doOX1JOt91)rahv&S^-WYD$enl@=;m!ex%;h zbjyzo3bt0K3Ik8RTmB`uBii-r!DV9zi3kzeAZ`ld8LVK0UHzQbD>r(|hxHj%w(EF= zgPLWs+^fc55ib&pDWJJ_-r}E&J0n`G&!n>QAHLG-M2Mb<;3GbG@z(56$)Xs(NT%ZY zf8Lv2_CW9>DQi+coUKlx!wOMpL;r==Sev@Cqjf;amW7PfoYr@9B(^hr{{tCxxgPS()O;h>nZ!{xdrk-?abO{TI>q=@Kl8nL@A=BUXDkii_ha21J}{c=!>DSiv1NLQXJzZJJ%n$h22Va(jUO_s zrzTz1-z(xpKy?yvv>~NKKjJ1I_6&lJ*cC$Ro2Fq}8gF{rE;WL&W*Zj_b#AV>Cbdwh zK6Xz(Ly_E!-wCKVfNbu%=gTCozU6mg_}l}mf?Y4=ttje*et1Vyh7v9s-6&{Z_OUyo zj?aceF}+>7o|CSx#YFCCyl9J+9wMByV+zdoy;6X!ULDWP!mRHa_#;n#EhRB#cwy8Q zH4_-stoWor$mw>EL*aiyX==PqWLJ$J76$|nWx~XRC2C(M`nf17Rli>Ey7&e`JEwQq zJx&&i*8~ubKe+lSqx(7+yk#m9XHAgiFcHT^lI@_U$&*aWpuf30lKML*cV&u1hoAwz zZ8957>20fjo6*ApGow%}t2!~gv+NA^FR0ikj;{&2gUtJJoLZ=kALOCM13$n8Lv`oE zg$e7++ifp1TY-amBVD>j$r>;E@JuD{gNB6uB#zF(PXuNOuf)7g^&gP>MpYQtDpWZO4#yr=4yuNGxck7Snfw!vzy3qIC zzhB@|t~s~d3iE}(@Ai;$7gtU^%QDU)cKO~ma}hmo zOx0coWXuItIL=Em_HG({If{z^ta-pU_X^^>J;6g7%yOz#Tw72bxzfP1hmrh z9GbM~kTDLORBXnV<}~kxRcFlK1TI;lmZw@5!}-gzvrQ9k)ozor|5jqpHeMrSFm8RN~O9 z^(2L3VL-jpNq@6Y7}asQ$?Rdb9HDde5;Eg3^#GO_uUXIU37-L!M>$r2IZ8G0vKxeG zjsWWNYxs&8gw&tyRqPd_b6K(iI(C63)^&6S!d=8lP@b!bhmL~9Ez3KD^5D^WBF*hL zG;2DUk13u{Y5U{te?7!5^$Tp1XqMAKY8xv%t7fLo( zNv;*QOIh*mzZ@4mKMW+O!=rudV@QfrhU=2*k(1l$n`2{`&CRX$-X9?K#IfGs)}(JC zd7O0q&+8I4m>)Lq^Vt>#Gtx}#5n28VJ)Vapt#LBl{X|yWbl@zMkTuP(0~d&?N5sFj zSAoRexkgQSp2{Ps?{b{kW$qnUp>O1H?eK(FNY|VSV)WULE4d%624*fFQZiO5M**UcuM<8&fBi9UBEMpb&x<{r0F*-LN8GtqmuCde4-0J1@iK##Lwi_aOuJb&W^J`((u9Q)a*%vP2s54VG%Jr&h 
ztITPKKa}d{#xBGyIAbBV8nL;UMu8z|LrPTJS>Zja(*=7$*>-gzysJYxX?JiC{5}9jqg9$D(&?8`_4>tx|B=*L}GL~97+^T_K}BLe#vjOlPIM^o-~E= z_sDM9`A9qG`6Y7Ic2vqn^5YNJyjyGC&OYufX`G&?ZscAww(P7fk-~q$4?Dps-v%Vy#g0(iFXvI!XRZTt>;~<)ETf`*rz;Q zX)^g?(}`s>{xZP5qZ<7IH4d10kw-~K>!!0UPZ-LmjvK4IgmF`9F*-m8e#5>0GQrfr zg`6_rFYwQ&G2ibCbyAEF>VpQ)|DBBb00H_T`oVNeSWdXDP=Ua@fy#qk7bg>3Pz4L{C~`CUwL z!%ZQAMST3kX;%7}Y3m6)gTL>YSi2usl^ly|MJfhoU`|6pUyUwjYhG(*pK_xUXnnrY z>U5#82&^+sO!xlEgmq86w4_q_qSD6QqD2@iB8(KL6XW?41TNGkKX#fNxTP7YdnX%L z?;pXo7bB1z1YwN&GOF6QJt_2E|CSBN*v*zG+0ELb_05(|icaeA-?unhT77m)(CPTF zaI>5*9^VZMh<8>ig!_Ghxl(b!m-(S?Qb42JstJkD>rBbbexi)aa{BA@;vl~#9OK;# zQ&)e)Tv?&vg<3h|!~;Y%%(^=EicZ6ZY`H0ZVrG!`k^5Sv`~o;xTd1W<<2tVHV6yj8 zAbI4O{GI}8PqYa%l;$I08HbhENF1EheT)0g zGBLh|qH^||cMsUHAQXy)5PO0gj!MG>%y!inrVK)rt}fGU>vq#^%te zDptNWaz&vu+ce}rfcv=7q^iopl94Tr{s9Jv-GF^y!23Yh+|Cl@s@i7O;#=+WdlX&q z_rf-pgsi>{ZPH+CLxq{G99hQb&t?*d2Qxs3aNJkZhyC2aDq+-u>BIQ#2N9?xF9G=_ zKzE}XGyucSk^&OzJx_pL437VB>MWqlm1-_Qm>IO?c;0DY8~zWBQqW4Oo| zs-OHfcD;F0;`zm4h}^xx>v90ptTz}LzumOzN|f!rnMbYuZn|`BEfqd7jTtbPp6Q- zB1D_8StiGMT9M8Bet)Uw8(yRK#ULce;@&-|5L|ir5YY1C#NEuK(MWL5co<)$l$z=q z21`w?Yi0U0G7usXLEk5aP$(cZb$D0{n2eJUkkN{6Q34ZbE;x_58|a{N8?k-sJLn5} znmB{Ae9Pr`6@DA%ywRRmsbcD_J@YLWm7&FVX^`37?VfH&EWp(I-K(|Uc%}9IcadxV zC69q(jW~eXY(XQpI%v-Gv4$NPq_~D32bj zLR{yt==fn185}=(z{#PL_TaZ0tR09K7}KIrDi58cFa+gGZCwk##zF&G#yGFfBEB@H zFgfXK0Xi+-KQU&bMIj*$r_7KRYQWzQFr0mvVd4K=mflof^Bj4PvwyY&cY`Y%Re*+C z-43R(@|8b=5<8hxaz^(DI<+2O7QJI^e;7HlD#(%!((Wuf6=~$nBJI*pFR6FognDgB zBbV|sf)s%P`TE3JBe@mq!6Kc1zd2S~wMF2vrE-<00H{KVeu12(kS;Dz_)da z5qXH!G{Q!XM7MjbNzjjkH*%mMWi`^a^HJB7)4aoY_fm!HsTpjyb?;bx?JKA^<8!M@ zE|bD^r)|TNF~E{Qc`Kaug#>2|e?o861AD za*|IBA()pOEF?oPxIOq~-rB!TJFw2mg2vQnCGfXG6!}1;Ooq1A=VZQ|(Vra+1ZlDp zee-7{3mRxHcY%)+$gPalN?2&Y_fL?2eG;NTw82JQgh|zb&3m1t5Da02EG>C!mof`v ziCj4Gx1ce;&OcbV{(2)ne8qITcF7JRVr6!+x_bS*TC4cM+4(S$8_s9B=KB&U!4!eOTs9+E<>ohCD;IrRPXFa3}PP;JJhvuC$C8o^uVD+6#OGgy!l5>rneH}SQ zCik_32RfDIvuuB^Qvs44~Ybf9SW(T zK(bs>4?5j0`AoN1ac}!Z@WUEdybr6XppN2IXP55o;|6yRJY1m}h%`5p4V6FsYkx%z 
z3t$HccIShHsHq74H*w(~PG6K`eo#wS`(9o>@Ilx!cG(ZSCZ`mlL*KV%qoD!7COnKn z#<4&4CP3J33V#flV_GUwbW}%E9~vGh0{7c+cjk!Kth}FtsN&=pm4;xWK{&SOKyk;? zNmSP_?QXh$qWk@#^vi)_^HgFiIQr%Ud5}WUh@zoZtV1DP zo4;ea8qI)=UE63ICnhH=kZJ{i(@rZlc6k5tevKx@;9Y*3gxyG1B)Ph|+jtteyB#9h z-pa7q%DG$dc9rEnVY^;3BYRiGR+nD7j7RMt2As~7CNZ;(qL+m#1ZmoMFeHtz!CA<^ zr}Tt>0UDVkXjG)__tPZz^(SFRg#t1wf@ihIvF;H(KO1457|ep0yyqhHz~Y$0Fd>9o z1iEG#8!%&1+BpwHW+>iLu6Npm@=Fo=VQ3Bg~1+KOqix#Gm--ko|#?ut)L&XbC zPm#2&7hz7O_A7L8&Tjc~9H*y=&931s@c0Mlj}l-?Fhab<;+(tlpM3k&hrX+>cmTcEopwRpQu<|; zwPjyT0;PS%I*pZn0BEor7TN9z`s2OD@#=yOG~jx}Yv$(r&VSaKN2Pc7w`E{u%7y>@ zFK7F3Ersv)@-}OkZ4%$d7{BzAdC@(Fw#aJXB9?vpqJtMO^vdUfzw z`-7t3B2|?HX;*)=5`Dj`RibR@fQJ%sFety?i&da{6iP@PQEm87@SbdzoqdFu5U?MrF8Je4;mxlEq*U3O!+hA zb-!MGzeo4B@?iCnz;FT9ZmKc_OE1;?5}_hKYWns3 zq}ytiD*|uiIi2i;()+QsGC8;CPtQNg03#W^d-bO<)fLuZ+P(>M2`?6u2xU(s33%#1 zh`^9fK@A_OKLQu-?2vy*ErFlx1MzlUGA+1*05xhTQ)-yk0nrdy14SBAJB#(Me+#&E z(ooH#!o1yJ1&i^ab*Kf|e7Y)?h1;~l&BKE+N>n_UCnm)Rcbh2wk8ut4e7i=k%<<9e zI}RWC88&2y5Zo6g){77<+G`1u7yH7Ek4T1(MuLUZ!;FFjhKn=+h6pRr<3RXOpLR1W z;fn2nd?<_e(%7}~Yr;3arZ>p7 z=Tln)?Hv(=O20vN)A!+O*SC_WRb_>yUr0BrZ!&`)u5e5y4Y{-~Ge4kbi{CyEPv+|< z66&_KVp}ehTi&y`Os$fI&rI+W7H3}h-ronl{rw$BMV2tSIl#HzT@`|Mqc!nqrg%G} zaBl-s1mU#xh_+K4xV<$aH$->V9{A4O0+@FPKiAxK`{OqEp=BS_9Pni8LfTiE4gS4m zU;QVC^#$Q!omii&a_)TkG2~=iMO5<~+vuqUO_F?-fPPxMz3%jlIzbUVN@@;6AxTgC zk~oDhl5fVAK4JhBadKMK|hUvLj+RB`46xG2dgWd)6={ z$i15GevN-#i2+;F???hN6o4_Ij1dRnKnt`F72RP8v2XpYAg1qc$FVKWQzgRsF8xlqJnbqu&B6}%OnkmP$QYIaS_Sum1rA>4B&hDedUbZ;#RnI zlj3C22G6o98;&rh@=ThV6J3Bpv--1)In<5?t&s5>^w^6iw3X$+FSKA$1R!d^2W9vT zpT$4-e}9>M(wyLytcF5dsQ~#k)CL#SZ$5o-%x@mZ9zAuxDH2A%9^JUnhe(yMF)0;P zSz1Da@m8{>^CV@!`(0mPwfDUlrt4qBwGwf)At*oKV!qeaerf0ULrUFTqo!V&HK*i; zkN2M@UEAx!h0l z-8J|!|GOs35uDZ_$S&eBb@yG{zuRl0=`Hm&2kWd&&3z3R&}E5nr!}8 z3;qP}N1N$9jt!!!XH1*cBRK+9BE97us!V&fiF79nj^$=5fZkmPoWW0s%r$=G{C+># z;*7o?=fZp}wuEn9uZUE7vjR_v1v6}@?@diEf?EjXT5UKG=sNU0JFRd>ho=ed9NBchdCd%V*T0UFS=Gn zB-wVIAbsW=<$Sr$5J8G|x`TIa#1>!4uriG=V~S+nU%3Y&B=az~;EH_~ql&!)9ylFI 
z9y!D@#9%c(lZexP#~@;S#Gmqi;zmK}F#~1mg#nc^B(r3j%v)2JByysVVv)25)w4rF zoM&ZAYE|byVRtPMTd=Y}ZAdmir-Yy0u9hO)8*LH#DQfZaQ_SjTP3tysXpMs$4=z$m z{bKFLp`Fo*K`3~%()7j+6K@v1CW4Eeww%AO>UDhKT7+4=AYb<#;Oj$k;K#w5?ZKpP zZ=+eRgV>l<1|w0N9{_t@50sT{$BbG|BQ%7SaLvBSE!q=)Yc674zW$UcT<^ujswSc^bX}Jq;m+g(M%a%<(uN?yKu6j@hOig5=8{v+VPWImHVH)-XX`##dq9o^eI6&J_!Gk3gKTI!K7dPwxVKjSjigLgVE7~PX=#MOn{)D*lH0bus#K36M^E_4qvne}{+4-4Wo~1TOL4eeogR6#cEica2*N$*k zLlJyUEOvlpg%f2G4c+rE7!n~aED0rY1EewbB}W^k`cr&7pEvv<5xlL7)UrhZgK#Zs zzsw;sMgth9ZY`LjNI%B-u%WzT9w$1FZ?stw&4mL>Y)&0&VpAi<`MGqhWCU$q+Ia_C z1xjh@l)3C8=uzw>OBj+d+2c~=24)r!)B{GJAa)GcL*-VFUeeWvB5u*6~+mwbjGfE#yof>qU9qzCgKEG!}ZlNjiRyY@_wsOa+LWqZw5)8jb z14NliLD0gTDlt#}4Cw_a^O{^!J{UJvK2Rh`Ce4Z)-*4${$AHzAT#vw=!MY(tQ~b}E z1ouO>6AD5$-DY+!NbuY5!BTDIwfGK&ZgfR<33y>f^Pc=>q_TtZbAta?{QYCXcu<3Zcebw?lrZ5gx-;QAk)-PMA=!?2rRjL_lR7M*5p570; z`5vA(B0nL7(t>+G;hZ(_$i{fA8+g z7dY?$K4@7_g_FghV|<-4U0975qbFpCWn}Mu_9WbLXxTG78+||=@6&m4-kkx89PMq4 zn>e`XH@*$z3mG=j{_59xI`fa>INarZ8v+aT#WOuiR+fHTe}bw=%=Ta^)1Yng$rv}W zTxN_!|B|mK9_<%;>&;2Lq0x-*d>S^x7pbkolRIUU3Q&S>Svu@zCG z-;<#^hQA|XPw7AzO}5QRj1TwMLohc>4KF4_Yz#f%iV1ONIJ%P79a_8~yBITci1)?1pVS_fl`VhL<`OZfqz2?KX&#cf^$fm7lW;lRw7qa2!^~Y86uxu4s~u-b|rYA z3@$FG4F1PBTpWhxcncRbGDi>hmi61r5yzX-ckuY`#}W3LqvMUgNWg+yCra?TzKlhy-YY}c&P zuP{nP@1m1?6G6=`M~3J3v$-6dLn?MGq|IE>kZoO8{=F6*syae%LWr$l&ebPmCgpT5 zl;cSzNh}E|_$KZyVfs}PKP#eDnYgiUJ$2M396;V=Runqq5qI9#;a!NKoLHM%#+h|mDLc+vxMZU%viz?kjhB`FkY-S$rqHkR$2nU7 zFD4*Sedm|F)$ezPPCstg&Cw1h5^ME66>Xb#1iEs)GI#5i^xWlF_jptcwXbfLG`mxz z{?%HOOwGXt(c=V>Tt73LkP)^Z%CaNqdZnDnN3C`B$19V-8(xlg{bGXB%$Hf8(j}|B z+08XawXRw5Pdb$wADHp2(O1O6nAvE*$UAGU$);-S)0gJAWh-FBM2H!k?V($@uf5y5dCUsv4fI0 zY4~ph?^%CEiq=-T4P?;zzk|Zg-N`FNW#n%3TceZX75fTBOxg1v0)4KM^YnWQ(9CygsZx?Ob5)d zup6XpY{AY>3- zHVvP+;?HqXFp%=1RY#06MZCUr*m<+~@a?uszD{+6SCjY`FhEU~0bRvOu2qkoHaNeg z`Na?0IZLiY^|6Nvke-o%aPieBrmGqIA7RtSS@+rij0Kxa!>Vc4UFT%}FLZiXDS7ZG z$29%yb)XJK8h#R(pC3Ofh?*D9I;{o0DN%gOqF!OyLEHaReC2B$7g1L?JmhAM9i&6A 
z!k7Y@UEXElz(D-j!rihCIoAEL3LPa>)Xe;AehYs%fFpOqv`kNU<8#D0C2&tCP3r1a4K#r<~>izIJ>DQ z=z(;roKG(KYMj9^3cchEWWk??dFFINe;j}D9REL>t}&_-{_8f`wrjF&V`{QBxhC7L znW=8JYqD+IwmI=;oA2%UulHN6bwAYa9`3WxKHxZD%a+M!j)ya5fk|CL&or4nR|Bn$ zRT>)Q+JENWFJZvCL1<*JHcsYz$(bd`|4>0SsuRaw796XAfpY+QhVuCL)mFlIp9joB z@tFbpkpi&a_A0mEoLHm!JT2g%)``NL)HnTcMq8-qp`l6~CUqH}V}Z|qCDO=odGEm# zElVy97>~xTZixKDKAPSX1(GyIK@6CyXIcBDn6J&w%^ChHzO2RnWQ%fX2A-!@^n%{u z0nB;~3dix4+^K0RNMT|lT$cqMNnp}k)Tp5ngtUtg$2Re)ZEO-Vd722uHN{l}384}h z+!can!o4aT(&lyd;4{rSq@{InypkFN%i)JuwdpVm7N;4;ryT=+UK%Md6Uld)OW$ZJC<2#0Tmo{60y@{*;=K?qa zA(o65tv~wTzdj(VEhf-U4N&=VK^I_B>-n$>>9jX9ffxZkIRSZ-!X#TvKcK;yT@Hdy zCXyWNio5VY!vr+MK9=m0fmsY>98Dg=>uTwmbt?rcOPyj=xj#Iw@rAenunICjH+D?7 zPN&~k@N{hqB^5XNgN9|=oT?zP$md?;@13(C+45z;Q&j*<>rr& z;65^X3bz6h?KNB@k~0!T9L_4kxtn2Jxuz4?adJGqN_1--&VJ*T1C7^T?pBmOmy2Z= zhEKIvLe3EB_ZNRey`K?=2oowAH^94aoBv$Af;cuFmggs~HfaAgVKz8+m}oP0rAJtR z7cx;JdJ4qLHBK!wV>u$+Fj_o8XpDV+zSqBl4uC zl3Ig~mc;PmCcc9Oh|;dAlqhJAEN4!NFsFg1dkB;^l6^vjcEe-LYE9|TD0qROX8bH* zks~-8;{+5{hccZY^`A_+m^u%9K#TcPd<}ADOCVt?Ja;(=FrPXXS}I+KFV~c1&;+ub zO5K47Ox@hF$>3mS>#Rs^=kMXjw&L!4H;SFz*CZMm^4ylsE6e#)gGAH0gE2^t40eq_ zg>@f*W0$>c(MID&Ur)&M51X_NUY~jhP%Xni8O!j~NNc=Y|2bLjXV6#mTZe-ZuEJjr z!a)-D%D%Pi&p*BLkA4p{rry?7^NTS^;QBsgONl8Z(f$dn#2JV?MA20sfvOkWSQZ}& z-(Rt4^s+K=5~K42U!@PvyRiQ&!8Bp-ZvjU2MM@qX=^Edq;m)M|wUK5a{ z)x2skd9iS9f?z4%lYqr3?E=8iC`9ER0I9{@bYG`u={=VXStx)aB5{WfYSk|Is07<&Ua>idMR7xn!?lD4Y6a8?Qb|9@D8f;&=x=^kvl zLe5x92{VCJpgvc%dLXCQ3a$uW9~YNxBAK*)7D3YdyHhxq`}4i&=e%;c+CpzB z7-jRR*{bz>^52aw6_$i}SwHeo#TKli-;4%Yo8@4n}vN~lZx*p4ETiu->wFy$n%)ZsDm*g{ybMM#GddtD(ay9sis zOtbh`C*oe8$aSE+t@0Q8Xvs#mS6*PHgU)kNrh^f<^KU+M9_)~$mjY5ZaRJrJa(`wY z?rGy0P}djKalh5){_a%``crF`{Mue7t8iS#J;_t1hbl1mE_hu#(V&=^3}Tv_Rc|k^ zEBl^t3!MBhC3Nkad8%_8l|(7_X*TH6uR~-KUHBQ&RE`>T3vw%%zJsNIzC5wuj~MB} z9Lu|`cRRc9*G{Ubh|$HfXnP1y^^Hw8M}&2ZbUHb?-g*Dom}!3U(;H$CM_|2DD!L33 z^8Fx(!=8gnu80VFl@fp27E^iWYzb$32<#6ASS7a>K4ELn;H_~ALO~)TACa$2c?!~I1_F(Y zq&8DzaxTHjVXH~^DXuiQ2*vR$@jBEGy;Pku7f=I%!FqeRFnf9VXGRN?wJa*vD7&G9 
z2nzV1VxE#sWxs%d3|KrlEB@>Vx3fS~hjMZun9oVr{j#?qOhYm2W9S|m2&_-JSrl*H z&$O<-Q*5*mDH~1j5JVkM`#yn~cXb6MS+3b3)JF93Eh>_UdYS-pI==CA^sr^LGGkG@f7l8tf z%)&`K;UL21O9;kA6k`yBa6Ts1h7*@g8a910DIeMb)M?#*9p*+_EK=;c$(vz6?*V#%)Hp@tv~5gSNYv&Aza%4FQA=k=+M7_QNxkuK_D ztY5#-Qi>Z#;K1e?z~6HgLT@HR&&?`a6Eu|T{nt8YIaifSiXuoA=eX8dYUWS0##f2u zLJQxkq_M|PCgRY*nLH`>xQWo<)yhCrjwXt1?JZ5dbjcrlXWq(Xnrtl@>d`B%<0q%D zKwXep%Hov#3T7DJ4#zB9&BBI_y#%XMphZl=pOS9*@Zh9;%E0dIe?JhE=_76XFi^=) zh3UjR=VVrqWUtE5A_;&hGVlRPb}nn?Kd-`ZzP7Ks@<+x1GriD)+CQof_|Iwu8=p$J zrTp^bLX$ElmhB()K?FPc&TE3;Un|i<(eYDHUC$iF8UF?(3wlqDXFw!!Um5~b6;vyP z>cDM64pgNcmZI_{SiNw`;8;yTCBNUR_D|PvnHzjY0b^iEAFSHfP3(#xH%Ozlb@JZ7E>PlYA$2@FNLh;{zIg&o6&rOF-tgU#>kC*} z$Gq;1WvE7qL4zq{*4;OBDpdHPqhp@gd6OENg6>_Eab&e`g2VgQmCjK5Pfl0pKF{00 zWyfx}N?<6~TR#5vwY22|3q5QCev-Ye0%%isOv>)5?W{f*U2>Uj4BeVl6pIWD@gy}y z!oaME3N;hXFT(TaY}adsg#`T1Z`YqmQ3deaBR5(aPMQXMuI)|x=6_G5s|v;f`^Z26 zCh&xZs^S|cxyeqMf%I3E81OX$6UpPQAbVZ0Gcl1Mj%=Vkt&vr~LXIR!0Q9TGW)hVZ zf9Iwff7pbRCp%#PYoFEC!hU>XZb$@{Bd(QWS^W$KR8ar>7OhwKHy&>WAI)!_NZ#~Y z(w9CPgl;IN@#VZEL4WSawm!K-m>jOisI+?ulJ~Ty~}-4&)6VN{^`)c zPClrU`%a5+?qk_;|MyAaSAp^kM*8ce6QdvXHpse-#fwfgB!5?fhC(ah zTPaZi)dNm8Mud!xVWDU&9Gx@?jP>zzc;in@g!LaT2ncT+V-TZ#&0scUboMCLos<%! 
zfdTlxhXY<{LTU4J?j6PCa()X!kizQirT49!zhkJ<5qg~;J%O~Wz`Osr3lr1$YZ@=+ zO(kQsG;g<0?Dzy*BEZ69G+jts{!HIuLd$&-(dg*Lj+O4EGHRf``0B$J@&I3Ep!JsE zPUh%XD}W~fD_Wz3iK0CP<~=z6lqId@+=VwA7?-_s^xGpe3>`F8qTcZlwUNrR#MklS z__~q#tutGkIW2Y+gt$@|I|hO?O|#HQQl(2WWdYs4V$SBD#ep6HMD%XuZPiZ@!(aEI@$+37UlxQ{zPj;NjB+JtUdFFom z8zGvFC*3is%qcFfPd;Pg6-X+GjyPZ-%P({w3wn*;XZy9O8U)cn(d~ZJJ#0GG>Mf)m zHYvI`i^9RdAp`~vi&CIb{APoP{0>RL(pb~8vK%pF??4E|n~L!F8y6V#I4~0Ks|$@2 z$@UiI%?XG>C5Nx3h!fF1lxZz#E1OIczSlRpa4sz?Efe2ZJ8A1$({r5^;@4bS-Fm;* z8T+Ll=H8dVcZXV{d;fWGY% z#PzGc7u%Km7up>1MXKA0)FgE>oI1P=*yQ`PQ#|MwPvP#{5p{~GGz7s{!CAhHnb#3t zuSmAuvaS}=yFxGC4y7IkgrC2@Hu*nOzmDUX|0@SA*C;D`uTDSt?l!JD^dxHTtcNr8 zR=6LwZa*=5A-~VPyhBd$Kd;cwAP$i~zM|%Y>HGD{vT!y4+lDqf>NetK)j`x!#;U=K zwe&@_bCP%nLC13yCERn?$cL0r zQ3YQbvWPfKR_(uS>I_gRXB%I@>_TmLK!+AR&RXwxt^BEX^d|P#1Z(RuOj4^0yBj+; zmJncwfrZV^hLMHwrn9`>4>uZ10Z2Y@={of!K0f$g6P%HepJmD+JWs9|8x$t>{bi_7 zL1DlaZN3Revx3u%FBnI56}y3p;nle@C%Za#vRFC2K*dw|>v^w_($T7Ll+c&lSjera zs`{p;?rQ{P8GgHdY!rL!UJ6gZW!pVI?jd%uF9Q1|{b3_w@|^c{vw=XO`&A4F0U_Gh z%L|@;ZZ0J=ZSr8$m=p|Q0}6EZhuu*YjD?;5Ph|Syxkz~*Ysn=MaU50Vlpdd zR&X5UEc5A;w*{bl#K`~@80b{rV9iYmrVa1KPA7S?C^4D}-f1ijPM7K9G{NPRt%rFBBLiHRKeFLQHqhiX`*s7mUI<9%tcu6lWu z)(kFfhj>+tBA}Z~Msv{Pjf`qUAbYPlci2$pMv}W}#~})OLs_=RvZ||HUO*?`hXas= zZN0^Bb*KBCkB8R5$Ov2{SA3|9kAcUoANxypDz{8e9Wi* zCl34&w0nF_zU20Q-TKe>_(4j;7jM&m%S{wtSQruyQhncbM~`FB$%1Ln(0mt#7ZO0| zK}x(*1L0mYwek01GrY{M9nIin(!I7FMc@$Z^`7LNZu8yRe()C32KPUYsGZ)M+LeU^ zARAh>#&0Ew|9vjLj-*Jo-xyaVjzf zIrUgn?B0d=2C0}1iNpkS=#NOS4;FxzD)rLz53}Z7urd;ei^a9)pt( z{W*R>3N#rhs*zXg796J)H+-C9IO(&UdJut;?4Mf7fYo_m06zcDY!JJSt}n49!{XmO zw4?IsXCFVmFF3vj*EzG5IJ%D8oR4o^ueLU;?Ins~WC=_Vh|c^?PJIGV?ugdvWEA4M zgG7#*>NzsciB7C3`qvy3GJ~BhLcfhBso9$uTfln9RoO|3iV7#*1cz9fXx_fbD=Hf8 z79t%oIJZyk58mEl7Cvm@+&JH)WEsS$3UHzw8~~+8txM;6ZcjPP%*}mn;Jw!|-FOJ2 zu=>unp3b{Dyv2*jNl{{*rh8y^+~5CcUfy&)vwP1UIKh4~f4@EA*)5O1szJ$&R`h?Z zYu+EM0y&eE|PwI9^|4YrTKRBJYp4Mu4+YYB!AO)u-KgcZp4;7!Cj}Jspu=qAUHL?4i^$PiM zojMhfuvC}%;?jN}S+qmcb;osDw{?J3dKK8+hs8^XJQSz&a2ye3BMv*_4T+e%v>dPm 
zs7SWFGl~@}g9!s1j^}iQb=WSy2Kk%6vk~p=jcSblWiuxpxdJ<2P{kv+lh)1m42tRJ zkyCU^msnQ{J^V&9AM-SQ?k&P9?$x&69{v*5=QWt*%w!d!3+*MrOlA8{_rBPddsOR5 zb)DFN#}~KO8g0z$70))I&Hs=>j58Cly}1yNuqRR+(xR1(1%BLGbTjSYmTUngaVRIx zdFTV4;7`;$uf3PoX;qySO9we_m+{wwm^UyWGM&rVt&hR&BCW5t7yOtnYFDfZ5R~LQ z{OUd6m-6n%-sGMV1q}Njbopr_KYHbnl@AYR_hRP%6HDW{G6G4t(>j z{e?*Ha98^yL32=;^}^)8y58HmLJZ#jW(^fi5G=LI$PV6)b8Nn!*blDx zzfE{tzGs1s#_LDQ3VU-_w!9*MJlbx?+~7GZ?`lripGyC36~h5!!<8zLMBzrjD81zx zUtq2!vG=ej&vgl)N+nVQnUL$W7)JoX|8nVK@X9;I0my03gH`3(R|5Jhb!F!*-)rBN zFRH=Ebp=G0_fshIKuMM0m!iL(=*av#S7l17paRL^bfnnQaS{L~=U#}vRDJ_XXCEU7 zSqKh>G&CvKsIPf0@Dp9Sc_0S4bX43_K#yQjfPxJ1CfetbT?kv^7+ow#TCYS-1ZBt3 z&kQ0U;U?n3*-`PEHrL}=ikVT)2|)7YpRB+@;d&kVb)G>O_vC1wndalEqaw11*P zCgg&HtOnpEi<`N>!-8?yWDJ}JuY_VZB@~y^Ra|g?o`&)>GWyq=!YU{jghXJ*+K|HS z4d{#Q5oV0KgX5BcwwVk@&AoPuy`;81c7_v7K(s3pY61^XUH~4~P=aKPcd~GlFCWQuCnjR;v6*OgW0zpW(cdcR1w^QwH(5d8`4IRlX~4?mUUk# zC_8d?1yl#VfuzDxhBtDlbl!&h}NVQgg=XlqXVT zahwx|z|eBpu7)Vd>r8;wR!aXws{YN9WMXn~F=CriJ46+cRmVJ7%bkO zZBtGp22v!`XkT|2I@yfVV%KQR>CqDaHQ5@yUGv_!h`=COXD8l-f}bezN=pUx|FQAi zpF)Lx8i)qW!xsN?{1};c=>cL1eLG*R02W*-AHgY;0najsn27rO~$sp|vps|{J7DTjpn%6*!|kN+^Py= zPUkrTJvi+u;Vu0IGVpM=AK!0F$3Kh??wVgVOdXrTs=8eZ!sAxF~|v zoGe0+#Rkn(=Q?#pAXUcN2KVbpiQpu=*zG?R4{Dhsx|MTIv2+GR4E!phcnIJ$QPj*f zz3vd7=uKH0Z7zLBREk(a^FlBmx2-OuMUwx&6M{H2SDJw=45pDcV7*5vp-Gj*ygsfrC{!yc2}lu@Y8$tqC71q z5o%?jGG!bmmGPE_Jq;xho|1K*q7sC-vIwG)5t=-*wF>90GoSbOgPUDT86NXQ) zW+y)L&=91W0OnlC2cdEO79FsU4PJ&l!U#xB!B~&q4_|ziYz)xmvB?xhEE4;X{3*{X;jFv3V)dOFMyeGIuea2;{b5!CkqUN1k>l){w%Ein* zAv=$Gv`k&V(4_q3J?cg5@V3DSkJCW_U25%H{`zi*ulxbsPg?Y>sm;JzOtLl2*>S5C zRk{L}*UYczfl8Bl;k6GLB3Tsm(WxLiLHvw6vVo`~uJ*B*3^p~R*n5qVY@@sTV!p&_ z$Ij}6mp6;Bqg~yJTYs5l~Li9N)}hU*qQX8P&u{ zE?UcOcH}r#)L6*DEz8QVkEvtJx%JDq zsFzqbzG@$<*&1l7wUgiI$~q0<_rQFn0>bd!cW=P5mhe3HB2iT0pi-9P!ya4g;T`ra zH~C^6HO_xe9@1Zhv}=?<2N#S0$T^|8F|-v_075w)W|O{V@3Yk%isbwwCdlLv8?zQq zX$<-K7(?tGR?`%CBQt1tr}rrX78q->LJkYo^E zs9Gc7D?|cJG>q_5aLmDXt>1F5v!Hv|5#stp{}_iIa?&+%UOfv 
zl!C2^h@}JH{Wxk$F2k99Wo>`UCkpELH6&!)6k5$HA49=ivdX7Kj5?P_<*pLKV&8(nbR&X+^!nO+45 z|2R(UbN|~05R2YWd{HYl+BCiAo*IfFI3O&A7X?fT(XSAgEKQ=&(%-zQ!2h^z4Xw=p zA}NwMZlx3`JOr$-Df{TB^&;q&-Iey{g6zAOa??2?s^{(KzUA}zp^b1a(4zToW7La7 z`)WE_$IY+)W${^HS?%Crb(|ZUOvI*~|7sA~6-PArc9)*Q$B|7sY-170H z;9PxCJ{bv;KA;A3;ZvuYwcI$ZC4ipA315@8BdVavkOVz`(ElF4va>iE{jXqhAgYR- zvY(;l@eR3FZis3B0jc%kEq9CrFs@`uR?_R8DNL(g=t@6EMoW>YTou(3ISo1`@m9r+ zSHJ7DP80O18xDxy+;hSagnd0vItIZf@H^URIO3&;7$a@b<$~lgD|u_qB?MkW4hO>Y zB6wtiJ2FnJbl2Ey{&IREiu9LyhWr-FF4VdHnqH;g<=C>!G9JEpjN?^4^+xd}x%`2{1x|`g|HU9QSAr`OCiNVvYoYO@pJdbD@0ZkH zjU-8qumA$oF9~dUqRz7)jDT0CXu>zc7W+Qa-~;y2T>`JnHACcxT5jBZ&YShKmD#`i z?%UT>dK*n{QB<9dV-=6@MDAuwr@wT|t_;t4xB6mkx>mMj;&?UBr$b0coClIjl`iMq z;1lkKW1aF}X>3?*>vCyVyurt8J*tXoQDhR+h?CM5iaPCA9~KDdAs*x|93rAh8Y%<& zYBUb5^IOQus@=kzh$f*I-SYN~S>JAhGMvdapQE@3Oc^LG-knlEEF@OhOTxa{`=)1v?0sWBst@W~cAd#(}@jj&6Ox2AVs~MA7007NGHv zbOI!XZ1E3PU`n+C(AXVEpvMPQGK|${Ia1Jtyq7i6+CgbNgeL@R#9khTX@5w7nYvVH zeS!2kQ<=J;Pxi6$J$p6B^~DaOgsht9-2T|h!~hN%8O6*wtHdF}#PiI;7m%DLLD-QI zy^poKa%}yWikghFi-LL*yFFI8(k^d$Jh8>(gTr#X?0WEwZw4y36_{xIy>%y9!%V5|>Gt$^ zOB=}VPjeh%!%g0L?c$1r%`O00bGM%4La5rNlfly$i5mavAqHVOmcnm*vjNx~=!GG0|-nF8`F2!&6 zG5K(LX1^Y6PJGAFp@rW~)vs>(J}TH|fpA=`Bp#k*Vh4l->KvhylAO!+q9!Tq<4VqB_67yx#w1Yc24wu5jDaA5=6kYNJI`rK#755vXL#1WDtN zMHpu52`fH{EfWD)?Mjm+Kjih!IfjNHJ;h z<+Ll#HHr2{^E-3j61sLS2sAI*@{6=zWLbAVGLjPb;{Wdb;Tl>14uaL#UKhPLLluf3 ze)h~vXC87Xu=kY7 zMhRsHpgfqXLLFdAxI)69vlp1dauEsblOJz3^6-ReCa@9s5(B^wK>EC4CsCMi*AwgM3CNoab3R~Lg zSaRvzl+|};amJrhs|{60O~EP9c7zJx>lFq6OG8Gmpj=bWtoK<1kbeG%pi70-=izkp z|5aCj7wQw^?qTV<7^&BXWhD2x`pN0SUew3e%t?fe{W7j!$B=%T9Wh>js(q8% ze{wk{8`-Zil@)2(jK9`Zze>*ojTyg`3UL!_$FZVCJnJU(<+2UK)1F%Bfrad=tEyyC z?3S$`+1D?^e{GH`xQfe1Dj$VgjF@XRAB=HHXF@*KkDT&5o~=dyy8St^z#)2hjm@Gr z$zWReX792Mx&sMl5@bsn-rr%=+;RWdweO!B5va=kMwm0AXkXTJ=%jyNr;$-aQ~UG< z-o@_qTzlK9shm?5fS_BW#~#C{0kJx zvTSMzw@Tm$_q6WffKvG5urGS2G`tGJaPgN0Y#g6pfF$)l?nos`0T}j;P?5hj$NYL5 zoH&UBILjWDCVY-l%WRtA)*{l5VhxZoB!t+&d~}Kq*dej$kApHR8uku*a%#rU4s$z2 
z!UYE#y70i6>GDR0=GR9d-kdujD?OmFk@HFdXnSDlfV&u(HQVLeFWHIZ$Y1P z)#QeHdk;eY!}5WH=jIo%C4EN-zw3CwzON7og77nXF93xteuaAksLj+t z*Pf}Gi9G$JHpwiMV2uKVj#o^7cLSti@!7Iniy3pGXz8L39eo36#S{f z9Po&c!QlRoPd9}!nuyiaoty40f&vT2&$Tsuwa5GxbC6Ls#mwU_aQ6fX>BLeU|58o% zd$er54b9x3ssCP$)?6<;ZA!KXP|E*qW~$xJk87bvC?OUcWq ze!BHFU3nf7-Yam-gYVHR_9Of5IIYc>bIEC~#Q@(GxNTN80+O8xYI&7|S<2*cxB|m) zgLEa0_9k>WHfNRw1#OHNeZXyNOg-d8+v$b}v0BI2p$%n)VvTts&Hd}C%hUFsH^Lk& z;@^IE|9sCJ7?3`DQ3Iu$0xb8CkOHMFb4R2f>N^aEWH$^sx2w5)1|hBsQzpTjQ5-Hx zqw2>g^aRhePVCyq2uS&hWNn(IQ_hNHGtFLy)N2lBSlk>h?vQf1ILXS0AhCK~t#y|T z#%AF8%a>wh8yrcuQO7i%0qICXY}!=LBy#TfG~2wLPVA($d$TW=2&_v4S(K_~!w3AP zlh7@M7~G-N#qjcL6T7j&>o+T)|GU=lyh zcuriIV6|}GnM_Kihnd3ARFJ|4%c797+g-VNo9wOEGKPbkLj<74baUrMLZx3u8R@X0 zqN}Q=Av2T)NZoxyrLnt>+t3{LbVsjbJ2>#`qCkqR43Uc< zC*(!Ty^{RSm}PoZ`;?=YK5m;zB}{32oT4XhEKqVK{~f*@B^+Fc-R5}&YWW4=SZ4u+ zsWLP>9cJ4CvKN#F)*9SN9`9ZWrmy%1%zXDy9;ak5j4jQrl~*f@g;2JM8(rOeVxPRh;uL&p{WnG^Q@2RJZu`LYHZKtsmO49L0PvXz2=E6 z+z(ZZ?{9lbS(IAx(yo=USa(`2zdY8se5v2-G1Q-@#Zh+C#)kMAyhg2~w&>e+Gg%LE zme#s%1btpkK7u2vf_dA=>;p#&2NU40g(vb!A-WEk;;`gK-W8{tYKo2BLl+dx-os(; zA{3TtUHvSBQoS{&#E!a)Q!P&MXg9fCA;N9PF}YpGC3`wEf2i|3>^U&Nb@sZJ(<9z2 zWB|Zqy2Mo3H$hdxp!6#xcEwX-nT4&gu@cU=<`fPMyh+Oie0EBS-6$vo`MUCo?JuV) zf1sFBopF;00qf~wplK?8#sj&>fwE#Bnxfy1mS;q`qiskjJkOmgbyVWeqM|-Hm+H^X zl_1htC|vH4)UsaP3BBj9rcdz`bl#b^H~3&^R`Y{_h2Ins`jsQ+Q2$Lqhw+YcYmfJm z{METOQIy7~OB7b>QA6d!dD1L1LgpU-FYhdi|4TnX9$M||3qD&Z2B0P(?N0RnSH}-z zUtlAl=pBP50CCu6nu}Vxo*xLNSDnZ z4m5_95E-~|J-GIh@=IZJ4mJ){M|1U|!X+l4s#ww7X7iYmI;#;C3KQE^Ve;CBzt$I} zeBpAl9$ocsUD)Hw0ofe?KXRO*Rw!jl8t!rJgu&miwc2_=`fUR}D~%+m(Y~k7uF4qG zV0}X&5a&GVScblF82i?hGUNK)V*l6-Yf5THj?_Bgha~IIAitB7rtK*I7oowPeosej za{agGD{Nfh=N`T0{cr;SGDXQW6w9D_j8S!Xo!q}#*kwKHk0P}Ifz0M)NPyyQL~X8K?#bu&pLNlxua(A+LuM+3@F#@ok(1O(R)DM?)1 z-rmy9_it-;In=CVMjG{Esh@x*FiPIMcFFvNAkC*Q{@7nBGf9fa#ByPSGQ7<-m|92h z=#W+b4JdMW2Y|Ew@Gw|3H5>9t-sW+zebSc6Bd&1L9mYJB!!?|PTO!Jp-ySm>lko<& zcHneMlBph80hRjk|6{4a+FF^+))6!rnO;U3+S(Y}}#r8h}6oCi!0ue0e3 
zGZ)EK7CLx{73#v7QN%d00&mfYjW|qPw--aS^Xw*fqUAEZVA}YtT8;*9EwExm)NLq8#tVM=s;vGe)e2ox4LwS0=gGgkYP%CblF*RLc+G!>vorhDH@fCPUQ~r!m={?EG>5kLGe%P979|+;8tFLt`gkN&` z2Bbg$tp)VNpJ`ALH1X9H);+P3^~CbNDA%!f=_;+A+5)T)&3rxw6MM| zz|{F~ba6d+U}i{&1V;EgyXHEDBEmYwz;PQ0LdX?}sJV0{pJE3?0g}B&sROE1hHO={ zY336o0io~*P_nMkn`HvL6d6sbeb^0N122}{@P zt|69Xnek+4Yv-^KtY1^kL7+Anv`jvn1rJ$U>RztAaCO7t06~H zo&RWFLS@6Q?YYZGeDpkUst~^gT|sqb>Tl+GJ5@~spgtH1+~G;>&?@@RYI%Yfd*tGl zF#9v?NbujgN@8UE&2;D#NVJ~v;dVM)Vu7W2=G$k=2SdcqzpmK_La!CYtDD$F!{!xb z0!t0MUu5!8Fq}|iU)RCbhAVQ_2RCnO+;}vwPTYe|v>r$sh{H};N>@oswOMJjVi_0X zm4lo1)qO-cG5;Nqos6>7Nj-X%Vi5dXB!D5{P)pE+0INaGDVZ!NzyA(PcA!?KiwFBf zQ_qL7QMi_9_um3Ns<;cI<8w+*@+XCc@<$b0Zy^VIEghS_E*Ru2uTd;qc6NBfY#qu_ zaYIhnX&f_-u{N2!(w8JODN9_6in!HB4W7jGcD5pVj|BFmx0rAbjH9#%8nK(h&b4WFPdUM8&csJXVnaO+4gw2Yx?&-)~I> zE>?@0ddQFRezVGef>fdh(BxuiK;>^SKqHML%Z=74+@T_KP!Q9jRgV{3lg&oh2hA`P zuTr0X#oW|A00JFHK-|%+=W*&?T18GSu6H7SxkU|3a^xOddsF|?PrvSQ_U%1NN!f6{xiV#OH*pu+#ZDGf8G!QxwQR!D7=^qTZ{)Vx$%bZd*+9)+E=0UFKO zH`M;P5jt&E%lTqhsPED-8{FYgZ<})A5HFHxfqXrTUtzv*hjlj_G!_!q|WKM`Kqt0*$rIL2RP5jy z{U0zly0bFnKL{eV#p9@dhOu(wRURU`ElP4#6qnNBfE!f5=!ZLTrf-(}fbc%sm0({t zFeV_7R_e3v-i%%#f;jOOL0X$)@D!Ii*k2#T&o?n`lfHpE~7Vdz%0?IZ3x!(oyY%2L+g@ zco>&lzBXd8c{Qn=hz#S#Jf)}*^lPuj1Q&m4I9EZ^u@~N-QNGWsPiu*Nw-F@EjyxLh zpWO=mGV@6=nY68aMP-AeZ>Aj1LaRicRAueLpMXQ-An|U(?3uU~Kj2ZfkSX--oCs1TE(L8J{xtC^g&f zi9QD!579Aago4yjl^L;^2y59Bf}QrONRb16QryfqcBF4Sr0S}61}!5_TcTOG57id1 zUqG2gMft2@cRqQSApY&1@HtTYvK>dzyu7{9?>eqtv-gGJ7)h^d%Tw-JXzm3Qh=@=t z!4Y(dw<=O>8tARRG$fc ztFB!weZ;R$R~-@Wmn%nFh@CfkT3s|y;N*Q1m8D>kHhN`3=6yj(>4hz0A9a_~`GaAB z_A_!Suga!Jwgqs{15$;UCwgDmI|qtYO-dW_n(%*G-D=~6llFozQQ~wko$q%~#aflj zqX7Y`Qv!~s@^7E;l1)SMU=r|08xBssr{q%uhhT9C8NMm(Q;WQLKnvYfojVv*p~O zoPtk0RM)Fa31diQ`V5&k-ROPslX20GGyI3LP1U)4tc@v7Q6NU8x#wVWP%>5H9ny-7h+fA_!hZ%x*dp?eD>D_q4M_@!*pc zI3UlagZFn_|9-#Sawqp^W|(1rSj)3oAUZoqy@XNU_mj9#HAJAnIpa_EBw$A;l z#)DU_Fa2Z`caY6Kt;I3p|8oI;W|jFn>$=qV;EqB+oV)QPLE^OVERtb!Z?M@j+hkL! 
zS6Csd`A$MtsAr+5;wSg+FHm8y`mN^gr_u?iquWdaWi|~}>Vl~XP5KzKU!f;BQCi3b zTEqX3rf-angz37CCX-}h+cqZV#J0_eZQHhO+qP}nwv*fQy!ZRxYxU}`I#v6ejjBA> zS}RuS*UR=~-A5tAQD~q}^)zT3(xC14a2Q~>-7&!PPww~#G(h=omC*`nV*xdZPz8Iq z-P8X;V%Am9P~La5hF(g|#t`H_Ta-}Sg=Q;})=X+z^VpF6>(%1sW9e-)86JTjA8h_F z>Vu&|UbSA;9xm&I3LGt&-M@y@PB-Po`M-6frbJ7>oGl4{!D5ey8U3%}5$4UJF{CA` z!x{_}g$e>fz&1&7U`+oy!|3eYT4cy>+V`T?9bxMh%rxEuA;KeHN4`kpI}(TgBh8Bw z8pKR_2x*9b`{z%vM~Elk1WgE8&3A)y4^qI2h(rS0lQxafUwj!SEEMLL$Uy& z4LtHyCHmjVlmAiQkTt|^L)1qDZpbY)#t!yXbU-87jE3S0(7+30eWb7(LDYzTn=`R? z@bq*)nucPrEG3YrB$V^@nVFZ~U7rZ;ZUh4=v$5$EX92=0?Z>qJpEgItdb?->ubyma z4uzwY(RY)hTXty%P#51RpEeh^bhre~q*X09_baWQ#bNP`xLW432W0|ICy3NTP={Br zyyF=zv{qq<%5G9@ZjVpX3Q472d6)wqo?{AJh4m{z>`#78D$H=Bg3G&;gIrFWzGLQ* zthA#x6}TU|bq1RMaJp6O;FJZD+i%6X*$}z>4C{E+!=-R>=@`}hQF1dq$#O%Y(q1#g z=Gr{9&62~$4H>I*L4xUMGKsPxb!fcvCOR}(WF30s$(dhMoh7p>`FjubGHM&-!#2C& z;uW~Sm3Rw+1C*DKRpKj${anU~+Ci2)X4M580`TP>m?y$ zbCvUh+F$(bF298A+OaQ|SqF(qwTSIwe{eq;Q2YwdAyTPQLS>rR{H6jyI=k3+f@~FDAVJjd_A_2XPDR)g;P2B4;aF~sB9CZ^illX3mvem&ncW_a+@kd zvfJW(-azC}gzdD&oxmf^YB7gVTNwIrU+t7fX|MvLN2iaSPZ49-w3TEa)AIUDXHd&I z%31Z{5Fny|$El%HjjKmDuA&HncV^jw8PARysvqm!5E_FZ*omkg{-xI7zKl(~>DD+= zS_h~8NP8OfxYy^7e~DuBN0Ih)48v?oNuMENL{T5A{UU;p&+d-33?rGGn9(1t*LN7 zXt+0;69O=(dmT4RcplfQ`7!(ZIQWlM25YV9Jt?;HU9G^;Qj~f13t^L`J6m-!^?>Lw z?PUL2%DC-Y8I&gIaFB)9U-6{_-X$Ure-|!FUuYD>BdclT8LL9DTIdN(So&P_c4}AROVb^=) zZU!E6qt3-LYnCQCi%M_rT^Fgi0TXfvKC;g_=_1}0Z{k@;LX>28UMeXz+3<>cu_%+r zdQ_(NaO1Y41qi#Wq~cn?afdrV&-Jm8zL;kEx5+wkP?$QKRyyr%-*ESt5x{h}k9d~X zva=Ds=IZcAOEIute(;--Cej#XGV0lD?QLOdH?LFZy>dl_yP3L41O$FTK=NQFRRWD4 z$75~PO{h7W={5Dzn_U@Hz_AhO1mKC-0SFD-1kwrOAf+q-FibQgLEh79qqmj{iZMu_ z0i$9VdQwFbBI!w{h;SjT;jjE&^!`M&quw+sdHDb%`@qWzX;^R_R6Ud+n_X|x6mCEpzffnEXqZS*TG(dRzpW%ZlOs~VEg)DhgsM<+}HoW(LCXl;4 z=*2wNwU$ZbJ%L&}S5m_v^0`_$rE3$MF@`4a*BPg9PP!3xJM|Ne$qMn=h zqmL2)N#{e0JM}9tJwL~r_uid9YDV+#ap5FQsK9_6xk^%>Oeda`|N)>WL? 
z=5Ws?4aNrO1Uqi1|5(^y1BVB_q;!?amVU~K{!Fhu(od3AxCkvw!#Y65HvDWLn)4w;xJNNQ zJD$UzEu=R<`dWRlc&B@_JCh>L?$8Miu$GM)EbE@TwmxGY&8BWu_2yPuT>n^5^nZ5r z>ChRMm0)y6B*>EJl2jjycKMrE@!J$Lo9Ujt#$Cp?OQPwP*By&+vpx)?DT4lx*@E=M zy%zwbNKyU$v#$$N4v;Y8Wt$ft0%}+;C&w3h_zE3ROEe-hzLe4()WSHtiza$8f&PeqqIAG$_xSHWL^KpX|)?uAZQ`l0j<62;PfM-KN+Y_7T^LCo1Rk+#v zz5V5h)vBK<`}4O!j=BW;Du47aT}o{O{(U`^QbzG#gpnaDh>i{-Cf18NqTkS0CSAwN zguq;&^^+j3SpgLZan?4}_tBS@ z$PEGTm>D3&?-K_2q^_naQJ8yKc}ZE^>vb78U$1o@82|&#GtHNjC2~{o3$~o?o!|6) z@~X|+DFfaF+|>e||7V12`Q+Tzc^#t$gq^d!0X$^!*cwlcl+AO(8-@|OTuBvkh#nbFUnm%|ce#E7rQv4rl<>1`!pfh2F(VYO#o zm{s6G0N-j?^j=c6I2*XzdP&+rtU`#Zy+8bNEVXitrn!^HZy>Y`cfI^+;X#aQ-XC&qx$@W)8f@ms6mLY`-5&SO zE8Qrcu(5YMy$r*h@X_oiKwJ+yO(e^QefM<3?O*+LWHNV?t+Fr7e;%}P;mz>D#6Ptz zqVa#fw@Y6Kps`w(KH}W(Gz}Vt*ouFb$(}ma+Y;{99n`&wt$~%9=O`S)M#!(XOigxX zlUiH3J{9%=^5rW&mlOMIm-RP8HBpbyY7B;}Lg@qEcXutLs~k~)2Bh9RpEf)6UzeHn z0B76sf#OkKE!8Rz{?4pOQ@9V<&`=1SOMh0ix&(WTR0j6|VH7++HDRgaV>@q31q3h( zybmRq#R^+EmjZqO6mw$G&r-K$aDF9%W~)5591vjSJ$;~nwxX6D2qW!T^!A;MTbTR# zX$}nPM0kJMh$gv+yN%KVaeQUdo9$3p2zvQKAP{NZ_k>zOS|FQVcC=j#?=QtiMMCp3 zHePaBb1jn032tUTIY2HtzjN*{gnQzC&Z9qhu+3-o+Wzu6Y{{1B1gTcU{I#-M^+m1} zbByOx@DQEaaO+Ln8I68uz=iab_dxN9^;{_cPcXoZL!;sK@6O1weJotwfA%_*xdp)X zDo3TaJtdKD)5>xnAhx4Kx;`@6*J2c+2wc|}xsJ~Y=iMZ%?k2)lx}RRJ#owFiD!@QC zJAZ!edEA-5 z`l~-NaNkDzsp7s{S^yKe+lOK7`lUr6pbUp0s@vTAZ?DR728NR09i^h8C7d580Kr4Z zW%NZ4e{=iN;z@XGtLwz%WVItRJV?!SSGvl>_4(DN{Yt0(bHGI`3wu*wAZ-)(0|byE zb|#eO!HedEFaH$xPwomdL^0eHv;=$m31C83T`|r4#|!?Ri3@0eFNy%Vgj|nbyV$SV zSk51t{`c-Sjfj-sZH{1@WfQ!eRkqoN?K|AC#Y}-JoljsH?{wK}c9FO(*MBsBJ!ibQBJ{<)V%xmZFqvk2Cc*Q+wV&XEVf1^5YhnVJNSO00^x+N6 zOz91z+L{@hFmg4@wqL)@kDNT@9@)b|A_i&rks%3?1x53kfH>Fop(nYRKzw4hMCJ>H zf3w8RbM8;kh2$b$Keus*u%g*;Fd}GA)DMz+w>$r;O2f22qac-8x^%LgEvB=6SS?}u ztcV{ZN3$k~jNUi7<+s#{fMYzQ%hJ&lJ>1eDPBzTwx+xUdjS+yuEf@YrEFB%A=|f^y zku3o+o89^VQ0rTDY!&%C#T!zI-1_+=pA&V3XrMkJR;A(IO-;8JtyrG0)Q~3eEo6c$LoV2abe@)^f{3y-Ky}*RiL|_yCk9 zDU6D?0}Vjh>Ac*AiR6DJyBqfZ_o9z>GwUo!vKR;gJ#TZ}*7A|_G#zJiqs56rEj~42 zF%yU%kl|KFDx4s4k1WNTnQo8zh;8>zW9S 
zm@2hjRbgWeO=U_qm4{0{5WbAFd}T3h(-K1{wFt`NjVF^rg=d(kqUHUR4o{3kW{(5D zTF!@32Ca}_KF3_&scaWw(JD+C3*^yCp_#gg6PhXk8#~mq*NXS}f7a!*kPcl5tQh)H z-C1eSr8)13JzhYMsnw$gZ%+ExZ6A%6_ZOD4$b)JVOpjnmQk~(KgTTyY8SYN%X3O^x zuAR$HwwMBd%3Qo`E%0GAz)m6ppw%5_RSQGK?8>S0!GIYaQ2hoFC-*~&+R*fCCzxRF zrfZZdDLmAmF1Ce&`*ZuP|D~Y44~w@E((`RBnHL9(MY7t&HLCaJsFHVR^LgNLhF(uE zDNLJgs#3bRkM6V2mP&alE27%-Q+mU}9=Vyi;*V;v*q{wyA6|#9=`96#;ZN;`)MN*g zmO?4e#+VOYwZ3F_&UtgDc$S0EGu1b_V_f3mB4&$>MXE0E|HEbGMqXn-76{plI8wTe;d#b zHwYL=J|migYuXROxSWu|?KAwmB4dWRIHk;8Xe&zeBA^sPri% z-QL%bb7Kf#{DwMLIYjW0O8>juhKdxb)qbSn$>i+hggHN7F4cm}d2*Bnu+HYsUbPv! zp*RkgQ25n`x%Be}eaY1VrPfKNTI|5<4NtjFlGPU|-S?c`a+s{b+!F%ZiEM)&IsAQt&*i5iqJQuBGk&7Ce)XA;aiSLGP6v)uRwxw_fkJ;5>y}n zBQxVc4?3Rj9xfVZ15}+qfSR(L8Km7eLm7?5pTnqafI-a45Ck_6D@_;`7@)E%MDPV| z73-pki;E-mrox*jEbC1%OwagUsQkrR- zl7E`tI@?zFjzI%_n=9itUC&>A>MDAKx6j|_@(8UJs9v{d+M(I5`w}XR6WqYhThBEi zn#xFcfM}h)kbb=OvqKKMkG1wM47}Hsv&ioS#Y1nj_nq%AJhZTp!Q2y=-O<6#A;9S> z2S|MggsG=(vh2T7+M1{=Gn;&7REJAuu0p}qcH-tXU^&>Q5vY>@ylQ4tEy&kPFyXt| zjwY)>cOSD=!f8LFsexKJAmREeNmT8q83K_HznG*Vr5k7+e0B;H4G zbd+p1_GTb0?WP^c)<<|XD@q8C05E>pB#-2et3ll}AVrK~snY3U4it{eu>vYKd#2QZ zeK?&XDLjN>Fh$>2kHag+85d9zNM%1S(t`U7)Z|xDH9hIr^it>dtrhuQ!epL*s-{|f z)$V@^li@s_Rj`sDc|l<%lex8+gOmD+WIyq9eTZeuQ%OMs=S9-f7~9pF7W`i~ZLD)a zpq&!Y%9YCzlNLhY7!1K^^%sVRhPoz=MYO%VCQ(r1f$%{8u^#_*qa+L&WN6PrJO4V?LW7y!x*&9- z{VQlJ3~DkdN;a`Vd_FKDWwsveYyl37p9-*Ye1RXj#=_VBc5Y)(8WF<2LPS6TGSMI0 zr7rz0_HYErCh$JT-7oAf569n1>k2m&Qf}J-kIyp}ixm(n3u>(U*GQ|%RyFwmE&1wv z-?)fG*lsKp$C``1RkYoCM{?ZDz2pdj>oy@j(mj(z@)2Ekb^MMnDfr=Bt;h7x zg+Pk*U%0b2XQK{MTgKhX_x{v}Mt>11JGLZn0Ox@XuCH8+PmBTS`+(UWHX$^L^&ku{ zz?Ei$LIn$yLkdYCNLrnSA7tLZY1ZV_qG%h4MyFs!xyVPO&FFW46`3~o>qoh_!KT@% zPm69znTgN#&1<^N$Ud$H7kgT-{lc&8Kv2AQ^vjkBa*;a-eV7N9cffM@>!K+_5*V$heb77-Jczp< zLpUvCCn~Wj%7SXJ+D0l#xz<6d?^vV*U)TCW=jJ3$9K^PLz(OMAs8k7ULdcw=oo~bIS^S0&^=_wn%TScQ-F6rgAkIc4DY=w zrz1AtO|h0XfcV*tSaW2s#0aLp?7{H7+;OYeaTHmM!NhK8N+Mh^%E)S{VAfOsj&X=Q z1K$oUn={N{k`hkDpDbe8dN5{R85%^oHyo+auh`n$c^~Pr&be(e#;2E)4mrrevmhFM 
zs*vr2zojlYe2k#B=$WfFWukVt77w3~LqG$of#%rCKn?EZLoFz@qYpkFB`zua1uU;J-)7;KG&D{yI`@;I>T4Q zQuAot2{Kg0g}=-|ERB}MO#Uk-#0%7WfxZrTT-ob-ln3~y=GvmN zNp#f=za7q2mKJa3grkZB(FG#|}|MuY8N9gj#fcA54Wq}DX@8e&|YTmMrnR^JOkG1e^2zom_% z#mL2Nf9*?zZIn*36poPd4%uWyYoaF9(##iZFt`J-`kiB@))cf1ZQm~u2?JbJ?SAn5 zMwPJ#2qYMIc@tC3AA>u@%+$nN#^#@KiQ;t@y4-X7T*4u%`8kwKmx`l$?f^ zjrnRJYI9hRcLJl}q-xLsF_9$;6i7k%Q-d5Bqs)G8zo7Us758o%_!&JAX7X_l7Jyl& z8|qE!B0?R9DZ&-h3KAIvFNwt-o8lW;@X)TTy~NWplqp*$@LEr+JpX5S1OU1A8|&sx zP@t6>J=~cchSJ@&ZuD#esu0)3vu8T>v+SBGbx;x*?{DeoRfy#SbB|D z{=1^u4z_cHDMq1*;9z<@bmbxCc88F4WUJY+T8=E5l^IxRC^&wibk5psN%5q{I7`*g zM#`&$70Jb#0@-nr)ypAVI6F~R$W@Bkh{vHMugUzMj^b3cn@Tg-0OCOBYKi_&k;O;= zxWx*Ytz~DzG73d)da|kU6Ibwq!7)@SI}ahQWjQU!Afg z{k*@|>bObyh5$TUG%K=qHvVN8X?|cM3EsB31t{R*{~TGt23!Gt=abc?V8lq70WLy; z1h>xnGvu~6NpXw&{z*ru^P!<`5nQ#5BN)jek9vLcCoq@^M)|#(Kd?l_`iaPSao`e_ z34G36Y!*a$dWNM&!*t?i0=ZLFi|HL@2?7$yA@V7SDVSXyuUy9~v1|v#d&Q~i=;cK_ zSpGbLEPRTJ%VklsxFTG~uPl*VrLxSkbTHvy{I;?9A1m|3;+O84uRyH#} z9YTofk1r!l2RCtNDR!9xEe2u9BnNiGf%2m?I!%R~;s_X|WK!~$pZ|ZGiFrdB54Ai6 zG?}e=Z;&)&gqmz`mb_uVc7!{Ngn9ucGl8Oq5RvgwVRED5FG@@$vz{e+$1b&S3>q_2 z+HgzvIXd;Ho4969WSxPtUXm?TC4s-t2-RXOD66cF^W-YN-39)OwNi*EWHSh*jRRN#TJ(I^C!-Tffv?fxt|20ByYAH1hSh& zOb?Zr7DStnLqy64gGvm+Ava+oX2+Kp6=7F48BWL`%RoVhy0OARrzq1%#3xCFq z6bjAPZ3x;$+F17ITsp>U4m7sNz%jnCaEAw*BZXC_`<%meYcf@A@Z%T3b9LV`5PCFO z(OzHgxv%n=pxP08SH!|{U%bj{mTy8PcY(GRZy5gS~c+0 z4&u)xA3~6<8&1S1B0R{?l3WuokdnfU13bYBdvl0bOj*-XK8u4<5PUxFHev>$nKE!~ zJ!%jZK{TXxLvUAek$4JlJQl;tjxc+{C5tBxR?-B06!du& zP9;I&Jf2c`i=sX4SZ**=XzIjKS#6zpZPbJtdJQ_-HHwY*(yG!=V$ZneLRuQBA|L9Z z0_6=a1BY_Etb%NC%p)uGDyX2hhF97D?%mp>mqMLapb!WV?qm9Y{en9I;G||`ZuI(} z@={f=fkI~vCYKKBbJz|VrrV13N{(FCPFoB88a`m@K$nYJIl`gQ4PFn9iiPSaiWX#e zpj>!NZjc<94`&4l`o3+V@TV!0dxRd??MYlXohnc-e{x$Cu1U{fkSZh$65Qb~jJLW| zQvw3?wWf=+9LRcvG*&bZNaGf=uxu`z+?D2eo=1bFlyb$zkCaUqg>Q13S@!;SJR>Hy z4kal^j*c=B(RxN#ZDg*tO^sZOYvE@yeGR0092eNwFJXqOA}&thTI~&dn(-nU7E3!l z#WR;o97;EW_@>YNwP(-^RoIr8wOv()mJy@K+Jtz*sxnRO5Q0S4DwDG8MIxwh;5bB9 z-$2N{!+^~nW?@0SL&Aw}wsIg$8+-f&@ 
zxncZ>Y$mGMEMrtkT%tGw8%+Gkf4$3Sa~H{t|A>YRkgnWt@B; zSmJg=U|8kzlGQ*rQr`v-IoE9%;o%i#5He(lDWm;XcSIQOwVP!Lk??U{@DEf44PUkX zn3#4HX_{oU-tsA>fcvh=IC(U?SYcqjQS)SDklMJWT&P+v9fR>1b{Xu@7r*`6#p z`+By@IF8l0DMSANb`&?FAQ6-IRAx1{q9|cSQ)}_^LkpIBMiUo>y=$5=iH89(fdm{j zGy)U-^&~Mv#7)mqMC-(Wgo+OW;R1X1lpiAh!G<1OOp%8WG?Jtk1zx>>j^m!tCF1-RY`SPMub*-*q3n+&`^9mwt zIfJM6@j)rvVF{vaav?*P4cMq2Q?e#?+n3=}_GR92sTX2DWAZW!^hwM7Zys+~i_p*z zHdUv%)<v$NOdb?C1jn1M9Ysb8fIzGa#j%wt}uiB@V19wQ@cj-e+~y)MAZ|Y8uFbt=^6p zd-QOj)X{+>J zdFHT(@U->6l{w~3)JK20B4tw%3=GtTRTy(*O=r!Lk1d;?8q~6G^I|Fw!_YE(8UM&v z7^lG%osr&EQJm8JA~vRqKb&v# zXs&y0t6cyPdxil5z!u-CPw$3_PZh&>jV$e|y3;#vX`SR&iJ&#p7Zr1^IA_C*t2aLe zX;%+@c&N18w*|6V)`hha2e5YxW7n(_Q)c}d%O>^Y9}+U3vTG-2a$C{8lJmwwfKp?S zC{sh^Lg)6)I-bX)ha1eO{{)toMoS7-j{U)^(khoEZSozWf)%%?N8}0o+p8eZ=y$OK(Cnj-C}#Q3mmV88Qcnm&y5f_K z*H>IrR5#TVr`}_J^VRk`F1Ic$`1janE1%R<*r!$Frdk!yoX0c+$KACFp&ge0LK}<% z{|K(M+T+?w5Qkuo1K7tO7i-MfSVyUg{mE<9r+>RY*TvIdR&w+VNJ)!=Fn-iDBgaa} zUYQ{T?D0HwG5FboLc<$`REp2Jr>=9d)PiUP6ee+^nytg|=?8=Xrv$*Zs-bWX;G0}1 zT-CGWYqk|!5LvmLRm=NYWtR{3M@6mit>+*s0-c3|Gv<_1Iqs|{97hiRs2$@d=@l=U z%=<~v$Az-rlb`;mAX_SeosBt?usgeHgi@i;=~GDW%`xfe>JfyrQv&@hpC zLLq(9MB|==TVSQu9`}ATYl_N`8mz?%m@;QBRC6Mxsc|mmoNSm;mz-$+=f6f7-03Q9 z`1*Tx_;xp1D>yK)R}Tl9Djd!m5CQtC;|=w^@qVMOGPtYSywc^#`c=*QfzIPKg$9sA zKY{I~Xg&np9L)Xmu?HLQ1DMFcsm%Km=0xNuC1EWjr;C*SJ|B9Q$_0&m!rqvu9Z20X;sUM^bSXi))hmdAn0Kzc~1v{?z z6|*mOJvlrzb21tDzG&V z>iqBtx$aJ`a!0@c4!h$i_KPfYG%T1kvgT?g&=9S=nrBpWBD(oeRivmNh=zB{`^3ci zJU)SBW=;w)IezE@W)|V8i71KN`x?sW5<~Q$<|qeJ3l1lK0gICi!sN~DiTHl#^Jt~v zW(Ti5K>6g+$z5j{w2{L#K^3XxjXjmZeJ3d_e-6Ag zxY_j{Yg#0kq0u&a0N3r<_YmV>*(7=G=V(Q2X0gjfwo* zL?BI;>!J1DZ{7P6I$v9W{Q|sG{dF||V(s1RSXo}XoWERti}ro$0R;d4k>!pzl>K!` zhGnYbG@)>t#*~$D2- zPSOb|(R^8c%iwiD@xJW(0|BDz_m>Ua+Wa-T9}Pabza2;bM4c1YO;5hdU2O19$uV|N z_WNq`{-6jRyPXDI>&5xYOt-ebhV>`IzkOey{RqbOTtc?j?0v7~vO(}3ow4oK0}96# z#Z5T}a_opUobc*|iCyv@p=kZ6C(Swp&l zg?nLTpu+`8rQEXF#GndbIa6LG72OJn%Wmo@q3IiJEdxO9bdhHL!(BFDAry^+)r3&- zv-l+=DR6!ttl7(#u<^Hk45ZubNgDE<0$D}!#imxK3@3Wc91DKwvTv;|sKHK^ose+I 
z%cX`SYM5jf?~U9%z0Mre9NupU)ELAjo8lqUv9?VDDT;0IuL*aSx-#%q|5Rv2i}2^t+Xomn=x!Koxt;?KIw6G^Rjw)s{ZTYHqIrJR?~ z$QBhCGASO_GlVeoe2nUdrYYue6HlA!?2R%pTuLm$p9IxXwe#OzCX5TAvH~#YY;cZ{ zG0!N=43V}?HTChxZv1{@@1|9#oD1? zzfQ09o6{#1PVar5zN}l1R1IEy05ey}9_y}m0T|!$iwk70r%Ag0@F*Tck?tLc0Qj53 zY7ab}uEY127k1X)qJ?EpE5$bMJcq@5B15jn+Y6L7?6(2Ny9WT;IbX*G43_f(-{XD> zZ+E=e0}suj^GEZhmkiM-&%4FQ07^Q`RE0_Bv*3dp^Va*fW{Q^kj;{#TGSE)5*Jpy{ z!QDT;@Z&Z+bKOmMf7qEjU|Q$i@?)P@C-D0-T4l%c`twJ}&a22nFC(K%xNHX)~u_4{iZg ztU(tiO)I2zrWV_r!b4A&Qoa%>{{oTenU(a)6mm)Vn^p4LXZito_m-VFVYMSAdW+E` z^(@q|qknnvel%bcrMRHf@*Rx7+7wg@%p9wN>A{wY>pu~K5b=q^!?M*@Q)2sUAY2pd zJ+?_#D^(C;1`D1ql};*a5^U|aE^eqH=hcvn3{vkQEh+|Fh_x<;jwiXNiMl?=3~`>I zju7eu_9mFShqUh7UHh-&gCJsTt6B+7)E@sRk%@ocGX1P{8Mu*+&9aE;a7qEx(kbKr%!CLLdB%XX30`r+r3JSJ# zfRul2t^9UbgRXUf>g3=N!h#x=BLRX>g&qNvO7Q=&=d^Ze1QU}qy16#b8cM+LSM|BJ zZbkE@^qg3sPbQLQoj|(ZpDZw3IL9Z^ZKM|)?8_}(uJh1a7#I`xDm^IM_=^XvgD)4=k>=npIHd9f$@fTPbs-{3Hxq@NV;;*GR4AJ4 zUo3YMavP9-qxzlXN0qkppOv*ixa(~VViqdsM;9SFof*a z)21_M+1QsBbjqdT3XpCt);BG{Hr>|3ru!YKUiGgP#17vf!fVGxv-q@gy4>-99wY;> z5Nt;zGNV$*?IM%$l*B^sOH}kyjUDOHENdgs0@Se6frb}ySMJ=56TQlI|Z zEVJLL>0i5MZ+xoH?nK=!_S!SQ$)9$yaQwh^ZU@u3Z=oBE=J~93nREUk)qZb#RzA-D z^4AG9{wl8CdSc0*JZXd7zG-|0jTsaIvN;%G?D~jRd93Q3Mq4IKoZhg3ju-2@n**up ze6ni!nEPe(YJ{fCmAaFU*9pAbdHcP4pUoTP-1V+=i=y*s|1#|Sb)ku%p#5TA-EW`$ zEFK{Ys%ihYc{`}%fFYfYyKi~@LeF#4E2C#=>4*0{P1ldMaUSLJkl^G-{MX}M!Rwdz z6WX`W{S>EMv{hRw>Z z_4b28%XlW;TCCI0rC~#1bx2+Ba7_^3o~?n~ea|i@Q`-R0pJumq%fR`wK;z>*p-5D! zYL%>vw6w6cJA5>4&=#;{GF1nX4=^L#?COMvs5cr_zC-7Mc{zw zrB}ig&+xIFpy`4A@7DXoK+2G7N0tnQYJ@1K9Xe_%<-Npqrka#y1StbKuS#z6hJJ?k zGqzuO! 
zAW?^Ruh;MJ1%xd10%~le{4Nl(}2RmSo6&Wxl(A96{!JwFc1y?5X~mYVrgd143557 zqH!$R5!hf*Z1%bcji^k&XLjjK^~=jBsEO$LVikAqJ~%w}0h0xuuP2er5Sy19#wZ-# zz}vJy0xm~0l>9(njzq!+UB?v(F+{b(5`v;b#79%z1pES$?^)U%Vx z`lcCHQC?a=5MfvZ{2wt3;)?;b4sLANdpEmA8*Ud4s0Rt5EAY%VohQgD`zy2JDa zAQ=b-h!9{-a1#;P-o4q%!?ax|5MO6)#=Cdpw5rct?_0-EAB&FDeFAu|I=87 z(k@5!U#!}%cX(S%EZO5PlW3g?%^P-g98bL3FNR#V>39uS+`+IkyZ@@>U&&!ryBuzx z-s-#*^7hAPz36W2K8Z9}b{caA^}}{Q?-hdpY2S`@|76++eCLb-)~=OvCpzzUhjkw- z?-wwe+XLVAM`d$XpUl?-)lFV<2js9!#ZGbdyZ0>?oyXmTo%hjeb)V967c$*jUN6k#*6FXe)XR0a6iGy{+vM*G4yR(ytmy8xD>W>ytg6oK%^Q_!cU0WF zk?h`MUTXyPW?Q~s%B_#44 zuSVEhY!C!X2%V}cB5E7h_|Z&Po521Ki6a4f{=NY0%e$aw8fs{U6AtY5#|A1Y`&$W# z@S;|q52uTQ^_7x{QltTwzjD`331%cux(L`VAd4~B+0rBYryNEdD)}#ncg#3vhSw7d znnEx->i0Eo+F%gWax8XZuN~FVe{d36lAf5o+Y35i$z9XM^@>2`npjI$At)Hyp7P}$ zo2^W*Wk^{;AW@QK9V3%=SXVi&G?(P;>Noku!h#{JH(EFrkrEW>E|H_ugy^L0_L3FP zYD=e!~gJvD;rv~jo^z&L9C=FJT(*T+s5KQM3_WDK}1de^LYk8bG=ut9t)3Anl~ zd(O^Ujtl5$XeO9Tui9WGq#R-s7i}e*nxTdi$vWw5Qon|edglBxcQ1S@E2SSibv(5* zU~GS*OAGECf&+fMLI5do?!n5K!C#nc5g_7?=r>n*JwIiQ$I#7QGk#$9BM-sOu?z}M z$q{}uyFPI9b~MP;Um}IM1;_DD)eoVvZx`5G3G2DyAB0ag(zn7O$>NG2p)YdH0&Rj; z7iU-UpKYw1wy>6=Q1QXEr^Jz^E#q4~hDiAJaRmj0j|wEwWV-gKrj=`z9Y6nUkU_R?EP4T=fG648$F>Smv2EWD2nb zA}JY-vdY(2TfDR9gU!>ea`=jRIuYgwdDmPd8OxszLm zO0G1~p7camu6t5Z0TXgO(@-GqHWNicyD!}wk`fz13YhatG3}T&jvw+#oNK26V-Z}U zJ60iplJY)g`3m!|#B~2(5t1jI;xzPcvpXgGiI+2ea`&m06{6m? 
zJc1;bnPoqSfa`=1y(zSo4mU!ufhk0=$3wG}^GVd?Nx`G%jkNP)`B zpE}V?!<-Om4G|zLu=Io4VN^BBoo7by@?C2iexH51n)cI!AN8-fFgC~YNPRG1!_p&) zbh62mQyDn+J(?vq31Qz}xVOmqoUy3lXvgF{b5_S|o$w`?;LAH762!M&YN{|4W^|n+!T(KD^lbAuE)k>!Jgv4LV&-G-EOBXO3Xst{L zW^OJ0nI7h(nphoo{#kg5h(Pn)p#6VZ0AW)BCxc>3qs&s1&Ovsv&*RHEjyR*dgbCC) zhk~~C%80=^aCYO(Z8@O^gyM~=`{;1V7ziti#>&i138#h)L!?+Yx1T<>O)S2`6qE*a zHS`VR4n9VH!0DohdakA?ZVBgVTA7_13^hOD?+f|xYro?Oh09~TnaAboJ^N`hdVm1Ul!aty{A+rBTJ;@7qo3}0y-!U$>49~Q zAE^zUEG5YVLuUqf2bo;X+j|&3TdM*RN@X?z0^NOv1YgBCTfS$ z9}oSKuC+nT=p=59q1iWHhV#(d{F6?Edv&#ADS0TBQ*$ zOPY1ME&eLkFag$-^RSh|FX@bc^l;X(Ll7eI1Ink7kCm`|^RjG|EwSsQAAs(DG#B)} zd6ZBzY~OXVl`I(ynPJ#RAm=FB;J-+N!UH-dDazA)->V2=hWS-M?M%Xb`3a>p+>Boz8@ zsjSN;EFN^0!zMGc#gV!0bL^S&v=$<{DkCf!7Gk+5K%>@nBV-$|i<}qUMR~d&G(~Az z744Xp^^GOcT?l2tC$-eCKS!q{%M^?du#R)-F}PA7qqzI|EcXi6^lLc5vsNm$GLH)A z;~cPBVG5CXl--Ta;91s@qDhcSeVf4OirJ4@R5fRwH z{-gZ~OJmAky9qVWvo|M7p77UNW!Q~HSPGW`v5dGW|4Jzzc1m_2hZS{2qhtR{P^U9s z4}(~XVQQ-HkC>e29@k=`p$JzA zaHAudKYemrT|X0Lfk;^YXO_FBJ-#RhI)X$-5Y} zM)hwioaV)|k_p|$-k;iDry7mCG{1(Li&YV%T^}&7Nzb{8d4M1%yl&SQK%r% z1Wa9Wos4e`WR1E*9mciWDF#Da zu{HL;7C${c-(TDXYJ_UEC!CZ?F=th$n0nih!lm3mJwh(0TbUejAcgMtqK!AfA~TH4 zzmPJ6R6M_94)55};%Q3U@I_*$!J~A;`8f{=-Gq3QhV_P;h&?K|NQ}7xm50?ZchuYw zGKho`Wm~&>0acr7c}38R-RTyCqCD;k>TxZl3;J6sPassF4wY}kMI+sck6u=V%`ZTsplXO~3wN;@a}G?YFsd21YZS9`UvuB;DTK zTL^iS7mKY`)jnSh`fRdw;XL9WMINA^M1ebY1CjA%_u$D}9{uHQbneW; z%ngU%LuN_d3xMXqM#OG^pUaUZ%WIYGiOXgx2-OGfo-m z@9IDV*fE|xDHG0O)q-?0C&;(X)M9ZJP{PfyduE}f^Bt>LDAFos#Uuy3xUuQkHMvA~ zA3!fjm(;Z_t7;{WXjl)ll(I|=my5|uR8i3s$*t##w)?La!owUXWCB?@(nQQ0ModLM zF}8qZ_E1ZkZiJ3Fl+2w#gXk=GnTx$F>U#10@<3X^xq;5)yEa0t^oy4ZW2(O@hd(Fn z?e%e{jK$P@DUW}7ITpP~@Du~zYOysD`$)@LR`8Q|*R)MCJa&m+aj@*RYQ!Hn=%z5# z)*NH`_2*hFp!W>-BhCFd(u5=*aedC6Rhh=#7p?(bhq4o;87`qZMXMb~~$ zRdFWPi|U`APEfhMNf>4m(wY82Zm&=MmLqE@m#faHStk>%2qz98a^R!|u7Q43l6_X3 zhzH4T>0mIAGGWO*+o!!I$H3@5S5}tZ(G=Ao^}r|D#+j1gp&`~1&5pv0KnHUU-flzjA z;~g@_mK{{V$JTxCbBz4AH#>XX-|_#YX3sm*PXhanS`r!yC#QeymGcz4Pmx#<&um-> 
zQgQSSo?Fw)KVj6l9VE4dFGwU)zV4*y0v=jI$D40DJtf{dpH8MXf*m=D|M_23e9LQ?cHjkDREMSaJRSs7!rT!*!%?K?Q6if#4)gT6XK4 zYE4_GA}POFbEeV7IqBzDZy1rqA81Kd5>aFeltQ_btsz*I-;@9b$bt}05%h1om=1sF z(aZE(e-VSk@i!RbAQo_P_N7)FpUMpjD{SwU!-Jx`!RQj2y9HQF8907Y*D~BTRszf zd^lSY(F5l&mWy~vx}*$P)j`}Tnr{=~?%X<{8P~h?aXzHS@V|1#p5G%&r)|OufOkFJ z58zdNhe{F)G`Qiv(ifl{Cr6Ytt!jpa-Cu?EBSXku|8Q@|ZHk2>F|e#XQ&tU+Rr%{8 zvN2X?amq%kQgoN%mKDsHr%ZRojGLF}7rXeoRCPyH1?-)iuS~zSu3Yxl3Zs03CI%+H zl2YVSXz8lAqBsj_yCjO{qtx+6EswHVTKi=ZM#$;=*N0+aW}nO`XT~XOB4a;x@blvT zkx)OM+gOvau#m!<&^i}$o9lex=i8RxR?V?=_HYHf70g;2C1YeWw_D^?AimSLMmjDp z%G95aT9p(A=89%Z+TcIj7g~{c2%xSYv^*bSbed|i2c>#B4r0Cg*tb2KMd)!xW;+OK zcXr<%>PG)_0**6Ph%?%?2Cm2L4=e726yG?oTKFHzFM}Pt-=G{nZ7#zKq)?2!tVF8) z&enxur2H0dJ<3Z1750j0wxcVJ%!%dOf#9cfF9-qIu2diVH6Szo+bqU8EV~HQJ{U;8 z@Q>nwN`*XFeN71=X%NpA`&wfR0}{+D%(L?kr@`HmP>FvK3%0GmFLbrC;Xf4yo?N0= zOtR@hQ5r|KFR>ZE)6|~Ij@Z#E*V4cP6$JV+5xf}RC~G4GApKR zUbn@SPonkOU5H%asj)c6=vIki{yCBwG?#DqUkUqdTsc#l6U*ihl1rvM=% z=CC!XFkM#710?wjzcV)`By4{>0ISXts4y_EnhaJr3@Q0M0V01HO)(}V2AVY#FWj%w5q;B~B|W{>Z`pG|nB?#8cvO)MP-rZx0~2KZ zEy|!oPdLgx1S3U$Fah^fML=5>c1!!We}^2d1Xt;evYwHY>&;)6bQ%m*c_B#g@NwQUlgN>okHh&*!j6+#O-(NqyFm4QE`F2|iri1ZDuhQFEL(v>0o#qX5R z#60jC*|!NI3@aq8ixBYNXdw=WZQ5rz1}mn2l)0%*v?Ffj5LP7+(q5luw6 zI-T<yV}7JS%f1#Mk7T#_JPd=_7;b!g|13( z*n|oF6kJrBKy7ttgV^iEnjZeq@Y>p;fQgW(!OWhXt((6b%}qvx%al?tJYW8H`?0f7 z>Vu8p;FY_qV`q1S#c7G~-vQUPjq}&BibD6AL1YSmpzYs3*`#VbSEdt~;RXsOhhuSs zlXT@6WHyFOQPK4+D@@Pu;)|y}RmQggTi})1XxQ;%7OK;9h=k_g*z3Ix zieUeEHhA&XR2LV4R4V$Kn`y{|(e@|pbNtmim*0ECmH$E5vj1HHmu;g=pnlIRIG4?L z?f=|esRh4N9Ya1RJVJXzf~}oq0N1TZ4TO&x!L%SB64D6;Qd+pQXBnEt|QYF3h2>QL@sT2dgfcN(pBQj_7`@YlnUo<3wz^g}b| zEo0mKdk}rnQcnLBvtPAlw^0_4rqaCb)NLOt$_?_3blj`Un0-t4_TJwRWCH9I_%$_o zB4-GX`Dpqj>L#y79=?PHYbJ(Q7sVizyyWJ@ZK8 z@9I_6{}cY1tg$1yxUyn?qYJItL`D9&iCe!qe|ztCob_SBKeDkYyuy#V!Wc4gh**%l zM`%Y$V(FvEeWEMIN31(JnUy3_1wn83O^FYqj0^S8FC69($nG)T4n?~wwfrgmL4mO= zg-&`|r>nd^{YdmpDB2gLKy+Bo#Yw=g(uXezn1uFAAr7RVvfFv138ya7?c*Z=loln+=oj~ zT761oYEmWx{ 
z8>88Il-u0t6iC!Rd;5}~4hD#hXf*9(6gjtx)wU0=DA)5Ma*s@&A`b#>{H$ACmoEQ$@k7?BqanVFY@}}%Jd{y5lmY7cPA%cZB zRrV{4u>vozt7?pkhWh~ohNnD==!4LaErX3E9mbkTz!HIyDd9+iOHZwf3h~g=i+(ek zue6w;K3EzBPNz0Y@=S|H8#XDq)PG^VBE$zFSbF)#E+@25vk$muj2AVWfO#X!(t~{5 z`sjyVr}?=}UkhM}M%tRx@Gpur1$((UcinZfdFB;s3@G^W>6B337d?Paana|Cwh@ID zUn;18_YemsF^*oyl5ayNWVO=2_~4b@a6Y~cb8BkKlo*HY!y5(+iGAem+T;9Vs>r}b z_rxAT5@9(u`PURwRlDvvReK%asUrI_Z0Nrp6v=}o%pi<<*vcJ1MzO2IuzN?adXiL; zn2O%YF720l%0zC1|F?{v!C+xO7eMu<=;T|TtzC?yz&9EKv)anxF_(U``h1EHTv5zMJi*O28dSujuQBIL$`3iv zN;WBH{W6E0oys>FOG}w5O?Pk(pa6K(MtIE}RXlc3302my`W@+{r@~F#TZl)Um07HR zvJ%`eqDH%N0=$BH5Bcn6F>x&3mW&mX+Z=*LgnCttEYKcMz?e6lc!>wfbaG& zm;RK`_G9Gf`{tFm6Y8;UPwJ#rgL^J4xSgdFbm?axNF&II8)xh!g4mhQZ^bJw-*2sg z`4!-K^^%_{oldPoLhXu@RLO_L(x*B#57Z_LAH{M_sxus|LdHtLd|9CS$3lZHm27Q1cfNOihR$;HwhgLv$xFJL zs7T5dv~pWC(ew^2=9+3qw1#$`HxPFQ>KmvvvYfAC1Sa!`R|)bB837bN&l`D;?0wfI z*_URhGvR}0drlnSZ1!0%tp2uWovq?P5AkkCU+^>5^hbOF*I+&b%Uf6Wi?3Yo_d<4U zG0UAFer}n|Tko8WR^59a%S)yp6!6-(;W{4)#uD3>yPLfOz>REG+hb94tKI7tvadSY zc>6HjH#oTQ%Z43Kd*zJF%R&#ypFY;sr9!?Zy4PaUPyKA$Gd)fFYp_(TQeCYIbdnd~syuv{SIo~K{&ymQT5|lSD1+wZwMhba z-``oNn0fmj#<7rqP&V~b4bJ>NjIK=edlY8i^shohRt*i$6*6WtWpU9+GqVmZEJpP) zw5hIq(!8kS;OzXR$1onvDeYzYgJy!^F;dp4x~Sa&7P2;$KXHgCt+#^eDH0B#eWv|X zGrzvd!3=F@Zp`LA2otp21@N{hNvCbN0C_NF*INjQW3{}1)G0fG!^c-5W*{>l5E&+z zIieMHl9h;#rDilMRt2Rlu2tZJ61c? 
zPLt7R+EX;ocMicr=>N0M(4(cV?}F@+Iis{#X@Hr|i9T<+a1RX#BpRv^vSZ-~XRCBj zP54&1cfcmWRy=_{Kj-6n{?h>v^upEE&HG@E(gnn`TKDgNuj(Q|re`{SU0h@__!z!g zQN!zgg$+>c4A_CHIxi~xCO;hT)?2OCebPGZL&?~6?gI*ad}i4IX5|m~RXxkRZ7_Y5 z8oVA*3y9BScAjm1MwNHn4TEAVx-h$6&H^GyoU75GGh8 zoRfKPY;3J~%(9vUIf#6XcFiq(GH&p6P0Q^|A7$Le(0r6#VVZ1KzG8C(B?t9J*LOIN z(R(j-*aIj@n|cj8U!LchWoN*Xjs->P6RTEqk1D z1V7M?kZtf8E5X!{S*~E!rub=SEDxPYIXR)l77~?c4yk8WUNik|ImiBqb}XC#=6RM- z**5HY_WEk5E4aO^{ejm5rHiM2QkZ1v3_%ghI{)lE%4)CL9)oJy+Lm;+4D|^6_;yfB z$_bSNv>31dT;!N8N8!E%oXa=;$zxMW8FJ%m3>93+1$=LjC*(tj)T**e;w|IKNNS9a zg0&9{-E(KX^Up_F08){WApw~lnSWb5Mav`$%vPN>5TC%NCnQYpY>{RTmuhCoDM;UA z{v?X{PXAPZGr1DsVorCGRklpS*9aDHJJs!u{mY_5W?fGHeNJC5$dn)hCr15a&AXc2 z;zEIKFuQL4H?M=j1H1>HgV?9m}${Mvvx~+A} zAM^QJRfq^&Lqx<|co9&My}>x1{@d%Y9}Q#2bxoI{ZuZXV^z(k})#s?iXFjdo<7@%! zb)%un{r?#MKVVUKAJ-N*3f}SqknB4fNhr8z)-o*PL2M+wEn28sOF4X3yGZGR~L zH)_K$!_Ud({j05KWpL?w-Dmis0iJIExS9F^-4yqEeZRajl4$N100!mrZtFiiE?)!o zN(>A_2fLsCl6#(O`j6t-nHt?@r0V=VGXZh^M|{#V#+5ymC{`=Tw6hZJJM~#(*cCD` zF$?LeE1+v;3tH?hjAq%8AhjEu{u5LmYdS${{ADB{s4p{2(^1Zn>D*3*Ny`gc#Yp{^ zvsO9a)=1sJs^oe>VrB+q!TA2jekmIwb=Hj^QXH?yW8MSH{ZBCp$LLn1t4S4vy_Fo4XKD!7!fQQ^sZG2|6?-O+Iu7NK%_47mL?Igdug!duk z%5z0NkG$qRBZkW$=KR46GrbjBuoe;x?)*m@wv_gY$`;!%-T3^W#Qt%D&98u%eQJQ_ zFmIAVWMHKc^D#3%a?R1-pj`gepxv5mJec!=$W+40_FuxP%dEa*w^Y)RbrYNmNab6{I^+8v0)z&dtHm(b<@b+4&R4<#UJjho^m4 zHa44}SQAJf|4ky)rPA0*{*C0FUexh_YumQ-I#~3JUDL!_mD7Pit-+j0LC>W4eJIoE zX2BYs<|i(48*1x@Bx1OJ9yibI?AR$T4GS`{-hwnaae29E4tMLGD8bJXsZf^FU&&pU z@QfY(WmDM;JDavWIYd2}TNTET_n6ebcrjcPuuF>m_3PzS#C1f7Q(>&qh?`KTYhqUw!Uw55o%G;k^y8k%R}sTJPS1IXTbE=`M7pxBF%F;p&dz3pXc$8jg)Ku;4rX zb7Y@(cGmFiP-u_k^8@Gltoy~TWq8wJyB(-r?T_PsjTi81+V8T3_>8yD;bYB1{4|yU z%yN7|@tX0aX$F^*J5u?e6{PCcd7|mz`k%gxd3OvbdF_6WE$fs7X+aMiT^}gBVUc+PYGu{M6z!`!Bv3n?>NVu9!NmT4^r?VS!-J2c)hcyCgi}M zxAPR4T36$c(U{1>OaP0*xR+*1UH)%k*j^wMYCKvMMm*=6#j@mrHtx}+t?JB70m1c2 zMz&)0RApVr0Z)*xjXczvlAI|*(szqzKWn{h)6TF2DZRT{^Xh`0+z z09KVzN&>Ap1-Bd&8O|VihpJ+$+BUYyf@Up}7~0w#4`S0KmK@En3RUriZm;#eVJJsO 
zscmqpo1|I$6~yTnFcmz(psB2*sm0AzP_L|^wQTjYckuUtZ}D04sm%5e=XU*(bH^44j!)*i1HH7MMi4aDwljLcUs-Ch!T~J;Mc29moJ|x$tr_?< zo}H<9RbU^_0R+V$faq0%P|M|Z0If>j-$ulp4W1Um_=^4T8%BvUtO%iw#vxZ7d36z5 z#sPlSJM#%nws7F?+FAq&gfxXjr{cPSZJ)YghmzFG)jc_t=Pdq;0!2^iiykH?_QUMf zqoCHDVXdQ88qc8~q5jnpFoJiU)BQP3Bg!Axo`p2&pP*d3TmOT(N-y_U%GjV+bK7ZJ zSr5#V9Mc~nj3hxk*09nH>C-eRtu>23*nbjj54@ZOmVsirQ&iVC74mu2wlQ=nSJl`OEffRgj$R9l0z6J}bl}kqXyze0XcS@j@$$d}pf3^|& zt%o?2{Wh)+!}#}$b-|EVgV%hgi}%YYrnXbo|MdIdUM{TL>lXI9n&j2CItKW*u)HsV z(lQL-UAk6aMQbi}%s zV8%7uRblvKo`^YOWy(ixtx6%)bD|T5*ukhz-sYyYLl@K^AaHhzlqMr|(DfWI!r;( zz3MZ}lA_>d-i&EyY$UV>G66djT<5E!XG|1xDvN&MA|Q~*EK_L$RNT( z#f0&YdCT%7ZLjowTE3>GOR5(xe3Y>K$55&s$ z?7ool0F{XRdI;&%DLf_YFE*H?MAy9Rmjp9}2S&FQ2=`?<3<RJ8q7(9jy$rx8FM?^UtN}zFHLUSebtGpe(Xv(>oU+1!1=|29hZL4Nrjnr`pG_6YMe7@6rc zU+5RL&|x#}IvX1}HsW`{7N(K5M|3p*4==@@`L5SQzS#xgot&ZnJPy@<_wjp9hL4aN zj{7VE{%=oK9XE?t8`oR^!M_IY!;1O%el#_F8I;bEt`ha>?rU&0;NN9II{UR_24=)0 z9Tf9JeO#{K1Z^Hlx6hJ&;Ez}cnAz8S(=)k@RgB&R@Bh{>Cr~RLed>v3A-_Lw^)27P zX?zT(9({&UxKY(Tu3Rsnto{#hT=&V$ejB<+=sY?8D z;2v+t26}hochNSQ`8X-Ue%D)hvaS?g6M;yo;P;##8QRs$gIC};^DXQrQo}T>42etN z&rZoNKxP(6Dl@kYrfv6cPep#EPnjYH+X`~@CiD4rS0c}S64$$QBVp5UvL;Z`2F;gQ z(&O<=8;JYFP;5iq1;#hOAbus9;=6)KJKES3A3=Wcl1dZXsYBxS^@N8-p~qf4w*`C5 zay$+ou(4m-^G=`e0D!)S^*Q)R44J*@RsAEwm%iMx%(7&`kUUwX0Uvu(Mno&woFSjv zsQ@%qZmtI2^z2KrI%Z%4n&)NB{&d#z z*VSvD)b7>i1J8Fyo_=uh+$-vSWbZiU>cU9hI&pW|yanq=BfGmUwZA*ULr;HIFr< z^U@ErxE3}Uo}bf}s*69LaQqu$G(B{aG8>uF^1e}`$3+-b7_Fp)Axl4b6a*1{U}G1#zjkolS)@%P1W%(V7%e7K)>28SsuO4K3n#QfaE^0B|=4dC{Y%mMEkQ@FK1O5 zo04mhjG7j}K2{(`EZ$Sh+3!;$W{U<7S4cP=kh53M#5~rExAR>e zg&i|Xk<&#X*6X_o3~5{p0eQxZQmDI=Ao9k9y6Nt%ftIp zc^_IG|Jh&&K40>$L5jY}TpI}mxO%R$XnO7^7uVO}{$Kvnjymp!hI_OQ0vRLVw(p)` zch0&W{J|Z#V9c_?qruy}kYPN5kPqUpgFq|-BX>wNlOW2no}Z@gb3xU&Kgy&v3jb9W zp1sq42>$0%#NlTKpMo8r>QB7?fM}Erhx)taG7-J9`4>g z8c35NFGH})0v1ZMp(TQCp__x?bnFF0Vj;%qa!*PKsb<)Cc3d%L$G7Hv?s@oq#!JZC z5R!sxV`EwoOT9B7O$qtUEkr~LH#SUNHU&G+{u#>kAIiE4d$3OedU|RcICBV#< ziEoQjHI51nlqf_0JpoS9w!DO6_OAysJ$}n|OC8!GjJ}6mtqHBJv>HyOVhBc@P1yBV 
z=(Io8B45rJ>OLl&h|Ndtr27gyn@7t+8j(Z24uGV$1f~4$#`&CI7AEO3BJVcFL|Q z=dt*Ci5Px=H`j{GSScKrR;{$}W08t8ySCYo(U8!sN8ztg@|kaOaA*wZn>Fw^&S$1x z=b+kF?)1GQ8>4rHyfb`FJyoNQ!CMOzz_Y@ypq?k$Quz zXroe4Cncd-z>%^bJXzK(Xp$0a0Isd!{#WeVhVVNOWgm|lp=Ns6I^RPGb!Gm44;&cW zB~g4DdcAU2eGtIL4tuH#-$F2anDM!ASA`7pi;ZUD4RY}Bg91$|dhXbBQP#*Xv#DgE zjhc5e!3|TeqH^0XgMC4W##?#9|M`rhz)5+s%M(Av_44-QpSIdCF90~NPEQI6MVWAR zDlaq>!Pis8P$YSjlQ!bO?|PBA*Lf`#N;ql#g|9c*3D`xX8ez{0-0V+WqQ`CX?`>ujVV6A*Fd@OfXO$)(hbu{)}!#A81RBr9~#i)Nu!Z>eYuDs zI;51|rvx7h)=hGdC>YgTAI=rlB_u@64FBC#VsNxt{uXi0gZVe#&QA=^#?O%Dtq8}2 z>(h4)s=RWk>Fh}L)xa!f12}MAp9k<*Q^i>+hzR@!-4x;{IF1B;G|by2dYGYf)%MT= z-F#aY?wlJ1lY_z7m5&i=_L-2~CSC0?NU^7=abbdSY*{|QlWdCaUyZ40{;e#^StWK2 zYVoYJZLO28m^6T-(akG;DYQWC9a(+(B_RTRZB~?YtrmdP?`}Y(O2^WQ8v6@l$Z|dB zaf(cS_o#vgcu-7DOQL-Hti`2A#vt1|b;2~|fF7_iJP?wtr2LNM(B@XD4J#_Ds=~Ib z#MeS2w`fRWkSLdu%Q{+TB#u2b*Zs5Q^{jQyH88q?=|A`*P?+?;7l@xFw2;4g7dAWp zcfgk9g+?6_@WPy}_(-!x_jmP4-Yb6|SsPC~N6CRuVplGefN7)XjiZsZf;QGDuaJQe z4f(aEU5Te}Fo|lvxqakCe_O3ngb?a#2snw%87K|pU47lp7Ex-Yr4T5WGM z7}zryAslGbmwi}%>hhkIvY8*)oA5!wInB{<`Jw3vf^BGL8T zr_c8i;N|Ikn6V^b{^Mt(Mg`fSw^j1(V39>Bc8fH@A;4v-(<*v%+)q5VMViWCD(@xG z(eL&1d^?2-L4#b`CC#EcZ?XFXLUGw)Y8^mT$|v9CYeMwVzG zqJ1Gi5^&PL_pc1J*j2e_;C-AQJ9Ln$pEEI-s;aZ3w^JhhfVf~TNAO16T@gu(^u2vf za*awg)n`f+otM0A`j_@>o`hBK9ow{=^>8P{AIS*A2mB+a8PkdXi@(5&K~Mhh5lXVw z|3cCT$B2Nwr04b|Z>-|Ub(p9OP8bViEI0`$qwBl{PjpKqT~+p-tWYh45zBCcHMbG9 zpL@r7E2~ghqXYN5%W0k=7V)x!2uC*V7rk|6mlf;_Q$Lxn8~9(;-O1{ZQkMv{A%j;N4n7&?~bUgBr<^EhC3aY7AZd`* zpuM|ZN`;6;B-Q>+a3a^L4ozy{RyAvj&ORn@LtF=k{Mn8RJb-q*VGw8nys{;N8W<6ayRbpt^cp!2-?y z%a{)gb9yIHK_3X<13ip^_fnw>wgWl*F`#0+Ta{IYHg^Ykqz!pE3}w><=IN0V?~V{@Y92 zx!+hale7w-_S>tl=W zqKDv!f(Mg>;3V1f2kRT0ZyjFrnu)T2Z=NCs)va2L%afccvajos z;MK22A9c&?l&dyrw*L8!)=rrb7i+ova1|kTO(Y{JbXW@JMBePL86(d3aL@wuRNy=Y zxq?wHDawGcIJAM=lwmDouZ8myNU`wLP(OKYJ%M0PZNacf_Bbl zTJ5pR@1A${=(}NGA$9vcuv(dG+IY7vG|pw@k5*t@i@^?Po^k2)lX1`nHyYX2zZ^_% zi*g*Eet*awvpw5Aal#{|NU~LNn%w99DZ2B8-Uqwoao>XkWNt5S21CUzvXP6Mz?5KX==GiqP=#yV-r 
zU+1xww3iCXJ|dzeM)BfCGbzv7WXEs?;{b+HTi^;^h$XnxgNSvAYYM^o%(2D0=jT|CV)LUL)g!* z^Ck=QKF=I@e38(jA8lD)P3Z^FU(c;Au=W>_SpGx-rdM-_}i}6-Vk$>Dw8*gME17V9d4`O*zT* zJ@*TFAr!}V<@6r%_Os!AY zntK|E=CK~9wMF`_iArghPe|A6uHM?p0flU$`AZ*nAX1Hk1S2At_F0K-)O7SVN8*kS zGc~pN;~p_ks+OMFkxc;wW2;E38f_2`DR65-->UeBSu(Dw zL~@ka!Y|biRPWx%ZRH+j3nE*$mR5N?U51$uDqc#J*F*ri*k3Bd@PI_uLYEC6Si&%>Zn1BIh4v#7LfT$t zYRrJWFqx%EO@ns%(Y~ESoC^EhY0WQC&WQJDl?6WoYWE-P1IydvhidmHW&27}J@oq1 z;MwO=*D<9-A9&-YV!QG)y&m^Z_>Swp>$@+CzTV)SA8fIJL;`ho=HnJl`~XlVH>^os z0t;d6PXjgszI7Z)2ZA=9IgY`hFyY_vsf#QcQ{Kjd4<;ooi86cj{tTw}g8DeOM22+i z4F=XRd=Ls3rhCXu&IjI;Nc&`kudz9?mVH_3W2f>PUv|Btr3cjLG)kIQBQav4nXqN5 zyvTZX`gYs8;F!u`#uv>vXCSzmw06!Ja~hl$3LoL56aa847`shL2@XlZn?}vX^(m>t z(Hot5nM=@wrPo`1m^i_KuEFJT&2+I@`*lsG0#+IX1OfXW`ci{N8; zU=caTd4mj8FxigO3oH1(^zeM(Z{RA9>$;xa?8S|r*SKvc$f1mooTc>M#XUG6F`}Ha zM@t))EXd{qi+vm>#!x+wl|X9TRk8~)qq_$PQTMkkG(<7eRRrRJ9T`q<|jtS@|8 z8jjP;!m2{f6?9uwzhCwHo?YdB=JdPB+eCaPMDvEPt?t<>AZB?;+G-zWf7TL?H~Nj1 ztgpX&uh(;8S-^Y!)N`i4deFx3eaMdPQ?GBZxf|7^br`FCGe#h=_jfsVWwCL0v>w=w z%o-`p7zpv}Eq5-l=m&~zOs}I)x~61-@~+LazwJl~A1J~KRqqbToj_{T-lW;dm79=* z-?vphuGdqR1h3shCv^B+H4h9K;l>Y3?^Aq(9j7AKSGDIT!9f2Yk>^+~JA>e%_34W9 z{Sv1)4a<#1kcRYkV8&HoXnP%))<7=$W7!0rG50oz?0E4W_ppA6lh&^8n5(df6()vB znEU?`_0~~seL>eS1xibCcMER8-QA&Ri@Up9f#U8~+`YKFI|=SqG&n_y%bVZxzR&lq zmA|s?UAeh4=j@q1bM|<)UWm1fh?a=R&3)3uL?eVQv^HutmrqD{F8Niwjs*YA+G3Z`E#FrDFhPHEiur zFyhVEeY3jVacbGdv8s!I-*4f=fDHs0LA^9B4HqhM;>pYf9>*%PNb>uHM)FI2v$Y!^ z_0#&(w|XdT#^>X8?=i``_i&X=WH9Fr?Y}@%?A6z=_MXmY8$^~D{LWNUvj4PUAue+CqQD+RbppD=b#o&yk3Nrl_&-SwPy z9=+y$-{Hw#0?1aIz2`jr2hr@?KKt$;VRZey&utB6&WvNc@FBQxxu0xpj;r+^F#XG= z;x*N>RKNLfF)Bh*`|yXl8lkvWkuF|%A)b^8(w&Z65A6;rKUW_2UDO1j8l zKs8ce68xqqAtWPS=^=)D&1}kfTi?Be2vO?Wnw|sTI^qs7XE$+)dD~(_LuYH)K2g59|3qyCT^6%SWE3JUP1s#@?o%?`QAwa2c7}%XZ z{pNV}LpOUUhQMy^@C%+C1ObB_#lcytiOkRDvtJZA z$qtqpt)%S1-h)>K5}#P*urz|mBP-QO2{Go5I0{c0b4}C1)uTnrT`!!ZIqCzESoZvz zPd6M#{7hteTVqk{K{cm`*PXCjK_mub##w#RE1m#6BTqbf$ZSoF^q>%h%POLquOI_# 
zAnV$UTpqA5Aq7}XNw1u+VXPb-pD@?bf04{Tyt6w}#u=VYactYbkrxVFz_C=(m{e-d z2kTHJN@lM}4Wp*cz)4jzDSKnk)Hymj?=!He)CWyeD6y)kc*xRHSSTHH7HAgO#-@6e z@X=lYN+elGlh!v#_{E%L+yP0`fC^f@!UL**-$11YI!HrS07{Z&wXoZ7cFLeL1{KAL zkdU(#4Rk#rE1KZ;4X6l)P+6hivRp8-u0UeSm%a<}Hi{O+Koum-f9=-ZG?qNd>YT)= zkX+GEEuWqbE;XQ4d|`Bqo}(VaHrs#iBP=iADcc&qvGm;rfo_?x`p07Wh8%4tg=0Lq)_?uz#I_PWHBZ_05Hwf>&lO<;QkhE`Lz)B)< z(yOai3V0(Ko7qgkjJ~3hAIEy)5$>8H<0J%a@_ATuMd}<60AV58HSV!Ft`%nd@JW3e zR-N@>ZWqT-BBWi;S+XDIEQm%gh%BhE@9O)gF}E`&G;u(Gzq1DaAc? zX>3)3yT+m(&XQjkrypsbLz1Qq-M5ao8Jk}d&-}yBQz|~q%I7FH&qNPBg$k}r-zq=( zns}-!w~hv}!qvnx?R5|6tA?`E>wAJ^%?8b|3|jv&iJwt>A!vtZPm|9S)ST$ z-PfcBnFSv(K@_zHlO%ItFY}$7LO5Q|@~$f*G-^?f>XplUGX$ z8YNrOVh7~Z#wLEpcm9V2vr1KviT_}^pcHZYq2hcg$a{&F#c7Eg!G&f(5}GR}%z{?V zn{8*w-~#<(nww)i9pL!XER62!-{MYV{K&@5Q4=SrB%uWc@|nTAbH_|@SlzSSh2LZu zWB4(z>7G4xK^I0w?lUN*Y~6vsBtG@KSSY=O;wQH{jsv?uq+_Kg zVgb}GS4eYT$>HoeSB1@>;7*zJJhJ>Z|ag3~957F`-&+{SIz1O^+pSags1*_mgtWaHl^%)48witqD8*#-hUE zP^~X>opA@2+1G6p>ip?IS>=sFf|=qcSs|Dr40}DxgZmYavUV^11jc>A2(Bgq159m_2A;RLdomNE?Wj2yJ5gy-7#(z1o4|4<81oK9Gxf)W`z_;JIus`Zy;vwsbvLC5;kVls?m=DXXj6WwJ@*e6(;1}pv29K*g9fwyZOlbYv_ zI^ig_<N)qHsYxv&~Kv`6qNWaqB896vKaJCY-~!5075eR;Z@i zSvgf!%0R5IoRd72`0eWR{RnJBT{i!gDh3f=6w#E z-ypvb%$DAev`GOOCb(y?$ARnb_IL-4d*skz5jUrM#vhQodpI&Ei)en!L?7X+@}HAf zIEa+bu#Q-FME%XxeA>XQow}ctD?7#5E^$hbglMVOPHTNMijw1M84lmsW68iXdqso! 
zysj?{ZzW*3Upd`vVYeVH#d~7;@$fST@qhme28M|VXy@f$#7PV#Q+-)gZsTPsI(S$W z?1rYE`2kD6t&Bw_k^iIaDppEC0^PENL^+Hv9a72=Qg_{hm;u7q=FaRMngcU2Py-{Qm2u@RY`vW|Bhrpaa#7&T`k-sM$IiVO1) zVj_JZVFQb~9-~F!ik&R3m}_$5b+wztH8weAj$jX>*+ zwtXx2WI__Sbi9;TJ|2u2Sy2P~^$VI5y)xlu(f0w@mnMd)C=X_q&C{OBe)vz!hH`bqvIdX4 zB}m?sl~~0`79ba9O=L&v+;UFBiU^rvCQRY3mLmI&+pv8fl6<$S&97BmJP=>W&ypN(WnxOR!M)~!AU$65Ms=z zzP1<%BO#8g_RZSyWL1(`x#EdoH)=Owkbc-TkE%KDvd;4_=tCN|=wi0`+Bu0cOm=yL zbIGk)&Gk<}{ju*ay;xuweL?Quwx{mB@rz%|q}jCVI9ZhsQlrIH6Dc0@w`nZ~Rnw?i za=#{w3-2bj>_Gab@W5`%_B;^{38tj>IghGuzg*Szo_i{2U)meLG}5R&`|(1uEG|45 zq1~RMBUZ+oH<_yMsc#h|68#tHAb~Cjcyvzdc3tJ zlHnks7qr!qeu#~kh$alJ-X*v*2{Um0oQqBSxjCD&ITn#c$3Oer2ZNin0llc;p-MWE z751xv4j?A_uS8`Px2{dV=C8z8m6~`v9?_yU3GR+Hosf65!^&IY<1CH7!5W?R3C-=j z_o}P1YwhpV}Xcsg{a-T$ml7LVr$^swg|_SzI_1!D2{tlA;y+dXXHz|x+P)tH-Y(;vlMHC_en$C9&IO%lE_R+AV?gw1Fm%E@YiORofU!SKd z^msy?!2<})lg(U)@@WI%A$WKc5tqH2)7S{Phj8)+BWz zx@?TQnxhWcXm57+D|tOB^PY|QtS{eLPS&AJ>726Lew~ul!wX?36E|^JbHzBU*0K)# zS-Jg)W2?XW(*~EyLmzlhk{gU@Z>bsI8(+c{hBR-YPI*Eo7IoiJCUAC`l~#cS`5L_? 
zcJHmIx(XuCKv!}HONnj8D5N^37*k-F17zmDvCVMS2_r15)zx=PIbyJM zjgwUhkuw+O@Ytu6j$LG(FNLD@%CL@#o}FfA+z6IEuRu?-lK){-!46tnTqLoQBS%M% zfuKuCb`*98hK8<+_ovP?3d|&RM|#iUNXd6WYyT^R4oK&wdUrmdr|~D<@dnII<=;Nj zk*JqDY}y@5As)0kT@^Ocq1>$BpU^;6&@&Q~>{vtCvmV_oP0*|x59Q{h`(@~?lu;s{mW4~=QdLQ)TGs8#yXT=>mbmWHcIxOz2G2-+#M$tlIaGon zaJAAebGu=QQ*(xY3tj%bsw29L;o!GljkcX(f%HzW={AiP$3?0o4t3ncXns)vp1oDg`+{My)bTfe~W!&jsQqBEcIEk5uv3E#KLB4Xp_iZ zF0I~5ya#rtTE4Q@$0`;pa-dG2cHBzDYr594TT}ex?6ol^qakk}6G%VPD}Pz6`%qScBK*!B2zeJ{5|uDtFY*Z;BF;~Y*XGywk$@3Ru9Z$Wf@?CL;V zy$DCsM2Dk6eB>jJ*N3RjD+Wkf&XTcJ&LJ+l=rr1BGL`6NrdMAQ?g&qJd)VdoOrbDI zZ$S717<^DuGocET;f_O?}VZGRe!7U1!N+DvP1SqfllrRmxeX zn`jZ0YbVB7cMHA06GvxOaZH!J05yo+toRU3`bc}rDi(>C*m)+;TzGuA)n4Z%Y)78p zhsB#(qwBzkzwmzlhoNe4huYzA_x-OhuI@DT%G&fKQXtBI4H`~jPycH756E15aJi?qsUOC(W<+MbSuk_J) zJ6_@0_)@oO78HP8bk*2zJ%fPS^5nG%8Cgl1vurE(V-V_=@wdd-G3}{iMF(`3x?gKz z>A5}!-lpdx2xhevDVI4n(t(9wp0!Ys<9Ni+78jDcWe(8^+dCkq zOVlkkq5zMlp=LJd2pjm1cZk0VuJAe3w5EIoOTUIatFa1Tc0j<-f=|+tPY7hGeWV}V zT_dU?-_iIiF$mN9>_wsR*E8V_oes}wnPW-|9bUOe<-O)FDO$G*Vi)n#yjn&#vG(PV z*{x<;65fM17waO?drhpUGh)KTkL51ED)X4$ z_~eMLHNrELQfsjA?(~7l@N-d&Ibpa!zpzt|Cscj~R*5n}+LM2V3p?zQH(U`b=9#WF z^E_bJY9{$4B)=wtfV>FVYj`DEJ2Sto%y^RZ_r1)oL)F) z#Hj(PbUoIaOz)8!yxhL38fk!7uW=_zaM+#|8BedN*hDO-HZLq{2s20P~E5I^8JIUAo=cz{1LZ71cEiUs@iyNb|6= zZ*JCQTO0WBDO;yMgK_PPvX!e?zLa=~=;s7@pz#+(M2Z_(m*gzpWP#j*u-C4Dv+gCI zCDKZd9QEcZaK-k$KUhf9x3>GP(Q~oF=g#qcsqxICeS*W^HhC(FBNG2p$3v-2HXTec z&b9u>KgqUN%kYsbAAe~A#)r@1w3ITksK7Mc^wJHlvP+Sbe4fJFi@aj-Nk&)C*Eq^A zwnosIehanbieUXOWQX6m9CGN)xmk2@lg$uYINJ7E`p$Z~`iBwhCK(Y_zRv!^loB5M zqt^9akl6}{Q^6sHYK@SlOZ-W34v_*LCN0fY!{xXx35i0fpwi21?3R17GtK#K4{SpR zV2`)~5*v&kj7hlajhDpn<)!L^kS3x3EvWwelk{AV!<^D?u$v_I=$}R^9)%+f~Srpcvh9k6^d3v-FeT>}dZWyXK7W z6A7u<-0$E#7&+Y>#CJJ}-7F9BEowAy_3{o#D$`z|Mr zAgkRmtX2!Z%v{PoWO}^{zxTv4#M)vas}d?p6ii1@AmE|MoS^{e*xEGS7Bil|a3NqX z&DzOvG?{sk>Oi=tL$C|RL#Mcs%lCI%iJGl>Q~!F>_4Q#g)`0399o+|6p=^h zlHAE>xf4OKTd!v^BsIn^Dvvx z8p{O zt1*CE%SS}NQ4KPgAgN0Jn**D(9NJh3DQsb$Iz?|fb#tC$&7vF 
z_Wb0M@Z4qRS2Fu?3vRb45_9xRQS=iP555*aCS)aL;%th`4FM@RMK?Iw!d_1qg)WH> zd*gtzCoU4AQowbCz=Vl>ckqNo@=Rdq+Mva)^S&h`~1G&W;NPwu<;rq`M@>*$A zp5}K3%YOi~?v{e-RxOgzc#Ix?+Pem{1*YxHQ8<|Ys|zr^8hC(g0Ob}MVBgN^`D`oHTi#6sp0Ije=vFp#=3fmGAC z;@d3sFSZ8Z4UrE&hoTzmV0~5ZTYYR^@W*bQu)1|Mc{>kjM16p}xVvuOExpF$`kI@9 zNzW8odf_I+jKTJ zAGkW2b*r`_c1!K$yDq8LYQ0fjg%E}aeKf2_C$k&%BtUmjCP{K*k#UtV~s+#dLJly4m^zHKg@&U=m`9j51UftNy{KX*2+ zWLZ>`q0p3*(dVHT0Z{%6pp05gj$iF8b44sv=(ATlscI-*_xi;D2SiMm^8w4CL&_$X zn^pq*YNJ3qiTLncqjP|&E>pl4$Yw7!ny&Kn+UO(|n#LDjsr@MBiCUyi0#py;D?cz~ z^lbz3;Td|W&UdXDJdkJm2J9S)x5%|TX`2s9WrY}=5a}%p?URPzr1V%;p#U|Pd)+Jj zXZG0z76@=yYZ{u@u?)UC&`R#h0UCVQoYC29nShJGYOI23@We9#rqLJg1{4OPBND1%P8;+vN zkQ=%L1Xi`xC?oMe7OW@5lu_&V&nb-qG6-m8H)KCWL@l@L69pjq`*9nT*5@Z<@uUDy0la}MFD3DVMNKH7hZyptiwaDVnCC{*&>~n z+t7y1_d<=vR!gqe-88)kM5T|J`fGm)6rZsit9N0K!s0+?!eQ;}v>J@NoBoxuEL83Z z)Z5g^dBu~?IeTh1uYdYl@8A`fPhC+lY;RvB@N^xhJ!fqh>*6PQi0i7o7sGBo-tL)P z9zD&>#lrcM)zfOFKtRNq|8h&<9}2L4!6fOXtKYm2;NQvK zT%Y%4t2wL4rK8+TZ1DXMz>-tyTCP6t?{dM%H5Ks?`YI;`zbP48&O=QB}}+ zlZxGP)$)tZ>7KA{G#D9pC}b69@tyjXouoT%X+j>@C{;}#efV914AmZ}zMqwNcp$<8 zIJf?k98QA=U7Q^nTe}d5#ROB>Z{(5Ry16Mbh|XuJh-i8l!YFclU5!Fp_sY6NvYZ4;E9*CZx- zerl@atPBx1r^jKPRJ|j~Jc5%hT*aAi(e$_9&KN%fuoJ5!_X*}v!xFqrDwL_ymCFKU zKU2W`9UB}(!N{($@>mJD6NZi?7OVqNqLE3}^5jX!@u%uN`5ClM;p4$P2*4~yjk%T* zBT=|2FdzRA?-w|L8h?8fwODB%?*_ zd?>OxJwf3>6+vIT$GMBP(r3-$b(O0$&f-Wh)A3%ihMoXlFhvzS+DYVeyfF9D@$Y@R ztrY65h^0|=d`0wYW9LaC{Bcb~-b#6iGsZAizW5l6ST?1l#x10p5q5ED+U83Cm|KYg zd=tb$2Hug%L|7!GH!51*;{+ni6pFYVmaAt78w2Gn(nmi$n8NTxjkT%>;3HjPC-GT6 z;JxwA-9DM)O1R)3Mq(#7;U&9d8Kn~GF9=`Bc|T?sMu2Epl^SIHCn&P&DiHSCj%BI| zB%&gF_DK&=xOYs%2la|Dms_1JkiOl`Ql}mpXPR=#F*LaMTt|0?jVb5n@Q;I^u&9q` zL(@vVxi-vGM2D1R%)qf6A0zx6fKi;)Z@Vb;)Z^YgxC-TJ$;=8IW)JNW&{jc#dH;#8 z^+;j(dWEBb#t3OY&dg^bdq7Ty6(7pMm*ui+-&(lv;2xSHy2K>GSstt*Q*h0tj3(50 z;)Kv3$6h3?o3z`2Y}xN6EUYktnR_Wbi^>|Dr=5%PCn4e`I6}~StskA;Q&BaDvHEAO zF4<|LE=LD8h@Vf11coz^MDLjMk8DD#L8@ImY!R9*=FoS%AcRdGlsX(_0@Z+u55+G; 
z3i+YF=*P1yL%9ULXri%!k`yOXY(ck6#!p3J52$NiCdhbi+1Qt~Z6~ZJQSo=M&&p0eYZIox|m8aa5O!aMuwB+KG8e~=E24sGV#kZpjRxer&hF?ccBy_y_ZvM zV<&=YOrQ2RgHeqUhNDKAoM(bfhgzubu3{Q~{L>9^$%qg%U+ahAY)5C$W^124P1O!@ z#ByoEhy}L4FP<%{tWnbR-^%GaKS5>fyxp)decVeiRU_9cmG*a8T(c5<4I^C#qZRxm zCYC4IrSb$N8kBYX*Xw{(o0%bGzOu(PoH~bxAC;a_!B+L_6!BF{Nf%Sy;&N#*qcl{> z4~J9C1+Z~LBgsjxQn@%VC-RIsjZxJRrra~-WgFbOtZm3WMTU;Mwfu4B6+1s@K(jTf zPre4pZpyvQ=VJki%`h+Ub4fA1bb!IjJ}hx=kXr0<6hG1LeQ{a!jkiaE@D^k0^wr&} zchEOfd)I$B;e-)pcV4$ErV{Jfes^yO01B>g$N}sh5x6!@Nq;Fx49vkhuFJPP5Z3Cpf7+N_XFxpkHSj%+^G@)I(X8L#T4iz0tv9_A|zYe-~#4`xeqJ z6(z}SmNO{w-l(TcoN+vV5Q}Z%TH!{ApuB>D?cf65^UV^>Z=D;{$CW>GuHfEuY4X6k}H`_-iV#`e7 z-%}>S&hbgtAEeXEzd34*#{j`9#!1FJ%SE+y-?{x}e-?K+eHG3F9m2;lbcP_Ig3Loq zNU3IFJ3X()jZ+e`a4BZNIu(2t9O7yT_FpX?~ zzaQW(1~;3~@nasT*Vrnn>Tq%2#jS1p;0hwx6DT`|yNdLu6$XmBEE)eT@Q)l$hRzFL zj*lB=V}ShuMx5@;GNlzn$9&JKjFT3GoftlwT72CCR#M=UL`^DZXNy-z8DmgWOvX?) zntMT2#WiN!8a7)v$86hiST@k1`Ips}27b3C000F)xLFbN^2n3B@3O5s}~b z45`*i7vb42MG8%9Fb^R*iYyWLIa%rpq-6>d_S*;QDD6xjv&l@3Q!}&|Y|%s!DG;~! 
zt{cmcY0}%67iKndfrd%M=v)IH%ZQ{2)<0eh+)J8x%Fp9Z2Hie!;z#xKT98%Q$9yy%#CzkM77o(M>9SUj5ma`tS4m z?Bj_-jq+-vk>L9qk6n70EoMM^^q6`X2{MI1EEF9(vTGs|q@mV4t?y}Js{`$k25|S> zGWN6;M97jD{H>lQT01N~b?h%1jpIQF**Tvj?$D&u&h0nkJea^ZV*P)R++K#*E;H#i z+ou%^>~AyXblx6Ys;?-`6XIoR6a%NeMQ9_Fgqu_-!NH+?I%R*I9ssfR;Y z1)#Oa^Jgu&6v(Ekt*)|w${-_G;8FD$8lARA7y?#>UGFgg=e$C$a&8s_SCR6>;T@m9 z+Bi7NI8tIpq=HQCJAT7;-5Ta14_LKbYzKY}-gFpUFhpE9zbae26aJDD3TM*{Ox{08(~Nws#r*b$D0R#$2SF)boAtpCWX}=|)dwV3zoB zYm+j+VZFe1hb-V(ta>8v`i|RYDYfqo80dtio00sbY1S%U~i00T;g(^stdTzP z=PIMb`z7Z~W?8*xH*w+pPNdGZY;_l`SKE~_g-yVfoXY^z^``aLo#Iq#qf}5U*O2^nqtp5+Y9wk|6BduJJf2=&2du3R{VTG$< z(Q;00piqtcJbB1+_I&9@`pYuxLqbHCgk+DI%g)f>O|ipl!pJB1>)QfzTtA#RW9f9a z*fj)y;A}!j$*Fvj@}&vtVY0Z|MGWEY(s~mwa1ZYg=uYz`R=K{`U^kY6Mj&15VA8m% ze4iGjVZ24j@_e?Eh5}a;UZtIHWaD&aPAZnL!w{CIUUlq#IPXpCj#AVqH>++?O1dMD z-8DK=xRspk86?k|X&Yzd14yR&4rwFzwO4#kW^HRj8tYi7*jKZ(A)XWbSIU%GOGP4D zjhXCL=%~8}n#7>@qa95_yQU&lV%zcuev zf*Mp}l;GR7CTo-8^{$Q+0mVF9Xi;CZ_#T&AO=_PzuvEv*qi*SUfdhZeZCu5G~FDT zdSiO(ms`7L?EkKy4}Q8rJ+0a<*ohYT6q)3ojNe%-XrDZ6j6&a&^90Y7+sN)57}Cw) za*mGavfRsOkQWo}>Z6Ml`%sRFf+N`pnU z^G&WTOgSfj$S-d4Kx49FUmfw1oT#}d4P*4>H-U<4F9Fc>?a1 z3eI!i0hxLhiFjRJP@KG05Q!Ki6`B?0Y#c1&DrNM6Eo022=1xw}L-~USvq1&e<~#KI zoM9U_o~ESX;+&H4{iJE!_C%}^C0M44)7^GE!j9O#d)_n0Kl`K^T!-1hJd$l_5*?eb zu?!k6Q1-=KNz5#miyRqAXCk%y;k?#<%D%KzO;4q3l}&q+n>AX&S9X8~wA#wlw8z-? 
z)T^f!i2fpqC7Qf&RQE@$(|S6T7Nl5J$RpW(G=@48BR8 zXo*|m+@33ScC)_RXh0P1Wy2IT;eSnQu`_euQ1>GBRl#g907RIueqeZ8hEDMfzPHe1 zmf8vvL`NoH*k#1iD|#1Mgl2L(yTlpUuv#SkP`kK0z0vsRE3x=Ek)^+D9m7r*&Ex&c zK`-eT)5Y>-OkOCAFAPSOkzF^Oqp+XOgyhJ&Y*-(_y&hs6Y$6fIvmC)y3E!yD!&Jz# zgl#aNG>It&4NAvt$v*1kWGlS&ZMDY&9OO zSeRHVGE`4e`V}?m>wEq)j6|=ebpmPGu>j!A6PX$ThbIMh%A}35Au%XClDt-`(YCg7@?C0WsOkZy^YjN{sW6 zn=<9(7o)NKg|aeMIa3!vZx+Ht_NZ(paDGebwcS%4tAYpAcY0?qzw|7aii)12FIlX`mEPR^ubqG{b72r@>f zXo)HCr!#np$Nf4S(Z66JQ>CdmR%v1$GIA8;rL0=7yCggD0efQ#{C$VJlHZuicNdLH?6F#&~kSkqjJh`Z*4q|jPD{!RD*0{r6d%4o)&&ytb0+1ZsU z^3>7H8s?E`bY}qG`|2BFhAks?r`@)5*YWoFZ7RrE(oN9uNN=f$qT+G=aPOc61p!%b ze~E<)YB9vDJM6Kbdx*+U%eomJ329J0>R)rL&PaTYs7{X6-}`VF^fbL}>XEC^_ih*m z+{A61#8=a!_=I+2>-MnmP$>P*uIjT%YVf|ca~KYBtt`wizw8n6I&|&vuHLXgH8f^e>cgx&%4SYbrC8w|sd)DZ(?7-n$KQ^~_KYxV_9u`= zMUrHNB)TQ7Wx@!Hr-h85=y@Ea9OoY(5u!c{jK@zH-q-j~H#7U2jlAErn+* zqktCL*=%mQ8`@1gVyi{3;6*yD)Cle$C z0yFbwI_l|OyJ?o=P5VhYrrC1KaE~#wzQKCEBgZ*eKk2EC2)o3At1aN0TP_r!qvyu4 zHkvx&>i!1pB=vZEz;{uMLsZRx0vgsb(G3>s4d}#YBBC6fl4B^BWS^E6rovGGqnyXD ze)@0gVxb+Qv+qPwAk`d(@js*_%Q2Ia7Wcq(pJ4Fhzfo^VPq6+Qu=U`v;Wz3hv93Q= z5oM8i`51VKzgD_@zEc+PH~nv)UpP^ORfpc)-Th*v@@)vMa1AtQ4sQDJLi}@GYQId% z%y{hsMgw!kETfL|NP@9_7tBf8;`{=~2fUgjnZGRGUFBQG1UQ4hjLL%3$0rv1rJIBR z=6S;!tw{WJ1*`Zf@CrwkA4!82O3u%Y7 z8`3xiw#ggFJV~SO4(GXGefzKtM0sy|mlIMwM1$oJXm(Hb;@r^&!n#aeO$y6xR$rL= zsEKoG%Y_gDY_Yt%Of81W05AGPa&vbyBYbqU)M(anFZ@W@>hT4dm{aBq9DS2CTju32 zftJ*G+%E*UoLE}4GKPN;NQIbzN|H+C+C91UH%3Lfiv>jF#2Edk zEULMh+o%@IdYw(qo|H_H5Iwp^-T28&=Mbw3&2(mhJhNyWx~=!_cH$*o5<|)t&!okJ zNU!(A)oiDu`DVs83J=>(X(80UF;hxv*#ZmrdQ+>L8ZJEq!3wp@gHI<RD#hEs$B zb=gmTzD1j3f7|+tn8L$T1dWd4;0@HAni_|vtghAJX)-G30L)u!-+lVX0Nkdnd}wDX zM&>W2!B-e(9GwPvmcpOq91AHg$yzI|%jea~xg@=Y2})uiQwxAnr3-s=_X8_=Kr;=A zD~kj)GeW$FiSqGOj`gG+zop+=*r$4DN9mReMuN%a>y9cO*~b4A0T!OaWV+O;S@f`X zC#kaZaF9xWT8unW&1K(v3)=qQqO*KLXa#LvBj=Y{@!kbs}wu^j3Ua>U1AfEcDn9{?KzJWB)$0eYWU2@G$R`!vGQ- zWfHx6I~`m>eWi>A`KdCHT{OFjj5L`INlF!qnPR>Ob*HA4kpnkYVfs=WCfyplhF2b% 
z*4jE%hy;`uZJ+KNx1cmSxq(*}*w|N|Wc~1A*PG2lVWrgW)@+>5xC!98|$^iXtBM-3|oAX9a zX_kGTL4~)rjfryF+DFTd=5-ZbbgE1*xG3GjSmC7St6eu(daLzSg4^!MXipXJ(>nqB1zrzvs#35#Z;Gljy1^>=OJ ziG=IjtFZ!%fNbE-nY~FE&y*`Z`u%;UG@QUqUAME{T~t-y<#s;5x1jzk-Lq8=x!QI@ zdfsVjA%)0DWosOm4xGluC5rZDg&DWn5AVNr`-p+|6Cg^{iy>~#&+V-0o?@Lx*sK_cn3`09>j{N z2?CTPJ)n%axJB%62_ckSK4UTtPtg7&blC;F{R|R`8AKBlY&x&mob+0)@~;dmAJck# zOZVsLxwd_CuWo_z(2h^rj>9*-Pn~}oMY@7o-v>*)9{y5d#W^+TxgIn>c;daIZ{1&@ zq1gK_tv+83I0pE`sU{NaKErG<0eTq$&!ztnEf>GvS2P_TWJ<>w#|hR<9X4PjW#VW#62kuk&}VwnW+dW=pVi zGFAWD388YHLG_})b>v7jfzg~}OZ0~y@{(J007z^H+hq-OhuB*7Z#2XQ(obheoHV)0 z{o*Y`LT^?zOBI~bDC=A&(d>ZGTmZyzkJWnTJrrM9P&|i@d{A80& zShn7_ea9f0P%e%4p2Fvmr-Q2oUzoIYJB`Yx^j5Wnm6_qn?%Bg!eZ<1s;MaEeI2W}Yzs{V^1D|?c1?T_ z8FR`Rp1OrQHc2~jfKPJlKZ;^HO6cW^IIlu`KlM4nJUC-`w^zQI!aY-&>DD#r4emD# zO0!_FGi%`FmCwVC{BJLSAXLqJ$MI!VR(#6?Pn|^cX z^wzpHUJqWZ7FC_i+rHW8XspS`@dV#ZWZ)H&O}%3mz1lM6hK&LbQqHW_n0L8nn!*rO z0~w-&BWoLSW#;s5=G0^M{7%}( zaCtUzEH3}!pI+F94;YWkFrPG<(*_+|E=e)it!?(V=G%rg8)l_G@aQXftL0`XELg7J zc{&HEcGz{(^>Zas@AV@Wasy!wxN*%&tz&`GyJmt^2lR3&+2NSU6mJd1s=Mzm6lKxk zW*P1@{Iw(YSjvHNEt^$FvCO+zUgOHuopnye9WJpH6ds4TsYdc@k=rPb`^3V0XwvDN zay$rlP#RC@k$Z8W{ijkAXXLSm?~%K2GehflTpebn%R*C6fmw&vdf#b>-!%@W0ZS>DUNKE3xg55fh*%QU43%## z6=iRpgx<=_^IAA>C<`sg1GWt~dNG`5sRR0J--WvU(<1ajUSw7u{2iO)S)^R`;Qv$u zu0>$F6g8nByB#t`6&($wk^UovK`v*BGR~mX<=sj~)SVNwA9ni{)=-DldzpHFsYjHO zq3K`#asF+GE8x!d;gRh5K*>H}QmJ(>Ez4n)=xtvK9YgR9(Q?ox_cPRk==@H++GVeju!hXHBC%`z+}P9)PKJ~} z3c5GP3>tdjhm(2XqiwyTlluHI7JLW|cfK8~SBkaL-)fE_dnOY(x+i&;z9kO*7rYJ$ zD5}gEuROieCQ;aUs~;sDK2Em%Xl(1(+~_#v{mA=N*L#{uL5TK0QQ^D3q(NzAIN=5l z#d?@`|d(1+MCgb2r-Mdvd?-Gq}0mce=cd(GkT4D3DXuwGMty zrDEpMC;nYJ^t=2-huAtK-bcX|jUn5cL4&EUvyxvX2i}5542AI*TBa}X@o5unet44G z=fZdEZ1M~AwfOMX<+)Ut{VeC|co?`pYG!`Z6^4quG<*J9{UXGEgKkbJ@H0OhLTX!BfosRAvPXxc;2hAcKPYr9fW!5mjloBr*)Q5Q=hrtf_Vgug4pQf4mv%c+Z41P4_baAcIttYvZq z;fe$08hu1|$Nj5@ZY^h$ztKGi+o5JFD^Us05HKI>>R?!Cb1CaH0PWr5vuxhSI}pV1FVA?Q?E`Fp%#r|YzxYd4F1`LspJM2!AO 
z0VK*o>xH=#2X?yBd2YxIKr!GOZV;k3h-U~0E)XvuAk>WzTm+=fk7QY`X2X=sw2u%-XDOYpY)hnST zx3}!)nQ-7f*G|}NkKY?7^Sia^%~m_Py_gc*yFFo7=0=+?-!G<`HKnGg85` zECgZmU5!oHFKlxnVz-s{hikD%ry9yqDjgz)ucTp{v{yW{O=9aEevos2XNarMq==}o+$z*7*rb$yAmz4TE!0b z1dB0itLf-PZvQNKyUSKyKH|tbGv}rI@oP>1g_9qZeBj7P&{m{BohZ1kuue=!aXcGDN?Hu=eBy&qK7P zSZQhUp)$7Pi}wk09$viH7aUe;l_-;Ni6Q$m^v~^N)$U@4`IwC{qzYP}eTp%YWBRCgFtgW#u#qC;`jQqS>U;6ok3IQ z%6l*<4w@>r!6kC>HJDwIg$>Gb3I4cbrZLDsUg~_(ASnqWZ`tjUciw}@79?QJxrkZfK0BoLFjRXlna$Upv`X zj?XnP@ND!wKc5~VlkR!dM3rf=vu@Sf$L@U<#JSzn=gX=qGAqhc`^>c_wW`>AGBZ@A z&9qZ|1xL_*sH!}yFi9#!otoOFdh-3{+V;0iENLej)BBLhr4N26qpCx*jW~-@(p1i< zkvp5q#{N~@rbieyH!_u%mdZuVN+f3 zF6uYp0tZ(dTO@s&3JfE`btHV4nCyzRHT|D8HCyh>FP6{p-zF7rNp#_C%!1}#(ms&4 z6o4yV3tIo`X4q!hf{qBZ){*Nv^16X`2A_XGn-;9h<>0d3@%WUwQDK+g9x=HQW#1je zAN{3noBe%r>mL3^(sIwM8_Ca}8F`74jJW924FdR~$+OE}L88mJ;ccHkCArKTG?>tt ze6jnHXGY$~D~f;UhU(o^TX&RGJw^LM*GqD<#MbWX%x21(`C$4@7Ijcw@>hQQzv#6@ zkZfCP6~nCB_nG&Mod@&1np!K#!LunM?#>4K`cEthEVvVZDxY%D9hHPkhunS-=bY2C zxaRfDS_RQLHA@FM8^7yFbeh#Rh0i0qCpNY-cYg;Fc(5%duP-zD5$rE%UB(Y}d~!t* zEH3SXcZ|Ffr!nT}Gy)f(D{PP$2Q8<@xEICP6@kWrpUw4x$%kIR5RyfmP;woRiqL5K zO9d1fr+8I;GNIl#rNJ;z{$-@v)((D#{qv1Wq?c^S90Kg0xpYb1<&*>lZElM;s{Ez! 
z<@3%{IS);x>Z8hpLw)E=lLXj>-V72nm%KCy<{RwBo1asc;56Ut$ih!iqAW~Hng@C3 zk-priw0k@C*6qj1KFS+c?8b4~0`K2XZ!Sy#9+;#wQs*kAh?nr)ID-B5Y=JvYq82%+ zHH{~C+3*{&pB~r(5?xrdk@*=PwAW@nG~sO8^NN}%oQkG>Ow5`Sis#9?Nmo5@bYrSO z^AJp8sG@m;gUPTs)i(S^McFW6( zABJIZT4@~XYr1xq zlDI?_`T!c&FJ)6xa)x!gTaQoH{p|i(!n3sW>m-&?m2!fY@vJ)el{yj!v$Y~WdQTO& zAQditnMd8#vnIn4pP1fMHxbwC!kT%Hao>{F!rvz9mcJlyOv_7hcuujkOxvL}{*$jd z{*+7Bo+?dOjd8QhtYc}UXCqx_q1{!|nQR|QZX#kWcCj}dN`(AQV&~*3=t1Uh=q2qV z-}M(iyB|*;x=_M)qNo#~%!Yl!`=muD}R_e^4jl+Rr|8<9ZNz*~?~PhwR`#v=!m zXiGHzy{Tm_GYbeYGg<9EO3MT6-z|!0)&(*2f>WdlY8=F7DfB ztqH#>iM9;~rl6;;uS@NTxr@yNBbf}-%_gtw$}Q}TSaZJq%#@Yr_2h^fj3C55-B;kF zl|Z8rqI%wX*|0Wi*PeUN3Av&iOoQ?pO1yeiaTG;;L#5-intg?6zH$O;PCSreV+Dr1 zBQQkKmUh+7H?7RhNH(_p`UEpgcf7&1a@S=cUQr&CSK5p^%o#g?xJA321x4_#Ytt|7~(e1>4`V)nac= zZLGJOSiLZ~JTR)5&8E_p*a%exLC}{9z~?E}%MtZI?keaFiR`*RX`#H<<;p8ER{UNF z`K}ko7`79w*+RcPAk6WaMeoKd@jH2W@C!GbCwYIDjnHe+{!nCE6U2FM#IlSblW)A? zLDV{^@DqG;Sz0|Me!}=Q?OpJjFw?H@JV~eAf48H|{I(NyPWPsgXBYN4>D%#FY;#`d z(RS!y7?qR;9S_xXF7$o99nghIO-ho@VBfh6k{UB!fqz#_brb{e^!wf5dc#Xv&4o%o zKyb?7vpNyS@-%vx0OxmI0~_j~xV#&E2$243tJLtq(C}QsD{flBJE%h}m<4|9s;EV6c~BdF5GtfOb(E;W zRaaGF7;H@xj7ZQZW4MSL?=En3DlkB;fMMcgn3lLLjS$TCH}2a*?E-J=G@76N9=F9`d3+d0-Loh*E&A6 z$oZ!p<=xjuhT3u*tQ~i7y;3wz2abBNf`7*d91x_tM;7aTkUN=k;|pTPoQmU}GIop1g zLSKt0NX`Ov%%I&}0Y*1fMs;q6&~C?#hG~Y8i4=~#OS}v_!OF=YWT{b1JW{=-3*hQ5#Qi98FkddMN^|;JRC-t8+L>EQM^DzZVX;`3II) zxEK?u(s2WN!0$L!1g}sR13Y@E;U?$Pft^t;PhhU ztDqFN%~%wM7AE^E&R6=R$6L?R@|e)^!KP6!Wy%(ZkD`;udqpSD-WFNI^=>$L(j3nD z$gGqq>q$=2hNail*b+G&Q&rWYN-S?PmX0@5{h2(wYabJ85679g2OqAKDJ~C-F6?N$ zH-A#k|CI*aVSDSTkJI^*os>u8NdVUgQJfDNF0Iv`bdaN!&1C^%F#`9?{RF7A;fL!C zP8dGqM(;u7@xMmBi9;rSw>1(f*DK~Q3em)S*cNk`;b6EJ^l76tkWm2g3e*VL&FJp= z@Zovc;&vI37t;5%YQwd(^J58b!{*RvtQyC!Aks3e)Y8<{l=i9~T`Y=~oSkjR|1IAW zr}0p{lMAVbFJ7Rm{*Dhk4-J9>8i`BoT4Nj9+*D_GHB853)48{{e7(2{itk7^vq3q+ z0DFG<85*BUQ08gJh#TpXr!`$wp;c$2nq6RG!SD*xfY-ok{$f+j>!FZ2D9Pz-=Qos9 zImXx8r@`UcAsxa`^Ys~iG$YF!C31-fmP`hU$1>E_J~ZnH4Qhy7?tgq!{nK>oe4vfl 
zS7w*y>0?%qgX61RX}@^7A+zFp)F46_-0|#1=mU|1RxLbZGT6j~;)XZw=rZDgl8^0G zJoEUC=UZ3pb8Cszrr!~e!L=#=-Q!yvnxYF47HrR-i-VveO7O?GG&o3_PmXW}|NfAP z?tOu7a}rge^dn7r`y{HUU_#?g+#l9U-nB$I%Wd9LCq8!fE`QkaEIfxs8%tB9D!f&} z;}_0MhGM>yb2P7Ph1I9jI+n6+x4)cfqw3|??DOi^oDx$!9O9SOj`6R!_r`=uc)}LJ zc6R8E4-&kc+bihDfiQ$r0PZKwHAZ?NW zgl=s>%tj-FZ|TT;cVg1KW#)1boQeWM#Vw@JNzn3sj~97}Vr%AGe~0EBm%rmwsii7J zO%5}ca9)*v2HbiZq!U{I%wDvpJ*@guc2qNNE-r}zydJWPiRd{(Y)pRaHN1i$j^pl% z>%`SJH}uZ2oB2m6U~M)oSec8C6#F%M{evntE<|77Av{r=E4zK89R+*;CJ6D!AhV6T zRH*AGSlCo4^i_S>!k8ta>9Zt{_4nkD&et}+wQo)jf{BT977R^1Ch{-KH$C-?YZrG- z=!R~Z?#e667D*>qQrtwrs_z$M~t%22nkoLxdth$z|%a2lCmO z+oDdVobjAh^^o-`FFPsq*l;y!iXv)oFJK~s_xFRsyHa|mmy?j=>Eg#dm{$SeWgOoF5F|+@RFWL*7(9 z$IGIN_T5TGR1L~~(8l9Z*imyLo#2)2vBI}{s8X1CWla7s$9sg{SacCp84`tvsF~5s z5Mz(-+k9B#)s8G03g7SJKjN{E$&zspj4_?y73cL=H4jsn9b;UZ_V=sDJ2L5sw@ywH zk-nyJzzv9t=&SGY)o&;9k{#YV$7Ousu21FJk#y;UmI0Ir&(ZlsMAlHZ^n%ZeGA%0} z_C#EECcP4xNjcbC1(bwQ0kO9;F#c%m_%6FDnJ<%(s{g+4!syneckzzk@wQA`&@MRG z%42*q%?C5hLv%pk5S{<>&cXHod2^`&GZpB0=O^Aw@xr0HyegLsi(Az2+k2tL^R~;7 zG5#@OJ4j-9)A~}iw0Lqib86fYj=yRRdO_QxR zwc{JTVrb(PN=2_D^ZT1RE;1FeehuPDppkZEgJ9y|LkgqJZ;Wy3azi~jlEHFT6>*=w z8I_9Mp0-#4;A3SglRKe#_7or4u5A8$FB1;zL0v(gp4Y5zZq(mCcp~0s3S?jrwnktd zihoiz!zh)(K|3llu=T0ZuEO_gCReBHOoa;#Rcv-nbZmU4buEdOZCDIH%_L9~lyExU zuEC3&r>idf^iZedJfX63husga3~3r(B^1H~YQ|lvgx3k~bSO0}*|{7yKLnUgPl_T) zuO^5Qvv2(Q1Qt#Q*Z=~rgK}Y{9I#=B<2zyz;>Dx8l1?D}@1&rlF%+Vq)zr~in>z!R zEl+TSEIhFVwbv~Lk$K~&rH-+z=f$($*oe)AVPXNmZvBj({Eh% z>o-6nDvswK?(a}7cF6=WZtA8{Ph{=J^~zlwWGsWasjG((y>7==a50{YpqQ^YT2X2& zxVId`eWNMFvF(>+%J7s{6S5jCA;W^k#PObacEPBqZlW~dAH%Q|yA)_4{2j3+#a|W1 z7_d>P>a%;6qQY5Bg5MO3(a%ODoGAEk;13e-U~_yPJ7P=MoNkJO3{Qar1#li*UFOQll(b2M zz8p37^h82D&w279YjlO3gFRz@;{}?biK-EVbPN_zoO%&+&sp^#KI7)U+U$eom{e?sFE~xl5}+*G6jd) znk+lglsXD@w+xup<|13OY9=%C8EX0%wAI_D@C62W{wi~sm?qS=JFDF4ycPh4DLj$? 
zlK?|{^qF9SEqCoB;6}t)_~d9<%WClPB6Q~vq2w`n=|$dLfcD6SjX0UL^=eR!B)FQ_ zYXb)ed&w=xD`$a&K9En*r67?VV~lYBzgmEeut<0|?$G#tX4f8|mt#p47w3abQ_$*ejSfv)Bp1gE?2%r%NTE zyE6C$K?v&aRe27Xs4oVMkf;S~&M%AijW33~bzUZHOTu^XXR7ED0Z!peO8 z04@>}whw%)5xHxpzSk|P8g$thJ?f}_Kp&`IO72FH`i~Po2tY8TmR)eA(Eb-=|MSXo zE$3IyT2g^y_utlZ!!XiXEw_Q1PiD~kp^15@qEOYWd+}AcF{KMS2yH67nU(c`Z z5am&ByThE-R)(hCUzy{z%J$94wdqCp{J*6eZNx^RxR>1s(D=#67ewNqG{ST!5BQ^d zJKS>B|Lq+}tq`!Fh<6oo+2^;D8OK#LpFQ{>iOdc#uK7}KrLPM9aA^h6T~|7FSsj)T zo~Xw?DL%zl#Z(@gifZ<-OTlqS4Y#z;A5R6unrO#A>JGZcYAGq$Iev3iUF3SI*NDWV zluR~r0F1}Pz-hX~o~pM|LQ;7Sn1WvfsR_kDsYHh1dm3UP_DR$TQ$iJss%|e1rQGd0 z=S5!{s)oP|HmlLKu_h+=q59hOo;S=`-2O2|ZBT(CkuWw!sxMmf%}ss$8Fs#SNU~v- z@^3GDdqdc_eSDx0d?`cge1`9;VviacLdV&R`{*pGJBKv>hV_1`Pg%~ni$%NGpOy2) zOwjHygO-M4;G9K81(%L`L~Jwk{za`BD#G+QmF`w8f+m~XvK3T(;;yXuFwQ_|wTP6m z*@6bV673*?mhkit_2?9Sb8Uvo#n!NyCi=Rf|Fg+qr(W}_6L-y{(fdW%*jI;t#_TuiT<#tLA)oDR+TqL3yAhhG#i1#^d&cH5JdEMuac(Yuu zJy)?92gA5($7Du`frB(BCl4Mr&1_&%oHoVqH%NTv=uh1cb#VPqpwu>7E0(_U`X6N$ zM#bV?#_j7=^EkD?c>|}(wf@%vG={vS2a>5AnT-3npgT^GS0Vg-$8+%cq|S9|Iz#wi z{Sw7pT-tRj(4FwOy#&q*!Nz$r*XzF@e;m-e@tmE`1Csre)W5Ye3{`Q8bGiM~qVyw| zJqrJre`Gims!IiiWDXwUfoY-R>K{H*e=$9-=a}qsF!-A3X{9d2sb-xHQ$$PPB6P}9 zldLMIL7-$25|)^MH4*Aty|Mfx^`E=awRHxlc#>Rgzd)jLGorUouw}>XNwub!SpT zmu?Nx%T=L3a=n%NOa`Q<1;aU61wM)gw<8{-S~*0sFb7pkJ1ts?u2&hrF*@5qvO2qa z9RNV&BNVXJo|eJK1Rb}=WyQ1hY)YptKY2fPKu`=cd=DC6H+0Vq?b=Xa4sy`!`|e9= z-N{-T?aJt&XP%P(?c}rJJE8>}jvxh<&8I6`S=MJt@`CaE1LM?JQ$xJO%~!72l8>eZ zq=M*WjNIx8PEK}#*$o;>g7GPm)vB2oZtmYyUGU0~PgL8oK9=|=!_v!LJs80$(S7L&eI8;7^i!S70!j6t#k}kfI54yLO zlLY}WFyOTAHFZT)E6s&A*TX?qgdnf+HRjC~IBa6v^!0FS0AlGv-!e$uazh>%imNQ76oe6=QhvCjpfiexji8O%JW;il+3L) zqD*Gqct<%J^l9SOucV;lK<<;4jkNjV-BEKaTAAA@gz+3q0G#VgK(z!GwWFB!>z zUJl=ThN6PQM?o;u7V7++jtbu=&Z86OSdFp!;oIU93(aR2B3Zyn88uWK#i8t$N= z=3j82`Z^-z-3T?LJzHTvIpPD$cnXxv5!5uY^(x*6b=*71!)XTY*!3fGb~&cp(iaptr8oe2FakKa?pJ>97n@j zhT$U)HDwJ4Nae`bodq*ESD-;r#}OVYPJTP!|_6>fI zFY8N30*krd;R|R>9cG=Sqa@09qjY`4=n3d2S}vAuPoSw0g&Pma#)`l-5tfa;fRkV| 
z>yx7|qsLehU|a>E0P#IEprZ&&X+--EeM&@5cW%ZiSL4qEivsJjA4dI*GSs_snE?K2 zCyP4Fgp{+q_P8f2qvJmrT4Ln@*)AY&^5OyGw1A}QT?U-L+%3nF@bp9=Taken2cN-^ zH^0~;wTSOwP;@ z`Axr*EP&tx32)6@r4n@*J9L>@G+a;CWRm$^zhW$_Ijp6B45$(~^m!a+^iRcL0=lB$ z=PeB$b2_S3uVwHlL{F4QOR*$@ae(PzQe77xC1trU86T?JMEAvjjj+IjMsqQNr(56A zMxZ8!nFzh@x=sgq6eYvNNMv^So0GCq2~za3{47y>2#!C~D^8%(7^Vt{%&pmb8MN-b z!zrZLd+OpaUB@Z>T08e*8Fe>aN3+}iA0<)AtkjekuQ{ptyfhvunR9+<#17<&*N+*J zGPVB?OS$vAvm**jKA?)sISS7va7D{a3Ln&>Ixjc{>hZfX#tDuRmIBSsk5Z@4)3;y# zK3wGBH2vq_*Qy2_rGR-O|9s|W&F*YcfEW+FmXzyt;3VX9jun>1FY2Ec1avBXMo#%h z(FyaI#Wo7xBttG6**A%c8OQ+yy-Vw2j2uGcc17d0N;^Cagknj>-xVGIlkt-5D;(x; z3U54jDI~fnV7S8k+PROP-PG#>1pT;X^jdl#TW(K6(VCsfQDm9FOw`iR@^p<-I#0!2 zEYKYHk2)Eh=aGPB$Z(N&ukxlxG~N$0*tlaU^L^s(h7IjQZO=>vxyr-xvt$Bdw4cT2 z5j_4%K4{cLQ9ON%2~jql3!0m-l@WfXCMadR316R z_yOpQqI8~zRKby!%CQ6A;;JR;PbVMMWxD9U;(zmr{3rOYu^Ii z&rJLj6jQS+a^1;&CFsE~QXMp%SZ4ap%mbIe=^r1uZFk04(z4=gx4*^s&ckC2>1C16 zP>oG{G6=#`C}uX$|Fxe#vcs>B$$8O%STIF#8h%NwDWEA3{(PEyoG~upvD3Yd)_n8- zh&Wsw+g5d{(R3TKsWNQ|EcFEdYPyGD`7$ygv9Qk8;nK|41FsLSp^Oy%|kuhFLdr?thFT3 zz24-c{i<|I#QGTkssZ3+eNg`ZaTH8pMLLrK9a`{brO0nH56>RfSQ%q&ITb(uKgGtH zwE3O{~3s%JWg)Hg59RoHTX072UnBKC;K4a z`z5gRSwWC!w8%b9ufK+dhTX{;I_O;ndXyn=2uF|Ff11oXXSccU8-jf(_}kt#)0}}c z6xi+|vaJ{F`bRXA?d*QFJWU2|BP3LoV;}l|{+=2zE1|l^kpzjw-uxg&S+n+bNJ(J* zVQs;-X3!YfOTp{c{}gE>2G$%N&7`!{MfdTffEOfeKhU8Oo0ucRlg&sBQeM&mwj#=pR?D_ zK2A_2REVu^Q+w}BF9Xz_)+6T*CZ|eB6Dev){$n%yoSIbi;%7my%F`G*J;aX2;D>|* zE6MEc?Ch@!9J>dtR?dE0f(ZJAgpsX(8kP0=pz$zOT$L~dbaWr^J`ojyYVgjP!gUSp zp!V{B#gAZg7Z~67{eR$H>TojF4TsYdfH*AplWSK12Y#!f|KSh{OxZ0@W6>TbsWksJ zy855>K6gWmVS3(D`sW96h4L+{NhSRP=AbO>z@B;+*i3)qWg7FmTnx?(7haj|m*9y@ zA0>Pv!}LfLsgU!Wt5Dtii2WAmXb@E$2bvgSLdE~v{^+$gUrbSwzd6VM;TNT-$2n>N z>njKp@UjUxs|oL6xD2o;Kn~<$&UAt)ED*BsgksYZmH&*4QYH$hxn2c2jcxa)hVOR5 zo(4Xg#n_}Y<<(>7bJyOjvn@dJ@pe!29B@TF5xIANU?YL%>|$4IBs+HciT`P`V%)e-3(E1TI({ zF?@B}13*EVYHNM(L{U-EN=K5lsUSTPFm>eLAEQ`%Z+$F5Nb3`NOFldNBtL6fDgw}z zMUUDSO;y&z0VpD>Vk$qCdfz*qf9@1-gd(l^+_q-Omj#=DFCJo(wF}t=Fz!6g 
z&;>Tq(Q$Jlh~v3O2OhtB(K5&j4C*+UDf6$MhgnQZsT$M>7#9GEV@qcIe<*pqRJSV@ zY7kCUpoJ(0q>aN-`LUw|*b6n1t-^HOqd`r~TAT4_VEs?aN0{M3km^8KNnkjvFoMb& zv?Z~&gT(1|>co%~O@@`f_VnGjy2CbY(H3AL2x0ZXtkGIlZ^i1j2(w_;B}b`8R@1Dm z4pmaOZokd#)nL8902FzUn|%1r4p1W}5XQK@8sUTewUMT-Zi<9j8(ZzGDq#NMMaXKo_QFD7*#%#wwj zMv6ay3P5h?XW-Ev)+VpV)=ug)8rRK1(2*gYmu%%%)3y&1U_`?wg-^Yj@5BG|-J1L- zUC%wL+(fUzXpjX%VF$&0Z`ekkS;MAh91kTX=+`8w6=W*+Jry0h^;@zqv>~l*9p+#ETxEzay$o=PXnQgyqvMRoJuR37 z(c|5X@9Npv${O86QGk7el)o2kjEHLuBlFF=7Baz&HQk>_%4SL?^z{i^% z0B#c}h6GC9aeHT3wii{;bD0jc7%q{hJpO4!hpm*j=*e)wsh}Hme)swoid!?E{}A*= zB@jS$Ho;DEfQzVOp`J}RJpk44W! zKIOeE#{8r*7$QH^J2r5A_+_3!qi*uHruhG_)@)DOugI5Eahr_7VWxku51M7Zixa zgwdD8XbUP#Wul(NHEw>gwEP+SI}HsJ)0dMoO0@szpZ01mUmbTI4LCO<%|o*9-MRr{ z`x(5mD2DZrZFQ&Nin+dCwRiJdP$6toutMlp&VN5L1kGEcBC^6!amBnJaRBCo`VKW{ z7xK_k-C%^Vyw7i*paWV`(pYfAi@=SS(#>LUdi01ZvD!8lZ1@U83F-9zyr`dLD!t56 z>ZNYLc;)yB3abr3{SjUN2Pq0b2VnRuY`qgH{H{+AA1Sy@I;O59B0?!X9Q`k-D>GA{ zR6h>$g1I|Ea2gPRvh*f~{P+o|!k4E@?h&C={ilHYJW4MR)vPk#L)-jc;*EJ8Lfi=5g@SBNRBgDvVcEK9stz}kApMStv`u@2A;}ku?hT7EqMLmD?zwT)Q?dHI9iD2CKEeAvgeu(bjYG@w>j^;k`i~nD4yxr z*>NPx@#^BAf|QZgHF7Lm7dtQ24C^^~JZ%Z*C4Nr&h*7wp6Y7EquZtXV+cuLD8!q}s z&Bfy~2F^mjzky@{U+0J3`kuSzxu7%TlN_8 zmRv2}9wqFGi7Yek03+Lc{_}9GK!FRu?s-MjL)f6$W4w9Im>W3|<{*73dI8^{o2v~m zw146~*|@{5!Did?N;abD9lzVzbDiVX%lFYQA(zpjyn^II`HVgP^9LWq!FFw-lK|qz zl;be~L+39kslYK0c6ScOjL{#M(*q!qiH@(n(_R#7zI#&Y9Av~UPONMBMVn`8cM$y5 zS!1v9av__Q70m}d#W1Sx8u8`{P3_wR`ceB+n21v9mWas#qHbEON#J_Uq3})TqQ`Qk zS3r{YU)C;89R@R>D(ozqQpf2-x4mu&t2vV9W=|K9NUz2`w6bVS5a^xN&zP9)TH;M} zm8%rOho6q`#9_-l&tm;&5@@IWmr?Y*c6mI{x&wa{qJlu6b2Sx}^s+K}>+foQ9U*j5lh#5yAA8tO$eHET!tY7s~6`8JNWEL#Lc#xj`o< z?7ppEl!50%y2Xis6(#9Nu*_TkT)QFqHQE$P)uA_g!JER7U3JRkK6ihr=@r27KQGhR z9>(aD`iASAELKnMfY(>y%uH)fK_I-Mj1}5`*E6w1zK2{}RbFYqnwQP5EuW%vYeeX2 zvL<*k6ruBNHsUS}3mrC6rajG=>Ouy-In2h!ruysEud);S#*>-e8_#VUaPCj(8G5q> z;Zy_by1Q?`JzwtRDAL6Ba(b1|vAGBI@rGVA`ol^`q8OS@4^9X3EDJJ&e0cNTB|{za zOO=?kkpN<1f`Tv*O?!NeS3Q^*D3pB9O4aL}q4#u?^mV+Fxi6cKcx7^>?d5;IASTW~ 
zIu>AMq(Y{n2edRO4$Cj7p)yf87BD@=pA0L5TJf-jJbVF;O@X{opAd3@fhE*L#=V_g9Mm zi%QjYxGk)+Hxst=!ag!9>4f?kEbb18>1BWm*O^XJ%bhRQD&-rhcE`5FG&!R^lF!D?i0lQnEG6%LZhFN>7TE{SAc&ZLvVUH8OF<4=< zqS&DTw&G(J-p8XJ2d>qD_ekFKfZ5K0NnMrtZ8z+`>gwOh-bsXWW_$#+7Hv?gASw#q z^Jw_i?ZV$gjf@%l?Wjkh_D!cgIy;V&dW7E9z*FsaUkeD}H_ zc#?Ui!iZRzTZ2&u9z*R<8eHCO-j&DTT`yQN(6kHP8)oCaaX-Q{h1-DlU z@t6j2NoWmQNaUDsTm%4VEyjd|gshyLqd{UCTkRUDlXXt|;Q^e{XN{KEqR69EB~=f! zWC8i*9k{V)WmyxzZAqm?3DSj-GP{HJ__jd(xqxOfqKtbbxqY`e?^u%#u(6J>!z=!7 zm%h#w=HoqyIWXGoEx-a6@s)}2zw8tqn?;GnjfssksIzOEJ$T}(LFzA@+!SSG@_DW3 z|KSw7MNv<0FIYu|gKfKd5jI|6(~zN%1aEvWBgClwDa(rC=6F*?OxP4NWlo+O@2^8| z)>BCUko~l)UQ~IR=Q&+A;UwhZ56v5_1%Y;9}~3{%m8Lg;MsTg z^(dpz0Kh-g1;(x})GU>t_B{Cp)7z<8_)KG#l7;cvbN%>dn#*Z>_(STD4|!>Pz#}vI zR-wG9U()w@XVb}L5@PgDT!hKEO!J@=b!JE zv?OYFc9EiYS4fDv-WA)?qO9{M3^}>UH0Q~62R?@4Pk%=Nnmwa9VjUBz=D(Gal}r{a z%w9f{K9-{}n|IK>*)psp{1EruVS~`KFhrhc=n3c@(5jfmc6NYzD7n*KT_rQ}8Nh1X zj@noB)4*a64BMalp&xJyTu+I}YGHPZfawO-}h*F+O_^9M$2MS?t9M><+#8@0L?p^VqyT#Z_Y8X)SuMbYXco* zD((fkv%;OXE+P6?61V_}E&!h62`mwUIO6av>6zNv0r@<~tH~Jq~MpVKH zqk=+8N3CnGB5(0ES@BH)t@UJ>=QP_i-EiI;l6jAcI|>zX`+${k_;yX2{p!sLwIxjR z`de9&;dqV?U;!va85t7IudY8YJ{+ZNa3&M)cM{17!U3;Vz#kj%!hXPszzx;+;xnC< z#^~G}5j&6R(#!MxM&FUqQM!w*Y=$R5=yBbM{3xDZZ^|a%85v*YYK->lDsR?C!@0d$ z8>DwP$q6ws<76)~eC|GO&=-HTB#Hy#2!-06D~=8gXuV-F(ZE{M|ODgDVcz^Ou(T&>>%Y zF-2O&sP`$<#KYLo&@`|KiQ8rA%R09eYxuER5`k_PgDz>p_$^T11FA0N%?k?pVv3kU zVep4)-{~*l-W+!x5@MmWc~qxiNG*fg#U+u*D6z=O2Aq@s-b;+I)^;+=yk z6N`J4d(|7F5={%(Nra$VdEXGI1s0Rm`iI+Zs9c4qTpLo^g&Oc4{ZOdx_?KXMWLcz} zlik7d^J=~P!opW0opLW^)0$bdVQ#Gzoe#3_x}~2#B7@?HXVixex1ij+bH z4xIx#Q|ZxwOr4enI=2FCXq!{+&daLD8;~0X0B}e^#)CX>u1lNHuFQ)IwuEniDFVIy6)J=0kQipegSj z!!&M4`WHEWR#qQvA9gnN!l8_I%nH+Ei6UL%H`+ZT%I?p$+#B-mqxRK*o4RONFcjSN4U_SlC#TZi$?p^m5{o zg9F_aQg{}m1;HcSGRp*;`j$A2b9}oph_aU_i-t{C`wEqo;~Ccek8zYY0K_~NlX-bH z((O`S{!JwuBYjY^F=8w4Cl-XNF6>TAF`2Qw(d{iNJC(c-pAzt|Vbdw+|w zGu0H$#l;2I!avSNH8RX$i~dLU5^qU@O=AF6MJxM9+#=0V9ZX`*u}2nJ&(|ed6RVl- z3;)#NyAUAf))-7xCvhdWpqG@8;7C$P5wOk(NubHkZ?mnF_!h^WAM5Bd@? 
zwmVVOmddVG$MP%j{;hqYaW6^vI#s&O@*pc95b3HfuWP`+vQO&UgvqgXF0-30mRFFq z8eAe}W#!E4oZcn1v5(YKA(zw$JPdOh=!%i0_Rxjk#5oq5mzMV$hcDg-soxQ~(*3&K zT=>8Dt3|TB^+TDp{`a9jAmLu z9BfirLpo>Hyu24(==l-Bh^{p^FGRo@@{t2GFt2vYu`K!doz1vqk)a7>H^JV|UJXa=@ zK0nA{uy8yT#P(TCi5(rgR(j-GBfOvO^5&l1Z1J_K2gd&1%HmDN;-Llv;_lS7`|HA$ zay&;Ni9HnhMK!^P=g!&C@TR*0Fzv}He9O-j?}A~FypfT-DhXWXv0rh$J8Yy5M&kF$ z)vMYpWbnsGM4frhz7T%4u`F!YS)g^l3;*t*n8>T8kZ3&7{0@NMfP9G&@~C3=E=~`T z>GogCX#24cSXH57%$Sb$eh8I}l&iAQw`OPi?(S}t=sgM}LqpcLZ_i?)MuUkV+sw8e}Hvq6y`O{vLV#zxld(})-52__w%{JUq- z#v%O|MpulQ1(@_sF+YjSVB-38LD zHA-ic!K}69y-qYhbrD{8%OIpRVDmk(XV)x7ZybLoo583oM%(YkL)6IFkd=;3F5z=R z4_rQrZ_tVZ2oU=EYJT8@=3dsKy5*(Cd+%JJ6f z|D*1$!`kYec3~>ifjTKgO0iN3lw!qeNYFx|IJBf_aCa$13L!vncXxMEN(sf?y+{ad z!R71?ecp4P-}PSSI_F&9KVSYRbZ70gX3g9)bIC)o7zqmm)j{Tav(-V zm!aFoBQtSS8G3xE`|BXuv@5}2I1$P(eh~mEoi<8C@yXx@8VCx%b$QR1eEHIv9C2qG zZ~K+B#i0BRU-E(ctU`y6PkggDjh9-rU|#o?3Xq>Bym85kF(EEJ{PJb)snYt=X{EFw z{hK z-7tCwd_Ca@jVrWSugW_BR>Sw?uJusShJ*jf_<^UhX7S@cSR{J#CMP`N&dxpnbJVAb zWEnaEW?Tdy_#fHxl#BC~S`F47w`y6Ptm% zy-RLGuhSjT>TApn$e`x#0SQ}5=d&wqi~#)2CP&iLN9%9d+gCy_dtE%QeXHa8>56JU z^nsH%_LX;4-kxB@I-22gQmC{Fa5SRwgX-VceK#t9R9e1w^5jV)`e3;6E zs=oRJsJr3?YlF9ZlIZ|q8CVv4Nlq*ZP?a|J%e^@({T%BU)eLBlWQuO8(dL`<0W7n` z=7IP<6+UjQWpW^E-uRd>rd;ZL%Q6X}`~*&_GUS1}zxy?o@L@|obLHYjMHK2Iqt?@u zw=AkrY%tHXCr-{TLzBk+;bXMRiJf0br3U^mdNfUdX)AHnW74SN*s|D0xYL4LX} z=f*Z?2ff_)+$g?eJL+98(DWeSu*QVD(+X(LMs~CJ1V@DAB9<&mn&Ios>0R~L=tji* zd*ljpXrmY-QXj7YbIL`P6s|ga-T%E`ef>I%y{&TL^*4u{;Qb;T0(2oX^8I;mcv))4{ft-V%cC0BT?1slTHNmw~<$0It1q;v976qN3Wg z^>{P#%~>D8BO-^*v^YHG(B`;Gs9`;1GP$SjP^zpyYsS9;_Gq*rggIAhrNRk`-V5Kg zco_*?BM>9Ew*-Eai@Ot5$WINXUp}vW?QV#^QP-umniD^goRZDk>JVT+r+2<95RM3v z@1KqLVHhI(b;XYtxD?>HH4FTBWsm^&DqOQpc^bh@rvHCmEvK8gjnE=K-Qi8mzk8tu z{g*(|OyakP2nFz)nBcxS@Z(WQXA%IV9|~Y716=gp|Mu#;o}{YTRUlXr=v^G`t`_M@ zKlq~ouoHsL@~$utkr^J3K)tG^uz9HI^#Y;IbvKa}M1kCOfzvlUKmtYJs+92m)l~_* zyB70Zz)GE;{%yT$1^wOs-PZ6I;gB@Y+^^hUpLQf)Wp|#*o|Y;YLka4J=(I{EASigF zc?ulL)56x2sO+1X!LYGxqAa+hKYq1<%M*wRd;>}}f=M#ts}*aQC%~`&fNJ4PSIpVU 
zrvecCy5ErJ01Q%nBf{I@~zy}fAo*@L^nFiwKKbQG`xH@{<{Hh3>*S}{!wWTW@fU+EA1G(mC z7AO=M_rKQ$?qu}Ul2-Ze-~A5-{FMw`?cIN__CMdF{tjB4mg?{B=X(G>O_KeGh7)l; zM`n4TKY5=!g$99^$6qjPffh0)`tPp-_&G1zngBh8kMF zKdb$pT=k54!{YX2t}`7p%ewH{JfV8_|7Cz5Qq-i~^T3<`<&qw!@#7!oe)dmL{J+0C zl^P)N$cKkx0i50O?>uyolw(voPk~FW5L3V*IsTg*2LBrcCH|Ge{;#f@tqr}>3<0+WO5~m|iy$JaA>^0gV)p!pC&_AyqDQAJ?pZ`Ca zJqw=y{Qp(+CS8@Q(sPV0t+s^0W5%?ND$(x!9CUWcER+fOW?2HgF9mW5=;FN{3g+<2gXxB?;rj$aPuAvp(v*bt)atb9xgCv zq$TUfj_V{P3!~-~oR8%9jvm5|^t0iYfO$sSAw6>41V$riJE&e+IhDI*SHxAQ^=pq; zIBUdVqRWO$)c7zJ22)wDdR(P zHVnk$e#=g9aaeMok6^2uDjiBNcq=rSo6gBFhv*kh85sD=U7^BOP%c5xNyg)Tw^TG` z*}H_}Ln)|<3f#rF2Y5CHv@IJTJ>m(qj{ePg=X^9ZHK=jdp(cvXHSiquJ88P025zOu z|4w-^_(HDx9W;rqL{g41infLpM-A<`V+*|h3ih!=wWR=s3@Q$KBkE3H?+EokQI__z zh|3XyBFI5tmC(pX`q{}8chT=9q`sm6>i#X6r+;`NNFIy}WC_7nDj+a>rn`m_k1`}s zEg6|&K+B!7u-`d+Neh(m-E&N`1n;}tq_vLFI-Bd;W_Zg}{?)ZrEsf`?Y z*854vZzxj3s|@L#D=Z{X`(~9Yt|mP2{*PG{8KADMAj_O&_n?pOmK|AeTJYt?CKeD} z-j`tr&F%=)F=K`ms^z=L_v8^eb)y_+aciKDN$~0!1g0WUDc}77^#Y|PUCbpG$&BOu z3dR%fu;g!Q2sR2@b!yzZzRy_}{*77v6uenZWy>K0C21<9%ki!BJk71`vLe>V7e!IO zA^zfAxdW+0iwcFGW4Mfz(*A_B;2TjM`_ouLmE)1eA(s~%uXVLt^H8tNT?BRKDlcbe zIj;mA5L?g)bNIrkpIMzN0M0#Zg-eK@V-AUkNdlMnZIFn_HY611GU(a28ZLK^;mXm> z@>?7*m@TAJ+1_+KTHidM!>FP>aPqAWr6c|{zdy7WPY>?y#RFKRc;%~ZJ5*#Zj*`+} z1#2Z&1+G!yRT#yoD56vPqVQy;C8y-+Q2A@RhXW>iD(F-tm&3yMEn&}kHCtR11&*!r zRl4ygQTI@}D9f^v(sq+*kXWcni2Y76Z`mxmt72&UpiQ?D zPm!#?EhKm3wVHtx0KV(Aw(TSHfx?8-Mya6P>;tD09A9BwdjBs8y0ND?veHw2`&J7v z^g3bsEs2kgv&hF%&t)rb&lS7;{Tmbs+0i=d%*!dWa2}SZh8x;uJS>+1a;-GCO8%cWpT>G!P zg>p(x{4DH%2}}#hJ=C6d5XaEq0wU+QT8whKbk&k&MJ0m=7JVfI@ay=4JY0dQ&iszB zkO=$HL#UT3+~vCoNTy#cKY$U3Z3QHgDK^*)V@>@c4rKsDFduvLb?B8tnJm zBldSgPJ|y?N6_Fn;6^#^bYO~1e(3>(4*}Y?kY4WY>aEsi6dY!S#L+LI50r=4@@PU0 zpQqczImpXQ@GwO(cPm19n^g{^&x?bKO4LF`q;|6S(sx&DxZ89UVB6pOQ+U#+=G!Y( zSbmSKJ|c74+UV(emM-)d_XTbQJ-aIth=HM7K(>O1r^RejV z#8xjqb;6HlbxEGcb>HeArZZy!nI+4e%>Z9E9agA}SThlK<04*vX8tPg2Ew;5TRSRT zP-y&o3Z4CIVAa;>g)+VA{gS}cP-x_roEnDLr`kax*dCx(LO7_qtGCBW2L>6#^nX)c 
zrcKRZ6cXJf=$4ROBIjlmGVc8`nwDHe?o66rwzGcCLLs7<|ceh&;5|W6!2N`>S zXsO#88+D^(kn-pw6srtxxg$>ojL!F9qnGJEpV(SIwM#vT-nHV`u=}*kJ$4_DYhey1lRou|b z>`P9c7pq`~S8iM?5nRzL>SJPOm#{x=J_jh~3rx5UiZawvtehu@&$8>+bzOtbl>@rm z{EG$s3q=EQef8XGi9q_Hkpf(W1HPPkF$5qfu9q^$sYzjyOi&r|VL(oG60)QZX%mm* zCN%SOQ=Y&Xq}7{lz7D}oZ*W5R2%EoVZkRX3gaRuiD?tFl2l72lbj zUHzD+k}yOXlT4`Ta$A2m428_^XMw&4>5QK1!aX37TZU|TCL4n)!AS0>5%ZWC#Kd(4 zI36~;SaqONpS)MOZH(%Wa075e0DbpbzF7t8r39al`gA%4!1KbM+#NUVO@%4O*z}NF z&5tv=DbO`y@NlaGy0pcp9Zg@3Wk%?9c8T`Yr>72}ix`qg5G;b0^IWF1DMj*NH!it+ z(JO9lId4B-=~%P3icHKns9k?JotLn*su;oMPFs`p{2Zx^y{B}=(tRSK$@E|MLm!VjF1!W-6%t1H zQ;HGAF{;8Rf15om#j_aEZZjQ1MXRd$zk zfsedD(I8?kA zr|)g0Qzvn)^i<0Yu?*}{%A>C5#P6ScklqZ%=j6O*BDrXJ(uZBF!BP@&&kB1WS+3FN zpZa_-TWS!{S1-xoox|^2jtaAv>~t~E)#l$VS{v!GY!O0|l!XJ7_7!|N>*`rF`-Esh zZIvc}mY#O8V!? z>O7VFp$!D)`Ct0nH%-kZHxI1#5hK02gT_;-(O(cvGJlts^#Um`-I1c4eP4l2MUK_6 zl*LTPZ-ll{uGug`72r{>ru8-tU?Q}IAT=p(wuHi>FA+z+gjP|QOMY$9hw$Q;(%AE= z-zFg5bJoxb7$DZ^&ouE06COmXzQoa=qUZuR%-D-ql^T=bbXzC=Nc*D`Mu3M*rK-UY zW;bqvNM8S*S(H0^eE0ymoQd4)K8&=lcW*Wtq3$oK@Dvhwi#5xEec>GHA zg3>lkSZP# ztd5#{k_GLd4eos&vz<2-h$OA4gM3(a(FMl6>Lx;E8VP9eQ-ao5=+1JvEp9MaN>%JD z9pfxfJ_Q^JVK@a^a=f0#a&WJw@fk?7ZII#+b*@nus0*nebKKHU{3L2$J_|xz~~!} zRD*+}Kj365-&w;#FO^ zKj}2|3$9mn=_>CJ@NBgD)>AMoDCk`VJF6UWpFcbW>2`!PY`O#Ov)r**XUorf{0BZO zVZpDur3RBccsBslm9CejA<(y(8cW6Mn#(pzIOvl`0ej+6Xcd)tcF%ica?5QsuzeyI z%%+1#o>Iu;ZZk?T!>R{<^BL9uBU}_55i`wU>B^Qm+#U)H(<6Uu)L`zDtf9cVP4+^~ z-Fkx6`5xzOBVNm{SkahCj$lTd=Jy=1&p_nHMlbcbJ)VjW|H4`Y;YQQZ8YU!AuRm`I zYqkEK3j#0$$N*s+E&5Bp**EGvUcA37Wuc1e4C)}_c64X+)N}=gkWn|%cS3@gapq|S zuvA_R5ekGL{=D`VPBSvzAxIBj7g$$6nvJ2(aOm@`@w_qc3OoH=R-(da1dcY-eMyu8 zt)xFy{3E<%;9HsT1P4|>UURM6&1Pjsjd2;F%e=fqL;;p^U%__!t7})nECn^?H2Qq& zw|U=<;HmCU_>cug-)FX3yBZYZS*>*rRJ|%2W=1S>g+t_~Kq)Z~j+|3JbdiGt11b8D zc#1BA=a?Ds{j{DzfDQ*VPD_~zuY}FX0E`ofz69+K31M9V>>Gb8<32zkibE|wmwczp z!$?{$O~{g5mVBw&X0FOdMDZDsVV%naY9Qx2ZApf`*?jEfJMa7pvgxI)b~CVE4{Nys zOTfLSsL^NG_4yuIkGsd*o7M|E@|WLnc8~Ggm{}{k0SqXQ0t#Y>x^PumsT^Lj4jTUf 
zXxOns6dVlf(axd$_wy{P?Tn+gd5thRKq_Tp+D+>a>g=)Mp|pDn9d>AK8n&D0Vsp@V zTzx!q!QM0##(In_vG#vA0Pug;UM++^lYT<7QG4ZpkfNAms=K_Hjnzd;|J7}2~%E2ob-uSE(E`g_T&;<_?)92IEex#4xk-LmO-#Zn7?f7 zjtk7_O4F1-s&__8R9>LQ6iLFf~n_7p$3s7;_fyR zG9>CkZDxmndf$6d)^Kj*ud~^WulhRoUQmq7dzG>I^Kk6~QqmT<(NW)-!f89Da=N=G z#*+d17^%R{YGFtQRg9Hon8!1$4pja@lbe{16!%y*30S~3H4-M=zP4{;B8{~#7#Zz&D#4EAtvys zE%f`!TzzP&KTzRNGDdqx9{}z90l^C~p4;3Qc|KmSZ@lsau`ZDrx*^@s+TEqASvWhP zT5NopuQ8CugJ-+@cLb~sIN36A6(?16e<~03!${K=G}6NPnH0#`@f)U#Ck*uEcY%p$ zW%~joN*XLQBQ-Be^Yi_YLh?F90s(Ny%+TUxitX$MFDYXMT7SZ9wZ1pk!X_=@Dh6Wm zUyi&)tACI=umf?5Rw-`s9kGz9s(uADC9KJ>2OD-9?WK7VrM+9MQzq3P>e!@cbaY13 zw^hnKQ10W(heBWoVga058g*PU?9OS7MPYA3b22r^Iqef8%M7a8N{;Jyd=4ZMuGbzb z$xN^@E$!AeMw77>kH0U$4Jx=>j|}MRIlx+T&!S~h0o}~0yP-4hTtW_t4oRuLtNuQ) z*pnP#lLakOYh1T#aKW`pbi9AAB%Aga>w*tljIxFyJgIDpd*1Uw4;acpHkOQ>j$v7` z7IS334_cDvr}TF;>%J_BCe|@omSfyuMYd*VH0g6=V~m4G<<^2G_MowOa%@Kt08$!t zV;pE>H5_f4ZDxxSLNGNYdp1)XAv5f2fQDvY&T4$Q5ZVDv&8ySAl|OBAv`X!?+uc)AyIL5B1uQY; zuR>OdtsCA6{{VNIuwC0b8rEN}kM{Hg9F(>aq=K}eY^f#57cw=?Yv?=truaReX8n`CO8SJkD~B{L zv0LgPMWgDhRX2e2-pY1cpPPp!gCkHNGCU%4V6s%323m3Y>eXB4TEpKE)M%M1p&AuASTH{D(&8?*A@PV+@BHi&jE4J~; zIL=voWO>C zR?Uy6P?Z;}s`tk49xEla2|D5!Yw&2VpGf}*OZ~wgjzIgm*CuT&#?zV`^YX+WFbYB0 zVA_I*vD2leX~He(g8VugLW@Uo^PVMHRtLpB^0FFpFUJ!^O(<--TGJQI+=&J3naYeV zrzIE&fl0r-V=llK=9<`ntm?z1CNN`ky(msrlDP4sCu6s*`6E-E0BW`%ubSDiIQ*ea zwZw|-EJt`lVBNscY?Gvp{7U>h9)z0JRi1YHGl3t`3I86-x?M@o#rDrb=g!%>lBP4NlB-zfrjCBb8Ow z@Qp~=PHT8~depXHedZDK+ds@eZR?8BDNk)^A5k>!fXRUMn>Q3jPnj?LC037d`kylQ?Q+z|dp66eO+)sox> zqQ~Dlao{s~wRW0}q~z?(3+e|qvUl2?^qs*HrZAGjikwxYK;A}E(5?mDQ|$GPFG&Ga z)dNPUL6wqm;Eydj0I?wsp;G!=e_T#i)54#hGH-hK&nmhTR+Klou^LysjnHt}aLeD} z`-8aN2miemKJb7<{ov)LT^x93W;9Et?>f-`RE!j=k>=*pSH_4wKoC|xyoy{Cd zYovz$ST-A%xKZdjeQ2$2KmSX@IQs*IaVOB9HKn%P2Osl=ZHg@{b9C0iJHq+nt-M%x zXJ24*?$hD2Y&5$hNG3Eadd4dplJtjxMo19Tjg(bW(D(S7hW7T%3) z0KKdo6>l|b4t$Q)D&Os>n;8$<0Z(GoyHj1Io^PR(}) zwf7NME#Y43AF}yNEfbJ=tF!T&1Z%F-j>NcC`W+&T$?S9*R==SVNgL zb{|#;Ndo&HVE7={M5l_O+fM}l=th+o-s!jvWBKVzaC$stWx71eS|e=VLoK;eBH@72 
z!eDTHRmhIvkM0hog?SC!)SVxDvr<&<$OU|CE~ZQg9m~={2q$TTbgT}{z*iMTJ_e~c z<)*5C8qHKE1;j0hy9W$Z%LwXeur{0TSxnM5W{Rge5Z|r#D|CCeIFzjky7D!S2Ajvk!ja!}`Ep5XUB^HfM(QSGlCV z>XK=)%3r(y3|zDJW=wPSM68#sRWtlzcKF^1Ic~=Seb74Z&FWeXz*F-I&9#a&3{OAy z1V!;{?{1BGZx%+Cb^@HRF`Ja+5)+VVSfCqw z-Cy`)!$-MMh$VVe`@@{%ITEl&$KOh2@Mj6W?N=I8iJNgbRxOj_%fVE@(j*mkoKGgk zof<_IQe4PSMsKRmji@>s0Aor}|zj&)d>IJ90a z?(KOsm-NoPMR9GUk}w2fgK`=vD`b7PY4wC3u~ppaba;8ha&84?nf1 zzdl^Ui@j*)=$?^3IJ|znI5w;!p7*9|d;>7uyX4}6vEUrFAEGXm4cD&yX_6M$Ff`Xc zIlf}h$Eu{h)V5)#$!@pCM_{?c?W0O%JX=;0BcChhr<7<;9M(!A0#0#_Mo~&5L5mJO z!Ee{&B(pZb>Sd|SI>ZsKE^_I)*>z`#t zmIwyAUv^F|ln(Qcf!FQ$ulYRI1?f?0^dz{Ik~+dq9dS{<9G6}A?EdKGUJ5g>n|0F2 zGgoNrh}RP$zXiM)uNuN@QT8YadxNJRJhz)IH1xC9mtQ(6K8>J4cmuDyBDp{Fo-C}# z=^}Q9C(qx!cT3uwl87orwl%Q60&}e}jZ&@lYY!vG@LN(~x~rdFmmiiV0)w$-vqCdG ztH8f&o2`dJ<7wpzz{Yh3CVsjb@op}}7dXB&5fBjRz}pFUHQsZp>=Xuc(fn17Q?tUn zE;#Rf!AtX|o~`%Izz@^CZmT5P9ibpu`-QID?Dxhwmc9)3(sWLG1%K&C|Bs|~uP(i3If!Sd zKVW349Y(_*PB=!|bUF8P(~M(%;3lbE_Z06z1&_Dhl(|&^-};O^sE#!g#HH#9o+%D$ z-sea;&4}dRDApf3AKJSAR1;`tB5<2e-F32)MV9S_qm{c%cXQ=m{M9sKl(VAIMO#x` z?tGGcDUAR#r{-oy|JV-yNV^Ho-Ll#sQc`;&uRGHvUM!#~e|&X&885wEH}lIDt@$KN zPo_c~*Fieq3jbzppH)9R3! zr*577O0@+es9M@IHO{zo+m|uX?;4t^8v^~AHCLYa`SP@rt{(YPCSkP*3HPkW(%s*! 
zj0n$+$0)aRe>i~Fc?R)h(RoR4~*&m>B`!qn-I zO0xR42X4Hvm->EseN7Y@bT8cPot?I?pQuu!=pcY*_!-9 z1Ea6m6}$7BLI9B_P(kEE7fx&*eFvr#gThodQ)uae!|B5iTO1R17E#J6{zj0SWz^l7* zk)UyRm<5NXAved zMscJRvx8-1nY7(8Zrb|YdEX39KZB4rW3)X%U?gH+mbGy=homeK8*X<|F@GWp zi5#p?uB+wry{!CN*=j?%?%vbE{;rTWnRD+8T6@$A5T7zK&|zAn*70_X%xI_jV!I z8Tjz=xDsv)T!;&7r=MtPqJVjT+@Ha{Yt`yYCqgq_CF!GEu1n*zQTg;b*RvS*z$W*J zbCY|yM-}*~QJeUE)xsm26x4bhpj~fop`rWE(5q(hc?yjz!D+q0UqrU`L>F*(M{M1F zqt@g){L__qzF{YpI)W1fjIcTCFQewT$74(KVl@HPJU6Vnk6^3reY~04DRFF0v?_C0 z@aZJL5cNmKoPQx+^CUGuvuPbUARh!`LX6=0c`b9|SZ~3Zu!c?5;<&!!Bbf|~*EDhc z*EkvIYjU%SCo0WQGap$OnT9ap- zkj#O@$7~fnSN;)t$7SPN&raZ|Zqw;X`nsw?jpRH_n_q*xEE&uvSVolguHD0JOere+ zGt`tRIH-)`^QTobT?XsTateR8Ot2OE|BXeiV?39WXnr0QmY2P$a`f=g<*Gi}S?UZh z;IuVbvu!2}D|6}qjl8{H<=m8$?28$M=Rk zij?t3+{ks(N}|N6*|v@co7NxZaWk|cctaVL1^u{{FcaH@z zuhvATS(_e;>6|K}WZ9m%3Jh!2f_*Hmdt}|I1?$x288=7zw52>arS9j33t2zmE6XnPQ-1y0lI*1?ApV?BiQ^=6#qm(M(Ms1@(6uH(^)OUd z_WLMy_dqe`RB^V3aDGrHYzjlEWiQL4uA8Q4EhhmC0|N6w_=vE zarc{>iP=`CbKGW&gqGSfAPK&#_1WJ`1kn4dfxPVckn=~-VQvSQ_c=G%0pBp2$Fyhs z)3kAyCa)U08MR`HD8mVcUuQk=HF`!P;3r!%OllVcM4E}f+Xyy~s<&qWpByIn%#;>u z`&#C;jD$d2dhD_oRJ_=4pk_^Lc-t(u=v0|@%L0wN8x=uAYnx?+&5Z9~ItpFm#(XJ{ z0m0{8&H(Ykd{1|(s#r+nyI%bXFqg=-;bMuN$$;*+0nTSFiLevdE$3rtEq{?&9|i^t zd#0kpzNC_v>E1QMb~e_{M(1BL>=+(Q+)}T5BVYCO&LZ#)h5n=l<^aZSG4ffTx6!5Y z9xb~LyAi5g5&1hK9EQM+U7cPZ#6)R0u*+C4Oj-F|yT{a}{abO%X`NE;1InVop91;m zr)#d|-Wn5nM(u9m(&E;iMgCLNCYDm9$2rBvb9+gjBy(T3_87(hFvphEY2KDAsT|;> zbX@2B$(5rs5=mFOe)U3S_ipoIy3JGrVKXyY4t1NSShvdP*|rh~0Yf6mt>Z@e5uUKa z**xJwf2xWB>ngP~!gh{j5Mt3Y>`skQ;W38%lcgci|QA3h%Q(&|sT zQMG}eiskVq^4G6tH`Eq?O5{z(jd_6_X$0esuHhmBCNXa;i`rH=HiB0W8t&oK9{>SaaA-~8MIXZ zNfFsGkXg-heuC0Ly>%$y)>XOYTA*cF=4x8dQp?ewujAm*=T3|$Mw89zCZ(P-(278X)tSzM7`PZ zH2&O=y`Xp0@9LNrp?QzVt1~^r$;o+R_ZC1TEi_{G!OUVU4Rp@G9WU%sG;BKrgK}R) z7D=(R{O+2$4a9JO2=So1Qub7y2nin?5k492urFR#HJm8M6yS8!CRlDn7JOB~vp!oJ zmL}t>KzT~#ZjIOz1_lPUw=*v;ziAn32NnFzr*HMg)p+aex_qguik%ALT-tp2qWzrT zUMJh}tEhH0hY=@2jh$H=q0c7sI*BApZTDIn5A_}PWz@w1qDZJwmHzFxvYRoPR~^hj 
zrp$ylOxri#oQ&rGga=88*pnsMmoP9=w{vM^9TNg&PT`NH!`XfGrfp*x1$;}mr*^XW zq48#h$WH3#Af^Yqeoh~nvO5lnK6UGk?i`|RcW1c|?bS`l7dAP56qtfxJMhO%%Q>uE z)#SF+9^Ag&q{~=2)M)e}(-UvS%YIf%=VcJS+FMm8h07WpEXgW<*(ZWAD78uc(L$Gl z)nr<_K@USA8ZaVUmBc{Yr7koeC;Cwjo~m3;(}Elsi#GBebk7ZRE#L(ObJQ7=uc>=V zJ}UouL=huO*n zKV=$PJ)0dvTN&dITD*MGFsSAJBQ9N2Adc**e3`>usJ_3PN4!y^cR5|QiZbSsebfw)NN$$6lr7INGm#eQ4U-~$1@;VNA~hU}I+~PDQy^wNwe#BtHplf?i=usH zD>9CYk8-ap&LXR5 zXQ2UbRb?|EF!BRM)Bx|Ib2O~%P!QHCh^BuAhOiVjqsv>v!WO<;iN0x0b^QuLJeU#u zgV$r%Sy{HMKu*cLVn4_xX!o1%o$xn4%%XZPgy~6Tk}vkX)1GV%=}lJ{;5r`r zHr4uo^UCif+dSnR$4vRWrlHmIc_cCKjy(OGnwLM>LRL#EYhJBZtZX0Pcn@9B8v-uJ zBIvM{vZ(zCw$!x=XVC1)y#>$U44oVYN`AxmUCS=2i-aM-Ik0U{W1UwcUWN|~&#gG` z-_uyY2TtUM-F5L9&pD}E|9zo!T|_FX$7VwYDB8QY1anv6*&5I9xB{()4CTCY)CyVJ z4F^)na_PWd?G#+zmT^`sc#bvYHJyr}SvsvtUnz$wJn+7hsY{ioaB|+0buhAIM}4pc zuGzZpNN}Cr$so?={mn7awuC*uZHNA;5w7l&kjq8KXqC2MAc%&U&%Qvz__w+r7CTO6 z0Dt!~4y;Fp1}3mmVG~c@9}7uSv~kx@uy#7cx)cd*Vu%YITD#B1uSYzzKV98bdsT4n zzIO-D=eD0ir6yapFHWPxrLe#B6wO&q5X?l{3U_amO|h36+5LxB6y(9E^v1sP#n}eN z$D2d^y~1e%$ww)g-P9xUdV89jTlz@h6wU)bzWn2&+T!hQ9~h7u*veUF^n1BC6-RFLk-2m#DZU~nhW;iTtHwf2GBJ?e%;g) zqKy`I6tqU6&Sfr$MJ>+q)64;UR_R5+;$SEs#y4ZN2>i)rqvQPGr1|!(x5tL2=WH`A zSXt0>A|v7Jwve8xI{-q5Fj0s2OB%KgH>vv!2yRk2e64q|{?gaA&LFePspv9?MAk9Ml`%&o4w z%tdNr=d6H)J_|9~o9u~pnJX9O0|JWaCmTpXVsN3g||i9HX-5VtsF++;AYYO7(Ep$YIjus zwJ*+&(J)3G0+jTVlg3`6J}RaY78K-BW7L}mQ2BQ+3q+H|ynQ{`H7zI?J9FMMj34rE zN~GRc)+e=Zhp_ixJwO~J_vSxaT>TLlJ*$&`jV^-ltlQ@s*0t|#m#Exc~8y35~~Y} zV~&@@G%Y^3nUhSE1`1sptVZkakpMt3ATAxr8)h z&wHUe6)f8d=^9|!LbA=l$~M7R8i=Yd%ntcs#DkgVwqgVsWkaySq=Xc7Zn;s#bYnL# zI^pHyA-fIU2SA0`gHJmUSs-(s`w-SO?~~=jm{q8a4aMIuILTMnOJSQM{yXWPz)j^h zOs?)c3^}SCVcsz=<*4_{!9YuOmElI&b!~!nquo;sd~}Sl_yMWU?${?4Wzoj?e-Ir4 z{G;`w`)PR@!On(tUAGMvtd=pGk(-0S!UbE`ffKH3l4nF@h)0S=H&%I1za5H!=r|8| zlq5Gtg3+FBC0Sk1la0Q4+3_?G{ff9o-psR$^}Kgj@!8{GDEOQiTjSp64Y8_#%e$?! 
zmZyVu{Rn}w+0#g^AvDC-mpZ0Ud(7WrkhHXqgp|P8Em>{zMuHm}kyf@O0nR z2)2SB1XY8GTTV^C#cX(z3Tg0`*NQ}jj~C93_b~c`gR~`b>4B4c`s9&J#cnGu79u3{ z%=lt;y<#>UbPEpjQL!e@2Ups7AO)Kw{j9NyH*gno0gk6E$U1N*kwA_hMieCAa@Kz5 ze_9ZW2!AUwY^U8RVx(~fT2z2nTPNfU@((M3dp{pF24x}W`Tgu>$1QL&;ESb;AB0d} zuCG@rG7VLKLJXNV=RFnt>jT&eR}Jd|o+P110?d80i1PzWffs&@$rN38GDh&s|J~mU z-~q5JrFFp`%nNnbr5PADk#R?K?{ndF&zq_TFn;4X8gROXj?$)C$U#*mHgO@#97)ATUf0cY$Co&3KnXd!b+px!d2rRVXj2qn*N()kZjyb&etck~&e}h0m$4Ma> z8bUb24OU%hXbo4fPv8Ek7S}aqyjI2P6&MX!wPrb|i}5~r=^^+W6K%`mV76ic!hX2Y&CVrqub3!OVE2wljooJ;!8Uo|`527qa`BK~B(x=0rIXVZXt#cfL&d*H#^8~Fa7i(8|df* z-%{I8i@}LtzHR^ssjsN!Lr^F2r>`xHr=3pRCQF13Yb>m9XCfb9W(@(-xH4`_cCaL2 z936pJ4F47KmjkI-6Az@;h-Ui0iqX^ZycZDr$Fc-2+jtFo$ecSd<31(IjL;33?lm8_{ISer94P1RLME5nb99>HZ4Eg3S@su!DJn9bWZGYY zZM_=a`SrxT;7C-fciAuy`EFI2UhC|F1TEgm1+n`0$E!)3svn1DZjJu>S}k1kcW0@mH-8AmenNBaKQs> zg-*&o64ZSLfMH5q^AY25@WJ^{RGv zo%4zgb~MMNGVdP1#$EtvXZG$m3BNG{YQcJ@{O=omGv*?ff0RCs<1Hrr{7`X^AqjX~ zWO^~KX7j}ZTq)k>ZbXA!F18E|=-#$4#MKKL1pGX(u3HX}p&|iF|5#7lm+^3FL$&>y z54P!Sn9?py5X&>qV6CrTM0EsNU;u*upleweNr5yKDv+C7i=_Z;+>SetAYaR8n+tEz zKSkBm;8g(pQV{5;{edF_Mk+Li|)#EbXarqP91Y3o4BWUAs}2`=Q?) 
zg$Wlno}&fDr;D0x(J9kb?zqsitfi1SFoRFH0~UGLqEs{PQxtf8R#&}4QP8O0?1c%B z{d*0L?s_J{lbKn_7`)C){OU|QT1l(jWa$nn1nM|Iw zM4m4$@a@ZVp*}gbec_i^-9MWxo*EN1ux{tCSj~BYc!9ArV?PTPdyWR?=klMyZ?5I4 zXRB3w8n&cr1=@mg;iKX?okZy8g}Qt|1;#6-Ou(O7^%k_%uefI|l~)F@M4OPJlZQAh zDY!x+@nq#uN>>9y?IT$aYU(P5En-rWFYG)7;%|5;2)2ummcpm!CEQe({s3L=HS;|G zileUn75a}q5BXvIJzjWf8k#ZvX01tTn53ug8HyO+htrc@4=3TDv`8xq6s z{Oy)W&2NBYji=V=ARa7#JoT^J<`REQI!DqX z`!2?o$d_j}DezxVmA_kI7a8GRNF%in^m3d7GB4n{ICT(>db6} z8W^Zf3H+@TIY3>)rku(6zpMhU?A)n~YfTUaG{ zewt(xL0Q79+%)E#l8qRVsqXyuqp7PtDX6%sMA2^FkcR%zW(Y|`kZ)`SOLwD}9}yAr zT<0N&8n=1PrE+|7I-S0u$2H)3K?V22mxOq0)T?^ssr<$(5?jHqi2$N`QOUvL;H)>e zn|lHDoy<)~YF7a=NdPSWymPgHZ%FYzz);Bz;jw9Tk$^^+dih(x>LZms%LezZmDlE0 z#(Ax5olZc|l__@S_4sU487WXUkd+}{^0zL*X=I1!MEb2{-<%Sy>jws(XjyTh^FG-C zsvEa){|$wxf9T7%N!=KYU=k+5xpm0kc*F7OyS9~Yfd0UYVTY4JRxK*> zke(v*((LPtGiKv*N2Agon!;3|$91B;lskGk#4UKa4Y2L}k zn(mcvXQCH`TME`x!CYw>MIyzHuSoR`%cM}5(3{Hp(=e4dEwS!uRe873b!uLE{>i0z;U$=^Q!7+7T9swY4$VjYm=#_B@9TwStH4!z2*0d-{X^> z)HNZKLj4<;0{w4JMrQAP|Embk?tj@PPT39AtCa5PSrUpuYl&=j%SbrX48#7B?8y>Z zi)Elp^Uu|SOiMYm6q8CfSw3M+Z92%JC7wEG1mwHXlU3KoiW+>KUd55`M5m3+G6V|g zayD?IynAWV8cUR>Q4Yn6#T%{a(1DQc5aZlV@oH z$H)BPJlx`~OQ*WEE>4_$%wE!*^~!%S#Fmk;^Ad(frOo)OM2F>SHK)5e4+2=~*jF-*d<`aWRPG(Wd4P{BAI( zpqCTX4ESg30k-%}hjlrTY8G#E6DKDIiBJu)*PT{Q=0G{2%!J3xX3TZxYpKG|aW ze_01pPUDNcJE2~B`Fn(+yU^R32dSk4-`;n&S1LsH9VvJAy*Fut);b{Lm1J;#!vg#5xiOwS8y$J@vjnNR~DL>7G*&Ok}2cY=J>v;{%_9hzSEfmGi zG9*2F20!EB)iCn!#Pm?zWyJESzVgfgrnjb+Xr_PC@V~}caMi`FvBQ8qMMelk{8}%e z&z_Pb#MLt7wNa~W*efs$Ah*B7%tvtzxNcl({jFA3th=z*FOD7n&TVZ&g-5<)u4fio zmpr@>B-TgHBnTs8g|5kX4oYAC4SGsAn^v!!v4COiL;Mut#7uucm(TH7WJFGU2}9fc zIZnBQ{Si_i#D!(juYNcV{^8l5i{%FGOst_rDaWeuS4c_2Jl=O0Em-M-v50T6d!4jT zU~5ZT?pSKi$d0r-prJAgsK=qxo&+d0GL_17^5&%Z9daXg1^*PWWv@RAo4&q(zgYX! 
zW+dNGEsiJ!qeNPwifR*0o1ktv0DdFn6f`G5L`w7?wgTSP}@6_4MHE zI>7!)^-sTQC%+n!Q9A4<{e<|aASUlQTa>q$QxJ0m5q9 + + + + + +Network Visualization + + +ptb_embed_2 + +ptb_embed_2 +FullyConnected +num-hidden=256 + + +ptb_lstm_2_i2h + +ptb_lstm_2_i2h +FullyConnected +num-hidden=1024 + + +ptb_lstm_2_i2h->ptb_embed_2 + + + + +ptb_embed_1 + +ptb_embed_1 +FullyConnected +num-hidden=256 + + +ptb_lstm_1_i2h + +ptb_lstm_1_i2h +FullyConnected +num-hidden=1024 + + +ptb_lstm_1_i2h->ptb_embed_1 + + + + +ptb_lstm_1_h2h + +ptb_lstm_1_h2h +FullyConnected +num-hidden=1024 + + +_plus0 + +_plus0 +_Plus + + +_plus0->ptb_lstm_1_i2h + + + + +_plus0->ptb_lstm_1_h2h + + + + +ptb_lstm_1_gates + +ptb_lstm_1_gates +SliceChannel + + +ptb_lstm_1_gates->_plus0 + + + + +activation3 + +activation3 +Activation +act-type=sigmoid + + +activation3->ptb_lstm_1_gates + + + + +activation2 + +activation2 +Activation +act-type=sigmoid + + +activation2->ptb_lstm_1_gates + + + + +_mul0 + +_mul0 +_Mul + + +_mul0->activation2 + + + + +activation0 + +activation0 +Activation +act-type=sigmoid + + +activation0->ptb_lstm_1_gates + + + + +activation1 + +activation1 +Activation +act-type=tanh + + +activation1->ptb_lstm_1_gates + + + + +_mul1 + +_mul1 +_Mul + + +_mul1->activation0 + + + + +_mul1->activation1 + + + + +_plus1 + +_plus1 +_Plus + + +_plus1->_mul0 + + + + +_plus1->_mul1 + + + + +activation4 + +activation4 +Activation +act-type=tanh + + +activation4->_plus1 + + + + +_mul2 + +_mul2 +_Mul + + +_mul2->activation3 + + + + +_mul2->activation4 + + + + +ptb_lstm_2_h2h + +ptb_lstm_2_h2h +FullyConnected +num-hidden=1024 + + +ptb_lstm_2_h2h->_mul2 + + + + +_plus2 + +_plus2 +_Plus + + +_plus2->ptb_lstm_2_i2h + + + + +_plus2->ptb_lstm_2_h2h + + + + +ptb_lstm_2_gates + +ptb_lstm_2_gates +SliceChannel + + +ptb_lstm_2_gates->_plus2 + + + + +activation8 + +activation8 +Activation +act-type=sigmoid + + +activation8->ptb_lstm_2_gates + + + + +activation7 + +activation7 +Activation +act-type=sigmoid + + +activation7->ptb_lstm_2_gates + + 
+ + +_mul3 + +_mul3 +_Mul + + +_mul3->_plus1 + + + + +_mul3->activation7 + + + + +activation5 + +activation5 +Activation +act-type=sigmoid + + +activation5->ptb_lstm_2_gates + + + + +activation6 + +activation6 +Activation +act-type=tanh + + +activation6->ptb_lstm_2_gates + + + + +_mul4 + +_mul4 +_Mul + + +_mul4->activation5 + + + + +_mul4->activation6 + + + + +_plus3 + +_plus3 +_Plus + + +_plus3->_mul3 + + + + +_plus3->_mul4 + + + + +activation9 + +activation9 +Activation +act-type=tanh + + +activation9->_plus3 + + + + +_mul5 + +_mul5 +_Mul + + +_mul5->activation8 + + + + +_mul5->activation9 + + + + +ptb_l1_last_h + +ptb_l1_last_h +BlockGrad + + +ptb_l1_last_h->_mul5 + + + + +ptb_l1_last_c + +ptb_l1_last_c +BlockGrad + + +ptb_l1_last_c->_plus3 + + + + +ptb_pred_2 + +ptb_pred_2 +FullyConnected +num-hidden=128 + + +ptb_pred_2->_mul5 + + + + +ptb_softmax_2 + +ptb_softmax_2 +SoftmaxOutput + + +ptb_softmax_2->ptb_pred_2 + + + + +ptb_pred_1 + +ptb_pred_1 +FullyConnected +num-hidden=128 + + +ptb_pred_1->_mul2 + + + + +ptb_softmax_1 + +ptb_softmax_1 +SoftmaxOutput + + +ptb_softmax_1->ptb_pred_1 + + + + + diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index ab0c678f581d..e895d8389e74 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -65,6 +65,7 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla #... #--/LSTM-part1 + #--LSTM-part2 # now unroll over time outputs = mx.SymbolicNode[] for t = 1:seq_len @@ -92,7 +93,10 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla smax = mx.SoftmaxOutput(pred, label, name=symbol(name, "_softmax_$t")) push!(outputs, smax) end + #... 
+ #--/LSTM-part2 + #--LSTM-part3 # append block-gradient nodes to the final states for i = 1:n_layer l_param, l_state = layer_param_states[i] @@ -103,10 +107,12 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla # now group all outputs together if output_states - outputs = outputs ∪ [x[2].c for x in layer_param_states] ∪ [x[2].h for x in layer_param_states] + outputs = outputs ∪ [x[2].c for x in layer_param_states] ∪ + [x[2].h for x in layer_param_states] end return mx.Group(outputs...) end +#--/LSTM-part3 # Negative Log-likelihood diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index 2837ab7d8b55..1ea93e3b6116 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -31,6 +31,7 @@ function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; m return vocab end +#--CharSeqProvider type CharSeqProvider <: mx.AbstractDataProvider text :: AbstractString batch_size :: Int @@ -41,10 +42,13 @@ type CharSeqProvider <: mx.AbstractDataProvider n_layer :: Int dim_hidden :: Int end +#--/CharSeqProvider function mx.get_batch_size(p :: CharSeqProvider) p.batch_size end + +#--provide function mx.provide_data(p :: CharSeqProvider) [(symbol(p.prefix, "_data_$t"), (length(p.vocab), p.batch_size)) for t = 1:p.seq_len] ∪ [(symbol(p.prefix, "_l$(l)_init_c"), (p.dim_hidden, p.batch_size)) for l=1:p.n_layer] ∪ @@ -53,7 +57,9 @@ end function mx.provide_label(p :: CharSeqProvider) [(symbol(p.prefix, "_label_$t"), (p.batch_size,)) for t = 1:p.seq_len] end +#--/provide +#--eachbatch-part1 function mx.eachbatch(p :: CharSeqProvider) data_all = [mx.zeros(shape) for (name, shape) in mx.provide_data(p)] label_all = [mx.zeros(shape) for (name, shape) in mx.provide_label(p)] @@ -62,7 +68,11 @@ function mx.eachbatch(p :: CharSeqProvider) label_jl= [copy(x) for x in label_all] batch = mx.DataBatch(data_all, label_all, p.batch_size) + #... + #--/eachbatch-part1 + #--eachbatch-part2 + #... 
function _text_iter() text = p.text @@ -96,6 +106,7 @@ function mx.eachbatch(p :: CharSeqProvider) return Task(_text_iter) end +#--/eachbatch-part2 # helper function to convert a char into index in vocabulary function char_idx(vocab :: Dict{Char,Int}, c :: Char) diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index cb19891f03b1..f96fdde1d674 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -6,9 +6,13 @@ include(joinpath(dirname(@__FILE__), "seq-data.jl")) vocab = build_vocabulary(INPUT_FILE, VOCAB_FILE) n_class = length(vocab) +#--LSTM # define LSTM -lstm = LSTM(LSTM_N_LAYER, SEQ_LENGTH, DIM_HIDDEN, DIM_EMBED, n_class, dropout=DROPOUT, name=NAME) +lstm = LSTM(LSTM_N_LAYER, SEQ_LENGTH, DIM_HIDDEN, DIM_EMBED, + n_class, dropout=DROPOUT, name=NAME) +#--/LSTM +#--data # load data text_all = readall(INPUT_FILE) len_train = round(Int, length(text_all)*DATA_TR_RATIO) @@ -19,6 +23,7 @@ data_tr = CharSeqProvider(text_tr, BATCH_SIZE, SEQ_LENGTH, vocab, NAME, LSTM_N_LAYER, DIM_HIDDEN) data_val = CharSeqProvider(text_val, BATCH_SIZE, SEQ_LENGTH, vocab, NAME, LSTM_N_LAYER, DIM_HIDDEN) +#--/data # set up training if USE_GPU @@ -27,9 +32,11 @@ else context = [mx.cpu()] end +#--train model = mx.FeedForward(lstm, context=context) optimizer = mx.ADAM(lr=BASE_LR, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, initializer=mx.UniformInitializer(0.1), callbacks=[mx.speedometer(), mx.do_checkpoint(CKPOINT_PREFIX)], eval_metric=NLL()) +#--/train diff --git a/src/io.jl b/src/io.jl index d6ba2fd3d8ce..48e0a31e32a2 100644 --- a/src/io.jl +++ b/src/io.jl @@ -58,62 +58,66 @@ and split it into mini-batches so that the model can consume the data in a unifo By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface is implemented on the provider type itself. 
But the extra layer of abstraction allows us to - implement a data provider easily via a Julia ``Task`` coroutine. - The detailed interface function is listed below: + implement a data provider easily via a Julia ``Task`` coroutine. See the + data provider defined in :doc:`the char-lstm example + ` for an example of using coroutine to define data + providers. - .. function:: Base.eltype(provider) -> AbstractDataBatch +The detailed interface functions for the iterator API is listed below: - :param AbstractDataProvider provider: the data provider. - :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. +.. function:: Base.eltype(provider) -> AbstractDataBatch - .. function:: Base.start(provider) -> AbstractDataProviderState + :param AbstractDataProvider provider: the data provider. + :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. - :param AbstractDataProvider provider: the data provider. +.. function:: Base.start(provider) -> AbstractDataProviderState - This function is always called before iterating into the dataset. It should initialize - the iterator, reset the index, and do data shuffling if needed. + :param AbstractDataProvider provider: the data provider. - .. function:: Base.done(provider, state) -> Bool + This function is always called before iterating into the dataset. It should initialize + the iterator, reset the index, and do data shuffling if needed. - :param AbstractDataProvider provider: the data provider. - :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. - :return: true if there is no more data to iterate in this dataset. +.. function:: Base.done(provider, state) -> Bool - .. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) + :param AbstractDataProvider provider: the data provider. + :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. 
+ :return: true if there is no more data to iterate in this dataset. - :param AbstractDataProvider provider: the data provider. - :return: the current data batch, and the state for the next iteration. +.. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) - Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that - is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this - case, you can safely assume that + :param AbstractDataProvider provider: the data provider. + :return: the current data batch, and the state for the next iteration. - * :func:`Base.start` will always be called, and called only once before the iteration starts. - * :func:`Base.done` will always be called at the beginning of every iteration and always be called once. - * If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with - a call to :func:`Base.start`. - * :func:`Base.next` will always be called only once in each iteration. It will always be called after - one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will - not be called. +Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that +is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this +case, you can safely assume that - With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation - of the built-in :class:`MXDataProvider` for example. +* :func:`Base.start` will always be called, and called only once before the iteration starts. +* :func:`Base.done` will always be called at the beginning of every iteration and always be called once. +* If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with + a call to :func:`Base.start`. 
+* :func:`Base.next` will always be called only once in each iteration. It will always be called after + one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will + not be called. - .. caution:: +With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation +of the built-in :class:`MXDataProvider` for example. - Please do not use the one data provider simultaneously in two different places, either in parallel, - or in a nested loop. For example, the behavior for the following code is undefined +.. caution:: - .. code-block:: julia + Please do not use the one data provider simultaneously in two different places, either in parallel, + or in a nested loop. For example, the behavior for the following code is undefined - for batch in data - # updating the parameters + .. code-block:: julia + + for batch in data + # updating the parameters - # now let's test the performance on the training set - for b2 in data - # ... - end - end + # now let's test the performance on the training set + for b2 in data + # ... + end + end =# abstract AbstractDataProvider @@ -162,7 +166,7 @@ abstract AbstractDataProviderState :param AbstractDataBatch batch: the data batch object. :param Base.Symbol name: the name of the data to get, should be one of the names provided in either :func:`provide_data() ` - or :func:`provide_label() `. + or :func:`provide_label() `. :return: the corresponding data array corresponding to that name. .. 
function:: load_data!(provider, batch, targets) From b595394759c377aac99fe7d986c13e14e42541cc Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 14 Nov 2015 23:43:39 -0500 Subject: [PATCH 237/630] call visualization in test --- examples/mnist/mlp-test.jl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index feabd1140a89..4931944032a9 100644 --- a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -65,6 +65,9 @@ function mnist_fit_and_predict(optimizer, initializer, n_epoch) accuracy = 100correct/length(labels) println(mx.format("Accuracy on eval set: {1:.2f}%", accuracy)) + # try to call visualization + dot_code = mx.to_graphviz(mlp) + return accuracy end From f099670c9d83c0b60d9dac9f41753dd1490e7eba Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 14 Nov 2015 23:50:41 -0500 Subject: [PATCH 238/630] prepare for v0.0.5 --- NEWS.md | 9 +++++++++ docs/conf.py | 4 ++-- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/NEWS.md b/NEWS.md index f970f7d64703..d50bd5ed9300 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,12 @@ +# v0.0.5 (2015.11.14) + +* char-lstm example. +* Network visualization via GraphViz. +* NN-factory for common models. +* Convenient `@nd_as_jl` macro to work with `NDArray` as Julia Arrays. +* Refactoring: `Symbol` -> `SymbolicNode`. +* More evaluation metrics (@vchuravy, @Andy-P) + # v0.0.4 (2015.11.09) * ADAM optimizer (@cbecker) diff --git a/docs/conf.py b/docs/conf.py index b009877694a0..c2a405765352 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -56,9 +56,9 @@ # built documents. # # The short X.Y version. -version = '0.0.4' +version = '0.0.5' # The full version, including alpha/beta/rc tags. -release = '0.0.4' +release = '0.0.5' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
From 37b48f18ac305ec6bf89d592886225d73bf26994 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Kenta=20Sato=20=28=E4=BD=90=E8=97=A4=20=E5=BB=BA=E5=A4=AA?= =?UTF-8?q?=29?= Date: Mon, 16 Nov 2015 00:55:16 +0900 Subject: [PATCH 239/630] fix the sample code in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a6ae2d5cebe8..8bf867f39964 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ mlp = @mx.chain mx.Variable(:data) => # data provider batch_size = 100 -include(joinpath(Pkg.dir("MXNet"), "examples/mnist/mnist-data.jl")) +include(Pkg.dir("MXNet", "examples", "mnist", "mnist-data.jl")) train_provider, eval_provider = get_mnist_providers(batch_size) # setup model From e8ab01d26d8ab257222baabb8e4180cd14ec5aaf Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 15 Nov 2015 20:25:37 -0500 Subject: [PATCH 240/630] tweak visualization style. --- docs/tutorial/images/char-lstm-vis.svg | 1284 ++++++++++++++++++------ examples/char-lstm/.gitignore | 1 + src/visualize.jl | 34 +- 3 files changed, 1004 insertions(+), 315 deletions(-) diff --git a/docs/tutorial/images/char-lstm-vis.svg b/docs/tutorial/images/char-lstm-vis.svg index 8c5261ed7f44..cf49d097bdc7 100644 --- a/docs/tutorial/images/char-lstm-vis.svg +++ b/docs/tutorial/images/char-lstm-vis.svg @@ -4,432 +4,1114 @@ - - + + Network Visualization - + + +ptb_embed_3 + +ptb_embed_3 +FullyConnected +num-hidden=256 + + +ptb_lstm_3_i2h + +ptb_lstm_3_i2h +FullyConnected +num-hidden=1024 + + +ptb_lstm_3_i2h->ptb_embed_3 + + + + +_mul14 + +_mul14 +_Mul + + +ptb_lstm_3_i2h->_mul14 + + + -ptb_embed_2 - -ptb_embed_2 -FullyConnected -num-hidden=256 +ptb_embed_2 + +ptb_embed_2 +FullyConnected +num-hidden=256 -ptb_lstm_2_i2h - -ptb_lstm_2_i2h -FullyConnected -num-hidden=1024 +ptb_lstm_2_i2h + +ptb_lstm_2_i2h +FullyConnected +num-hidden=1024 -ptb_lstm_2_i2h->ptb_embed_2 - - +ptb_lstm_2_i2h->ptb_embed_2 + + + + +_mul8 + +_mul8 +_Mul + + +ptb_lstm_2_i2h->_mul8 + + 
-ptb_embed_1 - -ptb_embed_1 -FullyConnected -num-hidden=256 +ptb_embed_1 + +ptb_embed_1 +FullyConnected +num-hidden=256 -ptb_lstm_1_i2h - -ptb_lstm_1_i2h -FullyConnected -num-hidden=1024 +ptb_lstm_1_i2h + +ptb_lstm_1_i2h +FullyConnected +num-hidden=1024 -ptb_lstm_1_i2h->ptb_embed_1 - - +ptb_lstm_1_i2h->ptb_embed_1 + + + + +_mul2 + +_mul2 +_Mul + + +ptb_lstm_1_i2h->_mul2 + + -ptb_lstm_1_h2h - -ptb_lstm_1_h2h -FullyConnected -num-hidden=1024 +ptb_lstm_1_h2h + +ptb_lstm_1_h2h +FullyConnected +num-hidden=1024 -_plus0 - -_plus0 -_Plus +_plus0 + +_plus0 +_Plus -_plus0->ptb_lstm_1_i2h - - +_plus0->ptb_lstm_1_i2h + + -_plus0->ptb_lstm_1_h2h - - +_plus0->ptb_lstm_1_h2h + + -ptb_lstm_1_gates - -ptb_lstm_1_gates -SliceChannel +ptb_lstm_1_gates + +ptb_lstm_1_gates +SliceChannel -ptb_lstm_1_gates->_plus0 - - +ptb_lstm_1_gates->_plus0 + + + + +_plus2 + +_plus2 +_Plus + + +ptb_lstm_1_gates->_plus2 + + -activation3 - -activation3 -Activation -act-type=sigmoid +activation3 + +activation3 +Activation +act-type=sigmoid -activation3->ptb_lstm_1_gates - - +activation3->ptb_lstm_1_gates + + -activation2 - -activation2 -Activation -act-type=sigmoid +activation2 + +activation2 +Activation +act-type=sigmoid -activation2->ptb_lstm_1_gates - - +activation2->ptb_lstm_1_gates + + -_mul0 - -_mul0 -_Mul +_mul0 + +_mul0 +_Mul -_mul0->activation2 - - +_mul0->activation2 + + -activation0 - -activation0 -Activation -act-type=sigmoid +activation0 + +activation0 +Activation +act-type=sigmoid -activation0->ptb_lstm_1_gates - - +activation0->ptb_lstm_1_gates + + -activation1 - -activation1 -Activation -act-type=tanh +activation1 + +activation1 +Activation +act-type=tanh -activation1->ptb_lstm_1_gates - - +activation1->ptb_lstm_1_gates + + -_mul1 - -_mul1 -_Mul +_mul1 + +_mul1 +_Mul -_mul1->activation0 - - +_mul1->activation0 + + -_mul1->activation1 - - +_mul1->activation1 + + -_plus1 - -_plus1 -_Plus +_plus1 + +_plus1 +_Plus -_plus1->_mul0 - - +_plus1->_mul0 + + -_plus1->_mul1 - - +_plus1->_mul1 + + 
-activation4 - -activation4 -Activation -act-type=tanh +activation4 + +activation4 +Activation +act-type=tanh -activation4->_plus1 - - - - -_mul2 - -_mul2 -_Mul +activation4->_plus1 + + -_mul2->activation3 - - +_mul2->activation3 + + -_mul2->activation4 - - +_mul2->activation4 + + -ptb_lstm_2_h2h - -ptb_lstm_2_h2h -FullyConnected -num-hidden=1024 +ptb_lstm_2_h2h + +ptb_lstm_2_h2h +FullyConnected +num-hidden=1024 -ptb_lstm_2_h2h->_mul2 - - +ptb_lstm_2_h2h->_mul2 + + - -_plus2 - -_plus2 -_Plus - - -_plus2->ptb_lstm_2_i2h - - - - -_plus2->ptb_lstm_2_h2h - - + +_mul5 + +_mul5 +_Mul + + +ptb_lstm_2_h2h->_mul5 + + + + +_plus4 + +_plus4 +_Plus + + +_plus4->ptb_lstm_2_i2h + + + + +_plus4->ptb_lstm_2_h2h + + -ptb_lstm_2_gates - -ptb_lstm_2_gates -SliceChannel - - -ptb_lstm_2_gates->_plus2 - - +ptb_lstm_2_gates + +ptb_lstm_2_gates +SliceChannel + + +ptb_lstm_2_gates->_plus4 + + + + +_plus6 + +_plus6 +_Plus + + +ptb_lstm_2_gates->_plus6 + + + + +activation13 + +activation13 +Activation +act-type=sigmoid + + +activation13->ptb_lstm_2_gates + + + + +activation12 + +activation12 +Activation +act-type=sigmoid + + +activation12->ptb_lstm_2_gates + + + + +_mul6 + +_mul6 +_Mul + + +_mul6->_plus1 + + + + +_mul6->activation12 + + + + +activation10 + +activation10 +Activation +act-type=sigmoid + + +activation10->ptb_lstm_2_gates + + + + +activation11 + +activation11 +Activation +act-type=tanh + + +activation11->ptb_lstm_2_gates + + + + +_mul7 + +_mul7 +_Mul + + +_mul7->activation10 + + + + +_mul7->activation11 + + + + +_plus5 + +_plus5 +_Plus + + +_plus5->_mul6 + + + + +_plus5->_mul7 + + + + +activation14 + +activation14 +Activation +act-type=tanh + + +activation14->_plus5 + + + + +_mul8->activation13 + + + + +_mul8->activation14 + + + + +ptb_lstm_3_h2h + +ptb_lstm_3_h2h +FullyConnected +num-hidden=1024 + + +ptb_lstm_3_h2h->_mul8 + + + + +_mul11 + +_mul11 +_Mul + + +ptb_lstm_3_h2h->_mul11 + + + + +_plus8 + +_plus8 +_Plus + + +_plus8->ptb_lstm_3_i2h + + + + +_plus8->ptb_lstm_3_h2h + + + 
+ +ptb_lstm_3_gates + +ptb_lstm_3_gates +SliceChannel + + +ptb_lstm_3_gates->_plus8 + + + + +_plus10 + +_plus10 +_Plus + + +ptb_lstm_3_gates->_plus10 + + + + +activation23 + +activation23 +Activation +act-type=sigmoid + + +activation23->ptb_lstm_3_gates + + + + +activation22 + +activation22 +Activation +act-type=sigmoid + + +activation22->ptb_lstm_3_gates + + + + +_mul12 + +_mul12 +_Mul + + +_mul12->_plus5 + + + + +_mul12->activation22 + + + + +activation20 + +activation20 +Activation +act-type=sigmoid + + +activation20->ptb_lstm_3_gates + + + + +activation21 + +activation21 +Activation +act-type=tanh + + +activation21->ptb_lstm_3_gates + + + + +_mul13 + +_mul13 +_Mul + + +_mul13->activation20 + + + + +_mul13->activation21 + + + + +_plus9 + +_plus9 +_Plus + + +_plus9->_mul12 + + + + +_plus9->_mul13 + + + + +activation24 + +activation24 +Activation +act-type=tanh + + +activation24->_plus9 + + + + +_mul14->activation23 + + + + +_mul14->activation24 + + + + +_plus2->ptb_lstm_1_i2h + + + + +_plus2->ptb_lstm_1_h2h + + -activation8 - -activation8 -Activation -act-type=sigmoid +activation8 + +activation8 +Activation +act-type=sigmoid - -activation8->ptb_lstm_2_gates - - + +activation8->ptb_lstm_1_gates + + -activation7 - -activation7 -Activation -act-type=sigmoid +activation7 + +activation7 +Activation +act-type=sigmoid - -activation7->ptb_lstm_2_gates - - + +activation7->ptb_lstm_1_gates + + -_mul3 - -_mul3 -_Mul - - -_mul3->_plus1 - - +_mul3 + +_mul3 +_Mul -_mul3->activation7 - - +_mul3->activation7 + + -activation5 - -activation5 -Activation -act-type=sigmoid +activation5 + +activation5 +Activation +act-type=sigmoid - -activation5->ptb_lstm_2_gates - - + +activation5->ptb_lstm_1_gates + + -activation6 - -activation6 -Activation -act-type=tanh +activation6 + +activation6 +Activation +act-type=tanh - -activation6->ptb_lstm_2_gates - - + +activation6->ptb_lstm_1_gates + + -_mul4 - -_mul4 -_Mul +_mul4 + +_mul4 +_Mul -_mul4->activation5 - - +_mul4->activation5 + + 
-_mul4->activation6 - - +_mul4->activation6 + + -_plus3 - -_plus3 -_Plus +_plus3 + +_plus3 +_Plus -_plus3->_mul3 - - +_plus3->_mul3 + + -_plus3->_mul4 - - +_plus3->_mul4 + + -activation9 - -activation9 -Activation -act-type=tanh +activation9 + +activation9 +Activation +act-type=tanh -activation9->_plus3 - - - - -_mul5 - -_mul5 -_Mul +activation9->_plus3 + + -_mul5->activation8 - - +_mul5->activation8 + + -_mul5->activation9 - - +_mul5->activation9 + + + + +_plus6->ptb_lstm_2_i2h + + + + +_plus6->ptb_lstm_2_h2h + + + + +activation18 + +activation18 +Activation +act-type=sigmoid + + +activation18->ptb_lstm_2_gates + + + + +activation17 + +activation17 +Activation +act-type=sigmoid + + +activation17->ptb_lstm_2_gates + + + + +_mul9 + +_mul9 +_Mul + + +_mul9->_plus3 + + + + +_mul9->activation17 + + + + +activation15 + +activation15 +Activation +act-type=sigmoid + + +activation15->ptb_lstm_2_gates + + + + +activation16 + +activation16 +Activation +act-type=tanh + + +activation16->ptb_lstm_2_gates + + + + +_mul10 + +_mul10 +_Mul + + +_mul10->activation15 + + + + +_mul10->activation16 + + + + +_plus7 + +_plus7 +_Plus + + +_plus7->_mul9 + + + + +_plus7->_mul10 + + + + +activation19 + +activation19 +Activation +act-type=tanh + + +activation19->_plus7 + + + + +_mul11->activation18 + + + + +_mul11->activation19 + + + + +_plus10->ptb_lstm_3_i2h + + + + +_plus10->ptb_lstm_3_h2h + + + + +activation28 + +activation28 +Activation +act-type=sigmoid + + +activation28->ptb_lstm_3_gates + + + + +activation27 + +activation27 +Activation +act-type=sigmoid + + +activation27->ptb_lstm_3_gates + + + + +_mul15 + +_mul15 +_Mul + + +_mul15->_plus7 + + + + +_mul15->activation27 + + + + +activation25 + +activation25 +Activation +act-type=sigmoid + + +activation25->ptb_lstm_3_gates + + + + +activation26 + +activation26 +Activation +act-type=tanh + + +activation26->ptb_lstm_3_gates + + + + +_mul16 + +_mul16 +_Mul + + +_mul16->activation25 + + + + +_mul16->activation26 + + + + +_plus11 + +_plus11 
+_Plus + + +_plus11->_mul15 + + + + +_plus11->_mul16 + + + + +activation29 + +activation29 +Activation +act-type=tanh + + +activation29->_plus11 + + + + +_mul17 + +_mul17 +_Mul + + +_mul17->activation28 + + + + +_mul17->activation29 + + + + +ptb_l2_last_h + +ptb_l2_last_h +BlockGrad + + +ptb_l2_last_h->_mul17 + + -ptb_l1_last_h - -ptb_l1_last_h -BlockGrad - - -ptb_l1_last_h->_mul5 - - +ptb_l1_last_h + +ptb_l1_last_h +BlockGrad + + +ptb_l1_last_h->_mul14 + + + + +ptb_l2_last_c + +ptb_l2_last_c +BlockGrad + + +ptb_l2_last_c->_plus11 + + -ptb_l1_last_c - -ptb_l1_last_c -BlockGrad - - -ptb_l1_last_c->_plus3 - - +ptb_l1_last_c + +ptb_l1_last_c +BlockGrad + + +ptb_l1_last_c->_plus9 + + + + +ptb_pred_3 + +ptb_pred_3 +FullyConnected +num-hidden=128 + + +ptb_pred_3->_mul17 + + + + +ptb_softmax_3 + +ptb_softmax_3 +SoftmaxOutput + + +ptb_softmax_3->ptb_pred_3 + + -ptb_pred_2 - -ptb_pred_2 -FullyConnected -num-hidden=128 +ptb_pred_2 + +ptb_pred_2 +FullyConnected +num-hidden=128 - -ptb_pred_2->_mul5 - - + +ptb_pred_2->_mul11 + + -ptb_softmax_2 - -ptb_softmax_2 -SoftmaxOutput +ptb_softmax_2 + +ptb_softmax_2 +SoftmaxOutput -ptb_softmax_2->ptb_pred_2 - - +ptb_softmax_2->ptb_pred_2 + + -ptb_pred_1 - -ptb_pred_1 -FullyConnected -num-hidden=128 +ptb_pred_1 + +ptb_pred_1 +FullyConnected +num-hidden=128 - -ptb_pred_1->_mul2 - - + +ptb_pred_1->_mul5 + + -ptb_softmax_1 - -ptb_softmax_1 -SoftmaxOutput +ptb_softmax_1 + +ptb_softmax_1 +SoftmaxOutput -ptb_softmax_1->ptb_pred_1 - - +ptb_softmax_1->ptb_pred_1 + + diff --git a/examples/char-lstm/.gitignore b/examples/char-lstm/.gitignore index d8923cc201b1..a393ee67b410 100644 --- a/examples/char-lstm/.gitignore +++ b/examples/char-lstm/.gitignore @@ -3,3 +3,4 @@ vocab.dat checkpoints visualize.dot visualize.svg +visualize.png diff --git a/src/visualize.jl b/src/visualize.jl index 0b5c0c3d8e90..b1fa90ff3851 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -35,15 +35,17 @@ function to_graphviz(network :: SymbolicNode; title="Network 
Visualization", inp nodes = conf["nodes"] heads = unique([x[1]+1 for x in conf["heads"]]) node_attr = Dict(:shape => :box, :fixedsize => true, :width => 1.3, - :height => 0.8034, :style => :filled) + :height => 0.8034, :style => (:rounded, :filled), :penwidth => 2) io = IOBuffer() println(io, "digraph $(_simple_escape(title)) {") println(io, "node [fontsize=10];") println(io, "edge [fontsize=10];") # color map - cm = ("#8dd3c7", "#fb8072", "#ffffb3", "#bebada", "#80b1d3", - "#fdb462", "#b3de69", "#fccde5") + fillcolors = ("#8dd3c7", "#fb8072", "#ffffb3", "#bebada", "#80b1d3", + "#fdb462", "#b3de69", "#fccde5") + edgecolors = ("#245b51", "#941305", "#999900", "#3b3564", "#275372", + "#975102", "#597d1c", "#90094e") # make nodes for i = 1:length(nodes) @@ -57,7 +59,7 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp if i ∈ heads # heads are output nodes label = node["name"] - attr[:fillcolor] = cm[1] + colorkey = 1 else # otherwise, input nodes, might be data, label or parameters continue @@ -67,33 +69,35 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp _extract_shape(node["param"]["kernel"]), _extract_shape(node["param"]["stride"]), node["param"]["num_filter"]) - attr[:fillcolor] = cm[2] + colorkey = 2 elseif op == "FullyConnected" label = format("FullyConnected\nnum-hidden={1}", node["param"]["num_hidden"]) - attr[:fillcolor] = cm[2] + colorkey = 2 elseif op == "Activation" label = format("Activation\nact-type={1}", node["param"]["act_type"]) - attr[:fillcolor] = cm[3] + colorkey = 3 elseif op == "BatchNorm" - attr[:fillcolor] = cm[4] + colorkey = 4 elseif op == "Pooling" label = format("Pooling\ntype={1}\nkernel={2}\nstride={3}", node["param"]["pool_type"], _extract_shape(node["param"]["kernel"]), _extract_shape(node["param"]["stride"])) - attr[:fillcolor] = cm[5] + colorkey = 5 elseif op ∈ ("Concat", "Flatten", "Reshape") - attr[:fillcolor] = cm[6] + colorkey = 6 elseif endswith(op, "Output") || 
op == "BlockGrad" - attr[:fillcolor] = cm[7] + colorkey = 7 else - attr[:fillcolor] = cm[8] + colorkey = 8 end if op != "null" label = "$name\n$label" end - attr[:label] = label + attr[:fillcolor] = fillcolors[colorkey] + attr[:color] = edgecolors[colorkey] + attr[:label] = label _format_graphviz_node(io, name, attr) end @@ -110,7 +114,7 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp input_node = nodes[item[1]+1] input_name = input_node["name"] if input_node["op"] != "null" || (item[1]+1) ∈ heads - attr = Dict(:dir => :back, :arrowtail => :open) + attr = Dict(:dir => :back, :arrowtail => :open, :color => "#737373") if draw_shape if input_node["op"] != "null" key = symbol(input_name, "_output") @@ -149,6 +153,8 @@ function _format_graphviz_attr(io::IOBuffer, attrs) if isa(v, AbstractString) && v[1] == '#' # color v = _simple_escape(v) + elseif isa(v, Tuple) + v = _simple_escape(join([string(x) for x in v], ",")) end print(io, "$k=$v") end From aea096f55f85ad7fc3c21dd098436fe455af327d Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 15 Nov 2015 20:30:04 -0500 Subject: [PATCH 241/630] use a smaller network for visualization. 
--- docs/tutorial/images/char-lstm-vis.svg | 1284 ++++++------------------ examples/char-lstm/visualize.jl | 4 +- 2 files changed, 303 insertions(+), 985 deletions(-) diff --git a/docs/tutorial/images/char-lstm-vis.svg b/docs/tutorial/images/char-lstm-vis.svg index cf49d097bdc7..610abab774b7 100644 --- a/docs/tutorial/images/char-lstm-vis.svg +++ b/docs/tutorial/images/char-lstm-vis.svg @@ -4,1114 +4,432 @@ - - + + Network Visualization - - -ptb_embed_3 - -ptb_embed_3 -FullyConnected -num-hidden=256 - - -ptb_lstm_3_i2h - -ptb_lstm_3_i2h -FullyConnected -num-hidden=1024 - - -ptb_lstm_3_i2h->ptb_embed_3 - - - - -_mul14 - -_mul14 -_Mul - - -ptb_lstm_3_i2h->_mul14 - - - + -ptb_embed_2 - -ptb_embed_2 -FullyConnected -num-hidden=256 +ptb_embed_2 + +ptb_embed_2 +FullyConnected +num-hidden=256 -ptb_lstm_2_i2h - -ptb_lstm_2_i2h -FullyConnected -num-hidden=1024 +ptb_lstm_2_i2h + +ptb_lstm_2_i2h +FullyConnected +num-hidden=1024 -ptb_lstm_2_i2h->ptb_embed_2 - - - - -_mul8 - -_mul8 -_Mul - - -ptb_lstm_2_i2h->_mul8 - - +ptb_lstm_2_i2h->ptb_embed_2 + + -ptb_embed_1 - -ptb_embed_1 -FullyConnected -num-hidden=256 +ptb_embed_1 + +ptb_embed_1 +FullyConnected +num-hidden=256 -ptb_lstm_1_i2h - -ptb_lstm_1_i2h -FullyConnected -num-hidden=1024 +ptb_lstm_1_i2h + +ptb_lstm_1_i2h +FullyConnected +num-hidden=1024 -ptb_lstm_1_i2h->ptb_embed_1 - - - - -_mul2 - -_mul2 -_Mul - - -ptb_lstm_1_i2h->_mul2 - - +ptb_lstm_1_i2h->ptb_embed_1 + + -ptb_lstm_1_h2h - -ptb_lstm_1_h2h -FullyConnected -num-hidden=1024 +ptb_lstm_1_h2h + +ptb_lstm_1_h2h +FullyConnected +num-hidden=1024 -_plus0 - -_plus0 -_Plus +_plus0 + +_plus0 +_Plus -_plus0->ptb_lstm_1_i2h - - +_plus0->ptb_lstm_1_i2h + + -_plus0->ptb_lstm_1_h2h - - +_plus0->ptb_lstm_1_h2h + + -ptb_lstm_1_gates - -ptb_lstm_1_gates -SliceChannel +ptb_lstm_1_gates + +ptb_lstm_1_gates +SliceChannel -ptb_lstm_1_gates->_plus0 - - - - -_plus2 - -_plus2 -_Plus - - -ptb_lstm_1_gates->_plus2 - - +ptb_lstm_1_gates->_plus0 + + -activation3 - -activation3 -Activation 
-act-type=sigmoid +activation3 + +activation3 +Activation +act-type=sigmoid -activation3->ptb_lstm_1_gates - - +activation3->ptb_lstm_1_gates + + -activation2 - -activation2 -Activation -act-type=sigmoid +activation2 + +activation2 +Activation +act-type=sigmoid -activation2->ptb_lstm_1_gates - - +activation2->ptb_lstm_1_gates + + -_mul0 - -_mul0 -_Mul +_mul0 + +_mul0 +_Mul -_mul0->activation2 - - +_mul0->activation2 + + -activation0 - -activation0 -Activation -act-type=sigmoid +activation0 + +activation0 +Activation +act-type=sigmoid -activation0->ptb_lstm_1_gates - - +activation0->ptb_lstm_1_gates + + -activation1 - -activation1 -Activation -act-type=tanh +activation1 + +activation1 +Activation +act-type=tanh -activation1->ptb_lstm_1_gates - - +activation1->ptb_lstm_1_gates + + -_mul1 - -_mul1 -_Mul +_mul1 + +_mul1 +_Mul -_mul1->activation0 - - +_mul1->activation0 + + -_mul1->activation1 - - +_mul1->activation1 + + -_plus1 - -_plus1 -_Plus +_plus1 + +_plus1 +_Plus -_plus1->_mul0 - - +_plus1->_mul0 + + -_plus1->_mul1 - - +_plus1->_mul1 + + -activation4 - -activation4 -Activation -act-type=tanh +activation4 + +activation4 +Activation +act-type=tanh -activation4->_plus1 - - +activation4->_plus1 + + + + +_mul2 + +_mul2 +_Mul -_mul2->activation3 - - +_mul2->activation3 + + -_mul2->activation4 - - +_mul2->activation4 + + -ptb_lstm_2_h2h - -ptb_lstm_2_h2h -FullyConnected -num-hidden=1024 +ptb_lstm_2_h2h + +ptb_lstm_2_h2h +FullyConnected +num-hidden=1024 -ptb_lstm_2_h2h->_mul2 - - +ptb_lstm_2_h2h->_mul2 + + - -_mul5 - -_mul5 -_Mul - - -ptb_lstm_2_h2h->_mul5 - - - - -_plus4 - -_plus4 -_Plus - - -_plus4->ptb_lstm_2_i2h - - - - -_plus4->ptb_lstm_2_h2h - - + +_plus2 + +_plus2 +_Plus + + +_plus2->ptb_lstm_2_i2h + + + + +_plus2->ptb_lstm_2_h2h + + -ptb_lstm_2_gates - -ptb_lstm_2_gates -SliceChannel - - -ptb_lstm_2_gates->_plus4 - - - - -_plus6 - -_plus6 -_Plus - - -ptb_lstm_2_gates->_plus6 - - - - -activation13 - -activation13 -Activation -act-type=sigmoid - - 
-activation13->ptb_lstm_2_gates - - - - -activation12 - -activation12 -Activation -act-type=sigmoid - - -activation12->ptb_lstm_2_gates - - - - -_mul6 - -_mul6 -_Mul - - -_mul6->_plus1 - - - - -_mul6->activation12 - - - - -activation10 - -activation10 -Activation -act-type=sigmoid - - -activation10->ptb_lstm_2_gates - - - - -activation11 - -activation11 -Activation -act-type=tanh - - -activation11->ptb_lstm_2_gates - - - - -_mul7 - -_mul7 -_Mul - - -_mul7->activation10 - - - - -_mul7->activation11 - - - - -_plus5 - -_plus5 -_Plus - - -_plus5->_mul6 - - - - -_plus5->_mul7 - - - - -activation14 - -activation14 -Activation -act-type=tanh - - -activation14->_plus5 - - - - -_mul8->activation13 - - - - -_mul8->activation14 - - - - -ptb_lstm_3_h2h - -ptb_lstm_3_h2h -FullyConnected -num-hidden=1024 - - -ptb_lstm_3_h2h->_mul8 - - - - -_mul11 - -_mul11 -_Mul - - -ptb_lstm_3_h2h->_mul11 - - - - -_plus8 - -_plus8 -_Plus - - -_plus8->ptb_lstm_3_i2h - - - - -_plus8->ptb_lstm_3_h2h - - - - -ptb_lstm_3_gates - -ptb_lstm_3_gates -SliceChannel - - -ptb_lstm_3_gates->_plus8 - - - - -_plus10 - -_plus10 -_Plus - - -ptb_lstm_3_gates->_plus10 - - - - -activation23 - -activation23 -Activation -act-type=sigmoid - - -activation23->ptb_lstm_3_gates - - - - -activation22 - -activation22 -Activation -act-type=sigmoid - - -activation22->ptb_lstm_3_gates - - - - -_mul12 - -_mul12 -_Mul - - -_mul12->_plus5 - - - - -_mul12->activation22 - - - - -activation20 - -activation20 -Activation -act-type=sigmoid - - -activation20->ptb_lstm_3_gates - - - - -activation21 - -activation21 -Activation -act-type=tanh - - -activation21->ptb_lstm_3_gates - - - - -_mul13 - -_mul13 -_Mul - - -_mul13->activation20 - - - - -_mul13->activation21 - - - - -_plus9 - -_plus9 -_Plus - - -_plus9->_mul12 - - - - -_plus9->_mul13 - - - - -activation24 - -activation24 -Activation -act-type=tanh - - -activation24->_plus9 - - - - -_mul14->activation23 - - - - -_mul14->activation24 - - - - -_plus2->ptb_lstm_1_i2h - - - - 
-_plus2->ptb_lstm_1_h2h - - +ptb_lstm_2_gates + +ptb_lstm_2_gates +SliceChannel + + +ptb_lstm_2_gates->_plus2 + + -activation8 - -activation8 -Activation -act-type=sigmoid +activation8 + +activation8 +Activation +act-type=sigmoid - -activation8->ptb_lstm_1_gates - - + +activation8->ptb_lstm_2_gates + + -activation7 - -activation7 -Activation -act-type=sigmoid +activation7 + +activation7 +Activation +act-type=sigmoid - -activation7->ptb_lstm_1_gates - - + +activation7->ptb_lstm_2_gates + + -_mul3 - -_mul3 -_Mul +_mul3 + +_mul3 +_Mul + + +_mul3->_plus1 + + -_mul3->activation7 - - +_mul3->activation7 + + -activation5 - -activation5 -Activation -act-type=sigmoid +activation5 + +activation5 +Activation +act-type=sigmoid - -activation5->ptb_lstm_1_gates - - + +activation5->ptb_lstm_2_gates + + -activation6 - -activation6 -Activation -act-type=tanh +activation6 + +activation6 +Activation +act-type=tanh - -activation6->ptb_lstm_1_gates - - + +activation6->ptb_lstm_2_gates + + -_mul4 - -_mul4 -_Mul +_mul4 + +_mul4 +_Mul -_mul4->activation5 - - +_mul4->activation5 + + -_mul4->activation6 - - +_mul4->activation6 + + -_plus3 - -_plus3 -_Plus +_plus3 + +_plus3 +_Plus -_plus3->_mul3 - - +_plus3->_mul3 + + -_plus3->_mul4 - - +_plus3->_mul4 + + -activation9 - -activation9 -Activation -act-type=tanh +activation9 + +activation9 +Activation +act-type=tanh -activation9->_plus3 - - +activation9->_plus3 + + + + +_mul5 + +_mul5 +_Mul -_mul5->activation8 - - +_mul5->activation8 + + -_mul5->activation9 - - - - -_plus6->ptb_lstm_2_i2h - - - - -_plus6->ptb_lstm_2_h2h - - - - -activation18 - -activation18 -Activation -act-type=sigmoid - - -activation18->ptb_lstm_2_gates - - - - -activation17 - -activation17 -Activation -act-type=sigmoid - - -activation17->ptb_lstm_2_gates - - - - -_mul9 - -_mul9 -_Mul - - -_mul9->_plus3 - - - - -_mul9->activation17 - - - - -activation15 - -activation15 -Activation -act-type=sigmoid - - -activation15->ptb_lstm_2_gates - - - - -activation16 - -activation16 
-Activation -act-type=tanh - - -activation16->ptb_lstm_2_gates - - - - -_mul10 - -_mul10 -_Mul - - -_mul10->activation15 - - - - -_mul10->activation16 - - - - -_plus7 - -_plus7 -_Plus - - -_plus7->_mul9 - - - - -_plus7->_mul10 - - - - -activation19 - -activation19 -Activation -act-type=tanh - - -activation19->_plus7 - - - - -_mul11->activation18 - - - - -_mul11->activation19 - - - - -_plus10->ptb_lstm_3_i2h - - - - -_plus10->ptb_lstm_3_h2h - - - - -activation28 - -activation28 -Activation -act-type=sigmoid - - -activation28->ptb_lstm_3_gates - - - - -activation27 - -activation27 -Activation -act-type=sigmoid - - -activation27->ptb_lstm_3_gates - - - - -_mul15 - -_mul15 -_Mul - - -_mul15->_plus7 - - - - -_mul15->activation27 - - - - -activation25 - -activation25 -Activation -act-type=sigmoid - - -activation25->ptb_lstm_3_gates - - - - -activation26 - -activation26 -Activation -act-type=tanh - - -activation26->ptb_lstm_3_gates - - - - -_mul16 - -_mul16 -_Mul - - -_mul16->activation25 - - - - -_mul16->activation26 - - - - -_plus11 - -_plus11 -_Plus - - -_plus11->_mul15 - - - - -_plus11->_mul16 - - - - -activation29 - -activation29 -Activation -act-type=tanh - - -activation29->_plus11 - - - - -_mul17 - -_mul17 -_Mul - - -_mul17->activation28 - - - - -_mul17->activation29 - - - - -ptb_l2_last_h - -ptb_l2_last_h -BlockGrad - - -ptb_l2_last_h->_mul17 - - +_mul5->activation9 + + -ptb_l1_last_h - -ptb_l1_last_h -BlockGrad - - -ptb_l1_last_h->_mul14 - - - - -ptb_l2_last_c - -ptb_l2_last_c -BlockGrad - - -ptb_l2_last_c->_plus11 - - +ptb_l1_last_h + +ptb_l1_last_h +BlockGrad + + +ptb_l1_last_h->_mul5 + + -ptb_l1_last_c - -ptb_l1_last_c -BlockGrad - - -ptb_l1_last_c->_plus9 - - - - -ptb_pred_3 - -ptb_pred_3 -FullyConnected -num-hidden=128 - - -ptb_pred_3->_mul17 - - - - -ptb_softmax_3 - -ptb_softmax_3 -SoftmaxOutput - - -ptb_softmax_3->ptb_pred_3 - - +ptb_l1_last_c + +ptb_l1_last_c +BlockGrad + + +ptb_l1_last_c->_plus3 + + -ptb_pred_2 - -ptb_pred_2 -FullyConnected 
-num-hidden=128 +ptb_pred_2 + +ptb_pred_2 +FullyConnected +num-hidden=128 - -ptb_pred_2->_mul11 - - + +ptb_pred_2->_mul5 + + -ptb_softmax_2 - -ptb_softmax_2 -SoftmaxOutput +ptb_softmax_2 + +ptb_softmax_2 +SoftmaxOutput -ptb_softmax_2->ptb_pred_2 - - +ptb_softmax_2->ptb_pred_2 + + -ptb_pred_1 - -ptb_pred_1 -FullyConnected -num-hidden=128 +ptb_pred_1 + +ptb_pred_1 +FullyConnected +num-hidden=128 - -ptb_pred_1->_mul5 - - + +ptb_pred_1->_mul2 + + -ptb_softmax_1 - -ptb_softmax_1 -SoftmaxOutput +ptb_softmax_1 + +ptb_softmax_1 +SoftmaxOutput -ptb_softmax_1->ptb_pred_1 - - +ptb_softmax_1->ptb_pred_1 + + diff --git a/examples/char-lstm/visualize.jl b/examples/char-lstm/visualize.jl index 336afea30705..acf757515005 100644 --- a/examples/char-lstm/visualize.jl +++ b/examples/char-lstm/visualize.jl @@ -3,8 +3,8 @@ include(joinpath(dirname(@__FILE__), "lstm.jl")) using MXNet -vis_n_layer = 2 -vis_seq_len = 3 +vis_n_layer = 1 +vis_seq_len = 2 vis_n_class = 128 lstm = LSTM(vis_n_layer, vis_seq_len, DIM_HIDDEN, DIM_EMBED, vis_n_class, name=NAME, output_states=true) From 4f094406ff1aa4950843413cec716050421cfb19 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Mon, 16 Nov 2015 16:32:31 +0900 Subject: [PATCH 242/630] implements variants of Xaiver They are several different Xaiver variants used and it is not quite clear if there is a "correct" one. Different variants can be chosen by the combinations of a uniform or normal distribution with a specific calculation of the variance. Currently implemented are different variants from Caffe, the original one by Bengio and Glorot and the one by K. He et. al 2015 (called msra in Caffe). The current default is the one used by the Python interface to MXNet (3 / (in+out)). 
implement several variants of xaiver rewrite xaiver to use enums --- src/initializer.jl | 52 +++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 49 insertions(+), 3 deletions(-) diff --git a/src/initializer.jl b/src/initializer.jl index 8f78bfba9367..6a941610e1cd 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -104,14 +104,60 @@ end The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding the difficulty of training deep feedforward neuralnetworks*. + + There are several different version of the XaiverInitializer used in the wild. + The general idea is that the variance of the initialization distribution is controlled + by the dimensionality of the input and output. As a distribution one can either choose + a normal distribution with μ = 0 and σ² or a uniform distribution from -σ to σ. + + Several different ways of calculating the variance are given in the literature or are + used by various libraries. + + - original [Bengio and Glorot 2010]: σ² = 2 / (in + out) + - msra [K. He, X. Zhang, S. Ren, and J. Sun 2015]: σ² = 2 / in + - caffe_avg: 6 / (in + out) + - caffe_in: 3 / in + - caffe_out: 3 / out + - mxnet: 3 / (in + out) + + Distribution and variant can be chosen by enums (prefixed by xv_). + As an example take mx.XaiverInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet), + which is currently the default. 
=# + +@enum XaiverDistribution xv_uniform xv_normal +@enum XaiverVariant xv_original xv_mrsa xv_caffe_avg xv_caffe_in zv_caffe_out xv_mxnet + immutable XaiverInitializer <: AbstractInitializer + distribution :: XaiverDistribution + variant :: XaiverVariant end +XaiverInitializer(; distribution = xv_uniform, variant = xv_mxnet) = XaiverInitializer(distribution, variant) -function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) +function _init_weight(self :: XaiverInitializer, name :: Base.Symbol, array :: NDArray) dims = size(array) fan_in = prod(dims[2:end]) fan_out = dims[1] - scale = sqrt(3 / (fan_in + fan_out)) - rand!(-scale, scale, array) + + if self.distribution == xv_uniform + func(σ, data) = rand!(-σ, σ, data) + elseif self.distribution == xv_normal + func(σ, data) = randn!(0.0, σ, data) + end + + if self.variant == xv_caffe_avg + var = 6 / (fan_in + fan_out) + elseif self.variant == xv_caffe_in + var = 3 / fan_in + elseif self.variant == xv_caffe_out + var = 3 / fan_out + elseif self.variant == xv_mrsa + var = 2 / fan_in + elseif self.variant == xv_original + var = 2 / (fan_in + fan_out) + elseif self.variant == xv_mxnet + var = 3 / (fan_in + fan_out) + end + + func(√var, array) end From 89cf70d996339190e440f882c4c8779dbb8d6441 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 16 Nov 2015 23:38:05 -0500 Subject: [PATCH 243/630] update doc --- docs/api/initializer.rst | 19 +++++++++++++++++++ src/initializer.jl | 6 +++--- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/docs/api/initializer.rst b/docs/api/initializer.rst index 3f4126721542..94a60e4021b0 100644 --- a/docs/api/initializer.rst +++ b/docs/api/initializer.rst @@ -65,5 +65,24 @@ Built-in initializers The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding the difficulty of training deep feedforward neuralnetworks*. + There are several different version of the XaiverInitializer used in the wild. 
+ The general idea is that the variance of the initialization distribution is controlled + by the dimensionality of the input and output. As a distribution one can either choose + a normal distribution with μ = 0 and σ² or a uniform distribution from -σ to σ. + + Several different ways of calculating the variance are given in the literature or are + used by various libraries. + + - original [Bengio and Glorot 2010]: σ² = 2 / (in + out) + - msra [K. He, X. Zhang, S. Ren, and J. Sun 2015]: σ² = 2 / in + - caffe_avg: 6 / (in + out) + - caffe_in: 3 / in + - caffe_out: 3 / out + - mxnet: 3 / (in + out) + + Distribution and variant can be chosen by enums (prefixed by ``xv_``). + As an example take ``mx.XaiverInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet)``, + which is currently the default. + diff --git a/src/initializer.jl b/src/initializer.jl index 6a941610e1cd..2fac334448a5 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -112,7 +112,7 @@ end Several different ways of calculating the variance are given in the literature or are used by various libraries. - + - original [Bengio and Glorot 2010]: σ² = 2 / (in + out) - msra [K. He, X. Zhang, S. Ren, and J. Sun 2015]: σ² = 2 / in - caffe_avg: 6 / (in + out) @@ -120,8 +120,8 @@ end - caffe_out: 3 / out - mxnet: 3 / (in + out) - Distribution and variant can be chosen by enums (prefixed by xv_). - As an example take mx.XaiverInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet), + Distribution and variant can be chosen by enums (prefixed by ``xv_``). + As an example take ``mx.XaiverInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet)``, which is currently the default. 
=# From 1a935deed153622e357ac90ae5ab159c5c08f978 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 17 Nov 2015 15:00:38 +0900 Subject: [PATCH 244/630] rename Xaiver to Xavier --- src/initializer.jl | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/initializer.jl b/src/initializer.jl index 2fac334448a5..01744a760448 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -100,12 +100,12 @@ function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: N end #=doc -.. class:: XaiverInitializer +.. class:: XavierInitializer The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding the difficulty of training deep feedforward neuralnetworks*. - There are several different version of the XaiverInitializer used in the wild. + There are several different version of the XavierInitializer used in the wild. The general idea is that the variance of the initialization distribution is controlled by the dimensionality of the input and output. As a distribution one can either choose a normal distribution with μ = 0 and σ² or a uniform distribution from -σ to σ. @@ -121,20 +121,20 @@ end - mxnet: 3 / (in + out) Distribution and variant can be chosen by enums (prefixed by ``xv_``). - As an example take ``mx.XaiverInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet)``, + As an example take ``mx.XavierInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet)``, which is currently the default. 
=# -@enum XaiverDistribution xv_uniform xv_normal -@enum XaiverVariant xv_original xv_mrsa xv_caffe_avg xv_caffe_in zv_caffe_out xv_mxnet +@enum XavierDistribution xv_uniform xv_normal +@enum XavierVariant xv_original xv_mrsa xv_caffe_avg xv_caffe_in zv_caffe_out xv_mxnet -immutable XaiverInitializer <: AbstractInitializer - distribution :: XaiverDistribution - variant :: XaiverVariant +immutable XavierInitializer <: AbstractInitializer + distribution :: XavierDistribution + variant :: XavierVariant end -XaiverInitializer(; distribution = xv_uniform, variant = xv_mxnet) = XaiverInitializer(distribution, variant) +XavierInitializer(; distribution = xv_uniform, variant = xv_mxnet) = XavierInitializer(distribution, variant) -function _init_weight(self :: XaiverInitializer, name :: Base.Symbol, array :: NDArray) +function _init_weight(self :: XavierInitializer, name :: Base.Symbol, array :: NDArray) dims = size(array) fan_in = prod(dims[2:end]) fan_out = dims[1] From 5dd866466158b18731461010a3e5cbb5c5f5d0a0 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 17 Nov 2015 15:01:33 +0900 Subject: [PATCH 245/630] rebuild docs --- docs/api/initializer.rst | 6 +++--- docs/api/io.rst | 4 ++-- docs/api/symbolic-node.rst | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/api/initializer.rst b/docs/api/initializer.rst index 94a60e4021b0..30b2df35664a 100644 --- a/docs/api/initializer.rst +++ b/docs/api/initializer.rst @@ -60,12 +60,12 @@ Built-in initializers -.. class:: XaiverInitializer +.. class:: XavierInitializer The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding the difficulty of training deep feedforward neuralnetworks*. - There are several different version of the XaiverInitializer used in the wild. + There are several different version of the XavierInitializer used in the wild. The general idea is that the variance of the initialization distribution is controlled by the dimensionality of the input and output. 
As a distribution one can either choose a normal distribution with μ = 0 and σ² or a uniform distribution from -σ to σ. @@ -81,7 +81,7 @@ Built-in initializers - mxnet: 3 / (in + out) Distribution and variant can be chosen by enums (prefixed by ``xv_``). - As an example take ``mx.XaiverInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet)``, + As an example take ``mx.XavierInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet)``, which is currently the default. diff --git a/docs/api/io.rst b/docs/api/io.rst index f1ab959be6f2..6186db52ecc1 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -324,7 +324,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -467,7 +467,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :return: the constructed :class:`MXDataProvider`. diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 78e359537ec3..d847c6a8bd38 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -186,7 +186,7 @@ Public APIs :param momentum: Momentum for moving average - :type momentum: float, optional, default=0.1 + :type momentum: float, optional, default=0.9 :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
From 5b0e1acbeab508bc3e41a787eb91a24c580f02b8 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 18 Nov 2015 18:39:35 -0500 Subject: [PATCH 246/630] remove grad_scale parameter from optimizers (duplicated with lr) --- docs/build-api.jl | 2 +- src/optimizer.jl | 2 +- src/optimizers/adam.jl | 20 +++++++++++++++++++- src/optimizers/sgd.jl | 20 +++++++++++++++++++- 4 files changed, 40 insertions(+), 4 deletions(-) diff --git a/docs/build-api.jl b/docs/build-api.jl index 53ca9f6f7ca1..c8e54677096f 100644 --- a/docs/build-api.jl +++ b/docs/build-api.jl @@ -88,7 +88,7 @@ extract_doc("initializer.rst", "initializer.jl") extract_doc("callback.rst", "callback.jl") extract_doc("model.rst", "model.jl") -extract_doc("optimizer.rst", "optimizer.jl") +extract_doc("optimizer.rst", "optimizer.jl", "optimizers/sgd.jl", "optimizers/adam.jl") extract_doc("metric.rst", "metric.jl") extract_doc("io.rst", "io.jl") diff --git a/src/optimizer.jl b/src/optimizer.jl index a5f0bfd5ec60..0a3f23bc96ba 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -211,7 +211,7 @@ abstract AbstractOptimizerOptions =# function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, weight::NDArray, grad::NDArray) - grad_scale = opts.grad_scale / state.batch_size + grad_scale = 1.0 / state.batch_size grad = grad_scale * grad if opts.grad_clip > 0 diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index d8d1d2377836..95eaefe275a1 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -1,6 +1,5 @@ @defstruct ADAMOptions AbstractOptimizerOptions ( (lr :: Real = 0.001, lr > 0), - (grad_scale :: Real = 1.0, grad_scale >= 0), (grad_clip :: Real = 0, grad_clip >= 0), (weight_decay :: Real = 0.00001, weight_decay >= 0), (beta1 :: Real = 0.9, beta1 > 0), @@ -9,7 +8,26 @@ lr_scheduler :: Any = nothing ) +#=doc +.. class:: ADAM + The solver described in Diederik Kingma, Jimmy Ba: *Adam: A Method for + Stochastic Optimization*. arXiv:1412.6980 [cs.LG]. + + .. 
function:: ADAM(; kwargs...) + + :param Real lr: default `0.001`, learning rate. + :param AbstractLearningRateScheduler lr_scheduler: default `nothing`, a + dynamic learning rate scheduler. If set, will overwrite the `lr` + parameter. + :param Real beta1: default `0.9`. + :param Real beta2: default `0.999`. + :param Real epsilon: default `1e-8`. + :param Real grad_clip: default `0`, if positive, will clip the gradient + into the range `[-grad_clip, grad_clip]`. + :param Real weight_decay: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. +=# type ADAM <: AbstractOptimizer opts :: ADAMOptions state :: OptimizationState diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index 84ea55f40f11..f3d1b10a66e4 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -1,14 +1,32 @@ @defstruct SGDOptions AbstractOptimizerOptions ( (lr :: Real = 0.01, lr > 0), (momentum :: Real = 0.0, momentum >= 0), - (grad_scale :: Real = 1.0, grad_scale >= 0), (grad_clip :: Real = 0, grad_clip >= 0), (weight_decay :: Real = 0.0001, weight_decay >= 0), lr_scheduler :: Any = nothing, momentum_scheduler :: Any = nothing ) +#=doc +.. class:: SGD + Stochastic gradient descent optimizer. + + .. function:: SGD(; kwargs...) + + :param Real lr: default `0.01`, learning rate. + :param AbstractLearningRateScheduler lr_scheduler: default `nothing`, a + dynamic learning rate scheduler. If set, will overwrite the `lr` + parameter. + :param Real momentum: default `0.0`, the momentum. + :param AbstractMomentumScheduler momentum_scheduler: default `nothing`, + a dynamic momentum scheduler. If set, will overwrite the `momentum` + parameter. + :param Real grad_clip: default `0`, if positive, will clip the gradient + into the bounded range `[-grad_clip, grad_clip]`. + :param Real weight_decay: default `0.0001`, weight decay is equivalent to + adding a global l2 regularizer to the parameters. 
+=# type SGD <: AbstractOptimizer opts :: SGDOptions state :: OptimizationState From 4fc66b4d5711aab1f8e3b131109ecdcc46dd11e6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 18 Nov 2015 18:40:08 -0500 Subject: [PATCH 247/630] update documents for optimizers --- docs/api/io.rst | 4 ++-- docs/api/optimizer.rst | 44 ++++++++++++++++++++++++++++++++++++++ docs/api/symbolic-node.rst | 2 +- 3 files changed, 47 insertions(+), 3 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index 6186db52ecc1..f1ab959be6f2 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -324,7 +324,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -467,7 +467,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :return: the constructed :class:`MXDataProvider`. diff --git a/docs/api/optimizer.rst b/docs/api/optimizer.rst index bc45cf1744ac..12e898d233b5 100644 --- a/docs/api/optimizer.rst +++ b/docs/api/optimizer.rst @@ -139,3 +139,47 @@ Built-in optimizers + +.. class:: SGD + + Stochastic gradient descent optimizer. + + .. function:: SGD(; kwargs...) + + :param Real lr: default `0.01`, learning rate. + :param AbstractLearningRateScheduler lr_scheduler: default `nothing`, a + dynamic learning rate scheduler. If set, will overwrite the `lr` + parameter. + :param Real momentum: default `0.0`, the momentum. + :param AbstractMomentumScheduler momentum_scheduler: default `nothing`, + a dynamic momentum scheduler. If set, will overwrite the `momentum` + parameter. + :param Real grad_clip: default `0`, if positive, will clip the gradient + into the bounded range `[-grad_clip, grad_clip]`. 
+ :param Real weight_decay: default `0.0001`, weight decay is equivalent to + adding a global l2 regularizer to the parameters. + + + + +.. class:: ADAM + + The solver described in Diederik Kingma, Jimmy Ba: *Adam: A Method for + Stochastic Optimization*. arXiv:1412.6980 [cs.LG]. + + .. function:: ADAM(; kwargs...) + + :param Real lr: default `0.001`, learning rate. + :param AbstractLearningRateScheduler lr_scheduler: default `nothing`, a + dynamic learning rate scheduler. If set, will overwrite the `lr` + parameter. + :param Real beta1: default `0.9`. + :param Real beta2: default `0.999`. + :param Real epsilon: default `1e-8`. + :param Real grad_clip: default `0`, if positive, will clip the gradient + into the range `[-grad_clip, grad_clip]`. + :param Real weight_decay: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. + + + diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index d847c6a8bd38..78e359537ec3 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -186,7 +186,7 @@ Public APIs :param momentum: Momentum for moving average - :type momentum: float, optional, default=0.9 + :type momentum: float, optional, default=0.1 :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
From 884c7f84065e0d8d96e7696079c431fe86936308 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 19 Nov 2015 12:17:37 -0500 Subject: [PATCH 248/630] more arithmetic operations on symbolic nodes --- docs/api/symbolic-node.rst | 37 ++++++++++++++++++- src/symbolic-node.jl | 75 ++++++++++++++++++++++++++++++++++---- test/unittest/operator.jl | 36 ++++++++++++++++++ 3 files changed, 139 insertions(+), 9 deletions(-) create mode 100644 test/unittest/operator.jl diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 78e359537ec3..30420da31f13 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -186,7 +186,7 @@ Public APIs :param momentum: Momentum for moving average - :type momentum: float, optional, default=0.1 + :type momentum: float, optional, default=0.9 :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. @@ -872,6 +872,41 @@ Internal APIs +.. function:: _Power(...) + + Perform an elementwise power. + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: _PowerScalar(...) + + Perform an elementwise power. + + :param array: Input array operand to the operation. + :type array: SymbolicNode + + + :param scalar: scalar value. + :type scalar: float, required + + + :param scalar_on_right: scalar operand is on the right. + :type scalar_on_right: boolean, optional, default=False + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 0b7385ffa1a7..c1b604432718 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -250,16 +250,26 @@ function Base.getindex(self :: SymbolicNode, idx :: Int) end import Base: +, .+ -function +(self :: SymbolicNode, args :: SymbolicNode...) 
+function +(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) ret = self for arg in args - ret = _Plus(ret, arg) + if isa(arg, SymbolicNode) + ret = _Plus(ret, arg) + else + ret = _PlusScalar(ret, scalar=MX_float(arg)) + end end ret end -function .+(self :: SymbolicNode, args :: SymbolicNode...) +function .+(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) +(self, args...) end +function +(s1 :: Real, self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) + +(self, s1, args...) +end +function .+(s1 :: Real, self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) + +(self, s1, args...) +end import Base: -, .- function -(self :: SymbolicNode, arg :: SymbolicNode) @@ -268,20 +278,70 @@ end function .-(self :: SymbolicNode, arg :: SymbolicNode) -(self, arg) end +function -(self :: SymbolicNode, arg :: Real) + _MinusScalar(self, scalar=MX_float(arg)) +end +function .-(self :: SymbolicNode, arg :: Real) + -(self, arg) +end -import Base: .* -function .*(self :: SymbolicNode, args :: SymbolicNode...) +function -(arg :: Real, self :: SymbolicNode) + _MinusScalar(self, scalar=arg, scalar_on_right=true) +end +function .-(arg :: Real, self :: SymbolicNode) + -(arg, self) +end + +function -(self :: SymbolicNode) + -(0, self) +end + +import Base: .*, * +function .*(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) ret = self for arg in args - ret = _Mul(ret, arg) + if isa(arg, SymbolicNode) + ret = _Mul(ret, arg) + else + ret = _MulScalar(ret, scalar=MX_float(arg)) + end end ret end +function .*(arg :: Real, self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) + .*(self, arg, args...) 
+end +function *(arg :: Real, self :: SymbolicNode) + _MulScalar(self, scalar=arg) +end +function *(self :: SymbolicNode, arg :: Real) + *(arg, self) +end -import Base: ./ +import Base: ./, / function ./(self :: SymbolicNode, arg :: SymbolicNode) _Div(self, arg) end +function ./(self :: SymbolicNode, arg :: Real) + _DivScalar(self, scalar=MX_float(arg)) +end +function /(self :: SymbolicNode, arg :: Real) + ./(self, arg) +end +function ./(arg :: Real, self :: SymbolicNode) + _DivScalar(self, scalar=arg, scalar_on_right=true) +end + +import Base: .^, ^ +function .^(self :: SymbolicNode, pow :: SymbolicNode) + _Power(self, pow) +end +function .^(self :: SymbolicNode, pow :: AbstractFloat) + _PowerScalar(self, scalar=pow) +end +function ^(self :: SymbolicNode, pow :: AbstractFloat) + .^(self, pow) +end function _compose!(node :: SymbolicNode; kwargs...) name = char_p(0) @@ -519,4 +579,3 @@ macro chain(layers) end return Expr(:block, exprs...) end - diff --git a/test/unittest/operator.jl b/test/unittest/operator.jl new file mode 100644 index 000000000000..2284b1080c7e --- /dev/null +++ b/test/unittest/operator.jl @@ -0,0 +1,36 @@ +module TestOperator +using MXNet +using Base.Test + +using ..Main: rand_dims, reldiff + +function test_scalar_op() + data = mx.Variable(:data) + shape = rand_dims() + info("Operator::scalar_op::dims = $shape") + + data_jl = 5ones(shape) + arr_data = mx.copy(data_jl, mx.cpu()) + arr_grad = mx.zeros(shape) + + test = 2 ./ (4 - ((1+data+1)*2/5) - 0.2) + exec_test = mx.bind(test, mx.cpu(), [arr_data], args_grad=[arr_grad]) + mx.forward(exec_test) + out = copy(exec_test.outputs[1]) + jl_out1 = (4 - ((1+data_jl+1)*2/5) - 0.2) + jl_out = 2 ./ jl_out1 + @test reldiff(copy(out), jl_out) < 1e-6 + + out_grad = 2mx.ones(shape) + jl_grad = 2copy(out_grad) / 5 + jl_grad = 2jl_grad ./ (jl_out1 .^ 2) + mx.backward(exec_test, out_grad) + @test reldiff(copy(arr_grad), jl_grad) < 1e-6 +end + 
+################################################################################ +# Run tests +################################################################################ +test_scalar_op() + +end From 62c97032a7ffddc066ce4ab94cddb32f4dbffa09 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 19 Nov 2015 09:44:19 +0900 Subject: [PATCH 249/630] rework Xavier to be more flexible --- src/initializer.jl | 43 +++++++++++++++++-------------------------- 1 file changed, 17 insertions(+), 26 deletions(-) diff --git a/src/initializer.jl b/src/initializer.jl index 01744a760448..8263c06ba496 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -113,26 +113,21 @@ end Several different ways of calculating the variance are given in the literature or are used by various libraries. - - original [Bengio and Glorot 2010]: σ² = 2 / (in + out) - - msra [K. He, X. Zhang, S. Ren, and J. Sun 2015]: σ² = 2 / in - - caffe_avg: 6 / (in + out) - - caffe_in: 3 / in - - caffe_out: 3 / out - - mxnet: 3 / (in + out) - - Distribution and variant can be chosen by enums (prefixed by ``xv_``). - As an example take ``mx.XavierInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet)``, - which is currently the default. + - [Bengio and Glorot 2010]: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1)`` + - [K. He, X. Zhang, S. Ren, and J. 
Sun 2015]: ``mx.XavierInitializer(distribution = mx.xv_gaussian, regularization = mx.xv_in, magnitude = 2)`` + - caffe_avg: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 3)`` =# @enum XavierDistribution xv_uniform xv_normal -@enum XavierVariant xv_original xv_mrsa xv_caffe_avg xv_caffe_in zv_caffe_out xv_mxnet +@enum XavierRegularization xv_avg xv_in xv_out immutable XavierInitializer <: AbstractInitializer distribution :: XavierDistribution - variant :: XavierVariant + regularization :: XavierRegularization + magnitude :: Float64 end -XavierInitializer(; distribution = xv_uniform, variant = xv_mxnet) = XavierInitializer(distribution, variant) + +XavierInitializer(; distribution = xv_uniform, regularization = xv_avg, magnitude = 3.0) = XavierInitializer(distribution, regularization, magnitude) function _init_weight(self :: XavierInitializer, name :: Base.Symbol, array :: NDArray) dims = size(array) @@ -145,19 +140,15 @@ function _init_weight(self :: XavierInitializer, name :: Base.Symbol, array :: N func(σ, data) = randn!(0.0, σ, data) end - if self.variant == xv_caffe_avg - var = 6 / (fan_in + fan_out) - elseif self.variant == xv_caffe_in - var = 3 / fan_in - elseif self.variant == xv_caffe_out - var = 3 / fan_out - elseif self.variant == xv_mrsa - var = 2 / fan_in - elseif self.variant == xv_original - var = 2 / (fan_in + fan_out) - elseif self.variant == xv_mxnet - var = 3 / (fan_in + fan_out) + if self.regularization == xv_avg + factor = (fan_in + fan_out) / 2 + elseif self.regularization == xv_in + factor = fan_in + elseif self.regularization == xv_out + factor = fan_out end - func(√var, array) + σ = √(self.magnitude / factor) + + func(σ, array) end From 6081fcec039131d24aa0b38fd00bfcf92007e864 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 20 Nov 2015 17:54:08 +0900 Subject: [PATCH 250/630] xavier: rebuild documentation --- docs/api/initializer.rst | 13 +++---------- docs/api/io.rst | 4 ++-- 
docs/api/symbolic-node.rst | 35 ----------------------------------- 3 files changed, 5 insertions(+), 47 deletions(-) diff --git a/docs/api/initializer.rst b/docs/api/initializer.rst index 30b2df35664a..7123507f7ee9 100644 --- a/docs/api/initializer.rst +++ b/docs/api/initializer.rst @@ -73,16 +73,9 @@ Built-in initializers Several different ways of calculating the variance are given in the literature or are used by various libraries. - - original [Bengio and Glorot 2010]: σ² = 2 / (in + out) - - msra [K. He, X. Zhang, S. Ren, and J. Sun 2015]: σ² = 2 / in - - caffe_avg: 6 / (in + out) - - caffe_in: 3 / in - - caffe_out: 3 / out - - mxnet: 3 / (in + out) - - Distribution and variant can be chosen by enums (prefixed by ``xv_``). - As an example take ``mx.XavierInitializer(distribution = mx.xv_normal, variant = mx.xv_mxnet)``, - which is currently the default. + - [Bengio and Glorot 2010]: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1)`` + - [K. He, X. Zhang, S. Ren, and J. Sun 2015]: ``mx.XavierInitializer(distribution = mx.xv_gaussian, regularization = mx.xv_in, magnitude = 2)`` + - caffe_avg: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 3)`` diff --git a/docs/api/io.rst b/docs/api/io.rst index f1ab959be6f2..6186db52ecc1 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -324,7 +324,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -467,7 +467,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :return: the constructed :class:`MXDataProvider`. 
diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 30420da31f13..d847c6a8bd38 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -872,41 +872,6 @@ Internal APIs -.. function:: _Power(...) - - Perform an elementwise power. - - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - - :return: the constructed :class:`SymbolicNode`. - - - - - -.. function:: _PowerScalar(...) - - Perform an elementwise power. - - :param array: Input array operand to the operation. - :type array: SymbolicNode - - - :param scalar: scalar value. - :type scalar: float, required - - - :param scalar_on_right: scalar operand is on the right. - :type scalar_on_right: boolean, optional, default=False - - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - - :return: the constructed :class:`SymbolicNode`. - - - - - From 56f3eef1b9f8d6c4c37a83e2500d0bd8ac4e59c5 Mon Sep 17 00:00:00 2001 From: yeesian Date: Fri, 20 Nov 2015 16:14:25 -0500 Subject: [PATCH 251/630] update README --- README.md | 36 ++++++++++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 8bf867f39964..08cf95f87de9 100644 --- a/README.md +++ b/README.md @@ -17,8 +17,12 @@ Here is an exmple of how training a simple 3-layer MLP on MNIST looks like: ```julia using MXNet -mlp = @mx.chain mx.Variable(:data) => - mx.MLP([128, 64, 10]) => +mlp = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=128) => + mx.Activation(name=:relu1, act_type=:relu) => + mx.FullyConnected(name=:fc2, num_hidden=64) => + mx.Activation(name=:relu2, act_type=:relu) => + mx.FullyConnected(name=:fc3, num_hidden=10) => mx.SoftmaxOutput(name=:softmax) # data provider @@ -29,11 +33,35 @@ train_provider, eval_provider = get_mnist_providers(batch_size) # setup model model = mx.FeedForward(mlp, context=mx.cpu()) -# optimizer -optimizer = 
mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) +# optimization algorithm +optimizer = mx.SGD(lr=0.1, momentum=0.9) # fit parameters mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) ``` +You can also predict using the `model` in the following way: + +```julia +probs = mx.predict(model, eval_provider) + +# collect all labels from eval data +labels = Array[] +for batch in eval_provider + push!(labels, copy(mx.get(eval_provider, batch, :softmax_label))) +end +labels = cat(1, labels...) + +# Now we use compute the accuracy +correct = 0 +for i = 1:length(labels) + # labels are 0...9 + if indmax(probs[:,i]) == labels[i]+1 + correct += 1 + end +end +accuracy = 100correct/length(labels) +println(mx.format("Accuracy on eval set: {1:.2f}%", accuracy)) +``` + For more details, please refer to the [document](http://mxnetjl.readthedocs.org/) and [examples](examples). From e848e8d17d7452f4b846dc5d92485c2fd37846b6 Mon Sep 17 00:00:00 2001 From: yeesian Date: Fri, 20 Nov 2015 16:20:47 -0500 Subject: [PATCH 252/630] spelling :bee: --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 08cf95f87de9..f60ca9f460a0 100644 --- a/README.md +++ b/README.md @@ -7,12 +7,12 @@ [![Join the chat at https://gitter.im/dmlc/mxnet](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/dmlc/mxnet?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) -MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of features include: +MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. 
Some highlight of its features include: * Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. * Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. -Here is an exmple of how training a simple 3-layer MLP on MNIST looks like: +Here is an example of how training a simple 3-layer MLP on MNIST looks like: ```julia using MXNet @@ -64,4 +64,4 @@ accuracy = 100correct/length(labels) println(mx.format("Accuracy on eval set: {1:.2f}%", accuracy)) ``` -For more details, please refer to the [document](http://mxnetjl.readthedocs.org/) and [examples](examples). +For more details, please refer to the [documentation](http://mxnetjl.readthedocs.org/) and [examples](examples). From 7d8de6bf86a87e9387e0824c0425c6a4b980a41e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 21 Nov 2015 22:26:14 -0500 Subject: [PATCH 253/630] fix API change: scalar_on_right -> on_left --- src/symbolic-node.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index c1b604432718..62bf24d18b2b 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -286,7 +286,7 @@ function .-(self :: SymbolicNode, arg :: Real) end function -(arg :: Real, self :: SymbolicNode) - _MinusScalar(self, scalar=arg, scalar_on_right=true) + _MinusScalar(self, scalar=arg, scalar_on_left=true) end function .-(arg :: Real, self :: SymbolicNode) -(arg, self) @@ -329,7 +329,7 @@ function /(self :: SymbolicNode, arg :: Real) ./(self, arg) end function ./(arg :: Real, self :: SymbolicNode) - _DivScalar(self, scalar=arg, scalar_on_right=true) + _DivScalar(self, scalar=arg, scalar_on_left=true) end import Base: .^, ^ From 46257d28b77cc607856ee11fcb12fd6e60559adb Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Mon, 23 Nov 2015 18:04:31 +0900 Subject: [PATCH 254/630] basic interface for setting and getting attributes --- src/symbolic-node.jl | 29 
+++++++++++++++++++++++++++++ test/unittest/symbolic-node.jl | 9 +++++++++ 2 files changed, 38 insertions(+) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 62bf24d18b2b..d7584d73baa1 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -122,6 +122,35 @@ function get_internals(self :: SymbolicNode) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end +#=doc +.. function:: get_attr(self :: SymbolicNode, key :: Symbol) + + Get attribute attached to this :class:`SymbolicNode` belonging to key. +=# +function get_attr(self :: SymbolicNode, key :: Symbol) + key_s = bytestring(string(key)) + ref_out = Ref{Cstring}() + ref_success = Ref{Cint}(-1) + @mxcall(:MXSymbolGetAttr, (MX_handle, Cstring, Ref{Cstring}, Ref{Cint}), self, key_s, ref_out, ref_success) + if ref_success[] == 1 + return bytestring(ref_out[]) + else + throw(KeyError(key)) + end +end + +#=doc +.. function:: set_attr(self:: SymbolicNode, key :: Symbol, value :: AbstractString) + + Set the attribute key to value for this :class:`SymbolicNode`. +=# +function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) + key_s = bytestring(string(key)) + value_s = bytestring(value) + + @mxcall(:MXSymbolSetAttr, (MX_handle, Cstring, Cstring), self, key_s, value_s) +end + #=doc .. 
function:: Variable(name :: Union{Base.Symbol, AbstractString}) diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 33948adfcd40..9dabcf281c89 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -81,6 +81,14 @@ function test_saveload() rm(fname) end +function test_attrs() + info("SymbolicNode::Attributes") + + data = mx.Variable(:data) + + mx.set_attr(data, :test, "1.0") + @test mx.get_attr(data, :test) == "1.0" +end ################################################################################ # Run tests @@ -91,5 +99,6 @@ test_compose() test_infer_shape() test_infer_shape_error() test_saveload() +test_attrs() end From e19ca8ee40c89f43b4f3da31d4cc540f27992037 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 23 Nov 2015 09:05:46 -0500 Subject: [PATCH 255/630] add warning to discourage users from calling set_attr directly --- docs/api/io.rst | 4 +- docs/api/symbolic-node.rst | 103 ++++++++++++++++++++++++++++++++++--- src/symbolic-node.jl | 7 +++ 3 files changed, 104 insertions(+), 10 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index 6186db52ecc1..f1ab959be6f2 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -324,7 +324,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -467,7 +467,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :return: the constructed :class:`MXDataProvider`. 
diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index d847c6a8bd38..e1f301783051 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -80,6 +80,27 @@ Symbolic API +.. function:: get_attr(self :: SymbolicNode, key :: Symbol) + + Get attribute attached to this :class:`SymbolicNode` belonging to key. + + + + +.. function:: set_attr(self:: SymbolicNode, key :: Symbol, value :: AbstractString) + + Set the attribute key to value for this :class:`SymbolicNode`. + + .. warning:: + + It is encouraged not to call this function directly, unless you know exactly what you are doing. The + recommended way of setting attributes is when creating the :class:`SymbolicNode`. Changing + the attributes of a :class:`SymbolicNode` that is already been used somewhere else might + cause unexpected behavior and inconsistency. + + + + .. function:: Variable(name :: Union{Base.Symbol, AbstractString}) Create a symbolic variable with the given name. This is typically used as a placeholder. @@ -220,6 +241,10 @@ Public APIs :param num_args: Number of inputs to be concated. :type num_args: int, required + + :param dim: the dimension to be concated. + :type dim: int, optional, default='1' + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :return: the constructed :class:`SymbolicNode`. @@ -366,6 +391,33 @@ Public APIs +.. function:: Embedding(...) + + Get embedding for one-hot input + + :param data: Input data to the EmbeddingOp. + :type data: SymbolicNode + + + :param weight: Enbedding weight matrix. + :type weight: SymbolicNode + + + :param input_dim: input dim of one-hot encoding + :type input_dim: int, required + + + :param output_dim: output dim of embedding + :type output_dim: int, required + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: Flatten(...) 
Flatten input @@ -737,8 +789,8 @@ Internal APIs :type scalar: float, required - :param scalar_on_right: scalar operand is on the right. - :type scalar_on_right: boolean, optional, default=False + :param scalar_on_left: scalar operand is on the left. + :type scalar_on_left: boolean, optional, default=False :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. @@ -772,8 +824,8 @@ Internal APIs :type scalar: float, required - :param scalar_on_right: scalar operand is on the right. - :type scalar_on_right: boolean, optional, default=False + :param scalar_on_left: scalar operand is on the left. + :type scalar_on_left: boolean, optional, default=False :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. @@ -807,8 +859,8 @@ Internal APIs :type scalar: float, required - :param scalar_on_right: scalar operand is on the right. - :type scalar_on_right: boolean, optional, default=False + :param scalar_on_left: scalar operand is on the left. + :type scalar_on_left: boolean, optional, default=False :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. @@ -861,8 +913,43 @@ Internal APIs :type scalar: float, required - :param scalar_on_right: scalar operand is on the right. - :type scalar_on_right: boolean, optional, default=False + :param scalar_on_left: scalar operand is on the left. + :type scalar_on_left: boolean, optional, default=False + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: _Power(...) + + Perform an elementwise power. + + :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: _PowerScalar(...) + + Perform an elementwise power. + + :param array: Input array operand to the operation. 
+ :type array: SymbolicNode + + + :param scalar: scalar value. + :type scalar: float, required + + + :param scalar_on_left: scalar operand is on the left. + :type scalar_on_left: boolean, optional, default=False :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index d7584d73baa1..c4eb3b2f1237 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -143,6 +143,13 @@ end .. function:: set_attr(self:: SymbolicNode, key :: Symbol, value :: AbstractString) Set the attribute key to value for this :class:`SymbolicNode`. + + .. warning:: + + It is encouraged not to call this function directly, unless you know exactly what you are doing. The + recommended way of setting attributes is when creating the :class:`SymbolicNode`. Changing + the attributes of a :class:`SymbolicNode` that is already been used somewhere else might + cause unexpected behavior and inconsistency. =# function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) key_s = bytestring(string(key)) From 266d9462926d9d2cb48d6dc3693126c0382ffcd6 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 24 Nov 2015 10:06:36 +0900 Subject: [PATCH 256/630] attr interface for SymbolicNodes --- src/symbolic-node.jl | 35 +++++++++++++++++++++++++++------- test/unittest/symbolic-node.jl | 17 ++++++++++++++++- 2 files changed, 44 insertions(+), 8 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index c4eb3b2f1237..0d7f5937f88a 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -126,6 +126,7 @@ end .. function:: get_attr(self :: SymbolicNode, key :: Symbol) Get attribute attached to this :class:`SymbolicNode` belonging to key. + :return: The value belonging to key as a :class:`Nullable`. 
=# function get_attr(self :: SymbolicNode, key :: Symbol) key_s = bytestring(string(key)) @@ -133,9 +134,9 @@ function get_attr(self :: SymbolicNode, key :: Symbol) ref_success = Ref{Cint}(-1) @mxcall(:MXSymbolGetAttr, (MX_handle, Cstring, Ref{Cstring}, Ref{Cint}), self, key_s, ref_out, ref_success) if ref_success[] == 1 - return bytestring(ref_out[]) + return Nullable{ByteString}(bytestring(ref_out[])) else - throw(KeyError(key)) + return Nullable{ByteString}() end end @@ -159,15 +160,22 @@ function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) end #=doc -.. function:: Variable(name :: Union{Base.Symbol, AbstractString}) +.. function:: Variable(name :: Union{Symbol, AbstractString}) Create a symbolic variable with the given name. This is typically used as a placeholder. For example, the data node, acting as the starting point of a network architecture. + + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`Variable`. =# -function Variable(name :: Union{Base.Symbol, AbstractString}) +function Variable(name :: Union{Symbol, AbstractString}; attrs = Dict()) + attrs = convert(Dict{Symbol, AbstractString}, attrs) hdr_ref = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateVariable, (char_p, Ref{MX_handle}), name, hdr_ref) - SymbolicNode(MX_SymbolHandle(hdr_ref[])) + node = SymbolicNode(MX_SymbolHandle(hdr_ref[])) + for (k, v) in attrs + set_attr(node, k, v) + end + node end #=doc @@ -489,7 +497,8 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) f_desc *= "This function support variable length positional :class:`SymbolicNode` inputs.\n\n" end f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional.\n\n" + f_desc *= ":param Symbol name: The name of the :class:`SymbolicNode`. (e.g. 
`:my_symbol`), optional.\n" + f_desc *= ":param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`.\n\n" f_desc *= ":return: the constructed :class:`SymbolicNode`.\n\n" return (func_name, f_desc) end @@ -506,7 +515,8 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) param_keys = AbstractString[] param_vals = AbstractString[] - symbol_kws = Dict{Base.Symbol, SymbolicNode}() + symbol_kws = Dict{Symbol, SymbolicNode}() + attrs = Dict{Symbol, AbstractString}() $(if kv_nargs != symbol("") quote @@ -521,6 +531,12 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) if k == :name; continue; end if isa(v, SymbolicNode) symbol_kws[k] = v + elseif k == :attrs + if isa(v, Dict) + attrs = convert(Dict{Symbol, AbstractString}, v) + else + throw(ArgumentError("attrs needs to be a Dictionary")) + end else push!(param_keys, string(k)) push!(param_vals, dump_mx_param(v)) @@ -550,6 +566,11 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) hint = lowercase($func_name_s) name = get!(DEFAULT_NAME_MANAGER, name, hint) + # set attrs + for (k, v) in attrs + set_attr(node, k, v) + end + if length(args) != 0 _compose!(node, name, args...) else diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 9dabcf281c89..e5a5531845ec 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -86,8 +86,23 @@ function test_attrs() data = mx.Variable(:data) + result = mx.get_attr(data, :test) + @test isnull(result) mx.set_attr(data, :test, "1.0") - @test mx.get_attr(data, :test) == "1.0" + result = mx.get_attr(data, :test) + @test !isnull(result) + @test get(result) == "1.0" + + data2 = mx.Variable(:data2, attrs = Dict(:test => "hallo!")) + @test get(mx.get_attr(data2, :test)) == "hallo!" 
+ + conv = mx.Convolution(data = data2, kernel = (1,1), num_filter = 1, attrs = Dict(:a => "a", :π => "π")) + @test isnull(mx.get_attr(conv, :b)) + @test get(mx.get_attr(conv, :a)) == "a" + @test get(mx.get_attr(conv, :π)) == "π" + + @test_throws MethodError mx.Variable(:data3, attrs = Dict(:test => "1.0", :test2 => 1.0)) + @test_throws MethodError mx.Convolution(data=data2, kernel = (1,1), num_filter = 1, attrs = Dict(:test => "1.0", :test2 => 1.0)) end ################################################################################ From 247ec44da550a7340b7657d7fcf4f4f13def562e Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 24 Nov 2015 11:00:20 +0900 Subject: [PATCH 257/630] attrs: Doc rebuild --- docs/api/io.rst | 4 +- docs/api/ndarray.rst | 11 +++ docs/api/symbolic-node.rst | 145 +++++++++++++++++++++++++++---------- 3 files changed, 121 insertions(+), 39 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index f1ab959be6f2..6186db52ecc1 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -324,7 +324,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -467,7 +467,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :return: the constructed :class:`MXDataProvider`. diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index 270d85bab837..c5baf5b75301 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -433,6 +433,17 @@ object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the out Public APIs ^^^^^^^^^^^ +.. function:: abs(...) 
+ + Take absolute value of the src + + :param src: Source input to the function + :type src: NDArray + + + + + .. function:: choose_element_0index(...) Choose one element from each line(row for python, column for R/Julia) in lhs according to index indicated by rhs. This function assume rhs uses 0-based index. diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index e1f301783051..3e030378c3ba 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -83,6 +83,7 @@ Symbolic API .. function:: get_attr(self :: SymbolicNode, key :: Symbol) Get attribute attached to this :class:`SymbolicNode` belonging to key. + :return: The value belonging to key as a :class:`Nullable`. @@ -101,11 +102,13 @@ Symbolic API -.. function:: Variable(name :: Union{Base.Symbol, AbstractString}) +.. function:: Variable(name :: Union{Symbol, AbstractString}) Create a symbolic variable with the given name. This is typically used as a placeholder. For example, the data node, acting as the starting point of a network architecture. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`Variable`. + @@ -186,7 +189,8 @@ Public APIs :param act_type: Activation function to be applied. :type act_type: {'relu', 'sigmoid', 'tanh'}, required - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -209,7 +213,8 @@ Public APIs :param momentum: Momentum for moving average :type momentum: float, optional, default=0.9 - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
+ :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -224,7 +229,8 @@ Public APIs :param data: Input data. :type data: SymbolicNode - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -245,7 +251,8 @@ Public APIs :param dim: the dimension to be concated. :type dim: int, optional, default='1' - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -296,7 +303,8 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -347,7 +355,8 @@ Public APIs :param no_bias: Whether to disable bias parameter. :type no_bias: boolean, optional, default=True - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. 
@@ -366,7 +375,8 @@ Public APIs :param p: Fraction of the input that gets dropped out at training time :type p: float, optional, default=0.5 - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -383,7 +393,8 @@ Public APIs :param num_args: Number of inputs to be sumed. :type num_args: int, required - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -410,7 +421,8 @@ Public APIs :param output_dim: output dim of embedding :type output_dim: int, required - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -425,7 +437,8 @@ Public APIs :param data: Input data to flatten. :type data: SymbolicNode - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -456,7 +469,8 @@ Public APIs :param no_bias: Whether to disable bias parameter. 
:type no_bias: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -487,7 +501,8 @@ Public APIs :param nsize: normalization window width in elements. :type nsize: int (non-negative), required - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -518,7 +533,8 @@ Public APIs :param upper_bound: Upper bound of random slope. (For rrelu only) :type upper_bound: float, optional, default=0.334 - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -537,7 +553,8 @@ Public APIs :param label: Input label to function. :type label: SymbolicNode - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -557,7 +574,8 @@ Public APIs :param label: Input label to function. :type label: SymbolicNode - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
+ :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -588,7 +606,8 @@ Public APIs :param pad: pad for pooling: (y, x) :type pad: Shape(tuple), optional, default=(0, 0) - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -607,7 +626,8 @@ Public APIs :param target_shape: Target new shape :type target_shape: Shape(tuple), required - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -622,7 +642,8 @@ Public APIs :param num_outputs: Number of outputs to be sliced. :type num_outputs: int, required - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -645,7 +666,8 @@ Public APIs :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
+ :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -668,7 +690,8 @@ Public APIs :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -691,7 +714,24 @@ Public APIs :param dim2: the second axis to be swapped. :type dim2: int (non-negative), optional, default=0 - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: abs(...) + + Take absolute value of the src + + :param src: Source symbolic input to the function + :type src: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -706,7 +746,8 @@ Public APIs :param src: Source symbolic input to the function :type src: SymbolicNode - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
+ :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -721,7 +762,8 @@ Public APIs :param src: Source symbolic input to the function :type src: SymbolicNode - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -736,7 +778,8 @@ Public APIs :param src: Source symbolic input to the function :type src: SymbolicNode - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -751,7 +794,8 @@ Public APIs :param src: Source symbolic input to the function :type src: SymbolicNode - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -769,7 +813,8 @@ Internal APIs Perform an elementwise div. - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -792,7 +837,8 @@ Internal APIs :param scalar_on_left: scalar operand is on the left. 
:type scalar_on_left: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -804,7 +850,8 @@ Internal APIs Perform an elementwise minus. - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -827,7 +874,8 @@ Internal APIs :param scalar_on_left: scalar operand is on the left. :type scalar_on_left: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -839,7 +887,8 @@ Internal APIs Perform an elementwise mul. - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -862,7 +911,24 @@ Internal APIs :param scalar_on_left: scalar operand is on the left. :type scalar_on_left: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. 
`:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: _NDArray(...) + + Stub for implementing an operator implemented in native frontend language with ndarray. + + :param info: + :type info: , required + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -881,7 +947,8 @@ Internal APIs :param need_top_grad: Whether this layer needs out grad for backward. Should be false for loss layers. :type need_top_grad: boolean, optional, default=True - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -893,7 +960,8 @@ Internal APIs Perform an elementwise plus. - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -916,7 +984,8 @@ Internal APIs :param scalar_on_left: scalar operand is on the left. :type scalar_on_left: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. 
:return: the constructed :class:`SymbolicNode`. @@ -928,7 +997,8 @@ Internal APIs Perform an elementwise power. - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. @@ -951,7 +1021,8 @@ Internal APIs :param scalar_on_left: scalar operand is on the left. :type scalar_on_left: boolean, optional, default=False - :param Base.Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. :return: the constructed :class:`SymbolicNode`. From c66531f9ea70dfe38fbab7fc7814186829a1862d Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 26 Nov 2015 10:13:56 +0900 Subject: [PATCH 258/630] adds LearningRate.Inv from Mocha --- src/optimizer.jl | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/src/optimizer.jl b/src/optimizer.jl index 0a3f23bc96ba..887fd9ab59e0 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -106,8 +106,27 @@ function Exp(base_lr::Real; gamma::Real=0.9, decay_on_iteration::Bool=false) end get_learning_rate(self :: Exp, state :: OptimizationState) = self.learning_rate * self.gamma ^ (self.on_iteration ? state.curr_iter : state.curr_epoch) +#=doc +.. class:: LearningRate.Inv -end # module LearningRate + :math:`\eta_t = \eta_0 * (1 + \gamma * t)^(-power)`. + Here :math:`t` is the epoch count, or the iteration count if ``decay_on_iteration`` + is set to true. 
+=# +type Inv <: AbstractLearningRateScheduler + learning_rate :: Float64 + gamma :: Float64 + power :: Float64 + on_iteration :: Bool +end +function Inv(base_lr :: Real; gamma::Real=0.9, power::Real=0.5, decay_on_iteration::Bool=false) + @assert(0 < gamma < 1) + @assert(0 <= power) + Inv(Float64(base_lr), Float64(gamma), Float64(power), decay_on_iteration) +end +get_learning_rate(self :: Inv, state :: OptimizationState) = + self.learning_rate * ( 1 + self.gamma * (self.on_iteration ? state.curr_iter : state.curr_epoch)) ^ (-self.power) +end# module LearningRate ################################################################################ function get_lr_scheduler(scheduler :: Any, lr :: Real) if isa(scheduler, AbstractLearningRateScheduler) From 74cc5baa1104a851a8a89bcd22e6ba1c1614f8b7 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 Nov 2015 17:41:05 +0900 Subject: [PATCH 259/630] Fixes usuage of floor https://github.com/dmlc/mxnet/pull/717 a PR upstream added floor/ceil/round, which are now imported as operators, causing problems with the native Julia functions. Luckily there is only one usage. 
--- src/model.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/model.jl b/src/model.jl index 80f0a9644778..8cb5e5bc39df 100644 --- a/src/model.jl +++ b/src/model.jl @@ -41,7 +41,7 @@ end """ function _split_inputs(batch_size :: Int, n_split :: Int) @assert(batch_size >= n_split) - per_split = floor(Int, batch_size / n_split) + per_split = Base.floor(Int, batch_size / n_split) counts = Base.zeros(Int, n_split)+per_split extra = batch_size - sum(counts) counts[1:extra] += 1 From f094a11cbbd1deb15a516479fdd3e271c8dd0385 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 29 Nov 2015 23:41:49 -0500 Subject: [PATCH 260/630] add missing _init_default --- src/initializer.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/initializer.jl b/src/initializer.jl index 8263c06ba496..0d2a10586f57 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -56,6 +56,10 @@ function _init_zero(self :: AbstractInitializer, name :: Base.Symbol, array :: N array[:] = 0 end +function _init_default(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + error("Do not know how to init $name") +end + #=doc Built-in initializers --------------------- From 2ff90c364f25684bf5d47a23795524f0ae757f80 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 1 Dec 2015 21:20:07 +0900 Subject: [PATCH 261/630] imports operators into the namespace to prevent issues --- src/MXNet.jl | 3 +++ src/model.jl | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/MXNet.jl b/src/MXNet.jl index 42c9e45f477e..c5056320791c 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -8,6 +8,9 @@ export mx module mx using Formatting +# Functions from base that we can safely extend and that are defined by libmxnet. 
+import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm + include("base.jl") include("context.jl") diff --git a/src/model.jl b/src/model.jl index 8cb5e5bc39df..80f0a9644778 100644 --- a/src/model.jl +++ b/src/model.jl @@ -41,7 +41,7 @@ end """ function _split_inputs(batch_size :: Int, n_split :: Int) @assert(batch_size >= n_split) - per_split = Base.floor(Int, batch_size / n_split) + per_split = floor(Int, batch_size / n_split) counts = Base.zeros(Int, n_split)+per_split extra = batch_size - sum(counts) counts[1:extra] += 1 From 67c8bb2d6e319d7a95fc574a76736e5c20d05523 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 2 Dec 2015 00:42:26 -0500 Subject: [PATCH 262/630] prepare for v0.0.6 (c.f. #42) --- NEWS.md | 6 ++++++ docs/conf.py | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/NEWS.md b/NEWS.md index d50bd5ed9300..dd95dbb5ffec 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,9 @@ +$ v0.0.6 (2015.12.02) + +* Variants of Xaiver initializers (@vchuravy) +* More arithmetic operators on symbolic nodes +* Basic interface for symbolic node attributes (@vchuravy) + # v0.0.5 (2015.11.14) * char-lstm example. diff --git a/docs/conf.py b/docs/conf.py index c2a405765352..32e0d28f35ba 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -56,9 +56,9 @@ # built documents. # # The short X.Y version. -version = '0.0.5' +version = '0.0.6' # The full version, including alpha/beta/rc tags. -release = '0.0.5' +release = '0.0.6' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
From aa1b600cd9683bffe82d26f3d5d29ec928ea47c2 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 3 Dec 2015 10:38:44 -0500 Subject: [PATCH 263/630] backport @defstruct from SimpleStructs.jl --- src/base.jl | 82 ++++++++++++++++++++++++++++++++++-------- src/model.jl | 2 +- src/optimizers/adam.jl | 2 +- src/optimizers/sgd.jl | 2 +- 4 files changed, 71 insertions(+), 17 deletions(-) diff --git a/src/base.jl b/src/base.jl index 0988400b9d31..de48241ac7e9 100644 --- a/src/base.jl +++ b/src/base.jl @@ -161,18 +161,48 @@ is available. The macro will define a constructor that could accept the keyword arguments. """ -macro defstruct(name, super_name, fields) - @assert fields.head == :tuple - fields = fields.args +macro defstruct(name, fields) + _defstruct_impl(false, name, fields) +end + +"""A convenient macro to define immutable structs. The same as +`@defstruct` except that the defined type is immutable. +""" +macro defimmutable(name, fields) + _defstruct_impl(true, name, fields) +end + +"""Internal use only, this value is used to indicate a required value +is not specified. 
+""" +immutable __Undefined +end + +function _defstruct_impl(is_immutable, name, fields) + if isa(fields, Expr) && fields.head == :tuple + fields = fields.args + else + fields = [fields] + end @assert length(fields) > 0 - name = esc(name) - super_name = esc(super_name) + + if isa(name, Symbol) + name = esc(name) + super_name = :Any + else + @assert(isa(name, Expr) && name.head == :comparison && length(name.args) == 3 && name.args[2] == :(<:), + "name must be of form 'Name <: SuperType'") + @assert(isa(name.args[1], Symbol) && isa(name.args[3], Symbol)) + super_name = esc(name.args[3]) + name = esc(name.args[1]) + end field_defs = Array(Expr, length(fields)) # :(field2 :: Int) field_names = Array(Expr, length(fields)) # :field2 field_defaults = Array(Expr, length(fields)) # :(field2 = 0) field_types = Array(Expr, length(fields)) # Int field_asserts = Array(Expr, length(fields)) # :(field2 >= 0) + required_field = Symbol[] for i = 1:length(fields) field = fields[i] @@ -180,16 +210,30 @@ macro defstruct(name, super_name, fields) field_asserts[i] = esc(field.args[2]) field = field.args[1] end - field_defs[i] = esc(field.args[1]) - field_names[i] = esc(field.args[1].args[1]) - field_types[i] = esc(field.args[1].args[2]) - field_defaults[i] = Expr(:kw, field.args[1].args[1], esc(field.args[2])) + if field.head == :(=) + fname = field.args[1].args[1] + field_defs[i] = esc(field.args[1]) + field_names[i] = esc(fname) + field_types[i] = esc(field.args[1].args[2]) + field_defaults[i] = Expr(:kw, fname, esc(field.args[2])) + else + # no default value provided, required field + fname = field.args[1] + field_defs[i] = esc(field) + field_names[i] = esc(fname) + field_types[i] = esc(field.args[2]) + field_defaults[i] = Expr(:kw, fname, __Undefined()) + push!(required_field, fname) + end end # body of layer type, defining fields type_body = Expr(:block, field_defs...) 
# constructor + requires = map(required_field) do fname + :(@assert(!isa($fname, __Undefined), "value for " * string($fname) * " is required")) + end converts = map(zip(field_names, field_types)) do param f_name, f_type = param :($f_name = convert($f_type, $f_name)) @@ -198,15 +242,25 @@ macro defstruct(name, super_name, fields) :(@assert($(field_asserts[i]))) end construct = Expr(:call, name, field_names...) - ctor_body = Expr(:block, converts..., asserts..., construct) + ctor_body = Expr(:block, requires..., converts..., asserts..., construct) ctor_def = Expr(:call, name, Expr(:parameters, field_defaults...)) ctor = Expr(:(=), ctor_def, ctor_body) - quote - type $(name) <: $super_name - $type_body + if is_immutable + quote + immutable $(name) <: $(super_name) + $type_body + end + + $ctor end + else + quote + type $(name) <: $(super_name) + $type_body + end - $ctor + $ctor + end end end diff --git a/src/model.jl b/src/model.jl index 80f0a9644778..009471c785f2 100644 --- a/src/model.jl +++ b/src/model.jl @@ -260,7 +260,7 @@ function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params : return (kv, update_on_kvstore) end -@defstruct TrainingOptions Any ( +@defstruct TrainingOptions ( initializer :: AbstractInitializer = UniformInitializer(0.01), n_epoch :: Int = 10, eval_data :: Union{Void, AbstractDataProvider} = nothing, diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index 95eaefe275a1..6b17f1b3e152 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -1,4 +1,4 @@ -@defstruct ADAMOptions AbstractOptimizerOptions ( +@defstruct ADAMOptions <: AbstractOptimizerOptions ( (lr :: Real = 0.001, lr > 0), (grad_clip :: Real = 0, grad_clip >= 0), (weight_decay :: Real = 0.00001, weight_decay >= 0), diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index f3d1b10a66e4..fb6bf195fdb5 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -1,4 +1,4 @@ -@defstruct SGDOptions AbstractOptimizerOptions ( +@defstruct 
SGDOptions <: AbstractOptimizerOptions ( (lr :: Real = 0.01, lr > 0), (momentum :: Real = 0.0, momentum >= 0), (grad_clip :: Real = 0, grad_clip >= 0), From b51389884da2f2865bc91bf43b1f7143b41c99bc Mon Sep 17 00:00:00 2001 From: Simon Date: Sat, 5 Dec 2015 12:45:36 +0100 Subject: [PATCH 264/630] use download instead of wget This works better on windows. Unzip must be installed though: http://sourceforge.net/projects/gnuwin32/?source=typ_redirect Maybe use one of the Julia zip packages? --- src/util.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/util.jl b/src/util.jl index be27d1e2310c..1e9b0853dc5b 100644 --- a/src/util.jl +++ b/src/util.jl @@ -18,8 +18,8 @@ function get_mnist_ubyte() filenames = [k => joinpath(mnist_dir, v) for (k,v) in filenames] if !all(isfile, values(filenames)) cd(mnist_dir) do - run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip`) - run(`unzip -u mnist.zip`) + mnist_dir = download("http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip", "mnist.zip") + run(`unzip -u $mnist_dir`) end end return filenames From d612faf9554300e029138a0cc219685d42605c24 Mon Sep 17 00:00:00 2001 From: kasiabozek Date: Wed, 9 Dec 2015 15:59:37 +0900 Subject: [PATCH 265/630] Accuracy metric added to epoch callbacks.. 
--- src/callback.jl | 8 ++++---- src/model.jl | 13 +++++++------ 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/callback.jl b/src/callback.jl index 9f3d85b576ff..e18184b1131a 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -110,13 +110,13 @@ end function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end -function Base.call(cb :: EpochCallback, model :: Any, state :: OptimizationState) +function Base.call{T<:Real}(cb :: EpochCallback, model :: Any, state :: OptimizationState, metric :: Vector{Tuple{Base.Symbol, T}}) if state.curr_epoch == 0 if cb.call_on_0 - cb.callback(model, state) + cb.callback(model, state, metric) end elseif state.curr_epoch % cb.frequency == 0 - cb.callback(model, state) + cb.callback(model, state, metric) end end @@ -136,7 +136,7 @@ end =# function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) mkpath(dirname(prefix)) - every_n_epoch(frequency, call_on_0=save_epoch_0) do model, state + every_n_epoch(frequency, call_on_0=save_epoch_0) do model, state, metric save_checkpoint(model, prefix, state) end end diff --git a/src/model.jl b/src/model.jl index 009471c785f2..a8e5c49df28e 100644 --- a/src/model.jl +++ b/src/model.jl @@ -260,7 +260,7 @@ function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params : return (kv, update_on_kvstore) end -@defstruct TrainingOptions ( +@defstruct TrainingOptions Any ( initializer :: AbstractInitializer = UniformInitializer(0.01), n_epoch :: Int = 10, eval_data :: Union{Void, AbstractDataProvider} = nothing, @@ -270,13 +270,14 @@ end callbacks :: Vector{AbstractCallback} = AbstractCallback[], ) -function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, - state::OptimizationState, type_filter::Type) +function _invoke_callbacks{T<:Real}(self::FeedForward, callbacks::Vector{AbstractCallback}, + state::OptimizationState, type_filter::Type; + 
metric::Vector{Tuple{Base.Symbol, T}} = Vector{Tuple{Base.Symbol, Real}}()) map(callbacks) do cb if isa(cb, type_filter) if type_filter == AbstractEpochCallback # epoch callback have extra access to the model object - cb(self, state) + cb(self, state, metric) else cb(state) end @@ -465,6 +466,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end # end of one epoch time_stop = time() + metric = get(opts.eval_metric) info(format("== Epoch {1:0>3d} ==========", i_epoch)) info("## Training summary") for (name, value) in get(opts.eval_metric) @@ -514,7 +516,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra copy!(self.aux_params[name], aux_avg) end end - _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) + _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback; metric=metric) end # end of all epochs end @@ -573,4 +575,3 @@ function load_checkpoint(self :: FeedForward, prefix :: AbstractString, epoch :: self.aux_params = aux_params return self end - From a5c66935c85a4f746530f4011b3e73a72de60b1e Mon Sep 17 00:00:00 2001 From: kasiabozek Date: Wed, 9 Dec 2015 16:04:20 +0900 Subject: [PATCH 266/630] Accuracy metric added to epoch callbacks. 
--- src/model.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/model.jl b/src/model.jl index a8e5c49df28e..2745310da03a 100644 --- a/src/model.jl +++ b/src/model.jl @@ -469,7 +469,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra metric = get(opts.eval_metric) info(format("== Epoch {1:0>3d} ==========", i_epoch)) info("## Training summary") - for (name, value) in get(opts.eval_metric) + for (name, value) in metric info(format("{1:>18s} = {2:.4f}", string(name), value)) end info(format("{1:>18s} = {2:.4f} seconds", "time", time_stop-time_start)) From 49a92d1e90e09e468cdb48a99f9898dee52946b9 Mon Sep 17 00:00:00 2001 From: kasiabozek Date: Wed, 9 Dec 2015 17:06:23 +0900 Subject: [PATCH 267/630] Accuracy metric added to epoch callbacks. --- src/model.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/model.jl b/src/model.jl index 2745310da03a..d4b492333f9f 100644 --- a/src/model.jl +++ b/src/model.jl @@ -260,7 +260,7 @@ function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params : return (kv, update_on_kvstore) end -@defstruct TrainingOptions Any ( +@defstruct TrainingOptions ( initializer :: AbstractInitializer = UniformInitializer(0.01), n_epoch :: Int = 10, eval_data :: Union{Void, AbstractDataProvider} = nothing, From 71b49456c4b26ed49179fcf91b19ece3d5740d81 Mon Sep 17 00:00:00 2001 From: BigEpsilon Date: Sat, 12 Dec 2015 20:58:49 +0100 Subject: [PATCH 268/630] Fix compilation errors for julia 0.4.2 --- src/model.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/model.jl b/src/model.jl index d4b492333f9f..c208873e867f 100644 --- a/src/model.jl +++ b/src/model.jl @@ -43,7 +43,7 @@ function _split_inputs(batch_size :: Int, n_split :: Int) @assert(batch_size >= n_split) per_split = floor(Int, batch_size / n_split) counts = Base.zeros(Int, n_split)+per_split - extra = batch_size - sum(counts) + extra = batch_size - Base.sum(counts) counts[1:extra] 
+= 1 cum = [0, cumsum(counts)...] From 8e879d74512d8ae2e7bc78fe9bd2f31eed4ef9a9 Mon Sep 17 00:00:00 2001 From: BigEpsilon Date: Sat, 12 Dec 2015 21:03:30 +0100 Subject: [PATCH 269/630] Fix compilation errors for julia 0.4.2 --- src/io.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/io.jl b/src/io.jl index 48e0a31e32a2..99fcd8516c6b 100644 --- a/src/io.jl +++ b/src/io.jl @@ -435,7 +435,7 @@ immutable ArrayDataBatch <: AbstractDataBatch idx :: UnitRange{Int} end function Base.next(provider :: ArrayDataProvider, state :: ArrayDataProviderState) - idx = state.curr_idx:min(state.curr_idx+provider.batch_size-1, provider.sample_count) + idx = state.curr_idx:Base.min(state.curr_idx+provider.batch_size-1, provider.sample_count) return (ArrayDataBatch(idx), ArrayDataProviderState(idx.stop+1)) end From 5ed54d85355325b61df333e6b80be0939d7f1c57 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 12 Dec 2015 20:51:00 -0500 Subject: [PATCH 270/630] try to fix build-script for installing cblas --- deps/build.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 5c0dd2bb749f..cb9f930c096d 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -31,8 +31,8 @@ if !libmxnet_detected #-------------------------------------------------------------------------------- # Install dependencies, blas @linux_only begin - blas = library_dependency("blas", aliases=["libblas","libblas.so.3"]) - provides(AptGet, "libblas-dev", blas) + blas = library_dependency("cblas", aliases=["libcblas"]) + provides(AptGet, "libatlas-base-dev", blas) provides(Pacman, "blas", blas) provides(Yum, "blas-devel", blas) From f91c200d6b0c6774f0cc3c533217a1d1f1ce97af Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 12 Dec 2015 20:52:20 -0500 Subject: [PATCH 271/630] add Juliapkg badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index f60ca9f460a0..855680ae6d1a 100644 --- a/README.md +++ 
b/README.md @@ -3,6 +3,7 @@ [![Build Status](https://travis-ci.org/dmlc/MXNet.jl.svg?branch=master)](https://travis-ci.org/dmlc/MXNet.jl) [![codecov.io](https://codecov.io/github/dmlc/MXNet.jl/coverage.svg?branch=master)](https://codecov.io/github/dmlc/MXNet.jl?branch=master) [![Documentation Status](https://readthedocs.org/projects/mxnetjl/badge/?version=latest)](http://mxnetjl.readthedocs.org/en/latest/?badge=latest) +[![MXNet](http://pkg.julialang.org/badges/MXNet_0.4.svg)](http://pkg.julialang.org/?pkg=MXNet) [![License](http://dmlc.github.io/img/apache2.svg)](LICENSE.md) [![Join the chat at https://gitter.im/dmlc/mxnet](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/dmlc/mxnet?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) From 0169981ed15d74306996bbba15fc4989cdc0c583 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 14 Dec 2015 16:32:52 -0500 Subject: [PATCH 272/630] try to use the built-in bindeps for building in travis CI --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8771dfa989b6..3dd2874b2cdb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,7 +28,7 @@ addons: before_install: - export TRAVIS=test/travis - source $TRAVIS/setup_env.sh - - source $TRAVIS/build_mxnet.sh + #- source $TRAVIS/build_mxnet.sh notifications: email: false From d8e5065a2c99b2d64a02afbcaf202c5d6f2414c2 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 14 Dec 2015 16:39:39 -0500 Subject: [PATCH 273/630] fix travis CI building script --- test/travis/setup_env.sh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh index 242e034120b2..16ce30c62382 100755 --- a/test/travis/setup_env.sh +++ b/test/travis/setup_env.sh @@ -10,3 +10,8 @@ if [ ${TRAVIS_OS_NAME} == "osx" ]; then brew install graphviz brew install opencv fi + +if [ ${TRAVIS_OS_NAME} == "linux" ]; then + alias gcc='gcc-4.8' + alias g++='g++-4.8' +fi From 
7a7cfbe7256f514d9fcc2c9b2fdf6ef1a4405c63 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 14 Dec 2015 16:51:26 -0500 Subject: [PATCH 274/630] trying to fix travis CI gcc-4.8 thing --- deps/build.jl | 6 ++++++ test/travis/setup_env.sh | 4 ++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index cb9f930c096d..1be650b500fd 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -60,6 +60,12 @@ if !libmxnet_detected `cp make/config.mk config.mk` @osx_only `cp make/osx.mk config.mk` `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` + if haskey(ENV, "MXNET_CC") + `sed -i -s 's/export CC = gcc/export CC = $(ENV["MXNET_CC"])/g' config.mk` + end + if haskey(ENV, "MXNET_CXX") + `sed -i -s 's/export CXX = g++/export CC = $(ENV["MXNET_CXX"])/g' config.mk` + end `make` `cp lib/libmxnet.so $_libdir` end) diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh index 16ce30c62382..d8a454bd83cc 100755 --- a/test/travis/setup_env.sh +++ b/test/travis/setup_env.sh @@ -12,6 +12,6 @@ if [ ${TRAVIS_OS_NAME} == "osx" ]; then fi if [ ${TRAVIS_OS_NAME} == "linux" ]; then - alias gcc='gcc-4.8' - alias g++='g++-4.8' + export MXNET_CC=gcc-4.8 + export MXNET_CXX=g++-4.8 fi From 99e7e66dcc263117751cef6c3ac6998fc4db4015 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 14 Dec 2015 17:00:42 -0500 Subject: [PATCH 275/630] still trying to fix travis CI gcc issue... 
--- deps/build.jl | 6 ------ test/travis/run_test.sh | 1 + test/travis/setup_env.sh | 6 ++++-- 3 files changed, 5 insertions(+), 8 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 1be650b500fd..cb9f930c096d 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -60,12 +60,6 @@ if !libmxnet_detected `cp make/config.mk config.mk` @osx_only `cp make/osx.mk config.mk` `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` - if haskey(ENV, "MXNET_CC") - `sed -i -s 's/export CC = gcc/export CC = $(ENV["MXNET_CC"])/g' config.mk` - end - if haskey(ENV, "MXNET_CXX") - `sed -i -s 's/export CXX = g++/export CC = $(ENV["MXNET_CXX"])/g' config.mk` - end `make` `cp lib/libmxnet.so $_libdir` end) diff --git a/test/travis/run_test.sh b/test/travis/run_test.sh index a576c69f6979..93f8a97c2568 100755 --- a/test/travis/run_test.sh +++ b/test/travis/run_test.sh @@ -1,4 +1,5 @@ #!/bin/bash +g++ --version if [[ -a .git/shallow ]]; then git fetch --unshallow; fi julia -e 'Pkg.clone(pwd()); Pkg.build("MXNet"); Pkg.test("MXNet"; coverage=true)' diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh index d8a454bd83cc..5a33a9e42955 100755 --- a/test/travis/setup_env.sh +++ b/test/travis/setup_env.sh @@ -12,6 +12,8 @@ if [ ${TRAVIS_OS_NAME} == "osx" ]; then fi if [ ${TRAVIS_OS_NAME} == "linux" ]; then - export MXNET_CC=gcc-4.8 - export MXNET_CXX=g++-4.8 + mkdir shadow_bin + ln -s `which gcc-4.8` shadow_bin/gcc + ln -s `which g++-4.8` shadow_bin/g++ + export PATH=$PWD/shadow_bin:$PATH fi From 3861edcf0aeec488942e6f3a1e07c31fe196b3b9 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 14 Dec 2015 17:13:55 -0500 Subject: [PATCH 276/630] clean up travis CI script --- .travis.yml | 1 - test/travis/build_mxnet.sh | 21 --------------------- test/travis/run_test.sh | 1 - 3 files changed, 23 deletions(-) delete mode 100755 test/travis/build_mxnet.sh diff --git a/.travis.yml b/.travis.yml index 3dd2874b2cdb..9f08e176eed2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,7 +28,6 
@@ addons: before_install: - export TRAVIS=test/travis - source $TRAVIS/setup_env.sh - #- source $TRAVIS/build_mxnet.sh notifications: email: false diff --git a/test/travis/build_mxnet.sh b/test/travis/build_mxnet.sh deleted file mode 100755 index 6774011f6355..000000000000 --- a/test/travis/build_mxnet.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash - -git clone --recursive https://github.com/dmlc/mxnet __mxnet_build -cd __mxnet_build - -if [ ! -f config.mk ]; then - if [ ${TRAVIS_OS_NAME} == "linux" ]; then - cp make/config.mk config.mk - sed -i 's/export CC = gcc/export CC = gcc-4.8/g' config.mk - sed -i 's/export CXX = g++/export CXX = g++-4.8/g' config.mk - fi - - if [ ${TRAVIS_OS_NAME} == "osx" ]; then - cp make/osx.mk config.mk - fi -fi - -make -j4 || exit 1 - -export MXNET_HOME=$PWD -cd .. diff --git a/test/travis/run_test.sh b/test/travis/run_test.sh index 93f8a97c2568..a576c69f6979 100755 --- a/test/travis/run_test.sh +++ b/test/travis/run_test.sh @@ -1,5 +1,4 @@ #!/bin/bash -g++ --version if [[ -a .git/shallow ]]; then git fetch --unshallow; fi julia -e 'Pkg.clone(pwd()); Pkg.build("MXNet"); Pkg.test("MXNet"; coverage=true)' From 3a134e82e5e663ab095e9f836c2860d9ce6a58ec Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 14 Dec 2015 17:19:19 -0500 Subject: [PATCH 277/630] test whether bindeps works well on travis CI to install cblas --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 9f08e176eed2..ded60f0045b6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,9 +19,9 @@ addons: - git - libcurl4-openssl-dev - unzip - - libatlas-base-dev - - libatlas-dev - - libopencv-dev + #- libatlas-base-dev + #- libatlas-dev + #- libopencv-dev - gcc-4.8 - g++-4.8 From ce57a3907bd8cf9843d6c7956c39f71385e96aec Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 14 Dec 2015 17:25:02 -0500 Subject: [PATCH 278/630] test failed, travis CI is configured to disable sudo --- .travis.yml | 6 +++--- 1 file 
changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index ded60f0045b6..9f08e176eed2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,9 +19,9 @@ addons: - git - libcurl4-openssl-dev - unzip - #- libatlas-base-dev - #- libatlas-dev - #- libopencv-dev + - libatlas-base-dev + - libatlas-dev + - libopencv-dev - gcc-4.8 - g++-4.8 From 8edb94be2a4c6250ef4bb26e58ca5fb0b9fee6ff Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 14 Dec 2015 18:24:56 -0500 Subject: [PATCH 279/630] prepare for v0.0.7 --- NEWS.md | 7 +- docs/api/io.rst | 4 +- docs/api/ndarray.rst | 110 ++++++++++++++ docs/api/optimizer.rst | 9 ++ docs/api/symbolic-node.rst | 303 ++++++++++++++++++++++++++++++++++++- docs/conf.py | 4 +- 6 files changed, 427 insertions(+), 10 deletions(-) diff --git a/NEWS.md b/NEWS.md index dd95dbb5ffec..c0d365a7336e 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,4 +1,9 @@ -$ v0.0.6 (2015.12.02) +# v0.0.7 (2015.12.14) + +* Fix compatability with Julia v0.4.2 (@BigEpsilon) +* Metrics in epoch callbacks (@kasiabozek) + +# v0.0.6 (2015.12.02) * Variants of Xaiver initializers (@vchuravy) * More arithmetic operators on symbolic nodes diff --git a/docs/api/io.rst b/docs/api/io.rst index 6186db52ecc1..f1ab959be6f2 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -324,7 +324,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -467,7 +467,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :return: the constructed :class:`MXDataProvider`. 
diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index c5baf5b75301..d2f0c4fc9636 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -444,6 +444,17 @@ Public APIs +.. function:: ceil(...) + + Take ceil value of the src + + :param src: Source input to the function + :type src: NDArray + + + + + .. function:: choose_element_0index(...) Choose one element from each line(row for python, column for R/Julia) in lhs according to index indicated by rhs. This function assume rhs uses 0-based index. @@ -478,6 +489,17 @@ Public APIs +.. function:: cos(...) + + Take cos of the src + + :param src: Source input to the function + :type src: NDArray + + + + + .. function:: dot(...) Calcuate 2D matrix multiplication @@ -504,6 +526,17 @@ Public APIs +.. function:: floor(...) + + Take floor value of the src + + :param src: Source input to the function + :type src: NDArray + + + + + .. function:: log(...) Take log of the src @@ -515,6 +548,28 @@ Public APIs +.. function:: max(...) + + Take max of the src.The result will be ndarray of shape (1,) on the same device. + + :param src: Source input to the function + :type src: NDArray + + + + + +.. function:: min(...) + + Take min of the src.The result will be ndarray of shape (1,) on the same device. + + :param src: Source input to the function + :type src: NDArray + + + + + .. function:: norm(...) Take L2 norm of the src.The result will be ndarray of shape (1,) on the same device. @@ -526,6 +581,50 @@ Public APIs +.. function:: round(...) + + Take round value of the src + + :param src: Source input to the function + :type src: NDArray + + + + + +.. function:: rsqrt(...) + + Take rsqrt of the src + + :param src: Source input to the function + :type src: NDArray + + + + + +.. function:: sign(...) + + Take sign value of the src + + :param src: Source input to the function + :type src: NDArray + + + + + +.. function:: sin(...) 
+ + Take sin of the src + + :param src: Source input to the function + :type src: NDArray + + + + + .. function:: sqrt(...) Take sqrt of the src @@ -547,6 +646,17 @@ Public APIs + +.. function:: sum(...) + + Take sum of the src.The result will be ndarray of shape (1,) on the same device. + + :param src: Source input to the function + :type src: NDArray + + + + Internal APIs ^^^^^^^^^^^^^ diff --git a/docs/api/optimizer.rst b/docs/api/optimizer.rst index 12e898d233b5..03c9c11fed4a 100644 --- a/docs/api/optimizer.rst +++ b/docs/api/optimizer.rst @@ -80,6 +80,15 @@ Common interfaces +.. class:: LearningRate.Inv + + :math:`\eta_t = \eta_0 * (1 + \gamma * t)^(-power)`. + Here :math:`t` is the epoch count, or the iteration count if ``decay_on_iteration`` + is set to true. + + + + .. function:: get_momentum(scheduler, state) :param AbstractMomentumScheduler scheduler: the momentum scheduler. diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 3e030378c3ba..3ddb299a8807 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -187,7 +187,7 @@ Public APIs :param act_type: Activation function to be applied. - :type act_type: {'relu', 'sigmoid', 'tanh'}, required + :type act_type: {'relu', 'sigmoid', 'softrelu', 'tanh'}, required :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -207,12 +207,16 @@ Public APIs :param eps: Epsilon to prevent div 0 - :type eps: float, optional, default=1e-10 + :type eps: float, optional, default=0.001 :param momentum: Momentum for moving average :type momentum: float, optional, default=0.9 + + :param fix_gamma: Fix gamma while training + :type fix_gamma: boolean, optional, default=True + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
:param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -478,6 +482,34 @@ Public APIs +.. function:: IdentityAttachKLSparseReg(...) + + Apply a sparse regularization to the output a sigmoid activation function. + + :param data: Input data. + :type data: SymbolicNode + + + :param sparseness_target: The sparseness target + :type sparseness_target: float, optional, default=0.1 + + + :param penalty: The tradeoff parameter for the sparseness penalty + :type penalty: float, optional, default=0.001 + + + :param momentum: The momentum for running average + :type momentum: float, optional, default=0.9 + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: LRN(...) Apply convolution to input then add a bias. @@ -519,10 +551,10 @@ Public APIs :param act_type: Activation function to be applied. - :type act_type: {'leaky', 'prelu', 'rrelu'},optional, default='leaky' + :type act_type: {'elu', 'leaky', 'prelu', 'rrelu'},optional, default='leaky' - :param slope: Init slope for the activation. (For leaky only) + :param slope: Init slope for the activation. (For leaky and elu only) :type slope: float, optional, default=0.25 @@ -553,6 +585,10 @@ Public APIs :param label: Input label to function. :type label: SymbolicNode + + :param grad_scale: Scale the gradient by a float factor + :type grad_scale: float, optional, default=1 + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -574,6 +610,34 @@ Public APIs :param label: Input label to function. 
:type label: SymbolicNode + + :param grad_scale: Scale the gradient by a float factor + :type grad_scale: float, optional, default=1 + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: MAERegressionOutput(...) + + Use mean absolute error regression for final output, this is used on final output of a net. + + :param data: Input data to function. + :type data: SymbolicNode + + + :param label: Input label to function. + :type label: SymbolicNode + + + :param grad_scale: Scale the gradient by a float factor + :type grad_scale: float, optional, default=1 + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -623,7 +687,7 @@ Public APIs :type data: SymbolicNode - :param target_shape: Target new shape + :param target_shape: Target new shape. One and only one dim can be 0, in which case it will be infered from the rest of dims :type target_shape: Shape(tuple), required :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. @@ -723,6 +787,36 @@ Public APIs +.. function:: UpSampling(...) + + Perform nearest neighboor/bilinear up sampling to inputs + + This function support variable length positional :class:`SymbolicNode` inputs. + + :param scale: Up sampling scale + :type scale: int (non-negative), required + + + :param num_filter: Input filter. Only used by nearest sample_type. + :type num_filter: int (non-negative), optional, default=0 + + + :param sample_type: upsampling method + :type sample_type: {'bilinear', 'nearest'}, required + + + :param num_args: Number of inputs to be upsampled. 
For nearest neighbor upsampling, this can be 1-N; the size of output will be(scale*h_0,scale*w_0) and all other inputs will be upsampled to thesame size. For bilinear upsampling this must be 2; 1 input and 1 weight. + :type num_args: int, required + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: abs(...) Take absolute value of the src @@ -739,6 +833,38 @@ Public APIs +.. function:: ceil(...) + + Take ceil value of the src + + :param src: Source symbolic input to the function + :type src: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: cos(...) + + Take cos of the src + + :param src: Source symbolic input to the function + :type src: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: exp(...) Take exp of the src @@ -755,6 +881,22 @@ Public APIs +.. function:: floor(...) + + Take floor value of the src + + :param src: Source symbolic input to the function + :type src: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: log(...) Take log of the src @@ -771,6 +913,70 @@ Public APIs +.. function:: round(...) 
+ + Take round value of the src + + :param src: Source symbolic input to the function + :type src: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: rsqrt(...) + + Take rsqrt of the src + + :param src: Source symbolic input to the function + :type src: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: sign(...) + + Take sign value of the src + + :param src: Source symbolic input to the function + :type src: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: sin(...) + + Take sin of the src + + :param src: Source symbolic input to the function + :type src: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: sqrt(...) Take sqrt of the src @@ -809,6 +1015,19 @@ Internal APIs Document and signatures for internal API functions might be incomplete. +.. function:: _CrossDeviceCopy(...) + + Special op to copy data cross device + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. 
+ + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: _Div(...) Perform an elementwise div. @@ -846,6 +1065,80 @@ Internal APIs +.. function:: _Maximum(...) + + Perform an elementwise power. + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: _MaximumScalar(...) + + Perform an elementwise maximum. + + :param array: Input array operand to the operation. + :type array: SymbolicNode + + + :param scalar: scalar value. + :type scalar: float, required + + + :param scalar_on_left: scalar operand is on the left. + :type scalar_on_left: boolean, optional, default=False + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: _Minimum(...) + + Perform an elementwise power. + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: _MinimumScalar(...) + + Perform an elementwise minimum. + + :param array: Input array operand to the operation. + :type array: SymbolicNode + + + :param scalar: scalar value. + :type scalar: float, required + + + :param scalar_on_left: scalar operand is on the left. + :type scalar_on_left: boolean, optional, default=False + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. 
function:: _Minus(...) Perform an elementwise minus. diff --git a/docs/conf.py b/docs/conf.py index 32e0d28f35ba..6293dcf80b3f 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -56,9 +56,9 @@ # built documents. # # The short X.Y version. -version = '0.0.6' +version = '0.0.7' # The full version, including alpha/beta/rc tags. -release = '0.0.6' +release = '0.0.7' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. From 4c52eb8eb76c6239b0a447a5624c7b6c7c3586b6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 16 Jan 2016 19:25:19 +0800 Subject: [PATCH 280/630] workaround for #55 --- src/ndarray.jl | 7 +++++++ test/unittest/ndarray.jl | 12 ++++++++++++ 2 files changed, 19 insertions(+) diff --git a/src/ndarray.jl b/src/ndarray.jl index e477bf6fd2f4..d0eaaf2621b4 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -940,6 +940,13 @@ function _import_ndarray_functions(;gen_docs=false) _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) 
+ + # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped + # See https://github.com/dmlc/MXNet.jl/issues/55 + if func_name == :dot + _use_vars.args[2:end] = flipdim(_use_vars.args[2:end], 1) + end + stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) if n_mutate_vars == 1 stmt_ret = :(return out1) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index b7208f532222..680d2f867263 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -260,6 +260,17 @@ function test_nd_as_jl() @test reldiff(copy(z)[:,2:end], copy(x)[:,2:end]) < 1e-6 end +function test_dot() + dims1 = (2, 3) + dims2 = (3, 8) + info("NDArray::dot") + + x = mx.zeros(dims1) + y = mx.zeros(dims2) + z = mx.dot(x, y) + @test size(z) == (2, 8) +end + ################################################################################ # Run tests @@ -276,5 +287,6 @@ test_saveload() test_clip() test_sqrt() test_nd_as_jl() +test_dot() end From 724007a4469a05e90e6c0fc5f6508192825b7d90 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 16 Jan 2016 19:31:48 +0800 Subject: [PATCH 281/630] add notes about AWS GPUs --- docs/api/io.rst | 51 +++++++++++++++++++- docs/api/ndarray.rst | 11 +++++ docs/api/symbolic-node.rst | 98 +++++++++++++++++++++++++++++++++++++- docs/index.rst | 1 + docs/user-guide/faq.rst | 7 +++ 5 files changed, 164 insertions(+), 4 deletions(-) create mode 100644 docs/user-guide/faq.rst diff --git a/docs/api/io.rst b/docs/api/io.rst index f1ab959be6f2..e5cb3ffb32e5 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -267,6 +267,33 @@ libmxnet data providers +.. function:: CSVIter(...) + + Can also be called with the alias ``CSVProvider``. + Create iterator for dataset in csv. + + :param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data. + :param Base.Symbol label_name: keyword argument, default ``:softmax_label``. The name of the label. 
Could be ``nothing`` if no label is presented in this dataset. + + :param data_csv: Dataset Param: Data csv path. + :type data_csv: string, required + + + :param data_shape: Dataset Param: Shape of the data. + :type data_shape: Shape(tuple), required + + + :param label_csv: Dataset Param: Label csv path. If is NULL, all labels will be returned as 0 + :type label_csv: string, optional, default='NULL' + + + :param label_shape: Dataset Param: Shape of the label. + :type label_shape: Shape(tuple), optional, default=(1,) + + :return: the constructed :class:`MXDataProvider`. + + + .. function:: ImageRecordIter(...) Can also be called with the alias ``ImageRecordProvider``. @@ -375,6 +402,18 @@ libmxnet data providers :type min_img_size: float, optional, default=0 + :param random_h: Augmentation Param: Maximum value of H channel in HSL color space. + :type random_h: int, optional, default='0' + + + :param random_s: Augmentation Param: Maximum value of S channel in HSL color space. + :type random_s: int, optional, default='0' + + + :param random_l: Augmentation Param: Maximum value of L channel in HSL color space. + :type random_l: int, optional, default='0' + + :param rotate: Augmentation Param: Rotate angle. :type rotate: int, optional, default='-1' @@ -383,6 +422,10 @@ libmxnet data providers :type fill_value: int, optional, default='255' + :param inter_method: Augmentation Param: 0-NN 1-bilinear 2-cubic 3-area 4-lanczos4 9-auto 10-rand. + :type inter_method: int, optional, default='1' + + :param mirror: Augmentation Param: Whether to mirror the image. :type mirror: boolean, optional, default=False @@ -399,14 +442,18 @@ libmxnet data providers :type mean_r: float, optional, default=0 - :param mean_g: Augmentation: Mean value on G channel. + :param mean_g: Augmentation Param: Mean value on G channel. :type mean_g: float, optional, default=0 - :param mean_b: Augmentation: Mean value on B channel. + :param mean_b: Augmentation Param: Mean value on B channel. 
:type mean_b: float, optional, default=0 + :param mean_a: Augmentation Param: Mean value on Alpha channel. + :type mean_a: float, optional, default=0 + + :param scale: Augmentation Param: Scale in color space. :type scale: float, optional, default=1 diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index d2f0c4fc9636..448f69bd1731 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -444,6 +444,17 @@ Public APIs +.. function:: argmax_channel(...) + + Take sum of the src.The result will be ndarray of shape (1,) on the same device. + + :param src: Source input to the function + :type src: NDArray + + + + + .. function:: ceil(...) Take ceil value of the src diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 3ddb299a8807..8859a2243b6f 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -180,7 +180,7 @@ Public APIs ^^^^^^^^^^^ .. function:: Activation(...) - Apply activation function to input. + Apply activation function to input.Softmax Activation is only available with CUDNN on GPUand will be computed at each location across channel if input is 4D. :param data: Input data to activation function. :type data: SymbolicNode @@ -242,6 +242,26 @@ Public APIs +.. function:: Cast(...) + + Cast array to a different data type. + + :param data: Input data to cast function. + :type data: SymbolicNode + + + :param dtype: Target data type. + :type dtype: {'float16', 'float32', 'float64', 'int32', 'uint8'}, required + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: Concat(...) Perform an feature concat on channel dim (dim 1) over all the inputs. 
@@ -288,6 +308,10 @@ Public APIs :type stride: Shape(tuple), optional, default=(1, 1) + :param dilate: convolution dilate: (y, x) + :type dilate: Shape(tuple), optional, default=(1, 1) + + :param pad: pad for convolution: (y, x) :type pad: Shape(tuple), optional, default=(0, 0) @@ -300,7 +324,7 @@ Public APIs :type num_group: int (non-negative), optional, default=1 - :param workspace: Tmp workspace for convolution (MB) + :param workspace: Tmp workspace for convolution (MB). :type workspace: long (non-negative), optional, default=512 @@ -316,6 +340,36 @@ Public APIs +.. function:: Crop(...) + + Crop the 2th and 3th dim of input data, with the corresponding size of w_h orwith widht and height of the second input symbol + + This function support variable length positional :class:`SymbolicNode` inputs. + + :param num_args: Number of inputs for crop, if equals one, then we will use the h_wfor crop heihgt and width, else if equals two, then we will use the heightand width of the second input symbol, we name crop_like here + :type num_args: int, required + + + :param offset: corp offset coordinate: (y, x) + :type offset: Shape(tuple), optional, default=(0, 0) + + + :param h_w: corp height and weight: (h, w) + :type h_w: Shape(tuple), optional, default=(0, 0) + + + :param center_crop: If set to true, then it will use be the center_crop,or it will crop using the shape of crop_like + :type center_crop: boolean, optional, default=False + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + .. function:: Deconvolution(...) Apply deconvolution to input then add a bias. 
@@ -727,9 +781,37 @@ Public APIs :type grad_scale: float, optional, default=1 + :param ignore_label: the ignore_label will not work in backward, and this onlybe used when multi_output=true + :type ignore_label: float, optional, default=-1 + + :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False + + :param use_ignore: If set to true, the ignore_label value will not contributorto the backward gradient + :type use_ignore: boolean, optional, default=False + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: the constructed :class:`SymbolicNode`. + + + + + +.. function:: SoftmaxActivation(...) + + Apply softmax activation to input. This is intended for internal layers. For output (loss layer) please use SoftmaxOutput. If type=instance, this operator will compute a softmax for each instance in the batch; this is the default mode. If type=channel, this operator will compute a num_channel-class softmax at each position of each instance; this can be used for fully convolutional network, image segmentation, etc. + + :param data: Input data to activation function. + :type data: SymbolicNode + + + :param type: Softmax Mode. If set to instance, this operator will compute a softmax for each instance in the batch; this is the default mode. If set to channel, this operator will compute a num_channel-class softmax at each position of each instance; this can be used for fully convolutional network, image segmentation, etc. + :type type: {'channel', 'instance'},optional, default='instance' + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. 
@@ -751,9 +833,17 @@ Public APIs :type grad_scale: float, optional, default=1 + :param ignore_label: the ignore_label will not work in backward, and this onlybe used when multi_output=true + :type ignore_label: float, optional, default=-1 + + :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes :type multi_output: boolean, optional, default=False + + :param use_ignore: If set to true, the ignore_label value will not contributorto the backward gradient + :type use_ignore: boolean, optional, default=False + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -805,6 +895,10 @@ Public APIs :type sample_type: {'bilinear', 'nearest'}, required + :param multi_input_mode: How to handle multiple input. concat means concatenate upsampled images along the channel dimension. sum means add all images together, only available for nearest neighbor upsampling. + :type multi_input_mode: {'concat', 'sum'},optional, default='concat' + + :param num_args: Number of inputs to be upsampled. For nearest neighbor upsampling, this can be 1-N; the size of output will be(scale*h_0,scale*w_0) and all other inputs will be upsampled to thesame size. For bilinear upsampling this must be 2; 1 input and 1 weight. :type num_args: int, required diff --git a/docs/index.rst b/docs/index.rst index 05077bed6904..7e95b6a7c10e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -27,6 +27,7 @@ For more details, see documentation below. Please also checkout the `examples user-guide/install user-guide/overview + user-guide/faq .. 
toctree:: :maxdepth: 1 diff --git a/docs/user-guide/faq.rst b/docs/user-guide/faq.rst new file mode 100644 index 000000000000..602c8ab9fda5 --- /dev/null +++ b/docs/user-guide/faq.rst @@ -0,0 +1,7 @@ +FAQ +=== + +Running MXNet on AWS GPU instances +---------------------------------- +See the discussions and notes `here +`_. From c5cbf2440a86f395a7471b40e65d1496eb09f1d3 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 6 Feb 2016 12:17:41 -0500 Subject: [PATCH 282/630] fix seg-fault due to upstream API change --- src/ndarray.jl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index d0eaaf2621b4..9c4836e60771 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -895,9 +895,13 @@ function _import_ndarray_functions(;gen_docs=false) ref_arg_types = Ref{char_pp}(0) ref_arg_descs = Ref{char_pp}(0) + ref_ret_type = Ref{char_p}(0) + @mxcall(:MXFuncGetInfo, - (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), - func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, + Ref{char_pp}, Ref{char_pp}, Ref{char_p}), + func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, + ref_arg_types, ref_arg_descs, ref_ret_type) func_name = symbol(bytestring(ref_name[])) From 175d399d7a3f26d1b654638be59ebacec2c6855c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 6 Feb 2016 12:21:52 -0500 Subject: [PATCH 283/630] workaround for jenkins CI --- test/travis/run_test.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/test/travis/run_test.sh b/test/travis/run_test.sh index a576c69f6979..c21864d5de6b 100755 --- a/test/travis/run_test.sh +++ b/test/travis/run_test.sh @@ -1,4 +1,5 @@ #!/bin/bash if [[ -a .git/shallow ]]; then git fetch --unshallow; fi +julia -e 'Pkg.rm("MXNet")' # in case Jenkins CI did not remove existing files julia -e 'Pkg.clone(pwd()); Pkg.build("MXNet"); 
Pkg.test("MXNet"; coverage=true)' From f277cd7598426295d0812f21b8b33e56e7c34234 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 6 Feb 2016 12:27:32 -0500 Subject: [PATCH 284/630] revert last commit (not useful) --- test/travis/run_test.sh | 1 - 1 file changed, 1 deletion(-) diff --git a/test/travis/run_test.sh b/test/travis/run_test.sh index c21864d5de6b..a576c69f6979 100755 --- a/test/travis/run_test.sh +++ b/test/travis/run_test.sh @@ -1,5 +1,4 @@ #!/bin/bash if [[ -a .git/shallow ]]; then git fetch --unshallow; fi -julia -e 'Pkg.rm("MXNet")' # in case Jenkins CI did not remove existing files julia -e 'Pkg.clone(pwd()); Pkg.build("MXNet"); Pkg.test("MXNet"; coverage=true)' From 30b71427e954cf8e2e1ae98a3e004331f1d8eca5 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 6 Feb 2016 12:35:35 -0500 Subject: [PATCH 285/630] fix one travis CI test error --- src/symbolic-node.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 0d7f5937f88a..83ed11ba4923 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -549,8 +549,8 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) $(if kv_nargs != symbol("") quote if length(symbol_kws) > 0 - @assert(false, $func_name * " takes variable number of SymbolicNode arguments, please pass input Symbols " * - "via positional arguments, instead of keyword arguments.") + @assert(false, $func_name_s * " takes variable number of SymbolicNode arguments, " * + "please pass input Symbols via positional arguments, instead of keyword arguments.") end end end) From 1e4f9f0ae406674106e87e688399e13f7d997286 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 6 Feb 2016 12:50:45 -0500 Subject: [PATCH 286/630] fix error due to upstream API changes on symbolic node function --- docs/api/ndarray.rst | 62 +++++++++++++- docs/api/symbolic-node.rst | 164 ++++++++++++++++++++----------------- src/symbolic-node.jl | 8 +- 3 files changed, 
153 insertions(+), 81 deletions(-) diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index 448f69bd1731..be1c74b80bea 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -446,7 +446,7 @@ Public APIs .. function:: argmax_channel(...) - Take sum of the src.The result will be ndarray of shape (1,) on the same device. + Take argmax indices of each channel of the src.The result will be ndarray of shape (num_channel,) on the same device. :param src: Source input to the function :type src: NDArray @@ -513,7 +513,7 @@ Public APIs .. function:: dot(...) - Calcuate 2D matrix multiplication + Calculate 2D matrix multiplication :param lhs: Left operand to the function. :type lhs: NDArray @@ -537,6 +537,25 @@ Public APIs +.. function:: fill_element_0index(...) + + Fill one element of each line(row for python, column for R/Julia) in lhs according to index indicated by rhs and values indicated by mhs. This function assume rhs uses 0-based index. + + :param lhs: Left operand to the function. + :type lhs: NDArray + + + :param mhs: Middle operand to the function. + :type mhs: NDArray + + + :param rhs: Right operand to the function. + :type rhs: NDArray + + + + + .. function:: floor(...) Take floor value of the src @@ -716,6 +735,45 @@ Internal APIs +.. function:: _imdecode(...) + + Decode an image, clip to (x0, y0, x1, y1), substract mean, and write to buffer + + :param mean: image mean + :type mean: NDArray + + + :param index: buffer position for output + :type index: int + + + :param x0: x0 + :type x0: int + + + :param y0: y0 + :type y0: int + + + :param x1: x1 + :type x1: int + + + :param y1: y1 + :type y1: int + + + :param c: channel + :type c: int + + + :param size: length of str_img + :type size: int + + + + + .. function:: _minus(...) 
diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index 8859a2243b6f..a390b82dd09d 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -192,7 +192,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -220,7 +220,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -236,7 +236,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -256,7 +256,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -268,6 +268,10 @@ Public APIs This function support variable length positional :class:`SymbolicNode` inputs. + :param data: List of tensors to concatenate + :type data: SymbolicNode[] + + :param num_args: Number of inputs to be concated. :type num_args: int, required @@ -278,7 +282,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. 
@@ -305,15 +309,15 @@ Public APIs :param stride: convolution stride: (y, x) - :type stride: Shape(tuple), optional, default=(1, 1) + :type stride: Shape(tuple), optional, default=(1,1) :param dilate: convolution dilate: (y, x) - :type dilate: Shape(tuple), optional, default=(1, 1) + :type dilate: Shape(tuple), optional, default=(1,1) :param pad: pad for convolution: (y, x) - :type pad: Shape(tuple), optional, default=(0, 0) + :type pad: Shape(tuple), optional, default=(0,0) :param num_filter: convolution filter(channel) number @@ -334,7 +338,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -342,7 +346,7 @@ Public APIs .. function:: Crop(...) - Crop the 2th and 3th dim of input data, with the corresponding size of w_h orwith widht and height of the second input symbol + Crop the 2nd and 3rd dim of input data, with the corresponding size of w_h or with width and height of the second input symbol This function support variable length positional :class:`SymbolicNode` inputs. @@ -350,12 +354,12 @@ Public APIs :type num_args: int, required - :param offset: corp offset coordinate: (y, x) - :type offset: Shape(tuple), optional, default=(0, 0) + :param offset: crop offset coordinate: (y, x) + :type offset: Shape(tuple), optional, default=(0,0) - :param h_w: corp height and weight: (h, w) - :type h_w: Shape(tuple), optional, default=(0, 0) + :param h_w: crop height and weight: (h, w) + :type h_w: Shape(tuple), optional, default=(0,0) :param center_crop: If set to true, then it will use be the center_crop,or it will crop using the shape of crop_like @@ -364,7 +368,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
:param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -391,11 +395,11 @@ Public APIs :param stride: deconvolution stride: (y, x) - :type stride: Shape(tuple), optional, default=(1, 1) + :type stride: Shape(tuple), optional, default=(1,1) :param pad: pad for deconvolution: (y, x) - :type pad: Shape(tuple), optional, default=(0, 0) + :type pad: Shape(tuple), optional, default=(0,0) :param num_filter: deconvolution filter(channel) number @@ -416,7 +420,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -436,7 +440,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -448,13 +452,13 @@ Public APIs This function support variable length positional :class:`SymbolicNode` inputs. - :param num_args: Number of inputs to be sumed. + :param num_args: Number of inputs to be summed. :type num_args: int, required :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -482,7 +486,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. 
@@ -498,7 +502,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -530,7 +534,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -558,7 +562,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -590,7 +594,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -622,7 +626,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -646,7 +650,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -671,7 +675,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
:param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -695,7 +699,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -718,16 +722,16 @@ Public APIs :param stride: stride: for pooling (y, x) - :type stride: Shape(tuple), optional, default=(1, 1) + :type stride: Shape(tuple), optional, default=(1,1) :param pad: pad for pooling: (y, x) - :type pad: Shape(tuple), optional, default=(0, 0) + :type pad: Shape(tuple), optional, default=(0,0) :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -741,13 +745,13 @@ Public APIs :type data: SymbolicNode - :param target_shape: Target new shape. One and only one dim can be 0, in which case it will be infered from the rest of dims + :param target_shape: Target new shape. One and only one dim can be 0, in which case it will be inferred from the rest of dims :type target_shape: Shape(tuple), required :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -763,7 +767,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode[]. 
@@ -781,21 +785,21 @@ Public APIs :type grad_scale: float, optional, default=1 - :param ignore_label: the ignore_label will not work in backward, and this onlybe used when multi_output=true + :param ignore_label: the ignore_label will not work in backward, and this only be used when multi_output=true :type ignore_label: float, optional, default=-1 - :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes + :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensional input tensor, softmax will generate n*x_1*...*x_n output, each has k classes :type multi_output: boolean, optional, default=False - :param use_ignore: If set to true, the ignore_label value will not contributorto the backward gradient + :param use_ignore: If set to true, the ignore_label value will not contribute to the backward gradient :type use_ignore: boolean, optional, default=False :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -815,7 +819,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -829,25 +833,29 @@ Public APIs :type data: SymbolicNode + :param label: Label data. 
+ :type label: SymbolicNode + + :param grad_scale: Scale the gradient by a float factor :type grad_scale: float, optional, default=1 - :param ignore_label: the ignore_label will not work in backward, and this onlybe used when multi_output=true + :param ignore_label: the ignore_label will not work in backward, and this only be used when multi_output=true :type ignore_label: float, optional, default=-1 - :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensionalinput tensor, softmax will generate n*x_1*...*x_n output, eachhas k classes + :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensional input tensor, softmax will generate n*x_1*...*x_n output, each has k classes :type multi_output: boolean, optional, default=False - :param use_ignore: If set to true, the ignore_label value will not contributorto the backward gradient + :param use_ignore: If set to true, the ignore_label value will not contribute to the backward gradient :type use_ignore: boolean, optional, default=False :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -871,7 +879,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -883,6 +891,10 @@ Public APIs This function support variable length positional :class:`SymbolicNode` inputs. + :param data: Array of tensors to upsample + :type data: SymbolicNode[] + + :param scale: Up sampling scale :type scale: int (non-negative), required @@ -905,7 +917,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
:param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -921,7 +933,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -937,7 +949,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -953,7 +965,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -969,7 +981,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -985,7 +997,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -1001,7 +1013,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -1017,7 +1029,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. 
`:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -1033,7 +1045,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -1049,7 +1061,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -1065,7 +1077,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -1081,7 +1093,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -1097,7 +1109,7 @@ Public APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: . @@ -1116,7 +1128,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. 
@@ -1129,7 +1141,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1153,7 +1165,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1166,7 +1178,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1190,7 +1202,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1203,7 +1215,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1227,7 +1239,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1240,7 +1252,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
:param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1264,7 +1276,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1277,7 +1289,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1301,7 +1313,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1317,7 +1329,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1337,7 +1349,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1350,7 +1362,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. 
@@ -1374,7 +1386,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1387,7 +1399,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. @@ -1411,7 +1423,7 @@ Internal APIs :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: the constructed :class:`SymbolicNode`. + :return: SymbolicNode. diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 83ed11ba4923..e83b94aeefa4 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -480,11 +480,13 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) ref_arg_names = Ref{char_pp}(0) ref_arg_types = Ref{char_pp}(0) ref_arg_descs = Ref{char_pp}(0) + ref_ret_type = Ref{char_p}(0) @mxcall(:MXSymbolGetAtomicSymbolInfo, (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, - Ref{char_pp}, Ref{char_p}), - hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, ref_kv_nargs) + Ref{char_pp}, Ref{char_p}, Ref{char_p}), + hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, + ref_kv_nargs, ref_ret_type) func_name_s= bytestring(ref_name[]) func_name = symbol(func_name_s) @@ -499,7 +501,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) f_desc *= ":param Symbol name: The name of the 
:class:`SymbolicNode`. (e.g. `:my_symbol`), optional.\n" f_desc *= ":param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`.\n\n" - f_desc *= ":return: the constructed :class:`SymbolicNode`.\n\n" + f_desc *= ":return: $(_format_typestring(bytestring(ref_ret_type[]))).\n\n" return (func_name, f_desc) end From 579b88106f361f883982e8caad6c9074408ee569 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 6 Feb 2016 13:10:22 -0500 Subject: [PATCH 287/630] fix for breaking changes in Julia nightly --- src/initializer.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/initializer.jl b/src/initializer.jl index 0d2a10586f57..2f1342e9ac80 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -24,7 +24,7 @@ Or, if full behavior customization is needed, override the following function =# abstract AbstractInitializer -function call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +function call{T<:AbstractInitializer}(self :: T, name :: Base.Symbol, array :: NDArray) strname = string(name) if endswith(strname, "bias") _init_bias(self, name, array) From ce3f6d38bde4be9e8dbdac9d76545bf8132c44b6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 8 Feb 2016 11:34:02 -0500 Subject: [PATCH 288/630] another attempt to fix julia v0.5 compatability --- src/initializer.jl | 4 ++-- src/model.jl | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/initializer.jl b/src/initializer.jl index 2f1342e9ac80..00e71b19c3cd 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -20,11 +20,11 @@ enough to derive a new type, and implement one or more of the following methods: Or, if full behavior customization is needed, override the following function -.. function:: call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. 
function:: init(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) =# abstract AbstractInitializer -function call{T<:AbstractInitializer}(self :: T, name :: Base.Symbol, array :: NDArray) +function init{T<:AbstractInitializer}(self :: T, name :: Base.Symbol, array :: NDArray) strname = string(name) if endswith(strname, "bias") _init_bias(self, name, array) diff --git a/src/model.jl b/src/model.jl index c208873e867f..93fc41ca0452 100644 --- a/src/model.jl +++ b/src/model.jl @@ -109,12 +109,12 @@ function init_model(self :: FeedForward, initializer :: AbstractInitializer; ove # initialize the contents of the parameters if !arg_defined || overwrite for (k,v) in self.arg_params - initializer(k, v) + init(initializer, k, v) end end if !aux_defined || overwrite for (k,v) in self.aux_params - initializer(k, v) + init(initializer, k, v) end end From 938b2362a78cad578c5dd1f80200d6d7c41f8d65 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 8 Feb 2016 11:43:54 -0500 Subject: [PATCH 289/630] update doc for initializer --- docs/api/initializer.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/api/initializer.rst b/docs/api/initializer.rst index 7123507f7ee9..6dbb81a001b3 100644 --- a/docs/api/initializer.rst +++ b/docs/api/initializer.rst @@ -21,7 +21,7 @@ enough to derive a new type, and implement one or more of the following methods: Or, if full behavior customization is needed, override the following function -.. function:: call(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +.. 
function:: init(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) From 06019e2c1c86588137666384bc420ec9632af5b4 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 8 Feb 2016 11:48:21 -0500 Subject: [PATCH 290/630] stricter type anotation for julia v0.5 --- test/unittest/ndarray.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 680d2f867263..2f7d4d9af6dd 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -192,7 +192,7 @@ function test_saveload() # save and load N arrays of different shape arrays = [rand_tensors(rand_dims()) for i = 1:n_arrays] - nd_arrays = [x[2] for x in arrays] + nd_arrays = NDArray[x[2] for x in arrays] mx.save(fname, nd_arrays) data = mx.load(fname, mx.NDArray) @test isa(data, Vector{mx.NDArray}) From 1c707a2147a077c536627b9f337515677eab1a73 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 8 Feb 2016 12:00:19 -0500 Subject: [PATCH 291/630] fix typo --- test/unittest/ndarray.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 2f7d4d9af6dd..67bff2959c8a 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -192,7 +192,7 @@ function test_saveload() # save and load N arrays of different shape arrays = [rand_tensors(rand_dims()) for i = 1:n_arrays] - nd_arrays = NDArray[x[2] for x in arrays] + nd_arrays = mx.NDArray[x[2] for x in arrays] mx.save(fname, nd_arrays) data = mx.load(fname, mx.NDArray) @test isa(data, Vector{mx.NDArray}) From ac709d12662484705c409911db00868509c6c9f4 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 8 Feb 2016 12:56:59 -0500 Subject: [PATCH 292/630] prepare for v0.0.8 --- NEWS.md | 5 +++++ docs/conf.py | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/NEWS.md b/NEWS.md index c0d365a7336e..f11136c8cd46 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,8 @@ +# v0.0.8 (2016.02.08) + +* 
Fix compatability with Julia v0.5. +* Fix seg-faults introduced by upstream API changes. + # v0.0.7 (2015.12.14) * Fix compatability with Julia v0.4.2 (@BigEpsilon) diff --git a/docs/conf.py b/docs/conf.py index 6293dcf80b3f..7454faa99eee 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -56,9 +56,9 @@ # built documents. # # The short X.Y version. -version = '0.0.7' +version = '0.0.8' # The full version, including alpha/beta/rc tags. -release = '0.0.7' +release = '0.0.8' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. From 0b387f7be4505262b1925a852c0806d0a02fc04f Mon Sep 17 00:00:00 2001 From: ultradian Date: Thu, 11 Feb 2016 23:12:08 -0800 Subject: [PATCH 293/630] add further information to install.rst Include information on updating the LD_LIBRARY_PATH which had me stumped for a while. Please feel free to reword for clarity. --- docs/user-guide/install.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/user-guide/install.rst b/docs/user-guide/install.rst index 43f882928d39..9609368ca1fd 100644 --- a/docs/user-guide/install.rst +++ b/docs/user-guide/install.rst @@ -52,3 +52,5 @@ Basically, MXNet.jl will search ``libmxnet.so`` or ``libmxnet.dll`` in the follo * ``$MXNET_HOME/lib``: customized libmxnet builds * ``Pkg.dir("MXNet")/deps/usr/lib``: automatic builds * Any system wide library search path + +Note that MXNet.jl will not find ``libmxnet.so`` even if it is on the path if a library it depends upon is missing from the LD_LIBRARY_PATH. Thus, if you are going to compile to add CUDA, the path to the CUDA libraries will have to be added to LD_LIBRARY_PATH. 
From f26ad9699df77315b4dbf932d3d103a0a52c5c48 Mon Sep 17 00:00:00 2001 From: ultradian Date: Wed, 17 Feb 2016 21:52:05 -0800 Subject: [PATCH 294/630] Update install.rst --- docs/user-guide/install.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/user-guide/install.rst b/docs/user-guide/install.rst index 9609368ca1fd..d53830a0cb37 100644 --- a/docs/user-guide/install.rst +++ b/docs/user-guide/install.rst @@ -53,4 +53,4 @@ Basically, MXNet.jl will search ``libmxnet.so`` or ``libmxnet.dll`` in the follo * ``Pkg.dir("MXNet")/deps/usr/lib``: automatic builds * Any system wide library search path -Note that MXNet.jl will not find ``libmxnet.so`` even if it is on the path if a library it depends upon is missing from the LD_LIBRARY_PATH. Thus, if you are going to compile to add CUDA, the path to the CUDA libraries will have to be added to LD_LIBRARY_PATH. +Note that MXNet.jl will not find ``libmxnet.so`` even if it is on one of the paths above if a library it depends upon is missing from the ``LD_LIBRARY_PATH``. Thus, if you are going to compile to add CUDA, the path to the CUDA libraries will have to be added to ``LD_LIBRARY_PATH``. From 7a5035773b5dbd7c1d20aeda6c94a5a4d65f3cdd Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Apr 2016 10:27:13 +0900 Subject: [PATCH 295/630] MXNet exposes a transpose function --- src/MXNet.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/MXNet.jl b/src/MXNet.jl index c5056320791c..16ad415efe52 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -9,7 +9,8 @@ module mx using Formatting # Functions from base that we can safely extend and that are defined by libmxnet. 
-import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm +import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm, + transpose include("base.jl") include("context.jl") From f8988374a0e7f384c487e285470742e117985216 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 8 Apr 2016 16:30:24 +0900 Subject: [PATCH 296/630] add function to get debug_str, similar to python --- src/executor.jl | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/src/executor.jl b/src/executor.jl index 5844a62e446c..4c72589ae966 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -195,3 +195,22 @@ function copy_params_from(self::Executor, arg_params::Dict{Base.Symbol,NDArray}, end end end + + +""" +Get a debug string about internal execution plan. + +Can be used to get an estimated about the memory cost. +```julia + net = ... # Symbol + dProvider = ... # DataProvider + exec = mx.simple_bind(net, mx.cpu(), data=size(dProvider.data_batch[1])) + dbg_str = mx.debug_str(exec) + println(split(ref, ['\n'])[end-2]) +``` +""" +function debug_str(self :: Executor) + s_ref = Ref{Cstring}() + @mxcall(:MXExecutorPrint, (MX_handle, Ptr{Cstring}), self.handle, s_ref) + bytestring(s_ref[]) +end From acd2a74684a518c0bafa85259362191cd917ba1b Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sun, 10 Apr 2016 20:47:44 +0900 Subject: [PATCH 297/630] output TempSpace allocation --- src/model.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/model.jl b/src/model.jl index 93fc41ca0452..b823a342b529 100644 --- a/src/model.jl +++ b/src/model.jl @@ -129,6 +129,8 @@ function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_sha # the predictor use only the first device self.pred_exec = simple_bind(self.arch, self.ctx[1]; grad_req=GRAD_NOP, data_shapes...) 
+ dbg_str = mx.debug_str(self.pred_exec) + info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[1])) copy_params_from(self.pred_exec, self.arg_params, self.aux_params) else # make sure the new setup is compatible with the existing one @@ -345,6 +347,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra data_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)] label_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)] train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=GRAD_WRITE, data_shapes..., label_shapes...) + dbg_str = mx.debug_str(train_execs[i]) + info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[i])) copy_params_from(train_execs[i], self.arg_params, self.aux_params) end From 8b8c9c2a961198ff602fe62c151a7f5e1d386ec6 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 14 Apr 2016 14:55:33 +0900 Subject: [PATCH 298/630] cleanup Accuracy --- src/metric.jl | 57 +++++++++++++++++++++------------------------------ 1 file changed, 23 insertions(+), 34 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 5bf14e52a840..053c549df4eb 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -51,47 +51,36 @@ type Accuracy <: AbstractEvalMetric Accuracy() = new(0.0, 0) end -""" -Implementation taken from findmax in Julia base. -Searches for the maximum value in p_dim of a. -I and n are values for the other dimensions. -""" -function _indmax(a, I, p_dim, n) - m = a[I..., 1, n] - mi = 1 - for i in 2:size(a, p_dim) - ai = a[I..., i, n] - if ai > m || m!=m - m = ai - mi = i - end - end - return mi -end - function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) @nd_as_jl ro=(label,pred) begin - if ndims(pred) > 2 # Multidimensional case - # Construct cartesian index - p_dim = ndims(pred)-1 - initial = tuple(fill(1,p_dim-1)...) - dims = size(pred, (1:p_dim-1)...) 
- crange = CartesianRange(CartesianIndex(initial), CartesianIndex(dims)) - - for sample in 1:size(label, ndims(label)) - for i in crange - l_i = sub2ind(dims, i.I...) - klass = _indmax(pred, i.I, p_dim, sample) - metric.acc_sum += (klass-1) == label[l_i, sample] - metric.n_sample += 1 + # Samples are stored in the last dimension + @assert size(label, ndims(label)) == size(pred, ndims(pred)) + + if ndims(pred) == 4 # Multidimensional case + # Reshape label to be of the same shape as pred. + # Except for the third dimension where the predictions are stored. + labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) + + for sample in 1:size(labels, 4) + for j in 1:size(labels, 2) + for i in 1:size(labels, 1) + label = labels[i, j, 1, sample] + klasses = sub(pred, i, j, :, sample) + klass = indmax(klasses) - 1 # Classes start at 0...k-1 + + metric.acc_sum += klass == label + metric.n_sample += 1 + end end end - else # 1-dimensional case + elseif ndims(pred) == 2 # 1-dimensional case for sample in 1:size(label, 1) - klass = indmax(pred[:, sample]) - metric.acc_sum += (klass-1) == label[sample] + klass = indmax(sub(pred, :, sample)) - 1 + metric.acc_sum += klass == label[sample] metric.n_sample += 1 end + else + error("Can't handle prediction with dimensions $(ndims(pred)).") end end end From 91821482c7612e88c25399aac94980c3593d9cab Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 14 Apr 2016 15:29:01 +0900 Subject: [PATCH 299/630] add average cross-entropy metric --- docs/api/metric.rst | 9 ++++++++ src/metric.jl | 56 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+) diff --git a/docs/api/metric.rst b/docs/api/metric.rst index db503d5e6849..966d682ba2b7 100644 --- a/docs/api/metric.rst +++ b/docs/api/metric.rst @@ -56,3 +56,12 @@ set. + +.. class:: ACE + + Averaged cross-entropy for classification. This also know als logloss. + + Calculated the averaged cross entropy for multi-dimentions output. 
+ + + diff --git a/src/metric.jl b/src/metric.jl index 053c549df4eb..a22794e9f158 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -144,3 +144,59 @@ function reset!(metric :: MSE) metric.mse_sum = 0.0 metric.n_sample = 0 end + +#=doc +.. class:: ACE + + Averaged cross-entropy for classification. This also know als logloss. + + Calculated the averaged cross entropy for multi-dimentions output. +=# +type ACE <: AbstractEvalMetric + ace_sum :: Float64 + n_sample :: Int + + ACE() = new(0.0, 0) +end + +function get(metric :: ACE) + return [(:ACE, - metric.ace_sum / metric.n_sample)] +end + +function reset!(metric :: ACE) + metric.ace_sum = 0.0 + metric.n_sample = 0 +end + +function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) + @nd_as_jl ro=(label,pred) begin + # Samples are stored in the last dimension + @assert size(label, ndims(label)) == size(pred, ndims(pred)) + @assert ndims(pred) == 4 + + labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) + for sample in 1:size(labels, 4) + for j in 1:size(labels, 2) + for i in 1:size(labels, 1) + label = labels[i, j, 1, sample] + + # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification + # Since we can only target labels right now this is the only thing we can do. 
+ target = Int(label) + 1 # klasses are 0...k-1 => julia indexing + p_k = pred[i, j, target, sample] + + metric.ace_sum += log(p_k) + metric.n_sample += 1 + end + end + end + end +end + +function update!(metric :: ACE, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + @assert length(labels) == length(preds) + for i = 1:length(labels) + _update_single_output(metric, labels[i], preds[i]) + end +end + From 3170c4eb22b30b1a4c3bfd6a21a42800a6061e5a Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 14 Apr 2016 15:29:20 +0900 Subject: [PATCH 300/630] update documentation --- docs/api/io.rst | 4 ++-- docs/api/ndarray.rst | 41 +++++++++++++++++++++++++++++++++++++ docs/api/symbolic-node.rst | 42 +++++++++++++++++++++++++++++++++++--- 3 files changed, 82 insertions(+), 5 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index e5cb3ffb32e5..8ec67cf6d073 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -351,7 +351,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -514,7 +514,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 + :type prefetch_buffer: long (non-negative), optional, default=4 :return: the constructed :class:`MXDataProvider`. diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index be1c74b80bea..b72f0faa5cfe 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -687,6 +687,28 @@ Public APIs + +.. function:: sum_mid_internal(...) + + Take sum on medium dimension of the 3D src. + + :param src: Source input to the function + :type src: NDArray + + + + + +.. function:: transpose(...) 
+ + Transpose the input matrix and return a new one + + :param src: Source input to the function + :type src: NDArray + + + + Internal APIs ^^^^^^^^^^^^^ @@ -694,6 +716,25 @@ Internal APIs Document and signatures for internal API functions might be incomplete. +.. function:: _broadcast(...) + + Broadcast array in the given axis to the given size + + :param src: source ndarray + :type src: NDArray + + + :param axis: axis to broadcast + :type axis: int + + + :param size: size of broadcast + :type size: int + + + + + .. function:: _copyto(...) diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index a390b82dd09d..e303afba21dc 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -346,11 +346,15 @@ Public APIs .. function:: Crop(...) - Crop the 2nd and 3rd dim of input data, with the corresponding size of w_h or with width and height of the second input symbol + Crop the 2nd and 3rd dim of input data, with the corresponding size of h_w or with width and height of the second input symbol, i.e., with one input, we need h_w to specify the crop height and width, otherwise the second input symbol's size will be used This function support variable length positional :class:`SymbolicNode` inputs. - :param num_args: Number of inputs for crop, if equals one, then we will use the h_wfor crop heihgt and width, else if equals two, then we will use the heightand width of the second input symbol, we name crop_like here + :param data: Tensor or List of Tensors, the second input will be used as crop_like shape reference + :type data: SymbolicNode or SymbolicNode[] + + + :param num_args: Number of inputs for crop, if equals one, then we will use the h_wfor crop height and width, else if equals two, then we will use the heightand width of the second input symbol, we name crop_like here :type num_args: int, required @@ -374,6 +378,34 @@ Public APIs +.. function:: CuDNNBatchNorm(...) + + Apply batch normalization to input. 
+ + :param data: Input data to batch normalization + :type data: SymbolicNode + + + :param eps: Epsilon to prevent div 0 + :type eps: float, optional, default=0.001 + + + :param momentum: Momentum for moving average + :type momentum: float, optional, default=0.9 + + + :param fix_gamma: Fix gamma while training + :type fix_gamma: boolean, optional, default=False + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: SymbolicNode. + + + + + .. function:: Deconvolution(...) Apply deconvolution to input then add a bias. @@ -759,11 +791,15 @@ Public APIs .. function:: SliceChannel(...) - Slice channel into many outputs with equally divided channel + Slice input equally along specified axis :param num_outputs: Number of outputs to be sliced. :type num_outputs: int, required + + :param axis: Dimension along which to slice. + :type axis: int, optional, default='1' + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. From d85549b86e259db6859ccc96da4e0053653d756a Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 16 Apr 2016 23:20:14 +0900 Subject: [PATCH 301/630] adds MultiACE or ACE per class --- src/metric.jl | 57 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/src/metric.jl b/src/metric.jl index a22794e9f158..7916d45b639c 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -200,3 +200,60 @@ function update!(metric :: ACE, labels :: Vector{NDArray}, preds :: Vector{NDArr end end +#=doc +.. class:: MultiACE + + Averaged cross-entropy for classification. This also know als logloss. + This variant keeps track of the different losses per class. + + Calculated the averaged cross entropy for multi-dimentions output. 
+=# +type MultiACE <: AbstractEvalMetric + aces :: Vector{Float64} + counts :: Vector{Int} + + MultiACE(nclasses) = new(Base.zeros(nclasses), Base.zeros(Int, nclasses)) +end + +function get(metric :: MultiACE) + aces = [(symbol("ACE_$(i-0)"), - metric.aces[i] / metric.counts[i]) for i in 1:length(metric.aces)] + push!(aces, (:ACE, - Base.sum(metric.aces) / Base.sum(metric.counts))) + return aces +end + +function reset!(metric :: MultiACE) + metric.aces = Base.zero(metric.aces) + metric.counts = Base.zero(metric.counts) +end + +function _update_single_output(metric :: MultiACE, label :: NDArray, pred :: NDArray) + @nd_as_jl ro=(label,pred) begin + # Samples are stored in the last dimension + @assert size(label, ndims(label)) == size(pred, ndims(pred)) + @assert ndims(pred) == 4 + + labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) + for sample in 1:size(labels, 4) + for j in 1:size(labels, 2) + for i in 1:size(labels, 1) + label = labels[i, j, 1, sample] + + # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification + # Since we can only target labels right now this is the only thing we can do. 
+ target = Int(label) + 1 # klasses are 0...k-1 => julia indexing + p_k = pred[i, j, target, sample] + + metric.aces[target] += log(p_k) + metric.counts[target] += 1 + end + end + end + end +end + +function update!(metric :: MultiACE, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + @assert length(labels) == length(preds) + for i = 1:length(labels) + _update_single_output(metric, labels[i], preds[i]) + end +end From 1a73ddae6ec17e90b2ef375f173ecb787f2888c1 Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Mon, 18 Apr 2016 10:48:49 -0700 Subject: [PATCH 302/630] Use openblas from Julia, instead of installing yet another BLAS library --- deps/build.jl | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index cb9f930c096d..177765bfe869 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -6,7 +6,7 @@ libmxnet_detected = false if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") info("Trying to load existing libmxnet...") - lib = Libdl.find_library(["libmxnet.so","libmxnet.dll"], ["$(ENV["MXNET_HOME"])/lib"]) + lib = Libdl.find_library(["libmxnet"], ["$(ENV["MXNET_HOME"])/lib"]) if !isempty(lib) info("Existing libmxnet detected at $lib, skip building...") libmxnet_detected = true @@ -28,20 +28,11 @@ if !libmxnet_detected error("Automatic building libxmnet on Windows is currently not supported yet.") end - #-------------------------------------------------------------------------------- - # Install dependencies, blas - @linux_only begin - blas = library_dependency("cblas", aliases=["libcblas"]) - provides(AptGet, "libatlas-base-dev", blas) - provides(Pacman, "blas", blas) - provides(Yum, "blas-devel", blas) - - @BinDeps.install Dict(:blas => :blas) - end + openblas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) #-------------------------------------------------------------------------------- # Build libmxnet - mxnet = library_dependency("mxnet", 
aliases=["libmxnet.so"]) + mxnet = library_dependency("mxnet", aliases=["libmxnet"]) _prefix = joinpath(BinDeps.depsdir(mxnet), "usr") _srcdir = joinpath(BinDeps.depsdir(mxnet),"src") @@ -60,7 +51,7 @@ if !libmxnet_detected `cp make/config.mk config.mk` @osx_only `cp make/osx.mk config.mk` `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` - `make` + `make USE_BLAS=openblas MSHADOW_LDFLAGS=-L$(basename(openblas_path)) -j` `cp lib/libmxnet.so $_libdir` end) end From 0c5086ae088899136cb68b5aef0fa6f3ed163ca1 Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Mon, 18 Apr 2016 12:17:28 -0700 Subject: [PATCH 303/630] Simplify linking --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 177765bfe869..7e86297ede89 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -51,7 +51,7 @@ if !libmxnet_detected `cp make/config.mk config.mk` @osx_only `cp make/osx.mk config.mk` `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` - `make USE_BLAS=openblas MSHADOW_LDFLAGS=-L$(basename(openblas_path)) -j` + `make USE_BLAS=openblas MSHADOW_LDFLAGS="$openblas_path" -j` `cp lib/libmxnet.so $_libdir` end) end From 7be95f60df18f13ebb8557c1d2977030035022ca Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Mon, 18 Apr 2016 12:28:39 -0700 Subject: [PATCH 304/630] Bundle generic cblas.h header for users that don't have libblas-dev installed already --- deps/build.jl | 1 + deps/cblas.h | 580 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 581 insertions(+) create mode 100644 deps/cblas.h diff --git a/deps/build.jl b/deps/build.jl index 7e86297ede89..5a4073537bf2 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -51,6 +51,7 @@ if !libmxnet_detected `cp make/config.mk config.mk` @osx_only `cp make/osx.mk config.mk` `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` + `cp ../../cblas.h include/cblas.h` `make USE_BLAS=openblas MSHADOW_LDFLAGS="$openblas_path" -j` `cp lib/libmxnet.so $_libdir` end) diff 
--git a/deps/cblas.h b/deps/cblas.h new file mode 100644 index 000000000000..941b570c78de --- /dev/null +++ b/deps/cblas.h @@ -0,0 +1,580 @@ +#ifndef CBLAS_H +#define CBLAS_H +#include + +// This header file copied from the Netlib libblas distribution package + +/* Allow the use in C++ code. */ +#ifdef __cplusplus +extern "C" +{ +#endif + +/* + * Enumerated and derived types + */ +#define CBLAS_INDEX size_t /* this may vary between platforms */ + +enum CBLAS_ORDER {CblasRowMajor=101, CblasColMajor=102}; +enum CBLAS_TRANSPOSE {CblasNoTrans=111, CblasTrans=112, CblasConjTrans=113}; +enum CBLAS_UPLO {CblasUpper=121, CblasLower=122}; +enum CBLAS_DIAG {CblasNonUnit=131, CblasUnit=132}; +enum CBLAS_SIDE {CblasLeft=141, CblasRight=142}; + +/* + * =========================================================================== + * Prototypes for level 1 BLAS functions (complex are recast as routines) + * =========================================================================== + */ +float cblas_sdsdot(const int N, const float alpha, const float *X, + const int incX, const float *Y, const int incY); +double cblas_dsdot(const int N, const float *X, const int incX, const float *Y, + const int incY); +float cblas_sdot(const int N, const float *X, const int incX, + const float *Y, const int incY); +double cblas_ddot(const int N, const double *X, const int incX, + const double *Y, const int incY); + +/* + * Functions having prefixes Z and C only + */ +void cblas_cdotu_sub(const int N, const void *X, const int incX, + const void *Y, const int incY, void *dotu); +void cblas_cdotc_sub(const int N, const void *X, const int incX, + const void *Y, const int incY, void *dotc); + +void cblas_zdotu_sub(const int N, const void *X, const int incX, + const void *Y, const int incY, void *dotu); +void cblas_zdotc_sub(const int N, const void *X, const int incX, + const void *Y, const int incY, void *dotc); + + +/* + * Functions having prefixes S D SC DZ + */ +float cblas_snrm2(const int N, const 
float *X, const int incX); +float cblas_sasum(const int N, const float *X, const int incX); + +double cblas_dnrm2(const int N, const double *X, const int incX); +double cblas_dasum(const int N, const double *X, const int incX); + +float cblas_scnrm2(const int N, const void *X, const int incX); +float cblas_scasum(const int N, const void *X, const int incX); + +double cblas_dznrm2(const int N, const void *X, const int incX); +double cblas_dzasum(const int N, const void *X, const int incX); + + +/* + * Functions having standard 4 prefixes (S D C Z) + */ +CBLAS_INDEX cblas_isamax(const int N, const float *X, const int incX); +CBLAS_INDEX cblas_idamax(const int N, const double *X, const int incX); +CBLAS_INDEX cblas_icamax(const int N, const void *X, const int incX); +CBLAS_INDEX cblas_izamax(const int N, const void *X, const int incX); + +/* + * =========================================================================== + * Prototypes for level 1 BLAS routines + * =========================================================================== + */ + +/* + * Routines with standard 4 prefixes (s, d, c, z) + */ +void cblas_sswap(const int N, float *X, const int incX, + float *Y, const int incY); +void cblas_scopy(const int N, const float *X, const int incX, + float *Y, const int incY); +void cblas_saxpy(const int N, const float alpha, const float *X, + const int incX, float *Y, const int incY); + +void cblas_dswap(const int N, double *X, const int incX, + double *Y, const int incY); +void cblas_dcopy(const int N, const double *X, const int incX, + double *Y, const int incY); +void cblas_daxpy(const int N, const double alpha, const double *X, + const int incX, double *Y, const int incY); + +void cblas_cswap(const int N, void *X, const int incX, + void *Y, const int incY); +void cblas_ccopy(const int N, const void *X, const int incX, + void *Y, const int incY); +void cblas_caxpy(const int N, const void *alpha, const void *X, + const int incX, void *Y, const int incY); + +void 
cblas_zswap(const int N, void *X, const int incX, + void *Y, const int incY); +void cblas_zcopy(const int N, const void *X, const int incX, + void *Y, const int incY); +void cblas_zaxpy(const int N, const void *alpha, const void *X, + const int incX, void *Y, const int incY); + + +/* + * Routines with S and D prefix only + */ +void cblas_srotg(float *a, float *b, float *c, float *s); +void cblas_srotmg(float *d1, float *d2, float *b1, const float b2, float *P); +void cblas_srot(const int N, float *X, const int incX, + float *Y, const int incY, const float c, const float s); +void cblas_srotm(const int N, float *X, const int incX, + float *Y, const int incY, const float *P); + +void cblas_drotg(double *a, double *b, double *c, double *s); +void cblas_drotmg(double *d1, double *d2, double *b1, const double b2, double *P); +void cblas_drot(const int N, double *X, const int incX, + double *Y, const int incY, const double c, const double s); +void cblas_drotm(const int N, double *X, const int incX, + double *Y, const int incY, const double *P); + + +/* + * Routines with S D C Z CS and ZD prefixes + */ +void cblas_sscal(const int N, const float alpha, float *X, const int incX); +void cblas_dscal(const int N, const double alpha, double *X, const int incX); +void cblas_cscal(const int N, const void *alpha, void *X, const int incX); +void cblas_zscal(const int N, const void *alpha, void *X, const int incX); +void cblas_csscal(const int N, const float alpha, void *X, const int incX); +void cblas_zdscal(const int N, const double alpha, void *X, const int incX); + +/* + * =========================================================================== + * Prototypes for level 2 BLAS + * =========================================================================== + */ + +/* + * Routines with standard 4 prefixes (S, D, C, Z) + */ +void cblas_sgemv(const enum CBLAS_ORDER order, + const enum CBLAS_TRANSPOSE TransA, const int M, const int N, + const float alpha, const float *A, const int 
lda, + const float *X, const int incX, const float beta, + float *Y, const int incY); +void cblas_sgbmv(const enum CBLAS_ORDER order, + const enum CBLAS_TRANSPOSE TransA, const int M, const int N, + const int KL, const int KU, const float alpha, + const float *A, const int lda, const float *X, + const int incX, const float beta, float *Y, const int incY); +void cblas_strmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const float *A, const int lda, + float *X, const int incX); +void cblas_stbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const int K, const float *A, const int lda, + float *X, const int incX); +void cblas_stpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const float *Ap, float *X, const int incX); +void cblas_strsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const float *A, const int lda, float *X, + const int incX); +void cblas_stbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const int K, const float *A, const int lda, + float *X, const int incX); +void cblas_stpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const float *Ap, float *X, const int incX); + +void cblas_dgemv(const enum CBLAS_ORDER order, + const enum CBLAS_TRANSPOSE TransA, const int M, const int N, + const double alpha, const double *A, const int lda, + const double *X, const int incX, const double beta, + double *Y, const int incY); +void cblas_dgbmv(const enum CBLAS_ORDER order, + const enum CBLAS_TRANSPOSE TransA, const int M, 
const int N, + const int KL, const int KU, const double alpha, + const double *A, const int lda, const double *X, + const int incX, const double beta, double *Y, const int incY); +void cblas_dtrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const double *A, const int lda, + double *X, const int incX); +void cblas_dtbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const int K, const double *A, const int lda, + double *X, const int incX); +void cblas_dtpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const double *Ap, double *X, const int incX); +void cblas_dtrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const double *A, const int lda, double *X, + const int incX); +void cblas_dtbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const int K, const double *A, const int lda, + double *X, const int incX); +void cblas_dtpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const double *Ap, double *X, const int incX); + +void cblas_cgemv(const enum CBLAS_ORDER order, + const enum CBLAS_TRANSPOSE TransA, const int M, const int N, + const void *alpha, const void *A, const int lda, + const void *X, const int incX, const void *beta, + void *Y, const int incY); +void cblas_cgbmv(const enum CBLAS_ORDER order, + const enum CBLAS_TRANSPOSE TransA, const int M, const int N, + const int KL, const int KU, const void *alpha, + const void *A, const int lda, const void *X, + const int incX, const void *beta, void *Y, const int incY); +void 
cblas_ctrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const void *A, const int lda, + void *X, const int incX); +void cblas_ctbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const int K, const void *A, const int lda, + void *X, const int incX); +void cblas_ctpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const void *Ap, void *X, const int incX); +void cblas_ctrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const void *A, const int lda, void *X, + const int incX); +void cblas_ctbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const int K, const void *A, const int lda, + void *X, const int incX); +void cblas_ctpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const void *Ap, void *X, const int incX); + +void cblas_zgemv(const enum CBLAS_ORDER order, + const enum CBLAS_TRANSPOSE TransA, const int M, const int N, + const void *alpha, const void *A, const int lda, + const void *X, const int incX, const void *beta, + void *Y, const int incY); +void cblas_zgbmv(const enum CBLAS_ORDER order, + const enum CBLAS_TRANSPOSE TransA, const int M, const int N, + const int KL, const int KU, const void *alpha, + const void *A, const int lda, const void *X, + const int incX, const void *beta, void *Y, const int incY); +void cblas_ztrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const void *A, const int lda, + void *X, const int incX); 
+void cblas_ztbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const int K, const void *A, const int lda, + void *X, const int incX); +void cblas_ztpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const void *Ap, void *X, const int incX); +void cblas_ztrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const void *A, const int lda, void *X, + const int incX); +void cblas_ztbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const int K, const void *A, const int lda, + void *X, const int incX); +void cblas_ztpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, + const int N, const void *Ap, void *X, const int incX); + + +/* + * Routines with S and D prefixes only + */ +void cblas_ssymv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const float alpha, const float *A, + const int lda, const float *X, const int incX, + const float beta, float *Y, const int incY); +void cblas_ssbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const int K, const float alpha, const float *A, + const int lda, const float *X, const int incX, + const float beta, float *Y, const int incY); +void cblas_sspmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const float alpha, const float *Ap, + const float *X, const int incX, + const float beta, float *Y, const int incY); +void cblas_sger(const enum CBLAS_ORDER order, const int M, const int N, + const float alpha, const float *X, const int incX, + const float *Y, const int incY, float *A, const int lda); +void cblas_ssyr(const enum 
CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const float alpha, const float *X, + const int incX, float *A, const int lda); +void cblas_sspr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const float alpha, const float *X, + const int incX, float *Ap); +void cblas_ssyr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const float alpha, const float *X, + const int incX, const float *Y, const int incY, float *A, + const int lda); +void cblas_sspr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const float alpha, const float *X, + const int incX, const float *Y, const int incY, float *A); + +void cblas_dsymv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const double alpha, const double *A, + const int lda, const double *X, const int incX, + const double beta, double *Y, const int incY); +void cblas_dsbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const int K, const double alpha, const double *A, + const int lda, const double *X, const int incX, + const double beta, double *Y, const int incY); +void cblas_dspmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const double alpha, const double *Ap, + const double *X, const int incX, + const double beta, double *Y, const int incY); +void cblas_dger(const enum CBLAS_ORDER order, const int M, const int N, + const double alpha, const double *X, const int incX, + const double *Y, const int incY, double *A, const int lda); +void cblas_dsyr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const double alpha, const double *X, + const int incX, double *A, const int lda); +void cblas_dspr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const double alpha, const double *X, + const int incX, double *Ap); +void cblas_dsyr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const double alpha, 
const double *X, + const int incX, const double *Y, const int incY, double *A, + const int lda); +void cblas_dspr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const double alpha, const double *X, + const int incX, const double *Y, const int incY, double *A); + + +/* + * Routines with C and Z prefixes only + */ +void cblas_chemv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const void *alpha, const void *A, + const int lda, const void *X, const int incX, + const void *beta, void *Y, const int incY); +void cblas_chbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const int K, const void *alpha, const void *A, + const int lda, const void *X, const int incX, + const void *beta, void *Y, const int incY); +void cblas_chpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const void *alpha, const void *Ap, + const void *X, const int incX, + const void *beta, void *Y, const int incY); +void cblas_cgeru(const enum CBLAS_ORDER order, const int M, const int N, + const void *alpha, const void *X, const int incX, + const void *Y, const int incY, void *A, const int lda); +void cblas_cgerc(const enum CBLAS_ORDER order, const int M, const int N, + const void *alpha, const void *X, const int incX, + const void *Y, const int incY, void *A, const int lda); +void cblas_cher(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const float alpha, const void *X, const int incX, + void *A, const int lda); +void cblas_chpr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const float alpha, const void *X, + const int incX, void *A); +void cblas_cher2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N, + const void *alpha, const void *X, const int incX, + const void *Y, const int incY, void *A, const int lda); +void cblas_chpr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N, + const void *alpha, const 
void *X, const int incX, + const void *Y, const int incY, void *Ap); + +void cblas_zhemv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const void *alpha, const void *A, + const int lda, const void *X, const int incX, + const void *beta, void *Y, const int incY); +void cblas_zhbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const int K, const void *alpha, const void *A, + const int lda, const void *X, const int incX, + const void *beta, void *Y, const int incY); +void cblas_zhpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const void *alpha, const void *Ap, + const void *X, const int incX, + const void *beta, void *Y, const int incY); +void cblas_zgeru(const enum CBLAS_ORDER order, const int M, const int N, + const void *alpha, const void *X, const int incX, + const void *Y, const int incY, void *A, const int lda); +void cblas_zgerc(const enum CBLAS_ORDER order, const int M, const int N, + const void *alpha, const void *X, const int incX, + const void *Y, const int incY, void *A, const int lda); +void cblas_zher(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const double alpha, const void *X, const int incX, + void *A, const int lda); +void cblas_zhpr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, + const int N, const double alpha, const void *X, + const int incX, void *A); +void cblas_zher2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N, + const void *alpha, const void *X, const int incX, + const void *Y, const int incY, void *A, const int lda); +void cblas_zhpr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N, + const void *alpha, const void *X, const int incX, + const void *Y, const int incY, void *Ap); + +/* + * =========================================================================== + * Prototypes for level 3 BLAS + * =========================================================================== + 
*/ + +/* + * Routines with standard 4 prefixes (S, D, C, Z) + */ +void cblas_sgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_TRANSPOSE TransB, const int M, const int N, + const int K, const float alpha, const float *A, + const int lda, const float *B, const int ldb, + const float beta, float *C, const int ldc); +void cblas_ssymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const int M, const int N, + const float alpha, const float *A, const int lda, + const float *B, const int ldb, const float beta, + float *C, const int ldc); +void cblas_ssyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const float alpha, const float *A, const int lda, + const float beta, float *C, const int ldc); +void cblas_ssyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const float alpha, const float *A, const int lda, + const float *B, const int ldb, const float beta, + float *C, const int ldc); +void cblas_strmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_DIAG Diag, const int M, const int N, + const float alpha, const float *A, const int lda, + float *B, const int ldb); +void cblas_strsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_DIAG Diag, const int M, const int N, + const float alpha, const float *A, const int lda, + float *B, const int ldb); + +void cblas_dgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_TRANSPOSE TransB, const int M, const int N, + const int K, const double alpha, const double *A, + const int lda, const double *B, const int ldb, + const double beta, double *C, const int ldc); +void cblas_dsymm(const 
enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const int M, const int N, + const double alpha, const double *A, const int lda, + const double *B, const int ldb, const double beta, + double *C, const int ldc); +void cblas_dsyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const double alpha, const double *A, const int lda, + const double beta, double *C, const int ldc); +void cblas_dsyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const double alpha, const double *A, const int lda, + const double *B, const int ldb, const double beta, + double *C, const int ldc); +void cblas_dtrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_DIAG Diag, const int M, const int N, + const double alpha, const double *A, const int lda, + double *B, const int ldb); +void cblas_dtrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_DIAG Diag, const int M, const int N, + const double alpha, const double *A, const int lda, + double *B, const int ldb); + +void cblas_cgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_TRANSPOSE TransB, const int M, const int N, + const int K, const void *alpha, const void *A, + const int lda, const void *B, const int ldb, + const void *beta, void *C, const int ldc); +void cblas_csymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const int M, const int N, + const void *alpha, const void *A, const int lda, + const void *B, const int ldb, const void *beta, + void *C, const int ldc); +void cblas_csyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const 
int K, + const void *alpha, const void *A, const int lda, + const void *beta, void *C, const int ldc); +void cblas_csyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const void *alpha, const void *A, const int lda, + const void *B, const int ldb, const void *beta, + void *C, const int ldc); +void cblas_ctrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_DIAG Diag, const int M, const int N, + const void *alpha, const void *A, const int lda, + void *B, const int ldb); +void cblas_ctrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_DIAG Diag, const int M, const int N, + const void *alpha, const void *A, const int lda, + void *B, const int ldb); + +void cblas_zgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_TRANSPOSE TransB, const int M, const int N, + const int K, const void *alpha, const void *A, + const int lda, const void *B, const int ldb, + const void *beta, void *C, const int ldc); +void cblas_zsymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const int M, const int N, + const void *alpha, const void *A, const int lda, + const void *B, const int ldb, const void *beta, + void *C, const int ldc); +void cblas_zsyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const void *alpha, const void *A, const int lda, + const void *beta, void *C, const int ldc); +void cblas_zsyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const void *alpha, const void *A, const int lda, + const void *B, const int ldb, const void *beta, + void *C, const int ldc); +void cblas_ztrmm(const enum 
CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_DIAG Diag, const int M, const int N, + const void *alpha, const void *A, const int lda, + void *B, const int ldb); +void cblas_ztrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, + const enum CBLAS_DIAG Diag, const int M, const int N, + const void *alpha, const void *A, const int lda, + void *B, const int ldb); + + +/* + * Routines with prefixes C and Z only + */ +void cblas_chemm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const int M, const int N, + const void *alpha, const void *A, const int lda, + const void *B, const int ldb, const void *beta, + void *C, const int ldc); +void cblas_cherk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const float alpha, const void *A, const int lda, + const float beta, void *C, const int ldc); +void cblas_cher2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const void *alpha, const void *A, const int lda, + const void *B, const int ldb, const float beta, + void *C, const int ldc); + +void cblas_zhemm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, + const enum CBLAS_UPLO Uplo, const int M, const int N, + const void *alpha, const void *A, const int lda, + const void *B, const int ldb, const void *beta, + void *C, const int ldc); +void cblas_zherk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const double alpha, const void *A, const int lda, + const double beta, void *C, const int ldc); +void cblas_zher2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, + const enum CBLAS_TRANSPOSE Trans, const int N, const int K, + const void *alpha, const 
void *A, const int lda, + const void *B, const int ldb, const double beta, + void *C, const int ldc); + +void cblas_xerbla(int p, const char *rout, const char *form, ...); + +#ifdef __cplusplus +} +#endif + +#endif From 0ec6dca56a70a4399de369973b29e14f74bd98df Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Mon, 18 Apr 2016 12:35:56 -0700 Subject: [PATCH 305/630] Oh, it really does call itself `libmxnet.so` on non-linux platforms too --- deps/build.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 5a4073537bf2..ed77726f7051 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -6,7 +6,7 @@ libmxnet_detected = false if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") info("Trying to load existing libmxnet...") - lib = Libdl.find_library(["libmxnet"], ["$(ENV["MXNET_HOME"])/lib"]) + lib = Libdl.find_library(["libmxnet", "libmxnet.so"], ["$(ENV["MXNET_HOME"])/lib"]) if !isempty(lib) info("Existing libmxnet detected at $lib, skip building...") libmxnet_detected = true @@ -32,7 +32,7 @@ if !libmxnet_detected #-------------------------------------------------------------------------------- # Build libmxnet - mxnet = library_dependency("mxnet", aliases=["libmxnet"]) + mxnet = library_dependency("mxnet", aliases=["libmxnet", "libmxnet.so"]) _prefix = joinpath(BinDeps.depsdir(mxnet), "usr") _srcdir = joinpath(BinDeps.depsdir(mxnet),"src") From 60c16e22840ec5cac8f2df0e5049326034994e44 Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Mon, 18 Apr 2016 14:42:50 -0700 Subject: [PATCH 306/630] Cut down cblas.h to only the stuff we need, make this work on ILP64 --- deps/build.jl | 6 + deps/cblas.h | 605 +++++--------------------------------------------- 2 files changed, 58 insertions(+), 553 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index ed77726f7051..bddecb135dde 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -30,6 +30,11 @@ if !libmxnet_detected openblas_path = 
Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) + ilp64 = "" + if Base.blas_vendor() == :openblas64 + ilp64 = "-DINTERFACE64" + end + #-------------------------------------------------------------------------------- # Build libmxnet mxnet = library_dependency("mxnet", aliases=["libmxnet", "libmxnet.so"]) @@ -51,6 +56,7 @@ if !libmxnet_detected `cp make/config.mk config.mk` @osx_only `cp make/osx.mk config.mk` `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` + `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` `cp ../../cblas.h include/cblas.h` `make USE_BLAS=openblas MSHADOW_LDFLAGS="$openblas_path" -j` `cp lib/libmxnet.so $_libdir` diff --git a/deps/cblas.h b/deps/cblas.h index 941b570c78de..c08fb5f6af9d 100644 --- a/deps/cblas.h +++ b/deps/cblas.h @@ -1,580 +1,79 @@ #ifndef CBLAS_H #define CBLAS_H -#include - -// This header file copied from the Netlib libblas distribution package - -/* Allow the use in C++ code. */ -#ifdef __cplusplus -extern "C" -{ -#endif /* - * Enumerated and derived types - */ -#define CBLAS_INDEX size_t /* this may vary between platforms */ - -enum CBLAS_ORDER {CblasRowMajor=101, CblasColMajor=102}; -enum CBLAS_TRANSPOSE {CblasNoTrans=111, CblasTrans=112, CblasConjTrans=113}; -enum CBLAS_UPLO {CblasUpper=121, CblasLower=122}; -enum CBLAS_DIAG {CblasNonUnit=131, CblasUnit=132}; -enum CBLAS_SIDE {CblasLeft=141, CblasRight=142}; - -/* - * =========================================================================== - * Prototypes for level 1 BLAS functions (complex are recast as routines) - * =========================================================================== - */ -float cblas_sdsdot(const int N, const float alpha, const float *X, - const int incX, const float *Y, const int incY); -double cblas_dsdot(const int N, const float *X, const int incX, const float *Y, - const int incY); -float cblas_sdot(const int N, const float *X, const int incX, - const float *Y, const int incY); -double 
cblas_ddot(const int N, const double *X, const int incX, - const double *Y, const int incY); - -/* - * Functions having prefixes Z and C only - */ -void cblas_cdotu_sub(const int N, const void *X, const int incX, - const void *Y, const int incY, void *dotu); -void cblas_cdotc_sub(const int N, const void *X, const int incX, - const void *Y, const int incY, void *dotc); - -void cblas_zdotu_sub(const int N, const void *X, const int incX, - const void *Y, const int incY, void *dotu); -void cblas_zdotc_sub(const int N, const void *X, const int incX, - const void *Y, const int incY, void *dotc); - - -/* - * Functions having prefixes S D SC DZ - */ -float cblas_snrm2(const int N, const float *X, const int incX); -float cblas_sasum(const int N, const float *X, const int incX); - -double cblas_dnrm2(const int N, const double *X, const int incX); -double cblas_dasum(const int N, const double *X, const int incX); - -float cblas_scnrm2(const int N, const void *X, const int incX); -float cblas_scasum(const int N, const void *X, const int incX); - -double cblas_dznrm2(const int N, const void *X, const int incX); -double cblas_dzasum(const int N, const void *X, const int incX); - - -/* - * Functions having standard 4 prefixes (S D C Z) - */ -CBLAS_INDEX cblas_isamax(const int N, const float *X, const int incX); -CBLAS_INDEX cblas_idamax(const int N, const double *X, const int incX); -CBLAS_INDEX cblas_icamax(const int N, const void *X, const int incX); -CBLAS_INDEX cblas_izamax(const int N, const void *X, const int incX); - -/* - * =========================================================================== - * Prototypes for level 1 BLAS routines - * =========================================================================== - */ - -/* - * Routines with standard 4 prefixes (s, d, c, z) + * This file modified from the OpenBLAS repository, */ -void cblas_sswap(const int N, float *X, const int incX, - float *Y, const int incY); -void cblas_scopy(const int N, const float *X, const 
int incX, - float *Y, const int incY); -void cblas_saxpy(const int N, const float alpha, const float *X, - const int incX, float *Y, const int incY); -void cblas_dswap(const int N, double *X, const int incX, - double *Y, const int incY); -void cblas_dcopy(const int N, const double *X, const int incX, - double *Y, const int incY); -void cblas_daxpy(const int N, const double alpha, const double *X, - const int incX, double *Y, const int incY); - -void cblas_cswap(const int N, void *X, const int incX, - void *Y, const int incY); -void cblas_ccopy(const int N, const void *X, const int incX, - void *Y, const int incY); -void cblas_caxpy(const int N, const void *alpha, const void *X, - const int incX, void *Y, const int incY); - -void cblas_zswap(const int N, void *X, const int incX, - void *Y, const int incY); -void cblas_zcopy(const int N, const void *X, const int incX, - void *Y, const int incY); -void cblas_zaxpy(const int N, const void *alpha, const void *X, - const int incX, void *Y, const int incY); - - -/* - * Routines with S and D prefix only - */ -void cblas_srotg(float *a, float *b, float *c, float *s); -void cblas_srotmg(float *d1, float *d2, float *b1, const float b2, float *P); -void cblas_srot(const int N, float *X, const int incX, - float *Y, const int incY, const float c, const float s); -void cblas_srotm(const int N, float *X, const int incX, - float *Y, const int incY, const float *P); - -void cblas_drotg(double *a, double *b, double *c, double *s); -void cblas_drotmg(double *d1, double *d2, double *b1, const double b2, double *P); -void cblas_drot(const int N, double *X, const int incX, - double *Y, const int incY, const double c, const double s); -void cblas_drotm(const int N, double *X, const int incX, - double *Y, const int incY, const double *P); +#include +#ifdef __cplusplus +extern "C" { + /* Assume C declarations for C++ */ +#endif /* __cplusplus */ -/* - * Routines with S D C Z CS and ZD prefixes - */ -void cblas_sscal(const int N, const float 
alpha, float *X, const int incX); -void cblas_dscal(const int N, const double alpha, double *X, const int incX); -void cblas_cscal(const int N, const void *alpha, void *X, const int incX); -void cblas_zscal(const int N, const void *alpha, void *X, const int incX); -void cblas_csscal(const int N, const float alpha, void *X, const int incX); -void cblas_zdscal(const int N, const double alpha, void *X, const int incX); /* - * =========================================================================== - * Prototypes for level 2 BLAS - * =========================================================================== - */ - -/* - * Routines with standard 4 prefixes (S, D, C, Z) - */ -void cblas_sgemv(const enum CBLAS_ORDER order, - const enum CBLAS_TRANSPOSE TransA, const int M, const int N, - const float alpha, const float *A, const int lda, - const float *X, const int incX, const float beta, - float *Y, const int incY); -void cblas_sgbmv(const enum CBLAS_ORDER order, - const enum CBLAS_TRANSPOSE TransA, const int M, const int N, - const int KL, const int KU, const float alpha, - const float *A, const int lda, const float *X, - const int incX, const float beta, float *Y, const int incY); -void cblas_strmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const float *A, const int lda, - float *X, const int incX); -void cblas_stbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const int K, const float *A, const int lda, - float *X, const int incX); -void cblas_stpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const float *Ap, float *X, const int incX); -void cblas_strsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, 
const float *A, const int lda, float *X, - const int incX); -void cblas_stbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const int K, const float *A, const int lda, - float *X, const int incX); -void cblas_stpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const float *Ap, float *X, const int incX); - -void cblas_dgemv(const enum CBLAS_ORDER order, - const enum CBLAS_TRANSPOSE TransA, const int M, const int N, - const double alpha, const double *A, const int lda, - const double *X, const int incX, const double beta, - double *Y, const int incY); -void cblas_dgbmv(const enum CBLAS_ORDER order, - const enum CBLAS_TRANSPOSE TransA, const int M, const int N, - const int KL, const int KU, const double alpha, - const double *A, const int lda, const double *X, - const int incX, const double beta, double *Y, const int incY); -void cblas_dtrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const double *A, const int lda, - double *X, const int incX); -void cblas_dtbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const int K, const double *A, const int lda, - double *X, const int incX); -void cblas_dtpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const double *Ap, double *X, const int incX); -void cblas_dtrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const double *A, const int lda, double *X, - const int incX); -void cblas_dtbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, 
const enum CBLAS_DIAG Diag, - const int N, const int K, const double *A, const int lda, - double *X, const int incX); -void cblas_dtpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const double *Ap, double *X, const int incX); - -void cblas_cgemv(const enum CBLAS_ORDER order, - const enum CBLAS_TRANSPOSE TransA, const int M, const int N, - const void *alpha, const void *A, const int lda, - const void *X, const int incX, const void *beta, - void *Y, const int incY); -void cblas_cgbmv(const enum CBLAS_ORDER order, - const enum CBLAS_TRANSPOSE TransA, const int M, const int N, - const int KL, const int KU, const void *alpha, - const void *A, const int lda, const void *X, - const int incX, const void *beta, void *Y, const int incY); -void cblas_ctrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const void *A, const int lda, - void *X, const int incX); -void cblas_ctbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const int K, const void *A, const int lda, - void *X, const int incX); -void cblas_ctpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const void *Ap, void *X, const int incX); -void cblas_ctrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const void *A, const int lda, void *X, - const int incX); -void cblas_ctbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const int K, const void *A, const int lda, - void *X, const int incX); -void cblas_ctpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const 
enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const void *Ap, void *X, const int incX); - -void cblas_zgemv(const enum CBLAS_ORDER order, - const enum CBLAS_TRANSPOSE TransA, const int M, const int N, - const void *alpha, const void *A, const int lda, - const void *X, const int incX, const void *beta, - void *Y, const int incY); -void cblas_zgbmv(const enum CBLAS_ORDER order, - const enum CBLAS_TRANSPOSE TransA, const int M, const int N, - const int KL, const int KU, const void *alpha, - const void *A, const int lda, const void *X, - const int incX, const void *beta, void *Y, const int incY); -void cblas_ztrmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const void *A, const int lda, - void *X, const int incX); -void cblas_ztbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const int K, const void *A, const int lda, - void *X, const int incX); -void cblas_ztpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const void *Ap, void *X, const int incX); -void cblas_ztrsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const void *A, const int lda, void *X, - const int incX); -void cblas_ztbsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const int K, const void *A, const int lda, - void *X, const int incX); -void cblas_ztpsv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE TransA, const enum CBLAS_DIAG Diag, - const int N, const void *Ap, void *X, const int incX); - - -/* - * Routines with S and D prefixes only - */ -void cblas_ssymv(const enum CBLAS_ORDER 
order, const enum CBLAS_UPLO Uplo, - const int N, const float alpha, const float *A, - const int lda, const float *X, const int incX, - const float beta, float *Y, const int incY); -void cblas_ssbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const int K, const float alpha, const float *A, - const int lda, const float *X, const int incX, - const float beta, float *Y, const int incY); -void cblas_sspmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const float alpha, const float *Ap, - const float *X, const int incX, - const float beta, float *Y, const int incY); -void cblas_sger(const enum CBLAS_ORDER order, const int M, const int N, - const float alpha, const float *X, const int incX, - const float *Y, const int incY, float *A, const int lda); -void cblas_ssyr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const float alpha, const float *X, - const int incX, float *A, const int lda); -void cblas_sspr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const float alpha, const float *X, - const int incX, float *Ap); -void cblas_ssyr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const float alpha, const float *X, - const int incX, const float *Y, const int incY, float *A, - const int lda); -void cblas_sspr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const float alpha, const float *X, - const int incX, const float *Y, const int incY, float *A); - -void cblas_dsymv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const double alpha, const double *A, - const int lda, const double *X, const int incX, - const double beta, double *Y, const int incY); -void cblas_dsbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const int K, const double alpha, const double *A, - const int lda, const double *X, const int incX, - const double beta, double *Y, const int incY); -void 
cblas_dspmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const double alpha, const double *Ap, - const double *X, const int incX, - const double beta, double *Y, const int incY); -void cblas_dger(const enum CBLAS_ORDER order, const int M, const int N, - const double alpha, const double *X, const int incX, - const double *Y, const int incY, double *A, const int lda); -void cblas_dsyr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const double alpha, const double *X, - const int incX, double *A, const int lda); -void cblas_dspr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const double alpha, const double *X, - const int incX, double *Ap); -void cblas_dsyr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const double alpha, const double *X, - const int incX, const double *Y, const int incY, double *A, - const int lda); -void cblas_dspr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const double alpha, const double *X, - const int incX, const double *Y, const int incY, double *A); - - -/* - * Routines with C and Z prefixes only + * Since all of GotoBlas was written without const, + * we disable it at build time. 
*/ -void cblas_chemv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const void *alpha, const void *A, - const int lda, const void *X, const int incX, - const void *beta, void *Y, const int incY); -void cblas_chbmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const int K, const void *alpha, const void *A, - const int lda, const void *X, const int incX, - const void *beta, void *Y, const int incY); -void cblas_chpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const void *alpha, const void *Ap, - const void *X, const int incX, - const void *beta, void *Y, const int incY); -void cblas_cgeru(const enum CBLAS_ORDER order, const int M, const int N, - const void *alpha, const void *X, const int incX, - const void *Y, const int incY, void *A, const int lda); -void cblas_cgerc(const enum CBLAS_ORDER order, const int M, const int N, - const void *alpha, const void *X, const int incX, - const void *Y, const int incY, void *A, const int lda); -void cblas_cher(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const float alpha, const void *X, const int incX, - void *A, const int lda); -void cblas_chpr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const float alpha, const void *X, - const int incX, void *A); -void cblas_cher2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N, - const void *alpha, const void *X, const int incX, - const void *Y, const int incY, void *A, const int lda); -void cblas_chpr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N, - const void *alpha, const void *X, const int incX, - const void *Y, const int incY, void *Ap); - -void cblas_zhemv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const void *alpha, const void *A, - const int lda, const void *X, const int incX, - const void *beta, void *Y, const int incY); -void cblas_zhbmv(const enum CBLAS_ORDER order, 
const enum CBLAS_UPLO Uplo, - const int N, const int K, const void *alpha, const void *A, - const int lda, const void *X, const int incX, - const void *beta, void *Y, const int incY); -void cblas_zhpmv(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const void *alpha, const void *Ap, - const void *X, const int incX, - const void *beta, void *Y, const int incY); -void cblas_zgeru(const enum CBLAS_ORDER order, const int M, const int N, - const void *alpha, const void *X, const int incX, - const void *Y, const int incY, void *A, const int lda); -void cblas_zgerc(const enum CBLAS_ORDER order, const int M, const int N, - const void *alpha, const void *X, const int incX, - const void *Y, const int incY, void *A, const int lda); -void cblas_zher(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const double alpha, const void *X, const int incX, - void *A, const int lda); -void cblas_zhpr(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, - const int N, const double alpha, const void *X, - const int incX, void *A); -void cblas_zher2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N, - const void *alpha, const void *X, const int incX, - const void *Y, const int incY, void *A, const int lda); -void cblas_zhpr2(const enum CBLAS_ORDER order, const enum CBLAS_UPLO Uplo, const int N, - const void *alpha, const void *X, const int incX, - const void *Y, const int incY, void *Ap); +#ifndef OPENBLAS_CONST +# define OPENBLAS_CONST const +#endif /* - * =========================================================================== - * Prototypes for level 3 BLAS - * =========================================================================== + * Add definitions for BLASLONG and blasint */ -/* - * Routines with standard 4 prefixes (S, D, C, Z) - */ -void cblas_sgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_TRANSPOSE TransB, const int M, const int N, - const int K, const 
float alpha, const float *A, - const int lda, const float *B, const int ldb, - const float beta, float *C, const int ldc); -void cblas_ssymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const int M, const int N, - const float alpha, const float *A, const int lda, - const float *B, const int ldb, const float beta, - float *C, const int ldc); -void cblas_ssyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const float alpha, const float *A, const int lda, - const float beta, float *C, const int ldc); -void cblas_ssyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const float alpha, const float *A, const int lda, - const float *B, const int ldb, const float beta, - float *C, const int ldc); -void cblas_strmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_DIAG Diag, const int M, const int N, - const float alpha, const float *A, const int lda, - float *B, const int ldb); -void cblas_strsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_DIAG Diag, const int M, const int N, - const float alpha, const float *A, const int lda, - float *B, const int ldb); +#if defined(OS_WINDOWS) && defined(__64BIT__) +typedef long long BLASLONG; +typedef unsigned long long BLASULONG; +#else +typedef long BLASLONG; +typedef unsigned long BLASULONG; +#endif -void cblas_dgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_TRANSPOSE TransB, const int M, const int N, - const int K, const double alpha, const double *A, - const int lda, const double *B, const int ldb, - const double beta, double *C, const int ldc); -void cblas_dsymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE 
Side, - const enum CBLAS_UPLO Uplo, const int M, const int N, - const double alpha, const double *A, const int lda, - const double *B, const int ldb, const double beta, - double *C, const int ldc); -void cblas_dsyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const double alpha, const double *A, const int lda, - const double beta, double *C, const int ldc); -void cblas_dsyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const double alpha, const double *A, const int lda, - const double *B, const int ldb, const double beta, - double *C, const int ldc); -void cblas_dtrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_DIAG Diag, const int M, const int N, - const double alpha, const double *A, const int lda, - double *B, const int ldb); -void cblas_dtrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_DIAG Diag, const int M, const int N, - const double alpha, const double *A, const int lda, - double *B, const int ldb); +#ifdef INTERFACE64 +typedef BLASLONG blasint; +#else +typedef int blasint; +#endif -void cblas_cgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_TRANSPOSE TransB, const int M, const int N, - const int K, const void *alpha, const void *A, - const int lda, const void *B, const int ldb, - const void *beta, void *C, const int ldc); -void cblas_csymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const int M, const int N, - const void *alpha, const void *A, const int lda, - const void *B, const int ldb, const void *beta, - void *C, const int ldc); -void cblas_csyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum 
CBLAS_TRANSPOSE Trans, const int N, const int K, - const void *alpha, const void *A, const int lda, - const void *beta, void *C, const int ldc); -void cblas_csyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const void *alpha, const void *A, const int lda, - const void *B, const int ldb, const void *beta, - void *C, const int ldc); -void cblas_ctrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_DIAG Diag, const int M, const int N, - const void *alpha, const void *A, const int lda, - void *B, const int ldb); -void cblas_ctrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_DIAG Diag, const int M, const int N, - const void *alpha, const void *A, const int lda, - void *B, const int ldb); +#ifdef INTERFACE64 +#define cblas_sgemm cblas_sgemm64_ +#define cblas_sgemv cblas_sgemv64_ +#define cblas_sger cblas_sger64_ +#define cblas_dgemm cblas_dgemm64_ +#define cblas_dgemv cblas_dgemv64_ +#define cblas_dger cblas_dger64_ +#define dblas_sdot cblas_sdot64_ +#define dblas_ddot cblas_ddot64_ +#endif -void cblas_zgemm(const enum CBLAS_ORDER Order, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_TRANSPOSE TransB, const int M, const int N, - const int K, const void *alpha, const void *A, - const int lda, const void *B, const int ldb, - const void *beta, void *C, const int ldc); -void cblas_zsymm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const int M, const int N, - const void *alpha, const void *A, const int lda, - const void *B, const int ldb, const void *beta, - void *C, const int ldc); -void cblas_zsyrk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const void *alpha, const void *A, const 
int lda, - const void *beta, void *C, const int ldc); -void cblas_zsyr2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const void *alpha, const void *A, const int lda, - const void *B, const int ldb, const void *beta, - void *C, const int ldc); -void cblas_ztrmm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_DIAG Diag, const int M, const int N, - const void *alpha, const void *A, const int lda, - void *B, const int ldb); -void cblas_ztrsm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const enum CBLAS_TRANSPOSE TransA, - const enum CBLAS_DIAG Diag, const int M, const int N, - const void *alpha, const void *A, const int lda, - void *B, const int ldb); +typedef enum CBLAS_ORDER {CblasRowMajor=101, CblasColMajor=102} CBLAS_ORDER; +typedef enum CBLAS_TRANSPOSE {CblasNoTrans=111, CblasTrans=112, CblasConjTrans=113, CblasConjNoTrans=114} CBLAS_TRANSPOSE; +typedef enum CBLAS_UPLO {CblasUpper=121, CblasLower=122} CBLAS_UPLO; +typedef enum CBLAS_DIAG {CblasNonUnit=131, CblasUnit=132} CBLAS_DIAG; +typedef enum CBLAS_SIDE {CblasLeft=141, CblasRight=142} CBLAS_SIDE; -/* - * Routines with prefixes C and Z only - */ -void cblas_chemm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const int M, const int N, - const void *alpha, const void *A, const int lda, - const void *B, const int ldb, const void *beta, - void *C, const int ldc); -void cblas_cherk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const float alpha, const void *A, const int lda, - const float beta, void *C, const int ldc); -void cblas_cher2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const void *alpha, const void 
*A, const int lda, - const void *B, const int ldb, const float beta, - void *C, const int ldc); +float cblas_sdot(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *y, OPENBLAS_CONST blasint incy); +double cblas_ddot(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double *y, OPENBLAS_CONST blasint incy); -void cblas_zhemm(const enum CBLAS_ORDER Order, const enum CBLAS_SIDE Side, - const enum CBLAS_UPLO Uplo, const int M, const int N, - const void *alpha, const void *A, const int lda, - const void *B, const int ldb, const void *beta, - void *C, const int ldc); -void cblas_zherk(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const double alpha, const void *A, const int lda, - const double beta, void *C, const int ldc); -void cblas_zher2k(const enum CBLAS_ORDER Order, const enum CBLAS_UPLO Uplo, - const enum CBLAS_TRANSPOSE Trans, const int N, const int K, - const void *alpha, const void *A, const int lda, - const void *B, const int ldb, const double beta, - void *C, const int ldc); +void cblas_sgemv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE trans, OPENBLAS_CONST blasint m, OPENBLAS_CONST blasint n, + OPENBLAS_CONST float alpha, OPENBLAS_CONST float *a, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float beta, float *y, OPENBLAS_CONST blasint incy); +void cblas_dgemv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE trans, OPENBLAS_CONST blasint m, OPENBLAS_CONST blasint n, + OPENBLAS_CONST double alpha, OPENBLAS_CONST double *a, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double beta, double *y, OPENBLAS_CONST blasint incy); +void cblas_sger (OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, 
OPENBLAS_CONST float alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *Y, OPENBLAS_CONST blasint incY, float *A, OPENBLAS_CONST blasint lda); +void cblas_dger (OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *Y, OPENBLAS_CONST blasint incY, double *A, OPENBLAS_CONST blasint lda); -void cblas_xerbla(int p, const char *rout, const char *form, ...); +void cblas_sgemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_dgemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double beta, double *C, OPENBLAS_CONST blasint ldc); #ifdef __cplusplus } -#endif +#endif /* __cplusplus */ #endif From 7f445b907443800cb24765cea3fbad272de1d8ae Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Mon, 18 Apr 2016 14:52:27 -0700 Subject: [PATCH 307/630] Fix typo --- deps/cblas.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/cblas.h b/deps/cblas.h index c08fb5f6af9d..a99c8fc9920f 100644 --- a/deps/cblas.h +++ b/deps/cblas.h @@ -46,8 +46,8 @@ typedef int blasint; #define cblas_dgemm cblas_dgemm64_ #define cblas_dgemv cblas_dgemv64_ #define cblas_dger cblas_dger64_ -#define dblas_sdot cblas_sdot64_ -#define dblas_ddot 
cblas_ddot64_ +#define cblas_sdot cblas_sdot64_ +#define cblas_ddot cblas_ddot64_ #endif From 9474561cb95765410e9f5bacd5c4ad8b412ae30f Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Mon, 18 Apr 2016 14:56:43 -0700 Subject: [PATCH 308/630] More intelligent parallelized build --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index bddecb135dde..50fd0a907ae7 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -58,7 +58,7 @@ if !libmxnet_detected `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` `cp ../../cblas.h include/cblas.h` - `make USE_BLAS=openblas MSHADOW_LDFLAGS="$openblas_path" -j` + `make USE_BLAS=openblas MSHADOW_LDFLAGS="$openblas_path" -j$(max(1, CPU_CORES - 1))` `cp lib/libmxnet.so $_libdir` end) end From 39786c5904ec9ee912488eeb136682f9427afe08 Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Mon, 18 Apr 2016 15:58:57 -0700 Subject: [PATCH 309/630] Just remove parallel build, it's making Travis angry --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 50fd0a907ae7..e0e0a0e1388f 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -58,7 +58,7 @@ if !libmxnet_detected `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` `cp ../../cblas.h include/cblas.h` - `make USE_BLAS=openblas MSHADOW_LDFLAGS="$openblas_path" -j$(max(1, CPU_CORES - 1))` + `make USE_BLAS=openblas MSHADOW_LDFLAGS="$openblas_path"` `cp lib/libmxnet.so $_libdir` end) end From b38e2ccae4183b861f591db9f5a26b8ab1b94fc8 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 21 Apr 2016 09:35:35 -0400 Subject: [PATCH 310/630] update docs --- docs/api/io.rst | 4 +- docs/api/metric.rst | 10 ++ docs/api/ndarray.rst | 69 ++++++++++--- docs/api/symbolic-node.rst | 199 
+++++++++++++++++++++++++++++-------- 4 files changed, 229 insertions(+), 53 deletions(-) diff --git a/docs/api/io.rst b/docs/api/io.rst index 8ec67cf6d073..e5cb3ffb32e5 100644 --- a/docs/api/io.rst +++ b/docs/api/io.rst @@ -351,7 +351,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :param rand_crop: Augmentation Param: Whether to random crop on the image @@ -514,7 +514,7 @@ libmxnet data providers :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: long (non-negative), optional, default=4 + :type prefetch_buffer: , optional, default=4 :return: the constructed :class:`MXDataProvider`. diff --git a/docs/api/metric.rst b/docs/api/metric.rst index 966d682ba2b7..0f3cc2d81358 100644 --- a/docs/api/metric.rst +++ b/docs/api/metric.rst @@ -65,3 +65,13 @@ set. + +.. class:: MultiACE + + Averaged cross-entropy for classification. This also know als logloss. + This variant keeps track of the different losses per class. + + Calculated the averaged cross entropy for multi-dimentions output. + + + diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst index b72f0faa5cfe..b94232b25e50 100644 --- a/docs/api/ndarray.rst +++ b/docs/api/ndarray.rst @@ -748,13 +748,13 @@ Internal APIs .. function:: _div(...) + Multiply lhs by rhs - - :param lhs: Left operand to the function. + :param lhs: Left operand to the function :type lhs: NDArray - :param rhs: Right operand to the function. + :param rhs: Right operand to the function :type rhs: NDArray @@ -815,15 +815,45 @@ Internal APIs -.. function:: _minus(...) +.. function:: _maximum(...) + Elementwise max of lhs by rhs + :param lhs: Left operand to the function + :type lhs: NDArray - :param lhs: Left operand to the function. + + :param rhs: Right operand to the function + :type rhs: NDArray + + + + + +.. function:: _minimum(...) 
+ + Elementwise min of lhs by rhs + + :param lhs: Left operand to the function :type lhs: NDArray - :param rhs: Right operand to the function. + :param rhs: Right operand to the function + :type rhs: NDArray + + + + + +.. function:: _minus(...) + + Minus lhs and rhs + + :param lhs: Left operand to the function + :type lhs: NDArray + + + :param rhs: Right operand to the function :type rhs: NDArray @@ -847,13 +877,13 @@ Internal APIs .. function:: _mul(...) + Multiply lhs and rhs - - :param lhs: Left operand to the function. + :param lhs: Left operand to the function :type lhs: NDArray - :param rhs: Right operand to the function. + :param rhs: Right operand to the function :type rhs: NDArray @@ -892,13 +922,13 @@ Internal APIs .. function:: _plus(...) + Add lhs and rhs - - :param lhs: Left operand to the function. + :param lhs: Left operand to the function :type lhs: NDArray - :param rhs: Right operand to the function. + :param rhs: Right operand to the function :type rhs: NDArray @@ -920,6 +950,21 @@ Internal APIs +.. function:: _power(...) + + Elementwise power(lhs, rhs) + + :param lhs: Left operand to the function + :type lhs: NDArray + + + :param rhs: Right operand to the function + :type rhs: NDArray + + + + + .. function:: _random_gaussian(...) diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst index e303afba21dc..125b391046cb 100644 --- a/docs/api/symbolic-node.rst +++ b/docs/api/symbolic-node.rst @@ -498,7 +498,7 @@ Public APIs .. function:: Embedding(...) - Get embedding for one-hot input + Get embedding for one-hot input. A n-dimensional input tensor will be trainsformed into a (n+1)-dimensional tensor, where a new dimension is added for the embedding results. :param data: Input data to the EmbeddingOp. :type data: SymbolicNode @@ -800,6 +800,10 @@ Public APIs :param axis: Dimension along which to slice. :type axis: int, optional, default='1' + + :param squeeze_axis: If true AND the sliced dimension becomes 1, squeeze that dimension. 
+ :type squeeze_axis: boolean, optional, default=False + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -963,8 +967,12 @@ Public APIs Take absolute value of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -979,8 +987,12 @@ Public APIs Take ceil value of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -995,8 +1007,12 @@ Public APIs Take cos of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. 
@@ -1011,8 +1027,12 @@ Public APIs Take exp of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -1027,8 +1047,12 @@ Public APIs Take floor value of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -1043,8 +1067,12 @@ Public APIs Take log of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -1059,8 +1087,12 @@ Public APIs Take round value of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. 
@@ -1075,8 +1107,12 @@ Public APIs Take rsqrt of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -1091,8 +1127,12 @@ Public APIs Take sign value of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -1107,8 +1147,12 @@ Public APIs Take sin of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -1123,8 +1167,12 @@ Public APIs Take sqrt of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. 
@@ -1139,8 +1187,32 @@ Public APIs Take square of the src - :param src: Source symbolic input to the function - :type src: SymbolicNode + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode + + :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. + :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. + + :return: . + + + + + +.. function:: transpose(...) + + Transpose the input matrix and return a new one + + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. @@ -1172,12 +1244,19 @@ Internal APIs .. function:: _Div(...) - Perform an elementwise div. + Multiply lhs by rhs + + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: SymbolicNode. + :return: . @@ -1209,12 +1288,19 @@ Internal APIs .. function:: _Maximum(...) - Perform an elementwise power. + Elementwise max of lhs by rhs + + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: SymbolicNode. + :return: . 
@@ -1246,12 +1332,19 @@ Internal APIs .. function:: _Minimum(...) - Perform an elementwise power. + Elementwise min of lhs by rhs + + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: SymbolicNode. + :return: . @@ -1283,12 +1376,19 @@ Internal APIs .. function:: _Minus(...) - Perform an elementwise minus. + Minus lhs and rhs + + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: SymbolicNode. + :return: . @@ -1320,12 +1420,19 @@ Internal APIs .. function:: _Mul(...) - Perform an elementwise mul. + Multiply lhs and rhs + + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: SymbolicNode. + :return: . @@ -1393,12 +1500,19 @@ Internal APIs .. function:: _Plus(...) - Perform an elementwise plus. + Add lhs and rhs + + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. 
- :return: SymbolicNode. + :return: . @@ -1430,12 +1544,19 @@ Internal APIs .. function:: _Power(...) - Perform an elementwise power. + Elementwise power(lhs, rhs) + + :param lhs: Left symbolic input to the function + :type lhs: SymbolicNode + + + :param rhs: Left symbolic input to the function + :type rhs: SymbolicNode :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - :return: SymbolicNode. + :return: . From bbe31513ade7cdf1db04ead6c7195d679e56d99e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sat, 23 Apr 2016 10:03:54 -0400 Subject: [PATCH 311/630] fix for upstream changes dmlc/mxnet#1857 --- src/symbolic-node.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index e83b94aeefa4..da0833242def 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -330,7 +330,7 @@ function .-(self :: SymbolicNode, arg :: Real) end function -(arg :: Real, self :: SymbolicNode) - _MinusScalar(self, scalar=arg, scalar_on_left=true) + _RMinusScalar(self, scalar=arg) end function .-(arg :: Real, self :: SymbolicNode) -(arg, self) @@ -373,7 +373,7 @@ function /(self :: SymbolicNode, arg :: Real) ./(self, arg) end function ./(arg :: Real, self :: SymbolicNode) - _DivScalar(self, scalar=arg, scalar_on_left=true) + _RDivScalar(self, scalar=arg) end import Base: .^, ^ From 4bf4037bc2344d507950216d74561c801970a6fe Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Mon, 25 Apr 2016 21:58:28 +0900 Subject: [PATCH 312/630] check if their are predefined weights and use them --- src/model.jl | 45 ++++++++++++++++++++++++++++++++------------- 1 file changed, 32 insertions(+), 13 deletions(-) diff --git a/src/model.jl b/src/model.jl index b823a342b529..3458e6a579fa 100644 --- a/src/model.jl +++ b/src/model.jl @@ -92,32 +92,51 @@ function init_model(self :: FeedForward, 
initializer :: AbstractInitializer; ove param_names = setdiff(arg_names, input_names) aux_names = list_auxiliary_states(self.arch) - arg_defined = true - aux_defined = true - arg_shapes, out_shapes, aux_shapes = infer_shape(self.arch; input_shapes...) + + # If target dict is not yet defined set a temporary one if !isdefined(self, :arg_params) - param_name_shapes = filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) - self.arg_params = Dict([name => empty(shape) for (name,shape) in param_name_shapes]) - arg_defined = false + self.arg_params = Dict{Symbol, NDArray}() end if !isdefined(self, :aux_params) - self.aux_params = Dict([name => empty(shape) for (name,shape) in zip(aux_names,aux_shapes)]) - aux_defined = false + self.aux_params = Dict{Symbol, NDArray}() + end + + arg_params = Dict{Symbol, NDArray}() + aux_params = Dict{Symbol, NDArray}() + + for (name, shape) in filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) + if haskey(self.arg_params, name) + shape == size(self.arg_params[name]) || error("Shape mismatch for $name.") + arg_params[name] = self.arg_params[name] + else + arg_params[name] = empty(shape) + end end - # initialize the contents of the parameters - if !arg_defined || overwrite - for (k,v) in self.arg_params + for (name, shape) in zip(aux_names, aux_shapes) + if haskey(self.aux_params, name) + shape == size(self.arg_params[name]) || error("Shape mismatch for $name.") + aux_params[name] = self.aux_params[name] + else + aux_params[name] = empty(shape) + end + end + + for (k,v) in arg_params + if overwrite || !haskey(self.arg_params, k) init(initializer, k, v) end end - if !aux_defined || overwrite - for (k,v) in self.aux_params + for (k,v) in aux_params + if overwrite || !haskey(self.aux_params, k) init(initializer, k, v) end end + self.arg_params = arg_params + self.aux_params = aux_params + return (arg_names, param_names, aux_names) end From 4c7a066ce4469479b96eb39ecbf7d0504bd6acbd Mon Sep 17 00:00:00 2001 From: Valentin 
Churavy Date: Mon, 25 Apr 2016 23:23:45 +0900 Subject: [PATCH 313/630] be more lenient in model loading --- src/model.jl | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/src/model.jl b/src/model.jl index 3458e6a579fa..683510b13854 100644 --- a/src/model.jl +++ b/src/model.jl @@ -107,20 +107,28 @@ function init_model(self :: FeedForward, initializer :: AbstractInitializer; ove for (name, shape) in filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) if haskey(self.arg_params, name) - shape == size(self.arg_params[name]) || error("Shape mismatch for $name.") - arg_params[name] = self.arg_params[name] - else - arg_params[name] = empty(shape) + if shape == size(self.arg_params[name]) + arg_params[name] = self.arg_params[name] + continue + else + warn("Shape mismatch for $name. Overwriting with new one.") + delete!(self.arg_params, name) + end end + arg_params[name] = empty(shape) end for (name, shape) in zip(aux_names, aux_shapes) if haskey(self.aux_params, name) - shape == size(self.arg_params[name]) || error("Shape mismatch for $name.") - aux_params[name] = self.aux_params[name] - else - aux_params[name] = empty(shape) + if shape == size(self.auxg_params[name]) + aux_params[name] = self.aux_params[name] + continue + else + warn("Shape mismatch for $name. 
Overwriting with new one.") + delete!(self.aux_params, name) + end end + aux_params[name] = empty(shape) end for (k,v) in arg_params From 98b7cf2341f3045e0babf1d274aec5cdb3c7df76 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 26 Apr 2016 12:54:40 +0900 Subject: [PATCH 314/630] add SymbolListAttr --- src/symbolic-node.jl | 25 ++++++++++++++++++++++++- test/unittest/symbolic-node.jl | 1 + 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index da0833242def..1ddc5e682319 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -132,7 +132,8 @@ function get_attr(self :: SymbolicNode, key :: Symbol) key_s = bytestring(string(key)) ref_out = Ref{Cstring}() ref_success = Ref{Cint}(-1) - @mxcall(:MXSymbolGetAttr, (MX_handle, Cstring, Ref{Cstring}, Ref{Cint}), self, key_s, ref_out, ref_success) + @mxcall(:MXSymbolGetAttr, (MX_handle, Cstring, Ref{Cstring}, Ref{Cint}), + self, key_s, ref_out, ref_success) if ref_success[] == 1 return Nullable{ByteString}(bytestring(ref_out[])) else @@ -140,6 +141,28 @@ function get_attr(self :: SymbolicNode, key :: Symbol) end end +#=doc +.. function: list_attr(self :: SymbolicNode) + + Get all attributes from symbol. + :return: Dictionary of attributes. +=# +function list_attr(self :: SymbolicNode) + ref_sz = Ref{MX_uint}(0) + ref_strings = Ref{char_pp}(0) + @mxcall(:MXSymbolListAttr, (MX_handle, Ref{MX_uint}, Ref{char_pp}), + self, ref_sz, ref_strings) + narg = 2*ref_sz[] + strings = pointer_to_array(ref_strings[], narg) + out = Dict{Symbol, ByteString}() + for i in 1:2:narg + key = symbol(bytestring(strings[i])) + value = bytestring(strings[i+1]) + out[key] = value + end + return out +end + #=doc .. 
function:: set_attr(self:: SymbolicNode, key :: Symbol, value :: AbstractString) diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index e5a5531845ec..825602723151 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -100,6 +100,7 @@ function test_attrs() @test isnull(mx.get_attr(conv, :b)) @test get(mx.get_attr(conv, :a)) == "a" @test get(mx.get_attr(conv, :π)) == "π" + @test mx.list_attr(conv) == Dict(:a => "a", :π => "π") @test_throws MethodError mx.Variable(:data3, attrs = Dict(:test => "1.0", :test2 => 1.0)) @test_throws MethodError mx.Convolution(data=data2, kernel = (1,1), num_filter = 1, attrs = Dict(:test => "1.0", :test2 => 1.0)) From b0fc16f117b1c6e12d5a5bab6550b0d298384c06 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 26 Apr 2016 15:16:42 +0900 Subject: [PATCH 315/630] get idx that need to be frozen --- src/model.jl | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/model.jl b/src/model.jl index 683510b13854..da54614ac935 100644 --- a/src/model.jl +++ b/src/model.jl @@ -430,6 +430,16 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # invoke callbacks on epoch 0 _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) + # get grad attribute to allow for freezing + freeze_names = Symbol[] + for (attr, value) in list_attr(self.arch) + sattr = string(attr) + if endswith(sattr, "grad") && value == "freeze" + push!(freeze_names, symbol(sattr[1:end-5])) + end + end + freeze_idx = filter(i -> in(arg_names[i], freeze_names), 1:length(arg_names)) + info("Start training...") for i_epoch = 1:opts.n_epoch time_start = time() From ce599a969e76c6dfeefed32c03e89b9b046d2b8d Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 26 Apr 2016 15:20:57 +0900 Subject: [PATCH 316/630] skip parameter update on frozen weights --- src/model.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/model.jl b/src/model.jl index 
da54614ac935..7dede8d05206 100644 --- a/src/model.jl +++ b/src/model.jl @@ -473,6 +473,10 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # update parameters for idx = 1:length(param_names) + if in(idx, freeze_idx) + continue # Skip parameter update entirely + end + # gradient synchronization if !isa(kvstore, Void) # push gradient, priority is negative index From 9880bdf57b350b2602684446d3bfe0f3a2a0094b Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 26 Apr 2016 18:02:17 +0900 Subject: [PATCH 317/630] set GRAD_NOP for all frozen params --- src/executor.jl | 29 +++++++++++++++++++---------- src/model.jl | 32 +++++++++++++++++++++----------- 2 files changed, 40 insertions(+), 21 deletions(-) diff --git a/src/executor.jl b/src/executor.jl index 4c72589ae966..e4b943c0c764 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -128,22 +128,31 @@ function bind(self :: SymbolicNode; kwargs...) bind(self, context, args; kwargs...) end -function simple_bind(self :: SymbolicNode, ctx :: Context; grad_req :: GRAD_REQ=GRAD_WRITE, kwargs...) +function simple_bind(self :: SymbolicNode, ctx :: Context; + grad_req :: Union{GRAD_REQ, Dict{Symbol, GRAD_REQ}}=GRAD_WRITE, + kwargs...) arg_shapes, out_shapes, aux_shapes = infer_shape(self; kwargs...) 
@assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") arg_arrays = NDArray[zeros(shape, ctx) for shape in arg_shapes] arg_names = list_arguments(self) - if grad_req == GRAD_NOP - grad_arrays = Dict{Base.Symbol,NDArray}() - else + + grad_arrays = Dict{Symbol,NDArray}() + + if grad_req != GRAD_NOP + shapes = zip(arg_names, arg_shapes) + + # if not in provided data, should be parameters provided_data_names = [x[1] for x in kwargs] - grad_arrays = Dict{Base.Symbol,NDArray}() - for (name, shape) in zip(arg_names, arg_shapes) - # if not in provided data, should be parameters - if !in(name, provided_data_names) - grad_arrays[name] = zeros(shape, ctx) - end + shapes = filter(x -> !in(x[1], provided_data_names), shapes) + + # Remove all gradients for nop params + # if isa(grad_req, Dict{Symbol, GRAD_REQ}) + # shapes = filter(x -> grad_req[x[1]] != GRAD_NOP,shapes) + # end + + for (name, shape) in shapes + grad_arrays[name] = zeros(shape, ctx) end end diff --git a/src/model.jl b/src/model.jl index 7dede8d05206..f5dffc457bc6 100644 --- a/src/model.jl +++ b/src/model.jl @@ -369,11 +369,31 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra kvstore, update_on_kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params) end + # get grad attribute to allow for freezing + freeze_names = Symbol[] + for (attr, value) in list_attr(self.arch) + sattr = string(attr) + if endswith(sattr, "grad") && value == "freeze" + push!(freeze_names, symbol(sattr[1:end-5])) + end + end + freeze_idx = filter(i -> in(arg_names[i], freeze_names), 1:length(arg_names)) + + # Setup grad_req as a dictionary + grad_req = Dict{Symbol, GRAD_REQ}() + for param in param_names + if in(param, freeze_names) + grad_req[param] = GRAD_NOP + else + grad_req[param] = GRAD_WRITE + end + end + train_execs = Array(Executor, num_dev) for i = 1:num_dev data_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)] 
label_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)] - train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=GRAD_WRITE, data_shapes..., label_shapes...) + train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=grad_req, data_shapes..., label_shapes...) dbg_str = mx.debug_str(train_execs[i]) info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[i])) @@ -430,16 +450,6 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # invoke callbacks on epoch 0 _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) - # get grad attribute to allow for freezing - freeze_names = Symbol[] - for (attr, value) in list_attr(self.arch) - sattr = string(attr) - if endswith(sattr, "grad") && value == "freeze" - push!(freeze_names, symbol(sattr[1:end-5])) - end - end - freeze_idx = filter(i -> in(arg_names[i], freeze_names), 1:length(arg_names)) - info("Start training...") for i_epoch = 1:opts.n_epoch time_start = time() From 68180f75fd411dd62301b848e8299aeaa9b8c68e Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 26 Apr 2016 18:54:29 +0900 Subject: [PATCH 318/630] ids derived from arg_names and param_names are not relatable. --- src/model.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/model.jl b/src/model.jl index f5dffc457bc6..c025dc17091c 100644 --- a/src/model.jl +++ b/src/model.jl @@ -377,7 +377,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra push!(freeze_names, symbol(sattr[1:end-5])) end end - freeze_idx = filter(i -> in(arg_names[i], freeze_names), 1:length(arg_names)) + # Needs to correspond to the correct id in the update loop layer idx=1:length(param_names). 
+ freeze_idx = filter(i -> in(param_names[i], freeze_names), 1:length(param_names)) # Setup grad_req as a dictionary grad_req = Dict{Symbol, GRAD_REQ}() From deb021dab22f1ed1790ce8a74642cbc23d52dc85 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 26 Apr 2016 14:13:40 -0400 Subject: [PATCH 319/630] (temporarily) disable Julia nightly test --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 9f08e176eed2..df63021f35aa 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ os: - osx julia: - 0.4 - - nightly + #- nightly # dependent apt packages addons: From c948dc39c77f3cf1846aaaea6c6d6234aa9547cf Mon Sep 17 00:00:00 2001 From: Michael Wallace Louwrens Date: Mon, 9 May 2016 01:18:10 +0200 Subject: [PATCH 320/630] Added 7z alternative for mnist Added try catch around the unzip and added 7z extraction alternative. --- src/util.jl | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/src/util.jl b/src/util.jl index 1e9b0853dc5b..d6ce97a3e5ca 100644 --- a/src/util.jl +++ b/src/util.jl @@ -19,7 +19,15 @@ function get_mnist_ubyte() if !all(isfile, values(filenames)) cd(mnist_dir) do mnist_dir = download("http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip", "mnist.zip") - run(`unzip -u $mnist_dir`) + try + run(`unzip -u $mnist_dir`) + catch + try + run(pipe(`7z x $mnist_dir`,stdout=DevNull)) + catch + error("Extraction Failed:No extraction program found in path") + end + end end end return filenames @@ -34,7 +42,15 @@ function get_cifar10() if !all(isfile, values(filenames)) cd(cifar10_dir) do run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip`) - run(`unzip -u cifar10.zip`) + try + run(`unzip -u cifar10.zip`) + catch + try + run(pipe(`7z x cifar10.zip`,stdout=DevNull)) + catch + error("Extraction Failed:No extraction program found in path") + end + end end end From 013b88b8c7580f057fcdf3aeac3777a57ca8a04d Mon Sep 17 00:00:00 2001 From: Michael Wallace 
Louwrens Date: Mon, 9 May 2016 01:38:14 +0200 Subject: [PATCH 321/630] Change where set_attr is called in _define_atomic_symbol_creator MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Where `set_attr` was originally called in `_define_atomic_symbol_creator` would cause `list_attr` to return a dictionary with extra bias and weight fields as seen here: ` Dict{Symbol,ByteString}(:convolution0_a=>"a",:convolution0_π=>"π",:convolution0_bias_a=>"a",:convolution0_weight_π=>"π",:convolution0_bias_π=>"π",:convolution0_weight_a=>"a",:data2_test=>"hallo!") ` The test still fails however as `list_attr` still contains the name of the source before the key. --- src/symbolic-node.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 1ddc5e682319..0bc3b593da8a 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -591,17 +591,17 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) hint = lowercase($func_name_s) name = get!(DEFAULT_NAME_MANAGER, name, hint) - # set attrs - for (k, v) in attrs - set_attr(node, k, v) - end - if length(args) != 0 _compose!(node, name, args...) else _compose!(node; name=name, symbol_kws...) 
end + # set attrs + for (k, v) in attrs + set_attr(node, k, v) + end + return node end From 463f202e6fa4ad6eaaa2f82e93ddd2d4ec782981 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Mon, 9 May 2016 11:52:24 +0900 Subject: [PATCH 322/630] add support for ListAttrShallow --- src/model.jl | 2 +- src/symbolic-node.jl | 24 +++++++++++++++++++++++- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/src/model.jl b/src/model.jl index c025dc17091c..3984eb9d1389 100644 --- a/src/model.jl +++ b/src/model.jl @@ -371,7 +371,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # get grad attribute to allow for freezing freeze_names = Symbol[] - for (attr, value) in list_attr(self.arch) + for (attr, value) in list_all_attr(self.arch) sattr = string(attr) if endswith(sattr, "grad") && value == "freeze" push!(freeze_names, symbol(sattr[1:end-5])) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 0bc3b593da8a..dcaae9bd5dc6 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -144,10 +144,32 @@ end #=doc .. function: list_attr(self :: SymbolicNode) - Get all attributes from symbol. + Get all attributes from a symbol. :return: Dictionary of attributes. =# function list_attr(self :: SymbolicNode) + ref_sz = Ref{MX_uint}(0) + ref_strings = Ref{char_pp}(0) + @mxcall(:MXSymbolListAttrShallow, (MX_handle, Ref{MX_uint}, Ref{char_pp}), + self, ref_sz, ref_strings) + narg = 2*ref_sz[] + strings = pointer_to_array(ref_strings[], narg) + out = Dict{Symbol, ByteString}() + for i in 1:2:narg + key = symbol(bytestring(strings[i])) + value = bytestring(strings[i+1]) + out[key] = value + end + return out +end + +#=doc +.. function: list_all_attr(self :: SymbolicNode) + + Get all attributes from the symbol graph. + :return: Dictionary of attributes. 
+=# +function list_all_attr(self :: SymbolicNode) ref_sz = Ref{MX_uint}(0) ref_strings = Ref{char_pp}(0) @mxcall(:MXSymbolListAttr, (MX_handle, Ref{MX_uint}, Ref{char_pp}), From baa9c2a98749b465edafeaed51e41e2bdf221d32 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Sun, 8 May 2016 23:22:42 -0400 Subject: [PATCH 323/630] revert #91 --- src/symbolic-node.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index dcaae9bd5dc6..4ce5b59a18ea 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -613,17 +613,17 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) hint = lowercase($func_name_s) name = get!(DEFAULT_NAME_MANAGER, name, hint) + # set attrs + for (k, v) in attrs + set_attr(node, k, v) + end + if length(args) != 0 _compose!(node, name, args...) else _compose!(node; name=name, symbol_kws...) end - # set attrs - for (k, v) in attrs - set_attr(node, k, v) - end - return node end From 64a63f7797b946e334e38fbb08bd7ddd79634c12 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 11 May 2016 14:15:54 +0900 Subject: [PATCH 324/630] Deduplicate and generalise metric update! MXNet allows for the design of networks that use the same label for multiple outputs. Instead of failing for these kinds of networks, warn the user and try to proceed. --- src/metric.jl | 40 +++++++++++++--------------------------- 1 file changed, 13 insertions(+), 27 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 7916d45b639c..7e76b969d0a0 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -36,6 +36,19 @@ set. =# abstract AbstractEvalMetric +# Generic update! version +function update!{T <: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + if length(labels) != length(preds) + Base.warn_once( + "The number of labels ($(length(labels))) does not correspond to the\ + number of outputs ($(length(preds))). 
The calculated metric might not be accuracte.") + end + for (label, pred) in zip(labels, preds) + _update_single_output(metric, label, pred) + end +end + + #=doc .. class:: Accuracy @@ -85,13 +98,6 @@ function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDA end end -function update!(metric :: Accuracy, labels :: Vector{NDArray}, preds :: Vector{NDArray}) - @assert length(labels) == length(preds) - for i = 1:length(labels) - _update_single_output(metric, labels[i], preds[i]) - end -end - import Base: get function get(metric :: Accuracy) return [(:accuracy, metric.acc_sum / metric.n_sample)] @@ -129,13 +135,6 @@ function _update_single_output(metric :: MSE, label :: NDArray, pred :: NDArray) end end -function update!(metric :: MSE, labels :: Vector{NDArray}, preds :: Vector{NDArray}) - @assert length(labels) == length(preds) - for i = 1:length(labels) - _update_single_output(metric, labels[i], preds[i]) - end -end - function get(metric :: MSE) return [(:MSE, metric.mse_sum / metric.n_sample)] end @@ -193,13 +192,6 @@ function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) end end -function update!(metric :: ACE, labels :: Vector{NDArray}, preds :: Vector{NDArray}) - @assert length(labels) == length(preds) - for i = 1:length(labels) - _update_single_output(metric, labels[i], preds[i]) - end -end - #=doc .. 
class:: MultiACE @@ -251,9 +243,3 @@ function _update_single_output(metric :: MultiACE, label :: NDArray, pred :: NDA end end -function update!(metric :: MultiACE, labels :: Vector{NDArray}, preds :: Vector{NDArray}) - @assert length(labels) == length(preds) - for i = 1:length(labels) - _update_single_output(metric, labels[i], preds[i]) - end -end From 3a013b889a923b6aaaa27caead707ab64f830b36 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 25 May 2016 14:03:17 +0900 Subject: [PATCH 325/630] fixup expr check for v0.5 --- src/base.jl | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/base.jl b/src/base.jl index de48241ac7e9..2dc570147b61 100644 --- a/src/base.jl +++ b/src/base.jl @@ -189,10 +189,19 @@ function _defstruct_impl(is_immutable, name, fields) if isa(name, Symbol) name = esc(name) super_name = :Any + elseif VERSION >= v"0.5-" + @assert(isa(name, Expr) && name.head == :(<:) && length(name.args) == 2 && + isa(name.args[1], Symbol) && isa(name.args[2], Symbol), + "name must be of form 'Name <: SuperType'") + + super_name = esc(name.args[2]) + name = esc(name.args[1]) else - @assert(isa(name, Expr) && name.head == :comparison && length(name.args) == 3 && name.args[2] == :(<:), + @assert(isa(name, Expr) && name.head == :comparison && + length(name.args) == 3 && name.args[2] == :(<:) && + isa(name.args[1], Symbol) && isa(name.args[3], Symbol), "name must be of form 'Name <: SuperType'") - @assert(isa(name.args[1], Symbol) && isa(name.args[3], Symbol)) + super_name = esc(name.args[3]) name = esc(name.args[1]) end From e6d07f0268d1ae84fd874ca40306ccf99ecd0c57 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 25 May 2016 14:25:34 +0900 Subject: [PATCH 326/630] fix deprications on v0.5 --- REQUIRE | 3 ++- src/MXNet.jl | 3 +++ src/base.jl | 2 +- src/executor.jl | 2 +- src/io.jl | 6 +++--- src/kvstore.jl | 2 +- src/metric.jl | 2 +- src/model.jl | 8 ++++---- src/name.jl | 10 +++++----- src/ndarray.jl | 26 
+++++++++++++------------- src/nn-factory.jl | 4 ++-- src/symbolic-node.jl | 38 +++++++++++++++++++------------------- src/util.jl | 6 +++--- src/visualize.jl | 4 ++-- 14 files changed, 60 insertions(+), 56 deletions(-) diff --git a/REQUIRE b/REQUIRE index fea260919fbc..d97c567e15db 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,4 +1,5 @@ -julia 0.4 +julia 0.4+ +Compat Formatting BinDeps JSON diff --git a/src/MXNet.jl b/src/MXNet.jl index 16ad415efe52..31618b795ed5 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -6,6 +6,9 @@ module MXNet # functions with the same names as built-in utilities like "zeros", etc. export mx module mx + +using Compat + using Formatting # Functions from base that we can safely extend and that are defined by libmxnet. diff --git a/src/base.jl b/src/base.jl index 2dc570147b61..bd67a7c2ac52 100644 --- a/src/base.jl +++ b/src/base.jl @@ -42,7 +42,7 @@ function mx_get_last_error() if msg == C_NULL throw(MXError("Failed to get last error message")) end - return bytestring(msg) + return @compat String(msg) end "Utility macro to call MXNet API functions" diff --git a/src/executor.jl b/src/executor.jl index e4b943c0c764..86f3f65921f4 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -221,5 +221,5 @@ Can be used to get an estimated about the memory cost. 
function debug_str(self :: Executor) s_ref = Ref{Cstring}() @mxcall(:MXExecutorPrint, (MX_handle, Ptr{Cstring}), self.handle, s_ref) - bytestring(s_ref[]) + @compat String(s_ref[]) end diff --git a/src/io.jl b/src/io.jl index 99fcd8516c6b..46fbcfe86a2d 100644 --- a/src/io.jl +++ b/src/io.jl @@ -584,7 +584,7 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) - iter_name = symbol(bytestring(ref_name[])) + iter_name = Symbol(String(ref_name[])) if gen_docs if endswith(string(iter_name), "Iter") @@ -592,7 +592,7 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) else f_desc = "" end - f_desc *= bytestring(ref_desc[]) * "\n\n" + f_desc *= String(ref_desc[]) * "\n\n" f_desc *= ":param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data.\n" f_desc *= ":param Base.Symbol label_name: keyword argument, default ``:softmax_label``. " * "The name of the label. 
Could be ``nothing`` if no label is presented in this dataset.\n\n" @@ -617,7 +617,7 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) # add an alias XXXProvider => XXXIter if endswith(string(iter_name), "Iter") - alias_name = symbol(string(iter_name)[1:end-4] * "Provider") + alias_name = Symbol(string(iter_name)[1:end-4] * "Provider") eval(:($alias_name = $iter_name)) end end diff --git a/src/kvstore.jl b/src/kvstore.jl index d52433f567b2..bc2e65c6fdd3 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -87,7 +87,7 @@ end function get_type(self :: KVStore) type_ref = Ref{char_p}(0) @mxcall(:MXKVStoreGetType, (MX_handle, Ref{char_p}), self, type_ref) - return symbol(bytestring(type_ref[])) + return Symbol(@compat String(type_ref[])) end function get_num_workers(self :: KVStore) diff --git a/src/metric.jl b/src/metric.jl index 7e76b969d0a0..1f601980ad13 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -208,7 +208,7 @@ type MultiACE <: AbstractEvalMetric end function get(metric :: MultiACE) - aces = [(symbol("ACE_$(i-0)"), - metric.aces[i] / metric.counts[i]) for i in 1:length(metric.aces)] + aces = [(Symbol("ACE_$(i-0)"), - metric.aces[i] / metric.counts[i]) for i in 1:length(metric.aces)] push!(aces, (:ACE, - Base.sum(metric.aces) / Base.sum(metric.counts))) return aces end diff --git a/src/model.jl b/src/model.jl index 3984eb9d1389..d9b81d02f3d9 100644 --- a/src/model.jl +++ b/src/model.jl @@ -374,7 +374,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra for (attr, value) in list_all_attr(self.arch) sattr = string(attr) if endswith(sattr, "grad") && value == "freeze" - push!(freeze_names, symbol(sattr[1:end-5])) + push!(freeze_names, Symbol(sattr[1:end-5])) end end # Needs to correspond to the correct id in the update loop layer idx=1:length(param_names). 
@@ -582,8 +582,8 @@ end function save_checkpoint(sym :: SymbolicNode, arg_params :: Dict{Base.Symbol, NDArray}, aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, epoch :: Int) save("$prefix-symbol.json", sym) - save_dict = merge(Dict([symbol("arg:$k") => v for (k,v) in arg_params]), - Dict([symbol("aux:$k") => v for (k,v) in aux_params])) + save_dict = merge(Dict([Symbol("arg:$k") => v for (k,v) in arg_params]), + Dict([Symbol("aux:$k") => v for (k,v) in aux_params])) save_filename = format("{1}-{2:04d}.params", prefix, epoch) save(save_filename, save_dict) info("Saved checkpoint to '$save_filename'") @@ -596,7 +596,7 @@ function load_checkpoint(prefix :: AbstractString, epoch :: Int) aux_params = Dict{Base.Symbol, NDArray}() for (k,v) in saved_dict tp, name = split(string(k), ':') - name = symbol(name) + name = Symbol(name) if tp == "arg" arg_params[name] = v else diff --git a/src/name.jl b/src/name.jl index 5ebf10917ce6..2ec531834232 100644 --- a/src/name.jl +++ b/src/name.jl @@ -9,14 +9,14 @@ import Base: get! # is automatically generated based on the hint string. 
function _default_get_name!(counter :: NameCounter, name :: NameType, hint :: NameType) if isa(name, Base.Symbol) || !isempty(name) - return symbol(name) + return Symbol(name) end - hint = symbol(hint) + hint = Symbol(hint) if !haskey(counter, hint) counter[hint] = 0 end - name = symbol("$hint$(counter[hint])") + name = Symbol("$hint$(counter[hint])") counter[hint] += 1 return name end @@ -34,11 +34,11 @@ type PrefixNameManager <: AbstractNameManager prefix :: Base.Symbol counter :: NameCounter end -PrefixNameManager(prefix :: NameType) = PrefixNameManager(symbol(prefix), NameCounter()) +PrefixNameManager(prefix :: NameType) = PrefixNameManager(Symbol(prefix), NameCounter()) function get!(manager :: PrefixNameManager, name :: NameType, hint :: NameType) name = _default_get_name!(manager.counter, name, hint) - return symbol("$(manager.prefix)$name") + return Symbol("$(manager.prefix)$name") end DEFAULT_NAME_MANAGER = BasicNameManager() diff --git a/src/ndarray.jl b/src/ndarray.jl index 9c4836e60771..1ed620d800e5 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -788,7 +788,7 @@ function load(filename::AbstractString, ::Type{NDArray}) return [NDArray(MX_NDArrayHandle(hdr)) for hdr in pointer_to_array(out_hdrs[], out_size)] else @assert out_size == out_name_size - return Dict([(symbol(bytestring(k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in + return Dict([(Symbol(@compat String(k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in zip(pointer_to_array(out_names[], out_size), pointer_to_array(out_hdrs[], out_size))]) end end @@ -903,11 +903,11 @@ function _import_ndarray_functions(;gen_docs=false) func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs, ref_ret_type) - func_name = symbol(bytestring(ref_name[])) + func_name = Symbol(@compat String(ref_name[])) if gen_docs # generate document only - f_desc = bytestring(ref_desc[]) * "\n\n" + f_desc = @compat String(ref_desc[]) * "\n\n" f_desc *= _format_docstring(Int(ref_narg[]), 
ref_arg_names, ref_arg_types, ref_arg_descs) docs[func_name] = f_desc else @@ -932,18 +932,18 @@ function _import_ndarray_functions(;gen_docs=false) # general ndarray function if arg_before_scalar - args = vcat([Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + args = vcat([Expr(:(::), Symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), Symbol("sca$i"), Real) for i=1:n_scalars], + [Expr(:(::), Symbol("out$i"), NDArray) for i=1:n_mutate_vars]) else - args = vcat([Expr(:(::), symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + args = vcat([Expr(:(::), Symbol("sca$i"), Real) for i=1:n_scalars], + [Expr(:(::), Symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), Symbol("out$i"), NDArray) for i=1:n_mutate_vars]) end - _use_vars = Expr(:ref, :MX_handle, [symbol("in$i") for i=1:n_used_vars]...) - _scalars = Expr(:ref, :MX_float, [symbol("sca$i") for i=1:n_scalars]...) - _mut_vars = Expr(:ref, :MX_handle, [symbol("out$i") for i=1:n_mutate_vars]...) + _use_vars = Expr(:ref, :MX_handle, [Symbol("in$i") for i=1:n_used_vars]...) + _scalars = Expr(:ref, :MX_float, [Symbol("sca$i") for i=1:n_scalars]...) + _mut_vars = Expr(:ref, :MX_handle, [Symbol("out$i") for i=1:n_mutate_vars]...) 
# XXX: hacky way of solving the problem that the arguments of `dot` should be swapped # See https://github.com/dmlc/MXNet.jl/issues/55 @@ -955,7 +955,7 @@ function _import_ndarray_functions(;gen_docs=false) if n_mutate_vars == 1 stmt_ret = :(return out1) else - stmt_ret = Expr(:return, Expr(:tuple, [symbol("out$i") for i=1:n_mutate_vars]...)) + stmt_ret = Expr(:return, Expr(:tuple, [Symbol("out$i") for i=1:n_mutate_vars]...)) end func_body = Expr(:block, stmt_call, stmt_ret) diff --git a/src/nn-factory.jl b/src/nn-factory.jl index 984f50a5f782..3725587f37a1 100644 --- a/src/nn-factory.jl +++ b/src/nn-factory.jl @@ -39,10 +39,10 @@ function MLP(input, spec; hidden_activation::Base.Symbol=:relu, prefix=gensym()) n_unit = s act_type = hidden_activation end - input = FullyConnected(input, name=symbol(prefix, "fc$i"), num_hidden=n_unit) + input = FullyConnected(input, name=Symbol(prefix, "fc$i"), num_hidden=n_unit) if i < n_layer || isa(s, Tuple) # will not add activation unless the user explicitly specified - input = Activation(input, name=symbol(prefix, "$act_type$i"), act_type=act_type) + input = Activation(input, name=Symbol(prefix, "$act_type$i"), act_type=act_type) end end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 4ce5b59a18ea..2b9d466c8e95 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -62,7 +62,7 @@ macro _list_symbol_info(self, func_name) $self, ref_sz, ref_names) narg = ref_sz[] names = pointer_to_array(ref_names[], narg) - names = [symbol(bytestring(x)) for x in names] + names = [Symbol(@compat String(x)) for x in names] return names end end @@ -129,13 +129,13 @@ end :return: The value belonging to key as a :class:`Nullable`. 
=# function get_attr(self :: SymbolicNode, key :: Symbol) - key_s = bytestring(string(key)) + key_s = @compat String(string(key)) ref_out = Ref{Cstring}() ref_success = Ref{Cint}(-1) @mxcall(:MXSymbolGetAttr, (MX_handle, Cstring, Ref{Cstring}, Ref{Cint}), self, key_s, ref_out, ref_success) if ref_success[] == 1 - return Nullable{ByteString}(bytestring(ref_out[])) + return Nullable{ByteString}(@compat String(ref_out[])) else return Nullable{ByteString}() end @@ -156,8 +156,8 @@ function list_attr(self :: SymbolicNode) strings = pointer_to_array(ref_strings[], narg) out = Dict{Symbol, ByteString}() for i in 1:2:narg - key = symbol(bytestring(strings[i])) - value = bytestring(strings[i+1]) + key = Symbol(@compat String(strings[i])) + value = @compat String(strings[i+1]) out[key] = value end return out @@ -178,8 +178,8 @@ function list_all_attr(self :: SymbolicNode) strings = pointer_to_array(ref_strings[], narg) out = Dict{Symbol, ByteString}() for i in 1:2:narg - key = symbol(bytestring(strings[i])) - value = bytestring(strings[i+1]) + key = Symbol(@compat String(strings[i])) + value = @compat String(strings[i+1]) out[key] = value end return out @@ -198,8 +198,8 @@ end cause unexpected behavior and inconsistency. =# function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) - key_s = bytestring(string(key)) - value_s = bytestring(value) + key_s = @compat String(string(key)) + value_s = @compat String(value) @mxcall(:MXSymbolSetAttr, (MX_handle, Cstring, Cstring), self, key_s, value_s) end @@ -325,7 +325,7 @@ end indicating the index, as in the list of :func:`list_outputs`. 
=# function Base.getindex(self :: SymbolicNode, idx :: Union{Base.Symbol, AbstractString}) - idx = symbol(idx) + idx = Symbol(idx) i_idx = find(idx .== list_outputs(self)) @assert(length(i_idx) > 0, "Cannot find output with name '$idx'") @assert(length(i_idx) < 2, "Found duplicated output with name '$idx'") @@ -474,7 +474,7 @@ end function to_json(self :: SymbolicNode) ref_json = Ref{char_p}(0) @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) - return bytestring(ref_json[]) + return @compat String(ref_json[]) end #=doc @@ -533,20 +533,20 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, ref_kv_nargs, ref_ret_type) - func_name_s= bytestring(ref_name[]) - func_name = symbol(func_name_s) - kv_nargs_s = bytestring(ref_kv_nargs[]) - kv_nargs = symbol(kv_nargs_s) + func_name_s= @compat String(ref_name[]) + func_name = Symbol(func_name_s) + kv_nargs_s = @compat String(ref_kv_nargs[]) + kv_nargs = Symbol(kv_nargs_s) if gen_docs - f_desc = bytestring(ref_desc[]) * "\n\n" + f_desc = @compat String(ref_desc[]) * "\n\n" if !isempty(kv_nargs_s) f_desc *= "This function support variable length positional :class:`SymbolicNode` inputs.\n\n" end f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) f_desc *= ":param Symbol name: The name of the :class:`SymbolicNode`. (e.g. 
`:my_symbol`), optional.\n" f_desc *= ":param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`.\n\n" - f_desc *= ":return: $(_format_typestring(bytestring(ref_ret_type[]))).\n\n" + f_desc *= ":return: $(_format_typestring(@compat String(ref_ret_type[]))).\n\n" return (func_name, f_desc) end @@ -565,7 +565,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) symbol_kws = Dict{Symbol, SymbolicNode}() attrs = Dict{Symbol, AbstractString}() - $(if kv_nargs != symbol("") + $(if kv_nargs != Symbol("") quote if !in($kv_nargs_s, param_keys) push!(param_keys, $kv_nargs_s) @@ -593,7 +593,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) if length(args) != 0 && length(symbol_kws) != 0 @assert(false, $func_name_s * " only accepts Symbols either as positional or keyword arguments, not both.") end - $(if kv_nargs != symbol("") + $(if kv_nargs != Symbol("") quote if length(symbol_kws) > 0 @assert(false, $func_name_s * " takes variable number of SymbolicNode arguments, " * diff --git a/src/util.jl b/src/util.jl index d6ce97a3e5ca..73c3316fa9f7 100644 --- a/src/util.jl +++ b/src/util.jl @@ -75,14 +75,14 @@ function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{ch docstrings = AbstractString[] for i = 1:narg - arg_name = bytestring(arg_names[i]) + arg_name = @compat String(arg_names[i]) if arg_name ∈ param_keys && remove_dup continue end push!(param_keys, arg_name) - arg_type = _format_typestring(bytestring(arg_types[i])) - arg_desc = bytestring(arg_descs[i]) + arg_type = _format_typestring(@compat String(arg_types[i])) + arg_desc = @compat String(arg_descs[i]) push!(docstrings, ":param $arg_name: $arg_desc\n:type $arg_name: $arg_type\n\n") end return join(docstrings, "\n") diff --git a/src/visualize.jl b/src/visualize.jl index b1fa90ff3851..3d46b9c38cb3 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -117,10 +117,10 @@ function to_graphviz(network :: 
SymbolicNode; title="Network Visualization", inp attr = Dict(:dir => :back, :arrowtail => :open, :color => "#737373") if draw_shape if input_node["op"] != "null" - key = symbol(input_name, "_output") + key = Symbol(input_name, "_output") shape = shape_dict[key][1:end-1] else - key = symbol(input_name) + key = Symbol(input_name) shape = shape_dict[key][1:end-1] end label = "(" * join([string(x) for x in shape], ",") * ")" From 55f2a42dfaaf67e27fd08ffda5f48abee445dbfc Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 25 May 2016 14:28:08 +0900 Subject: [PATCH 327/630] fix overwriting of inner function --- src/initializer.jl | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/src/initializer.jl b/src/initializer.jl index 00e71b19c3cd..8be33bdf66d2 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -138,12 +138,6 @@ function _init_weight(self :: XavierInitializer, name :: Base.Symbol, array :: N fan_in = prod(dims[2:end]) fan_out = dims[1] - if self.distribution == xv_uniform - func(σ, data) = rand!(-σ, σ, data) - elseif self.distribution == xv_normal - func(σ, data) = randn!(0.0, σ, data) - end - if self.regularization == xv_avg factor = (fan_in + fan_out) / 2 elseif self.regularization == xv_in @@ -154,5 +148,9 @@ function _init_weight(self :: XavierInitializer, name :: Base.Symbol, array :: N σ = √(self.magnitude / factor) - func(σ, array) + if self.distribution == xv_uniform + rand!(-σ, σ, array) + elseif self.distribution == xv_normal + randn!(0.0, σ, array) + end end From 48b723a96d3a1f90149c01e62be38fccff315bdd Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 25 May 2016 14:34:28 +0900 Subject: [PATCH 328/630] fix ByteString deprication and deprications in tests --- src/MXNet.jl | 1 + src/symbolic-node.jl | 8 ++++---- test/unittest/name.jl | 8 ++++---- test/unittest/ndarray.jl | 2 +- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/src/MXNet.jl b/src/MXNet.jl index 31618b795ed5..cdbacbe64be0 
100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -8,6 +8,7 @@ export mx module mx using Compat +import Compat.String using Formatting diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 2b9d466c8e95..da251e4cc74f 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -135,9 +135,9 @@ function get_attr(self :: SymbolicNode, key :: Symbol) @mxcall(:MXSymbolGetAttr, (MX_handle, Cstring, Ref{Cstring}, Ref{Cint}), self, key_s, ref_out, ref_success) if ref_success[] == 1 - return Nullable{ByteString}(@compat String(ref_out[])) + return Nullable{String}(@compat String(ref_out[])) else - return Nullable{ByteString}() + return Nullable{String}() end end @@ -154,7 +154,7 @@ function list_attr(self :: SymbolicNode) self, ref_sz, ref_strings) narg = 2*ref_sz[] strings = pointer_to_array(ref_strings[], narg) - out = Dict{Symbol, ByteString}() + out = Dict{Symbol, String}() for i in 1:2:narg key = Symbol(@compat String(strings[i])) value = @compat String(strings[i+1]) @@ -176,7 +176,7 @@ function list_all_attr(self :: SymbolicNode) self, ref_sz, ref_strings) narg = 2*ref_sz[] strings = pointer_to_array(ref_strings[], narg) - out = Dict{Symbol, ByteString}() + out = Dict{Symbol, String}() for i in 1:2:narg key = Symbol(@compat String(strings[i])) value = @compat String(strings[i+1]) diff --git a/test/unittest/name.jl b/test/unittest/name.jl index a7dd6f7bfc35..aca039670286 100644 --- a/test/unittest/name.jl +++ b/test/unittest/name.jl @@ -10,8 +10,8 @@ function test_default() @test get!(mx.DEFAULT_NAME_MANAGER, string(name), "") == name hint = name - @test get!(mx.DEFAULT_NAME_MANAGER, "", hint) == symbol("$(hint)0") - @test get!(mx.DEFAULT_NAME_MANAGER, "", string(hint)) == symbol("$(hint)1") + @test get!(mx.DEFAULT_NAME_MANAGER, "", hint) == Symbol("$(hint)0") + @test get!(mx.DEFAULT_NAME_MANAGER, "", string(hint)) == Symbol("$(hint)1") end function test_prefix() @@ -21,8 +21,8 @@ function test_prefix() prefix = :_____foobar_____ prefix_manager = 
mx.PrefixNameManager(prefix) - @test get!(prefix_manager, name, "") == symbol("$prefix$name") - @test get!(prefix_manager, "", name) == symbol("$prefix$(name)0") + @test get!(prefix_manager, name, "") == Symbol("$prefix$name") + @test get!(prefix_manager, "", name) == Symbol("$prefix$(name)0") end test_default() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 67bff2959c8a..444aa90616fe 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -202,7 +202,7 @@ function test_saveload() end # save and load dictionary of ndarrays - names = [symbol("array$i") for i = 1:n_arrays] + names = [Symbol("array$i") for i = 1:n_arrays] dict = Dict([n => v for (n,v) in zip(names, nd_arrays)]) mx.save(fname, dict) data = mx.load(fname, mx.NDArray) From afd9baa1b614768502821a0d8f43803d400eb417 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 May 2016 10:10:28 +0900 Subject: [PATCH 329/630] enable travis for 0.5 --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index df63021f35aa..9f08e176eed2 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ os: - osx julia: - 0.4 - #- nightly + - nightly # dependent apt packages addons: From 9c9e762947fff926cd040743c367b91403429f35 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 24 May 2016 10:45:22 +0900 Subject: [PATCH 330/630] Initial support for DType and TypeFlags This also adds MXNDArrayCreateNX and rewrites eltype to get the DType from mxnet --- src/ndarray.jl | 67 ++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 62 insertions(+), 5 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 1ed620d800e5..3987c7f5bbbd 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -3,6 +3,42 @@ NDArray API =========== =# +# All the types supported by mshadow. 
+typealias DType Union{Float32, Float64, Float16, UInt8, Int32} +@enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 + +function toTypeFlag{T <: DType}(:: Type{T}) + if T == Float32 + return kFloat32 + elseif T == Float64 + return kFloat64 + elseif T == Float16 + return kFloat16 + elseif T == UInt8 + return kUint8 + elseif T == Int32 + return kInt32 + else + throw(ArgumentError("Can't convert $T to Dtype.")) + end +end + +function fromTypeFlag(T :: TypeFlag) + if T == kFloat32 + return Float32 + elseif T == kFloat64 + return Float64 + elseif T == kFloat16 + return Float16 + elseif T == kUint8 + return UInt8 + elseif T == kInt32 + return Int32 + else + throw(ArgumentError("Can't convert Dtype $T.")) + end +end + # create a NDArray handle of specific shape function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) h_ref = Ref{MX_handle}(0) @@ -13,6 +49,17 @@ function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc return handle end +# create a NDArray handle of specific shape type +function _ndarray_alloc{T <: DType,N}(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) + h_ref = Ref{MX_handle}(0) + shape = flipdim(MX_uint[shape...],1) + dtype = toTypeFlag(T) + @mxcall(:MXNDArrayCreateEx, (Ptr{MX_uint}, MX_uint, Cint, Cint, Cint, Cint, Ref{MX_handle}), + shape, length(shape), ctx.device_type, ctx.device_id, delay_alloc, dtype, h_ref) + handle = MX_NDArrayHandle(h_ref[]) + return handle +end + # create a handle to an empty NDArray, this handle can be used to hold # results returned by libmx API calls function _ndarray_alloc() @@ -51,7 +98,7 @@ type NDArray end function Base.show(io :: IO, arr :: NDArray) - print(io, "mx.NDArray$(size(arr))") + print(io, "mx.NDArray{$(eltype(arr))}$(size(arr))") end function NDArray{T<:Real}(data :: Array{T}) @@ -185,10 +232,20 @@ end #=doc .. function:: eltype(arr :: NDArray) - Get the element type of an :class:`NDArray`. 
Currently the element type is always ``mx.MX_float``. + Get the element type of an :class:`NDArray`. =# -function eltype(arr :: NDArray) - MX_float +function eltype{T <: Union{NDArray, MX_NDArrayHandle}}(arr :: T) + dtype_ref = Ref{Cint}(0) + @mxcall(:MXNDArrayGetDType, (MX_handle, Ptr{Cint}), arr, dtype_ref) + + if dtype_ref[] == -1 # arr->is_none() + warn("Eltype of $arr is not defined") + Base.show_backtrace(STDOUT,backtrace()) + println() + return Float32 + else + return fromTypeFlag(TypeFlag(dtype_ref[])) + end end @@ -499,7 +556,7 @@ function .-(arg0 :: Real, arg1 :: NDArray) end function -(arg0 :: NDArray) - _mul_scalar(arg0, -1.0) + _mul_scalar(arg0, -one(eltype(arg0))) end #=doc From a282586232a65ca05311f99a40c46eb4e6ee1985 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 May 2016 12:06:14 +0900 Subject: [PATCH 331/630] add typed empty, ones, zeros --- src/ndarray.jl | 62 +++++++++++++++++++++++++++++++++++++++- test/unittest/ndarray.jl | 14 +++++++++ 2 files changed, 75 insertions(+), 1 deletion(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 3987c7f5bbbd..f903a797fb66 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -6,6 +6,7 @@ NDArray API # All the types supported by mshadow. typealias DType Union{Float32, Float64, Float16, UInt8, Int32} @enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 +typealias DEFAULT_DTYPE Float32 function toTypeFlag{T <: DType}(:: Type{T}) if T == Float32 @@ -127,13 +128,32 @@ function context(arr :: NDArray) return Context(ref_typeid[], ref_devid[]) end + +#=doc +.. function:: + empty(DType, shape :: Tuple, ctx :: Context) + empty(DType, shape :: Tuple) + empty(DType, dim1, dim2, ...) + + Allocate memory for an uninitialized :class:`NDArray` with a specified type. 
+=# +function empty{N,T<:DType}(::Type{T}, shape :: NTuple{N, Int}) + empty(T, shape, cpu()) +end +function empty{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) + NDArray(_ndarray_alloc(T, shape, ctx, false)) +end +function empty{T<:DType}(:: Type{T}, shape :: Int...) + empty(T, shape) +end + #=doc .. function:: empty(shape :: Tuple, ctx :: Context) empty(shape :: Tuple) empty(dim1, dim2, ...) - Allocate memory for an uninitialized :class:`NDArray` with specific shape. + Allocate memory for an uninitialized :class:`NDArray` with specific shape of type Float32. =# function empty{N}(shape :: NTuple{N, Int}) empty(shape, cpu()) @@ -150,6 +170,26 @@ Interface functions similar to Julia Arrays ------------------------------------------- =# +#=doc +.. function:: + zeros(DType, shape :: Tuple, ctx :: Context) + zeros(DType, shape :: Tuple) + zeros(DType, dim1, dim2, ...) + + Create zero-ed :class:`NDArray` with specific shape and type +=# +function zeros{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}) + zeros(T, shape, cpu()) +end +function zeros{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) + arr = empty(T, shape, ctx) + arr[:] = zero(T) + return arr +end +function zeros{T<:DType}(:: Type{T}, shape :: Int...) + zeros(T, shape) +end + #=doc .. function:: zeros(shape :: Tuple, ctx :: Context) @@ -170,6 +210,26 @@ function zeros(shape :: Int...) zeros(shape) end +#=doc +.. function:: + ones(DType, shape :: Tuple, ctx :: Context) + ones(DType, shape :: Tuple) + ones(DType, dim1, dim2, ...) + + Create an :class:`NDArray` with specific shape & type, and initialize with 1. +=# +function ones{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}) + ones(T, shape, cpu()) +end +function ones{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) + arr = empty(T, shape, ctx) + arr[:] = one(T) + return arr +end +function ones{T<:DType}(:: Type{T}, shape :: Int...) + ones(T, shape) +end + #=doc .. 
function:: ones(shape :: Tuple, ctx :: Context) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 444aa90616fe..6750e9eb5914 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -271,6 +271,19 @@ function test_dot() @test size(z) == (2, 8) end +function test_eltype() + info("NDArray::eltype") + dims1 = (3,3) + + x = mx.empty(dims1) + @test eltype(x) == mx.DEFAULT_DTYPE + + for TF in instances(mx.TypeFlag) + T = mx.fromTypeFlag(TF) + x = mx.empty(T, dims1) + @test eltype(x) == T + end +end ################################################################################ # Run tests @@ -286,6 +299,7 @@ test_gd() test_saveload() test_clip() test_sqrt() +test_eltype() test_nd_as_jl() test_dot() From c161fe16c204685bf96aa58008066ab15ea3f940 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 May 2016 13:56:10 +0900 Subject: [PATCH 332/630] make arithmetic work with DType and extend arithmetic tests on all types --- src/ndarray.jl | 41 ++++++++++++++++++++------------------- test/unittest/bind.jl | 24 ++++++++++++----------- test/unittest/ndarray.jl | 13 +++++++++++++ test/unittest/operator.jl | 2 +- 4 files changed, 48 insertions(+), 32 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index f903a797fb66..1c34c45d0c33 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -20,7 +20,7 @@ function toTypeFlag{T <: DType}(:: Type{T}) elseif T == Int32 return kInt32 else - throw(ArgumentError("Can't convert $T to Dtype.")) + throw(ArgumentError("Can't convert $T to DType.")) end end @@ -36,7 +36,7 @@ function fromTypeFlag(T :: TypeFlag) elseif T == kInt32 return Int32 else - throw(ArgumentError("Can't convert Dtype $T.")) + throw(ArgumentError("Can't convert DType $T.")) end end @@ -353,7 +353,7 @@ import Base: setindex! 
=# function setindex!(arr :: NDArray, val :: Real, ::Colon) @assert(arr.writable) - _set_value(val, arr) + _set_value(convert(eltype(arr), val), arr) return arr end function setindex!{T<:Real}(arr :: NDArray, val :: Array{T}, ::Colon) @@ -426,9 +426,10 @@ function copy!(dst :: NDArray, src :: NDArray) return dst end -function copy!(dst :: Array{MX_float}, src :: NDArray) +function copy!{T<:DType}(dst :: Array{T}, src :: NDArray) + @assert T == eltype(src) @assert size(dst) == size(src) - @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{MX_float}, Csize_t), + @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{Void}, Csize_t), src, pointer(dst), length(dst)) return dst end @@ -439,8 +440,8 @@ end function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) @assert dst.writable @assert size(dst) == size(src) - src = convert(Array{MX_float}, src) # this might involve copying - @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), + src = convert(Array{eltype(dst)}, src) # this might involve copying + @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{Void}, Csize_t), dst.handle, pointer(src), length(src)) return dst end @@ -448,8 +449,8 @@ end function copy_ignore_shape!{T<:Real}(dst :: NDArray, src :: Array{T}) @assert dst.writable @assert length(dst) == length(src) - src = convert(Array{MX_float}, src) # this might involve copying - @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{MX_float}, Csize_t), + src = convert(Array{eltype(dst)}, src) # this might involve copying + @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{Void}, Csize_t), dst.handle, pointer(src), length(src)) return dst end @@ -466,19 +467,19 @@ end =# # Create copy: NDArray -> Julia Array function copy(arr :: NDArray) - j_arr = Array(MX_float, size(arr)) + j_arr = Array{eltype(arr)}(size(arr)) copy!(j_arr, arr) end # Create copy: NDArray -> NDArray in a given context function copy(arr :: NDArray, ctx :: Context) - dst = NDArray(_ndarray_alloc(size(arr), ctx, true)) + dst = 
NDArray(_ndarray_alloc(eltype(arr), size(arr), ctx, true)) copy!(dst, arr) end # Create copy: Julia Array -> NDArray in a given context -function copy{T<:Real}(arr :: Array{T}, ctx :: Context) - dst = empty(size(arr), ctx) +function copy{T<:DType}(arr :: Array{T}, ctx :: Context) + dst = empty(T, size(arr), ctx) copy!(dst, arr) end @@ -543,7 +544,7 @@ function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) @assert dst.writable for arg in args if isa(arg, Real) - _plus_scalar(dst, arg, dst) + _plus_scalar(dst, convert(eltype(dst), arg), dst) else _plus(dst, arg, dst) end @@ -583,7 +584,7 @@ end function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) - _minus_scalar(dst, arg, dst) + _minus_scalar(dst, convert(eltype(dst), arg), dst) else _minus(dst, arg, dst) end @@ -628,7 +629,7 @@ end function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) - _mul_scalar(dst, arg, dst) + _mul_scalar(dst, convert(eltype(dst), arg), dst) else _mul(dst, arg, dst) end @@ -673,7 +674,7 @@ end function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) - _div_scalar(dst, arg, dst) + _div_scalar(dst, convert(eltype(dst), arg), dst) else _div(dst, arg, dst) end @@ -821,7 +822,7 @@ import Base.pointer function pointer(arr :: NDArray) pdata = Ref{Ptr{MX_float}}(0) @mxcall(:MXNDArrayGetData, (MX_handle, Ref{Ptr{MX_float}}), arr, pdata) - return pdata[] + return convert(Ptr{eltype(arr)}, pdata[]) end function _wait_to_read(arr :: NDArray) @mxcall(:MXNDArrayWaitToRead, (MX_handle,), arr) @@ -861,10 +862,10 @@ end :param Array j_arr: the Julia Array. :param NDArray arr: the :class:`NDArray`. 
=# -function is_shared{T}(j_arr :: Array{T}, arr :: NDArray) +function is_shared(j_arr :: Array, arr :: NDArray) false end -function is_shared(j_arr :: Array{MX_float}, arr :: NDArray) +function is_shared{T<:DType}(j_arr :: Array{T}, arr :: NDArray) if length(j_arr) != length(arr) return false end diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index 760e261fe581..6e1b821aa418 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -7,19 +7,19 @@ using ..Main: rand_dims, reldiff ################################################################################ # Test Implementations ################################################################################ -function test_arithmetic(uf, gf) +function test_arithmetic{T <: mx.DType}(::Type{T}, uf, gf) shape = rand_dims() - info("Bind::arithmetic::$uf::dims = $shape") + info("Bind::arithmetic::$T::$uf::dims = $shape") lhs = mx.Variable(:lhs) rhs = mx.Variable(:rhs) ret = uf(lhs, rhs) @test mx.list_arguments(ret) == [:lhs, :rhs] - lhs_arr = mx.NDArray(rand(shape)) - rhs_arr = mx.NDArray(rand(shape)) - lhs_grad = mx.empty(shape) - rhs_grad = mx.empty(shape) + lhs_arr = mx.NDArray(rand(T, shape)) + rhs_arr = mx.NDArray(rand(T, shape)) + lhs_grad = mx.empty(T, shape) + rhs_grad = mx.empty(T, shape) exec2 = mx.bind(ret, mx.Context(mx.CPU), [lhs_arr, rhs_arr], args_grad=[lhs_grad, rhs_grad]) exec3 = mx.bind(ret, mx.Context(mx.CPU), [lhs_arr, rhs_arr]) @@ -39,7 +39,7 @@ function test_arithmetic(uf, gf) @test reldiff(out1, out4) < 1e-6 # test gradients - out_grad = mx.NDArray(ones(shape)) + out_grad = mx.NDArray(ones(T, shape)) lhs_grad2, rhs_grad2 = gf(copy(out_grad), copy(lhs_arr), copy(rhs_arr)) mx.backward(exec2, out_grad) @test reldiff(copy(lhs_grad), lhs_grad2) < 1e-6 @@ -55,10 +55,12 @@ function test_arithmetic(uf, gf) end function test_arithmetic() - test_arithmetic(.+, (g,x,y) -> (g,g)) - test_arithmetic(.-, (g,x,y) -> (g,-g)) - test_arithmetic(.*, (g,x,y) -> (y.*g, x.*g)) - 
test_arithmetic(./, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) + for T in [mx.fromTypeFlag(TF) for TF in instances(mx.TypeFlag)] + test_arithmetic(T, .+, (g,x,y) -> (g,g)) + test_arithmetic(T, .-, (g,x,y) -> (g,-g)) + test_arithmetic(T, .*, (g,x,y) -> (y.*g, x.*g)) + test_arithmetic(T, ./, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) + end end ################################################################################ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 6750e9eb5914..f76d8cd95b59 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -44,11 +44,24 @@ function test_assign() array2 = mx.zeros(size(tensor)) @test reldiff(zeros(size(tensor)), copy(array2)) < 1e-6 + array3 = mx.zeros(Float16, size(tensor)) + @test reldiff(zeros(Float16, size(tensor)), copy(array2)) < 1e-6 + # scalar -> NDArray assignment scalar = rand() array2[:] = scalar @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < 1e-6 + scalar = rand(Float16) + array2[:] = scalar + @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < 1e-6 + + scalar = rand(Float64) + array2[:] = scalar + array3[:] = scalar + @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < 1e-6 + @test reldiff(zeros(Float16,size(tensor))+scalar, copy(array3)) < 1e-6 + # NDArray -> NDArray assignment array[:] = array2 @test reldiff(zeros(size(tensor))+scalar, copy(array)) < 1e-6 diff --git a/test/unittest/operator.jl b/test/unittest/operator.jl index 2284b1080c7e..f4332582428c 100644 --- a/test/unittest/operator.jl +++ b/test/unittest/operator.jl @@ -9,7 +9,7 @@ function test_scalar_op() shape = rand_dims() info("Operator::scalar_op::dims = $shape") - data_jl = 5ones(shape) + data_jl = 5ones(Float32, shape) arr_data = mx.copy(data_jl, mx.cpu()) arr_grad = mx.zeros(shape) From 2916a8f5da0f31a4ca8d66afbc86c97cf3791a51 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 May 2016 16:49:47 +0900 Subject: [PATCH 333/630] turn macro into helper function --- 
src/symbolic-node.jl | 87 ++++++++++++++++++++++---------------------- 1 file changed, 43 insertions(+), 44 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index da251e4cc74f..dbc3eb7dd26d 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -236,53 +236,52 @@ function Group(nodes :: SymbolicNode...) SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -macro _infer_shape(self, keys, indptr, sdata) - quote - ref_arg_shape_size = Ref{MX_uint}(0) - ref_arg_shape_ndim = Ref{Ptr{MX_uint}}(0) - ref_arg_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) - ref_out_shape_size = Ref{MX_uint}(0) - ref_out_shape_ndim = Ref{Ptr{MX_uint}}(0) - ref_out_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) - ref_aux_shape_size = Ref{MX_uint}(0) - ref_aux_shape_ndim = Ref{Ptr{MX_uint}}(0) - ref_aux_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) - ref_complete = Ref{Cint}(0) - @mxcall(:MXSymbolInferShape, - (MX_handle, MX_uint, char_pp, Ptr{MX_uint}, Ptr{MX_uint}, - Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, - Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, - Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, - Ref{Cint}), - self, length(indptr)-1, keys, indptr, sdata, - ref_arg_shape_size, ref_arg_shape_ndim, ref_arg_shape_data, - ref_out_shape_size, ref_out_shape_ndim, ref_out_shape_data, - ref_aux_shape_size, ref_aux_shape_ndim, ref_aux_shape_data, - ref_complete) - if ref_complete[] == 0 - return (nothing, nothing, nothing) - else - function build_shapes(shape_size::MX_uint, shape_ndim::Ptr{MX_uint}, shape_data::Ptr{Ptr{MX_uint}}) - shape_ndim = pointer_to_array(shape_ndim, shape_size) - shape_data = pointer_to_array(shape_data, shape_size) - shapes = map(1:shape_size) do i - my_shape = pointer_to_array(shape_data[i], shape_ndim[i]) - tuple(flipdim(Int[my_shape...],1)...) 
- end - convert(Vector{Tuple}, shapes) - end - return ( - build_shapes(ref_arg_shape_size[], ref_arg_shape_ndim[], ref_arg_shape_data[]), - build_shapes(ref_out_shape_size[], ref_out_shape_ndim[], ref_out_shape_data[]), - build_shapes(ref_aux_shape_size[], ref_aux_shape_ndim[], ref_aux_shape_data[]) - ) - end +function _build_shapes(shape_size::MX_uint, shape_ndim::Ptr{MX_uint}, shape_data::Ptr{Ptr{MX_uint}}) + shape_ndim = pointer_to_array(shape_ndim, shape_size) + shape_data = pointer_to_array(shape_data, shape_size) + shapes = map(1:shape_size) do i + my_shape = pointer_to_array(shape_data[i], shape_ndim[i]) + tuple(flipdim(Int[my_shape...],1)...) + end + convert(Vector{Tuple}, shapes) +end + +function _infer_shape(self, keys, indptr, sdata) + ref_arg_shape_size = Ref{MX_uint}(0) + ref_arg_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_arg_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_out_shape_size = Ref{MX_uint}(0) + ref_out_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_out_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_aux_shape_size = Ref{MX_uint}(0) + ref_aux_shape_ndim = Ref{Ptr{MX_uint}}(0) + ref_aux_shape_data = Ref{Ptr{Ptr{MX_uint}}}(0) + ref_complete = Ref{Cint}(0) + @mxcall(:MXSymbolInferShape, + (MX_handle, MX_uint, char_pp, Ptr{MX_uint}, Ptr{MX_uint}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{MX_uint}, Ref{Ptr{MX_uint}}, Ref{Ptr{Ptr{MX_uint}}}, + Ref{Cint}), + self, length(indptr)-1, keys, indptr, sdata, + ref_arg_shape_size, ref_arg_shape_ndim, ref_arg_shape_data, + ref_out_shape_size, ref_out_shape_ndim, ref_out_shape_data, + ref_aux_shape_size, ref_aux_shape_ndim, ref_aux_shape_data, + ref_complete) + if ref_complete[] == 0 + return (nothing, nothing, nothing) + else + return ( + _build_shapes(ref_arg_shape_size[], ref_arg_shape_ndim[], ref_arg_shape_data[]), + _build_shapes(ref_out_shape_size[], ref_out_shape_ndim[], ref_out_shape_data[]), + _build_shapes(ref_aux_shape_size[], 
ref_aux_shape_ndim[], ref_aux_shape_data[]) + ) end end #=doc .. function:: - infer_shape(self :: SymbolicNode; args...) + infer_shape(self :: SymbolicNode, args...) infer_shape(self :: SymbolicNode; kwargs...) Do shape inference according to the input shapes. The input shapes could be provided @@ -302,7 +301,7 @@ function infer_shape(self :: SymbolicNode; kwargs...) push!(indptr, length(sdata)) end keys = AbstractString[string(x[1]) for x in kwargs] - @_infer_shape(self, keys, indptr, sdata) + _infer_shape(self, keys, indptr, sdata) end function infer_shape(self :: SymbolicNode, args :: Union{Tuple, Void}...) sdata = MX_uint[] @@ -313,7 +312,7 @@ function infer_shape(self :: SymbolicNode, args :: Union{Tuple, Void}...) push!(indptr, length(sdata)) end keys = Ptr{char_p}(0) - @_infer_shape(self, keys, indptr, sdata) + _infer_shape(self, keys, indptr, sdata) end #=doc From dc405e73fd6558c2944edbe76c03c6d91bbb4033 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 May 2016 17:57:43 +0900 Subject: [PATCH 334/630] add type_infer --- src/symbolic-node.jl | 64 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 64 insertions(+) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index dbc3eb7dd26d..94e45188038a 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -315,6 +315,70 @@ function infer_shape(self :: SymbolicNode, args :: Union{Tuple, Void}...) 
_infer_shape(self, keys, indptr, sdata) end +function _infer_type(self, keys, arg_type_data) + ref_in_type_size = Ref{MX_uint}() + ref_in_type_data = Ref{Ptr{Cint}}() + ref_out_type_size = Ref{MX_uint}() + ref_out_type_data = Ref{Ptr{Cint}}() + ref_aux_type_size = Ref{MX_uint}() + ref_aux_type_data = Ref{Ptr{Cint}}() + ref_complete = Ref{Cint}() + + @mxcall(:MXSymbolInferType, + (MX_handle, MX_uint, char_pp, Ptr{Cint}, + Ref{MX_uint}, Ref{Ptr{Cint}}, + Ref{MX_uint}, Ref{Ptr{Cint}}, + Ref{MX_uint}, Ref{Ptr{Cint}}, + Ref{Cint}), + self, length(arg_type_data)-1, keys, arg_type_data, + ref_in_type_size, ref_in_type_data, + ref_out_type_size, ref_out_type_data, + ref_aux_type_size, ref_aux_type_data, + ref_complete) + + if ref_complete[] == 0 + return (nothing, nothing, nothing) + else + in_type = pointer_to_array(ref_in_type_data[], ref_in_type_size[]) + out_type = pointer_to_array(ref_out_type_data[], ref_out_type_size[]) + aux_type = pointer_to_array(ref_aux_type_data[], ref_aux_type_size[]) + return ([fromTypeFlag(TypeFlag(t)) for t in in_type], + [fromTypeFlag(TypeFlag(t)) for t in out_type], + [fromTypeFlag(TypeFlag(t)) for t in aux_type]) + end +end + +#=doc +.. function:: + infer_type(self :: SymbolicNode; kwargs...) + infer_type(self :: SymbolicNode, args...) + + Do type inference according to the input types. The input types could be provided + as a list of types, which should specify the types of inputs in the same order as + the arguments returned by :func:`list_arguments`. Alternatively, the type information + could be specified via keyword arguments. + + :return: A 3-tuple containing types of all the arguments, types of all the outputs and + types of all the auxiliary variables. If type inference failed due to incomplete + or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. +=# +function infer_type(self :: SymbolicNode; kwargs...) 
+ types = Cint[toTypeFlag(x[2]) for x in kwargs] + keys = AbstractString[string(x[1]) for x in kwargs] + _infer_type(self, keys, types) +end + +function infer_type(self :: SymbolicNode, args :: Union{Tuple, Void}...) + types = Cint[] + keys = Ptr{char_p}(0) + + for arg in args + if isa(arg, Void); continue; end + push!(types, toTypeFlag(arg)) + end + _infer_type(self, keys, types) +end + #=doc .. function:: getindex(self :: SymbolicNode, idx :: Union{Int, Base.Symbol, AbstractString}) From 4a4ea61d08f2eb77fa8aef781ca25dad2a5b8229 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Mon, 30 May 2016 12:51:38 +0900 Subject: [PATCH 335/630] make bind tests use isapprox and don't run division tests for Integer types --- test/unittest/bind.jl | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index 6e1b821aa418..64ae2cd95592 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -34,24 +34,24 @@ function test_arithmetic{T <: mx.DType}(::Type{T}, uf, gf) out2 = copy(exec2.outputs[1]) out3 = copy(exec3.outputs[1]) out4 = copy(exec4.outputs[1]) - @test reldiff(out1, out2) < 1e-6 - @test reldiff(out1, out3) < 1e-6 - @test reldiff(out1, out4) < 1e-6 + @test isapprox(out1, out2) + @test isapprox(out1, out3) + @test isapprox(out1, out4) # test gradients out_grad = mx.NDArray(ones(T, shape)) lhs_grad2, rhs_grad2 = gf(copy(out_grad), copy(lhs_arr), copy(rhs_arr)) mx.backward(exec2, out_grad) - @test reldiff(copy(lhs_grad), lhs_grad2) < 1e-6 - @test reldiff(copy(rhs_grad), rhs_grad2) < 1e-6 + @test isapprox(copy(lhs_grad), lhs_grad2) + @test isapprox(copy(rhs_grad), rhs_grad2) # reset grads lhs_grad[:] = 0 rhs_grad[:] = 0 # compute using another binding mx.backward(exec4, out_grad) - @test reldiff(copy(lhs_grad), lhs_grad2) < 1e-6 - @test reldiff(copy(rhs_grad), rhs_grad2) < 1e-6 + @test isapprox(copy(lhs_grad), lhs_grad2) + @test isapprox(copy(rhs_grad), rhs_grad2) end function 
test_arithmetic() @@ -59,7 +59,7 @@ function test_arithmetic() test_arithmetic(T, .+, (g,x,y) -> (g,g)) test_arithmetic(T, .-, (g,x,y) -> (g,-g)) test_arithmetic(T, .*, (g,x,y) -> (y.*g, x.*g)) - test_arithmetic(T, ./, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) + T <: Integer || test_arithmetic(T, ./, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) end end From affebe675eaa4761475768ed5386439f5d828dd1 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 7 Jun 2016 11:35:32 +0900 Subject: [PATCH 336/630] Exclude division test for Float16 --- test/unittest/bind.jl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index 64ae2cd95592..c764435d5a9c 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -59,7 +59,11 @@ function test_arithmetic() test_arithmetic(T, .+, (g,x,y) -> (g,g)) test_arithmetic(T, .-, (g,x,y) -> (g,-g)) test_arithmetic(T, .*, (g,x,y) -> (y.*g, x.*g)) - T <: Integer || test_arithmetic(T, ./, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) + if T <: Integer || T == Float16 + warn("Not running division test for $T") + else + test_arithmetic(T, ./, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) + end end end From 9a827f796a2d574c68d87f359c679c1ccc1a50ad Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 7 Jun 2016 11:43:52 +0900 Subject: [PATCH 337/630] fix blas_vendor for v0.5 --- deps/build.jl | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index e0e0a0e1388f..9449c6337102 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -30,8 +30,14 @@ if !libmxnet_detected openblas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) + if VERSION >= v"0.5.0-dev+4338" + blas_vendor = Base.BLAS.vendor() + else + blas_vendor = Base.blas_vendor() + end + ilp64 = "" - if Base.blas_vendor() == :openblas64 + if blas_vendor == :openblas64 ilp64 = "-DINTERFACE64" end From 5e70cb96e9de4a7a18d347d5f15bd42ea46e3e99 Mon Sep 17 00:00:00 2001 From: Valentin 
Churavy Date: Tue, 5 Jul 2016 09:23:15 +0900 Subject: [PATCH 338/630] start transitioning to Documenter.jl --- .gitignore | 2 ++ docs/make.jl | 3 +++ docs/src/index.md | 0 3 files changed, 5 insertions(+) create mode 100644 docs/make.jl create mode 100644 docs/src/index.md diff --git a/.gitignore b/.gitignore index e2b2ea70b3fc..3325a370ecaf 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,5 @@ deps/src deps/usr deps/deps.jl docs/_build +docs/build/ +docs/site/ diff --git a/docs/make.jl b/docs/make.jl new file mode 100644 index 000000000000..1a20e2b93826 --- /dev/null +++ b/docs/make.jl @@ -0,0 +1,3 @@ +using Documenter, MXNet + +makedocs() diff --git a/docs/src/index.md b/docs/src/index.md new file mode 100644 index 000000000000..e69de29bb2d1 From ce16bfe76d0aee40d7c157de2f7718a9b5ab4739 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 5 Jul 2016 23:41:16 +0900 Subject: [PATCH 339/630] split import_ndarry functions up --- src/ndarray.jl | 192 ++++++++++++++++++++++++++----------------------- 1 file changed, 104 insertions(+), 88 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 1c34c45d0c33..82b421651a4f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -988,110 +988,126 @@ object (:class:`NDArray`) is returned. 
Otherwise, a tuple containing all the out **autogen:EMBED:ndarray:EMBED:autogen** =# -function _import_ndarray_functions(;gen_docs=false) - n_ref = Ref{MX_uint}(0) - h_ref = Ref{Ptr{MX_handle}}(0) - @mxcall(:MXListFunctions, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) - n_funcs = n_ref[] - h_funcs = pointer_to_array(h_ref[], n_funcs) +function _get_ndarray_functions() + n = Ref{MX_uint}(0) + handles = Ref{Ptr{MX_handle}}(0) - if gen_docs - docs = Dict{Base.Symbol, AbstractString}() - end + @mxcall(:MXListFunctions, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n, handles) - for i = 1:n_funcs - func_handle = h_funcs[i] + funcs = unsafe_wrap(Array, handles[], n[]) + return funcs +end - #---------------------------------------- - # get function information (human readable) - ref_name = Ref{char_p}(0) - ref_desc = Ref{char_p}(0) - ref_narg = Ref{MX_uint}(0) +function _get_function_description(handle :: MX_handle) + # get function information (human readable) + ref_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_narg = Ref{MX_uint}(0) - ref_arg_names = Ref{char_pp}(0) - ref_arg_types = Ref{char_pp}(0) - ref_arg_descs = Ref{char_pp}(0) + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) - ref_ret_type = Ref{char_p}(0) + ref_ret_type = Ref{char_p}(0) - @mxcall(:MXFuncGetInfo, - (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, - Ref{char_pp}, Ref{char_pp}, Ref{char_p}), - func_handle, ref_name, ref_desc, ref_narg, ref_arg_names, - ref_arg_types, ref_arg_descs, ref_ret_type) + @mxcall(:MXFuncGetInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, + Ref{char_pp}, Ref{char_pp}, Ref{char_p}), + handle, ref_name, ref_desc, ref_narg, ref_arg_names, + ref_arg_types, ref_arg_descs, ref_ret_type) - func_name = Symbol(@compat String(ref_name[])) + name = Symbol(unsafe_wrap(String, ref_name[])) - if gen_docs - # generate document only - f_desc = @compat String(ref_desc[]) * "\n\n" - f_desc 
*= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) - docs[func_name] = f_desc - else - #---------------------------------------- - # get function specification - ref_n_use_vars = Ref{MX_uint}(0) - ref_n_scalars = Ref{MX_uint}(0) - ref_n_mut_vars = Ref{MX_uint}(0) - ref_type_mask = Ref{Cint}(0) - @mxcall(:MXFuncDescribe, - (MX_handle, Ref{MX_uint}, Ref{MX_uint}, Ref{MX_uint}, Ref{Cint}), - func_handle, ref_n_use_vars, ref_n_scalars, ref_n_mut_vars, ref_type_mask) - - #---------------------------------------- - # prepare function definition - n_used_vars = ref_n_use_vars[] - n_scalars = ref_n_scalars[] - n_mutate_vars = ref_n_mut_vars[] - type_mask = ref_type_mask[] - accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 - arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 - - # general ndarray function - if arg_before_scalar - args = vcat([Expr(:(::), Symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), Symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), Symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - else - args = vcat([Expr(:(::), Symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), Symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), Symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - end + desc = unsafe_wrap(String, ref_desc[]) * "\n\n" + desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) + return name, desc +end - _use_vars = Expr(:ref, :MX_handle, [Symbol("in$i") for i=1:n_used_vars]...) - _scalars = Expr(:ref, :MX_float, [Symbol("sca$i") for i=1:n_scalars]...) - _mut_vars = Expr(:ref, :MX_handle, [Symbol("out$i") for i=1:n_mutate_vars]...) 
+function _get_function_expressions(handle :: MX_handle, name) + # get function specification + ref_n_use_vars = Ref{MX_uint}(0) + ref_n_scalars = Ref{MX_uint}(0) + ref_n_mut_vars = Ref{MX_uint}(0) + ref_type_mask = Ref{Cint}(0) + @mxcall(:MXFuncDescribe, + (MX_handle, Ref{MX_uint}, Ref{MX_uint}, Ref{MX_uint}, Ref{Cint}), + handle, ref_n_use_vars, ref_n_scalars, ref_n_mut_vars, ref_type_mask) - # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped - # See https://github.com/dmlc/MXNet.jl/issues/55 - if func_name == :dot - _use_vars.args[2:end] = flipdim(_use_vars.args[2:end], 1) - end + n_used_vars = ref_n_use_vars[] + n_scalars = ref_n_scalars[] + n_mutate_vars = ref_n_mut_vars[] + type_mask = ref_type_mask[] + accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 + arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 - stmt_call = Expr(:call, :_invoke_mxfunction, func_handle, _use_vars, _scalars, _mut_vars) - if n_mutate_vars == 1 - stmt_ret = :(return out1) - else - stmt_ret = Expr(:return, Expr(:tuple, [Symbol("out$i") for i=1:n_mutate_vars]...)) - end + # general ndarray function + if arg_before_scalar + args = vcat([Expr(:(::), Symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), Symbol("sca$i"), Real) for i=1:n_scalars], + [Expr(:(::), Symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + else + args = vcat([Expr(:(::), Symbol("sca$i"), Real) for i=1:n_scalars], + [Expr(:(::), Symbol("in$i"), NDArray) for i=1:n_used_vars], + [Expr(:(::), Symbol("out$i"), NDArray) for i=1:n_mutate_vars]) + end + + _use_vars = Expr(:ref, :MX_handle, [Symbol("in$i") for i=1:n_used_vars]...) + _scalars = Expr(:ref, :MX_float, [Symbol("sca$i") for i=1:n_scalars]...) + _mut_vars = Expr(:ref, :MX_handle, [Symbol("out$i") for i=1:n_mutate_vars]...) 
+ + # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped + # See https://github.com/dmlc/MXNet.jl/issues/55 + if name == :dot + _use_vars.args[2:end] = flipdim(_use_vars.args[2:end], 1) + end - func_body = Expr(:block, stmt_call, stmt_ret) - func_head = Expr(:call, func_name, args...) + stmt_call = Expr(:call, :_invoke_mxfunction, handle, _use_vars, _scalars, _mut_vars) + if n_mutate_vars == 1 + stmt_ret = :(return out1) + else + stmt_ret = Expr(:return, Expr(:tuple, [Symbol("out$i") for i=1:n_mutate_vars]...)) + end + + func_body = Expr(:block, stmt_call, stmt_ret) + func_head = Expr(:call, name, args...) + + func_def = Expr(:function, func_head, func_body) + exprs = Expr[func_def] + + if accept_empty_mutate + args0 = args[1:n_used_vars+n_scalars] + func_head0 = Expr(:call, name, args0...) + _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] + stmt_call0 = Expr(:call, name, args0..., _mut_vars0...) + func_body0 = Expr(:block, stmt_call0) + func_head0 = Expr(:call, name, args0...) + + func_def0 = Expr(:function, func_head0, func_body0) + push!(exprs, func_def0) + end + return exprs +end - func_def = Expr(:function, func_head, func_body) - eval(func_def) +function _import_ndarray_functions(;gen_docs=false) + funcs = _get_ndarray_functions() + + if gen_docs + docs = Dict{Symbol, String}() + end - if accept_empty_mutate - args0 = args[1:n_used_vars+n_scalars] - func_head0 = Expr(:call, func_name, args0...) - _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] - stmt_call0 = Expr(:call, func_name, args0..., _mut_vars0...) - func_body0 = Expr(:block, stmt_call0) - func_head0 = Expr(:call, func_name, args0...) 
+ for i = 1:length(funcs) + handle = funcs[i] - func_def0 = Expr(:function, func_head0, func_body0) - eval(func_def0) + name, desc = _get_function_description(handle) + + if gen_docs + # generate document only + docs[name] = desc + else + exprs = _get_function_expressions(handle, name) + for expr in exprs + eval(expr) end end end From f09362c92c2ea93289badcc992ae520047251f54 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Jul 2016 00:36:05 +0900 Subject: [PATCH 340/630] add mxnet.so as a dependency --- src/base.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/base.jl b/src/base.jl index bd67a7c2ac52..ba1fda2e258c 100644 --- a/src/base.jl +++ b/src/base.jl @@ -24,6 +24,8 @@ if isempty(MXNET_LIB) # MXNet.ji will be re-compiled to get MXNET_LIB properly. touch(@__FILE__) error("Cannot find or load libmxnet.so. Please see the document on how to build it.") +else + include_dependency(MXNET_LIB) end function __init__() From 411a08bca644253fff7acb7125fdc933af932fc8 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Jul 2016 00:36:53 +0900 Subject: [PATCH 341/630] simplify _import_ndarray --- src/ndarray.jl | 24 +++++++----------------- 1 file changed, 7 insertions(+), 17 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 82b421651a4f..b82787c18c97 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1089,31 +1089,21 @@ function _get_function_expressions(handle :: MX_handle, name) return exprs end -function _import_ndarray_functions(;gen_docs=false) +function _import_ndarray_functions() funcs = _get_ndarray_functions() - - if gen_docs - docs = Dict{Symbol, String}() - end + func_exprs = Expr[] for i = 1:length(funcs) handle = funcs[i] name, desc = _get_function_description(handle) + exprs = _get_function_expressions(handle, name) - if gen_docs - # generate document only - docs[name] = desc - else - exprs = _get_function_expressions(handle, name) - for expr in exprs - eval(expr) - end + expr = quote + $(exprs...) 
+ @doc $desc $name end - end - - if gen_docs - return docs + eval(expr) end end From 7b65547780e1868b4944423847e18e9a77e19559 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Jul 2016 00:59:46 +0900 Subject: [PATCH 342/630] implement _import_ndarray functions as macro --- src/MXNet.jl | 2 +- src/base.jl | 1 - src/ndarray.jl | 23 +++++++++++++++++++---- 3 files changed, 20 insertions(+), 6 deletions(-) diff --git a/src/MXNet.jl b/src/MXNet.jl index cdbacbe64be0..c447617e7fbf 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -18,6 +18,7 @@ import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm, include("base.jl") include("context.jl") +include("util.jl") include("ndarray.jl") include("random.jl") @@ -36,7 +37,6 @@ include("kvstore.jl") include("callback.jl") include("model.jl") -include("util.jl") include("visualize.jl") include("nn-factory.jl") diff --git a/src/base.jl b/src/base.jl index ba1fda2e258c..578647e30486 100644 --- a/src/base.jl +++ b/src/base.jl @@ -29,7 +29,6 @@ else end function __init__() - _import_ndarray_functions() _import_atomic_symbol_creators() _import_io_iterators() diff --git a/src/ndarray.jl b/src/ndarray.jl index b82787c18c97..a555e201725b 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -999,6 +999,13 @@ function _get_ndarray_functions() return funcs end +function _get_function(name :: String) + handle = Ref{MX_handle}(0) + + @mxcall(:MXGetFunction, (Cstring, Ref{MX_handle}), name, handle) + return handle[] +end + function _get_function_description(handle :: MX_handle) # get function information (human readable) ref_name = Ref{char_p}(0) @@ -1061,8 +1068,10 @@ function _get_function_expressions(handle :: MX_handle, name) if name == :dot _use_vars.args[2:end] = flipdim(_use_vars.args[2:end], 1) end - - stmt_call = Expr(:call, :_invoke_mxfunction, handle, _use_vars, _scalars, _mut_vars) + stmt_call = quote + local handle = _get_function($(string(name))) + _invoke_mxfunction(handle, $_use_vars, $_scalars, 
$_mut_vars) + end if n_mutate_vars == 1 stmt_ret = :(return out1) else @@ -1089,7 +1098,7 @@ function _get_function_expressions(handle :: MX_handle, name) return exprs end -function _import_ndarray_functions() +macro _import_ndarray_functions() funcs = _get_ndarray_functions() func_exprs = Expr[] @@ -1103,7 +1112,13 @@ function _import_ndarray_functions() $(exprs...) @doc $desc $name end - eval(expr) + + push!(func_exprs, expr) end + + esc(quote + $(func_exprs...) + end) end +@_import_ndarray_functions() From 89e2c8e9c7ce5c1d5da7674fcf831457fa889b7a Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Jul 2016 02:06:07 +0900 Subject: [PATCH 343/630] implement a function cache --- src/ndarray.jl | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index a555e201725b..d78d647fe185 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -999,11 +999,17 @@ function _get_ndarray_functions() return funcs end -function _get_function(name :: String) - handle = Ref{MX_handle}(0) - - @mxcall(:MXGetFunction, (Cstring, Ref{MX_handle}), name, handle) - return handle[] +const _function_cache = Dict{Symbol, MX_handle}() +function _get_function(name :: Symbol) + if !haskey(_function_cache, name) + handle = Ref{MX_handle}(0) + + @mxcall(:MXGetFunction, (Cstring, Ref{MX_handle}), name, handle) + _function_cache[name] = handle[] + return handle[] + else + return _function_cache[name] + end end function _get_function_description(handle :: MX_handle) @@ -1069,7 +1075,7 @@ function _get_function_expressions(handle :: MX_handle, name) _use_vars.args[2:end] = flipdim(_use_vars.args[2:end], 1) end stmt_call = quote - local handle = _get_function($(string(name))) + local handle = _get_function($(QuoteNode(name))) _invoke_mxfunction(handle, $_use_vars, $_scalars, $_mut_vars) end if n_mutate_vars == 1 From 5b2bd3ee8948d57305eb950f6fb14fe7a383e7a5 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Jul 2016 02:57:18 
+0900 Subject: [PATCH 344/630] Refactor _import_atomic_symbol to be a macro --- src/base.jl | 2 +- src/symbolic-node.jl | 87 +++++++++++++++++++++++++++----------------- 2 files changed, 54 insertions(+), 35 deletions(-) diff --git a/src/base.jl b/src/base.jl index 578647e30486..32968ea0afb6 100644 --- a/src/base.jl +++ b/src/base.jl @@ -29,7 +29,7 @@ else end function __init__() - _import_atomic_symbol_creators() + _populate_symbol_creator_cache!() _import_io_iterators() atexit() do diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 94e45188038a..5bd1a6120f0e 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -580,7 +580,7 @@ libmxnet APIs ################################################################################ # Atomic SymbolicNode functions dynamically imported from libmxnet ################################################################################ -function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) +function _define_atomic_symbol_creator(hdr :: MX_handle) ref_name = Ref{char_p}(0) ref_desc = Ref{char_p}(0) ref_kv_nargs = Ref{char_p}(0) @@ -596,22 +596,19 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, ref_kv_nargs, ref_ret_type) - func_name_s= @compat String(ref_name[]) - func_name = Symbol(func_name_s) - kv_nargs_s = @compat String(ref_kv_nargs[]) - kv_nargs = Symbol(kv_nargs_s) + func_name_s = unsafe_wrap(String, ref_name[]) + func_name = Symbol(func_name_s) + kv_nargs_s = unsafe_wrap(String, ref_kv_nargs[]) + kv_nargs = Symbol(kv_nargs_s) - if gen_docs - f_desc = @compat String(ref_desc[]) * "\n\n" - if !isempty(kv_nargs_s) - f_desc *= "This function support variable length positional :class:`SymbolicNode` inputs.\n\n" - end - f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param Symbol name: The name of the :class:`SymbolicNode`. 
(e.g. `:my_symbol`), optional.\n" - f_desc *= ":param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`.\n\n" - f_desc *= ":return: $(_format_typestring(@compat String(ref_ret_type[]))).\n\n" - return (func_name, f_desc) + f_desc = unsafe_wrap(String, ref_desc[]) * "\n\n" + if !isempty(kv_nargs_s) + f_desc *= "This function support variable length positional :class:`SymbolicNode` inputs.\n\n" end + f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) + f_desc *= ":param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional.\n" + f_desc *= ":param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`.\n\n" + f_desc *= ":return: $(_format_typestring(unsafe_wrap(String, ref_ret_type[]))).\n\n" # function $func_name(args...; kwargs...) func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) @@ -623,10 +620,10 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) name = "" end - param_keys = AbstractString[] - param_vals = AbstractString[] + param_keys = String[] + param_vals = String[] symbol_kws = Dict{Symbol, SymbolicNode}() - attrs = Dict{Symbol, AbstractString}() + attrs = Dict{Symbol, String}() $(if kv_nargs != Symbol("") quote @@ -665,11 +662,13 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) end end) + local hdr = _get_symbol_creator($(QuoteNode(func_name))) + # create the SymbolicNode ref_sym_hdr = Ref{MX_handle}() @mxcall(:MXSymbolCreateAtomicSymbol, (MX_handle, MX_uint, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), - $hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) + hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) sym_hdr = ref_sym_hdr[] node = SymbolicNode(MX_SymbolHandle(sym_hdr)) @@ -691,34 +690,54 @@ function _define_atomic_symbol_creator(hdr :: MX_handle; gen_docs=false) end func_def = 
Expr(:function, func_head, Expr(:block, func_body)) - eval(func_def) + quote + $func_def + @doc $f_desc $func_name + end end -function _import_atomic_symbol_creators(;gen_docs=false) +function _get_atomic_symbol_creators() n_ref = Ref{MX_uint}(0) h_ref = Ref{Ptr{MX_handle}}(0) @mxcall(:MXSymbolListAtomicSymbolCreators, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) - n_creators = n_ref[] - h_creators = pointer_to_array(h_ref[], n_creators) + return unsafe_wrap(Array, h_ref[], n_ref[]) +end - if gen_docs - docs = Dict{Base.Symbol, AbstractString}() - end +function _get_atomic_symbol_name(handle :: MX_handle) + name_r = Ref{char_p}(0) + @mxcall(:MXSymbolGetAtomicSymbolName, (MX_handle, Ref{char_p}), handle, name_r) + return unsafe_wrap(String, name_r[]) +end - for i = 1:n_creators - creator_hdr = h_creators[i] - ret = _define_atomic_symbol_creator(creator_hdr, gen_docs=gen_docs) - if gen_docs - docs[ret[1]] = ret[2] - end +const _symbol_creator_cache = Dict{Symbol, MX_handle}() +function _populate_symbol_creator_cache!() + empty!(_symbol_creator_cache) + h_creators = _get_atomic_symbol_creators() + for handle in h_creators + name = Symbol(_get_atomic_symbol_name(handle)) + _symbol_creator_cache[name] = handle end +end + +_get_symbol_creator(name :: Symbol) = _symbol_creator_cache[name] - if gen_docs - return docs +macro _import_atomic_symbol_creators() + h_creators = _get_atomic_symbol_creators() + + exprs = Expr[] + for creator_hdr in h_creators + expr = _define_atomic_symbol_creator(creator_hdr) + push!(exprs, expr) end + + esc(quote + $(exprs...) 
+ end) end +@_import_atomic_symbol_creators() + ################################################################################ # Utility macros to chain up symbols ################################################################################ From 1ad9a1a7a38e676e7e01df040c904e708c918daa Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Jul 2016 03:54:36 +0900 Subject: [PATCH 345/630] transition context to see how it turns out --- docs/src/api/context.md | 7 +++++++ src/context.jl | 36 +++++++++++++++++------------------- 2 files changed, 24 insertions(+), 19 deletions(-) create mode 100644 docs/src/api/context.md diff --git a/docs/src/api/context.md b/docs/src/api/context.md new file mode 100644 index 000000000000..2c2ac8f7d7ac --- /dev/null +++ b/docs/src/api/context.md @@ -0,0 +1,7 @@ +# Context + +```@docs +mx.Context +mx.cpu +mx.gpu +``` diff --git a/src/context.jl b/src/context.jl index 1e96c305fb04..908d542f5ba3 100644 --- a/src/context.jl +++ b/src/context.jl @@ -1,14 +1,10 @@ -#=doc -Context -======= -=# @enum CONTEXT_TYPE CPU=1 GPU=2 CPU_PINNED=3 -#=doc -.. class:: Context +""" + Context(dev_type, dev_id) - A context describes the device type and id on which computation should be carried on. -=# +A context describes the device type and id on which computation should be carried on. +""" immutable Context device_type :: CONTEXT_TYPE device_id :: Int @@ -20,25 +16,27 @@ function Base.show(io :: IO, ctx :: Context) print(io, "$(ctx.device_type)$(ctx.device_id)") end -#=doc -.. function:: cpu(dev_id=0) +""" + cpu(dev_id) - :param Int dev_id: the CPU id. +Get a CPU context with a specific id. ``cpu()`` is usually the default context for many +operations when no context is specified. - Get a CPU context with a specific id. ``cpu()`` is usually the default context for many - operations when no context is specified. -=# +# Arguments +* `dev_id::Int = 0`: the CPU id. +""" function cpu(dev_id::Int=0) return Context(CPU, dev_id) end -#=doc -.. 
function:: gpu(dev_id=0) +""" + gpu(dev_id) - :param Int dev_id: the GPU device id. +Get a GPU context with a specific id. The K GPUs on a node is typically numbered as 0,...,K-1. - Get a GPU context with a specific id. The K GPUs on a node is typically numbered as 0,...,K-1. -=# +# Arguments +* `dev_id :: Int = 0` the GPU device id. +""" function gpu(dev_id::Int=0) return Context(GPU, dev_id) end From ca949d73b5b90c81eda2e021dfaa20a973e271a4 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Jul 2016 03:57:42 +0900 Subject: [PATCH 346/630] get rid of the most of the old docs --- docs/Makefile | 180 -- docs/api/callback.rst | 92 - docs/api/context.rst | 32 - docs/api/executor.rst | 32 - docs/api/initializer.rst | 81 - docs/api/io.rst | 525 ------ docs/api/metric.rst | 77 - docs/api/model.rst | 133 -- docs/api/ndarray.rst | 1027 ----------- docs/api/nn-factory.rst | 33 - docs/api/optimizer.rst | 194 -- docs/api/symbolic-node.rst | 1591 ----------------- docs/api/visualize.rst | 19 - docs/build-api.jl | 98 - docs/conf.py | 277 --- docs/index.rst | 55 - docs/sphinx/julia.py | 23 - docs/src/index.md | 55 + docs/{ => src}/tutorial/char-lstm.rst | 0 .../{ => src}/tutorial/images/LSTM3-chain.png | Bin .../tutorial/images/char-lstm-vis.svg | 0 docs/{ => src}/tutorial/mnist.rst | 0 docs/{ => src}/user-guide/faq.rst | 0 docs/{ => src}/user-guide/install.rst | 0 docs/{ => src}/user-guide/overview.rst | 0 25 files changed, 55 insertions(+), 4469 deletions(-) delete mode 100644 docs/Makefile delete mode 100644 docs/api/callback.rst delete mode 100644 docs/api/context.rst delete mode 100644 docs/api/executor.rst delete mode 100644 docs/api/initializer.rst delete mode 100644 docs/api/io.rst delete mode 100644 docs/api/metric.rst delete mode 100644 docs/api/model.rst delete mode 100644 docs/api/ndarray.rst delete mode 100644 docs/api/nn-factory.rst delete mode 100644 docs/api/optimizer.rst delete mode 100644 docs/api/symbolic-node.rst delete mode 100644 
docs/api/visualize.rst delete mode 100644 docs/build-api.jl delete mode 100644 docs/conf.py delete mode 100644 docs/index.rst delete mode 100644 docs/sphinx/julia.py rename docs/{ => src}/tutorial/char-lstm.rst (100%) rename docs/{ => src}/tutorial/images/LSTM3-chain.png (100%) rename docs/{ => src}/tutorial/images/char-lstm-vis.svg (100%) rename docs/{ => src}/tutorial/mnist.rst (100%) rename docs/{ => src}/user-guide/faq.rst (100%) rename docs/{ => src}/user-guide/install.rst (100%) rename docs/{ => src}/user-guide/overview.rst (100%) diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index e1763e330fc7..000000000000 --- a/docs/Makefile +++ /dev/null @@ -1,180 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -livehtml: - sphinx-autobuild -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. 
The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Mocha.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Mocha.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/Mocha" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Mocha" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." 
- $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/docs/api/callback.rst b/docs/api/callback.rst deleted file mode 100644 index 2bda7477094f..000000000000 --- a/docs/api/callback.rst +++ /dev/null @@ -1,92 +0,0 @@ - -Callbacks in training -===================== - - - - -.. class:: AbstractCallback - - Abstract type of callback functions used in training. - - - - -.. class:: AbstractBatchCallback - - Abstract type of callbacks to be called every mini-batch. - - - - -.. class:: AbstractEpochCallback - - Abstract type of callbacks to be called every epoch. - - - - -.. function:: every_n_batch(callback :: Function, n :: Int; call_on_0 = false) - - A convenient function to construct a callback that runs every ``n`` mini-batches. - - :param Int call_on_0: keyword argument, default false. Unless set, the callback - will **not** be run on batch 0. - - For example, the :func:`speedometer` callback is defined as - - .. code-block:: julia - - every_n_iter(frequency, call_on_0=true) do state :: OptimizationState - if state.curr_batch == 0 - # reset timer - else - # compute and print speed - end - end - - :seealso: :func:`every_n_epoch`, :func:`speedometer`. - - - - -.. function:: speedometer(; frequency=50) - - Create an :class:`AbstractBatchCallback` that measure the training speed - (number of samples processed per second) every k mini-batches. - - :param Int frequency: keyword argument, default 50. The frequency (number of - min-batches) to measure and report the speed. - - - - -.. function:: every_n_epoch(callback :: Function, n :: Int; call_on_0 = false) - - A convenient function to construct a callback that runs every ``n`` full data-passes. - - :param Int call_on_0: keyword argument, default false. Unless set, the callback - will **not** be run on epoch 0. Epoch 0 means no training has been performed - yet. This is useful if you want to inspect the randomly initialized model - that has not seen any data yet. - - :seealso: :func:`every_n_iter`. - - - - -.. 
function:: do_checkpoint(prefix; frequency=1, save_epoch_0=false) - - Create an :class:`AbstractEpochCallback` that save checkpoints of the model to disk. - The checkpoints can be loaded back later on. - - :param AbstractString prefix: the prefix of the filenames to save the model. The model - architecture will be saved to prefix-symbol.json, while the weights will be saved - to prefix-0012.params, for example, for the 12-th epoch. - :param Int frequency: keyword argument, default 1. The frequency (measured in epochs) to - save checkpoints. - :param Bool save_epoch_0: keyword argument, default false. Whether we should save a - checkpoint for epoch 0 (model initialized but not seen any data yet). - - - diff --git a/docs/api/context.rst b/docs/api/context.rst deleted file mode 100644 index 5230b892c8e5..000000000000 --- a/docs/api/context.rst +++ /dev/null @@ -1,32 +0,0 @@ - -Context -======= - - - - -.. class:: Context - - A context describes the device type and id on which computation should be carried on. - - - - -.. function:: cpu(dev_id=0) - - :param Int dev_id: the CPU id. - - Get a CPU context with a specific id. ``cpu()`` is usually the default context for many - operations when no context is specified. - - - - -.. function:: gpu(dev_id=0) - - :param Int dev_id: the GPU device id. - - Get a GPU context with a specific id. The K GPUs on a node is typically numbered as 0,...,K-1. - - - diff --git a/docs/api/executor.rst b/docs/api/executor.rst deleted file mode 100644 index 69c78137cd0e..000000000000 --- a/docs/api/executor.rst +++ /dev/null @@ -1,32 +0,0 @@ - -Executor -======== - - - - -.. class:: Executor - - An executor is a realization of a symbolic architecture defined by a :class:`SymbolicNode`. - The actual forward and backward computation specified by the network architecture can - be carried out with an executor. - - - - -.. 
function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) - - Create an :class:`Executor` by binding a :class:`SymbolicNode` to concrete :class:`NDArray`. - - :param SymbolicNode sym: the network architecture describing the computation graph. - :param Context ctx: the context on which the computation should run. - :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete - arrays for all the inputs in the network architecture. The inputs typically include - network parameters (weights, bias, filters, etc.), data and labels. See :func:`list_arguments` - and :func:`infer_shape`. - :param args_grad: TODO - :param aux_states: - :param grad_req: - - - diff --git a/docs/api/initializer.rst b/docs/api/initializer.rst deleted file mode 100644 index 6dbb81a001b3..000000000000 --- a/docs/api/initializer.rst +++ /dev/null @@ -1,81 +0,0 @@ - -Initializers -============ -Interface ---------- - - - - -.. class:: AbstractInitializer - - The abstract base class for all initializers. - -To define a new initializer, it is -enough to derive a new type, and implement one or more of the following methods: - -.. function:: _init_weight(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. function:: _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. function:: _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. function:: _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - -Or, if full behavior customization is needed, override the following function - -.. function:: init(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - - - - -Built-in initializers ---------------------- - - - - -.. class:: UniformInitializer - - Initialize weights according to a uniform distribution within the provided scale. - - - - -.. 
function UniformInitializer(scale=0.07) - - Construct a :class:`UniformInitializer` with the specified scale. - - - - -.. class:: NormalInitializer - - Initialize weights according to a univariate Gaussian distribution. - - - - -.. function:: NormalIninitializer(; mu=0, sigma=0.01) - - Construct a :class:`NormalInitializer` with mean ``mu`` and variance ``sigma``. - - - - -.. class:: XavierInitializer - - The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding - the difficulty of training deep feedforward neuralnetworks*. - - There are several different version of the XavierInitializer used in the wild. - The general idea is that the variance of the initialization distribution is controlled - by the dimensionality of the input and output. As a distribution one can either choose - a normal distribution with μ = 0 and σ² or a uniform distribution from -σ to σ. - - Several different ways of calculating the variance are given in the literature or are - used by various libraries. - - - [Bengio and Glorot 2010]: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1)`` - - [K. He, X. Zhang, S. Ren, and J. Sun 2015]: ``mx.XavierInitializer(distribution = mx.xv_gaussian, regularization = mx.xv_in, magnitude = 2)`` - - caffe_avg: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 3)`` - - - diff --git a/docs/api/io.rst b/docs/api/io.rst deleted file mode 100644 index e5cb3ffb32e5..000000000000 --- a/docs/api/io.rst +++ /dev/null @@ -1,525 +0,0 @@ - -Data Providers -============== -Interface ---------- - -Data providers are wrappers that load external data, be it images, text, or general tensors, -and split it into mini-batches so that the model can consume the data in a uniformed way. - - - - -.. class:: AbstractDataProvider - - The root type for all data provider. A data provider should implement the following interfaces: - - .. 
function:: get_batch_size(provider) -> Int - - :param AbstractDataProvider provider: the data provider. - :return: the mini-batch size of the provided data. All the provided data should have the - same mini-batch size (i.e. the last dimension). - - .. function:: provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} - - :param AbstractDataProvider provider: the data provider. - :return: a vector of (name, shape) pairs describing the names of the data it provides, and - the corresponding shapes. - - .. function:: provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} - - :param AbstractDataProvider provider: the data provider. - :return: a vector of (name, shape) pairs describing the names of the labels it provides, and - the corresponding shapes. - - The difference between *data* and *label* is that during - training stage, both *data* and *label* will be feeded into the model, while during - prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and - of any shapes. The provided data and label names here should match the input names in a target - :class:`SymbolicNode`. - - A data provider should also implement the Julia iteration interface, in order to allow iterating - through the data set. The provider will be called in the following way: - - .. code-block:: julia - - for batch in eachbatch(provider) - data = get_data(provider, batch) - end - - which will be translated by Julia compiler into - - .. code-block:: julia - - state = Base.start(eachbatch(provider)) - while !Base.done(provider, state) - (batch, state) = Base.next(provider, state) - data = get_data(provider, batch) - end - - By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface - is implemented on the provider type itself. But the extra layer of abstraction allows us to - implement a data provider easily via a Julia ``Task`` coroutine. 
See the - data provider defined in :doc:`the char-lstm example - ` for an example of using coroutine to define data - providers. - -The detailed interface functions for the iterator API is listed below: - -.. function:: Base.eltype(provider) -> AbstractDataBatch - - :param AbstractDataProvider provider: the data provider. - :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. - -.. function:: Base.start(provider) -> AbstractDataProviderState - - :param AbstractDataProvider provider: the data provider. - - This function is always called before iterating into the dataset. It should initialize - the iterator, reset the index, and do data shuffling if needed. - -.. function:: Base.done(provider, state) -> Bool - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. - :return: true if there is no more data to iterate in this dataset. - -.. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) - - :param AbstractDataProvider provider: the data provider. - :return: the current data batch, and the state for the next iteration. - -Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that -is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this -case, you can safely assume that - -* :func:`Base.start` will always be called, and called only once before the iteration starts. -* :func:`Base.done` will always be called at the beginning of every iteration and always be called once. -* If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with - a call to :func:`Base.start`. -* :func:`Base.next` will always be called only once in each iteration. 
It will always be called after - one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will - not be called. - -With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation -of the built-in :class:`MXDataProvider` for example. - -.. caution:: - - Please do not use the one data provider simultaneously in two different places, either in parallel, - or in a nested loop. For example, the behavior for the following code is undefined - - .. code-block:: julia - - for batch in data - # updating the parameters - - # now let's test the performance on the training set - for b2 in data - # ... - end - end - - - - -.. class:: AbstractDataProviderState - - Base type for data provider states. - - - - -.. class:: AbstractDataBatch - - Base type for a data mini-batch. It should implement the following interfaces: - - .. function:: count_samples(provider, batch) -> Int - - :param AbstractDataBatch batch: the data batch object. - :return: the number of samples in this batch. This number should be greater than 0, but - less than or equal to the batch size. This is used to indicate at the end of - the data set, there might not be enough samples for a whole mini-batch. - - .. function:: get_data(provider, batch) -> Vector{NDArray} - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :return: a vector of data in this batch, should be in the same order as declared in - :func:`provide_data() `. - - The last dimension of each :class:`NDArray` should always match the batch_size, even when - :func:`count_samples` returns a value less than the batch size. In this case, - the data provider is free to pad the remaining contents with any value. - - .. function:: get_label(provider, batch) -> Vector{NDArray} - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. 
- :return: a vector of labels in this batch. Similar to :func:`get_data`. - - - The following utility functions will be automatically defined. - - .. function:: get(provider, batch, name) -> NDArray - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :param Base.Symbol name: the name of the data to get, should be one of the names - provided in either :func:`provide_data() ` - or :func:`provide_label() `. - :return: the corresponding data array corresponding to that name. - - .. function:: load_data!(provider, batch, targets) - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :param targets: the targets to load data into. - :type targets: Vector{Vector{SlicedNDArray}} - - The targets is a list of the same length as number of data provided by this provider. - Each element in the list is a list of :class:`SlicedNDArray`. This list described a - spliting scheme of this data batch into different slices, each slice is specified by - a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch - that should be loaded into the corresponding *ndarray*. - - This utility function is used in data parallelization, where a mini-batch is splited - and computed on several different devices. - - .. function:: load_label!(provider, batch, targets) - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :param targets: the targets to load label into. - :type targets: Vector{Vector{SlicedNDArray}} - - The same as :func:`load_data!`, except that this is for loading labels. - - - - -.. class:: DataBatch - - A basic subclass of :class:`AbstractDataBatch`, that implement the interface by - accessing member fields. - - - - -.. class:: SlicedNDArray - - A alias type of ``Tuple{UnitRange{Int},NDArray}``. - - - - -Built-in data providers ------------------------ - - - - -.. 
class:: ArrayDataProvider - - A convenient tool to iterate :class:`NDArray` or Julia ``Array``. - - - - -.. function:: ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) - - Construct a data provider from :class:`NDArray` or Julia Arrays. - - :param data: the data, could be - - - a :class:`NDArray`, or a Julia Array. This is equivalent to ``:data => data``. - - a name-data pair, like ``:mydata => array``, where ``:mydata`` is the name of the data - and ``array`` is an :class:`NDArray` or a Julia Array. - - a list of name-data pairs. - - :param label: the same as the ``data`` parameter. When this argument is omitted, the constructed - provider will provide no labels. - :param Int batch_size: the batch size, default is 0, which means treating the whole array as a - single mini-batch. - :param Bool shuffle: turn on if the data should be shuffled at every epoch. - :param Real data_padding: when the mini-batch goes beyond the dataset boundary, there might - be less samples to include than a mini-batch. This value specify a scalar to pad the - contents of all the missing data points. - :param Real label_padding: the same as ``data_padding``, except for the labels. - - TODO: remove ``data_padding`` and ``label_padding``, and implement rollover that copies - the last or first several training samples to feed the padding. - - - - -libmxnet data providers ------------------------ - - - - -.. class:: MXDataProvider - - A data provider that wrap built-in data iterators from libmxnet. See below for - a list of built-in data iterators. - - - - -.. function:: CSVIter(...) - - Can also be called with the alias ``CSVProvider``. - Create iterator for dataset in csv. - - :param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data. - :param Base.Symbol label_name: keyword argument, default ``:softmax_label``. The name of the label. Could be ``nothing`` if no label is presented in this dataset. 
- - :param data_csv: Dataset Param: Data csv path. - :type data_csv: string, required - - - :param data_shape: Dataset Param: Shape of the data. - :type data_shape: Shape(tuple), required - - - :param label_csv: Dataset Param: Label csv path. If is NULL, all labels will be returned as 0 - :type label_csv: string, optional, default='NULL' - - - :param label_shape: Dataset Param: Shape of the label. - :type label_shape: Shape(tuple), optional, default=(1,) - - :return: the constructed :class:`MXDataProvider`. - - - -.. function:: ImageRecordIter(...) - - Can also be called with the alias ``ImageRecordProvider``. - Create iterator for dataset packed in recordio. - - :param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data. - :param Base.Symbol label_name: keyword argument, default ``:softmax_label``. The name of the label. Could be ``nothing`` if no label is presented in this dataset. - - :param path_imglist: Dataset Param: Path to image list. - :type path_imglist: string, optional, default='' - - - :param path_imgrec: Dataset Param: Path to image record file. - :type path_imgrec: string, optional, default='./data/imgrec.rec' - - - :param label_width: Dataset Param: How many labels for an image. - :type label_width: int, optional, default='1' - - - :param data_shape: Dataset Param: Shape of each instance generated by the DataIter. - :type data_shape: Shape(tuple), required - - - :param preprocess_threads: Backend Param: Number of thread to do preprocessing. - :type preprocess_threads: int, optional, default='4' - - - :param verbose: Auxiliary Param: Whether to output parser information. - :type verbose: boolean, optional, default=True - - - :param num_parts: partition the data into multiple parts - :type num_parts: int, optional, default='1' - - - :param part_index: the index of the part will read - :type part_index: int, optional, default='0' - - - :param shuffle: Augmentation Param: Whether to shuffle data. 
- :type shuffle: boolean, optional, default=False - - - :param seed: Augmentation Param: Random Seed. - :type seed: int, optional, default='0' - - - :param batch_size: Batch Param: Batch size. - :type batch_size: int (non-negative), required - - - :param round_batch: Batch Param: Use round robin to handle overflow batch. - :type round_batch: boolean, optional, default=True - - - :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 - - - :param rand_crop: Augmentation Param: Whether to random crop on the image - :type rand_crop: boolean, optional, default=False - - - :param crop_y_start: Augmentation Param: Where to nonrandom crop on y. - :type crop_y_start: int, optional, default='-1' - - - :param crop_x_start: Augmentation Param: Where to nonrandom crop on x. - :type crop_x_start: int, optional, default='-1' - - - :param max_rotate_angle: Augmentation Param: rotated randomly in [-max_rotate_angle, max_rotate_angle]. - :type max_rotate_angle: int, optional, default='0' - - - :param max_aspect_ratio: Augmentation Param: denotes the max ratio of random aspect ratio augmentation. - :type max_aspect_ratio: float, optional, default=0 - - - :param max_shear_ratio: Augmentation Param: denotes the max random shearing ratio. - :type max_shear_ratio: float, optional, default=0 - - - :param max_crop_size: Augmentation Param: Maximum crop size. - :type max_crop_size: int, optional, default='-1' - - - :param min_crop_size: Augmentation Param: Minimum crop size. - :type min_crop_size: int, optional, default='-1' - - - :param max_random_scale: Augmentation Param: Maxmum scale ratio. - :type max_random_scale: float, optional, default=1 - - - :param min_random_scale: Augmentation Param: Minimum scale ratio. - :type min_random_scale: float, optional, default=1 - - - :param max_img_size: Augmentation Param: Maxmum image size after resizing. 
- :type max_img_size: float, optional, default=1e+10 - - - :param min_img_size: Augmentation Param: Minimum image size after resizing. - :type min_img_size: float, optional, default=0 - - - :param random_h: Augmentation Param: Maximum value of H channel in HSL color space. - :type random_h: int, optional, default='0' - - - :param random_s: Augmentation Param: Maximum value of S channel in HSL color space. - :type random_s: int, optional, default='0' - - - :param random_l: Augmentation Param: Maximum value of L channel in HSL color space. - :type random_l: int, optional, default='0' - - - :param rotate: Augmentation Param: Rotate angle. - :type rotate: int, optional, default='-1' - - - :param fill_value: Augmentation Param: Maximum value of illumination variation. - :type fill_value: int, optional, default='255' - - - :param inter_method: Augmentation Param: 0-NN 1-bilinear 2-cubic 3-area 4-lanczos4 9-auto 10-rand. - :type inter_method: int, optional, default='1' - - - :param mirror: Augmentation Param: Whether to mirror the image. - :type mirror: boolean, optional, default=False - - - :param rand_mirror: Augmentation Param: Whether to mirror the image randomly. - :type rand_mirror: boolean, optional, default=False - - - :param mean_img: Augmentation Param: Mean Image to be subtracted. - :type mean_img: string, optional, default='' - - - :param mean_r: Augmentation Param: Mean value on R channel. - :type mean_r: float, optional, default=0 - - - :param mean_g: Augmentation Param: Mean value on G channel. - :type mean_g: float, optional, default=0 - - - :param mean_b: Augmentation Param: Mean value on B channel. - :type mean_b: float, optional, default=0 - - - :param mean_a: Augmentation Param: Mean value on Alpha channel. - :type mean_a: float, optional, default=0 - - - :param scale: Augmentation Param: Scale in color space. - :type scale: float, optional, default=1 - - - :param max_random_contrast: Augmentation Param: Maximum ratio of contrast variation. 
- :type max_random_contrast: float, optional, default=0 - - - :param max_random_illumination: Augmentation Param: Maximum value of illumination variation. - :type max_random_illumination: float, optional, default=0 - - :return: the constructed :class:`MXDataProvider`. - - - -.. function:: MNISTIter(...) - - Can also be called with the alias ``MNISTProvider``. - Create iterator for MNIST hand-written digit number recognition dataset. - - :param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data. - :param Base.Symbol label_name: keyword argument, default ``:softmax_label``. The name of the label. Could be ``nothing`` if no label is presented in this dataset. - - :param image: Dataset Param: Mnist image path. - :type image: string, optional, default='./train-images-idx3-ubyte' - - - :param label: Dataset Param: Mnist label path. - :type label: string, optional, default='./train-labels-idx1-ubyte' - - - :param batch_size: Batch Param: Batch Size. - :type batch_size: int, optional, default='128' - - - :param shuffle: Augmentation Param: Whether to shuffle data. - :type shuffle: boolean, optional, default=True - - - :param flat: Augmentation Param: Whether to flat the data into 1D. - :type flat: boolean, optional, default=False - - - :param seed: Augmentation Param: Random Seed. - :type seed: int, optional, default='0' - - - :param silent: Auxiliary Param: Whether to print out data info. - :type silent: boolean, optional, default=False - - - :param num_parts: partition the data into multiple parts - :type num_parts: int, optional, default='1' - - - :param part_index: the index of the part will read - :type part_index: int, optional, default='0' - - - :param prefetch_buffer: Backend Param: Number of prefetched parameters - :type prefetch_buffer: , optional, default=4 - - :return: the constructed :class:`MXDataProvider`. 
- - - - - - diff --git a/docs/api/metric.rst b/docs/api/metric.rst deleted file mode 100644 index 0f3cc2d81358..000000000000 --- a/docs/api/metric.rst +++ /dev/null @@ -1,77 +0,0 @@ - -Evaluation Metrics -================== - -Evaluation metrics provide a way to evaluate the performance of a learned model. -This is typically used during training to monitor performance on the validation -set. - - - - -.. class:: AbstractEvalMetric - - The base class for all evaluation metrics. The sub-types should implement the following - interfaces. - - .. function:: update!(metric, labels, preds) - - Update and accumulate metrics. - - :param AbstractEvalMetric metric: the metric object. - :param labels: the labels from the data provider. - :type labels: Vector{NDArray} - :param preds: the outputs (predictions) of the network. - :type preds: Vector{NDArray} - - .. function:: reset!(metric) - - Reset the accumulation counter. - - .. function:: get(metric) - - Get the accumulated metrics. - - :return: ``Vector{Tuple{Base.Symbol, Real}}``, a list of name-value pairs. For - example, ``[(:accuracy, 0.9)]``. - - - - -.. class:: Accuracy - - Multiclass classification accuracy. - - Calculates the mean accuracy per sample for softmax in one dimension. - For a multi-dimensional softmax the mean accuracy over all dimensions is calculated. - - - - -.. class:: MSE - - Mean Squared Error. TODO: add support for multi-dimensional outputs. - - Calculates the mean squared error regression loss in one dimension. - - - - -.. class:: ACE - - Averaged cross-entropy for classification. This also know als logloss. - - Calculated the averaged cross entropy for multi-dimentions output. - - - - -.. class:: MultiACE - - Averaged cross-entropy for classification. This also know als logloss. - This variant keeps track of the different losses per class. - - Calculated the averaged cross entropy for multi-dimentions output. 
- - - diff --git a/docs/api/model.rst b/docs/api/model.rst deleted file mode 100644 index 764d3e9329a8..000000000000 --- a/docs/api/model.rst +++ /dev/null @@ -1,133 +0,0 @@ - -Models -====== - -The model API provides convenient high-level interface to do training and predicting on -a network described using the symbolic API. - - - - -.. class:: AbstractModel - - The abstract super type of all models in MXNet.jl. - - - - -.. class:: FeedForward - - The feedforward model provides convenient interface to train and predict on - feedforward architectures like multi-layer MLP, ConvNets, etc. There is no - explicitly handling of *time index*, but it is relatively easy to implement - unrolled RNN / LSTM under this framework (**TODO**: add example). For models - that handles sequential data explicitly, please use **TODO**... - - - - -.. function:: FeedForward(arch :: SymbolicNode, ctx) - - :param arch: the architecture of the network constructed using the symbolic API. - :param ctx: the devices on which this model should do computation. It could be a single :class:`Context` - or a list of :class:`Context` objects. In the latter case, data parallelization will be used - for training. If no context is provided, the default context ``cpu()`` will be used. - - - - -.. function:: init_model(self, initializer; overwrite=false, input_shapes...) - - Initialize the weights in the model. - - This method will be called automatically when training a model. So there is usually no - need to call this method unless one needs to inspect a model with only randomly initialized - weights. - - :param FeedForward self: the model to be initialized. - :param AbstractInitializer initializer: an initializer describing how the weights should be initialized. - :param Bool overwrite: keyword argument, force initialization even when weights already exists. - :param input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. 
- For example, ``data=(28,28,1,100), label=(100,)``. - - - - -.. function:: - predict(self, data; overwrite=false, callback=nothing) - - Predict using an existing model. The model should be already initialized, or trained or loaded from - a checkpoint. There is an overloaded function that allows to pass the callback as the first argument, - so it is possible to do - - .. code-block:: julia - - predict(model, data) do batch_output - # consume or write batch_output to file - end - - :param FeedForward self: the model. - :param AbstractDataProvider data: the data to perform prediction on. - :param Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory - allocation of the :class:`Executor` depends on the mini-batch size of the test - data provider. If you call predict twice with data provider of the same batch-size, - then the executor can be potentially be re-used. So, if ``overwrite`` is false, - we will try to re-use, and raise an error if batch-size changed. If ``overwrite`` - is true (the default), a new :class:`Executor` will be created to replace the old one. - - .. note:: - - Prediction is computationally much less costly than training, so the bottleneck sometimes becomes the IO - for copying mini-batches of data. Since there is no concern about convergence in prediction, it is better - to set the mini-batch size as large as possible (limited by your device memory) if prediction speed is a - concern. - - For the same reason, currently prediction will only use the first device even if multiple devices are - provided to construct the model. - - .. note:: - - If you perform further after prediction. The weights are not automatically synchronized if ``overwrite`` - is set to false and the old predictor is re-used. In this case - setting ``overwrite`` to true (the default) will re-initialize the predictor the next time you call - predict and synchronize the weights again. 
- - :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` - - - - -.. function:: train(model :: FeedForward, ...) - - Alias to :func:`fit`. - - - - -.. function:: fit(model :: FeedForward, optimizer, data; kwargs...) - - Train the ``model`` on ``data`` with the ``optimizer``. - - :param FeedForward model: the model to be trained. - :param AbstractOptimizer optimizer: the optimization algorithm to use. - :param AbstractDataProvider data: the training data provider. - :param Int n_epoch: default 10, the number of full data-passes to run. - :param AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for - the validation set. - :param AbstractEvalMetric eval_metric: keyword argument, default ``Accuracy()``. The metric used - to evaluate the training performance. If ``eval_data`` is provided, the same metric is also - calculated on the validation set. - :param kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients - and parameters when multiple devices are used for training. - :type kvstore: :class:`KVStore` or ``Base.Symbol`` - :param AbstractInitializer initializer: keyword argument, default ``UniformInitializer(0.01)``. - :param Bool force_init: keyword argument, default false. By default, the random initialization using the - provided ``initializer`` will be skipped if the model weights already exists, maybe from a previous - call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When - this option is set, it will always do random initialization at the begining of training. - :param callbacks: keyword argument, default ``[]``. Callbacks to be invoked at each epoch or mini-batch, - see :class:`AbstractCallback`. 
- :type callbacks: ``Vector{AbstractCallback}`` - - - diff --git a/docs/api/ndarray.rst b/docs/api/ndarray.rst deleted file mode 100644 index b94232b25e50..000000000000 --- a/docs/api/ndarray.rst +++ /dev/null @@ -1,1027 +0,0 @@ - -NDArray API -=========== - - - - -.. class:: NDArray - - Wrapper of the ``NDArray`` type in ``libmxnet``. This is the basic building block - of tensor-based computation. - - .. _ndarray-shape-note: - - .. note:: - - since C/C++ use row-major ordering for arrays while Julia follows a - column-major ordering. To keep things consistent, we keep the underlying data - in their original layout, but use *language-native* convention when we talk - about shapes. For example, a mini-batch of 100 MNIST images is a tensor of - C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory - have shape (28,28,1,100). - - - - -.. function:: context(arr :: NDArray) - - Get the context that this :class:`NDArray` lives on. - - - - -.. function:: - empty(shape :: Tuple, ctx :: Context) - empty(shape :: Tuple) - empty(dim1, dim2, ...) - - Allocate memory for an uninitialized :class:`NDArray` with specific shape. - - - - -Interface functions similar to Julia Arrays -------------------------------------------- - - - - -.. function:: - zeros(shape :: Tuple, ctx :: Context) - zeros(shape :: Tuple) - zeros(dim1, dim2, ...) - - Create zero-ed :class:`NDArray` with specific shape. - - - - -.. function:: - ones(shape :: Tuple, ctx :: Context) - ones(shape :: Tuple) - ones(dim1, dim2, ...) - - Create an :class:`NDArray` with specific shape and initialize with 1. - - - - -.. function:: - size(arr :: NDArray) - size(arr :: NDArray, dim :: Int) - - Get the shape of an :class:`NDArray`. The shape is in Julia's column-major convention. See - also the :ref:`notes on NDArray shapes `. - - - - -.. function:: length(arr :: NDArray) - - Get the number of elements in an :class:`NDArray`. - - - - -.. 
function:: ndims(arr :: NDArray) - - Get the number of dimensions of an :class:`NDArray`. Is equivalent to ``length(size(arr))``. - - - - -.. function:: eltype(arr :: NDArray) - - Get the element type of an :class:`NDArray`. Currently the element type is always ``mx.MX_float``. - - - - -.. function:: slice(arr :: NDArray, start:stop) - - Create a view into a sub-slice of an :class:`NDArray`. Note only slicing at the slowest - changing dimension is supported. In Julia's column-major perspective, this is the last - dimension. For example, given an :class:`NDArray` of shape (2,3,4), ``slice(array, 2:3)`` will create - a :class:`NDArray` of shape (2,3,2), sharing the data with the original array. This operation is - used in data parallelization to split mini-batch into sub-batches for different devices. - - - - -.. function:: setindex!(arr :: NDArray, val, idx) - - Assign values to an :class:`NDArray`. Elementwise assignment is not implemented, only the following - scenarios are supported - - - ``arr[:] = val``: whole array assignment, ``val`` could be a scalar or an array (Julia ``Array`` - or :class:`NDArray`) of the same shape. - - ``arr[start:stop] = val``: assignment to a *slice*, ``val`` could be a scalar or an array of - the same shape to the slice. See also :func:`slice`. - - - - -.. function:: getindex(arr :: NDArray, idx) - - Shortcut for :func:`slice`. A typical use is to write - - .. code-block:: julia - - arr[:] += 5 - - which translates into - - .. code-block:: julia - - arr[:] = arr[:] + 5 - - which furthur translates into - - .. code-block:: julia - - setindex!(getindex(arr, Colon()), 5, Colon()) - - .. note:: - - The behavior is quite different from indexing into Julia's ``Array``. For example, ``arr[2:5]`` - create a **copy** of the sub-array for Julia ``Array``, while for :class:`NDArray`, this is - a *slice* that shares the memory. - - - - -Copying functions ------------------ - - - - -.. 
function:: - copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) - - Copy contents of ``src`` into ``dst``. - - - - -.. function:: - copy(arr :: NDArray) - copy(arr :: NDArray, ctx :: Context) - copy(arr :: Array, ctx :: Context) - - Create a copy of an array. When no :class:`Context` is given, create a Julia ``Array``. - Otherwise, create an :class:`NDArray` on the specified context. - - - - -.. function:: convert(::Type{Array{T}}, arr :: NDArray) - - Convert an :class:`NDArray` into a Julia ``Array`` of specific type. Data will be copied. - - - - -Basic arithmetics ------------------ - - - - -.. function:: @inplace - - Julia does not support re-definiton of ``+=`` operator (like ``__iadd__`` in python), - When one write ``a += b``, it gets translated to ``a = a+b``. ``a+b`` will allocate new - memory for the results, and the newly allocated :class:`NDArray` object is then assigned - back to a, while the original contents in a is discarded. This is very inefficient - when we want to do inplace update. - - This macro is a simple utility to implement this behavior. Write - - .. code-block:: julia - - @mx.inplace a += b - - will translate into - - .. code-block:: julia - - mx.add_to!(a, b) - - which will do inplace adding of the contents of ``b`` into ``a``. - - - - -.. function:: add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) - - Add a bunch of arguments into ``dst``. Inplace updating. - - - - -.. function:: - +(args...) - .+(args...) - - Summation. Multiple arguments of either scalar or :class:`NDArray` could be - added together. Note at least the first or second argument needs to be an :class:`NDArray` to - avoid ambiguity of built-in summation. - - - - -.. function:: sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) - - Subtract a bunch of arguments from ``dst``. Inplace updating. - - - - -.. function:: - -(arg0, arg1) - -(arg0) - .-(arg0, arg1) - - Subtraction ``arg0 - arg1``, of scalar types or :class:`NDArray`. 
Or create - the negative of ``arg0``. - - - - -.. function:: mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) - - Elementwise multiplication into ``dst`` of either a scalar or an :class:`NDArray` of the same shape. - Inplace updating. - - - - -.. function:: - .*(arg0, arg1) - - Elementwise multiplication of ``arg0`` and ``arg``, could be either scalar or :class:`NDArray`. - - - - -.. function:: - *(arg0, arg1) - - Currently only multiplication a scalar with an :class:`NDArray` is implemented. Matrix multiplication - is to be added soon. - - - - -.. function:: div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) - - Elementwise divide a scalar or an :class:`NDArray` of the same shape from ``dst``. Inplace updating. - - - - -.. function:: ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) - - Elementwise dividing an :class:`NDArray` by a scalar or another :class:`NDArray` of the same shape. - - - - -.. function:: /(arg0 :: NDArray, arg :: Real) - - Divide an :class:`NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. - - - - -Manipulating as Julia Arrays ----------------------------- - -.. function:: @nd_as_jl(captures..., statement) - - A convenient macro that allows to operate :class:`NDArray` as Julia Arrays. For example, - - .. code-block:: julia - - x = mx.zeros(3,4) - y = mx.ones(3,4) - z = mx.zeros((3,4), mx.gpu()) - - @mx.nd_as_jl ro=(x,y) rw=z begin - # now x, y, z are just ordinary Julia Arrays - z[:,1] = y[:,2] - z[:,2] = 5 - end - - Under the hood, the macro convert all the declared captures from :class:`NDArray` into Julia - Arrays, by using :func:`try_get_shared`. And automatically commit the modifications back into - the :class:`NDArray` that is declared as ``rw``. This is useful for fast prototyping and when - implement non-critical computations, such as :class:`AbstractEvalMetric`. - - .. note:: - - - Multiple ``rw`` and / or ``ro`` capture declaration could be made. 
- - The macro does **not** check to make sure that ``ro`` captures are not modified. If the - original :class:`NDArray` lives in CPU memory, then it is very likely the corresponding - Julia Array shares data with the :class:`NDArray`, so modifying the Julia Array will also - modify the underlying :class:`NDArray`. - - More importantly, since the :class:`NDArray` is - asynchronized, we will wait for *writing* for ``rw`` variables but wait only for *reading* - in ``ro`` variables. If we write into those ``ro`` variables, **and** if the memory is - shared, racing condition might happen, and the behavior is undefined. - - When an :class:`NDArray` is declared to be captured as ``rw``, its contents is always sync - back in the end. - - The execution results of the expanded macro is always ``nothing``. - - The statements are wrapped in a ``let``, thus locally introduced new variables will not be - available after the statements. So you will need to declare the variables before calling the - macro if needed. - - - - -.. function:: try_get_shared(arr) - - Try to create a Julia array by sharing the data with the underlying :class:`NDArray`. - - :param NDArray arr: the array to be shared. - - .. warning:: - - The returned array does not guarantee to share data with the underlying :class:`NDArray`. - In particular, data sharing is possible only when the :class:`NDArray` lives on CPU. - - - - -.. function:: is_shared(j_arr, arr) - - Test whether ``j_arr`` is sharing data with ``arr``. - - :param Array j_arr: the Julia Array. - :param NDArray arr: the :class:`NDArray`. - - - - -IO --- - - - - -.. function:: load(filename, ::Type{NDArray}) - - Load NDArrays from binary file. - - :param AbstractString filename: the path of the file to load. It could be S3 or HDFS address. - :return: Either ``Dict{Base.Symbol, NDArray}`` or ``Vector{NDArray}``. - - If the ``libmxnet`` is built with the corresponding component enabled. 
Examples - - * ``s3://my-bucket/path/my-s3-ndarray`` - * ``hdfs://my-bucket/path/my-hdfs-ndarray`` - * ``/path-to/my-local-ndarray`` - - - - -.. function:: save(filename :: AbstractString, data) - - Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built - with corresponding support. - - :param AbstractString filename: path to the binary file to write to. - :param data: data to save to file. - :type data: :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. - - - - -libmxnet APIs -------------- - - - - -The libxmnet APIs are automatically imported from ``libmxnet.so``. The functions listed -here operate on :class:`NDArray` objects. The arguments to the functions are typically ordered -as - -.. code-block:: julia - - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) - -unless ``NDARRAY_ARG_BEFORE_SCALAR`` is not set. In this case, the scalars are put before the input arguments: - -.. code-block:: julia - - func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) - - -If ``ACCEPT_EMPTY_MUTATE_TARGET`` is set. An overloaded function without the output arguments will also be defined: - -.. code-block:: julia - - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) - -Upon calling, the output arguments will be automatically initialized with empty NDArrays. - -Those functions always return the output arguments. If there is only one output (the typical situation), that -object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. - -Public APIs -^^^^^^^^^^^ -.. function:: abs(...) - - Take absolute value of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: argmax_channel(...) - - Take argmax indices of each channel of the src.The result will be ndarray of shape (num_channel,) on the same device. 
- - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: ceil(...) - - Take ceil value of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: choose_element_0index(...) - - Choose one element from each line(row for python, column for R/Julia) in lhs according to index indicated by rhs. This function assume rhs uses 0-based index. - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: NDArray - - - - - -.. function:: clip(...) - - Clip ndarray elements to range (a_min, a_max) - - :param src: Source input - :type src: NDArray - - - :param a_min: Minimum value - :type a_min: real_t - - - :param a_max: Maximum value - :type a_max: real_t - - - - - -.. function:: cos(...) - - Take cos of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: dot(...) - - Calculate 2D matrix multiplication - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: NDArray - - - - - -.. function:: exp(...) - - Take exp of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: fill_element_0index(...) - - Fill one element of each line(row for python, column for R/Julia) in lhs according to index indicated by rhs and values indicated by mhs. This function assume rhs uses 0-based index. - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param mhs: Middle operand to the function. - :type mhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: NDArray - - - - - -.. function:: floor(...) - - Take floor value of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: log(...) - - Take log of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: max(...) 
- - Take max of the src.The result will be ndarray of shape (1,) on the same device. - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: min(...) - - Take min of the src.The result will be ndarray of shape (1,) on the same device. - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: norm(...) - - Take L2 norm of the src.The result will be ndarray of shape (1,) on the same device. - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: round(...) - - Take round value of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: rsqrt(...) - - Take rsqrt of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: sign(...) - - Take sign value of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: sin(...) - - Take sin of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: sqrt(...) - - Take sqrt of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: square(...) - - Take square of the src - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: sum(...) - - Take sum of the src.The result will be ndarray of shape (1,) on the same device. - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: sum_mid_internal(...) - - Take sum on medium dimension of the 3D src. - - :param src: Source input to the function - :type src: NDArray - - - - - -.. function:: transpose(...) - - Transpose the input matrix and return a new one - - :param src: Source input to the function - :type src: NDArray - - - - -Internal APIs -^^^^^^^^^^^^^ - -.. note:: - - Document and signatures for internal API functions might be incomplete. - -.. function:: _broadcast(...) 
- - Broadcast array in the given axis to the given size - - :param src: source ndarray - :type src: NDArray - - - :param axis: axis to broadcast - :type axis: int - - - :param size: size of broadcast - :type size: int - - - - - -.. function:: _copyto(...) - - - - :param src: Source input to the function. - :type src: NDArray - - - - - -.. function:: _div(...) - - Multiply lhs by rhs - - :param lhs: Left operand to the function - :type lhs: NDArray - - - :param rhs: Right operand to the function - :type rhs: NDArray - - - - - -.. function:: _div_scalar(...) - - - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: real_t - - - - - -.. function:: _imdecode(...) - - Decode an image, clip to (x0, y0, x1, y1), substract mean, and write to buffer - - :param mean: image mean - :type mean: NDArray - - - :param index: buffer position for output - :type index: int - - - :param x0: x0 - :type x0: int - - - :param y0: y0 - :type y0: int - - - :param x1: x1 - :type x1: int - - - :param y1: y1 - :type y1: int - - - :param c: channel - :type c: int - - - :param size: length of str_img - :type size: int - - - - - -.. function:: _maximum(...) - - Elementwise max of lhs by rhs - - :param lhs: Left operand to the function - :type lhs: NDArray - - - :param rhs: Right operand to the function - :type rhs: NDArray - - - - - -.. function:: _minimum(...) - - Elementwise min of lhs by rhs - - :param lhs: Left operand to the function - :type lhs: NDArray - - - :param rhs: Right operand to the function - :type rhs: NDArray - - - - - -.. function:: _minus(...) - - Minus lhs and rhs - - :param lhs: Left operand to the function - :type lhs: NDArray - - - :param rhs: Right operand to the function - :type rhs: NDArray - - - - - -.. function:: _minus_scalar(...) - - - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: real_t - - - - - -.. 
function:: _mul(...) - - Multiply lhs and rhs - - :param lhs: Left operand to the function - :type lhs: NDArray - - - :param rhs: Right operand to the function - :type rhs: NDArray - - - - - -.. function:: _mul_scalar(...) - - - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: real_t - - - - - -.. function:: _onehot_encode(...) - - - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: NDArray - - - - - -.. function:: _plus(...) - - Add lhs and rhs - - :param lhs: Left operand to the function - :type lhs: NDArray - - - :param rhs: Right operand to the function - :type rhs: NDArray - - - - - -.. function:: _plus_scalar(...) - - - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: real_t - - - - - -.. function:: _power(...) - - Elementwise power(lhs, rhs) - - :param lhs: Left operand to the function - :type lhs: NDArray - - - :param rhs: Right operand to the function - :type rhs: NDArray - - - - - -.. function:: _random_gaussian(...) - - - - - - - -.. function:: _random_uniform(...) - - - - - - - -.. function:: _rdiv_scalar(...) - - - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: real_t - - - - - -.. function:: _rminus_scalar(...) - - - - :param lhs: Left operand to the function. - :type lhs: NDArray - - - :param rhs: Right operand to the function. - :type rhs: real_t - - - - - -.. function:: _set_value(...) - - - - :param src: Source input to the function. 
- :type src: real_t - - - - - - - - diff --git a/docs/api/nn-factory.rst b/docs/api/nn-factory.rst deleted file mode 100644 index 44569c640da9..000000000000 --- a/docs/api/nn-factory.rst +++ /dev/null @@ -1,33 +0,0 @@ - -Neural Networks Factory -======================= - -Neural network factory provide convenient helper functions to define -common neural networks. - - - - -.. function:: MLP(input, spec) - - Construct a multi-layer perceptron. A MLP is a multi-layer neural network with - fully connected layers. - - :param SymbolicNode input: the input to the mlp. - :param spec: the mlp specification, a list of hidden dimensions. For example, - ``[128, (512, :sigmoid), 10]``. The number in the list indicate the - number of hidden units in each layer. A tuple could be used to specify - the activation of each layer. Otherwise, the default activation will - be used (except for the last layer). - :param Base.Symbol hidden_activation: keyword argument, default ``:relu``, indicating - the default activation for hidden layers. The specification here could be overwritten - by layer-wise specification in the ``spec`` argument. Also activation is not - applied to the last, i.e. the prediction layer. See :func:`Activation` for a - list of supported activation types. - :param prefix: keyword argument, default ``gensym()``, used as the prefix to - name the constructed layers. - - :return: the constructed MLP. - - - diff --git a/docs/api/optimizer.rst b/docs/api/optimizer.rst deleted file mode 100644 index 03c9c11fed4a..000000000000 --- a/docs/api/optimizer.rst +++ /dev/null @@ -1,194 +0,0 @@ - -Optimizers -========== - -Common interfaces ------------------ - - - - -.. class:: AbstractOptimizer - - Base type for all optimizers. - - - - -.. class:: AbstractLearningRateScheduler - - Base type for all learning rate scheduler. - - - - -.. class:: AbstractMomentumScheduler - - Base type for all momentum scheduler. - - - - -.. class:: OptimizationState - - .. 
attribute:: batch_size - - The size of the mini-batch used in stochastic training. - - .. attribute:: curr_epoch - - The current epoch count. Epoch 0 means no training yet, during the first - pass through the data, the epoch will be 1; during the second pass, the - epoch count will be 1, and so on. - - .. attribute:: curr_batch - - The current mini-batch count. The batch count is reset during every epoch. - The batch count 0 means the beginning of each epoch, with no mini-batch - seen yet. During the first mini-batch, the mini-batch count will be 1. - - .. attribute:: curr_iter - - The current iteration count. One iteration corresponds to one mini-batch, - but unlike the mini-batch count, the iteration count does **not** reset - in each epoch. So it track the *total* number of mini-batches seen so far. - - - - -.. function:: get_learning_rate(scheduler, state) - - :param AbstractLearningRateScheduler scheduler: a learning rate scheduler. - :param OptimizationState state: the current state about epoch, mini-batch and iteration count. - :return: the current learning rate. - - - - -.. class:: LearningRate.Fixed - - Fixed learning rate scheduler always return the same learning rate. - - - - -.. class:: LearningRate.Exp - - :math:`\eta_t = \eta_0\gamma^t`. Here :math:`t` is the epoch count, or the iteration - count if ``decay_on_iteration`` is set to true. - - - - -.. class:: LearningRate.Inv - - :math:`\eta_t = \eta_0 * (1 + \gamma * t)^(-power)`. - Here :math:`t` is the epoch count, or the iteration count if ``decay_on_iteration`` - is set to true. - - - - -.. function:: get_momentum(scheduler, state) - - :param AbstractMomentumScheduler scheduler: the momentum scheduler. - :param OptimizationState state: the state about current epoch, mini-batch and iteration count. - :return: the current momentum. - - - - -.. class:: Momentum.Null - - The null momentum scheduler always returns 0 for momentum. It is also used to - explicitly indicate momentum should not be used. 
- - - - -.. class:: Momentum.Fixed - - Fixed momentum scheduler always returns the same value. - - - - -.. function:: get_updater(optimizer) - - :param AbstractOptimizer optimizer: the underlying optimizer. - - A utility function to create an updater function, that uses its closure to - store all the states needed for each weights. - - - - -Built-in optimizers -------------------- - - - - -.. class:: AbstractOptimizerOptions - - Base class for all optimizer options. - - - - -.. function:: normalized_gradient(opts, state, grad) - - :param AbstractOptimizerOptions opts: options for the optimizer, should contain the field - ``grad_scale``, ``grad_clip`` and ``weight_decay``. - :param OptimizationState state: the current optimization state. - :param NDArray weight: the trainable weights. - :param NDArray grad: the original gradient of the weights. - - Get the properly normalized gradient (re-scaled and clipped if necessary). - - - - -.. class:: SGD - - Stochastic gradient descent optimizer. - - .. function:: SGD(; kwargs...) - - :param Real lr: default `0.01`, learning rate. - :param AbstractLearningRateScheduler lr_scheduler: default `nothing`, a - dynamic learning rate scheduler. If set, will overwrite the `lr` - parameter. - :param Real momentum: default `0.0`, the momentum. - :param AbstractMomentumScheduler momentum_scheduler: default `nothing`, - a dynamic momentum scheduler. If set, will overwrite the `momentum` - parameter. - :param Real grad_clip: default `0`, if positive, will clip the gradient - into the bounded range `[-grad_clip, grad_clip]`. - :param Real weight_decay: default `0.0001`, weight decay is equivalent to - adding a global l2 regularizer to the parameters. - - - - -.. class:: ADAM - - The solver described in Diederik Kingma, Jimmy Ba: *Adam: A Method for - Stochastic Optimization*. arXiv:1412.6980 [cs.LG]. - - .. function:: ADAM(; kwargs...) - - :param Real lr: default `0.001`, learning rate. 
- :param AbstractLearningRateScheduler lr_scheduler: default `nothing`, a - dynamic learning rate scheduler. If set, will overwrite the `lr` - parameter. - :param Real beta1: default `0.9`. - :param Real beta2: default `0.999`. - :param Real epsilon: default `1e-8`. - :param Real grad_clip: default `0`, if positive, will clip the gradient - into the range `[-grad_clip, grad_clip]`. - :param Real weight_decay: default `0.00001`, weight decay is equivalent - to adding a global l2 regularizer for all the parameters. - - - diff --git a/docs/api/symbolic-node.rst b/docs/api/symbolic-node.rst deleted file mode 100644 index 125b391046cb..000000000000 --- a/docs/api/symbolic-node.rst +++ /dev/null @@ -1,1591 +0,0 @@ - -Symbolic API -============ - - - - -.. class:: SymbolicNode - - SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. - - - - -.. function:: deepcopy(self :: SymbolicNode) - - Make a deep copy of a SymbolicNode. - - - - -.. function:: copy(self :: SymbolicNode) - - Make a copy of a SymbolicNode. The same as making a deep copy. - - - - -.. function:: - call(self :: SymbolicNode, args :: SymbolicNode...) - call(self :: SymbolicNode; kwargs...) - - Make a new node by composing ``self`` with ``args``. Or the arguments - can be specified using keyword arguments. - - - - -.. function:: list_arguments(self :: SymbolicNode) - - List all the arguments of this node. The argument for a node contains both - the inputs and parameters. For example, a :class:`FullyConnected` node will - have both data and weights in its arguments. A composed node (e.g. a MLP) will - list all the arguments for intermediate nodes. - - :return: A list of symbols indicating the names of the arguments. - - - - -.. function:: list_outputs(self :: SymbolicNode) - - List all the outputs of this node. - - :return: A list of symbols indicating the names of the outputs. - - - - -.. 
function:: list_auxiliary_states(self :: SymbolicNode) - - - List all auxiliary states in the symbool. - - Auxiliary states are special states of symbols that do not corresponds to an argument, - and do not have gradient. But still be useful for the specific operations. - A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. - Most operators do not have Auxiliary states. - - :return: A list of symbols indicating the names of the auxiliary states. - - - - -.. function:: get_internals(self :: SymbolicNode) - - Get a new grouped :class:`SymbolicNode` whose output contains all the internal outputs of - this :class:`SymbolicNode`. - - - - -.. function:: get_attr(self :: SymbolicNode, key :: Symbol) - - Get attribute attached to this :class:`SymbolicNode` belonging to key. - :return: The value belonging to key as a :class:`Nullable`. - - - - -.. function:: set_attr(self:: SymbolicNode, key :: Symbol, value :: AbstractString) - - Set the attribute key to value for this :class:`SymbolicNode`. - - .. warning:: - - It is encouraged not to call this function directly, unless you know exactly what you are doing. The - recommended way of setting attributes is when creating the :class:`SymbolicNode`. Changing - the attributes of a :class:`SymbolicNode` that is already been used somewhere else might - cause unexpected behavior and inconsistency. - - - - -.. function:: Variable(name :: Union{Symbol, AbstractString}) - - Create a symbolic variable with the given name. This is typically used as a placeholder. - For example, the data node, acting as the starting point of a network architecture. - - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`Variable`. - - - - -.. function:: Group(nodes :: SymbolicNode...) - - Create a :class:`SymbolicNode` by grouping nodes together. - - - - -.. function:: - infer_shape(self :: SymbolicNode; args...) - infer_shape(self :: SymbolicNode; kwargs...) 
- - Do shape inference according to the input shapes. The input shapes could be provided - as a list of shapes, which should specify the shapes of inputs in the same order as - the arguments returned by :func:`list_arguments`. Alternatively, the shape information - could be specified via keyword arguments. - - :return: A 3-tuple containing shapes of all the arguments, shapes of all the outputs and - shapes of all the auxiliary variables. If shape inference failed due to incomplete - or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. - - - - -.. function:: - getindex(self :: SymbolicNode, idx :: Union{Int, Base.Symbol, AbstractString}) - - Get a node representing the specified output of this node. The index could be - a symbol or string indicating the name of the output, or a 1-based integer - indicating the index, as in the list of :func:`list_outputs`. - - - - -.. function:: to_json(self :: SymbolicNode) - - Convert a :class:`SymbolicNode` into a JSON string. - - - - -.. function:: from_json(repr :: AbstractString, ::Type{SymbolicNode}) - - Load a :class:`SymbolicNode` from a JSON string representation. - - - - -.. function:: load(filename :: AbstractString, ::Type{SymbolicNode}) - - Load a :class:`SymbolicNode` from a JSON file. - - - - -.. function:: save(filename :: AbstractString, node :: SymbolicNode) - - Save a :class:`SymbolicNode` to a JSON file. - - - - -libmxnet APIs -------------- - -Public APIs -^^^^^^^^^^^ -.. function:: Activation(...) - - Apply activation function to input.Softmax Activation is only available with CUDNN on GPUand will be computed at each location across channel if input is 4D. - - :param data: Input data to activation function. - :type data: SymbolicNode - - - :param act_type: Activation function to be applied. - :type act_type: {'relu', 'sigmoid', 'softrelu', 'tanh'}, required - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. 
- :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: BatchNorm(...) - - Apply batch normalization to input. - - :param data: Input data to batch normalization - :type data: SymbolicNode - - - :param eps: Epsilon to prevent div 0 - :type eps: float, optional, default=0.001 - - - :param momentum: Momentum for moving average - :type momentum: float, optional, default=0.9 - - - :param fix_gamma: Fix gamma while training - :type fix_gamma: boolean, optional, default=True - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: BlockGrad(...) - - Get output from a symbol and pass 0 gradient back - - :param data: Input data. - :type data: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Cast(...) - - Cast array to a different data type. - - :param data: Input data to cast function. - :type data: SymbolicNode - - - :param dtype: Target data type. - :type dtype: {'float16', 'float32', 'float64', 'int32', 'uint8'}, required - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Concat(...) - - Perform an feature concat on channel dim (dim 1) over all the inputs. - - This function support variable length positional :class:`SymbolicNode` inputs. - - :param data: List of tensors to concatenate - :type data: SymbolicNode[] - - - :param num_args: Number of inputs to be concated. 
- :type num_args: int, required - - - :param dim: the dimension to be concated. - :type dim: int, optional, default='1' - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Convolution(...) - - Apply convolution to input then add a bias. - - :param data: Input data to the ConvolutionOp. - :type data: SymbolicNode - - - :param weight: Weight matrix. - :type weight: SymbolicNode - - - :param bias: Bias parameter. - :type bias: SymbolicNode - - - :param kernel: convolution kernel size: (y, x) - :type kernel: Shape(tuple), required - - - :param stride: convolution stride: (y, x) - :type stride: Shape(tuple), optional, default=(1,1) - - - :param dilate: convolution dilate: (y, x) - :type dilate: Shape(tuple), optional, default=(1,1) - - - :param pad: pad for convolution: (y, x) - :type pad: Shape(tuple), optional, default=(0,0) - - - :param num_filter: convolution filter(channel) number - :type num_filter: int (non-negative), required - - - :param num_group: Number of groups partition. This option is not supported by CuDNN, you can use SliceChannel to num_group,apply convolution and concat instead to achieve the same need. - :type num_group: int (non-negative), optional, default=1 - - - :param workspace: Tmp workspace for convolution (MB). - :type workspace: long (non-negative), optional, default=512 - - - :param no_bias: Whether to disable bias parameter. - :type no_bias: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Crop(...) 
- - Crop the 2nd and 3rd dim of input data, with the corresponding size of h_w or with width and height of the second input symbol, i.e., with one input, we need h_w to specify the crop height and width, otherwise the second input symbol's size will be used - - This function support variable length positional :class:`SymbolicNode` inputs. - - :param data: Tensor or List of Tensors, the second input will be used as crop_like shape reference - :type data: SymbolicNode or SymbolicNode[] - - - :param num_args: Number of inputs for crop, if equals one, then we will use the h_wfor crop height and width, else if equals two, then we will use the heightand width of the second input symbol, we name crop_like here - :type num_args: int, required - - - :param offset: crop offset coordinate: (y, x) - :type offset: Shape(tuple), optional, default=(0,0) - - - :param h_w: crop height and weight: (h, w) - :type h_w: Shape(tuple), optional, default=(0,0) - - - :param center_crop: If set to true, then it will use be the center_crop,or it will crop using the shape of crop_like - :type center_crop: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: CuDNNBatchNorm(...) - - Apply batch normalization to input. - - :param data: Input data to batch normalization - :type data: SymbolicNode - - - :param eps: Epsilon to prevent div 0 - :type eps: float, optional, default=0.001 - - - :param momentum: Momentum for moving average - :type momentum: float, optional, default=0.9 - - - :param fix_gamma: Fix gamma while training - :type fix_gamma: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. 
- - :return: SymbolicNode. - - - - - -.. function:: Deconvolution(...) - - Apply deconvolution to input then add a bias. - - :param data: Input data to the DeconvolutionOp. - :type data: SymbolicNode - - - :param weight: Weight matrix. - :type weight: SymbolicNode - - - :param bias: Bias parameter. - :type bias: SymbolicNode - - - :param kernel: deconvolution kernel size: (y, x) - :type kernel: Shape(tuple), required - - - :param stride: deconvolution stride: (y, x) - :type stride: Shape(tuple), optional, default=(1,1) - - - :param pad: pad for deconvolution: (y, x) - :type pad: Shape(tuple), optional, default=(0,0) - - - :param num_filter: deconvolution filter(channel) number - :type num_filter: int (non-negative), required - - - :param num_group: number of groups partition - :type num_group: int (non-negative), optional, default=1 - - - :param workspace: Tmp workspace for deconvolution (MB) - :type workspace: long (non-negative), optional, default=512 - - - :param no_bias: Whether to disable bias parameter. - :type no_bias: boolean, optional, default=True - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Dropout(...) - - Apply dropout to input - - :param data: Input data to dropout. - :type data: SymbolicNode - - - :param p: Fraction of the input that gets dropped out at training time - :type p: float, optional, default=0.5 - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: ElementWiseSum(...) - - Perform an elementwise sum over all the inputs. - - This function support variable length positional :class:`SymbolicNode` inputs. 
- - :param num_args: Number of inputs to be summed. - :type num_args: int, required - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Embedding(...) - - Get embedding for one-hot input. A n-dimensional input tensor will be trainsformed into a (n+1)-dimensional tensor, where a new dimension is added for the embedding results. - - :param data: Input data to the EmbeddingOp. - :type data: SymbolicNode - - - :param weight: Enbedding weight matrix. - :type weight: SymbolicNode - - - :param input_dim: input dim of one-hot encoding - :type input_dim: int, required - - - :param output_dim: output dim of embedding - :type output_dim: int, required - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Flatten(...) - - Flatten input - - :param data: Input data to flatten. - :type data: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: FullyConnected(...) - - Apply matrix multiplication to input then add a bias. - - :param data: Input data to the FullyConnectedOp. - :type data: SymbolicNode - - - :param weight: Weight matrix. - :type weight: SymbolicNode - - - :param bias: Bias parameter. - :type bias: SymbolicNode - - - :param num_hidden: Number of hidden nodes of the output. - :type num_hidden: int, required - - - :param no_bias: Whether to disable bias parameter. - :type no_bias: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. 
(e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: IdentityAttachKLSparseReg(...) - - Apply a sparse regularization to the output a sigmoid activation function. - - :param data: Input data. - :type data: SymbolicNode - - - :param sparseness_target: The sparseness target - :type sparseness_target: float, optional, default=0.1 - - - :param penalty: The tradeoff parameter for the sparseness penalty - :type penalty: float, optional, default=0.001 - - - :param momentum: The momentum for running average - :type momentum: float, optional, default=0.9 - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: LRN(...) - - Apply convolution to input then add a bias. - - :param data: Input data to the ConvolutionOp. - :type data: SymbolicNode - - - :param alpha: value of the alpha variance scaling parameter in the normalization formula - :type alpha: float, optional, default=0.0001 - - - :param beta: value of the beta power parameter in the normalization formula - :type beta: float, optional, default=0.75 - - - :param knorm: value of the k parameter in normalization formula - :type knorm: float, optional, default=2 - - - :param nsize: normalization window width in elements. - :type nsize: int (non-negative), required - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: LeakyReLU(...) - - Apply activation function to input. - - :param data: Input data to activation function. - :type data: SymbolicNode - - - :param act_type: Activation function to be applied. 
- :type act_type: {'elu', 'leaky', 'prelu', 'rrelu'},optional, default='leaky' - - - :param slope: Init slope for the activation. (For leaky and elu only) - :type slope: float, optional, default=0.25 - - - :param lower_bound: Lower bound of random slope. (For rrelu only) - :type lower_bound: float, optional, default=0.125 - - - :param upper_bound: Upper bound of random slope. (For rrelu only) - :type upper_bound: float, optional, default=0.334 - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: LinearRegressionOutput(...) - - Use linear regression for final output, this is used on final output of a net. - - :param data: Input data to function. - :type data: SymbolicNode - - - :param label: Input label to function. - :type label: SymbolicNode - - - :param grad_scale: Scale the gradient by a float factor - :type grad_scale: float, optional, default=1 - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: LogisticRegressionOutput(...) - - Use Logistic regression for final output, this is used on final output of a net. - Logistic regression is suitable for binary classification or probability prediction tasks. - - :param data: Input data to function. - :type data: SymbolicNode - - - :param label: Input label to function. - :type label: SymbolicNode - - - :param grad_scale: Scale the gradient by a float factor - :type grad_scale: float, optional, default=1 - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. 
- - - - - -.. function:: MAERegressionOutput(...) - - Use mean absolute error regression for final output, this is used on final output of a net. - - :param data: Input data to function. - :type data: SymbolicNode - - - :param label: Input label to function. - :type label: SymbolicNode - - - :param grad_scale: Scale the gradient by a float factor - :type grad_scale: float, optional, default=1 - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Pooling(...) - - Perform spatial pooling on inputs. - - :param data: Input data to the pooling operator. - :type data: SymbolicNode - - - :param kernel: pooling kernel size: (y, x) - :type kernel: Shape(tuple), required - - - :param pool_type: Pooling type to be applied. - :type pool_type: {'avg', 'max', 'sum'}, required - - - :param stride: stride: for pooling (y, x) - :type stride: Shape(tuple), optional, default=(1,1) - - - :param pad: pad for pooling: (y, x) - :type pad: Shape(tuple), optional, default=(0,0) - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: Reshape(...) - - Reshape input to target shape - - :param data: Input data to reshape. - :type data: SymbolicNode - - - :param target_shape: Target new shape. One and only one dim can be 0, in which case it will be inferred from the rest of dims - :type target_shape: Shape(tuple), required - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: SliceChannel(...) 
- - Slice input equally along specified axis - - :param num_outputs: Number of outputs to be sliced. - :type num_outputs: int, required - - - :param axis: Dimension along which to slice. - :type axis: int, optional, default='1' - - - :param squeeze_axis: If true AND the sliced dimension becomes 1, squeeze that dimension. - :type squeeze_axis: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode[]. - - - - - -.. function:: Softmax(...) - - DEPRECATED: Perform a softmax transformation on input. Please use SoftmaxOutput - - :param data: Input data to softmax. - :type data: SymbolicNode - - - :param grad_scale: Scale the gradient by a float factor - :type grad_scale: float, optional, default=1 - - - :param ignore_label: the ignore_label will not work in backward, and this only be used when multi_output=true - :type ignore_label: float, optional, default=-1 - - - :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensional input tensor, softmax will generate n*x_1*...*x_n output, each has k classes - :type multi_output: boolean, optional, default=False - - - :param use_ignore: If set to true, the ignore_label value will not contribute to the backward gradient - :type use_ignore: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: SoftmaxActivation(...) - - Apply softmax activation to input. This is intended for internal layers. For output (loss layer) please use SoftmaxOutput. If type=instance, this operator will compute a softmax for each instance in the batch; this is the default mode. 
If type=channel, this operator will compute a num_channel-class softmax at each position of each instance; this can be used for fully convolutional network, image segmentation, etc. - - :param data: Input data to activation function. - :type data: SymbolicNode - - - :param type: Softmax Mode. If set to instance, this operator will compute a softmax for each instance in the batch; this is the default mode. If set to channel, this operator will compute a num_channel-class softmax at each position of each instance; this can be used for fully convolutional network, image segmentation, etc. - :type type: {'channel', 'instance'},optional, default='instance' - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: SoftmaxOutput(...) - - Perform a softmax transformation on input, backprop with logloss. - - :param data: Input data to softmax. - :type data: SymbolicNode - - - :param label: Label data. - :type label: SymbolicNode - - - :param grad_scale: Scale the gradient by a float factor - :type grad_scale: float, optional, default=1 - - - :param ignore_label: the ignore_label will not work in backward, and this only be used when multi_output=true - :type ignore_label: float, optional, default=-1 - - - :param multi_output: If set to true, for a (n,k,x_1,..,x_n) dimensional input tensor, softmax will generate n*x_1*...*x_n output, each has k classes - :type multi_output: boolean, optional, default=False - - - :param use_ignore: If set to true, the ignore_label value will not contribute to the backward gradient - :type use_ignore: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. 
- - - - - -.. function:: SwapAxis(...) - - Apply swapaxis to input. - - :param data: Input data to the SwapAxisOp. - :type data: SymbolicNode - - - :param dim1: the first axis to be swapped. - :type dim1: int (non-negative), optional, default=0 - - - :param dim2: the second axis to be swapped. - :type dim2: int (non-negative), optional, default=0 - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: UpSampling(...) - - Perform nearest neighboor/bilinear up sampling to inputs - - This function support variable length positional :class:`SymbolicNode` inputs. - - :param data: Array of tensors to upsample - :type data: SymbolicNode[] - - - :param scale: Up sampling scale - :type scale: int (non-negative), required - - - :param num_filter: Input filter. Only used by nearest sample_type. - :type num_filter: int (non-negative), optional, default=0 - - - :param sample_type: upsampling method - :type sample_type: {'bilinear', 'nearest'}, required - - - :param multi_input_mode: How to handle multiple input. concat means concatenate upsampled images along the channel dimension. sum means add all images together, only available for nearest neighbor upsampling. - :type multi_input_mode: {'concat', 'sum'},optional, default='concat' - - - :param num_args: Number of inputs to be upsampled. For nearest neighbor upsampling, this can be 1-N; the size of output will be(scale*h_0,scale*w_0) and all other inputs will be upsampled to thesame size. For bilinear upsampling this must be 2; 1 input and 1 weight. - :type num_args: int, required - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: abs(...) 
- - Take absolute value of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: ceil(...) - - Take ceil value of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: cos(...) - - Take cos of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: exp(...) - - Take exp of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: floor(...) - - Take floor value of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. 
`:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: log(...) - - Take log of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: round(...) - - Take round value of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: rsqrt(...) - - Take rsqrt of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: sign(...) - - Take sign value of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: sin(...) 
- - Take sin of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: sqrt(...) - - Take sqrt of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: square(...) - - Take square of the src - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: transpose(...) - - Transpose the input matrix and return a new one - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - -Internal APIs -^^^^^^^^^^^^^ - -.. note:: - - Document and signatures for internal API functions might be incomplete. - -.. function:: _CrossDeviceCopy(...) - - Special op to copy data cross device - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. 
`:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _Div(...) - - Multiply lhs by rhs - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: _DivScalar(...) - - Perform an elementwise div. - - :param array: Input array operand to the operation. - :type array: SymbolicNode - - - :param scalar: scalar value. - :type scalar: float, required - - - :param scalar_on_left: scalar operand is on the left. - :type scalar_on_left: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _Maximum(...) - - Elementwise max of lhs by rhs - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: _MaximumScalar(...) - - Perform an elementwise maximum. - - :param array: Input array operand to the operation. - :type array: SymbolicNode - - - :param scalar: scalar value. - :type scalar: float, required - - - :param scalar_on_left: scalar operand is on the left. 
- :type scalar_on_left: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _Minimum(...) - - Elementwise min of lhs by rhs - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: _MinimumScalar(...) - - Perform an elementwise minimum. - - :param array: Input array operand to the operation. - :type array: SymbolicNode - - - :param scalar: scalar value. - :type scalar: float, required - - - :param scalar_on_left: scalar operand is on the left. - :type scalar_on_left: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _Minus(...) - - Minus lhs and rhs - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: _MinusScalar(...) - - Perform an elementwise minus. - - :param array: Input array operand to the operation. - :type array: SymbolicNode - - - :param scalar: scalar value. 
- :type scalar: float, required - - - :param scalar_on_left: scalar operand is on the left. - :type scalar_on_left: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _Mul(...) - - Multiply lhs and rhs - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: _MulScalar(...) - - Perform an elementwise mul. - - :param array: Input array operand to the operation. - :type array: SymbolicNode - - - :param scalar: scalar value. - :type scalar: float, required - - - :param scalar_on_left: scalar operand is on the left. - :type scalar_on_left: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _NDArray(...) - - Stub for implementing an operator implemented in native frontend language with ndarray. - - :param info: - :type info: , required - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _Native(...) - - Stub for implementing an operator implemented in native frontend language. 
- - :param info: - :type info: , required - - - :param need_top_grad: Whether this layer needs out grad for backward. Should be false for loss layers. - :type need_top_grad: boolean, optional, default=True - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _Plus(...) - - Add lhs and rhs - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: _PlusScalar(...) - - Perform an elementwise plus. - - :param array: Input array operand to the operation. - :type array: SymbolicNode - - - :param scalar: scalar value. - :type scalar: float, required - - - :param scalar_on_left: scalar operand is on the left. - :type scalar_on_left: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - -.. function:: _Power(...) - - Elementwise power(lhs, rhs) - - :param lhs: Left symbolic input to the function - :type lhs: SymbolicNode - - - :param rhs: Left symbolic input to the function - :type rhs: SymbolicNode - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: . - - - - - -.. function:: _PowerScalar(...) - - Perform an elementwise power. - - :param array: Input array operand to the operation. 
- :type array: SymbolicNode - - - :param scalar: scalar value. - :type scalar: float, required - - - :param scalar_on_left: scalar operand is on the left. - :type scalar_on_left: boolean, optional, default=False - - :param Symbol name: The name of the :class:`SymbolicNode`. (e.g. `:my_symbol`), optional. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`. - - :return: SymbolicNode. - - - - - - - - diff --git a/docs/api/visualize.rst b/docs/api/visualize.rst deleted file mode 100644 index 42857c5f4ff0..000000000000 --- a/docs/api/visualize.rst +++ /dev/null @@ -1,19 +0,0 @@ - -Network Visualization -===================== - - - - -.. function:: to_graphviz(network) - - :param SymbolicNode network: the network to visualize. - :param AbstractString title: keyword argument, default "Network Visualization", - the title of the GraphViz graph. - :param input_shapes: keyword argument, default ``nothing``. If provided, - will run shape inference and plot with the shape information. Should - be either a dictionary of name-shape mapping or an array of shapes. - :return: the graph description in GraphViz ``dot`` language. - - - diff --git a/docs/build-api.jl b/docs/build-api.jl deleted file mode 100644 index c8e54677096f..000000000000 --- a/docs/build-api.jl +++ /dev/null @@ -1,98 +0,0 @@ -# extract API docs -using MXNet - -const SRC_DIR = joinpath(Pkg.dir("MXNet"), "src") -const API_DIR = joinpath(Pkg.dir("MXNet"), "docs", "api") - -################################################################################# -# Document Builders -################################################################################# -function extract_doc(output_filename::AbstractString, input_filenames::AbstractString...) 
- mkpath(API_DIR) - open(joinpath(API_DIR, output_filename), "w") do io - for in_fn in input_filenames - for doc in eachmatch(r"^#=doc\s*$(.*?)^=#\s*$"ms, readall(joinpath(SRC_DIR, in_fn))) - println(io, doc.captures[1], "\n\n") - end - end - end -end - -function sort_api_names(names) - names = collect(names) - names_pub = filter(x -> !startswith(string(x), '_'), names) - names_pri = filter(x -> startswith(string(x), '_'), names) - return (sort(names_pub), sort(names_pri)) -end - -function embed_mxnet_api(output_filename::AbstractString, key::AbstractString, generator::Function) - output_filename = joinpath(API_DIR, output_filename) - contents = readall(output_filename) - open(output_filename, "w") do io - docs = generator(gen_docs=true) - function gen_doc(fname) - doc = replace(docs[fname], r"^"m, " ") - """ - .. function:: $fname(...) - - $doc - - """ - end - - names_pub, names_pri = sort_api_names(keys(docs)) - docs_pub = join(map(gen_doc, names_pub), "\n\n") - docs_pri = join(map(gen_doc, names_pri), "\n\n") - if isempty(names_pri) - docstrings = "" - else - docstrings = """ - Public APIs - ^^^^^^^^^^^ - """ - end - docstrings *= docs_pub - - if !isempty(names_pri) - docstrings *= """ - - Internal APIs - ^^^^^^^^^^^^^ - - .. note:: - - Document and signatures for internal API functions might be incomplete. 
- - """ * docs_pri - end - - key = mx.format(mx.DOC_EMBED_ANCHOR, key) - println(io, replace(contents, key, docstrings)) - end -end - -################################################################################# -# Build Documents -################################################################################# -extract_doc("context.rst", "context.jl") - -extract_doc("ndarray.rst", "ndarray.jl") -embed_mxnet_api("ndarray.rst", "ndarray", mx._import_ndarray_functions) - -extract_doc("symbolic-node.rst", "symbolic-node.jl") -embed_mxnet_api("symbolic-node.rst", "symbolic-node", mx._import_atomic_symbol_creators) - -extract_doc("executor.rst", "executor.jl") - -extract_doc("initializer.rst", "initializer.jl") -extract_doc("callback.rst", "callback.jl") -extract_doc("model.rst", "model.jl") - -extract_doc("optimizer.rst", "optimizer.jl", "optimizers/sgd.jl", "optimizers/adam.jl") -extract_doc("metric.rst", "metric.jl") - -extract_doc("io.rst", "io.jl") -embed_mxnet_api("io.rst", "io", mx._import_io_iterators) - -extract_doc("visualize.rst", "visualize.jl") -extract_doc("nn-factory.rst", "nn-factory.jl") diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 7454faa99eee..000000000000 --- a/docs/conf.py +++ /dev/null @@ -1,277 +0,0 @@ -# -*- coding: utf-8 -*- -# -# MXNet documentation build configuration file, created by -# sphinx-quickstart on Thu Nov 13 00:43:40 2014. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-#sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath("sphinx")) -import julia - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - 'sphinx.ext.mathjax', - 'julia' -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'MXNet' -copyright = u'2015, pluskid' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '0.0.8' -# The full version, including alpha/beta/rc tags. -release = '0.0.8' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. 
-#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -primary_domain = 'jl' -highlight_language = 'julia' - -# If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin theme -html_theme = 'default' - -import os -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' - -if not on_rtd: # only import and set the theme if we're building docs locally - try: - import sphinx_rtd_theme - html_theme = "sphinx_rtd_theme" - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - except: - pass - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. 
-#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -#html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. 
-htmlhelp_basename = 'MXNetdoc' - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ('index', 'MXNet.tex', u'MXNet Documentation', - u'pluskid', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'mxnet', u'MXNet Documentation', - [u'pluskid'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'MXNet', u'MXNet Documentation', - u'pluskid', 'MXNet', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. 
-#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 7e95b6a7c10e..000000000000 --- a/docs/index.rst +++ /dev/null @@ -1,55 +0,0 @@ -%%%%%%%%%%%%%%%%%%% -MXNet Documentation -%%%%%%%%%%%%%%%%%%% - -`MXNet.jl `_ is `Julia -`_ package of `dmlc/mxnet -`_. MXNet.jl brings flexible and efficient GPU -computing and state-of-art deep learning to Julia. Some highlight of features -include: - -- Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. -- Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. - -For more details, see documentation below. Please also checkout the `examples -`_ directory. - -.. toctree:: - :maxdepth: 2 - :caption: Tutorials - - tutorial/mnist - tutorial/char-lstm - -.. toctree:: - :maxdepth: 2 - :caption: User's Guide - - user-guide/install - user-guide/overview - user-guide/faq - -.. toctree:: - :maxdepth: 1 - :caption: API Documentation - - api/context - api/model - api/initializer - api/optimizer - api/callback - api/metric - api/io - api/ndarray - api/symbolic-node - api/nn-factory - api/executor - api/visualize - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff --git a/docs/sphinx/julia.py b/docs/sphinx/julia.py deleted file mode 100644 index 49d6af95b545..000000000000 --- a/docs/sphinx/julia.py +++ /dev/null @@ -1,23 +0,0 @@ -# Julia domain for Sphinx (stolen from StrPack.jl) -# http://sphinx.pocoo.org/domains.html - -import re -import sphinx.domains.python - -sphinx.domains.python.py_sig_re = re.compile( - r'''^ ([\w.]*\.)? 
# class name(s) - ([^\s(]+) \s* # thing name - (?: \((.*)\) # optional: arguments - (?:\s* -> \s* (.*))? # return annotation - )? $ # and nothing more - ''', re.VERBOSE | re.UNICODE) - -class JuliaDomain(sphinx.domains.python.PythonDomain): - """Julia language domain.""" - name = 'jl' - label = 'Julia' - -JuliaDomain.directives['type'] = JuliaDomain.directives['class'] - -def setup(app): - app.add_domain(JuliaDomain) diff --git a/docs/src/index.md b/docs/src/index.md index e69de29bb2d1..7e95b6a7c10e 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -0,0 +1,55 @@ +%%%%%%%%%%%%%%%%%%% +MXNet Documentation +%%%%%%%%%%%%%%%%%%% + +`MXNet.jl `_ is `Julia +`_ package of `dmlc/mxnet +`_. MXNet.jl brings flexible and efficient GPU +computing and state-of-art deep learning to Julia. Some highlight of features +include: + +- Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. +- Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. + +For more details, see documentation below. Please also checkout the `examples +`_ directory. + +.. toctree:: + :maxdepth: 2 + :caption: Tutorials + + tutorial/mnist + tutorial/char-lstm + +.. toctree:: + :maxdepth: 2 + :caption: User's Guide + + user-guide/install + user-guide/overview + user-guide/faq + +.. 
toctree:: + :maxdepth: 1 + :caption: API Documentation + + api/context + api/model + api/initializer + api/optimizer + api/callback + api/metric + api/io + api/ndarray + api/symbolic-node + api/nn-factory + api/executor + api/visualize + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/docs/tutorial/char-lstm.rst b/docs/src/tutorial/char-lstm.rst similarity index 100% rename from docs/tutorial/char-lstm.rst rename to docs/src/tutorial/char-lstm.rst diff --git a/docs/tutorial/images/LSTM3-chain.png b/docs/src/tutorial/images/LSTM3-chain.png similarity index 100% rename from docs/tutorial/images/LSTM3-chain.png rename to docs/src/tutorial/images/LSTM3-chain.png diff --git a/docs/tutorial/images/char-lstm-vis.svg b/docs/src/tutorial/images/char-lstm-vis.svg similarity index 100% rename from docs/tutorial/images/char-lstm-vis.svg rename to docs/src/tutorial/images/char-lstm-vis.svg diff --git a/docs/tutorial/mnist.rst b/docs/src/tutorial/mnist.rst similarity index 100% rename from docs/tutorial/mnist.rst rename to docs/src/tutorial/mnist.rst diff --git a/docs/user-guide/faq.rst b/docs/src/user-guide/faq.rst similarity index 100% rename from docs/user-guide/faq.rst rename to docs/src/user-guide/faq.rst diff --git a/docs/user-guide/install.rst b/docs/src/user-guide/install.rst similarity index 100% rename from docs/user-guide/install.rst rename to docs/src/user-guide/install.rst diff --git a/docs/user-guide/overview.rst b/docs/src/user-guide/overview.rst similarity index 100% rename from docs/user-guide/overview.rst rename to docs/src/user-guide/overview.rst From 504aec63352f484fe915c902f2d9f92e6cc86ede Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 6 Jul 2016 04:19:29 +0900 Subject: [PATCH 347/630] do some housekeeping and start moving things to markdown --- docs/make.jl | 4 +- docs/src/api.md | 5 ++ docs/src/index.md | 61 ++++--------------- docs/src/tutorial.md | 0 .../tutorial/{char-lstm.rst 
=> char-lstm.md} | 0 docs/src/tutorial/{mnist.rst => mnist.md} | 0 docs/src/user-guide.md | 0 7 files changed, 20 insertions(+), 50 deletions(-) create mode 100644 docs/src/api.md create mode 100644 docs/src/tutorial.md rename docs/src/tutorial/{char-lstm.rst => char-lstm.md} (100%) rename docs/src/tutorial/{mnist.rst => mnist.md} (100%) create mode 100644 docs/src/user-guide.md diff --git a/docs/make.jl b/docs/make.jl index 1a20e2b93826..f5569035bc69 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,3 +1,5 @@ using Documenter, MXNet -makedocs() +makedocs( + modules = [MXNet] +) diff --git a/docs/src/api.md b/docs/src/api.md new file mode 100644 index 000000000000..2316d3eda140 --- /dev/null +++ b/docs/src/api.md @@ -0,0 +1,5 @@ +# API + +```@contents +Pages = ["api/context.md"] +``` diff --git a/docs/src/index.md b/docs/src/index.md index 7e95b6a7c10e..0ad69711e41c 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -1,55 +1,18 @@ -%%%%%%%%%%%%%%%%%%% -MXNet Documentation -%%%%%%%%%%%%%%%%%%% +# MXNet Documentation -`MXNet.jl `_ is `Julia -`_ package of `dmlc/mxnet -`_. MXNet.jl brings flexible and efficient GPU +[`MXNet.jl`](https://github.com/dmlc/MXNet.jl>) is the +[`Julia`](http://julialang.org/) package of +[`dmlc/mxnet`](https://github.com/dmlc/mxnet). `MXNet.jl` brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of features include: -- Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. -- Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. +* Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. +* Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. -For more details, see documentation below. Please also checkout the `examples -`_ directory. - -.. 
toctree:: - :maxdepth: 2 - :caption: Tutorials - - tutorial/mnist - tutorial/char-lstm - -.. toctree:: - :maxdepth: 2 - :caption: User's Guide - - user-guide/install - user-guide/overview - user-guide/faq - -.. toctree:: - :maxdepth: 1 - :caption: API Documentation - - api/context - api/model - api/initializer - api/optimizer - api/callback - api/metric - api/io - api/ndarray - api/symbolic-node - api/nn-factory - api/executor - api/visualize - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` +For more details, see documentation below. Please also checkout the +[examples](https://github.com/dmlc/MXNet.jl/tree/master/examples) directory. +```@contents +Pages = ["tutorial.md", "user-guide.md", "api.md"] +Depth = 2 +``` diff --git a/docs/src/tutorial.md b/docs/src/tutorial.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/docs/src/tutorial/char-lstm.rst b/docs/src/tutorial/char-lstm.md similarity index 100% rename from docs/src/tutorial/char-lstm.rst rename to docs/src/tutorial/char-lstm.md diff --git a/docs/src/tutorial/mnist.rst b/docs/src/tutorial/mnist.md similarity index 100% rename from docs/src/tutorial/mnist.rst rename to docs/src/tutorial/mnist.md diff --git a/docs/src/user-guide.md b/docs/src/user-guide.md new file mode 100644 index 000000000000..e69de29bb2d1 From 733d46d1d6f85923d423fe3b05631b0c219edced Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 12 Jul 2016 07:14:35 +0900 Subject: [PATCH 348/630] setup api docs with autodoc --- docs/src/api.md | 4 ++-- docs/src/api/callback.md | 6 ++++++ docs/src/api/context.md | 7 +++---- docs/src/api/executor.md | 6 ++++++ docs/src/api/initializer.md | 6 ++++++ docs/src/api/io.md | 6 ++++++ docs/src/api/kvstore.md | 6 ++++++ docs/src/api/metric.md | 6 ++++++ docs/src/api/model.md | 6 ++++++ docs/src/api/ndarry.md | 6 ++++++ docs/src/api/nn-factory.md | 6 ++++++ docs/src/api/optimizer.md | 10 ++++++++++ docs/src/api/optimizers/adam.md | 6 
++++++ docs/src/api/optimizers/sgd.md | 6 ++++++ docs/src/api/random.md | 6 ++++++ docs/src/api/symbolic-node.md | 6 ++++++ docs/src/api/visualize.md | 6 ++++++ 17 files changed, 99 insertions(+), 6 deletions(-) create mode 100644 docs/src/api/callback.md create mode 100644 docs/src/api/executor.md create mode 100644 docs/src/api/initializer.md create mode 100644 docs/src/api/io.md create mode 100644 docs/src/api/kvstore.md create mode 100644 docs/src/api/metric.md create mode 100644 docs/src/api/model.md create mode 100644 docs/src/api/ndarry.md create mode 100644 docs/src/api/nn-factory.md create mode 100644 docs/src/api/optimizer.md create mode 100644 docs/src/api/optimizers/adam.md create mode 100644 docs/src/api/optimizers/sgd.md create mode 100644 docs/src/api/random.md create mode 100644 docs/src/api/symbolic-node.md create mode 100644 docs/src/api/visualize.md diff --git a/docs/src/api.md b/docs/src/api.md index 2316d3eda140..fa48c540721a 100644 --- a/docs/src/api.md +++ b/docs/src/api.md @@ -1,5 +1,5 @@ -# API +# API Documentation ```@contents -Pages = ["api/context.md"] +Pages = ["api/context.md", "api/model.md", "api/initializers.md", "api/optimizers.md", "api/callbacks.md", "api/metric.md", "api/io.md", "api/ndarray.md", "api/symbolic-node.md", "api/nn-factory.md", "api/executor.md", "api/visualize.md"] ``` diff --git a/docs/src/api/callback.md b/docs/src/api/callback.md new file mode 100644 index 000000000000..f67811cc41fe --- /dev/null +++ b/docs/src/api/callback.md @@ -0,0 +1,6 @@ +# Callback in training + +```@autodocs +Modules = [MXNet.mx] +Pages = ["callback.jl"] +``` diff --git a/docs/src/api/context.md b/docs/src/api/context.md index 2c2ac8f7d7ac..93ccf83e51ba 100644 --- a/docs/src/api/context.md +++ b/docs/src/api/context.md @@ -1,7 +1,6 @@ # Context -```@docs -mx.Context -mx.cpu -mx.gpu +```@autodocs +Modules = [MXNet.mx] +Pages = ["context.jl"] ``` diff --git a/docs/src/api/executor.md b/docs/src/api/executor.md new file mode 100644 index 
000000000000..b560c7a0864d --- /dev/null +++ b/docs/src/api/executor.md @@ -0,0 +1,6 @@ +# Executor + +```@autodocs +Modules = [MXNet.mx] +Pages = ["executor.jl"] +``` diff --git a/docs/src/api/initializer.md b/docs/src/api/initializer.md new file mode 100644 index 000000000000..d0aad2def4cd --- /dev/null +++ b/docs/src/api/initializer.md @@ -0,0 +1,6 @@ +# Initializer + +```@autodocs +Modules = [MXNet.mx] +Pages = ["initializer.jl"] +``` diff --git a/docs/src/api/io.md b/docs/src/api/io.md new file mode 100644 index 000000000000..43439a930378 --- /dev/null +++ b/docs/src/api/io.md @@ -0,0 +1,6 @@ +# Data Providers + +```@autodocs +Modules = [MXNet.mx] +Pages = ["io.jl"] +``` diff --git a/docs/src/api/kvstore.md b/docs/src/api/kvstore.md new file mode 100644 index 000000000000..34a5027f85fb --- /dev/null +++ b/docs/src/api/kvstore.md @@ -0,0 +1,6 @@ +# Key-Value Store + +```@autodocs +Modules = [MXNet.mx] +Pages = ["kvstore.jl"] +``` diff --git a/docs/src/api/metric.md b/docs/src/api/metric.md new file mode 100644 index 000000000000..3c800b3a8152 --- /dev/null +++ b/docs/src/api/metric.md @@ -0,0 +1,6 @@ +# Evaluation Metrics + +```@autodocs +Modules = [MXNet.mx] +Pages = ["metric.jl"] +``` diff --git a/docs/src/api/model.md b/docs/src/api/model.md new file mode 100644 index 000000000000..739c3ea7bf9a --- /dev/null +++ b/docs/src/api/model.md @@ -0,0 +1,6 @@ +# Model + +```@autodocs +Modules = [MXNet.mx] +Pages = ["model.jl"] +``` diff --git a/docs/src/api/ndarry.md b/docs/src/api/ndarry.md new file mode 100644 index 000000000000..4c6832c58969 --- /dev/null +++ b/docs/src/api/ndarry.md @@ -0,0 +1,6 @@ +# NDArray API + +```@autodocs +Modules = [MXNet.mx] +Pages = ["ndarray.jl"] +``` diff --git a/docs/src/api/nn-factory.md b/docs/src/api/nn-factory.md new file mode 100644 index 000000000000..6aa6c4e5bb8e --- /dev/null +++ b/docs/src/api/nn-factory.md @@ -0,0 +1,6 @@ +# Neural Network Factory + +```@autodocs +Modules = [MXNet.mx] +Pages = ["nn-factory.jl"] +``` diff 
--git a/docs/src/api/optimizer.md b/docs/src/api/optimizer.md new file mode 100644 index 000000000000..b67a369587e4 --- /dev/null +++ b/docs/src/api/optimizer.md @@ -0,0 +1,10 @@ +# Optimizers + +```@contents +Pages = ["optimizers/adam.md", "optimizers/sgd.md"] +``` + +```@autodocs +Modules = [MXNet.mx] +Pages = ["optimizer.jl"] +``` diff --git a/docs/src/api/optimizers/adam.md b/docs/src/api/optimizers/adam.md new file mode 100644 index 000000000000..269d82a9c80e --- /dev/null +++ b/docs/src/api/optimizers/adam.md @@ -0,0 +1,6 @@ +# ADAM + +```@autodocs +Modules = [MXNet.mx] +Pages = ["adam.jl"] +``` diff --git a/docs/src/api/optimizers/sgd.md b/docs/src/api/optimizers/sgd.md new file mode 100644 index 000000000000..506c7b5c266d --- /dev/null +++ b/docs/src/api/optimizers/sgd.md @@ -0,0 +1,6 @@ +# Stochastic Gradient Descent + +```@autodocs +Modules = [MXNet.mx] +Pages = ["sgd.jl"] +``` diff --git a/docs/src/api/random.md b/docs/src/api/random.md new file mode 100644 index 000000000000..91372836c467 --- /dev/null +++ b/docs/src/api/random.md @@ -0,0 +1,6 @@ +# Random + +```@autodocs +Modules = [MXNet.mx] +Pages = ["random.jl"] +``` diff --git a/docs/src/api/symbolic-node.md b/docs/src/api/symbolic-node.md new file mode 100644 index 000000000000..ef731d9f7d00 --- /dev/null +++ b/docs/src/api/symbolic-node.md @@ -0,0 +1,6 @@ +# Symbolic API + +```@autodocs +Modules = [MXNet.mx] +Pages = ["symbolic-node.jl"] +``` diff --git a/docs/src/api/visualize.md b/docs/src/api/visualize.md new file mode 100644 index 000000000000..429a927012e4 --- /dev/null +++ b/docs/src/api/visualize.md @@ -0,0 +1,6 @@ +# Network Visualization + +```@autodocs +Modules = [MXNet.mx] +Pages = ["visualize.jl"] +``` From 9113b21721b8f39daf5a7238f755c9826e7dcc14 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 13 Jul 2016 02:44:50 +0900 Subject: [PATCH 349/630] rough first pass .rst -> .md --- docs/src/api/io.md | 3 + docs/src/api/metric.md | 4 + docs/src/api/model.md | 3 + 
docs/src/api/nn-factory.md | 5 +- docs/src/api/optimizer.md | 10 +- docs/src/api/random.md | 6 - src/callback.jl | 80 +++++---- src/executor.jl | 40 ++--- src/initializer.jl | 89 +++++----- src/io.jl | 105 +++++------ src/metric.jl | 63 +++---- src/model.jl | 109 ++++++------ src/ndarray.jl | 347 +++++++++++++++++-------------------- src/nn-factory.jl | 26 +-- src/optimizer.jl | 126 ++++++-------- src/symbolic-node.jl | 257 +++++++++++++-------------- src/visualize.jl | 17 +- 17 files changed, 583 insertions(+), 707 deletions(-) delete mode 100644 docs/src/api/random.md diff --git a/docs/src/api/io.md b/docs/src/api/io.md index 43439a930378..32a7d14f9144 100644 --- a/docs/src/api/io.md +++ b/docs/src/api/io.md @@ -1,5 +1,8 @@ # Data Providers +Data providers are wrappers that load external data, be it images, text, or general tensors, +and split it into mini-batches so that the model can consume the data in a uniformed way. + ```@autodocs Modules = [MXNet.mx] Pages = ["io.jl"] diff --git a/docs/src/api/metric.md b/docs/src/api/metric.md index 3c800b3a8152..63cca0cc41ba 100644 --- a/docs/src/api/metric.md +++ b/docs/src/api/metric.md @@ -1,5 +1,9 @@ # Evaluation Metrics +Evaluation metrics provide a way to evaluate the performance of a learned model. +This is typically used during training to monitor performance on the validation +set. + ```@autodocs Modules = [MXNet.mx] Pages = ["metric.jl"] diff --git a/docs/src/api/model.md b/docs/src/api/model.md index 739c3ea7bf9a..f793c7c406c7 100644 --- a/docs/src/api/model.md +++ b/docs/src/api/model.md @@ -1,5 +1,8 @@ # Model +The model API provides convenient high-level interface to do training and predicting on +a network described using the symbolic API. 
+ ```@autodocs Modules = [MXNet.mx] Pages = ["model.jl"] diff --git a/docs/src/api/nn-factory.md b/docs/src/api/nn-factory.md index 6aa6c4e5bb8e..41474383aa76 100644 --- a/docs/src/api/nn-factory.md +++ b/docs/src/api/nn-factory.md @@ -1,4 +1,7 @@ -# Neural Network Factory +# Neural Network Factora + +Neural network factory provide convenient helper functions to define +common neural networks. ```@autodocs Modules = [MXNet.mx] diff --git a/docs/src/api/optimizer.md b/docs/src/api/optimizer.md index b67a369587e4..043529a670c1 100644 --- a/docs/src/api/optimizer.md +++ b/docs/src/api/optimizer.md @@ -1,10 +1,12 @@ # Optimizers -```@contents -Pages = ["optimizers/adam.md", "optimizers/sgd.md"] -``` - ```@autodocs Modules = [MXNet.mx] Pages = ["optimizer.jl"] ``` + +## Built-in optimizers + +```@contents +Pages = ["optimizers/adam.md", "optimizers/sgd.md"] +``` diff --git a/docs/src/api/random.md b/docs/src/api/random.md deleted file mode 100644 index 91372836c467..000000000000 --- a/docs/src/api/random.md +++ /dev/null @@ -1,6 +0,0 @@ -# Random - -```@autodocs -Modules = [MXNet.mx] -Pages = ["random.jl"] -``` diff --git a/src/callback.jl b/src/callback.jl index e18184b1131a..3aca66a7bbf0 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -1,27 +1,22 @@ -#=doc -Callbacks in training -===================== -=# +""" + AbstractCallback -#=doc -.. class:: AbstractCallback - - Abstract type of callback functions used in training. -=# +Abstract type of callback functions used in training. +""" abstract AbstractCallback -#=doc -.. class:: AbstractBatchCallback +""" + AbstractBatchCallback - Abstract type of callbacks to be called every mini-batch. -=# +Abstract type of callbacks to be called every mini-batch. +""" abstract AbstractBatchCallback <: AbstractCallback -#=doc -.. class:: AbstractEpochCallback +""" + AbstractEpochCallback - Abstract type of callbacks to be called every epoch. -=# +Abstract type of callbacks to be called every epoch. 
+""" abstract AbstractEpochCallback <: AbstractCallback type BatchCallback <: AbstractBatchCallback @@ -30,15 +25,16 @@ type BatchCallback <: AbstractBatchCallback callback :: Function end -#=doc -.. function:: every_n_batch(callback :: Function, n :: Int; call_on_0 = false) +""" + every_n_batch(callback :: Function, n :: Int; call_on_0 = false) - A convenient function to construct a callback that runs every ``n`` mini-batches. +A convenient function to construct a callback that runs every ``n`` mini-batches. - :param Int call_on_0: keyword argument, default false. Unless set, the callback +# Arguments +* `call_on_0::Bool`: keyword argument, default false. Unless set, the callback will **not** be run on batch 0. - For example, the :func:`speedometer` callback is defined as +For example, the :func:`speedometer` callback is defined as .. code-block:: julia @@ -51,7 +47,7 @@ end end :seealso: :func:`every_n_epoch`, :func:`speedometer`. -=# +""" function every_n_batch(callback :: Function, n :: Int; call_on_0 :: Bool = false) BatchCallback(n, call_on_0, callback) end @@ -65,15 +61,16 @@ function Base.call(cb :: BatchCallback, state :: OptimizationState) end end -#=doc -.. function:: speedometer(; frequency=50) +""" + speedometer(; frequency=50) - Create an :class:`AbstractBatchCallback` that measure the training speed +Create an :class:`AbstractBatchCallback` that measure the training speed (number of samples processed per second) every k mini-batches. - :param Int frequency: keyword argument, default 50. The frequency (number of +# Arguments +* Int frequency: keyword argument, default 50. The frequency (number of min-batches) to measure and report the speed. -=# +""" function speedometer(;frequency::Int=50) cl_tic = 0 every_n_batch(frequency, call_on_0=true) do state :: OptimizationState @@ -95,18 +92,18 @@ type EpochCallback <: AbstractEpochCallback callback :: Function end -#=doc -.. 
function:: every_n_epoch(callback :: Function, n :: Int; call_on_0 = false) +""" + every_n_epoch(callback :: Function, n :: Int; call_on_0 = false) - A convenient function to construct a callback that runs every ``n`` full data-passes. +A convenient function to construct a callback that runs every ``n`` full data-passes. - :param Int call_on_0: keyword argument, default false. Unless set, the callback +* Int call_on_0: keyword argument, default false. Unless set, the callback will **not** be run on epoch 0. Epoch 0 means no training has been performed yet. This is useful if you want to inspect the randomly initialized model that has not seen any data yet. :seealso: :func:`every_n_iter`. -=# +""" function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end @@ -120,20 +117,21 @@ function Base.call{T<:Real}(cb :: EpochCallback, model :: Any, state :: Optimiza end end -#=doc -.. function:: do_checkpoint(prefix; frequency=1, save_epoch_0=false) +""" + do_checkpoint(prefix; frequency=1, save_epoch_0=false) - Create an :class:`AbstractEpochCallback` that save checkpoints of the model to disk. - The checkpoints can be loaded back later on. +Create an :class:`AbstractEpochCallback` that save checkpoints of the model to disk. +The checkpoints can be loaded back later on. - :param AbstractString prefix: the prefix of the filenames to save the model. The model +# Arguments +* `prefix::AbstractString`: the prefix of the filenames to save the model. The model architecture will be saved to prefix-symbol.json, while the weights will be saved to prefix-0012.params, for example, for the 12-th epoch. - :param Int frequency: keyword argument, default 1. The frequency (measured in epochs) to +* Int frequency: keyword argument, default 1. The frequency (measured in epochs) to save checkpoints. - :param Bool save_epoch_0: keyword argument, default false. 
Whether we should save a +* Bool save_epoch_0: keyword argument, default false. Whether we should save a checkpoint for epoch 0 (model initialized but not seen any data yet). -=# +""" function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) mkpath(dirname(prefix)) every_n_epoch(frequency, call_on_0=save_epoch_0) do model, state, metric diff --git a/src/executor.jl b/src/executor.jl index 86f3f65921f4..edebc79907c7 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -1,15 +1,10 @@ -#=doc -Executor -======== -=# - -#=doc -.. class:: Executor - - An executor is a realization of a symbolic architecture defined by a :class:`SymbolicNode`. - The actual forward and backward computation specified by the network architecture can - be carried out with an executor. -=# +""" + Executor + +An executor is a realization of a symbolic architecture defined by a :class:`SymbolicNode`. +The actual forward and backward computation specified by the network architecture can +be carried out with an executor. +""" type Executor handle :: MX_ExecutorHandle symbol :: SymbolicNode @@ -70,21 +65,22 @@ function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDA return (args_hdr, args_vec) end -#=doc -.. function:: bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) +""" + bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) - Create an :class:`Executor` by binding a :class:`SymbolicNode` to concrete :class:`NDArray`. +Create an :class:`Executor` by binding a :class:`SymbolicNode` to concrete :class:`NDArray`. - :param SymbolicNode sym: the network architecture describing the computation graph. - :param Context ctx: the context on which the computation should run. - :param args: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete +# Arguments +* `sym::SymbolicNode`: the network architecture describing the computation graph. 
+* `ctx::Context`: the context on which the computation should run. +* `args`: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete arrays for all the inputs in the network architecture. The inputs typically include network parameters (weights, bias, filters, etc.), data and labels. See :func:`list_arguments` and :func:`infer_shape`. - :param args_grad: TODO - :param aux_states: - :param grad_req: -=# +* `args_grad`: +* `aux_states`: +* `grad_req`: +""" @enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 function bind(self :: SymbolicNode, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), diff --git a/src/initializer.jl b/src/initializer.jl index 8be33bdf66d2..7ee9920a9d12 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -1,27 +1,20 @@ -#=doc -Initializers -============ -Interface ---------- -=# +""" + AbstractInitializer -#=doc -.. class:: AbstractInitializer - - The abstract base class for all initializers. +The abstract base class for all initializers. To define a new initializer, it is enough to derive a new type, and implement one or more of the following methods: -.. function:: _init_weight(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. function:: _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. function:: _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -.. 
function:: _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + _init_weight(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + _init_gamma(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + _init_beta(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) Or, if full behavior customization is needed, override the following function -.. function:: init(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) -=# + init(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) +""" abstract AbstractInitializer function init{T<:AbstractInitializer}(self :: T, name :: Base.Symbol, array :: NDArray) @@ -60,67 +53,63 @@ function _init_default(self :: AbstractInitializer, name :: Base.Symbol, array : error("Do not know how to init $name") end -#=doc -Built-in initializers ---------------------- -=# -#=doc -.. class:: UniformInitializer +""" + UniformInitializer - Initialize weights according to a uniform distribution within the provided scale. -=# +Initialize weights according to a uniform distribution within the provided scale. +""" immutable UniformInitializer <: AbstractInitializer scale :: AbstractFloat end -#=doc -.. function UniformInitializer(scale=0.07) +""" + UniformInitializer(scale=0.07) - Construct a :class:`UniformInitializer` with the specified scale. -=# +Construct a :class:`UniformInitializer` with the specified scale. +""" UniformInitializer() = UniformInitializer(0.07) function _init_weight(self :: UniformInitializer, name :: Base.Symbol, array :: NDArray) rand!(-self.scale, self.scale, array) end -#=doc -.. class:: NormalInitializer +""" + NormalInitializer - Initialize weights according to a univariate Gaussian distribution. -=# +Initialize weights according to a univariate Gaussian distribution. 
+""" immutable NormalInitializer <: AbstractInitializer μ :: AbstractFloat σ :: AbstractFloat end -#=doc -.. function:: NormalIninitializer(; mu=0, sigma=0.01) +""" + NormalIninitializer(; mu=0, sigma=0.01) - Construct a :class:`NormalInitializer` with mean ``mu`` and variance ``sigma``. -=# +Construct a :class:`NormalInitializer` with mean ``mu`` and variance ``sigma``. +""" NormalInitializer(; mu=0, sigma=0.01) = NormalInitializer(mu, sigma) function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) randn!(self.μ, self.σ, array) end -#=doc -.. class:: XavierInitializer +""" + XavierInitializer - The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding - the difficulty of training deep feedforward neuralnetworks*. +The initializer documented in the paper [Bengio and Glorot 2010]: *Understanding +the difficulty of training deep feedforward neuralnetworks*. - There are several different version of the XavierInitializer used in the wild. - The general idea is that the variance of the initialization distribution is controlled - by the dimensionality of the input and output. As a distribution one can either choose - a normal distribution with μ = 0 and σ² or a uniform distribution from -σ to σ. +There are several different version of the XavierInitializer used in the wild. +The general idea is that the variance of the initialization distribution is controlled +by the dimensionality of the input and output. As a distribution one can either choose +a normal distribution with μ = 0 and σ² or a uniform distribution from -σ to σ. - Several different ways of calculating the variance are given in the literature or are - used by various libraries. +Several different ways of calculating the variance are given in the literature or are +used by various libraries. - - [Bengio and Glorot 2010]: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1)`` - - [K. He, X. Zhang, S. Ren, and J. 
Sun 2015]: ``mx.XavierInitializer(distribution = mx.xv_gaussian, regularization = mx.xv_in, magnitude = 2)`` - - caffe_avg: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 3)`` -=# +* [Bengio and Glorot 2010]: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1)`` +* [K. He, X. Zhang, S. Ren, and J. Sun 2015]: ``mx.XavierInitializer(distribution = mx.xv_gaussian, regularization = mx.xv_in, magnitude = 2)`` +* caffe_avg: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 3)`` +""" @enum XavierDistribution xv_uniform xv_normal @enum XavierRegularization xv_avg xv_in xv_out diff --git a/src/io.jl b/src/io.jl index 46fbcfe86a2d..0e7d5c22490c 100644 --- a/src/io.jl +++ b/src/io.jl @@ -1,31 +1,21 @@ -#=doc -Data Providers -============== -Interface ---------- +""" + AbstractDataProvider -Data providers are wrappers that load external data, be it images, text, or general tensors, -and split it into mini-batches so that the model can consume the data in a uniformed way. -=# +The root type for all data provider. A data provider should implement the following interfaces: -#=doc -.. class:: AbstractDataProvider - - The root type for all data provider. A data provider should implement the following interfaces: - - .. function:: get_batch_size(provider) -> Int + get_batch_size(provider) -> Int :param AbstractDataProvider provider: the data provider. :return: the mini-batch size of the provided data. All the provided data should have the same mini-batch size (i.e. the last dimension). - .. function:: provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} + provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} :param AbstractDataProvider provider: the data provider. :return: a vector of (name, shape) pairs describing the names of the data it provides, and the corresponding shapes. - .. 
function:: provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} + provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} :param AbstractDataProvider provider: the data provider. :return: a vector of (name, shape) pairs describing the names of the labels it provides, and @@ -65,25 +55,25 @@ and split it into mini-batches so that the model can consume the data in a unifo The detailed interface functions for the iterator API is listed below: -.. function:: Base.eltype(provider) -> AbstractDataBatch + Base.eltype(provider) -> AbstractDataBatch :param AbstractDataProvider provider: the data provider. :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. -.. function:: Base.start(provider) -> AbstractDataProviderState + Base.start(provider) -> AbstractDataProviderState :param AbstractDataProvider provider: the data provider. This function is always called before iterating into the dataset. It should initialize the iterator, reset the index, and do data shuffling if needed. -.. function:: Base.done(provider, state) -> Bool + Base.done(provider, state) -> Bool :param AbstractDataProvider provider: the data provider. :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. :return: true if there is no more data to iterate in this dataset. -.. function:: Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) + Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) :param AbstractDataProvider provider: the data provider. :return: the current data batch, and the state for the next iteration. @@ -118,29 +108,29 @@ of the built-in :class:`MXDataProvider` for example. # ... end end -=# +""" abstract AbstractDataProvider -#=doc -.. class:: AbstractDataProviderState +""" + AbstractDataProviderState Base type for data provider states. -=# +""" abstract AbstractDataProviderState -#=doc -.. 
class:: AbstractDataBatch +""" + AbstractDataBatch Base type for a data mini-batch. It should implement the following interfaces: - .. function:: count_samples(provider, batch) -> Int + count_samples(provider, batch) -> Int :param AbstractDataBatch batch: the data batch object. :return: the number of samples in this batch. This number should be greater than 0, but less than or equal to the batch size. This is used to indicate at the end of the data set, there might not be enough samples for a whole mini-batch. - .. function:: get_data(provider, batch) -> Vector{NDArray} + get_data(provider, batch) -> Vector{NDArray} :param AbstractDataProvider provider: the data provider. :param AbstractDataBatch batch: the data batch object. @@ -151,7 +141,7 @@ abstract AbstractDataProviderState :func:`count_samples` returns a value less than the batch size. In this case, the data provider is free to pad the remaining contents with any value. - .. function:: get_label(provider, batch) -> Vector{NDArray} + get_label(provider, batch) -> Vector{NDArray} :param AbstractDataProvider provider: the data provider. :param AbstractDataBatch batch: the data batch object. @@ -160,7 +150,7 @@ abstract AbstractDataProviderState The following utility functions will be automatically defined. - .. function:: get(provider, batch, name) -> NDArray + get(provider, batch, name) -> NDArray :param AbstractDataProvider provider: the data provider. :param AbstractDataBatch batch: the data batch object. @@ -169,7 +159,7 @@ abstract AbstractDataProviderState or :func:`provide_label() `. :return: the corresponding data array corresponding to that name. - .. function:: load_data!(provider, batch, targets) + load_data!(provider, batch, targets) :param AbstractDataProvider provider: the data provider. :param AbstractDataBatch batch: the data batch object. 
@@ -185,7 +175,7 @@ abstract AbstractDataProviderState This utility function is used in data parallelization, where a mini-batch is splited and computed on several different devices. - .. function:: load_label!(provider, batch, targets) + load_label!(provider, batch, targets) :param AbstractDataProvider provider: the data provider. :param AbstractDataBatch batch: the data batch object. @@ -193,15 +183,15 @@ abstract AbstractDataProviderState :type targets: Vector{Vector{SlicedNDArray}} The same as :func:`load_data!`, except that this is for loading labels. -=# +""" abstract AbstractDataBatch -#=doc -.. class:: DataBatch +""" + DataBatch A basic subclass of :class:`AbstractDataBatch`, that implement the interface by accessing member fields. -=# +""" type DataBatch <: AbstractDataBatch data :: Vector{NDArray} label :: Vector{NDArray} @@ -211,11 +201,11 @@ count_samples(batch :: DataBatch) = batch.count get_data{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batch.data get_label{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batch.label -#=doc -.. class:: SlicedNDArray +""" + SlicedNDArray A alias type of ``Tuple{UnitRange{Int},NDArray}``. -=# +""" typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} function _load_general!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, @@ -264,17 +254,11 @@ end eachbatch(provider :: AbstractDataProvider) = provider -#=doc -Built-in data providers ------------------------ -=# - -################################################################################ -#=doc -.. class:: ArrayDataProvider +""" + ArrayDataProvider A convenient tool to iterate :class:`NDArray` or Julia ``Array``. -=# +""" type ArrayDataProvider <: AbstractDataProvider data_arrays :: Vector{Array{MX_float}} data_names :: Vector{Base.Symbol} @@ -290,8 +274,8 @@ type ArrayDataProvider <: AbstractDataProvider label_batch :: Vector{NDArray} end -#=doc -.. 
function:: ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) +""" + ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) Construct a data provider from :class:`NDArray` or Julia Arrays. @@ -314,7 +298,7 @@ end TODO: remove ``data_padding`` and ``label_padding``, and implement rollover that copies the last or first several training samples to feed the padding. -=# +""" # Julia's type system is sometimes very frustrating. You cannot specify a function # with argument Vector{Pair} to expect to be matched when calling with the parameter # [:foo => zeros(2,3), :bar => zeros(3)] because the type inference gives very specific @@ -463,18 +447,12 @@ function get_label(provider :: ArrayDataProvider, batch :: ArrayDataBatch) end -#=doc -libmxnet data providers ------------------------ -=# - -################################################################################ -#=doc -.. class:: MXDataProvider +""" + MXDataProvider - A data provider that wrap built-in data iterators from libmxnet. See below for - a list of built-in data iterators. -=# +A data provider that wrap built-in data iterators from libmxnet. See below for +a list of built-in data iterators. +""" type MXDataProvider <: AbstractDataProvider handle :: MX_DataIterHandle data_shape :: Vector{Tuple{Base.Symbol, Tuple}} @@ -569,9 +547,6 @@ function count_samples(provider :: MXDataProvider, batch :: MXDataBatch) return provider.batch_size - Int(ref_pad[]) end -#=doc -**autogen:EMBED:io:EMBED:autogen** -=# function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) ref_name = Ref{char_p}(0) ref_desc = Ref{char_p}(0) diff --git a/src/metric.jl b/src/metric.jl index 1f601980ad13..21d5e4e34b14 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -1,17 +1,8 @@ -#=doc -Evaluation Metrics -================== +""" + AbstractEvalMetric -Evaluation metrics provide a way to evaluate the performance of a learned model. 
-This is typically used during training to monitor performance on the validation -set. -=# - -#=doc -.. class:: AbstractEvalMetric - - The base class for all evaluation metrics. The sub-types should implement the following - interfaces. +The base class for all evaluation metrics. The sub-types should implement the following +interfaces. .. function:: update!(metric, labels, preds) @@ -33,7 +24,7 @@ set. :return: ``Vector{Tuple{Base.Symbol, Real}}``, a list of name-value pairs. For example, ``[(:accuracy, 0.9)]``. -=# +""" abstract AbstractEvalMetric # Generic update! version @@ -49,14 +40,14 @@ function update!{T <: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray} end -#=doc -.. class:: Accuracy +""" + Accuracy - Multiclass classification accuracy. +Multiclass classification accuracy. - Calculates the mean accuracy per sample for softmax in one dimension. - For a multi-dimensional softmax the mean accuracy over all dimensions is calculated. -=# +Calculates the mean accuracy per sample for softmax in one dimension. +For a multi-dimensional softmax the mean accuracy over all dimensions is calculated. +""" type Accuracy <: AbstractEvalMetric acc_sum :: Float64 n_sample :: Int @@ -108,13 +99,13 @@ function reset!(metric :: Accuracy) metric.n_sample = 0 end -#=doc -.. class:: MSE +""" + MSE - Mean Squared Error. TODO: add support for multi-dimensional outputs. +Mean Squared Error. TODO: add support for multi-dimensional outputs. - Calculates the mean squared error regression loss in one dimension. -=# +Calculates the mean squared error regression loss in one dimension. +""" type MSE <: AbstractEvalMetric mse_sum :: Float64 @@ -144,13 +135,13 @@ function reset!(metric :: MSE) metric.n_sample = 0 end -#=doc -.. class:: ACE +""" + ACE - Averaged cross-entropy for classification. This also know als logloss. +Averaged cross-entropy for classification. This also know als logloss. - Calculated the averaged cross entropy for multi-dimentions output. 
-=# +Calculated the averaged cross entropy for multi-dimentions output. +""" type ACE <: AbstractEvalMetric ace_sum :: Float64 n_sample :: Int @@ -192,14 +183,14 @@ function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) end end -#=doc -.. class:: MultiACE +""" + MultiACE - Averaged cross-entropy for classification. This also know als logloss. - This variant keeps track of the different losses per class. +Averaged cross-entropy for classification. This also know als logloss. +This variant keeps track of the different losses per class. - Calculated the averaged cross entropy for multi-dimentions output. -=# +Calculated the averaged cross entropy for multi-dimentions output. +""" type MultiACE <: AbstractEvalMetric aces :: Vector{Float64} counts :: Vector{Int} diff --git a/src/model.jl b/src/model.jl index d9b81d02f3d9..ac96d047b668 100644 --- a/src/model.jl +++ b/src/model.jl @@ -1,27 +1,19 @@ -#=doc -Models -====== - -The model API provides convenient high-level interface to do training and predicting on -a network described using the symbolic API. -=# - -#=doc -.. class:: AbstractModel +""" + AbstractModel - The abstract super type of all models in MXNet.jl. -=# +The abstract super type of all models in MXNet.jl. +""" abstract AbstractModel -#=doc -.. class:: FeedForward +""" + FeedForward - The feedforward model provides convenient interface to train and predict on - feedforward architectures like multi-layer MLP, ConvNets, etc. There is no - explicitly handling of *time index*, but it is relatively easy to implement - unrolled RNN / LSTM under this framework (**TODO**: add example). For models - that handles sequential data explicitly, please use **TODO**... -=# +The feedforward model provides convenient interface to train and predict on +feedforward architectures like multi-layer MLP, ConvNets, etc. 
There is no +explicitly handling of *time index*, but it is relatively easy to implement +unrolled RNN / LSTM under this framework (**TODO**: add example). For models +that handles sequential data explicitly, please use **TODO**... +""" type FeedForward <: AbstractModel arch :: SymbolicNode ctx :: Vector{Context} @@ -35,9 +27,10 @@ type FeedForward <: AbstractModel FeedForward(arch :: SymbolicNode, ctx :: Vector{Context}) = new(arch, ctx) end -"""Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector - of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that - piece. +""" +Get a split of `batch_size` into `n_split` pieces for data parallelization. Returns a vector +of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that +piece. """ function _split_inputs(batch_size :: Int, n_split :: Int) @assert(batch_size >= n_split) @@ -51,14 +44,14 @@ function _split_inputs(batch_size :: Int, n_split :: Int) return idx end -#=doc -.. function:: FeedForward(arch :: SymbolicNode, ctx) +""" + FeedForward(arch :: SymbolicNode, ctx) - :param arch: the architecture of the network constructed using the symbolic API. - :param ctx: the devices on which this model should do computation. It could be a single :class:`Context` +* arch: the architecture of the network constructed using the symbolic API. +* ctx: the devices on which this model should do computation. It could be a single :class:`Context` or a list of :class:`Context` objects. In the latter case, data parallelization will be used for training. If no context is provided, the default context ``cpu()`` will be used. -=# +""" function FeedForward(arch :: SymbolicNode; context :: Union{Context, Vector{Context}, Void} = nothing) if isa(context, Void) context = [Context(CPU)] @@ -68,8 +61,8 @@ function FeedForward(arch :: SymbolicNode; context :: Union{Context, Vector{Cont FeedForward(arch, context) end -#=doc -.. 
function:: init_model(self, initializer; overwrite=false, input_shapes...) +""" + init_model(self, initializer; overwrite=false, input_shapes...) Initialize the weights in the model. @@ -77,12 +70,12 @@ end need to call this method unless one needs to inspect a model with only randomly initialized weights. - :param FeedForward self: the model to be initialized. - :param AbstractInitializer initializer: an initializer describing how the weights should be initialized. - :param Bool overwrite: keyword argument, force initialization even when weights already exists. - :param input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. +* FeedForward self: the model to be initialized. +* AbstractInitializer initializer: an initializer describing how the weights should be initialized. +* Bool overwrite: keyword argument, force initialization even when weights already exists. +* input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. For example, ``data=(28,28,1,100), label=(100,)``. -=# +""" function init_model(self :: FeedForward, initializer :: AbstractInitializer; overwrite::Bool=false, input_shapes...) # all arg names, including data, label, and parameters arg_names = list_arguments(self.arch) @@ -168,7 +161,7 @@ function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_sha end end -#=doc +""" .. function:: predict(self, data; overwrite=false, callback=nothing) @@ -182,9 +175,9 @@ end # consume or write batch_output to file end - :param FeedForward self: the model. - :param AbstractDataProvider data: the data to perform prediction on. - :param Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory +* FeedForward self: the model. +* AbstractDataProvider data: the data to perform prediction on. +* Bool overwrite: an :class:`Executor` is initialized the first time predict is called. 
The memory allocation of the :class:`Executor` depends on the mini-batch size of the test data provider. If you call predict twice with data provider of the same batch-size, then the executor can be potentially be re-used. So, if ``overwrite`` is false, @@ -209,7 +202,7 @@ end predict and synchronize the weights again. :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` -=# +""" function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = true) predict(self, data; overwrite = overwrite, callback=callback) end @@ -314,41 +307,41 @@ function _invoke_callbacks{T<:Real}(self::FeedForward, callbacks::Vector{Abstrac end end -#=doc -.. function:: train(model :: FeedForward, ...) +""" + train(model :: FeedForward, ...) - Alias to :func:`fit`. -=# +Alias to :func:`fit`. +""" function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) fit(self, optimizer, data; kwargs...) end -#=doc -.. function:: fit(model :: FeedForward, optimizer, data; kwargs...) +""" + fit(model :: FeedForward, optimizer, data; kwargs...) - Train the ``model`` on ``data`` with the ``optimizer``. +Train the ``model`` on ``data`` with the ``optimizer``. - :param FeedForward model: the model to be trained. - :param AbstractOptimizer optimizer: the optimization algorithm to use. - :param AbstractDataProvider data: the training data provider. - :param Int n_epoch: default 10, the number of full data-passes to run. - :param AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for +* FeedForward model: the model to be trained. +* AbstractOptimizer optimizer: the optimization algorithm to use. +* AbstractDataProvider data: the training data provider. +* Int n_epoch: default 10, the number of full data-passes to run. +* AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for the validation set. 
- :param AbstractEvalMetric eval_metric: keyword argument, default ``Accuracy()``. The metric used +* AbstractEvalMetric eval_metric: keyword argument, default ``Accuracy()``. The metric used to evaluate the training performance. If ``eval_data`` is provided, the same metric is also calculated on the validation set. - :param kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients +* kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients and parameters when multiple devices are used for training. :type kvstore: :class:`KVStore` or ``Base.Symbol`` - :param AbstractInitializer initializer: keyword argument, default ``UniformInitializer(0.01)``. - :param Bool force_init: keyword argument, default false. By default, the random initialization using the +* AbstractInitializer initializer: keyword argument, default ``UniformInitializer(0.01)``. +* Bool force_init: keyword argument, default false. By default, the random initialization using the provided ``initializer`` will be skipped if the model weights already exists, maybe from a previous call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When this option is set, it will always do random initialization at the begining of training. - :param callbacks: keyword argument, default ``[]``. Callbacks to be invoked at each epoch or mini-batch, +* callbacks: keyword argument, default ``[]``. Callbacks to be invoked at each epoch or mini-batch, see :class:`AbstractCallback`. :type callbacks: ``Vector{AbstractCallback}`` -=# +""" function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) opts = TrainingOptions(; kwargs...) diff --git a/src/ndarray.jl b/src/ndarray.jl index d78d647fe185..dacee56d7281 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,8 +1,3 @@ -#=doc -NDArray API -=========== -=# - # All the types supported by mshadow. 
typealias DType Union{Float32, Float64, Float16, UInt8, Int32} @enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 @@ -72,8 +67,8 @@ end ################################################################################ # NDArray Type ################################################################################ -#=doc -.. class:: NDArray +""" + NDArray Wrapper of the ``NDArray`` type in ``libmxnet``. This is the basic building block of tensor-based computation. @@ -88,7 +83,7 @@ end about shapes. For example, a mini-batch of 100 MNIST images is a tensor of C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory have shape (28,28,1,100). -=# +""" type NDArray handle :: MX_NDArrayHandle writable :: Bool @@ -115,11 +110,11 @@ Base.cconvert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) ################################################################################ # NDArray functions exported to the users ################################################################################ -#=doc -.. function:: context(arr :: NDArray) +""" + context(arr :: NDArray) Get the context that this :class:`NDArray` lives on. -=# +""" function context(arr :: NDArray) ref_typeid = Ref{Cint}(0) ref_devid = Ref{Cint}(0) @@ -129,14 +124,14 @@ function context(arr :: NDArray) end -#=doc +""" .. function:: empty(DType, shape :: Tuple, ctx :: Context) empty(DType, shape :: Tuple) empty(DType, dim1, dim2, ...) Allocate memory for an uninitialized :class:`NDArray` with a specified type. -=# +""" function empty{N,T<:DType}(::Type{T}, shape :: NTuple{N, Int}) empty(T, shape, cpu()) end @@ -147,14 +142,14 @@ function empty{T<:DType}(:: Type{T}, shape :: Int...) empty(T, shape) end -#=doc +""" .. function:: empty(shape :: Tuple, ctx :: Context) empty(shape :: Tuple) empty(dim1, dim2, ...) Allocate memory for an uninitialized :class:`NDArray` with specific shape of type Float32. 
-=# +""" function empty{N}(shape :: NTuple{N, Int}) empty(shape, cpu()) end @@ -165,19 +160,13 @@ function empty(shape :: Int...) empty(shape) end -#=doc -Interface functions similar to Julia Arrays -------------------------------------------- -=# - -#=doc -.. function:: - zeros(DType, shape :: Tuple, ctx :: Context) - zeros(DType, shape :: Tuple) - zeros(DType, dim1, dim2, ...) +""" + zeros(DType, shape :: Tuple, ctx :: Context) + zeros(DType, shape :: Tuple) + zeros(DType, dim1, dim2, ...) - Create zero-ed :class:`NDArray` with specific shape and type -=# +Create zero-ed :class:`NDArray` with specific shape and type +""" function zeros{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}) zeros(T, shape, cpu()) end @@ -190,14 +179,13 @@ function zeros{T<:DType}(:: Type{T}, shape :: Int...) zeros(T, shape) end -#=doc -.. function:: - zeros(shape :: Tuple, ctx :: Context) - zeros(shape :: Tuple) - zeros(dim1, dim2, ...) +""" + zeros(shape :: Tuple, ctx :: Context) + zeros(shape :: Tuple) + zeros(dim1, dim2, ...) - Create zero-ed :class:`NDArray` with specific shape. -=# +Create zero-ed :class:`NDArray` with specific shape. +""" function zeros{N}(shape :: NTuple{N, Int}) zeros(shape, cpu()) end @@ -210,14 +198,13 @@ function zeros(shape :: Int...) zeros(shape) end -#=doc -.. function:: - ones(DType, shape :: Tuple, ctx :: Context) - ones(DType, shape :: Tuple) - ones(DType, dim1, dim2, ...) +""" + ones(DType, shape :: Tuple, ctx :: Context) + ones(DType, shape :: Tuple) + ones(DType, dim1, dim2, ...) - Create an :class:`NDArray` with specific shape & type, and initialize with 1. -=# +Create an :class:`NDArray` with specific shape & type, and initialize with 1. +""" function ones{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}) ones(T, shape, cpu()) end @@ -230,14 +217,13 @@ function ones{T<:DType}(:: Type{T}, shape :: Int...) ones(T, shape) end -#=doc -.. function:: - ones(shape :: Tuple, ctx :: Context) - ones(shape :: Tuple) - ones(dim1, dim2, ...) 
+""" + ones(shape :: Tuple, ctx :: Context) + ones(shape :: Tuple) + ones(dim1, dim2, ...) - Create an :class:`NDArray` with specific shape and initialize with 1. -=# +Create an :class:`NDArray` with specific shape and initialize with 1. +""" function ones{N}(shape :: NTuple{N, Int}) ones(shape, cpu()) end @@ -252,14 +238,14 @@ end import Base: size, length, ndims, eltype -#=doc +""" .. function:: size(arr :: NDArray) size(arr :: NDArray, dim :: Int) Get the shape of an :class:`NDArray`. The shape is in Julia's column-major convention. See also the :ref:`notes on NDArray shapes `. -=# +""" function size(arr :: NDArray) ref_ndim = Ref{MX_uint}(0) ref_shape = Ref{Ptr{MX_uint}}(0) @@ -271,29 +257,29 @@ function size(arr :: NDArray, dim :: Int) size(arr)[dim] end -#=doc -.. function:: length(arr :: NDArray) +""" + length(arr :: NDArray) Get the number of elements in an :class:`NDArray`. -=# +""" function length(arr :: NDArray) prod(size(arr)) end -#=doc -.. function:: ndims(arr :: NDArray) +""" + ndims(arr :: NDArray) Get the number of dimensions of an :class:`NDArray`. Is equivalent to ``length(size(arr))``. -=# +""" function ndims(arr :: NDArray) length(size(arr)) end -#=doc -.. function:: eltype(arr :: NDArray) +""" + eltype(arr :: NDArray) Get the element type of an :class:`NDArray`. -=# +""" function eltype{T <: Union{NDArray, MX_NDArrayHandle}}(arr :: T) dtype_ref = Ref{Cint}(0) @mxcall(:MXNDArrayGetDType, (MX_handle, Ptr{Cint}), arr, dtype_ref) @@ -310,15 +296,15 @@ end import Base: slice -#=doc -.. function:: slice(arr :: NDArray, start:stop) +""" + slice(arr :: NDArray, start:stop) Create a view into a sub-slice of an :class:`NDArray`. Note only slicing at the slowest changing dimension is supported. In Julia's column-major perspective, this is the last dimension. For example, given an :class:`NDArray` of shape (2,3,4), ``slice(array, 2:3)`` will create a :class:`NDArray` of shape (2,3,2), sharing the data with the original array. 
This operation is used in data parallelization to split mini-batch into sub-batches for different devices. -=# +""" function slice(arr :: NDArray, ::Colon) arr end @@ -340,8 +326,8 @@ end import Base: setindex! -#=doc -.. function:: setindex!(arr :: NDArray, val, idx) +""" + setindex!(arr :: NDArray, val, idx) Assign values to an :class:`NDArray`. Elementwise assignment is not implemented, only the following scenarios are supported @@ -350,7 +336,7 @@ import Base: setindex! or :class:`NDArray`) of the same shape. - ``arr[start:stop] = val``: assignment to a *slice*, ``val`` could be a scalar or an array of the same shape to the slice. See also :func:`slice`. -=# +""" function setindex!(arr :: NDArray, val :: Real, ::Colon) @assert(arr.writable) _set_value(convert(eltype(arr), val), arr) @@ -366,10 +352,11 @@ function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, id setindex!(slice(arr, idx), val, Colon()) end -#=doc -.. function:: getindex(arr :: NDArray, idx) +import Base: getindex +""" + getindex(arr :: NDArray, idx) - Shortcut for :func:`slice`. A typical use is to write +Shortcut for :func:`slice`. A typical use is to write .. code-block:: julia @@ -392,29 +379,26 @@ end The behavior is quite different from indexing into Julia's ``Array``. For example, ``arr[2:5]`` create a **copy** of the sub-array for Julia ``Array``, while for :class:`NDArray`, this is a *slice* that shares the memory. -=# -import Base: getindex -"""Shortcut for `slice`. **NOTE** the behavior for Julia's built-in index slicing is to create a -copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. """ function getindex(arr :: NDArray, ::Colon) return arr end + +""" +Shortcut for `slice`. **NOTE** the behavior for Julia's built-in index slicing is to create a +copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. 
+""" function getindex(arr :: NDArray, idx::UnitRange{Int}) slice(arr, idx) end -#=doc -Copying functions ------------------ -=# import Base: copy!, copy, convert -#=doc +""" .. function:: copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) Copy contents of ``src`` into ``dst``. -=# +""" function copy!(dst :: NDArray, src :: NDArray) @assert(dst.writable) if dst.handle == src.handle @@ -456,7 +440,7 @@ function copy_ignore_shape!{T<:Real}(dst :: NDArray, src :: Array{T}) end -#=doc +""" .. function:: copy(arr :: NDArray) copy(arr :: NDArray, ctx :: Context) @@ -464,7 +448,7 @@ end Create a copy of an array. When no :class:`Context` is given, create a Julia ``Array``. Otherwise, create an :class:`NDArray` on the specified context. -=# +""" # Create copy: NDArray -> Julia Array function copy(arr :: NDArray) j_arr = Array{eltype(arr)}(size(arr)) @@ -483,31 +467,26 @@ function copy{T<:DType}(arr :: Array{T}, ctx :: Context) copy!(dst, arr) end -#=doc -.. function:: convert(::Type{Array{T}}, arr :: NDArray) +""" + convert(::Type{Array{T}}, arr :: NDArray) Convert an :class:`NDArray` into a Julia ``Array`` of specific type. Data will be copied. -=# +""" # Convert copy: NDArray -> Julia Array function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) convert(t, copy(arr)) end -#=doc -Basic arithmetics ------------------ -=# - -#=doc -.. function:: @inplace +""" + @inplace - Julia does not support re-definiton of ``+=`` operator (like ``__iadd__`` in python), - When one write ``a += b``, it gets translated to ``a = a+b``. ``a+b`` will allocate new - memory for the results, and the newly allocated :class:`NDArray` object is then assigned - back to a, while the original contents in a is discarded. This is very inefficient - when we want to do inplace update. +Julia does not support re-definiton of ``+=`` operator (like ``__iadd__`` in python), +When one write ``a += b``, it gets translated to ``a = a+b``. 
``a+b`` will allocate new +memory for the results, and the newly allocated :class:`NDArray` object is then assigned +back to a, while the original contents in a is discarded. This is very inefficient +when we want to do inplace update. - This macro is a simple utility to implement this behavior. Write +This macro is a simple utility to implement this behavior. Write .. code-block:: julia @@ -520,7 +499,7 @@ Basic arithmetics mx.add_to!(a, b) which will do inplace adding of the contents of ``b`` into ``a``. -=# +""" macro inplace(stmt) if stmt.head == :+= || stmt.head == :.+= Expr(:call, :add_to!, esc(stmt.args[1]), esc(stmt.args[2])) @@ -535,11 +514,11 @@ macro inplace(stmt) end end -#=doc -.. function:: add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) +""" + add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) - Add a bunch of arguments into ``dst``. Inplace updating. -=# +Add a bunch of arguments into ``dst``. Inplace updating. +""" function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) @assert dst.writable for arg in args @@ -552,16 +531,16 @@ function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) return dst end -#=doc -.. function:: - +(args...) - .+(args...) - - Summation. Multiple arguments of either scalar or :class:`NDArray` could be - added together. Note at least the first or second argument needs to be an :class:`NDArray` to - avoid ambiguity of built-in summation. -=# import Base: +, .+ + +""" + +(args...) + .+(args...) + +Summation. Multiple arguments of either scalar or :class:`NDArray` could be +added together. Note at least the first or second argument needs to be an :class:`NDArray` to +avoid ambiguity of built-in summation. +""" function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) ret = copy(arg0, context(arg0)) add_to!(ret, args...) @@ -576,11 +555,11 @@ function .+(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) .+(arg1, arg0, args...) end -#=doc -.. 
function:: sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) +""" + sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) Subtract a bunch of arguments from ``dst``. Inplace updating. -=# +""" function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) @@ -590,16 +569,16 @@ function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) end end -#=doc -.. function:: - -(arg0, arg1) - -(arg0) - .-(arg0, arg1) - - Subtraction ``arg0 - arg1``, of scalar types or :class:`NDArray`. Or create - the negative of ``arg0``. -=# import Base: -, .- + +""" + -(arg0, arg1) + -(arg0) + .-(arg0, arg1) + +Subtraction ``arg0 - arg1``, of scalar types or :class:`NDArray`. Or create +the negative of ``arg0``. +""" function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) sub_from!(ret, arg1) @@ -620,12 +599,12 @@ function -(arg0 :: NDArray) _mul_scalar(arg0, -one(eltype(arg0))) end -#=doc -.. function:: mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) +""" + mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) Elementwise multiplication into ``dst`` of either a scalar or an :class:`NDArray` of the same shape. Inplace updating. -=# +""" function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) @@ -636,13 +615,13 @@ function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) return dst end -#=doc -.. function:: - .*(arg0, arg1) - - Elementwise multiplication of ``arg0`` and ``arg``, could be either scalar or :class:`NDArray`. -=# import Base: .*, * + +""" + .*(arg0, arg1) + +Elementwise multiplication of ``arg0`` and ``arg``, could be either scalar or :class:`NDArray`. +""" function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) mul_to!(ret, arg) @@ -651,13 +630,12 @@ function .*(arg0 :: Real, arg :: NDArray) .*(arg, arg0) end -#=doc -.. 
function:: - *(arg0, arg1) +""" + *(arg0, arg1) - Currently only multiplication a scalar with an :class:`NDArray` is implemented. Matrix multiplication - is to be added soon. -=# +Currently only multiplication a scalar with an :class:`NDArray` is implemented. Matrix multiplication +is to be added soon. +""" function *(arg0 :: NDArray, arg :: Real) ret = copy(arg0, context(arg0)) mul_to!(ret, arg) @@ -666,11 +644,11 @@ function *(arg0 :: Real, arg :: NDArray) *(arg, arg0) end -#=doc -.. function:: div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) +""" + div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) - Elementwise divide a scalar or an :class:`NDArray` of the same shape from ``dst``. Inplace updating. -=# +Elementwise divide a scalar or an :class:`NDArray` of the same shape from ``dst``. Inplace updating. +""" function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) @@ -681,31 +659,31 @@ function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) end import Base: ./, / -#=doc -.. function:: ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) +""" + ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) - Elementwise dividing an :class:`NDArray` by a scalar or another :class:`NDArray` of the same shape. -=# +Elementwise dividing an :class:`NDArray` by a scalar or another :class:`NDArray` of the same shape. +""" function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) div_from!(ret, arg) end -#=doc -.. function:: /(arg0 :: NDArray, arg :: Real) +""" + /(arg0 :: NDArray, arg :: Real) - Divide an :class:`NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. -=# +Divide an :class:`NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. +""" function /(arg0 :: NDArray, arg :: Real) ./(arg0, arg) end -#=doc +""" Manipulating as Julia Arrays ---------------------------- -.. 
function:: @nd_as_jl(captures..., statement) + @nd_as_jl(captures..., statement) A convenient macro that allows to operate :class:`NDArray` as Julia Arrays. For example, @@ -743,7 +721,7 @@ Manipulating as Julia Arrays - The statements are wrapped in a ``let``, thus locally introduced new variables will not be available after the statements. So you will need to declare the variables before calling the macro if needed. -=# +""" macro nd_as_jl(m_args...) @assert(length(m_args) > 0) stmts = m_args[end] @@ -831,18 +809,18 @@ function _wait_to_write(arr :: NDArray) @mxcall(:MXNDArrayWaitToWrite, (MX_handle,), arr) end -#=doc -.. function:: try_get_shared(arr) +""" + try_get_shared(arr) Try to create a Julia array by sharing the data with the underlying :class:`NDArray`. - :param NDArray arr: the array to be shared. +* NDArray arr: the array to be shared. .. warning:: The returned array does not guarantee to share data with the underlying :class:`NDArray`. In particular, data sharing is possible only when the :class:`NDArray` lives on CPU. -=# +""" function try_get_shared(arr :: NDArray) if context(arr).device_type == CPU # try to do data sharing @@ -854,14 +832,14 @@ function try_get_shared(arr :: NDArray) end end -#=doc -.. function:: is_shared(j_arr, arr) +""" + is_shared(j_arr, arr) Test whether ``j_arr`` is sharing data with ``arr``. - :param Array j_arr: the Julia Array. - :param NDArray arr: the :class:`NDArray`. -=# +* Array j_arr: the Julia Array. +* NDArray arr: the :class:`NDArray`. +""" function is_shared(j_arr :: Array, arr :: NDArray) false end @@ -875,24 +853,19 @@ function is_shared{T<:DType}(j_arr :: Array{T}, arr :: NDArray) return pointer(j_arr) == pointer(arr) end -#=doc -IO --- -=# -#=doc -.. function:: load(filename, ::Type{NDArray}) +""" + load(filename, ::Type{NDArray}) - Load NDArrays from binary file. +Load NDArrays from binary file. - :param AbstractString filename: the path of the file to load. It could be S3 or HDFS address. 
+* AbstractString filename: the path of the file to load. It could be S3 or HDFS address. :return: Either ``Dict{Base.Symbol, NDArray}`` or ``Vector{NDArray}``. - If the ``libmxnet`` is built with the corresponding component enabled. Examples - - * ``s3://my-bucket/path/my-s3-ndarray`` - * ``hdfs://my-bucket/path/my-hdfs-ndarray`` - * ``/path-to/my-local-ndarray`` -=# +If the ``libmxnet`` is built with the corresponding component enabled. Examples +* ``s3://my-bucket/path/my-s3-ndarray`` +* ``hdfs://my-bucket/path/my-hdfs-ndarray`` +* ``/path-to/my-local-ndarray`` +""" function load(filename::AbstractString, ::Type{NDArray}) out_size = Ref{MX_uint}(0) out_hdrs = Ref{Ptr{MX_handle}}(0) @@ -911,16 +884,16 @@ function load(filename::AbstractString, ::Type{NDArray}) end end -#=doc -.. function:: save(filename :: AbstractString, data) +""" + save(filename :: AbstractString, data) - Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built - with corresponding support. +Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built +with corresponding support. - :param AbstractString filename: path to the binary file to write to. - :param data: data to save to file. +* AbstractString filename: path to the binary file to write to. +* data: data to save to file. :type data: :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. 
-=# +""" function save(filename::AbstractString, data::NDArray) save(filename, [data]) end @@ -937,10 +910,6 @@ function save(filename::AbstractString, data::Dict{Base.Symbol,NDArray}) filename, length(names), arrays, names) end -#=doc -libmxnet APIs -------------- -=# ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -959,7 +928,7 @@ end # functions can overload them import Base: sqrt -#=doc +""" The libxmnet APIs are automatically imported from ``libmxnet.so``. The functions listed here operate on :class:`NDArray` objects. The arguments to the functions are typically ordered as @@ -985,9 +954,7 @@ Upon calling, the output arguments will be automatically initialized with empty Those functions always return the output arguments. If there is only one output (the typical situation), that object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. - -**autogen:EMBED:ndarray:EMBED:autogen** -=# +""" function _get_ndarray_functions() n = Ref{MX_uint}(0) diff --git a/src/nn-factory.jl b/src/nn-factory.jl index 3725587f37a1..b170214a1af9 100644 --- a/src/nn-factory.jl +++ b/src/nn-factory.jl @@ -1,33 +1,25 @@ -#=doc -Neural Networks Factory -======================= +""" + MLP(input, spec) -Neural network factory provide convenient helper functions to define -common neural networks. -=# +Construct a multi-layer perceptron. A MLP is a multi-layer neural network with +fully connected layers. -#=doc -.. function:: MLP(input, spec) - - Construct a multi-layer perceptron. A MLP is a multi-layer neural network with - fully connected layers. - - :param SymbolicNode input: the input to the mlp. - :param spec: the mlp specification, a list of hidden dimensions. For example, +* SymbolicNode input: the input to the mlp. 
+* spec: the mlp specification, a list of hidden dimensions. For example, ``[128, (512, :sigmoid), 10]``. The number in the list indicate the number of hidden units in each layer. A tuple could be used to specify the activation of each layer. Otherwise, the default activation will be used (except for the last layer). - :param Base.Symbol hidden_activation: keyword argument, default ``:relu``, indicating +* Base.Symbol hidden_activation: keyword argument, default ``:relu``, indicating the default activation for hidden layers. The specification here could be overwritten by layer-wise specification in the ``spec`` argument. Also activation is not applied to the last, i.e. the prediction layer. See :func:`Activation` for a list of supported activation types. - :param prefix: keyword argument, default ``gensym()``, used as the prefix to +* prefix: keyword argument, default ``gensym()``, used as the prefix to name the constructed layers. :return: the constructed MLP. -=# +""" function MLP(input, spec; hidden_activation::Base.Symbol=:relu, prefix=gensym()) spec = convert(Vector{Union{Int,Tuple}}, spec) diff --git a/src/optimizer.jl b/src/optimizer.jl index 887fd9ab59e0..e2fc44338a2f 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -1,37 +1,28 @@ -#=doc -Optimizers -========== +""" + AbstractOptimizer -Common interfaces ------------------ -=# - - -#=doc -.. class:: AbstractOptimizer - - Base type for all optimizers. -=# +Base type for all optimizers. +""" abstract AbstractOptimizer -#=doc -.. class:: AbstractLearningRateScheduler +""" + AbstractLearningRateScheduler - Base type for all learning rate scheduler. -=# +Base type for all learning rate scheduler. +""" abstract AbstractLearningRateScheduler -#=doc -.. class:: AbstractMomentumScheduler +""" + AbstractMomentumScheduler - Base type for all momentum scheduler. -=# +Base type for all momentum scheduler. +""" abstract AbstractMomentumScheduler -#=doc -.. class:: OptimizationState +""" + OptimizationState .. 
attribute:: batch_size @@ -54,7 +45,7 @@ abstract AbstractMomentumScheduler The current iteration count. One iteration corresponds to one mini-batch, but unlike the mini-batch count, the iteration count does **not** reset in each epoch. So it track the *total* number of mini-batches seen so far. -=# +""" type OptimizationState batch_size :: Int curr_epoch :: Int @@ -64,13 +55,14 @@ end OptimizationState(batch_size::Int) = OptimizationState(batch_size, 0, 0, 0) -#=doc -.. function:: get_learning_rate(scheduler, state) +""" + get_learning_rate(scheduler, state) - :param AbstractLearningRateScheduler scheduler: a learning rate scheduler. - :param OptimizationState state: the current state about epoch, mini-batch and iteration count. +# Arguments +* AbstractLearningRateScheduler scheduler: a learning rate scheduler. +* OptimizationState state: the current state about epoch, mini-batch and iteration count. :return: the current learning rate. -=# +""" function get_learning_rate end @@ -79,22 +71,22 @@ end module LearningRate import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate -#=doc -.. class:: LearningRate.Fixed +""" + LearningRate.Fixed Fixed learning rate scheduler always return the same learning rate. -=# +""" type Fixed <: AbstractLearningRateScheduler learning_rate :: Float64 end get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rate -#=doc -.. class:: LearningRate.Exp +""" + LearningRate.Exp :math:`\eta_t = \eta_0\gamma^t`. Here :math:`t` is the epoch count, or the iteration count if ``decay_on_iteration`` is set to true. -=# +""" type Exp <: AbstractLearningRateScheduler learning_rate :: Float64 gamma :: Float64 @@ -106,13 +98,13 @@ function Exp(base_lr::Real; gamma::Real=0.9, decay_on_iteration::Bool=false) end get_learning_rate(self :: Exp, state :: OptimizationState) = self.learning_rate * self.gamma ^ (self.on_iteration ? state.curr_iter : state.curr_epoch) -#=doc -.. 
class:: LearningRate.Inv +""" + LearningRate.Inv :math:`\eta_t = \eta_0 * (1 + \gamma * t)^(-power)`. Here :math:`t` is the epoch count, or the iteration count if ``decay_on_iteration`` is set to true. -=# +""" type Inv <: AbstractLearningRateScheduler learning_rate :: Float64 gamma :: Float64 @@ -137,13 +129,13 @@ function get_lr_scheduler(scheduler :: Any, lr :: Real) end -#=doc -.. function:: get_momentum(scheduler, state) +""" + get_momentum(scheduler, state) - :param AbstractMomentumScheduler scheduler: the momentum scheduler. - :param OptimizationState state: the state about current epoch, mini-batch and iteration count. +* AbstractMomentumScheduler scheduler: the momentum scheduler. +* OptimizationState state: the state about current epoch, mini-batch and iteration count. :return: the current momentum. -=# +""" function get_momentum end @@ -153,21 +145,21 @@ end module Momentum import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum -#=doc -.. class:: Momentum.Null +""" + Momentum.Null The null momentum scheduler always returns 0 for momentum. It is also used to explicitly indicate momentum should not be used. -=# +""" type Null <: AbstractMomentumScheduler end get_momentum(self :: Null, state :: OptimizationState) = 0.0 -#=doc -.. class:: Momentum.Fixed +""" + Momentum.Fixed Fixed momentum scheduler always returns the same value. -=# +""" type Fixed <: AbstractMomentumScheduler momentum :: Float64 end @@ -185,14 +177,14 @@ function get_momentum_scheduler(scheduler :: Any, momentum :: Real) end -#=doc -.. function:: get_updater(optimizer) +""" + get_updater(optimizer) - :param AbstractOptimizer optimizer: the underlying optimizer. +* AbstractOptimizer optimizer: the underlying optimizer. A utility function to create an updater function, that uses its closure to store all the states needed for each weights. 
-=# +""" function get_updater(optimizer :: AbstractOptimizer) states = Dict{Int,Any}() function updater(index :: Int, grad :: NDArray, weight :: NDArray) @@ -204,30 +196,24 @@ function get_updater(optimizer :: AbstractOptimizer) return updater end -################################################################################ -#=doc -Built-in optimizers -------------------- -=# - -#=doc -.. class:: AbstractOptimizerOptions +""" + AbstractOptimizerOptions - Base class for all optimizer options. -=# +Base class for all optimizer options. +""" abstract AbstractOptimizerOptions -#=doc -.. function:: normalized_gradient(opts, state, grad) +""" + normalized_gradient(opts, state, grad) - :param AbstractOptimizerOptions opts: options for the optimizer, should contain the field +* AbstractOptimizerOptions opts: options for the optimizer, should contain the field ``grad_scale``, ``grad_clip`` and ``weight_decay``. - :param OptimizationState state: the current optimization state. - :param NDArray weight: the trainable weights. - :param NDArray grad: the original gradient of the weights. +* OptimizationState state: the current optimization state. +* NDArray weight: the trainable weights. +* NDArray grad: the original gradient of the weights. Get the properly normalized gradient (re-scaled and clipped if necessary). -=# +""" function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, weight::NDArray, grad::NDArray) grad_scale = 1.0 / state.batch_size diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 5bd1a6120f0e..850379899642 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -1,13 +1,8 @@ -#=doc -Symbolic API -============ -=# +""" + SymbolicNode -#=doc -.. class:: SymbolicNode - - SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. -=# +SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. 
+""" type SymbolicNode handle :: MX_SymbolHandle end @@ -17,34 +12,33 @@ end Base.convert(t::Type{MX_handle}, obj::SymbolicNode) = Base.unsafe_convert(t, obj) Base.cconvert(t::Type{MX_handle}, obj::SymbolicNode) = Base.unsafe_convert(t, obj) -#=doc -.. function:: deepcopy(self :: SymbolicNode) +""" + deepcopy(self :: SymbolicNode) - Make a deep copy of a SymbolicNode. -=# +Make a deep copy of a SymbolicNode. +""" function Base.deepcopy(self :: SymbolicNode) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCopy, (MX_handle, Ref{MX_handle}), self, ref_hdr) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -#=doc -.. function:: copy(self :: SymbolicNode) +""" + copy(self :: SymbolicNode) - Make a copy of a SymbolicNode. The same as making a deep copy. -=# +Make a copy of a SymbolicNode. The same as making a deep copy. +""" function Base.copy(self :: SymbolicNode) Base.deepcopy(self) end -#=doc -.. function:: - call(self :: SymbolicNode, args :: SymbolicNode...) - call(self :: SymbolicNode; kwargs...) +""" + call(self :: SymbolicNode, args :: SymbolicNode...) + call(self :: SymbolicNode; kwargs...) - Make a new node by composing ``self`` with ``args``. Or the arguments - can be specified using keyword arguments. -=# +Make a new node by composing ``self`` with ``args``. Or the arguments +can be specified using keyword arguments. +""" function Base.call(self :: SymbolicNode, args :: SymbolicNode...) s = deepcopy(self) _compose!(s, args...) @@ -67,67 +61,67 @@ macro _list_symbol_info(self, func_name) end end -#=doc -.. function:: list_arguments(self :: SymbolicNode) +""" + list_arguments(self :: SymbolicNode) - List all the arguments of this node. The argument for a node contains both - the inputs and parameters. For example, a :class:`FullyConnected` node will - have both data and weights in its arguments. A composed node (e.g. a MLP) will - list all the arguments for intermediate nodes. +List all the arguments of this node. 
The argument for a node contains both +the inputs and parameters. For example, a :class:`FullyConnected` node will +have both data and weights in its arguments. A composed node (e.g. a MLP) will +list all the arguments for intermediate nodes. :return: A list of symbols indicating the names of the arguments. -=# +""" function list_arguments(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListArguments) end -#=doc -.. function:: list_outputs(self :: SymbolicNode) +""" + list_outputs(self :: SymbolicNode) - List all the outputs of this node. +List all the outputs of this node. :return: A list of symbols indicating the names of the outputs. -=# +""" function list_outputs(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListOutputs) end -#=doc -.. function:: list_auxiliary_states(self :: SymbolicNode) +""" + list_auxiliary_states(self :: SymbolicNode) - List all auxiliary states in the symbool. +List all auxiliary states in the symbool. - Auxiliary states are special states of symbols that do not corresponds to an argument, - and do not have gradient. But still be useful for the specific operations. - A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. - Most operators do not have Auxiliary states. +Auxiliary states are special states of symbols that do not corresponds to an argument, +and do not have gradient. But still be useful for the specific operations. +A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. +Most operators do not have Auxiliary states. :return: A list of symbols indicating the names of the auxiliary states. -=# +""" function list_auxiliary_states(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListAuxiliaryStates) end -#=doc -.. function:: get_internals(self :: SymbolicNode) +""" + get_internals(self :: SymbolicNode) - Get a new grouped :class:`SymbolicNode` whose output contains all the internal outputs of - this :class:`SymbolicNode`. 
-=# +Get a new grouped :class:`SymbolicNode` whose output contains all the internal outputs of +this :class:`SymbolicNode`. +""" function get_internals(self :: SymbolicNode) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolGetInternals, (MX_handle, Ref{MX_handle}), self, ref_hdr) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -#=doc -.. function:: get_attr(self :: SymbolicNode, key :: Symbol) +""" + get_attr(self :: SymbolicNode, key :: Symbol) - Get attribute attached to this :class:`SymbolicNode` belonging to key. - :return: The value belonging to key as a :class:`Nullable`. -=# +Get attribute attached to this :class:`SymbolicNode` belonging to key. +:return: The value belonging to key as a :class:`Nullable`. +""" function get_attr(self :: SymbolicNode, key :: Symbol) key_s = @compat String(string(key)) ref_out = Ref{Cstring}() @@ -141,12 +135,12 @@ function get_attr(self :: SymbolicNode, key :: Symbol) end end -#=doc -.. function: list_attr(self :: SymbolicNode) +""" + list_attr(self :: SymbolicNode) - Get all attributes from a symbol. - :return: Dictionary of attributes. -=# +Get all attributes from a symbol. +:return: Dictionary of attributes. +""" function list_attr(self :: SymbolicNode) ref_sz = Ref{MX_uint}(0) ref_strings = Ref{char_pp}(0) @@ -163,12 +157,12 @@ function list_attr(self :: SymbolicNode) return out end -#=doc -.. function: list_all_attr(self :: SymbolicNode) +""" + list_all_attr(self :: SymbolicNode) - Get all attributes from the symbol graph. - :return: Dictionary of attributes. -=# +Get all attributes from the symbol graph. +:return: Dictionary of attributes. +""" function list_all_attr(self :: SymbolicNode) ref_sz = Ref{MX_uint}(0) ref_strings = Ref{char_pp}(0) @@ -185,18 +179,17 @@ function list_all_attr(self :: SymbolicNode) return out end -#=doc -.. function:: set_attr(self:: SymbolicNode, key :: Symbol, value :: AbstractString) - - Set the attribute key to value for this :class:`SymbolicNode`. 
+""" + set_attr(self:: SymbolicNode, key :: Symbol, value :: AbstractString) - .. warning:: +Set the attribute key to value for this :class:`SymbolicNode`. - It is encouraged not to call this function directly, unless you know exactly what you are doing. The - recommended way of setting attributes is when creating the :class:`SymbolicNode`. Changing - the attributes of a :class:`SymbolicNode` that is already been used somewhere else might - cause unexpected behavior and inconsistency. -=# +# Warning +It is encouraged not to call this function directly, unless you know exactly what you are doing. The +recommended way of setting attributes is when creating the :class:`SymbolicNode`. Changing +the attributes of a :class:`SymbolicNode` that is already been used somewhere else might +cause unexpected behavior and inconsistency. +""" function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) key_s = @compat String(string(key)) value_s = @compat String(value) @@ -204,14 +197,15 @@ function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) @mxcall(:MXSymbolSetAttr, (MX_handle, Cstring, Cstring), self, key_s, value_s) end -#=doc -.. function:: Variable(name :: Union{Symbol, AbstractString}) +""" + Variable(name :: Union{Symbol, AbstractString}) - Create a symbolic variable with the given name. This is typically used as a placeholder. - For example, the data node, acting as the starting point of a network architecture. +Create a symbolic variable with the given name. This is typically used as a placeholder. +For example, the data node, acting as the starting point of a network architecture. - :param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`Variable`. -=# +# Arguments +* Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`Variable`. 
+""" function Variable(name :: Union{Symbol, AbstractString}; attrs = Dict()) attrs = convert(Dict{Symbol, AbstractString}, attrs) hdr_ref = Ref{MX_handle}(0) @@ -223,11 +217,11 @@ function Variable(name :: Union{Symbol, AbstractString}; attrs = Dict()) node end -#=doc -.. function:: Group(nodes :: SymbolicNode...) +""" + Group(nodes :: SymbolicNode...) - Create a :class:`SymbolicNode` by grouping nodes together. -=# +Create a :class:`SymbolicNode` by grouping nodes together. +""" function Group(nodes :: SymbolicNode...) handles = MX_handle[nodes...] ref_hdr = Ref{MX_handle}(0) @@ -279,20 +273,19 @@ function _infer_shape(self, keys, indptr, sdata) end end -#=doc -.. function:: - infer_shape(self :: SymbolicNode, args...) - infer_shape(self :: SymbolicNode; kwargs...) +""" + infer_shape(self :: SymbolicNode, args...) + infer_shape(self :: SymbolicNode; kwargs...) - Do shape inference according to the input shapes. The input shapes could be provided - as a list of shapes, which should specify the shapes of inputs in the same order as - the arguments returned by :func:`list_arguments`. Alternatively, the shape information - could be specified via keyword arguments. +Do shape inference according to the input shapes. The input shapes could be provided +as a list of shapes, which should specify the shapes of inputs in the same order as +the arguments returned by :func:`list_arguments`. Alternatively, the shape information +could be specified via keyword arguments. - :return: A 3-tuple containing shapes of all the arguments, shapes of all the outputs and - shapes of all the auxiliary variables. If shape inference failed due to incomplete - or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. -=# +:return: A 3-tuple containing shapes of all the arguments, shapes of all the outputs and + shapes of all the auxiliary variables. 
If shape inference failed due to incomplete + or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. +""" function infer_shape(self :: SymbolicNode; kwargs...) sdata = MX_uint[] indptr = MX_uint[0] @@ -348,20 +341,19 @@ function _infer_type(self, keys, arg_type_data) end end -#=doc -.. function:: - infer_type(self :: SymbolicNode; kwargs...) - infer_type(self :: SymbolicNode, args...) +""" + infer_type(self :: SymbolicNode; kwargs...) + infer_type(self :: SymbolicNode, args...) - Do type inference according to the input types. The input types could be provided - as a list of types, which should specify the types of inputs in the same order as - the arguments returned by :func:`list_arguments`. Alternatively, the type information - could be specified via keyword arguments. +Do type inference according to the input types. The input types could be provided +as a list of types, which should specify the types of inputs in the same order as +the arguments returned by :func:`list_arguments`. Alternatively, the type information +could be specified via keyword arguments. - :return: A 3-tuple containing types of all the arguments, types of all the outputs and - types of all the auxiliary variables. If type inference failed due to incomplete - or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. -=# +:return: A 3-tuple containing types of all the arguments, types of all the outputs and + types of all the auxiliary variables. If type inference failed due to incomplete + or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. +""" function infer_type(self :: SymbolicNode; kwargs...) types = Cint[toTypeFlag(x[2]) for x in kwargs] keys = AbstractString[string(x[1]) for x in kwargs] @@ -379,14 +371,13 @@ function infer_type(self :: SymbolicNode, args :: Union{Tuple, Void}...) _infer_type(self, keys, types) end -#=doc -.. 
function:: - getindex(self :: SymbolicNode, idx :: Union{Int, Base.Symbol, AbstractString}) +""" + getindex(self :: SymbolicNode, idx :: Union{Int, Base.Symbol, AbstractString}) - Get a node representing the specified output of this node. The index could be - a symbol or string indicating the name of the output, or a 1-based integer - indicating the index, as in the list of :func:`list_outputs`. -=# +Get a node representing the specified output of this node. The index could be +a symbol or string indicating the name of the output, or a 1-based integer +indicating the index, as in the list of :func:`list_outputs`. +""" function Base.getindex(self :: SymbolicNode, idx :: Union{Base.Symbol, AbstractString}) idx = Symbol(idx) i_idx = find(idx .== list_outputs(self)) @@ -529,54 +520,48 @@ function _compose!(node :: SymbolicNode, name :: Union{Base.Symbol, char_p}, arg return node end -#=doc -.. function:: to_json(self :: SymbolicNode) +""" + to_json(self :: SymbolicNode) - Convert a :class:`SymbolicNode` into a JSON string. -=# +Convert a :class:`SymbolicNode` into a JSON string. +""" function to_json(self :: SymbolicNode) ref_json = Ref{char_p}(0) @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) return @compat String(ref_json[]) end -#=doc -.. function:: from_json(repr :: AbstractString, ::Type{SymbolicNode}) +""" + from_json(repr :: AbstractString, ::Type{SymbolicNode}) - Load a :class:`SymbolicNode` from a JSON string representation. -=# +Load a :class:`SymbolicNode` from a JSON string representation. +""" function from_json(repr :: AbstractString, ::Type{SymbolicNode}) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateFromJSON, (char_p, Ref{MX_handle}), repr, ref_hdr) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -#=doc -.. function:: load(filename :: AbstractString, ::Type{SymbolicNode}) +""" + load(filename :: AbstractString, ::Type{SymbolicNode}) - Load a :class:`SymbolicNode` from a JSON file. 
-=# +Load a :class:`SymbolicNode` from a JSON file. +""" function load(filename :: AbstractString, ::Type{SymbolicNode}) ref_hdr = Ref{MX_handle}(0) @mxcall(:MXSymbolCreateFromFile, (char_p, Ref{MX_handle}), filename, ref_hdr) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -#=doc -.. function:: save(filename :: AbstractString, node :: SymbolicNode) +""" + save(filename :: AbstractString, node :: SymbolicNode) - Save a :class:`SymbolicNode` to a JSON file. -=# +Save a :class:`SymbolicNode` to a JSON file. +""" function save(filename :: AbstractString, node :: SymbolicNode) @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), node, filename) end -#=doc -libmxnet APIs -------------- - -**autogen:EMBED:symbolic-node:EMBED:autogen** -=# ################################################################################ # Atomic SymbolicNode functions dynamically imported from libmxnet ################################################################################ diff --git a/src/visualize.jl b/src/visualize.jl index 3d46b9c38cb3..15f23c6bfffc 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -1,21 +1,16 @@ import JSON -#=doc -Network Visualization -===================== -=# +""" + to_graphviz(network) -#=doc -.. function:: to_graphviz(network) - - :param SymbolicNode network: the network to visualize. - :param AbstractString title: keyword argument, default "Network Visualization", +* SymbolicNode network: the network to visualize. +* AbstractString title: keyword argument, default "Network Visualization", the title of the GraphViz graph. - :param input_shapes: keyword argument, default ``nothing``. If provided, +* input_shapes: keyword argument, default ``nothing``. If provided, will run shape inference and plot with the shape information. Should be either a dictionary of name-shape mapping or an array of shapes. :return: the graph description in GraphViz ``dot`` language. 
-=# +""" function to_graphviz(network :: SymbolicNode; title="Network Visualization", input_shapes=nothing) if !isa(input_shapes, Void) internals = get_internals(network) From d8de74250c059aa2916d596c60b08720063da046 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 13 Jul 2016 02:58:29 +0900 Subject: [PATCH 350/630] improve autogenerated documentation --- src/util.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/util.jl b/src/util.jl index 73c3316fa9f7..13ce10f58a77 100644 --- a/src/util.jl +++ b/src/util.jl @@ -67,12 +67,12 @@ function _format_typestring(typestr :: AbstractString) replace(typestr, r"\bSymbol\b", "SymbolicNode") end function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{char_pp}, arg_descs::Ref{char_pp}, remove_dup::Bool=true) - param_keys = Set{AbstractString}() + param_keys = Set{String}() arg_names = pointer_to_array(arg_names[], narg) arg_types = pointer_to_array(arg_types[], narg) arg_descs = pointer_to_array(arg_descs[], narg) - docstrings = AbstractString[] + docstrings = String[] for i = 1:narg arg_name = @compat String(arg_names[i]) @@ -83,7 +83,7 @@ function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{ch arg_type = _format_typestring(@compat String(arg_types[i])) arg_desc = @compat String(arg_descs[i]) - push!(docstrings, ":param $arg_name: $arg_desc\n:type $arg_name: $arg_type\n\n") + push!(docstrings, "* `$arg_name::$arg_type`: $arg_desc\n") end return join(docstrings, "\n") end From 597c7366b4c023acb67155e03a3c7faab454bf75 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 13 Jul 2016 04:00:20 +0900 Subject: [PATCH 351/630] convert rst to md with pandoc --- docs/src/tutorial/char-lstm.md | 574 ++++++++++++++----------------- docs/src/tutorial/mnist.md | 455 ++++++++++++------------ docs/src/user-guide/faq.md | 8 + docs/src/user-guide/faq.rst | 7 - docs/src/user-guide/install.md | 67 ++++ docs/src/user-guide/install.rst | 56 --- 
docs/src/user-guide/overview.md | 412 ++++++++++++++++++++++ docs/src/user-guide/overview.rst | 376 -------------------- 8 files changed, 980 insertions(+), 975 deletions(-) create mode 100644 docs/src/user-guide/faq.md delete mode 100644 docs/src/user-guide/faq.rst create mode 100644 docs/src/user-guide/install.md delete mode 100644 docs/src/user-guide/install.rst create mode 100644 docs/src/user-guide/overview.md delete mode 100644 docs/src/user-guide/overview.rst diff --git a/docs/src/tutorial/char-lstm.md b/docs/src/tutorial/char-lstm.md index 5b1c348e5568..5c20c8f05830 100644 --- a/docs/src/tutorial/char-lstm.md +++ b/docs/src/tutorial/char-lstm.md @@ -1,368 +1,308 @@ Generating Random Sentence with LSTM RNN ======================================== -This tutorial shows how to train a LSTM (Long short-term memory) RNN (recurrent -neural network) to perform character-level sequence training and prediction. The -original model, usually called ``char-rnn`` is described in `Andrej Karpathy's -blog `_, with -a reference implementation in Torch available `here -`_. - -Because MXNet.jl does not have a specialized model for recurrent neural networks -yet, the example shown here is an implementation of LSTM by using the default -:class:`FeedForward` model via explicitly unfolding over time. We will be using -fixed-length input sequence for training. The code is adapted from the `char-rnn -example for MXNet's Python binding -`_, which -demonstrates how to use low-level :doc:`symbolic APIs ` to -build customized neural network models directly. - -The most important code snippets of this example is shown and explained here. -To see and run the complete code, please refer to the `examples/char-lstm -`_ directory. -You will need to install `Iterators.jl -`_ and `StatsBase.jl -`_ to run this example. +This tutorial shows how to train a LSTM (Long short-term memory) RNN +(recurrent neural network) to perform character-level sequence training +and prediction. 
The original model, usually called `char-rnn` is +described in [Andrej Karpathy's +blog](http://karpathy.github.io/2015/05/21/rnn-effectiveness/), with a +reference implementation in Torch available +[here](https://github.com/karpathy/char-rnn). + +Because MXNet.jl does not have a specialized model for recurrent neural +networks yet, the example shown here is an implementation of LSTM by +using the default FeedForward model via explicitly unfolding over time. +We will be using fixed-length input sequence for training. The code is +adapted from the [char-rnn example for MXNet's Python +binding](https://github.com/dmlc/mxnet/blob/master/example/rnn/char_lstm.ipynb), +which demonstrates how to use low-level +symbolic APIs </api/symbolic-node> to build customized neural +network models directly. + +The most important code snippets of this example is shown and explained +here. To see and run the complete code, please refer to the +[examples/char-lstm](https://github.com/dmlc/MXNet.jl/tree/master/examples/char-lstm) +directory. You will need to install +[Iterators.jl](https://github.com/JuliaLang/Iterators.jl) and +[StatsBase.jl](https://github.com/JuliaStats/StatsBase.jl) to run this +example. LSTM Cells ---------- -Christopher Olah has a `great blog post about LSTM -`_ with beautiful and -clear illustrations. So we will not repeat the definition and explanation of -what an LSTM cell is here. Basically, an LSTM cell takes input ``x``, as well as -previous states (including ``c`` and ``h``), and produce the next states. -We define a helper type to bundle the two state variables together: +Christopher Olah has a [great blog post about +LSTM](http://colah.github.io/posts/2015-08-Understanding-LSTMs/) with +beautiful and clear illustrations. So we will not repeat the definition +and explanation of what an LSTM cell is here. Basically, an LSTM cell +takes input `x`, as well as previous states (including `c` and `h`), and +produce the next states. 
We define a helper type to bundle the two state +variables together: -.. literalinclude:: ../../examples/char-lstm/lstm.jl - :language: julia - :start-after: #--LSTMState - :end-before: #--/LSTMState +Because LSTM weights are shared at every time when we do explicit +unfolding, so we also define a helper type to hold all the weights (and +bias) for an LSTM cell for convenience. -Because LSTM weights are shared at every time when we do explicit unfolding, so -we also define a helper type to hold all the weights (and bias) for an LSTM cell -for convenience. +Note all the variables are of type SymbolicNode. We will construct the +LSTM network as a symbolic computation graph, which is then instantiated +with NDArray for actual computation. -.. literalinclude:: ../../examples/char-lstm/lstm.jl - :language: julia - :start-after: #--LSTMParam - :end-before: #--/LSTMParam +The following figure is stolen (permission requested) from [Christopher +Olah's blog](http://colah.github.io/posts/2015-08-Understanding-LSTMs/), +which illustrate exactly what the code snippet above is doing. -Note all the variables are of type :class:`SymbolicNode`. We will construct the -LSTM network as a symbolic computation graph, which is then instantiated with -:class:`NDArray` for actual computation. +![image](images/LSTM3-chain.png) -.. literalinclude:: ../../examples/char-lstm/lstm.jl - :language: julia - :start-after: #--lstm_cell - :end-before: #--/lstm_cell - -The following figure is stolen (permission requested) from -`Christopher Olah's blog -`_, which illustrate -exactly what the code snippet above is doing. - -.. image:: images/LSTM3-chain.png - -In particular, instead of defining the four gates independently, we do the -computation together and then use :class:`SliceChannel` to split them into four -outputs. The computation of gates are all done with the symbolic API. The return -value is a LSTM state containing the output of a LSTM cell. 
+In particular, instead of defining the four gates independently, we do +the computation together and then use SliceChannel to split them into +four outputs. The computation of gates are all done with the symbolic +API. The return value is a LSTM state containing the output of a LSTM +cell. Unfolding LSTM -------------- -Using the LSTM cell defined above, we are now ready to define a function to -unfold a LSTM network with L layers and T time steps. The first part of the -function is just defining all the symbolic variables for the shared weights and -states. - -The ``embed_W`` is the weights used for character embedding --- i.e. mapping the -one-hot encoded characters into real vectors. The ``pred_W`` and ``pred_b`` are -weights and bias for the final prediction at each time step. - -Then we define the weights for each LSTM cell. Note there is one cell for each -layer, and it will be replicated (unrolled) over time. The states are, however, -*not* shared over time. Instead, here we define the initial states here at the -beginning of a sequence, and we will update them with the output states at each -time step as we explicitly unroll the LSTM. - -.. literalinclude:: ../../examples/char-lstm/lstm.jl - :language: julia - :start-after: #--LSTM-part1 - :end-before: #--/LSTM-part1 - -Unrolling over time is a straightforward procedure of stacking the embedding -layer, and then LSTM cells, on top of which the prediction layer. During -unrolling, we update the states and collect all the outputs. Note each time step -takes data and label as inputs. If the LSTM is named as ``:ptb``, the data and -label at step ``t`` will be named ``:ptb_data_$t`` and ``:ptb_label_$t``. Late -on when we prepare the data, we will define the data provider to match those -names. - -.. 
literalinclude:: ../../examples/char-lstm/lstm.jl - :language: julia - :start-after: #--LSTM-part2 - :end-before: #--/LSTM-part2 - -Note at each time step, the prediction is connected to a :class:`SoftmaxOutput` -operator, which could back propagate when corresponding labels are provided. The -states are then connected to the next time step, which allows back propagate -through time. However, at the end of the sequence, the final states are not -connected to anything. This dangling outputs is problematic, so we explicitly -connect each of them to a :class:`BlockGrad` operator, which simply back -propagates 0-gradient and closes the computation graph. - -In the end, we just group all the prediction outputs at each time step as -a single :class:`SymbolicNode` and return. Optionally we will also group the -final states, this is used when we use the trained LSTM to sample sentences. - -.. literalinclude:: ../../examples/char-lstm/lstm.jl - :language: julia - :start-after: #--LSTM-part3 - :end-before: #--/LSTM-part3 + +Using the LSTM cell defined above, we are now ready to define a function +to unfold a LSTM network with L layers and T time steps. The first part +of the function is just defining all the symbolic variables for the +shared weights and states. + +The `embed_W` is the weights used for character embedding --- i.e. +mapping the one-hot encoded characters into real vectors. The `pred_W` +and `pred_b` are weights and bias for the final prediction at each time +step. + +Then we define the weights for each LSTM cell. Note there is one cell +for each layer, and it will be replicated (unrolled) over time. The +states are, however, *not* shared over time. Instead, here we define the +initial states here at the beginning of a sequence, and we will update +them with the output states at each time step as we explicitly unroll +the LSTM. 
+ +Unrolling over time is a straightforward procedure of stacking the +embedding layer, and then LSTM cells, on top of which the prediction +layer. During unrolling, we update the states and collect all the +outputs. Note each time step takes data and label as inputs. If the LSTM +is named as `:ptb`, the data and label at step `t` will be named +`:ptb_data_$t` and `:ptb_label_$t`. Late on when we prepare the data, we +will define the data provider to match those names. + +Note at each time step, the prediction is connected to a SoftmaxOutput +operator, which could back propagate when corresponding labels are +provided. The states are then connected to the next time step, which +allows back propagate through time. However, at the end of the sequence, +the final states are not connected to anything. This dangling outputs is +problematic, so we explicitly connect each of them to a BlockGrad +operator, which simply back propagates 0-gradient and closes the +computation graph. + +In the end, we just group all the prediction outputs at each time step +as a single SymbolicNode and return. Optionally we will also group the +final states, this is used when we use the trained LSTM to sample +sentences. Data Provider for Text Sequences -------------------------------- -Now we need to construct a data provider that takes a text file, divide the text -into mini-batches of fixed-length character-sequences, and provide them as -one-hot encoded vectors. - -Note the is no fancy feature extraction at all. Each character is simply encoded -as a one-hot vector: a 0-1 vector of the size given by the vocabulary. Here we -just construct the vocabulary by collecting all the unique characters in the -training text -- there are not too many of them (including punctuations and -whitespace) for English text. Each input character is then encoded as a vector -of 0s on all coordinates, and 1 on the coordinate corresponding to that -character. 
The character-to-coordinate mapping is giving by the vocabulary. - -The text sequence data provider implement the :doc:`data provider API -`. We define the ``CharSeqProvider`` as below: - -.. literalinclude:: ../../examples/char-lstm/seq-data.jl - :language: julia - :start-after: #--CharSeqProvider - :end-before: #--/CharSeqProvider - -The provided data and labels follow the naming convention of inputs used when -unrolling the LSTM. Note in the code below, apart from ``$name_data_$t`` and -``$name_label_$t``, we also provides the initial ``c`` and ``h`` states for each -layer. This is because we are using the high-level :class:`FeedForward` API, -which has no idea about time and states. So we will feed the initial states for -each sequence from the data provider. Since the initial states is always zero, -we just need to always provide constant zero blobs. - -.. literalinclude:: ../../examples/char-lstm/seq-data.jl - :language: julia - :start-after: #--provide - :end-before: #--/provide - -Next we implement the :func:`AbstractDataProvider.eachbatch` interface for the provider. -We start by defining the data and label arrays, and the ``DataBatch`` object we -will provide in each iteration. - -.. literalinclude:: ../../examples/char-lstm/seq-data.jl - :language: julia - :start-after: #--eachbatch-part1 - :end-before: #--/eachbatch-part1 - -The actual data providing iteration is implemented as a Julia **coroutine**. In this -way, we can write the data loading logic as a simple coherent ``for`` loop, and -do not need to implement the interface functions like :func:`Base.start`, -:func:`Base.next`, etc. - -Basically, we partition the text into -batches, each batch containing several contiguous text sequences. Note at each -time step, the LSTM is trained to predict the next character, so the label is -the same as the data, but shifted ahead by one index. - -.. 
literalinclude:: ../../examples/char-lstm/seq-data.jl - :language: julia - :start-after: #--eachbatch-part2 - :end-before: #--/eachbatch-part2 - +Now we need to construct a data provider that takes a text file, divide +the text into mini-batches of fixed-length character-sequences, and +provide them as one-hot encoded vectors. + +Note the is no fancy feature extraction at all. Each character is simply +encoded as a one-hot vector: a 0-1 vector of the size given by the +vocabulary. Here we just construct the vocabulary by collecting all the +unique characters in the training text -- there are not too many of them +(including punctuations and whitespace) for English text. Each input +character is then encoded as a vector of 0s on all coordinates, and 1 on +the coordinate corresponding to that character. The +character-to-coordinate mapping is giving by the vocabulary. + +The text sequence data provider implement the data provider API +</api/io>. We define the `CharSeqProvider` as below: + +The provided data and labels follow the naming convention of inputs used +when unrolling the LSTM. Note in the code below, apart from +`$name_data_$t` and `$name_label_$t`, we also provides the initial `c` +and `h` states for each layer. This is because we are using the +high-level FeedForward API, which has no idea about time and states. So +we will feed the initial states for each sequence from the data +provider. Since the initial states is always zero, we just need to +always provide constant zero blobs. + +Next we implement the AbstractDataProvider.eachbatch interface for the +provider. We start by defining the data and label arrays, and the +`DataBatch` object we will provide in each iteration. + +The actual data providing iteration is implemented as a Julia +**coroutine**. In this way, we can write the data loading logic as a +simple coherent `for` loop, and do not need to implement the interface +functions like Base.start, Base.next, etc. 
+ +Basically, we partition the text into batches, each batch containing +several contiguous text sequences. Note at each time step, the LSTM is +trained to predict the next character, so the label is the same as the +data, but shifted ahead by one index. Training the LSTM ----------------- -Now we have implemented all the supporting infrastructures for our char-lstm. -To train the model, we just follow the standard high-level API. Firstly, we -construct a LSTM symbolic architecture: - -.. literalinclude:: ../../examples/char-lstm/train.jl - :language: julia - :start-after: #--LSTM - :end-before: #--/LSTM - -Note all the parameters are defined in `examples/char-lstm/config.jl -`_. -Now we load the text file and define the data provider. The data ``input.txt`` -we used in this example is `a tiny Shakespeare dataset -`_. But you -can try with other text files. - -.. literalinclude:: ../../examples/char-lstm/train.jl - :language: julia - :start-after: #--data - :end-before: #--/data - -The last step is to construct a model, an optimizer and fit the mode to the -data. We are using the :class:`ADAM` optimizer [Adam]_ in this example. - -.. literalinclude:: ../../examples/char-lstm/train.jl - :language: julia - :start-after: #--train - :end-before: #--/train - -Note we are also using a customized ``NLL`` evaluation metric, which calculate -the negative log-likelihood during training. Here is an output sample at the end of -the training process. - -.. code-block:: text - - ... 
- INFO: Speed: 357.72 samples/sec - INFO: == Epoch 020 ========== - INFO: ## Training summary - INFO: NLL = 1.4672 - INFO: perplexity = 4.3373 - INFO: time = 87.2631 seconds - INFO: ## Validation summary - INFO: NLL = 1.6374 - INFO: perplexity = 5.1418 - INFO: Saved checkpoint to 'char-lstm/checkpoints/ptb-0020.params' - INFO: Speed: 368.74 samples/sec - INFO: Speed: 361.04 samples/sec - INFO: Speed: 360.02 samples/sec - INFO: Speed: 362.34 samples/sec - INFO: Speed: 360.80 samples/sec - INFO: Speed: 362.77 samples/sec - INFO: Speed: 357.18 samples/sec - INFO: Speed: 355.30 samples/sec - INFO: Speed: 362.33 samples/sec - INFO: Speed: 359.23 samples/sec - INFO: Speed: 358.09 samples/sec - INFO: Speed: 356.89 samples/sec - INFO: Speed: 371.91 samples/sec - INFO: Speed: 372.24 samples/sec - INFO: Speed: 356.59 samples/sec - INFO: Speed: 356.64 samples/sec - INFO: Speed: 360.24 samples/sec - INFO: Speed: 360.32 samples/sec - INFO: Speed: 362.38 samples/sec - INFO: == Epoch 021 ========== - INFO: ## Training summary - INFO: NLL = 1.4655 - INFO: perplexity = 4.3297 - INFO: time = 86.9243 seconds - INFO: ## Validation summary - INFO: NLL = 1.6366 - INFO: perplexity = 5.1378 - INFO: Saved checkpoint to 'examples/char-lstm/checkpoints/ptb-0021.params' - - -.. [Adam] Diederik Kingma and Jimmy Ba: *Adam: A Method for Stochastic - Optimization*. `arXiv:1412.6980 `_ - [cs.LG]. - +Now we have implemented all the supporting infrastructures for our +char-lstm. To train the model, we just follow the standard high-level +API. Firstly, we construct a LSTM symbolic architecture: + +Note all the parameters are defined in +[examples/char-lstm/config.jl](https://github.com/dmlc/MXNet.jl/blob/master/examples/char-lstm/config.jl). +Now we load the text file and define the data provider. The data +`input.txt` we used in this example is [a tiny Shakespeare +dataset](https://github.com/dmlc/web-data/tree/master/mxnet/tinyshakespeare). +But you can try with other text files. 
+ +The last step is to construct a model, an optimizer and fit the mode to +the data. We are using the ADAM optimizer \[Adam\]\_ in this example. + +Note we are also using a customized `NLL` evaluation metric, which +calculate the negative log-likelihood during training. Here is an output +sample at the end of the training process. + +``` {.sourceCode .text} +... +INFO: Speed: 357.72 samples/sec +INFO: == Epoch 020 ========== +INFO: ## Training summary +INFO: NLL = 1.4672 +INFO: perplexity = 4.3373 +INFO: time = 87.2631 seconds +INFO: ## Validation summary +INFO: NLL = 1.6374 +INFO: perplexity = 5.1418 +INFO: Saved checkpoint to 'char-lstm/checkpoints/ptb-0020.params' +INFO: Speed: 368.74 samples/sec +INFO: Speed: 361.04 samples/sec +INFO: Speed: 360.02 samples/sec +INFO: Speed: 362.34 samples/sec +INFO: Speed: 360.80 samples/sec +INFO: Speed: 362.77 samples/sec +INFO: Speed: 357.18 samples/sec +INFO: Speed: 355.30 samples/sec +INFO: Speed: 362.33 samples/sec +INFO: Speed: 359.23 samples/sec +INFO: Speed: 358.09 samples/sec +INFO: Speed: 356.89 samples/sec +INFO: Speed: 371.91 samples/sec +INFO: Speed: 372.24 samples/sec +INFO: Speed: 356.59 samples/sec +INFO: Speed: 356.64 samples/sec +INFO: Speed: 360.24 samples/sec +INFO: Speed: 360.32 samples/sec +INFO: Speed: 362.38 samples/sec +INFO: == Epoch 021 ========== +INFO: ## Training summary +INFO: NLL = 1.4655 +INFO: perplexity = 4.3297 +INFO: time = 86.9243 seconds +INFO: ## Validation summary +INFO: NLL = 1.6366 +INFO: perplexity = 5.1378 +INFO: Saved checkpoint to 'examples/char-lstm/checkpoints/ptb-0021.params' +``` Sampling Random Sentences ------------------------- -After training the LSTM, we can now sample random sentences from the trained -model. The sampler works in the following way: - -- Starting from some fixed character, take ``a`` for example, and feed it as input to the LSTM. -- The LSTM will produce an output distribution over the vocabulary and a state - in the first time step. 
We sample a character from the output distribution, - fix it as the second character. -- In the next time step, we feed the previously sampled character as input and - continue running the LSTM by also taking the previous states (instead of the - 0 initial states). -- Continue running until we sampled enough characters. +After training the LSTM, we can now sample random sentences from the +trained model. The sampler works in the following way: -Note we are running with mini-batches, so several sentences could be sampled -simultaneously. Here are some sampled outputs from a network I trained for -around half an hour on the Shakespeare dataset. Note all the line-breaks, -punctuations and upper-lower case letters are produced by the sampler itself. -I did not do any post-processing. +- Starting from some fixed character, take `a` for example, and feed + it as input to the LSTM. +- The LSTM will produce an output distribution over the vocabulary and + a state in the first time step. We sample a character from the + output distribution, fix it as the second character. +- In the next time step, we feed the previously sampled character as + input and continue running the LSTM by also taking the previous + states (instead of the 0 initial states). +- Continue running until we sampled enough characters. -.. code-block:: text +Note we are running with mini-batches, so several sentences could be +sampled simultaneously. Here are some sampled outputs from a network I +trained for around half an hour on the Shakespeare dataset. Note all the +line-breaks, punctuations and upper-lower case letters are produced by +the sampler itself. I did not do any post-processing. - ## Sample 1 - all have sir, - Away will fill'd in His time, I'll keep her, do not madam, if they here? Some more ha? +``` {.sourceCode .text} +## Sample 1 +all have sir, +Away will fill'd in His time, I'll keep her, do not madam, if they here? Some more ha? - ## Sample 2 - am. +## Sample 2 +am. 
- CLAUDIO: - Hone here, let her, the remedge, and I know not slept a likely, thou some soully free? +CLAUDIO: +Hone here, let her, the remedge, and I know not slept a likely, thou some soully free? - ## Sample 3 - arrel which noble thing - The exchnachsureding worns: I ne'er drunken Biancas, fairer, than the lawfu? +## Sample 3 +arrel which noble thing +The exchnachsureding worns: I ne'er drunken Biancas, fairer, than the lawfu? - ## Sample 4 - augh assalu, you'ld tell me corn; - Farew. First, for me of a loved. Has thereat I knock you presents? +## Sample 4 +augh assalu, you'ld tell me corn; +Farew. First, for me of a loved. Has thereat I knock you presents? - ## Sample 5 - ame the first answer. +## Sample 5 +ame the first answer. - MARIZARINIO: - Door of Angelo as her lord, shrield liken Here fellow the fool ? +MARIZARINIO: +Door of Angelo as her lord, shrield liken Here fellow the fool ? - ## Sample 6 - ad well. +## Sample 6 +ad well. - CLAUDIO: - Soon him a fellows here; for her fine edge in a bogms' lord's wife. +CLAUDIO: +Soon him a fellows here; for her fine edge in a bogms' lord's wife. - LUCENTIO: - I? +LUCENTIO: +I? - ## Sample 7 - adrezilian measure. +## Sample 7 +adrezilian measure. - LUCENTIO: - So, help'd you hath nes have a than dream's corn, beautio, I perchas? +LUCENTIO: +So, help'd you hath nes have a than dream's corn, beautio, I perchas? - ## Sample 8 - as eatter me; - The girlly: and no other conciolation! +## Sample 8 +as eatter me; +The girlly: and no other conciolation! - BISTRUMIO: - I have be rest girl. O, that I a h? +BISTRUMIO: +I have be rest girl. O, that I a h? - ## Sample 9 - and is intend you sort: - What held her all 'clama's for maffice. Some servant.' what I say me the cu? +## Sample 9 +and is intend you sort: +What held her all 'clama's for maffice. Some servant.' what I say me the cu? - ## Sample 10 - an thoughts will said in our pleasue, - Not scanin on him that you live; believaries she. 
+## Sample 10 +an thoughts will said in our pleasue, +Not scanin on him that you live; believaries she. - ISABELLLLL? +ISABELLLLL? +``` -See `Andrej Karpathy's blog post -`_ on more examples and -links including Linux source codes, Algebraic Geometry Theorems, and even -cooking recipes. The code for sampling can be found in -`examples/char-lstm/sampler.jl -`_. +See [Andrej Karpathy's blog +post](http://karpathy.github.io/2015/05/21/rnn-effectiveness/) on more +examples and links including Linux source codes, Algebraic Geometry +Theorems, and even cooking recipes. The code for sampling can be found +in +[examples/char-lstm/sampler.jl](https://github.com/dmlc/MXNet.jl/blob/master/examples/char-lstm/sampler.jl). Visualizing the LSTM -------------------- -Finally, you could visualize the LSTM by calling :func:`to_graphviz` on the -constructed LSTM symbolic architecture. We only show an example of 1-layer and -2-time-step LSTM below. The automatic layout produced by GraphViz is definitely -much less clear than `Christopher Olah's illustrations -`_, but could -otherwise be very useful for debugging. As we can see, the LSTM unfolded over -time is just a (very) deep neural network. The complete code for producing this -visualization can be found in `examples/char-lstm/visualize.jl -`_. - -.. image:: images/char-lstm-vis.svg +Finally, you could visualize the LSTM by calling to\_graphviz on the +constructed LSTM symbolic architecture. We only show an example of +1-layer and 2-time-step LSTM below. The automatic layout produced by +GraphViz is definitely much less clear than [Christopher Olah's +illustrations](http://colah.github.io/posts/2015-08-Understanding-LSTMs/), +but could otherwise be very useful for debugging. As we can see, the +LSTM unfolded over time is just a (very) deep neural network. 
The +complete code for producing this visualization can be found in +[examples/char-lstm/visualize.jl](https://github.com/dmlc/MXNet.jl/blob/master/examples/char-lstm/visualize.jl). + +![image](images/char-lstm-vis.svg) diff --git a/docs/src/tutorial/mnist.md b/docs/src/tutorial/mnist.md index fc2e548dd1c2..b4e7a3be89ee 100644 --- a/docs/src/tutorial/mnist.md +++ b/docs/src/tutorial/mnist.md @@ -1,253 +1,270 @@ Digit Recognition on MNIST ========================== -In this tutorial, we will work through examples of training a simple multi-layer -perceptron and then a convolutional neural network (the LeNet architecture) on -the `MNIST handwritten digit dataset `_. The -code for this tutorial could be found in `examples/mnist -`_. +In this tutorial, we will work through examples of training a simple +multi-layer perceptron and then a convolutional neural network (the +LeNet architecture) on the [MNIST handwritten digit +dataset](http://yann.lecun.com/exdb/mnist/). The code for this tutorial +could be found in +[examples/mnist](https://github.com/dmlc/MXNet.jl/tree/master/examples/mnist). Simple 3-layer MLP ------------------ -This is a tiny 3-layer MLP that could be easily trained on CPU. The script starts with +This is a tiny 3-layer MLP that could be easily trained on CPU. The +script starts with -.. code-block:: julia +``` {.sourceCode .julia} +using MXNet +``` - using MXNet +to load the `MXNet` module. Then we are ready to define the network +architecture via the symbolic API </user-guide/overview>. We start +with a placeholder `data` symbol, -to load the ``MXNet`` module. Then we are ready to define the network -architecture via the :doc:`symbolic API `. We start with -a placeholder ``data`` symbol, - -.. code-block:: julia - - data = mx.Variable(:data) +``` {.sourceCode .julia} +data = mx.Variable(:data) +``` and then cascading fully-connected layers and activation functions: -.. 
code-block:: julia - - fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) - act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) - fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) - act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) - fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) - -Note each composition we take the previous symbol as the `data` argument, forming a feedforward chain. The architecture looks like - -.. code-block:: julia - - Input --> 128 units (ReLU) --> 64 units (ReLU) --> 10 units - -where the last 10 units correspond to the 10 output classes (digits 0,...,9). We -then add a final :class:`SoftmaxOutput` operation to turn the 10-dimensional prediction to proper probability values for the 10 classes: - -.. code-block:: julia - - mlp = mx.SoftmaxOutput(data = fc3, name=:softmax) - -As we can see, the MLP is just a chain of layers. For this case, we can also use -the ``mx.chain`` macro. The same architecture above can be defined as - -.. code-block:: julia - - mlp = @mx.chain mx.Variable(:data) => - mx.FullyConnected(name=:fc1, num_hidden=128) => - mx.Activation(name=:relu1, act_type=:relu) => - mx.FullyConnected(name=:fc2, num_hidden=64) => - mx.Activation(name=:relu2, act_type=:relu) => - mx.FullyConnected(name=:fc3, num_hidden=10) => - mx.SoftmaxOutput(name=:softmax) - -After defining the architecture, we are ready to load the MNIST data. MXNet.jl -provide built-in data providers for the MNIST dataset, which could automatically -download the dataset into ``Pkg.dir("MXNet")/data/mnist`` if necessary. We wrap -the code to construct the data provider into ``mnist-data.jl`` so that it could be shared by both the MLP example and the LeNet ConvNets example. - -.. 
code-block:: julia - - batch_size = 100 - include("mnist-data.jl") - train_provider, eval_provider = get_mnist_providers(batch_size) - -If you need to write your own data providers for customized data format, please -refer to :class:`AbstractDataProvider`. - -Given the architecture and data, we can instantiate an *model* to do the actual -training. ``mx.FeedForward`` is the built-in model that is suitable for most feed-forward architectures. When constructing the model, we also specify the *context* on which the computation should be carried out. Because this is a really tiny MLP, we will just run on a single CPU device. - -.. code-block:: julia - - model = mx.FeedForward(mlp, context=mx.cpu()) - -You can use a ``mx.gpu()`` or if a list of devices (e.g. ``[mx.gpu(0), -mx.gpu(1)]``) is provided, data-parallelization will be used automatically. But for this tiny example, using a GPU device might not help. - -The last thing we need to specify is the optimization algorithm (a.k.a. *optimizer*) to use. We use the basic SGD with a fixed learning rate 0.1 and momentum 0.9: - -.. code-block:: julia - - optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) - -Now we can do the training. Here the ``n_epoch`` parameter specifies that we -want to train for 20 epochs. We also supply a ``eval_data`` to monitor validation accuracy on the validation set. - -.. code-block:: julia - - mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +``` {.sourceCode .julia} +fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) +act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) +fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) +act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) +fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) +``` + +Note each composition we take the previous symbol as the data argument, +forming a feedforward chain. 
The architecture looks like + +``` {.sourceCode .julia} +Input --> 128 units (ReLU) --> 64 units (ReLU) --> 10 units +``` + +where the last 10 units correspond to the 10 output classes (digits +0,...,9). We then add a final SoftmaxOutput operation to turn the +10-dimensional prediction to proper probability values for the 10 +classes: + +``` {.sourceCode .julia} +mlp = mx.SoftmaxOutput(data = fc3, name=:softmax) +``` + +As we can see, the MLP is just a chain of layers. For this case, we can +also use the `mx.chain` macro. The same architecture above can be +defined as + +``` {.sourceCode .julia} +mlp = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=128) => + mx.Activation(name=:relu1, act_type=:relu) => + mx.FullyConnected(name=:fc2, num_hidden=64) => + mx.Activation(name=:relu2, act_type=:relu) => + mx.FullyConnected(name=:fc3, num_hidden=10) => + mx.SoftmaxOutput(name=:softmax) +``` + +After defining the architecture, we are ready to load the MNIST data. +MXNet.jl provide built-in data providers for the MNIST dataset, which +could automatically download the dataset into +`Pkg.dir("MXNet")/data/mnist` if necessary. We wrap the code to +construct the data provider into `mnist-data.jl` so that it could be +shared by both the MLP example and the LeNet ConvNets example. + +``` {.sourceCode .julia} +batch_size = 100 +include("mnist-data.jl") +train_provider, eval_provider = get_mnist_providers(batch_size) +``` + +If you need to write your own data providers for customized data format, +please refer to AbstractDataProvider. + +Given the architecture and data, we can instantiate an *model* to do the +actual training. `mx.FeedForward` is the built-in model that is suitable +for most feed-forward architectures. When constructing the model, we +also specify the *context* on which the computation should be carried +out. Because this is a really tiny MLP, we will just run on a single CPU +device. 
+ +``` {.sourceCode .julia} +model = mx.FeedForward(mlp, context=mx.cpu()) +``` + +You can use a `mx.gpu()` or if a list of devices (e.g. +`[mx.gpu(0), mx.gpu(1)]`) is provided, data-parallelization will be used +automatically. But for this tiny example, using a GPU device might not +help. + +The last thing we need to specify is the optimization algorithm (a.k.a. +*optimizer*) to use. We use the basic SGD with a fixed learning rate 0.1 +and momentum 0.9: + +``` {.sourceCode .julia} +optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) +``` + +Now we can do the training. Here the `n_epoch` parameter specifies that +we want to train for 20 epochs. We also supply a `eval_data` to monitor +validation accuracy on the validation set. + +``` {.sourceCode .julia} +mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +``` Here is a sample output -.. code-block:: text - - INFO: Start training on [CPU0] - INFO: Initializing parameters... - INFO: Creating KVStore... - INFO: == Epoch 001 ========== - INFO: ## Training summary - INFO: :accuracy = 0.7554 - INFO: time = 1.3165 seconds - INFO: ## Validation summary - INFO: :accuracy = 0.9502 - ... - INFO: == Epoch 020 ========== - INFO: ## Training summary - INFO: :accuracy = 0.9949 - INFO: time = 0.9287 seconds - INFO: ## Validation summary - INFO: :accuracy = 0.9775 - +``` {.sourceCode .text} +INFO: Start training on [CPU0] +INFO: Initializing parameters... +INFO: Creating KVStore... +INFO: == Epoch 001 ========== +INFO: ## Training summary +INFO: :accuracy = 0.7554 +INFO: time = 1.3165 seconds +INFO: ## Validation summary +INFO: :accuracy = 0.9502 +... +INFO: == Epoch 020 ========== +INFO: ## Training summary +INFO: :accuracy = 0.9949 +INFO: time = 0.9287 seconds +INFO: ## Validation summary +INFO: :accuracy = 0.9775 +``` Convolutional Neural Networks ----------------------------- -In the second example, we show a slightly more complicated architecture that -involves convolution and pooling. 
This architecture for the MNIST is usually -called the [LeNet]_. The first part of the architecture is listed below: - -.. code-block:: julia +In the second example, we show a slightly more complicated architecture +that involves convolution and pooling. This architecture for the MNIST +is usually called the \[LeNet\]\_. The first part of the architecture is +listed below: - # input - data = mx.Variable(:data) +``` {.sourceCode .julia} +# input +data = mx.Variable(:data) - # first conv - conv1 = @mx.chain mx.Convolution(data=data, kernel=(5,5), num_filter=20) => - mx.Activation(act_type=:tanh) => - mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) +# first conv +conv1 = @mx.chain mx.Convolution(data=data, kernel=(5,5), num_filter=20) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) - # second conv - conv2 = @mx.chain mx.Convolution(data=conv1, kernel=(5,5), num_filter=50) => - mx.Activation(act_type=:tanh) => - mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) +# second conv +conv2 = @mx.chain mx.Convolution(data=conv1, kernel=(5,5), num_filter=50) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) +``` We basically defined two convolution modules. Each convolution module is -actually a chain of ``Convolution``, ``tanh`` activation and then max ``Pooling`` operations. - -Each sample in the MNIST dataset is a 28x28 single-channel grayscale image. In -the tensor format used by ``NDArray``, a batch of 100 samples is a tensor of -shape ``(28,28,1,100)``. The convolution and pooling operates in the spatial -axis, so ``kernel=(5,5)`` indicate a square region of 5-width and 5-height. -The rest of the architecture follows as: - -.. 
code-block:: julia - - # first fully-connected - fc1 = @mx.chain mx.Flatten(data=conv2) => - mx.FullyConnected(num_hidden=500) => - mx.Activation(act_type=:tanh) - - # second fully-connected - fc2 = mx.FullyConnected(data=fc1, num_hidden=10) - - # softmax loss - lenet = mx.Softmax(data=fc2, name=:softmax) - -Note a fully-connected operator expects the input to be a matrix. However, the -results from spatial convolution and pooling are 4D tensors. So we explicitly -used a ``Flatten`` operator to flat the tensor, before connecting it to the -``FullyConnected`` operator. - -The rest of the network is the same as the previous MLP example. As before, we can now load the MNIST dataset: - -.. code-block:: julia - - batch_size = 100 - include("mnist-data.jl") - train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) - -Note we specified ``flat=false`` to tell the data provider to provide 4D tensors instead of 2D matrices because the convolution operators needs correct spatial shape information. We then construct a feedforward model on GPU, and train it. - -.. code-block:: julia - - #-------------------------------------------------------------------------------- - # fit model - model = mx.FeedForward(lenet, context=mx.gpu()) - - # optimizer - optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) - - # fit parameters - mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +actually a chain of `Convolution`, `tanh` activation and then max +`Pooling` operations. + +Each sample in the MNIST dataset is a 28x28 single-channel grayscale +image. In the tensor format used by `NDArray`, a batch of 100 samples is +a tensor of shape `(28,28,1,100)`. The convolution and pooling operates +in the spatial axis, so `kernel=(5,5)` indicate a square region of +5-width and 5-height. 
The rest of the architecture follows as: + +``` {.sourceCode .julia} +# first fully-connected +fc1 = @mx.chain mx.Flatten(data=conv2) => + mx.FullyConnected(num_hidden=500) => + mx.Activation(act_type=:tanh) + +# second fully-connected +fc2 = mx.FullyConnected(data=fc1, num_hidden=10) + +# softmax loss +lenet = mx.Softmax(data=fc2, name=:softmax) +``` + +Note a fully-connected operator expects the input to be a matrix. +However, the results from spatial convolution and pooling are 4D +tensors. So we explicitly used a `Flatten` operator to flat the tensor, +before connecting it to the `FullyConnected` operator. + +The rest of the network is the same as the previous MLP example. As +before, we can now load the MNIST dataset: + +``` {.sourceCode .julia} +batch_size = 100 +include("mnist-data.jl") +train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) +``` + +Note we specified `flat=false` to tell the data provider to provide 4D +tensors instead of 2D matrices because the convolution operators needs +correct spatial shape information. We then construct a feedforward model +on GPU, and train it. + +``` {.sourceCode .julia} +#-------------------------------------------------------------------------------- +# fit model +model = mx.FeedForward(lenet, context=mx.gpu()) + +# optimizer +optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) + +# fit parameters +mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) +``` And here is a sample of running outputs: -.. code-block:: text - - INFO: == Epoch 001 ========== - INFO: ## Training summary - INFO: :accuracy = 0.6750 - INFO: time = 4.9814 seconds - INFO: ## Validation summary - INFO: :accuracy = 0.9712 - ... - INFO: == Epoch 020 ========== - INFO: ## Training summary - INFO: :accuracy = 1.0000 - INFO: time = 4.0086 seconds - INFO: ## Validation summary - INFO: :accuracy = 0.9915 - - -.. 
[LeNet] Lecun, Y.; Bottou, L.; Bengio, Y.; Haffner, P., - *Gradient-based learning applied to document recognition*, - Proceedings of the IEEE, vol.86, no.11, pp.2278-2324, - Nov 1998. +``` {.sourceCode .text} +INFO: == Epoch 001 ========== +INFO: ## Training summary +INFO: :accuracy = 0.6750 +INFO: time = 4.9814 seconds +INFO: ## Validation summary +INFO: :accuracy = 0.9712 +... +INFO: == Epoch 020 ========== +INFO: ## Training summary +INFO: :accuracy = 1.0000 +INFO: time = 4.0086 seconds +INFO: ## Validation summary +INFO: :accuracy = 0.9915 +``` Predicting with a trained model ------------------------------- -Predicting with a trained model is very simple. By calling ``mx.predict`` with the -model and a data provider, we get the model output as a Julia Array: - -.. code-block:: julia - - probs = mx.predict(model, eval_provider) - -The following code shows a stupid way of getting all the labels from the data -provider, and compute the prediction accuracy manually: - -.. code-block:: julia - - # collect all labels from eval data - labels = Array[] - for batch in eval_provider - push!(labels, copy(mx.get_label(batch))) - end - labels = cat(1, labels...) - - # Now we use compute the accuracy - correct = 0 - for i = 1:length(labels) - # labels are 0...9 - if indmax(probs[:,i]) == labels[i]+1 - correct += 1 - end - end - println(mx.format("Accuracy on eval set: {1:.2f}%", 100correct/length(labels))) +Predicting with a trained model is very simple. By calling `mx.predict` +with the model and a data provider, we get the model output as a Julia +Array: + +``` {.sourceCode .julia} +probs = mx.predict(model, eval_provider) +``` + +The following code shows a stupid way of getting all the labels from the +data provider, and compute the prediction accuracy manually: + +``` {.sourceCode .julia} +# collect all labels from eval data +labels = Array[] +for batch in eval_provider + push!(labels, copy(mx.get_label(batch))) +end +labels = cat(1, labels...) 
+ +# Now we use compute the accuracy +correct = 0 +for i = 1:length(labels) + # labels are 0...9 + if indmax(probs[:,i]) == labels[i]+1 + correct += 1 + end +end +println(mx.format("Accuracy on eval set: {1:.2f}%", 100correct/length(labels))) +``` Alternatively, when the dataset is huge, one can provide a callback to -``mx.predict``, then the callback function will be invoked with the outputs of -each mini-batch. The callback could, for example, write the data to disk for -future inspection. In this case, no value is returned from ``mx.predict``. See -also :func:`predict`. +`mx.predict`, then the callback function will be invoked with the +outputs of each mini-batch. The callback could, for example, write the +data to disk for future inspection. In this case, no value is returned +from `mx.predict`. See also predict. diff --git a/docs/src/user-guide/faq.md b/docs/src/user-guide/faq.md new file mode 100644 index 000000000000..8fd8a6b34551 --- /dev/null +++ b/docs/src/user-guide/faq.md @@ -0,0 +1,8 @@ +FAQ +=== + +Running MXNet on AWS GPU instances +---------------------------------- + +See the discussions and notes +[here](https://github.com/dmlc/MXNet.jl/issues/43). diff --git a/docs/src/user-guide/faq.rst b/docs/src/user-guide/faq.rst deleted file mode 100644 index 602c8ab9fda5..000000000000 --- a/docs/src/user-guide/faq.rst +++ /dev/null @@ -1,7 +0,0 @@ -FAQ -=== - -Running MXNet on AWS GPU instances ----------------------------------- -See the discussions and notes `here -`_. diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md new file mode 100644 index 000000000000..9c66a84204da --- /dev/null +++ b/docs/src/user-guide/install.md @@ -0,0 +1,67 @@ +Installation Guide +================== + +Automatic Installation +---------------------- + +To install MXNet.jl, simply type + +``` {.sourceCode .julia} +Pkg.add("MXNet") +``` + +in the Julia REPL. 
Or to use the latest git version of MXNet.jl, use the
+following command instead
+
+``` {.sourceCode .julia}
+Pkg.checkout("MXNet")
+```
+
+MXNet.jl is built on top of [libmxnet](https://github.com/dmlc/mxnet).
+Upon installation, Julia will try to automatically download and build
+libmxnet.
+
+The libmxnet source is downloaded to `Pkg.dir("MXNet")/deps/src/mxnet`.
+The automatic build is using default configurations, with OpenCV, CUDA
+disabled. If the compilation failed due to unresolved dependency, or if
+you want to customize the build, it is recommended to compile and
+install libmxnet manually. Please see the Manual Compilation section
+below for more details.
+
+Manual Compilation
+------------------
+
+It is possible to compile libmxnet separately and point MXNet.jl to
+an existing library in case automatic compilation fails due to
+unresolved dependencies in a non-standard environment, or when one wants
+to work with a separate, maybe customized libmxnet.
+
+To build libmxnet, please refer to [the installation guide of
+libmxnet](http://mxnet.readthedocs.org/en/latest/build.html). After
+successfully installing libmxnet, set the `MXNET_HOME` environment
+variable to the location of libmxnet. In other words, the compiled
+`libmxnet.so` should be found in `$MXNET_HOME/lib`.
+
+> **note**
+>
+> The constant `MXNET_HOME` is pre-compiled in MXNet.jl package cache.
+> If you updated the environment variable after installing MXNet.jl,
+> make sure to update the pre-compilation cache by
+> `Base.compilecache("MXNet")`.
+
+When the `MXNET_HOME` environment variable is detected and the
+corresponding `libmxnet.so` could be loaded successfully, MXNet.jl will
+skip automatic building during installation and use the specified
+libmxnet instead. 
+ +Basically, MXNet.jl will search `libmxnet.so` or `libmxnet.dll` in the +following paths (and in that order): + +- `$MXNET_HOME/lib`: customized libmxnet builds +- `Pkg.dir("MXNet")/deps/usr/lib`: automatic builds +- Any system wide library search path + +Note that MXNet.jl will not find `libmxnet.so` even if it is on one of +the paths above if a library it depends upon is missing from the +`LD_LIBRARY_PATH`. Thus, if you are going to compile to add CUDA, the +path to the CUDA libraries will have to be added to `LD_LIBRARY_PATH`. diff --git a/docs/src/user-guide/install.rst b/docs/src/user-guide/install.rst deleted file mode 100644 index d53830a0cb37..000000000000 --- a/docs/src/user-guide/install.rst +++ /dev/null @@ -1,56 +0,0 @@ -Installation Guide -================== - -Automatic Installation ----------------------- - -To install MXNet.jl, simply type - -.. code-block:: julia - - Pkg.add("MXNet") - -in the Julia REPL. Or to use the latest git version of MXNet.jl, use the following command instead - -.. code-block:: julia - - Pkg.checkout("MXNet") - -MXNet.jl is built on top of `libmxnet -`_. Upon installation, Julia will try to -automatically download and build libmxnet. - -The libmxnet source is downloaded to ``Pkg.dir("MXNet")/deps/src/mxnet``. The -automatic build is using default configurations, with OpenCV, CUDA disabled. -If the compilation failed due to unresolved dependency, or if you want to -customize the build, it is recommended to compile and install libmxnet manually. -Please see :ref:`below ` for more details. - - -Manual Compilation ------------------- - -It is possible to compile libmxnet separately and point MXNet.jl to a the existing library in case automatic compilation fails due to unresolved dependencies in an un-standard environment; Or when one want to work with a seperate, maybe customized libmxnet. - -To build libmxnet, please refer to `the installation guide of libmxnet -`_. 
After successfully -installing libmxnet, set the ``MXNET_HOME`` environment variable to the location -of libmxnet. In other words, the compiled ``libmxnet.so`` should be found in -``$MXNET_HOME/lib``. - -.. note:: - - The constant ``MXNET_HOME`` is pre-compiled in MXNet.jl package cache. If you - updated the environment variable after installing MXNet.jl, make sure to - update the pre-compilation cache by ``Base.compilecache("MXNet")``. - -When the ``MXNET_HOME`` environment variable is detected and the corresponding -``libmxnet.so`` could be loaded successfully, MXNet.jl will skip automatic building during installation and use the specified libmxnet instead. - -Basically, MXNet.jl will search ``libmxnet.so`` or ``libmxnet.dll`` in the following paths (and in that order): - -* ``$MXNET_HOME/lib``: customized libmxnet builds -* ``Pkg.dir("MXNet")/deps/usr/lib``: automatic builds -* Any system wide library search path - -Note that MXNet.jl will not find ``libmxnet.so`` even if it is on one of the paths above if a library it depends upon is missing from the ``LD_LIBRARY_PATH``. Thus, if you are going to compile to add CUDA, the path to the CUDA libraries will have to be added to ``LD_LIBRARY_PATH``. diff --git a/docs/src/user-guide/overview.md b/docs/src/user-guide/overview.md new file mode 100644 index 000000000000..6197fc417a7e --- /dev/null +++ b/docs/src/user-guide/overview.md @@ -0,0 +1,412 @@ +Overview +======== + +MXNet.jl Namespace +------------------ + +Most the functions and types in MXNet.jl are organized in a flat +namespace. Because many some functions are conflicting with existing +names in the Julia Base module, we wrap them all in a `mx` module. 
The +convention of accessing the MXNet.jl interface is the to use the `mx.` +prefix explicitly: + +``` {.sourceCode .julia} +using MXNet + +x = mx.zeros(2,3) # MXNet NDArray +y = zeros(eltype(x), size(x)) # Julia Array +copy!(y, x) # Overloaded function in Julia Base +z = mx.ones(size(x), mx.gpu()) # MXNet NDArray on GPU +mx.copy!(z, y) # Same as copy!(z, y) +``` + +Note functions like `size`, `copy!` that is extensively overloaded for +various types works out of the box. But functions like `zeros` and +`ones` will be ambiguous, so we always use the `mx.` prefix. If you +prefer, the `mx.` prefix can be used explicitly for all MXNet.jl +functions, including `size` and `copy!` as shown in the last line. + +Low Level Interface +------------------- + +### NDArrays + +NDArray is the basic building blocks of the actual computations in +MXNet. It is like a Julia `Array` object, with some important +differences listed here: + +- The actual data could live on different `Context` (e.g. GPUs). For + some contexts, iterating into the elements one by one is very slow, + thus indexing into NDArray is not supported in general. The easiest + way to inspect the contents of an NDArray is to use the `copy` + function to copy the contents as a Julia `Array`. +- Operations on NDArray (including basic arithmetics and neural + network related operators) are executed in parallel with automatic + dependency tracking to ensure correctness. +- There is no generics in NDArray, the `eltype` is always + `mx.MX_float`. Because for applications in machine learning, single + precision floating point numbers are typical a best choice balancing + between precision, speed and portability. Also since libmxnet is + designed to support multiple languages as front-ends, it is much + simpler to implement with a fixed data type. + +While most of the computation is hidden in libmxnet by operators +corresponding to various neural network layers. 
Getting familiar with +the NDArray API is useful for implementing `Optimizer` or customized +operators in Julia directly. + +The followings are common ways to create NDArray objects: + +- `mx.empty(shape[, context])`: create on uninitialized array of a + given shape on a specific device. For example, + `` mx.empty(2,3)`, `mx.((2,3), mx.gpu(2)) ``. +- `mx.zeros(shape[, context])` and `mx.ones(shape[, context])`: + similar to the Julia's built-in `zeros` and `ones`. +- `mx.copy(jl_arr, context)`: copy the contents of a Julia `Array` to + a specific device. + +Most of the convenient functions like `size`, `length`, `ndims`, +`eltype` on array objects should work out-of-the-box. Although indexing +is not supported, it is possible to take *slices*: + +``` {.sourceCode .julia} +a = mx.ones(2,3) +b = mx.slice(a, 1:2) +b[:] = 2 +println(copy(a)) +# => +# Float32[2.0 2.0 1.0 +# 2.0 2.0 1.0] +``` + +A slice is a sub-region sharing the same memory with the original +NDArray object. A slice is always a contiguous piece of memory, so only +slicing on the *last* dimension is supported. The example above also +shows a way to set the contents of an NDArray. + +``` {.sourceCode .julia} +a = mx.empty(2,3) +a[:] = 0.5 # set all elements to a scalar +a[:] = rand(size(a)) # set contents with a Julia Array +copy!(a, rand(size(a))) # set value by copying a Julia Array +b = mx.empty(size(a)) +b[:] = a # copying and assignment between NDArrays +``` + +Note due to the intrinsic design of the Julia language, a normal +assignment + +``` {.sourceCode .julia} +a = b +``` + +does **not** mean copying the contents of `b` to `a`. Instead, it just +make the variable `a` pointing to a new object, which is `b`. 
Similarly, +inplace arithmetics does not work as expected: + +``` {.sourceCode .julia} +a = mx.ones(2) +r = a # keep a reference to a +b = mx.ones(2) +a += b # translates to a = a + b +println(copy(a)) +# => Float32[2.0f0,2.0f0] +println(copy(r)) +# => Float32[1.0f0,1.0f0] +``` + +As we can see, `a` has expected value, but instead of inplace updating, +a new NDArray is created and `a` is set to point to this new object. If +we look at `r`, which still reference to the old `a`, its content has +not changed. There is currently no way in Julia to overload the +operators like `+=` to get customized behavior. + +Instead, you will need to write `a[:] = a+b`, or if you want *real* +inplace `+=` operation, MXNet.jl provides a simple macro `@mx.inplace`: + +``` {.sourceCode .julia} +@mx.inplace a += b +macroexpand(:(@mx.inplace a += b)) +# => :(MXNet.mx.add_to!(a,b)) +``` + +As we can see, it translate the `+=` operator to an explicit `add_to!` +function call, which invokes into libmxnet to add the contents of `b` +into `a` directly. For example, the following is the update rule in the +SGD `Optimizer` (both `grad` and `weight` are NDArray objects): + +``` {.sourceCode .julia} +@inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) +``` + +Note there is no much magic in `mx.inplace`: it only does a shallow +translation. In the SGD update rule example above, the computation like +scaling the gradient by `grad_scale` and adding the weight decay all +create temporary NDArray objects. To mitigate this issue, libmxnet has a +customized memory allocator designed specifically to handle this kind of +situations. The following snippet does a simple benchmark on allocating +temp NDArray vs. 
pre-allocating: + +``` {.sourceCode .julia} +using Benchmark +using MXNet + +N_REP = 1000 +SHAPE = (128, 64) +CTX = mx.cpu() +LR = 0.1 + +function inplace_op() + weight = mx.zeros(SHAPE, CTX) + grad = mx.ones(SHAPE, CTX) + + # pre-allocate temp objects + grad_lr = mx.empty(SHAPE, CTX) + + for i = 1:N_REP + copy!(grad_lr, grad) + @mx.inplace grad_lr .*= LR + @mx.inplace weight -= grad_lr + end + return weight +end + +function normal_op() + weight = mx.zeros(SHAPE, CTX) + grad = mx.ones(SHAPE, CTX) + + for i = 1:N_REP + weight[:] -= LR * grad + end + return weight +end + +# make sure the results are the same +@assert(maximum(abs(copy(normal_op() - inplace_op()))) < 1e-6) + +println(compare([inplace_op, normal_op], 100)) +``` + +The comparison on my laptop shows that `normal_op` while allocating a +lot of temp NDArray in the loop (the performance gets worse when +increasing `N_REP`), is only about twice slower than the pre-allocated +one. + + Row Function Average Relative Replications + ------ --------------- ------------ ----------- --------------- + 1 "inplace\_op" 0.0074854 1.0 100 + 2 "normal\_op" 0.0174202 2.32723 100 + +So it will usually not be a big problem unless you are at the bottleneck +of the computation. + +### Distributed Key-value Store + +The type `KVStore` and related methods are used for data sharing across +different devices or machines. It provides a simple and efficient +integer - NDArray key-value storage system that each device can pull or +push. + +The following example shows how to create a local `KVStore`, initialize +a value and then pull it back. 
+ +``` {.sourceCode .julia} +kv = mx.KVStore(:local) +shape = (2,3) +key = 3 + +mx.init!(kv, key, mx.ones(shape)*2) +a = mx.empty(shape) +mx.pull!(kv, key, a) # pull value into a +println(copy(a)) +# => +# Float32[2.0 2.0 2.0 +# 2.0 2.0 2.0] +``` + +Intermediate Level Interface +---------------------------- + +### Symbols and Composition + +The way we build deep learning models in MXNet.jl is to use the powerful +symbolic composition system. It is like +[Theano](http://deeplearning.net/software/theano/), except that we +avoided long expression compiliation time by providing *larger* neural +network related building blocks to guarantee computation performance. +See also [this +note](http://mxnet.readthedocs.org/en/latest/program_model.html) for the +design and trade-off of the MXNet symbolic composition system. + +The basic type is `mx.Symbol`. The following is a trivial example of +composing two symbols with the `+` operation. + +``` {.sourceCode .julia} +A = mx.Variable(:A) +B = mx.Variable(:B) +C = A + B +``` + +We get a new *symbol* by composing existing *symbols* by some +*operations*. A hierarchical architecture of a deep neural network could +be realized by recursive composition. For example, the following code +snippet shows a simple 2-layer MLP construction, using a hidden layer of +128 units and a ReLU activation function. + +``` {.sourceCode .julia} +net = mx.Variable(:data) +net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) +net = mx.Activation(data=net, name=:relu1, act_type=:relu) +net = mx.FullyConnected(data=net, name=:fc2, num_hidden=64) +net = mx.Softmax(data=net, name=:out) +``` + +Each time we take the previous symbol, and compose with an operation. +Unlike the simple `+` example above, the *operations* here are "bigger" +ones, that correspond to common computation layers in deep neural +networks. + +Each of those operation takes one or more input symbols for composition, +with optional hyper-parameters (e.g. 
`num_hidden`, `act_type`) to +further customize the composition results. + +When applying those operations, we can also specify a `name` for the +result symbol. This is convenient if we want to refer to this symbol +later on. If not supplied, a name will be automatically generated. + +Each symbol takes some arguments. For example, in the `+` case above, to +compute the value of `C`, we will need to know the values of the two +inputs `A` and `B`. For neural networks, the arguments are primarily two +categories: *inputs* and *parameters*. *inputs* are data and labels for +the networks, while *parameters* are typically trainable *weights*, +*bias*, *filters*. + +When composing symbols, their arguments accumulates. We can list all the +arguments by + +``` {.sourceCode .julia} +julia> mx.list_arguments(net) +6-element Array{Symbol,1}: + :data # Input data, name from the first data variable + :fc1_weight # Weights of the fully connected layer named :fc1 + :fc1_bias # Bias of the layer :fc1 + :fc2_weight # Weights of the layer :fc2 + :fc2_bias # Bias of the layer :fc2 + :out_label # Input label, required by the softmax layer named :out +``` + +Note the names of the arguments are generated according to the provided +name for each layer. We can also specify those names explicitly: + +``` {.sourceCode .julia} +net = mx.Variable(:data) +w = mx.Variable(:myweight) +net = mx.FullyConnected(data=data, weight=w, name=:fc1, num_hidden=128) +mx.list_arguments(net) +# => +# 3-element Array{Symbol,1}: +# :data +# :myweight +# :fc1_bias +``` + +The simple fact is that a `Variable` is just a placeholder `mx.Symbol`. +In composition, we can use arbitrary symbols for arguments. 
For example: + +``` {.sourceCode .julia} +net = mx.Variable(:data) +net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) +net2 = mx.Variable(:data2) +net2 = mx.FullyConnected(data=net2, name=:net2, num_hidden=128) +mx.list_arguments(net2) +# => +# 3-element Array{Symbol,1}: +# :data2 +# :net2_weight +# :net2_bias +composed_net = net2(data2=net, name=:composed) +mx.list_arguments(composed_net) +# => +# 5-element Array{Symbol,1}: +# :data +# :fc1_weight +# :fc1_bias +# :net2_weight +# :net2_bias +``` + +Note we use a composed symbol, `net` as the argument `data2` for `net2` +to get a new symbol, which we named `:composed`. It also shows that a +symbol itself is a call-able object, which can be invoked to fill in +missing arguments and get more complicated symbol compositions. + +### Shape Inference + +Given enough information, the shapes of all arguments in a composed +symbol could be inferred automatically. For example, given the input +shape, and some hyper-parameters like `num_hidden`, the shapes for the +weights and bias in a neural network could be inferred. + +``` {.sourceCode .julia} +net = mx.Variable(:data) +net = mx.FullyConnected(data=net, name=:fc1, num_hidden=10) +arg_shapes, out_shapes, aux_shapes = mx.infer_shape(net, data=(10, 64)) +``` + +The returned shapes corresponds to arguments with the same order as +returned by `mx.list_arguments`. The `out_shapes` are shapes for +outputs, and `aux_shapes` can be safely ignored for now. + +``` {.sourceCode .julia} +for (n,s) in zip(mx.list_arguments(net), arg_shapes) + println("$n => $s") +end +# => +# data => (10,64) +# fc1_weight => (10,10) +# fc1_bias => (10,) +for (n,s) in zip(mx.list_outputs(net), out_shapes) + println("$n => $s") +end +# => +# fc1_output => (10,64) +``` + +### Binding and Executing + +In order to execute the computation graph specified a composed symbol, +we will *bind* the free variables to concrete values, specified as +`mx.NDArray`. 
This will create an `mx.Executor` on a given `mx.Context`. +A context describes the computation devices (CPUs, GPUs, etc.) and an +executor will carry out the computation (forward/backward) specified in +the corresponding symbolic composition. + +``` {.sourceCode .julia} +A = mx.Variable(:A) +B = mx.Variable(:B) +C = A .* B +a = mx.ones(3) * 4 +b = mx.ones(3) * 2 +c_exec = mx.bind(C, context=mx.cpu(), args=Dict(:A => a, :B => b)) + +mx.forward(c_exec) +copy(c_exec.outputs[1]) # copy turns NDArray into Julia Array +# => +# 3-element Array{Float32,1}: +# 8.0 +# 8.0 +# 8.0 +``` + +For neural networks, it is easier to use `simple_bind`. By providing the +shape for input arguments, it will perform a shape inference for the +rest of the arguments and create the NDArray automatically. In practice, +the binding and executing steps are hidden under the `Model` interface. + +**TODO** Provide pointers to model tutorial and further details about +binding and symbolic API. + +High Level Interface +-------------------- + +The high level interface include model training and prediction API, etc. diff --git a/docs/src/user-guide/overview.rst b/docs/src/user-guide/overview.rst deleted file mode 100644 index b93de48fed7f..000000000000 --- a/docs/src/user-guide/overview.rst +++ /dev/null @@ -1,376 +0,0 @@ -Overview -======== - -MXNet.jl Namespace ------------------- - -Most the functions and types in MXNet.jl are organized in a flat namespace. -Because many some functions are conflicting with existing names in the Julia -Base module, we wrap them all in a ``mx`` module. The convention of accessing -the MXNet.jl interface is the to use the ``mx.`` prefix explicitly: - -.. 
code-block:: julia - - using MXNet - - x = mx.zeros(2,3) # MXNet NDArray - y = zeros(eltype(x), size(x)) # Julia Array - copy!(y, x) # Overloaded function in Julia Base - z = mx.ones(size(x), mx.gpu()) # MXNet NDArray on GPU - mx.copy!(z, y) # Same as copy!(z, y) - -Note functions like ``size``, ``copy!`` that is extensively overloaded for -various types works out of the box. But functions like ``zeros`` and ``ones`` -will be ambiguous, so we always use the ``mx.`` prefix. If you prefer, the -``mx.`` prefix can be used explicitly for all MXNet.jl functions, including -``size`` and ``copy!`` as shown in the last line. - -Low Level Interface -------------------- - -NDArrays -~~~~~~~~ - -:class:`NDArray` is the basic building blocks of the actual computations in MXNet. It -is like a Julia ``Array`` object, with some important differences listed here: - -* The actual data could live on different ``Context`` (e.g. GPUs). For some - contexts, iterating into the elements one by one is very slow, thus indexing - into :class:`NDArray` is not supported in general. The easiest way to inspect the - contents of an :class:`NDArray` is to use the ``copy`` function to copy the - contents as a Julia ``Array``. -* Operations on :class:`NDArray` (including basic arithmetics and neural network related operators) are executed in parallel with automatic dependency tracking to ensure correctness. -* There is no generics in :class:`NDArray`, the ``eltype`` is always ``mx.MX_float``. Because for applications in machine learning, single precision floating point numbers are typical a best choice balancing between precision, speed and portability. Also since libmxnet is designed to support multiple languages as front-ends, it is much simpler to implement with a fixed data type. - -While most of the computation is hidden in libmxnet by operators corresponding -to various neural network layers. 
Getting familiar with the :class:`NDArray` API is -useful for implementing ``Optimizer`` or customized operators in Julia directly. - -The followings are common ways to create :class:`NDArray` objects: - -* ``mx.empty(shape[, context])``: create on uninitialized array of a given shape - on a specific device. For example, ``mx.empty(2,3)`, `mx.((2,3), mx.gpu(2))``. -* ``mx.zeros(shape[, context])`` and ``mx.ones(shape[, context])``: similar to - the Julia's built-in ``zeros`` and ``ones``. -* ``mx.copy(jl_arr, context)``: copy the contents of a Julia ``Array`` to a specific device. - -Most of the convenient functions like ``size``, ``length``, ``ndims``, ``eltype`` on array objects should work out-of-the-box. Although indexing is not supported, it is possible to take *slices*: - -.. code-block:: julia - - a = mx.ones(2,3) - b = mx.slice(a, 1:2) - b[:] = 2 - println(copy(a)) - # => - # Float32[2.0 2.0 1.0 - # 2.0 2.0 1.0] - -A slice is a sub-region sharing the same memory with the original :class:`NDArray` -object. A slice is always a contiguous piece of memory, so only slicing on the -*last* dimension is supported. The example above also shows a way to set the -contents of an :class:`NDArray`. - -.. code-block:: julia - - a = mx.empty(2,3) - a[:] = 0.5 # set all elements to a scalar - a[:] = rand(size(a)) # set contents with a Julia Array - copy!(a, rand(size(a))) # set value by copying a Julia Array - b = mx.empty(size(a)) - b[:] = a # copying and assignment between NDArrays - -Note due to the intrinsic design of the Julia language, a normal assignment - -.. code-block:: julia - - a = b - -does **not** mean copying the contents of ``b`` to ``a``. Instead, it just make -the variable ``a`` pointing to a new object, which is ``b``. Similarly, inplace arithmetics does not work as expected: - -.. 
code-block:: julia - - a = mx.ones(2) - r = a # keep a reference to a - b = mx.ones(2) - a += b # translates to a = a + b - println(copy(a)) - # => Float32[2.0f0,2.0f0] - println(copy(r)) - # => Float32[1.0f0,1.0f0] - -As we can see, ``a`` has expected value, but instead of inplace updating, a new -:class:`NDArray` is created and ``a`` is set to point to this new object. If we look -at ``r``, which still reference to the old ``a``, its content has not changed. -There is currently no way in Julia to overload the operators like ``+=`` to get customized behavior. - -Instead, you will need to write ``a[:] = a+b``, or if you want *real* inplace -``+=`` operation, MXNet.jl provides a simple macro ``@mx.inplace``: - -.. code-block:: julia - - @mx.inplace a += b - macroexpand(:(@mx.inplace a += b)) - # => :(MXNet.mx.add_to!(a,b)) - -As we can see, it translate the ``+=`` operator to an explicit ``add_to!`` -function call, which invokes into libmxnet to add the contents of ``b`` into -``a`` directly. For example, the following is the update rule in the SGD -``Optimizer`` (both ``grad`` and ``weight`` are :class:`NDArray` objects): - -.. code-block:: julia - - @inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) - -Note there is no much magic in ``mx.inplace``: it only does a shallow -translation. In the SGD update rule example above, the computation like scaling -the gradient by ``grad_scale`` and adding the weight decay all create temporary -:class:`NDArray` objects. To mitigate this issue, libmxnet has a customized memory -allocator designed specifically to handle this kind of situations. The following -snippet does a simple benchmark on allocating temp :class:`NDArray` vs. pre-allocating: - -.. 
code-block:: julia - - using Benchmark - using MXNet - - N_REP = 1000 - SHAPE = (128, 64) - CTX = mx.cpu() - LR = 0.1 - - function inplace_op() - weight = mx.zeros(SHAPE, CTX) - grad = mx.ones(SHAPE, CTX) - - # pre-allocate temp objects - grad_lr = mx.empty(SHAPE, CTX) - - for i = 1:N_REP - copy!(grad_lr, grad) - @mx.inplace grad_lr .*= LR - @mx.inplace weight -= grad_lr - end - return weight - end - - function normal_op() - weight = mx.zeros(SHAPE, CTX) - grad = mx.ones(SHAPE, CTX) - - for i = 1:N_REP - weight[:] -= LR * grad - end - return weight - end - - # make sure the results are the same - @assert(maximum(abs(copy(normal_op() - inplace_op()))) < 1e-6) - - println(compare([inplace_op, normal_op], 100)) - -The comparison on my laptop shows that ``normal_op`` while allocating a lot of -temp :class:`NDArray` in the loop (the performance gets worse when increasing -``N_REP``), is only about twice slower than the pre-allocated one. - -+-----+--------------+-----------+----------+--------------+ -| Row | Function | Average | Relative | Replications | -+=====+==============+===========+==========+==============+ -| 1 | "inplace_op" | 0.0074854 | 1.0 | 100 | -+-----+--------------+-----------+----------+--------------+ -| 2 | "normal_op" | 0.0174202 | 2.32723 | 100 | -+-----+--------------+-----------+----------+--------------+ - -So it will usually not be a big problem unless you are at the bottleneck of the computation. - -Distributed Key-value Store -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The type ``KVStore`` and related methods are used for data sharing across -different devices or machines. It provides a simple and efficient -integer - :class:`NDArray` key-value storage system that each device can pull or push. - -The following example shows how to create a local ``KVStore``, initialize a value and then pull it back. - -.. 
code-block:: julia - - kv = mx.KVStore(:local) - shape = (2,3) - key = 3 - - mx.init!(kv, key, mx.ones(shape)*2) - a = mx.empty(shape) - mx.pull!(kv, key, a) # pull value into a - println(copy(a)) - # => - # Float32[2.0 2.0 2.0 - # 2.0 2.0 2.0] - -Intermediate Level Interface ----------------------------- - -Symbols and Composition -~~~~~~~~~~~~~~~~~~~~~~~ - -The way we build deep learning models in MXNet.jl is to use the powerful -symbolic composition system. It is like `Theano -`_, except that we avoided long -expression compiliation time by providing *larger* neural network related -building blocks to guarantee computation performance. See also `this note -`_ for the design and trade-off of the MXNet symbolic composition system. - -The basic type is ``mx.Symbol``. The following is a trivial example of composing -two symbols with the ``+`` operation. - -.. code-block:: julia - - A = mx.Variable(:A) - B = mx.Variable(:B) - C = A + B - -We get a new *symbol* by composing existing *symbols* by some *operations*. A hierarchical architecture of a deep neural network could be realized by recursive composition. For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of 128 units and a ReLU activation function. - -.. code-block:: julia - - net = mx.Variable(:data) - net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) - net = mx.Activation(data=net, name=:relu1, act_type=:relu) - net = mx.FullyConnected(data=net, name=:fc2, num_hidden=64) - net = mx.Softmax(data=net, name=:out) - -Each time we take the previous symbol, and compose with an operation. Unlike the -simple ``+`` example above, the *operations* here are "bigger" ones, that correspond to common computation layers in deep neural networks. - -Each of those operation takes one or more input symbols for composition, with -optional hyper-parameters (e.g. ``num_hidden``, ``act_type``) to further customize the composition results. 
- -When applying those operations, we can also specify a ``name`` for the result symbol. This is convenient if we want to refer to this symbol later on. If not supplied, a name will be automatically generated. - -Each symbol takes some arguments. For example, in the ``+`` case above, to -compute the value of ``C``, we will need to know the values of the two inputs -``A`` and ``B``. For neural networks, the arguments are primarily two categories: *inputs* and *parameters*. *inputs* are data and labels for the networks, while *parameters* are typically trainable *weights*, *bias*, *filters*. - -When composing symbols, their arguments accumulates. We can list all the arguments by - -.. code-block:: julia - - julia> mx.list_arguments(net) - 6-element Array{Symbol,1}: - :data # Input data, name from the first data variable - :fc1_weight # Weights of the fully connected layer named :fc1 - :fc1_bias # Bias of the layer :fc1 - :fc2_weight # Weights of the layer :fc2 - :fc2_bias # Bias of the layer :fc2 - :out_label # Input label, required by the softmax layer named :out - -Note the names of the arguments are generated according to the provided name for each layer. We can also specify those names explicitly: - -.. code-block:: julia - - net = mx.Variable(:data) - w = mx.Variable(:myweight) - net = mx.FullyConnected(data=data, weight=w, name=:fc1, num_hidden=128) - mx.list_arguments(net) - # => - # 3-element Array{Symbol,1}: - # :data - # :myweight - # :fc1_bias - -The simple fact is that a ``Variable`` is just a placeholder ``mx.Symbol``. In composition, we can use arbitrary symbols for arguments. For example: - -.. 
code-block:: julia - - net = mx.Variable(:data) - net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) - net2 = mx.Variable(:data2) - net2 = mx.FullyConnected(data=net2, name=:net2, num_hidden=128) - mx.list_arguments(net2) - # => - # 3-element Array{Symbol,1}: - # :data2 - # :net2_weight - # :net2_bias - composed_net = net2(data2=net, name=:composed) - mx.list_arguments(composed_net) - # => - # 5-element Array{Symbol,1}: - # :data - # :fc1_weight - # :fc1_bias - # :net2_weight - # :net2_bias - -Note we use a composed symbol, ``net`` as the argument ``data2`` for ``net2`` to -get a new symbol, which we named ``:composed``. It also shows that a symbol itself is a call-able object, which can be invoked to fill in missing arguments and get more complicated symbol compositions. - -Shape Inference -~~~~~~~~~~~~~~~ - -Given enough information, the shapes of all arguments in a composed symbol could -be inferred automatically. For example, given the input shape, and some -hyper-parameters like ``num_hidden``, the shapes for the weights and bias in a neural network could be inferred. - -.. code-block:: julia - - net = mx.Variable(:data) - net = mx.FullyConnected(data=net, name=:fc1, num_hidden=10) - arg_shapes, out_shapes, aux_shapes = mx.infer_shape(net, data=(10, 64)) - -The returned shapes corresponds to arguments with the same order as returned by -``mx.list_arguments``. The ``out_shapes`` are shapes for outputs, and -``aux_shapes`` can be safely ignored for now. - -.. code-block:: julia - - for (n,s) in zip(mx.list_arguments(net), arg_shapes) - println("$n => $s") - end - # => - # data => (10,64) - # fc1_weight => (10,10) - # fc1_bias => (10,) - for (n,s) in zip(mx.list_outputs(net), out_shapes) - println("$n => $s") - end - # => - # fc1_output => (10,64) - - -Binding and Executing -~~~~~~~~~~~~~~~~~~~~~ - -In order to execute the computation graph specified a composed symbol, we will -*bind* the free variables to concrete values, specified as ``mx.NDArray``. 
This -will create an ``mx.Executor`` on a given ``mx.Context``. A context describes the computation devices (CPUs, GPUs, etc.) and an executor will carry out the computation (forward/backward) specified in the corresponding symbolic composition. - -.. code-block:: julia - - A = mx.Variable(:A) - B = mx.Variable(:B) - C = A .* B - a = mx.ones(3) * 4 - b = mx.ones(3) * 2 - c_exec = mx.bind(C, context=mx.cpu(), args=Dict(:A => a, :B => b)) - - mx.forward(c_exec) - copy(c_exec.outputs[1]) # copy turns NDArray into Julia Array - # => - # 3-element Array{Float32,1}: - # 8.0 - # 8.0 - # 8.0 - -For neural networks, it is easier to use ``simple_bind``. By providing the shape -for input arguments, it will perform a shape inference for the rest of the -arguments and create the :class:`NDArray` automatically. In practice, the binding and -executing steps are hidden under the ``Model`` interface. - -**TODO** Provide pointers to model tutorial and further details about binding and symbolic API. - - -High Level Interface --------------------- - -The high level interface include model training and prediction API, etc. 
From d7d4d9028e8e63d339ed2dd48c9026ab72a0683c Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 13 Jul 2016 06:31:50 +0900 Subject: [PATCH 352/630] finish mkdocs support --- docs/mkdocs.yml | 41 ++++++++++++++++++++++++++ docs/src/api/{ndarry.md => ndarray.md} | 0 docs/src/index.md | 23 ++++++++++++--- docs/src/tutorial.md | 0 docs/src/user-guide.md | 0 5 files changed, 60 insertions(+), 4 deletions(-) create mode 100644 docs/mkdocs.yml rename docs/src/api/{ndarry.md => ndarray.md} (100%) delete mode 100644 docs/src/tutorial.md delete mode 100644 docs/src/user-guide.md diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml new file mode 100644 index 000000000000..287119257604 --- /dev/null +++ b/docs/mkdocs.yml @@ -0,0 +1,41 @@ +site_name: MXNet.jl +repo_url: https://github.com/dmlc/MXNet.jl + +theme: readthedocs + +extra_css: + - assets/Documenter.css + +extra_javascript: + - https://cdn.mathjax.org/mathjax/latest/MathJax.jl?config=TeX-AMS-MML_HTMLorMML + - assets/mathjaxhelper.js + +markdown_extensions: + - extra + - tables + - fenced_code + +docs_dir: 'build' + +pages: + - Home: index.md + - Tutorial: + - Digit Recognition on MNIST: tutorial/mnist.md + - Generating Random Sentence with LSTM RNN: tutorial/char-lstm.md + - User Guide: + - Installation Guide: user-guide/install.md + - Overview: user-guide/overview.md + - FAQ: user-guide/faq.md + - API Documentation: + - Context: api/context.md + - Models: api/model.md + - Initializers: api/initializer.md + - Optimizers: api/optimizer.md + - Callbacks in training: api/callback.md + - Evaluation Metrics: api/metric.md + - Data Providers: api/io.md + - NDArray API: api/ndarray.md + - Symbolic API: api/symbolic-node.md + - Neural Networks Factory: api/nn-factory.md + - Executor: api/executor.md + - Network Visualization: api/visualize.md diff --git a/docs/src/api/ndarry.md b/docs/src/api/ndarray.md similarity index 100% rename from docs/src/api/ndarry.md rename to docs/src/api/ndarray.md diff --git a/docs/src/index.md 
b/docs/src/index.md index 0ad69711e41c..93e3fa49660a 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -1,8 +1,8 @@ # MXNet Documentation -[`MXNet.jl`](https://github.com/dmlc/MXNet.jl>) is the -[`Julia`](http://julialang.org/) package of -[`dmlc/mxnet`](https://github.com/dmlc/mxnet). `MXNet.jl` brings flexible and efficient GPU +[MXNet.jl](https://github.com/dmlc/MXNet.jl>) is the +[Julia](http://julialang.org/) package of +[dmlc/mxnet](https://github.com/dmlc/mxnet). `MXNet.jl` brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of features include: @@ -12,7 +12,22 @@ include: For more details, see documentation below. Please also checkout the [examples](https://github.com/dmlc/MXNet.jl/tree/master/examples) directory. +## Tutorials + +```@contents +Pages = ["tutorial/mnist.md", "tutorial/char-lstm.md"] +Depth = 2 +``` + +## User's Guide + ```@contents -Pages = ["tutorial.md", "user-guide.md", "api.md"] +Pages = ["user-guide/install.md", "user-guide/overview.md", "user-guide/faq.md"] Depth = 2 ``` + +## API Documentation + +```@contents +Pages = ["api/context.md", "api/model.md", "api/initializers.md", "api/optimizers.md", "api/callbacks.md", "api/metric.md", "api/io.md", "api/ndarray.md", "api/symbolic-node.md", "api/nn-factory.md", "api/executor.md", "api/visualize.md"] +``` diff --git a/docs/src/tutorial.md b/docs/src/tutorial.md deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/docs/src/user-guide.md b/docs/src/user-guide.md deleted file mode 100644 index e69de29bb2d1..000000000000 From 3dbec0f8d66b8d8bf617f2f6cfd35bfb0ada25f0 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 19 Jul 2016 03:52:06 +0900 Subject: [PATCH 353/630] cleanup docs --- docs/src/tutorial/char-lstm.md | 14 +++++------- docs/src/tutorial/mnist.md | 39 ++++++++++++++++----------------- docs/src/user-guide/install.md | 4 ++-- docs/src/user-guide/overview.md | 34 ++++++++++++++-------------- 4 files 
changed, 44 insertions(+), 47 deletions(-) diff --git a/docs/src/tutorial/char-lstm.md b/docs/src/tutorial/char-lstm.md index 5c20c8f05830..369bcddd53e9 100644 --- a/docs/src/tutorial/char-lstm.md +++ b/docs/src/tutorial/char-lstm.md @@ -16,7 +16,7 @@ We will be using fixed-length input sequence for training. The code is adapted from the [char-rnn example for MXNet's Python binding](https://github.com/dmlc/mxnet/blob/master/example/rnn/char_lstm.ipynb), which demonstrates how to use low-level -symbolic APIs </api/symbolic-node> to build customized neural +[Symbolic API](@ref) to build customized neural network models directly. The most important code snippets of this example is shown and explained @@ -30,8 +30,7 @@ example. LSTM Cells ---------- -Christopher Olah has a [great blog post about -LSTM](http://colah.github.io/posts/2015-08-Understanding-LSTMs/) with +Christopher Olah has a [great blog post about LSTM](http://colah.github.io/posts/2015-08-Understanding-LSTMs/) with beautiful and clear illustrations. So we will not repeat the definition and explanation of what an LSTM cell is here. Basically, an LSTM cell takes input `x`, as well as previous states (including `c` and `h`), and @@ -116,8 +115,7 @@ character is then encoded as a vector of 0s on all coordinates, and 1 on the coordinate corresponding to that character. The character-to-coordinate mapping is giving by the vocabulary. -The text sequence data provider implement the data provider API -</api/io>. We define the `CharSeqProvider` as below: +The text sequence data provider implements the [Data Providers](@ref) api. We define the `CharSeqProvider` as below: The provided data and labels follow the naming convention of inputs used when unrolling the LSTM. Note in the code below, apart from @@ -128,7 +126,7 @@ we will feed the initial states for each sequence from the data provider. Since the initial states is always zero, we just need to always provide constant zero blobs. 
-Next we implement the AbstractDataProvider.eachbatch interface for the +Next we implement the `eachbatch` method from the [`mx.AbstractDataProvider`](@ref) interface for the provider. We start by defining the data and label arrays, and the `DataBatch` object we will provide in each iteration. @@ -163,7 +161,7 @@ Note we are also using a customized `NLL` evaluation metric, which calculate the negative log-likelihood during training. Here is an output sample at the end of the training process. -``` {.sourceCode .text} +``` ... INFO: Speed: 357.72 samples/sec INFO: == Epoch 020 ========== @@ -227,7 +225,7 @@ trained for around half an hour on the Shakespeare dataset. Note all the line-breaks, punctuations and upper-lower case letters are produced by the sampler itself. I did not do any post-processing. -``` {.sourceCode .text} +``` ## Sample 1 all have sir, Away will fill'd in His time, I'll keep her, do not madam, if they here? Some more ha? diff --git a/docs/src/tutorial/mnist.md b/docs/src/tutorial/mnist.md index b4e7a3be89ee..8282f48ac8ca 100644 --- a/docs/src/tutorial/mnist.md +++ b/docs/src/tutorial/mnist.md @@ -14,7 +14,7 @@ Simple 3-layer MLP This is a tiny 3-layer MLP that could be easily trained on CPU. The script starts with -``` {.sourceCode .julia} +```julia using MXNet ``` @@ -22,7 +22,7 @@ to load the `MXNet` module. Then we are ready to define the network architecture via the symbolic API </user-guide/overview>. We start with a placeholder `data` symbol, -``` {.sourceCode .julia} +```julia data = mx.Variable(:data) ``` @@ -39,16 +39,16 @@ fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) Note each composition we take the previous symbol as the data argument, forming a feedforward chain. The architecture looks like -``` {.sourceCode .julia} +``` Input --> 128 units (ReLU) --> 64 units (ReLU) --> 10 units ``` where the last 10 units correspond to the 10 output classes (digits -0,...,9). 
We then add a final SoftmaxOutput operation to turn the +0,...,9). We then add a final `SoftmaxOutput` operation to turn the 10-dimensional prediction to proper probability values for the 10 classes: -``` {.sourceCode .julia} +```julia mlp = mx.SoftmaxOutput(data = fc3, name=:softmax) ``` @@ -56,7 +56,7 @@ As we can see, the MLP is just a chain of layers. For this case, we can also use the `mx.chain` macro. The same architecture above can be defined as -``` {.sourceCode .julia} +```julia mlp = @mx.chain mx.Variable(:data) => mx.FullyConnected(name=:fc1, num_hidden=128) => mx.Activation(name=:relu1, act_type=:relu) => @@ -73,14 +73,14 @@ could automatically download the dataset into construct the data provider into `mnist-data.jl` so that it could be shared by both the MLP example and the LeNet ConvNets example. -``` {.sourceCode .julia} +```julia batch_size = 100 include("mnist-data.jl") train_provider, eval_provider = get_mnist_providers(batch_size) ``` If you need to write your own data providers for customized data format, -please refer to AbstractDataProvider. +please refer to [`mx.AbstractDataProvider`](@ref). Given the architecture and data, we can instantiate an *model* to do the actual training. `mx.FeedForward` is the built-in model that is suitable @@ -89,7 +89,7 @@ also specify the *context* on which the computation should be carried out. Because this is a really tiny MLP, we will just run on a single CPU device. -``` {.sourceCode .julia} +```julia model = mx.FeedForward(mlp, context=mx.cpu()) ``` @@ -102,7 +102,7 @@ The last thing we need to specify is the optimization algorithm (a.k.a. *optimizer*) to use. We use the basic SGD with a fixed learning rate 0.1 and momentum 0.9: -``` {.sourceCode .julia} +```julia optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) ``` @@ -110,13 +110,13 @@ Now we can do the training. Here the `n_epoch` parameter specifies that we want to train for 20 epochs. 
We also supply a `eval_data` to monitor validation accuracy on the validation set. -``` {.sourceCode .julia} +```julia mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) ``` Here is a sample output -``` {.sourceCode .text} +``` INFO: Start training on [CPU0] INFO: Initializing parameters... INFO: Creating KVStore... @@ -143,7 +143,7 @@ that involves convolution and pooling. This architecture for the MNIST is usually called the \[LeNet\]\_. The first part of the architecture is listed below: -``` {.sourceCode .julia} +```julia # input data = mx.Variable(:data) @@ -168,7 +168,7 @@ a tensor of shape `(28,28,1,100)`. The convolution and pooling operates in the spatial axis, so `kernel=(5,5)` indicate a square region of 5-width and 5-height. The rest of the architecture follows as: -``` {.sourceCode .julia} +```ulia # first fully-connected fc1 = @mx.chain mx.Flatten(data=conv2) => mx.FullyConnected(num_hidden=500) => @@ -189,7 +189,7 @@ before connecting it to the `FullyConnected` operator. The rest of the network is the same as the previous MLP example. As before, we can now load the MNIST dataset: -``` {.sourceCode .julia} +```julia batch_size = 100 include("mnist-data.jl") train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) @@ -200,8 +200,7 @@ tensors instead of 2D matrices because the convolution operators needs correct spatial shape information. We then construct a feedforward model on GPU, and train it. -``` {.sourceCode .julia} -#-------------------------------------------------------------------------------- +```julia # fit model model = mx.FeedForward(lenet, context=mx.gpu()) @@ -214,7 +213,7 @@ mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) And here is a sample of running outputs: -``` {.sourceCode .text} +``` INFO: == Epoch 001 ========== INFO: ## Training summary INFO: :accuracy = 0.6750 @@ -237,14 +236,14 @@ Predicting with a trained model is very simple. 
By calling `mx.predict` with the model and a data provider, we get the model output as a Julia Array: -``` {.sourceCode .julia} +```julia probs = mx.predict(model, eval_provider) ``` The following code shows a stupid way of getting all the labels from the data provider, and compute the prediction accuracy manually: -``` {.sourceCode .julia} +```julia # collect all labels from eval data labels = Array[] for batch in eval_provider diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md index 9c66a84204da..fe586c248da3 100644 --- a/docs/src/user-guide/install.md +++ b/docs/src/user-guide/install.md @@ -6,14 +6,14 @@ Automatic Installation To install MXNet.jl, simply type -``` {.sourceCode .julia} +```julia Pkg.add("MXNet") ``` in the Julia REPL. Or to use the latest git version of MXNet.jl, use the following command instead -``` {.sourceCode .julia} +```julia Pkg.checkout("MXNet") ``` diff --git a/docs/src/user-guide/overview.md b/docs/src/user-guide/overview.md index 6197fc417a7e..691ab93e9acb 100644 --- a/docs/src/user-guide/overview.md +++ b/docs/src/user-guide/overview.md @@ -10,7 +10,7 @@ names in the Julia Base module, we wrap them all in a `mx` module. The convention of accessing the MXNet.jl interface is the to use the `mx.` prefix explicitly: -``` {.sourceCode .julia} +```julia using MXNet x = mx.zeros(2,3) # MXNet NDArray @@ -69,7 +69,7 @@ Most of the convenient functions like `size`, `length`, `ndims`, `eltype` on array objects should work out-of-the-box. Although indexing is not supported, it is possible to take *slices*: -``` {.sourceCode .julia} +```julia a = mx.ones(2,3) b = mx.slice(a, 1:2) b[:] = 2 @@ -84,7 +84,7 @@ NDArray object. A slice is always a contiguous piece of memory, so only slicing on the *last* dimension is supported. The example above also shows a way to set the contents of an NDArray. 
-``` {.sourceCode .julia} +```julia a = mx.empty(2,3) a[:] = 0.5 # set all elements to a scalar a[:] = rand(size(a)) # set contents with a Julia Array @@ -96,7 +96,7 @@ b[:] = a # copying and assignment between NDArrays Note due to the intrinsic design of the Julia language, a normal assignment -``` {.sourceCode .julia} +```julia a = b ``` @@ -104,7 +104,7 @@ does **not** mean copying the contents of `b` to `a`. Instead, it just make the variable `a` pointing to a new object, which is `b`. Similarly, inplace arithmetics does not work as expected: -``` {.sourceCode .julia} +```julia a = mx.ones(2) r = a # keep a reference to a b = mx.ones(2) @@ -124,7 +124,7 @@ operators like `+=` to get customized behavior. Instead, you will need to write `a[:] = a+b`, or if you want *real* inplace `+=` operation, MXNet.jl provides a simple macro `@mx.inplace`: -``` {.sourceCode .julia} +```julia @mx.inplace a += b macroexpand(:(@mx.inplace a += b)) # => :(MXNet.mx.add_to!(a,b)) @@ -135,7 +135,7 @@ function call, which invokes into libmxnet to add the contents of `b` into `a` directly. For example, the following is the update rule in the SGD `Optimizer` (both `grad` and `weight` are NDArray objects): -``` {.sourceCode .julia} +```julia @inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) ``` @@ -147,7 +147,7 @@ customized memory allocator designed specifically to handle this kind of situations. The following snippet does a simple benchmark on allocating temp NDArray vs. pre-allocating: -``` {.sourceCode .julia} +```julia using Benchmark using MXNet @@ -210,7 +210,7 @@ push. The following example shows how to create a local `KVStore`, initialize a value and then pull it back. -``` {.sourceCode .julia} +```julia kv = mx.KVStore(:local) shape = (2,3) key = 3 @@ -241,7 +241,7 @@ design and trade-off of the MXNet symbolic composition system. The basic type is `mx.Symbol`. The following is a trivial example of composing two symbols with the `+` operation. 
-``` {.sourceCode .julia} +```julia A = mx.Variable(:A) B = mx.Variable(:B) C = A + B @@ -253,7 +253,7 @@ be realized by recursive composition. For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of 128 units and a ReLU activation function. -``` {.sourceCode .julia} +```julia net = mx.Variable(:data) net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) net = mx.Activation(data=net, name=:relu1, act_type=:relu) @@ -284,7 +284,7 @@ the networks, while *parameters* are typically trainable *weights*, When composing symbols, their arguments accumulates. We can list all the arguments by -``` {.sourceCode .julia} +```julia julia> mx.list_arguments(net) 6-element Array{Symbol,1}: :data # Input data, name from the first data variable @@ -298,7 +298,7 @@ julia> mx.list_arguments(net) Note the names of the arguments are generated according to the provided name for each layer. We can also specify those names explicitly: -``` {.sourceCode .julia} +```julia net = mx.Variable(:data) w = mx.Variable(:myweight) net = mx.FullyConnected(data=data, weight=w, name=:fc1, num_hidden=128) @@ -313,7 +313,7 @@ mx.list_arguments(net) The simple fact is that a `Variable` is just a placeholder `mx.Symbol`. In composition, we can use arbitrary symbols for arguments. For example: -``` {.sourceCode .julia} +```julia net = mx.Variable(:data) net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) net2 = mx.Variable(:data2) @@ -347,7 +347,7 @@ symbol could be inferred automatically. For example, given the input shape, and some hyper-parameters like `num_hidden`, the shapes for the weights and bias in a neural network could be inferred. 
-``` {.sourceCode .julia} +```julia net = mx.Variable(:data) net = mx.FullyConnected(data=net, name=:fc1, num_hidden=10) arg_shapes, out_shapes, aux_shapes = mx.infer_shape(net, data=(10, 64)) @@ -357,7 +357,7 @@ The returned shapes corresponds to arguments with the same order as returned by `mx.list_arguments`. The `out_shapes` are shapes for outputs, and `aux_shapes` can be safely ignored for now. -``` {.sourceCode .julia} +```julia for (n,s) in zip(mx.list_arguments(net), arg_shapes) println("$n => $s") end @@ -381,7 +381,7 @@ A context describes the computation devices (CPUs, GPUs, etc.) and an executor will carry out the computation (forward/backward) specified in the corresponding symbolic composition. -``` {.sourceCode .julia} +```julia A = mx.Variable(:A) B = mx.Variable(:B) C = A .* B From c74594f6f27eae02936d2265a243ce9f208e7395 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 2 Aug 2016 02:14:13 +0900 Subject: [PATCH 354/630] enable travis to build docs --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index 9f08e176eed2..1f827371a6a8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -37,3 +37,6 @@ script: after_success: - source $TRAVIS/run_coverage.sh + - julia -e 'Pkg.add("Documenter")' + - julia -e 'cd(Pkg.dir("PACKAGE_NAME")); include(joinpath("docs", "make.jl"))' + From 85ec1112333a5dd6432c6a54b1308feea4d2a3b4 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 2 Aug 2016 03:24:46 +0900 Subject: [PATCH 355/630] finalize mkdocs setup --- docs/make.jl | 8 +++++++- docs/mkdocs.yml | 9 ++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/docs/make.jl b/docs/make.jl index f5569035bc69..06bbef0466cd 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,5 +1,11 @@ using Documenter, MXNet makedocs( - modules = [MXNet] + modules = MXNet, + doctest = false +) + +deploydocs( + deps = Deps.pip("pygments", "mkdocs", "mkdocs-material", "python-markdown-math"), + repo = 
"github.com/dmlc/MXNet.jl.git", ) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index 287119257604..bf604818027e 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -1,7 +1,12 @@ site_name: MXNet.jl repo_url: https://github.com/dmlc/MXNet.jl -theme: readthedocs +theme: material + +extra: + palette: + primary: 'indigo' + accent: 'blue' extra_css: - assets/Documenter.css @@ -14,6 +19,8 @@ markdown_extensions: - extra - tables - fenced_code + - mdx_math + - admonition docs_dir: 'build' From 789bb6b16367c4fd7fc818ff254b2bcba67f40d3 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 2 Aug 2016 11:36:17 -0700 Subject: [PATCH 356/630] setup Documenter.jl --- docs/.documenter.enc | Bin 0 -> 1680 bytes 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/.documenter.enc diff --git a/docs/.documenter.enc b/docs/.documenter.enc new file mode 100644 index 0000000000000000000000000000000000000000..48bd92bc4742234f2364e62fe899efdf64a1f5d8 GIT binary patch literal 1680 zcmV;B25rWS<*O0{K{-TA^4}R}MY#~pBnC~nXf)$xi`VgJ=(}CLo z99B@ySk*kZH{=OD%kZt*$@gI0bE~V&8IFr)?w5O6@f*MXO1rRL!8xs5b~i-YSaAz8 z?Oa!ONEJ)LuTIWymjXYs=Oi>jHonzG$sc;Z*bK0{asSaCl)K8Dt9hwefjeLh>CozO zqh;;QUGT7%|V}q;lmM@=0X9JJ!pB?v8#q`2cOu+ zU@YKi4jzn?`JhZiTmUfQFk4;S}Pw~t=F}U z$%Rz&t@nw^pFcHC2L-yI$Y*cB3#7Rxc^W0K%)Ks79+Uzgm`KXYOBtz%nbUK)6W{GD z6vGFcsUm(_XRIWEX3zin!FjO}Sz?&zh2<}q#6!7T#mDR*Upz5b8j3x4wws>0y8d2U zgr4|K1?&yV3gZfVwYp8Mk)r^>44lPXYH~*@OrgnM4=fOdZBZK!f2-)hDy5ti9w|to z3b(Q=|AHjoLA#w>fJ(S+W#2{r*t{_IpAvCG#Dzda4Y6A710Z2cBun5P^+T7Tm*oQd z1GpiOufpgUcuJQvxFw|HHB`nA_*$k*K{RRaP4)YfMu!gzCr!upY4?3CSKuLt4nsHk z7GV%hH^^e+!1|r&n~=xskC>gi03^36$z|~i_qZ2JUcTV%POTl65g}~L$K<+;J@wUSt;2vLL4Ojd?W zBFdaPoa|QQmU>G641cN1B)OdmcaScG-0Zu4 zafkd|kBaQySh0;{(bK&+u)17jYVSCTLTJ3Yud0x;nOIIGq{LUyoxoez_PJcAA=DK< znsoy=qR64tOErR%#{xT9UVX&!%X)SR<|qo_MO@5o2!lje^^fxFuyLo!EM`7@P9Z(`Iqdg=4#^%Q8z=!DfcOz&iz zHt+S7=xTE<>f>P?R*4NHpCz5FiKsVp^Ld zd5$9(3lZ3}iDb`iIty37map_wc;xQPUfE59I#tt(Kp2#wv?ea~%%2gdJ4%Mt(%~9d 
zRHCY}3szlG0?5myNXNw9CBbPl_uoPVNqr%~LyW*k;fnaIS6ZXp9i@{Y}u8$kaBHuAgSBe z29Hcga%%BupHsJhkLpmccg5yF!GveSFS*-xYBpnpNO-TiIzIo+%#)oUKoVD`UI~HO z$TkJN-PR>vZr&d{r9ca1?@0Y3U{V?O`WSuiW!9;0Uh2#KbU_q{C38*|0zxhApkC&X zeUx~}r7VL8qdBHi48~Lh&rPXZviPvjgScMatjU&#pKvk^C8E`Wu{OUd>%v70GDn2S aF;ziPu#v|oq3fM$n|LCvE&l?FaMg<7X*&S` literal 0 HcmV?d00001 From 2c651ff096fd60bee1b8f0c29e42c136358a7240 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 2 Aug 2016 12:10:42 -0700 Subject: [PATCH 357/630] fix documenter config bug --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 1f827371a6a8..651ba9640f8b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,5 +38,5 @@ script: after_success: - source $TRAVIS/run_coverage.sh - julia -e 'Pkg.add("Documenter")' - - julia -e 'cd(Pkg.dir("PACKAGE_NAME")); include(joinpath("docs", "make.jl"))' + - julia -e 'cd(Pkg.dir("MXNet")); include(joinpath("docs", "make.jl"))' From f442c4916fd107c436d85a02cfd2d8fa4502faad Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 2 Aug 2016 12:57:47 -0700 Subject: [PATCH 358/630] switch doc to new location. 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 855680ae6d1a..bdfbcc3e1148 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![Build Status](https://travis-ci.org/dmlc/MXNet.jl.svg?branch=master)](https://travis-ci.org/dmlc/MXNet.jl) [![codecov.io](https://codecov.io/github/dmlc/MXNet.jl/coverage.svg?branch=master)](https://codecov.io/github/dmlc/MXNet.jl?branch=master) -[![Documentation Status](https://readthedocs.org/projects/mxnetjl/badge/?version=latest)](http://mxnetjl.readthedocs.org/en/latest/?badge=latest) +[![](https://img.shields.io/badge/docs-latest-blue.svg)](https://dmlc.github.io/MXNet.jl/latest) [![MXNet](http://pkg.julialang.org/badges/MXNet_0.4.svg)](http://pkg.julialang.org/?pkg=MXNet) [![License](http://dmlc.github.io/img/apache2.svg)](LICENSE.md) [![Join the chat at https://gitter.im/dmlc/mxnet](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/dmlc/mxnet?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) From 79f51e6532b4673d3b7fbbc810a59bb4904c1984 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 2 Aug 2016 12:59:24 -0700 Subject: [PATCH 359/630] fix URL to Windows building instruction (#111) --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 9449c6337102..86c79905b889 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -24,7 +24,7 @@ if !libmxnet_detected @windows_only begin info("Please follow the libmxnet documentation on how to build manually") info("or to install pre-build packages:") - info("http://mxnet.readthedocs.org/en/latest/build.html#building-on-windows") + info("http://mxnet.readthedocs.io/en/latest/how_to/build.html#building-on-windows") error("Automatic building libxmnet on Windows is currently not supported yet.") end From b54620b342b1016a4782eb1ff85c45c9cea77a99 Mon Sep 17 00:00:00 2001 From: Scott Lundberg Date: Wed, 3 Aug 2016 11:13:39 -0700 Subject: 
[PATCH 360/630] Fix broken documentation link. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index bdfbcc3e1148..06313473c060 100644 --- a/README.md +++ b/README.md @@ -65,4 +65,4 @@ accuracy = 100correct/length(labels) println(mx.format("Accuracy on eval set: {1:.2f}%", accuracy)) ``` -For more details, please refer to the [documentation](http://mxnetjl.readthedocs.org/) and [examples](examples). +For more details, please refer to the [documentation](https://dmlc.github.io/MXNet.jl/latest) and [examples](examples). From a7b1c65b96ba91878d4902cb5b526c5b4640f40d Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 11 Aug 2016 05:44:21 +0900 Subject: [PATCH 361/630] fix deprecations in build.jl --- deps/build.jl | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 86c79905b889..1736bf9eb0a2 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -1,3 +1,4 @@ +using Compat ################################################################################ # First try to detect and load existing libmxnet ################################################################################ @@ -21,7 +22,7 @@ if !libmxnet_detected ################################################################################ # If not found, try to build automatically using BinDeps ################################################################################ - @windows_only begin + if is_windows() info("Please follow the libmxnet documentation on how to build manually") info("or to install pre-build packages:") info("http://mxnet.readthedocs.io/en/latest/how_to/build.html#building-on-windows") @@ -60,7 +61,9 @@ if !libmxnet_detected FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin ChangeDirectory("$_mxdir") `cp make/config.mk config.mk` - @osx_only `cp make/osx.mk config.mk` + if is_apple() + `cp make/osx.mk config.mk` + end `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' 
config.mk` `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` `cp ../../cblas.h include/cblas.h` From 0408e3e7cdfa4c14897fe01ccf36c11da0b10aaa Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 11 Aug 2016 06:15:51 +0900 Subject: [PATCH 362/630] fix String deprecations --- src/base.jl | 2 +- src/executor.jl | 4 ++-- src/io.jl | 10 +++++----- src/kvstore.jl | 2 +- src/ndarray.jl | 11 +++++------ src/symbolic-node.jl | 38 +++++++++++++++++++------------------- src/util.jl | 18 +++++++++--------- 7 files changed, 42 insertions(+), 43 deletions(-) diff --git a/src/base.jl b/src/base.jl index 32968ea0afb6..d35a1bec3818 100644 --- a/src/base.jl +++ b/src/base.jl @@ -43,7 +43,7 @@ function mx_get_last_error() if msg == C_NULL throw(MXError("Failed to get last error message")) end - return @compat String(msg) + return unsafe_string(msg) end "Utility macro to call MXNet API functions" diff --git a/src/executor.jl b/src/executor.jl index edebc79907c7..0bdccc942d22 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -23,7 +23,7 @@ function Executor(hdr :: MX_ExecutorHandle, symbol :: SymbolicNode, ref_hdrs = Ref{Ptr{MX_handle}}(0) @mxcall(:MXExecutorOutputs, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_handle}}), hdr, ref_size, ref_hdrs) - out_hdrs = pointer_to_array(ref_hdrs[], ref_size[]) + out_hdrs = unsafe_wrap(Array, ref_hdrs[], ref_size[]) out_arrays = [NDArray(MX_NDArrayHandle(x)) for x in out_hdrs] arg_names = list_arguments(symbol) @@ -217,5 +217,5 @@ Can be used to get an estimated about the memory cost. 
function debug_str(self :: Executor) s_ref = Ref{Cstring}() @mxcall(:MXExecutorPrint, (MX_handle, Ptr{Cstring}), self.handle, s_ref) - @compat String(s_ref[]) + unsafe_string(s_ref[]) end diff --git a/src/io.jl b/src/io.jl index 0e7d5c22490c..f240388ffbda 100644 --- a/src/io.jl +++ b/src/io.jl @@ -559,7 +559,7 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) - iter_name = Symbol(String(ref_name[])) + iter_name = Symbol(unsafe_wrap(String, ref_name[])) if gen_docs if endswith(string(iter_name), "Iter") @@ -567,7 +567,7 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) else f_desc = "" end - f_desc *= String(ref_desc[]) * "\n\n" + f_desc *= unsafe_string(ref_desc[]) * "\n\n" f_desc *= ":param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data.\n" f_desc *= ":param Base.Symbol label_name: keyword argument, default ``:softmax_label``. " * "The name of the label. Could be ``nothing`` if no label is presented in this dataset.\n\n" @@ -578,8 +578,8 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) defun = quote function $iter_name(; kwargs...) 
- arg_keys = AbstractString[string(k) for (k,v) in kwargs] - arg_vals = AbstractString[dump_mx_param(v) for (k,v) in kwargs] + arg_keys = String[string(k) for (k,v) in kwargs] + arg_vals = String[dump_mx_param(v) for (k,v) in kwargs] ref_hdr = Ref{MX_handle}(0) @mxcall(:MXDataIterCreateIter, (MX_handle, MX_uint, char_pp, char_pp, Ref{MX_handle}), @@ -603,7 +603,7 @@ function _import_io_iterators(;gen_docs::Bool=false) @mxcall(:MXListDataIters, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) n_creators = n_ref[] - h_creators = pointer_to_array(h_ref[], n_creators) + h_creators = unsafe_wrap(Array, h_ref[], n_creators) if gen_docs docs = Dict{Base.Symbol, AbstractString}() diff --git a/src/kvstore.jl b/src/kvstore.jl index bc2e65c6fdd3..03da58197d70 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -87,7 +87,7 @@ end function get_type(self :: KVStore) type_ref = Ref{char_p}(0) @mxcall(:MXKVStoreGetType, (MX_handle, Ref{char_p}), self, type_ref) - return Symbol(@compat String(type_ref[])) + return Symbol(unsafe_wrap(String, type_ref[])) end function get_num_workers(self :: KVStore) diff --git a/src/ndarray.jl b/src/ndarray.jl index dacee56d7281..1d7619a2ad3e 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -251,7 +251,7 @@ function size(arr :: NDArray) ref_shape = Ref{Ptr{MX_uint}}(0) @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), arr, ref_ndim, ref_shape) - tuple(map(Int, flipdim(pointer_to_array(ref_shape[], ref_ndim[]),1))...) + tuple(map(Int, flipdim(unsafe_wrap(Array, ref_shape[], ref_ndim[]),1))...) 
end function size(arr :: NDArray, dim :: Int) size(arr)[dim] @@ -824,8 +824,7 @@ end function try_get_shared(arr :: NDArray) if context(arr).device_type == CPU # try to do data sharing - vec = pointer_to_array(pointer(arr), length(arr)) - return reshape(vec, size(arr)) + return unsafe_wrap(Array, pointer(arr), size(arr)) else # impossible to share, just copying return copy(arr) @@ -876,11 +875,11 @@ function load(filename::AbstractString, ::Type{NDArray}) out_name_size = out_name_size[] out_size = out_size[] if out_name_size == 0 - return [NDArray(MX_NDArrayHandle(hdr)) for hdr in pointer_to_array(out_hdrs[], out_size)] + return [NDArray(MX_NDArrayHandle(hdr)) for hdr in unsafe_wrap(Array, out_hdrs[], out_size)] else @assert out_size == out_name_size - return Dict([(Symbol(@compat String(k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in - zip(pointer_to_array(out_names[], out_size), pointer_to_array(out_hdrs[], out_size))]) + return @compat Dict((Symbol(unsafe_wrap(String, k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in + zip(unsafe_wrap(Array, out_names[], out_size), unsafe_wrap(Array, out_hdrs[], out_size))) end end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 850379899642..94e7bb10c128 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -55,8 +55,8 @@ macro _list_symbol_info(self, func_name) @mxcall($func_name, (MX_handle, Ref{MX_uint}, Ref{char_pp}), $self, ref_sz, ref_names) narg = ref_sz[] - names = pointer_to_array(ref_names[], narg) - names = [Symbol(@compat String(x)) for x in names] + names = unsafe_wrap(Array, ref_names[], narg) + names = [Symbol(unsafe_wrap(String, x)) for x in names] return names end end @@ -123,13 +123,13 @@ Get attribute attached to this :class:`SymbolicNode` belonging to key. :return: The value belonging to key as a :class:`Nullable`. 
""" function get_attr(self :: SymbolicNode, key :: Symbol) - key_s = @compat String(string(key)) + key_s = string(key) ref_out = Ref{Cstring}() ref_success = Ref{Cint}(-1) @mxcall(:MXSymbolGetAttr, (MX_handle, Cstring, Ref{Cstring}, Ref{Cint}), self, key_s, ref_out, ref_success) if ref_success[] == 1 - return Nullable{String}(@compat String(ref_out[])) + return Nullable{String}(unsafe_string(ref_out[])) else return Nullable{String}() end @@ -147,11 +147,11 @@ function list_attr(self :: SymbolicNode) @mxcall(:MXSymbolListAttrShallow, (MX_handle, Ref{MX_uint}, Ref{char_pp}), self, ref_sz, ref_strings) narg = 2*ref_sz[] - strings = pointer_to_array(ref_strings[], narg) + strings = unsafe_wrap(Array, ref_strings[], narg) out = Dict{Symbol, String}() for i in 1:2:narg - key = Symbol(@compat String(strings[i])) - value = @compat String(strings[i+1]) + key = Symbol(unsafe_wrap(String, strings[i])) + value = unsafe_string(strings[i+1]) # Creates a copy of string out[key] = value end return out @@ -169,11 +169,11 @@ function list_all_attr(self :: SymbolicNode) @mxcall(:MXSymbolListAttr, (MX_handle, Ref{MX_uint}, Ref{char_pp}), self, ref_sz, ref_strings) narg = 2*ref_sz[] - strings = pointer_to_array(ref_strings[], narg) + strings = unsafe_wrap(Array, ref_strings[], narg) out = Dict{Symbol, String}() for i in 1:2:narg - key = Symbol(@compat String(strings[i])) - value = @compat String(strings[i+1]) + key = Symbol(unsafe_wrap(String, strings[i])) + value = unsafe_string(strings[i+1]) out[key] = value end return out @@ -191,8 +191,8 @@ the attributes of a :class:`SymbolicNode` that is already been used somewhere el cause unexpected behavior and inconsistency. 
""" function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) - key_s = @compat String(string(key)) - value_s = @compat String(value) + key_s = string(key) + value_s = String(value) @mxcall(:MXSymbolSetAttr, (MX_handle, Cstring, Cstring), self, key_s, value_s) end @@ -231,10 +231,10 @@ function Group(nodes :: SymbolicNode...) end function _build_shapes(shape_size::MX_uint, shape_ndim::Ptr{MX_uint}, shape_data::Ptr{Ptr{MX_uint}}) - shape_ndim = pointer_to_array(shape_ndim, shape_size) - shape_data = pointer_to_array(shape_data, shape_size) + shape_ndim = unsafe_wrap(Array, shape_ndim, shape_size) + shape_data = unsafe_wrap(Array, shape_data, shape_size) shapes = map(1:shape_size) do i - my_shape = pointer_to_array(shape_data[i], shape_ndim[i]) + my_shape = unsafe_wrap(Array, shape_data[i], shape_ndim[i]) tuple(flipdim(Int[my_shape...],1)...) end convert(Vector{Tuple}, shapes) @@ -332,9 +332,9 @@ function _infer_type(self, keys, arg_type_data) if ref_complete[] == 0 return (nothing, nothing, nothing) else - in_type = pointer_to_array(ref_in_type_data[], ref_in_type_size[]) - out_type = pointer_to_array(ref_out_type_data[], ref_out_type_size[]) - aux_type = pointer_to_array(ref_aux_type_data[], ref_aux_type_size[]) + in_type = unsafe_wrap(Array, ref_in_type_data[], ref_in_type_size[]) + out_type = unsafe_wrap(Array, ref_out_type_data[], ref_out_type_size[]) + aux_type = unsafe_wrap(Array, ref_aux_type_data[], ref_aux_type_size[]) return ([fromTypeFlag(TypeFlag(t)) for t in in_type], [fromTypeFlag(TypeFlag(t)) for t in out_type], [fromTypeFlag(TypeFlag(t)) for t in aux_type]) @@ -528,7 +528,7 @@ Convert a :class:`SymbolicNode` into a JSON string. 
function to_json(self :: SymbolicNode) ref_json = Ref{char_p}(0) @mxcall(:MXSymbolSaveToJSON, (MX_handle, Ref{char_p}), self, ref_json) - return @compat String(ref_json[]) + return unsafe_string(ref_json[]) end """ diff --git a/src/util.jl b/src/util.jl index 13ce10f58a77..402f633a8567 100644 --- a/src/util.jl +++ b/src/util.jl @@ -15,7 +15,7 @@ function get_mnist_ubyte() :train_label => "train-labels-idx1-ubyte", :test_data => "t10k-images-idx3-ubyte", :test_label => "t10k-labels-idx1-ubyte") - filenames = [k => joinpath(mnist_dir, v) for (k,v) in filenames] + filenames = @compat Dict(k => joinpath(mnist_dir, v) for (k,v) in filenames) if !all(isfile, values(filenames)) cd(mnist_dir) do mnist_dir = download("http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip", "mnist.zip") @@ -38,7 +38,7 @@ function get_cifar10() cifar10_dir = joinpath(data_dir, "cifar10") mkpath(cifar10_dir) filenames = Dict(:train => "cifar/train.rec", :test => "cifar/test.rec") - filenames = [k => joinpath(cifar10_dir, v) for (k,v) in filenames] + filenames = @compat Dict(k => joinpath(cifar10_dir, v) for (k,v) in filenames) if !all(isfile, values(filenames)) cd(cifar10_dir) do run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip`) @@ -63,26 +63,26 @@ end # Internal Utilities ################################################################################ const DOC_EMBED_ANCHOR = "**autogen:EMBED:{1}:EMBED:autogen**" -function _format_typestring(typestr :: AbstractString) +function _format_typestring(typestr :: String) replace(typestr, r"\bSymbol\b", "SymbolicNode") end function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{char_pp}, arg_descs::Ref{char_pp}, remove_dup::Bool=true) param_keys = Set{String}() - arg_names = pointer_to_array(arg_names[], narg) - arg_types = pointer_to_array(arg_types[], narg) - arg_descs = pointer_to_array(arg_descs[], narg) + arg_names = unsafe_wrap(Array, arg_names[], narg) + arg_types = unsafe_wrap(Array, arg_types[], narg) + 
arg_descs = unsafe_wrap(Array, arg_descs[], narg) docstrings = String[] for i = 1:narg - arg_name = @compat String(arg_names[i]) + arg_name = unsafe_string(arg_names[i]) if arg_name ∈ param_keys && remove_dup continue end push!(param_keys, arg_name) - arg_type = _format_typestring(@compat String(arg_types[i])) - arg_desc = @compat String(arg_descs[i]) + arg_type = _format_typestring(unsafe_string(arg_types[i])) + arg_desc = unsafe_string(arg_descs[i]) push!(docstrings, "* `$arg_name::$arg_type`: $arg_desc\n") end return join(docstrings, "\n") From 31459739c244a3d38a69b8f68ee01a893ed9bf60 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 11 Aug 2016 06:27:50 +0900 Subject: [PATCH 363/630] Change back to dict comprehension syntax for v0.4 0.4 doesn't support the generator syntac (even through Compat). This commit should be reverted once we are v0.5 only. --- src/model.jl | 4 ++-- src/ndarray.jl | 4 ++-- src/util.jl | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/model.jl b/src/model.jl index ac96d047b668..f47d49d1a82d 100644 --- a/src/model.jl +++ b/src/model.jl @@ -385,8 +385,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra train_execs = Array(Executor, num_dev) for i = 1:num_dev - data_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)] - label_shapes = [k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)] + data_shapes = Dict([k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)]) + label_shapes = Dict([k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)]) train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=grad_req, data_shapes..., label_shapes...) 
dbg_str = mx.debug_str(train_execs[i]) info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[i])) diff --git a/src/ndarray.jl b/src/ndarray.jl index 1d7619a2ad3e..5a4bac527d9d 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -878,8 +878,8 @@ function load(filename::AbstractString, ::Type{NDArray}) return [NDArray(MX_NDArrayHandle(hdr)) for hdr in unsafe_wrap(Array, out_hdrs[], out_size)] else @assert out_size == out_name_size - return @compat Dict((Symbol(unsafe_wrap(String, k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in - zip(unsafe_wrap(Array, out_names[], out_size), unsafe_wrap(Array, out_hdrs[], out_size))) + return Dict([(Symbol(unsafe_wrap(String, k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in + zip(unsafe_wrap(Array, out_names[], out_size), unsafe_wrap(Array, out_hdrs[], out_size))]) end end diff --git a/src/util.jl b/src/util.jl index 402f633a8567..73ee877973f3 100644 --- a/src/util.jl +++ b/src/util.jl @@ -15,7 +15,7 @@ function get_mnist_ubyte() :train_label => "train-labels-idx1-ubyte", :test_data => "t10k-images-idx3-ubyte", :test_label => "t10k-labels-idx1-ubyte") - filenames = @compat Dict(k => joinpath(mnist_dir, v) for (k,v) in filenames) + filenames = Dict([k => joinpath(mnist_dir, v) for (k,v) in filenames]) if !all(isfile, values(filenames)) cd(mnist_dir) do mnist_dir = download("http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip", "mnist.zip") @@ -38,7 +38,7 @@ function get_cifar10() cifar10_dir = joinpath(data_dir, "cifar10") mkpath(cifar10_dir) filenames = Dict(:train => "cifar/train.rec", :test => "cifar/test.rec") - filenames = @compat Dict(k => joinpath(cifar10_dir, v) for (k,v) in filenames) + filenames = Dict([k => joinpath(cifar10_dir, v) for (k,v) in filenames]) if !all(isfile, values(filenames)) cd(cifar10_dir) do run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip`) From 016efd6dc8b002f717c65f093cdac4c4aa2fe650 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 11 Aug 2016 06:55:53 
+0900 Subject: [PATCH 364/630] improve docstrings --- src/ndarray.jl | 6 ++++-- src/util.jl | 7 +++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 5a4bac527d9d..713b446864ac 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -997,8 +997,10 @@ function _get_function_description(handle :: MX_handle) ref_arg_types, ref_arg_descs, ref_ret_type) name = Symbol(unsafe_wrap(String, ref_name[])) - - desc = unsafe_wrap(String, ref_desc[]) * "\n\n" + signature = _format_signature(Int(ref_narg[]), ref_arg_names) + desc = " " * string(name) * "(" * signature * ")\n\n" + desc *= unsafe_wrap(String, ref_desc[]) * "\n\n" + desc *= "# Arguments\n" desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) return name, desc end diff --git a/src/util.jl b/src/util.jl index 73ee877973f3..5c50d20357e7 100644 --- a/src/util.jl +++ b/src/util.jl @@ -87,3 +87,10 @@ function _format_docstring(narg::Int, arg_names::Ref{char_pp}, arg_types::Ref{ch end return join(docstrings, "\n") end + +function _format_signature(narg::Int, arg_names::Ref{char_pp}) + arg_names = unsafe_wrap(Array, arg_names[], narg) + + return join([unsafe_string(name) for name in arg_names] , ", ") +end + From 50882421bb357a88370c328edc7c5a3aef1d90f5 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 11 Aug 2016 07:04:33 +0900 Subject: [PATCH 365/630] Don't override help for already defined methods --- src/ndarray.jl | 14 +++++++++++--- src/symbolic-node.jl | 11 ++++++++--- 2 files changed, 19 insertions(+), 6 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 713b446864ac..49555b50f09b 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1082,9 +1082,17 @@ macro _import_ndarray_functions() name, desc = _get_function_description(handle) exprs = _get_function_expressions(handle, name) - expr = quote - $(exprs...) 
- @doc $desc $name + # TODO(vchuravy): Fix this in a more elegant way once we only support + # v0.5 + if isdefined(Base, name) || isdefined(name) + expr = quote + $(exprs...) + end + else + expr = quote + $(exprs...) + @doc $desc $name + end end push!(func_exprs, expr) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 94e7bb10c128..bd2dd54c6282 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -675,9 +675,14 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) end func_def = Expr(:function, func_head, Expr(:block, func_body)) - quote - $func_def - @doc $f_desc $func_name + # TODO(vchuravy) find a more elegant solution fro v0.5 + if isdefined(Base, func_name) || isdefined(func_name) + return func_def + else + return quote + $func_def + @doc $f_desc $func_name + end end end From 3bc93fe33395c3c571b9ccf96956d579c573f880 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 11 Aug 2016 07:18:29 +0900 Subject: [PATCH 366/630] improve documentation formatting for SymbolicNodes --- src/symbolic-node.jl | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index bd2dd54c6282..32be8f63a7ea 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -586,14 +586,17 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) kv_nargs_s = unsafe_wrap(String, ref_kv_nargs[]) kv_nargs = Symbol(kv_nargs_s) + signature = _format_signature(Int(ref_nargs[]), ref_arg_names) + f_desc = " " * func_name_s * "(" * signature * ")\n\n" f_desc = unsafe_wrap(String, ref_desc[]) * "\n\n" if !isempty(kv_nargs_s) - f_desc *= "This function support variable length positional :class:`SymbolicNode` inputs.\n\n" + f_desc *= "This function support variable length positional `SymbolicNode` inputs.\n\n" end + f_desc *= "# Arguments\n" f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":param Symbol name: The name of the 
:class:`SymbolicNode`. (e.g. `:my_symbol`), optional.\n" - f_desc *= ":param Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`SymbolicNode`.\n\n" - f_desc *= ":return: $(_format_typestring(unsafe_wrap(String, ref_ret_type[]))).\n\n" + f_desc *= "* `name::Symbol`: The name of the `SymbolicNode`. (e.g. `:my_symbol`), optional.\n" + f_desc *= "* `attrs::Dict{Symbol, AbstractString}`: The attributes associated with this `SymbolicNode`.\n\n" + f_desc *= "Returns `$(_format_typestring(unsafe_wrap(String, ref_ret_type[])))`." # function $func_name(args...; kwargs...) func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) From b328f242028696fe6b21e8db01480b3aff3e9fe1 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 11 Aug 2016 07:45:16 +0900 Subject: [PATCH 367/630] fix deprecation for call --- src/callback.jl | 4 ++-- src/symbolic-node.jl | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/callback.jl b/src/callback.jl index 3aca66a7bbf0..c3e1d299403e 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -51,7 +51,7 @@ For example, the :func:`speedometer` callback is defined as function every_n_batch(callback :: Function, n :: Int; call_on_0 :: Bool = false) BatchCallback(n, call_on_0, callback) end -function Base.call(cb :: BatchCallback, state :: OptimizationState) +@compat function (cb :: BatchCallback)(state :: OptimizationState) if state.curr_batch == 0 if cb.call_on_0 cb.callback(state) @@ -107,7 +107,7 @@ A convenient function to construct a callback that runs every ``n`` full data-pa function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end -function Base.call{T<:Real}(cb :: EpochCallback, model :: Any, state :: OptimizationState, metric :: Vector{Tuple{Base.Symbol, T}}) +@compat function (cb :: EpochCallback){T<:Real}(model :: Any, state :: OptimizationState, metric :: Vector{Tuple{Base.Symbol, 
T}}) if state.curr_epoch == 0 if cb.call_on_0 cb.callback(model, state, metric) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 32be8f63a7ea..600bc75b3aeb 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -32,6 +32,7 @@ function Base.copy(self :: SymbolicNode) Base.deepcopy(self) end +# TODO(vchuravy) How to add documentation to the v0.5 style call overloading """ call(self :: SymbolicNode, args :: SymbolicNode...) call(self :: SymbolicNode; kwargs...) @@ -39,11 +40,11 @@ end Make a new node by composing ``self`` with ``args``. Or the arguments can be specified using keyword arguments. """ -function Base.call(self :: SymbolicNode, args :: SymbolicNode...) +@compat function (self::SymbolicNode)(args :: SymbolicNode...) s = deepcopy(self) _compose!(s, args...) end -function Base.call(self :: SymbolicNode; kwargs...) +@compat function (self::SymbolicNode)(;kwargs...) s = deepcopy(self) _compose!(s; kwargs...) end From 2c36a5debbfb7122ea68b65807d7e5a911669fd2 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 11 Aug 2016 07:56:08 +0900 Subject: [PATCH 368/630] nightly is v0.6 --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 651ba9640f8b..08c7db10e0ab 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,6 +6,7 @@ os: - osx julia: - 0.4 + - 0.5 - nightly # dependent apt packages From 63e6063d9d99af2e4b1ecc3108128fe43203cd0e Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 11 Aug 2016 11:37:48 -0700 Subject: [PATCH 369/630] fix Inception URL #113 --- models/Inception/get.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/Inception/get.sh b/models/Inception/get.sh index 392403a82703..dfd3701bd0b4 100755 --- a/models/Inception/get.sh +++ b/models/Inception/get.sh @@ -1,4 +1,4 @@ #!/bin/bash -wget -c http://webdocs.cs.ualberta.ca/~bx3/data/Inception.zip +wget -c http://data.dmlc.ml/mxnet/data/Inception.zip unzip Inception.zip From 
513c030fad4a734983a143e51b10630fdf5e658d Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 16 Aug 2016 12:50:37 -0700 Subject: [PATCH 370/630] fix link in doc --- docs/src/index.md | 4 ++-- docs/src/tutorial/mnist.md | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index 93e3fa49660a..a41b77478631 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -1,8 +1,8 @@ # MXNet Documentation -[MXNet.jl](https://github.com/dmlc/MXNet.jl>) is the +[MXNet.jl](https://github.com/dmlc/MXNet.jl) is the [Julia](http://julialang.org/) package of -[dmlc/mxnet](https://github.com/dmlc/mxnet). `MXNet.jl` brings flexible and efficient GPU +[dmlc/mxnet](https://github.com/dmlc/mxnet). MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of features include: diff --git a/docs/src/tutorial/mnist.md b/docs/src/tutorial/mnist.md index 8282f48ac8ca..3047a9c1d3ca 100644 --- a/docs/src/tutorial/mnist.md +++ b/docs/src/tutorial/mnist.md @@ -19,7 +19,7 @@ using MXNet ``` to load the `MXNet` module. Then we are ready to define the network -architecture via the symbolic API </user-guide/overview>. We start +architecture via the [symbolic API](../user-guide/overview.md). We start with a placeholder `data` symbol, ```julia From 3a7913f18a1ab977ae9bb980c1b78fa85a238dec Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 16 Aug 2016 12:56:00 -0700 Subject: [PATCH 371/630] fix some formatting errors in docs --- docs/src/user-guide/overview.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/src/user-guide/overview.md b/docs/src/user-guide/overview.md index 691ab93e9acb..85814cdc63fe 100644 --- a/docs/src/user-guide/overview.md +++ b/docs/src/user-guide/overview.md @@ -192,10 +192,10 @@ lot of temp NDArray in the loop (the performance gets worse when increasing `N_REP`), is only about twice slower than the pre-allocated one. 
- Row Function Average Relative Replications - ------ --------------- ------------ ----------- --------------- - 1 "inplace\_op" 0.0074854 1.0 100 - 2 "normal\_op" 0.0174202 2.32723 100 +| Row | Function | Average | Relative | Replications | +| ------ | --------------- | ------------ | ----------- | --------------- | +| 1 | "inplace\_op" | 0.0074854 | 1.0 | 100 | +| 2 | "normal\_op" | 0.0174202 | 2.32723 | 100 | So it will usually not be a big problem unless you are at the bottleneck of the computation. From 63772b8e1a6685c09085b739fae98ba5243a39d1 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 16 Aug 2016 13:03:07 -0700 Subject: [PATCH 372/630] fix typos in doc --- docs/src/api/nn-factory.md | 2 +- docs/src/user-guide/install.md | 13 ++++++------- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/docs/src/api/nn-factory.md b/docs/src/api/nn-factory.md index 41474383aa76..833d9a3efd53 100644 --- a/docs/src/api/nn-factory.md +++ b/docs/src/api/nn-factory.md @@ -1,4 +1,4 @@ -# Neural Network Factora +# Neural Network Factory Neural network factory provide convenient helper functions to define common neural networks. diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md index fe586c248da3..34c532321ef7 100644 --- a/docs/src/user-guide/install.md +++ b/docs/src/user-guide/install.md @@ -24,9 +24,8 @@ libmxnet. The libmxnet source is downloaded to `Pkg.dir("MXNet")/deps/src/mxnet`. The automatic build is using default configurations, with OpenCV, CUDA disabled. If the compilation failed due to unresolved dependency, or if -you want to customize the build, it is recommended to compile and -install libmxnet manually. Please see below <manual-compilation> -for more details. +you want to customize the build, you can compile and +install libmxnet manually. Please see below for more details. Manual Compilation ------------------ @@ -38,8 +37,8 @@ to work with a seperate, maybe customized libmxnet. 
To build libmxnet, please refer to [the installation guide of libmxnet](http://mxnet.readthedocs.org/en/latest/build.html). After -successfully installing libmxnet, set the `MXNET_HOME` environment -variable to the location of libmxnet. In other words, the compiled +successfully installing libmxnet, set the `MXNET_HOME` *environment +variable* to the location of libmxnet. In other words, the compiled `libmxnet.so` should be found in `$MXNET_HOME/lib`. > **note** @@ -61,7 +60,7 @@ following paths (and in that order): - `Pkg.dir("MXNet")/deps/usr/lib`: automatic builds - Any system wide library search path -Note that MXNet.jl will not find `libmxnet.so` even if it is on one of -the paths above if a library it depends upon is missing from the +Note that MXNet.jl can not load `libmxnet.so` even if it is on one of +the paths above in case a library it depends upon is missing from the `LD_LIBRARY_PATH`. Thus, if you are going to compile to add CUDA, the path to the CUDA libraries will have to be added to `LD_LIBRARY_PATH`. 
From 50a294611fc93ef5170318b03b38e76509fc6247 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 18 Aug 2016 11:28:25 +0900 Subject: [PATCH 373/630] documentation cleanup --- docs/src/tutorial/mnist.md | 2 +- src/ndarray.jl | 268 ++++++++++++++++++------------------- src/nn-factory.jl | 17 +-- 3 files changed, 143 insertions(+), 144 deletions(-) diff --git a/docs/src/tutorial/mnist.md b/docs/src/tutorial/mnist.md index 3047a9c1d3ca..096d7dd0310f 100644 --- a/docs/src/tutorial/mnist.md +++ b/docs/src/tutorial/mnist.md @@ -28,7 +28,7 @@ data = mx.Variable(:data) and then cascading fully-connected layers and activation functions: -``` {.sourceCode .julia} +```julia fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) diff --git a/src/ndarray.jl b/src/ndarray.jl index 49555b50f09b..14fdee01564f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -70,13 +70,10 @@ end """ NDArray - Wrapper of the ``NDArray`` type in ``libmxnet``. This is the basic building block - of tensor-based computation. - - .. _ndarray-shape-note: - - .. note:: +Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block +of tensor-based computation. +!!! note since C/C++ use row-major ordering for arrays while Julia follows a column-major ordering. To keep things consistent, we keep the underlying data in their original layout, but use *language-native* convention when we talk @@ -113,7 +110,7 @@ Base.cconvert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) """ context(arr :: NDArray) - Get the context that this :class:`NDArray` lives on. +Get the context that this `NDArray` lives on. """ function context(arr :: NDArray) ref_typeid = Ref{Cint}(0) @@ -130,7 +127,7 @@ end empty(DType, shape :: Tuple) empty(DType, dim1, dim2, ...) - Allocate memory for an uninitialized :class:`NDArray` with a specified type. 
+Allocate memory for an uninitialized `NDArray` with a specified type. """ function empty{N,T<:DType}(::Type{T}, shape :: NTuple{N, Int}) empty(T, shape, cpu()) @@ -148,7 +145,7 @@ end empty(shape :: Tuple) empty(dim1, dim2, ...) - Allocate memory for an uninitialized :class:`NDArray` with specific shape of type Float32. +Allocate memory for an uninitialized `NDArray` with specific shape of type Float32. """ function empty{N}(shape :: NTuple{N, Int}) empty(shape, cpu()) @@ -165,7 +162,7 @@ end zeros(DType, shape :: Tuple) zeros(DType, dim1, dim2, ...) -Create zero-ed :class:`NDArray` with specific shape and type +Create zero-ed `NDArray` with specific shape and type """ function zeros{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}) zeros(T, shape, cpu()) @@ -184,7 +181,7 @@ end zeros(shape :: Tuple) zeros(dim1, dim2, ...) -Create zero-ed :class:`NDArray` with specific shape. +Create zero-ed `NDArray` with specific shape. """ function zeros{N}(shape :: NTuple{N, Int}) zeros(shape, cpu()) @@ -203,7 +200,7 @@ end ones(DType, shape :: Tuple) ones(DType, dim1, dim2, ...) -Create an :class:`NDArray` with specific shape & type, and initialize with 1. +Create an `NDArray` with specific shape & type, and initialize with 1. """ function ones{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}) ones(T, shape, cpu()) @@ -222,7 +219,7 @@ end ones(shape :: Tuple) ones(dim1, dim2, ...) -Create an :class:`NDArray` with specific shape and initialize with 1. +Create an `NDArray` with specific shape and initialize with 1. """ function ones{N}(shape :: NTuple{N, Int}) ones(shape, cpu()) @@ -243,8 +240,8 @@ import Base: size, length, ndims, eltype size(arr :: NDArray) size(arr :: NDArray, dim :: Int) - Get the shape of an :class:`NDArray`. The shape is in Julia's column-major convention. See - also the :ref:`notes on NDArray shapes `. +Get the shape of an `NDArray`. The shape is in Julia's column-major convention. See +also the notes on NDArray shapes [`NDArrat`](@ref). 
""" function size(arr :: NDArray) ref_ndim = Ref{MX_uint}(0) @@ -260,7 +257,7 @@ end """ length(arr :: NDArray) - Get the number of elements in an :class:`NDArray`. +Get the number of elements in an `NDArray`. """ function length(arr :: NDArray) prod(size(arr)) @@ -269,7 +266,7 @@ end """ ndims(arr :: NDArray) - Get the number of dimensions of an :class:`NDArray`. Is equivalent to ``length(size(arr))``. +Get the number of dimensions of an `NDArray`. Is equivalent to `length(size(arr))`. """ function ndims(arr :: NDArray) length(size(arr)) @@ -278,7 +275,7 @@ end """ eltype(arr :: NDArray) - Get the element type of an :class:`NDArray`. +Get the element type of an `NDArray`. """ function eltype{T <: Union{NDArray, MX_NDArrayHandle}}(arr :: T) dtype_ref = Ref{Cint}(0) @@ -299,11 +296,11 @@ import Base: slice """ slice(arr :: NDArray, start:stop) - Create a view into a sub-slice of an :class:`NDArray`. Note only slicing at the slowest - changing dimension is supported. In Julia's column-major perspective, this is the last - dimension. For example, given an :class:`NDArray` of shape (2,3,4), ``slice(array, 2:3)`` will create - a :class:`NDArray` of shape (2,3,2), sharing the data with the original array. This operation is - used in data parallelization to split mini-batch into sub-batches for different devices. +Create a view into a sub-slice of an `NDArray`. Note only slicing at the slowest +changing dimension is supported. In Julia's column-major perspective, this is the last +dimension. For example, given an `NDArray` of shape (2,3,4), `slice(array, 2:3)` will create +a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is +used in data parallelization to split mini-batch into sub-batches for different devices. """ function slice(arr :: NDArray, ::Colon) arr @@ -329,13 +326,13 @@ import Base: setindex! """ setindex!(arr :: NDArray, val, idx) - Assign values to an :class:`NDArray`. 
Elementwise assignment is not implemented, only the following - scenarios are supported +Assign values to an `NDArray`. Elementwise assignment is not implemented, only the following +scenarios are supported - - ``arr[:] = val``: whole array assignment, ``val`` could be a scalar or an array (Julia ``Array`` - or :class:`NDArray`) of the same shape. - - ``arr[start:stop] = val``: assignment to a *slice*, ``val`` could be a scalar or an array of - the same shape to the slice. See also :func:`slice`. +* `arr[:] = val`: whole array assignment, `val` could be a scalar or an array (Julia `Array` + or `NDArray`) of the same shape. +* `arr[start:stop] = val`: assignment to a *slice*, `val` could be a scalar or an array of + the same shape to the slice. See also [`slice`](@ref). """ function setindex!(arr :: NDArray, val :: Real, ::Colon) @assert(arr.writable) @@ -356,36 +353,35 @@ import Base: getindex """ getindex(arr :: NDArray, idx) -Shortcut for :func:`slice`. A typical use is to write - - .. code-block:: julia - - arr[:] += 5 +Shortcut for [`slice`](@ref). A typical use is to write - which translates into +```julia + arr[:] += 5 +``` - .. code-block:: julia +which translates into - arr[:] = arr[:] + 5 +```julia + arr[:] = arr[:] + 5 +``` - which furthur translates into +which furthur translates into - .. code-block:: julia +```julia + setindex!(getindex(arr, Colon()), 5, Colon()) +``` - setindex!(getindex(arr, Colon()), 5, Colon()) - - .. note:: - - The behavior is quite different from indexing into Julia's ``Array``. For example, ``arr[2:5]`` - create a **copy** of the sub-array for Julia ``Array``, while for :class:`NDArray`, this is - a *slice* that shares the memory. +!!! note + The behavior is quite different from indexing into Julia's `Array`. For example, `arr[2:5]` + create a **copy** of the sub-array for Julia `Array`, while for `NDArray`, this is + a *slice* that shares the memory. 
""" function getindex(arr :: NDArray, ::Colon) return arr end """ -Shortcut for `slice`. **NOTE** the behavior for Julia's built-in index slicing is to create a +Shortcut for [`slice`](@ref). **NOTE** the behavior for Julia's built-in index slicing is to create a copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. """ function getindex(arr :: NDArray, idx::UnitRange{Int}) @@ -397,7 +393,7 @@ import Base: copy!, copy, convert .. function:: copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) - Copy contents of ``src`` into ``dst``. +Copy contents of `src` into `dst`. """ function copy!(dst :: NDArray, src :: NDArray) @assert(dst.writable) @@ -441,13 +437,12 @@ end """ -.. function:: - copy(arr :: NDArray) - copy(arr :: NDArray, ctx :: Context) - copy(arr :: Array, ctx :: Context) + copy(arr :: NDArray) + copy(arr :: NDArray, ctx :: Context) + copy(arr :: Array, ctx :: Context) - Create a copy of an array. When no :class:`Context` is given, create a Julia ``Array``. - Otherwise, create an :class:`NDArray` on the specified context. +Create a copy of an array. When no `Context` is given, create a Julia `Array`. +Otherwise, create an `NDArray` on the specified context. """ # Create copy: NDArray -> Julia Array function copy(arr :: NDArray) @@ -470,7 +465,7 @@ end """ convert(::Type{Array{T}}, arr :: NDArray) - Convert an :class:`NDArray` into a Julia ``Array`` of specific type. Data will be copied. +Convert an `NDArray` into a Julia `Array` of specific type. Data will be copied. """ # Convert copy: NDArray -> Julia Array function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) @@ -480,25 +475,25 @@ end """ @inplace -Julia does not support re-definiton of ``+=`` operator (like ``__iadd__`` in python), -When one write ``a += b``, it gets translated to ``a = a+b``. 
``a+b`` will allocate new -memory for the results, and the newly allocated :class:`NDArray` object is then assigned +Julia does not support re-definiton of `+=` operator (like `__iadd__` in python), +When one write `a += b`, it gets translated to `a = a+b`. `a+b` will allocate new +memory for the results, and the newly allocated `NDArray` object is then assigned back to a, while the original contents in a is discarded. This is very inefficient when we want to do inplace update. This macro is a simple utility to implement this behavior. Write - .. code-block:: julia +```julia + @mx.inplace a += b +``` - @mx.inplace a += b +will translate into - will translate into +```julia + mx.add_to!(a, b) +``` - .. code-block:: julia - - mx.add_to!(a, b) - - which will do inplace adding of the contents of ``b`` into ``a``. +which will do inplace adding of the contents of `b` into `a`. """ macro inplace(stmt) if stmt.head == :+= || stmt.head == :.+= @@ -517,7 +512,7 @@ end """ add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) -Add a bunch of arguments into ``dst``. Inplace updating. +Add a bunch of arguments into `dst`. Inplace updating. """ function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) @assert dst.writable @@ -537,8 +532,8 @@ import Base: +, .+ +(args...) .+(args...) -Summation. Multiple arguments of either scalar or :class:`NDArray` could be -added together. Note at least the first or second argument needs to be an :class:`NDArray` to +Summation. Multiple arguments of either scalar or `NDArray` could be +added together. Note at least the first or second argument needs to be an `NDArray` to avoid ambiguity of built-in summation. """ function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) @@ -558,7 +553,7 @@ end """ sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) - Subtract a bunch of arguments from ``dst``. Inplace updating. +Subtract a bunch of arguments from `dst`. Inplace updating. 
""" function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable @@ -576,8 +571,8 @@ import Base: -, .- -(arg0) .-(arg0, arg1) -Subtraction ``arg0 - arg1``, of scalar types or :class:`NDArray`. Or create -the negative of ``arg0``. +Subtraction `arg0 - arg1`, of scalar types or `NDArray`. Or create +the negative of `arg0`. """ function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) @@ -602,8 +597,8 @@ end """ mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) - Elementwise multiplication into ``dst`` of either a scalar or an :class:`NDArray` of the same shape. - Inplace updating. +Elementwise multiplication into `dst` of either a scalar or an `NDArray` of the same shape. +Inplace updating. """ function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable @@ -620,7 +615,7 @@ import Base: .*, * """ .*(arg0, arg1) -Elementwise multiplication of ``arg0`` and ``arg``, could be either scalar or :class:`NDArray`. +Elementwise multiplication of `arg0` and `arg`, could be either scalar or `NDArray`. """ function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) @@ -633,7 +628,7 @@ end """ *(arg0, arg1) -Currently only multiplication a scalar with an :class:`NDArray` is implemented. Matrix multiplication +Currently only multiplication a scalar with an `NDArray` is implemented. Matrix multiplication is to be added soon. """ function *(arg0 :: NDArray, arg :: Real) @@ -647,7 +642,7 @@ end """ div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) -Elementwise divide a scalar or an :class:`NDArray` of the same shape from ``dst``. Inplace updating. +Elementwise divide a scalar or an `NDArray` of the same shape from `dst`. Inplace updating. 
""" function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable @@ -662,7 +657,7 @@ import Base: ./, / """ ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) -Elementwise dividing an :class:`NDArray` by a scalar or another :class:`NDArray` of the same shape. +Elementwise dividing an `NDArray` by a scalar or another `NDArray` of the same shape. """ function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) @@ -672,7 +667,7 @@ end """ /(arg0 :: NDArray, arg :: Real) -Divide an :class:`NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. +Divide an `NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. """ function /(arg0 :: NDArray, arg :: Real) ./(arg0, arg) @@ -685,42 +680,41 @@ Manipulating as Julia Arrays @nd_as_jl(captures..., statement) - A convenient macro that allows to operate :class:`NDArray` as Julia Arrays. For example, +A convenient macro that allows to operate `NDArray` as Julia Arrays. For example, - .. code-block:: julia +```julia + x = mx.zeros(3,4) + y = mx.ones(3,4) + z = mx.zeros((3,4), mx.gpu()) - x = mx.zeros(3,4) - y = mx.ones(3,4) - z = mx.zeros((3,4), mx.gpu()) - - @mx.nd_as_jl ro=(x,y) rw=z begin - # now x, y, z are just ordinary Julia Arrays - z[:,1] = y[:,2] - z[:,2] = 5 - end - - Under the hood, the macro convert all the declared captures from :class:`NDArray` into Julia - Arrays, by using :func:`try_get_shared`. And automatically commit the modifications back into - the :class:`NDArray` that is declared as ``rw``. This is useful for fast prototyping and when - implement non-critical computations, such as :class:`AbstractEvalMetric`. - - .. note:: - - - Multiple ``rw`` and / or ``ro`` capture declaration could be made. - - The macro does **not** check to make sure that ``ro`` captures are not modified. 
If the - original :class:`NDArray` lives in CPU memory, then it is very likely the corresponding - Julia Array shares data with the :class:`NDArray`, so modifying the Julia Array will also - modify the underlying :class:`NDArray`. - - More importantly, since the :class:`NDArray` is - asynchronized, we will wait for *writing* for ``rw`` variables but wait only for *reading* - in ``ro`` variables. If we write into those ``ro`` variables, **and** if the memory is - shared, racing condition might happen, and the behavior is undefined. - - When an :class:`NDArray` is declared to be captured as ``rw``, its contents is always sync - back in the end. - - The execution results of the expanded macro is always ``nothing``. - - The statements are wrapped in a ``let``, thus locally introduced new variables will not be - available after the statements. So you will need to declare the variables before calling the - macro if needed. + @mx.nd_as_jl ro=(x,y) rw=z begin + # now x, y, z are just ordinary Julia Arrays + z[:,1] = y[:,2] + z[:,2] = 5 + end +``` + +Under the hood, the macro convert all the declared captures from `NDArray` into Julia +Arrays, by using `try_get_shared`. And automatically commit the modifications back into +the `NDArray` that is declared as `rw`. This is useful for fast prototyping and when +implement non-critical computations, such as `AbstractEvalMetric`. + +!!! note +* Multiple `rw` and / or `ro` capture declaration could be made. +* The macro does **not** check to make sure that `ro` captures are not modified. If the + original `NDArray` lives in CPU memory, then it is very likely the corresponding + Julia Array shares data with the `NDArray`, so modifying the Julia Array will also + modify the underlying `NDArray`. +* More importantly, since the `NDArray` is + asynchronized, we will wait for *writing* for `rw` variables but wait only for *reading* + in `ro` variables. 
If we write into those `ro` variables, **and** if the memory is + shared, racing condition might happen, and the behavior is undefined. +* When an `NDArray` is declared to be captured as `rw`, its contents is always sync + back in the end. +* The execution results of the expanded macro is always `nothing`. +* The statements are wrapped in a `let`, thus locally introduced new variables will not be + available after the statements. So you will need to declare the variables before calling the + macro if needed. """ macro nd_as_jl(m_args...) @assert(length(m_args) > 0) @@ -812,14 +806,15 @@ end """ try_get_shared(arr) - Try to create a Julia array by sharing the data with the underlying :class:`NDArray`. +Try to create a Julia array by sharing the data with the underlying `NDArray`. -* NDArray arr: the array to be shared. +# Arguments: +* `arr::NDArray`: the array to be shared. .. warning:: - The returned array does not guarantee to share data with the underlying :class:`NDArray`. - In particular, data sharing is possible only when the :class:`NDArray` lives on CPU. + The returned array does not guarantee to share data with the underlying `NDArray`. + In particular, data sharing is possible only when the `NDArray` lives on CPU. """ function try_get_shared(arr :: NDArray) if context(arr).device_type == CPU @@ -834,10 +829,11 @@ end """ is_shared(j_arr, arr) - Test whether ``j_arr`` is sharing data with ``arr``. +Test whether `j_arr` is sharing data with `arr`. +# Arguments: * Array j_arr: the Julia Array. -* NDArray arr: the :class:`NDArray`. +* NDArray arr: the `NDArray`. """ function is_shared(j_arr :: Array, arr :: NDArray) false @@ -857,13 +853,16 @@ end Load NDArrays from binary file. -* AbstractString filename: the path of the file to load. It could be S3 or HDFS address. - :return: Either ``Dict{Base.Symbol, NDArray}`` or ``Vector{NDArray}``. +# Arguments: +* `filename::String`: the path of the file to load. It could be S3 or HDFS address. 
+ +Returns either `Dict{Symbol, NDArray}` or `Vector{NDArray}`. -If the ``libmxnet`` is built with the corresponding component enabled. Examples -* ``s3://my-bucket/path/my-s3-ndarray`` -* ``hdfs://my-bucket/path/my-hdfs-ndarray`` -* ``/path-to/my-local-ndarray`` +`filename` can point to `s3` or `hdfs` resources if the `libmxnet` is built with the +corresponding components enabled. Examples: +* `s3://my-bucket/path/my-s3-ndarray` +* `hdfs://my-bucket/path/my-hdfs-ndarray` +* `/path-to/my-local-ndarray` """ function load(filename::AbstractString, ::Type{NDArray}) out_size = Ref{MX_uint}(0) @@ -886,12 +885,11 @@ end """ save(filename :: AbstractString, data) -Save NDarrays to binary file. Filename could be S3 or HDFS address, if ``libmxnet`` is built -with corresponding support. +Save NDarrays to binary file. Filename could be S3 or HDFS address, if `libmxnet` is built +with corresponding support (see `load`). -* AbstractString filename: path to the binary file to write to. -* data: data to save to file. - :type data: :class:`NDArray`, or a ``Vector{NDArray}`` or a ``Dict{Base.Symbol, NDArray}``. +* `filename::String`: path to the binary file to write to. +* `data`: data to save to file. Data can be a`NDArray`, a `Vector{NDArray}`, or a `Dict{Base.Symbol, NDArray}`. """ function save(filename::AbstractString, data::NDArray) save(filename, [data]) @@ -928,22 +926,22 @@ end import Base: sqrt """ -The libxmnet APIs are automatically imported from ``libmxnet.so``. The functions listed -here operate on :class:`NDArray` objects. The arguments to the functions are typically ordered +The libxmnet APIs are automatically imported from `libmxnet.so`. The functions listed +here operate on `NDArray` objects. The arguments to the functions are typically ordered as .. code-block:: julia func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) -unless ``NDARRAY_ARG_BEFORE_SCALAR`` is not set. 
In this case, the scalars are put before the input arguments: +unless `NDARRAY_ARG_BEFORE_SCALAR` is not set. In this case, the scalars are put before the input arguments: .. code-block:: julia func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) -If ``ACCEPT_EMPTY_MUTATE_TARGET`` is set. An overloaded function without the output arguments will also be defined: +If `ACCEPT_EMPTY_MUTATE_TARGET` is set. An overloaded function without the output arguments will also be defined: .. code-block:: julia @@ -952,7 +950,7 @@ If ``ACCEPT_EMPTY_MUTATE_TARGET`` is set. An overloaded function without the out Upon calling, the output arguments will be automatically initialized with empty NDArrays. Those functions always return the output arguments. If there is only one output (the typical situation), that -object (:class:`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. +object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. """ function _get_ndarray_functions() diff --git a/src/nn-factory.jl b/src/nn-factory.jl index b170214a1af9..a60a4716bfef 100644 --- a/src/nn-factory.jl +++ b/src/nn-factory.jl @@ -1,24 +1,25 @@ """ - MLP(input, spec) + MLP(input, spec; hidden_activation = :relu, prefix) Construct a multi-layer perceptron. A MLP is a multi-layer neural network with fully connected layers. -* SymbolicNode input: the input to the mlp. -* spec: the mlp specification, a list of hidden dimensions. For example, - ``[128, (512, :sigmoid), 10]``. The number in the list indicate the +# Arguments: +* `input::SymbolicNode`: the input to the mlp. +* `spec`: the mlp specification, a list of hidden dimensions. For example, + `[128, (512, :sigmoid), 10]`. The number in the list indicate the number of hidden units in each layer. A tuple could be used to specify the activation of each layer. Otherwise, the default activation will be used (except for the last layer). 
-* Base.Symbol hidden_activation: keyword argument, default ``:relu``, indicating +* `hidden_activation::Symbol`: keyword argument, default `:relu`, indicating the default activation for hidden layers. The specification here could be overwritten - by layer-wise specification in the ``spec`` argument. Also activation is not + by layer-wise specification in the `spec` argument. Also activation is not applied to the last, i.e. the prediction layer. See :func:`Activation` for a list of supported activation types. -* prefix: keyword argument, default ``gensym()``, used as the prefix to +* `prefix`: keyword argument, default `gensym()`, used as the prefix to name the constructed layers. - :return: the constructed MLP. +Returns the constructed MLP. """ function MLP(input, spec; hidden_activation::Base.Symbol=:relu, prefix=gensym()) spec = convert(Vector{Union{Int,Tuple}}, spec) From 71eefbbbc4ea20e123abf63fdfdf21b6b96cdf55 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 18 Aug 2016 21:44:27 +0900 Subject: [PATCH 374/630] Remove `` and :class: --- docs/src/user-guide/overview.md | 2 +- src/callback.jl | 8 ++--- src/context.jl | 2 +- src/executor.jl | 6 ++-- src/initializer.jl | 10 +++--- src/io.jl | 42 ++++++++++++------------- src/metric.jl | 4 +-- src/model.jl | 44 +++++++++++++------------- src/optimizer.jl | 6 ++-- src/symbolic-node.jl | 55 +++++++++++++++++---------------- src/visualize.jl | 4 +-- 11 files changed, 93 insertions(+), 90 deletions(-) diff --git a/docs/src/user-guide/overview.md b/docs/src/user-guide/overview.md index 85814cdc63fe..9a7d8e514894 100644 --- a/docs/src/user-guide/overview.md +++ b/docs/src/user-guide/overview.md @@ -59,7 +59,7 @@ The followings are common ways to create NDArray objects: - `mx.empty(shape[, context])`: create on uninitialized array of a given shape on a specific device. For example, - `` mx.empty(2,3)`, `mx.((2,3), mx.gpu(2)) ``. + ` mx.empty(2,3)`, `mx.((2,3), mx.gpu(2)) `. 
- `mx.zeros(shape[, context])` and `mx.ones(shape[, context])`: similar to the Julia's built-in `zeros` and `ones`. - `mx.copy(jl_arr, context)`: copy the contents of a Julia `Array` to diff --git a/src/callback.jl b/src/callback.jl index c3e1d299403e..6203f9f51f87 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -28,7 +28,7 @@ end """ every_n_batch(callback :: Function, n :: Int; call_on_0 = false) -A convenient function to construct a callback that runs every ``n`` mini-batches. +A convenient function to construct a callback that runs every `n` mini-batches. # Arguments * `call_on_0::Bool`: keyword argument, default false. Unless set, the callback @@ -64,7 +64,7 @@ end """ speedometer(; frequency=50) -Create an :class:`AbstractBatchCallback` that measure the training speed +Create an `AbstractBatchCallback` that measure the training speed (number of samples processed per second) every k mini-batches. # Arguments @@ -95,7 +95,7 @@ end """ every_n_epoch(callback :: Function, n :: Int; call_on_0 = false) -A convenient function to construct a callback that runs every ``n`` full data-passes. +A convenient function to construct a callback that runs every `n` full data-passes. * Int call_on_0: keyword argument, default false. Unless set, the callback will **not** be run on epoch 0. Epoch 0 means no training has been performed @@ -120,7 +120,7 @@ end """ do_checkpoint(prefix; frequency=1, save_epoch_0=false) -Create an :class:`AbstractEpochCallback` that save checkpoints of the model to disk. +Create an `AbstractEpochCallback` that save checkpoints of the model to disk. The checkpoints can be loaded back later on. # Arguments diff --git a/src/context.jl b/src/context.jl index 908d542f5ba3..410a80ca8b4c 100644 --- a/src/context.jl +++ b/src/context.jl @@ -19,7 +19,7 @@ end """ cpu(dev_id) -Get a CPU context with a specific id. ``cpu()`` is usually the default context for many +Get a CPU context with a specific id. 
`cpu()` is usually the default context for many operations when no context is specified. # Arguments diff --git a/src/executor.jl b/src/executor.jl index 0bdccc942d22..8e8bdd3bec93 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -1,7 +1,7 @@ """ Executor -An executor is a realization of a symbolic architecture defined by a :class:`SymbolicNode`. +An executor is a realization of a symbolic architecture defined by a `SymbolicNode`. The actual forward and backward computation specified by the network architecture can be carried out with an executor. """ @@ -68,12 +68,12 @@ end """ bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) -Create an :class:`Executor` by binding a :class:`SymbolicNode` to concrete :class:`NDArray`. +Create an `Executor` by binding a `SymbolicNode` to concrete `NDArray`. # Arguments * `sym::SymbolicNode`: the network architecture describing the computation graph. * `ctx::Context`: the context on which the computation should run. -* `args`: either a list of :class:`NDArray` or a dictionary of name-array pairs. Concrete +* `args`: either a list of `NDArray` or a dictionary of name-array pairs. Concrete arrays for all the inputs in the network architecture. The inputs typically include network parameters (weights, bias, filters, etc.), data and labels. See :func:`list_arguments` and :func:`infer_shape`. diff --git a/src/initializer.jl b/src/initializer.jl index 7ee9920a9d12..dacb06f349c3 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -64,7 +64,7 @@ end """ UniformInitializer(scale=0.07) -Construct a :class:`UniformInitializer` with the specified scale. +Construct a `UniformInitializer` with the specified scale. """ UniformInitializer() = UniformInitializer(0.07) @@ -84,7 +84,7 @@ end """ NormalIninitializer(; mu=0, sigma=0.01) -Construct a :class:`NormalInitializer` with mean ``mu`` and variance ``sigma``. +Construct a `NormalInitializer` with mean `mu` and variance `sigma`. 
""" NormalInitializer(; mu=0, sigma=0.01) = NormalInitializer(mu, sigma) @@ -106,9 +106,9 @@ a normal distribution with μ = 0 and σ² or a uniform distribution from -σ to Several different ways of calculating the variance are given in the literature or are used by various libraries. -* [Bengio and Glorot 2010]: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1)`` -* [K. He, X. Zhang, S. Ren, and J. Sun 2015]: ``mx.XavierInitializer(distribution = mx.xv_gaussian, regularization = mx.xv_in, magnitude = 2)`` -* caffe_avg: ``mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 3)`` +* [Bengio and Glorot 2010]: `mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1)` +* [K. He, X. Zhang, S. Ren, and J. Sun 2015]: `mx.XavierInitializer(distribution = mx.xv_gaussian, regularization = mx.xv_in, magnitude = 2)` +* caffe_avg: `mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 3)` """ @enum XavierDistribution xv_uniform xv_normal diff --git a/src/io.jl b/src/io.jl index f240388ffbda..56b463a1a6c3 100644 --- a/src/io.jl +++ b/src/io.jl @@ -25,7 +25,7 @@ The root type for all data provider. A data provider should implement the follow training stage, both *data* and *label* will be feeded into the model, while during prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and of any shapes. The provided data and label names here should match the input names in a target - :class:`SymbolicNode`. + `SymbolicNode`. A data provider should also implement the Julia iteration interface, in order to allow iterating through the data set. The provider will be called in the following way: @@ -48,7 +48,7 @@ The root type for all data provider. 
A data provider should implement the follow By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface is implemented on the provider type itself. But the extra layer of abstraction allows us to - implement a data provider easily via a Julia ``Task`` coroutine. See the + implement a data provider easily via a Julia `Task` coroutine. See the data provider defined in :doc:`the char-lstm example ` for an example of using coroutine to define data providers. @@ -58,7 +58,7 @@ The detailed interface functions for the iterator API is listed below: Base.eltype(provider) -> AbstractDataBatch :param AbstractDataProvider provider: the data provider. - :return: the specific subtype representing a data batch. See :class:`AbstractDataBatch`. + :return: the specific subtype representing a data batch. See `AbstractDataBatch`. Base.start(provider) -> AbstractDataProviderState @@ -91,7 +91,7 @@ case, you can safely assume that not be called. With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation -of the built-in :class:`MXDataProvider` for example. +of the built-in `MXDataProvider` for example. .. caution:: @@ -137,7 +137,7 @@ abstract AbstractDataProviderState :return: a vector of data in this batch, should be in the same order as declared in :func:`provide_data() `. - The last dimension of each :class:`NDArray` should always match the batch_size, even when + The last dimension of each `NDArray` should always match the batch_size, even when :func:`count_samples` returns a value less than the batch size. In this case, the data provider is free to pad the remaining contents with any value. @@ -167,7 +167,7 @@ abstract AbstractDataProviderState :type targets: Vector{Vector{SlicedNDArray}} The targets is a list of the same length as number of data provided by this provider. - Each element in the list is a list of :class:`SlicedNDArray`. 
This list described a + Each element in the list is a list of `SlicedNDArray`. This list described a spliting scheme of this data batch into different slices, each slice is specified by a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch that should be loaded into the corresponding *ndarray*. @@ -189,7 +189,7 @@ abstract AbstractDataBatch """ DataBatch - A basic subclass of :class:`AbstractDataBatch`, that implement the interface by + A basic subclass of `AbstractDataBatch`, that implement the interface by accessing member fields. """ type DataBatch <: AbstractDataBatch @@ -204,7 +204,7 @@ get_label{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batc """ SlicedNDArray - A alias type of ``Tuple{UnitRange{Int},NDArray}``. + A alias type of `Tuple{UnitRange{Int},NDArray}`. """ typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} @@ -257,7 +257,7 @@ eachbatch(provider :: AbstractDataProvider) = provider """ ArrayDataProvider - A convenient tool to iterate :class:`NDArray` or Julia ``Array``. + A convenient tool to iterate `NDArray` or Julia `Array`. """ type ArrayDataProvider <: AbstractDataProvider data_arrays :: Vector{Array{MX_float}} @@ -277,16 +277,16 @@ end """ ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) - Construct a data provider from :class:`NDArray` or Julia Arrays. + Construct a data provider from `NDArray` or Julia Arrays. :param data: the data, could be - - a :class:`NDArray`, or a Julia Array. This is equivalent to ``:data => data``. - - a name-data pair, like ``:mydata => array``, where ``:mydata`` is the name of the data - and ``array`` is an :class:`NDArray` or a Julia Array. + - a `NDArray`, or a Julia Array. This is equivalent to `:data => data`. + - a name-data pair, like `:mydata => array`, where `:mydata` is the name of the data + and `array` is an `NDArray` or a Julia Array. - a list of name-data pairs. - :param label: the same as the ``data`` parameter. 
When this argument is omitted, the constructed + :param label: the same as the `data` parameter. When this argument is omitted, the constructed provider will provide no labels. :param Int batch_size: the batch size, default is 0, which means treating the whole array as a single mini-batch. @@ -294,9 +294,9 @@ end :param Real data_padding: when the mini-batch goes beyond the dataset boundary, there might be less samples to include than a mini-batch. This value specify a scalar to pad the contents of all the missing data points. - :param Real label_padding: the same as ``data_padding``, except for the labels. + :param Real label_padding: the same as `data_padding`, except for the labels. - TODO: remove ``data_padding`` and ``label_padding``, and implement rollover that copies + TODO: remove `data_padding` and `label_padding`, and implement rollover that copies the last or first several training samples to feed the padding. """ # Julia's type system is sometimes very frustrating. You cannot specify a function @@ -563,16 +563,16 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) if gen_docs if endswith(string(iter_name), "Iter") - f_desc = "Can also be called with the alias ``$(string(iter_name)[1:end-4] * "Provider")``.\n" + f_desc = "Can also be called with the alias `$(string(iter_name)[1:end-4] * "Provider")`.\n" else f_desc = "" end f_desc *= unsafe_string(ref_desc[]) * "\n\n" - f_desc *= ":param Base.Symbol data_name: keyword argument, default ``:data``. The name of the data.\n" - f_desc *= ":param Base.Symbol label_name: keyword argument, default ``:softmax_label``. " * - "The name of the label. Could be ``nothing`` if no label is presented in this dataset.\n\n" + f_desc *= ":param Base.Symbol data_name: keyword argument, default `:data`. The name of the data.\n" + f_desc *= ":param Base.Symbol label_name: keyword argument, default `:softmax_label`. " * + "The name of the label. 
Could be `nothing` if no label is presented in this dataset.\n\n" f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":return: the constructed :class:`MXDataProvider`." + f_desc *= ":return: the constructed `MXDataProvider`." return (iter_name, f_desc) end diff --git a/src/metric.jl b/src/metric.jl index 21d5e4e34b14..aea8a0af628e 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -22,8 +22,8 @@ interfaces. Get the accumulated metrics. - :return: ``Vector{Tuple{Base.Symbol, Real}}``, a list of name-value pairs. For - example, ``[(:accuracy, 0.9)]``. + :return: `Vector{Tuple{Base.Symbol, Real}}`, a list of name-value pairs. For + example, `[(:accuracy, 0.9)]`. """ abstract AbstractEvalMetric diff --git a/src/model.jl b/src/model.jl index f47d49d1a82d..d6752e24ebd3 100644 --- a/src/model.jl +++ b/src/model.jl @@ -48,9 +48,9 @@ end FeedForward(arch :: SymbolicNode, ctx) * arch: the architecture of the network constructed using the symbolic API. -* ctx: the devices on which this model should do computation. It could be a single :class:`Context` - or a list of :class:`Context` objects. In the latter case, data parallelization will be used - for training. If no context is provided, the default context ``cpu()`` will be used. +* ctx: the devices on which this model should do computation. It could be a single `Context` + or a list of `Context` objects. In the latter case, data parallelization will be used + for training. If no context is provided, the default context `cpu()` will be used. """ function FeedForward(arch :: SymbolicNode; context :: Union{Context, Vector{Context}, Void} = nothing) if isa(context, Void) @@ -74,7 +74,7 @@ end * AbstractInitializer initializer: an initializer describing how the weights should be initialized. * Bool overwrite: keyword argument, force initialization even when weights already exists. * input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. 
- For example, ``data=(28,28,1,100), label=(100,)``. + For example, `data=(28,28,1,100), label=(100,)`. """ function init_model(self :: FeedForward, initializer :: AbstractInitializer; overwrite::Bool=false, input_shapes...) # all arg names, including data, label, and parameters @@ -177,12 +177,12 @@ end * FeedForward self: the model. * AbstractDataProvider data: the data to perform prediction on. -* Bool overwrite: an :class:`Executor` is initialized the first time predict is called. The memory - allocation of the :class:`Executor` depends on the mini-batch size of the test +* Bool overwrite: an `Executor` is initialized the first time predict is called. The memory + allocation of the `Executor` depends on the mini-batch size of the test data provider. If you call predict twice with data provider of the same batch-size, - then the executor can be potentially be re-used. So, if ``overwrite`` is false, - we will try to re-use, and raise an error if batch-size changed. If ``overwrite`` - is true (the default), a new :class:`Executor` will be created to replace the old one. + then the executor can be potentially be re-used. So, if `overwrite` is false, + we will try to re-use, and raise an error if batch-size changed. If `overwrite` + is true (the default), a new `Executor` will be created to replace the old one. .. note:: @@ -196,9 +196,9 @@ end .. note:: - If you perform further after prediction. The weights are not automatically synchronized if ``overwrite`` + If you perform further after prediction. The weights are not automatically synchronized if `overwrite` is set to false and the old predictor is re-used. In this case - setting ``overwrite`` to true (the default) will re-initialize the predictor the next time you call + setting `overwrite` to true (the default) will re-initialize the predictor the next time you call predict and synchronize the weights again. 
:seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` @@ -319,28 +319,28 @@ end """ fit(model :: FeedForward, optimizer, data; kwargs...) -Train the ``model`` on ``data`` with the ``optimizer``. +Train the `model` on `data` with the `optimizer`. * FeedForward model: the model to be trained. * AbstractOptimizer optimizer: the optimization algorithm to use. * AbstractDataProvider data: the training data provider. * Int n_epoch: default 10, the number of full data-passes to run. -* AbstractDataProvider eval_data: keyword argument, default ``nothing``. The data provider for +* AbstractDataProvider eval_data: keyword argument, default `nothing`. The data provider for the validation set. -* AbstractEvalMetric eval_metric: keyword argument, default ``Accuracy()``. The metric used - to evaluate the training performance. If ``eval_data`` is provided, the same metric is also +* AbstractEvalMetric eval_metric: keyword argument, default `Accuracy()`. The metric used + to evaluate the training performance. If `eval_data` is provided, the same metric is also calculated on the validation set. -* kvstore: keyword argument, default ``:local``. The key-value store used to synchronize gradients +* kvstore: keyword argument, default `:local`. The key-value store used to synchronize gradients and parameters when multiple devices are used for training. - :type kvstore: :class:`KVStore` or ``Base.Symbol`` -* AbstractInitializer initializer: keyword argument, default ``UniformInitializer(0.01)``. + :type kvstore: `KVStore` or `Base.Symbol` +* AbstractInitializer initializer: keyword argument, default `UniformInitializer(0.01)`. * Bool force_init: keyword argument, default false. 
By default, the random initialization using the - provided ``initializer`` will be skipped if the model weights already exists, maybe from a previous + provided `initializer` will be skipped if the model weights already exists, maybe from a previous call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When this option is set, it will always do random initialization at the begining of training. -* callbacks: keyword argument, default ``[]``. Callbacks to be invoked at each epoch or mini-batch, - see :class:`AbstractCallback`. - :type callbacks: ``Vector{AbstractCallback}`` +* callbacks: keyword argument, default `[]`. Callbacks to be invoked at each epoch or mini-batch, + see `AbstractCallback`. + :type callbacks: `Vector{AbstractCallback}` """ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) opts = TrainingOptions(; kwargs...) diff --git a/src/optimizer.jl b/src/optimizer.jl index e2fc44338a2f..c5d4b29aa308 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -85,7 +85,7 @@ get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rat LearningRate.Exp :math:`\eta_t = \eta_0\gamma^t`. Here :math:`t` is the epoch count, or the iteration - count if ``decay_on_iteration`` is set to true. + count if `decay_on_iteration` is set to true. """ type Exp <: AbstractLearningRateScheduler learning_rate :: Float64 @@ -102,7 +102,7 @@ get_learning_rate(self :: Exp, state :: OptimizationState) = LearningRate.Inv :math:`\eta_t = \eta_0 * (1 + \gamma * t)^(-power)`. - Here :math:`t` is the epoch count, or the iteration count if ``decay_on_iteration`` + Here :math:`t` is the epoch count, or the iteration count if `decay_on_iteration` is set to true. 
""" type Inv <: AbstractLearningRateScheduler @@ -207,7 +207,7 @@ abstract AbstractOptimizerOptions normalized_gradient(opts, state, grad) * AbstractOptimizerOptions opts: options for the optimizer, should contain the field - ``grad_scale``, ``grad_clip`` and ``weight_decay``. + `grad_scale`, `grad_clip` and `weight_decay`. * OptimizationState state: the current optimization state. * NDArray weight: the trainable weights. * NDArray grad: the original gradient of the weights. diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 600bc75b3aeb..b71733ace71b 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -37,7 +37,7 @@ end call(self :: SymbolicNode, args :: SymbolicNode...) call(self :: SymbolicNode; kwargs...) -Make a new node by composing ``self`` with ``args``. Or the arguments +Make a new node by composing `self` with `args`. Or the arguments can be specified using keyword arguments. """ @compat function (self::SymbolicNode)(args :: SymbolicNode...) @@ -66,11 +66,11 @@ end list_arguments(self :: SymbolicNode) List all the arguments of this node. The argument for a node contains both -the inputs and parameters. For example, a :class:`FullyConnected` node will +the inputs and parameters. For example, a `FullyConnected` node will have both data and weights in its arguments. A composed node (e.g. a MLP) will list all the arguments for intermediate nodes. - :return: A list of symbols indicating the names of the arguments. +Returns a list of symbols indicating the names of the arguments. """ function list_arguments(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListArguments) @@ -81,7 +81,7 @@ end List all the outputs of this node. - :return: A list of symbols indicating the names of the outputs. +Returns a list of symbols indicating the names of the outputs. """ function list_outputs(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListOutputs) @@ -99,7 +99,7 @@ and do not have gradient. 
But still be useful for the specific operations. A common example of auxiliary state is the moving_mean and moving_variance in BatchNorm. Most operators do not have Auxiliary states. - :return: A list of symbols indicating the names of the auxiliary states. +Returns a list of symbols indicating the names of the auxiliary states. """ function list_auxiliary_states(self :: SymbolicNode) @_list_symbol_info(self, :MXSymbolListAuxiliaryStates) @@ -108,8 +108,8 @@ end """ get_internals(self :: SymbolicNode) -Get a new grouped :class:`SymbolicNode` whose output contains all the internal outputs of -this :class:`SymbolicNode`. +Get a new grouped `SymbolicNode` whose output contains all the internal outputs of +this `SymbolicNode`. """ function get_internals(self :: SymbolicNode) ref_hdr = Ref{MX_handle}(0) @@ -120,8 +120,9 @@ end """ get_attr(self :: SymbolicNode, key :: Symbol) -Get attribute attached to this :class:`SymbolicNode` belonging to key. -:return: The value belonging to key as a :class:`Nullable`. +Get attribute attached to this `SymbolicNode` belonging to key. + +Returns the value belonging to key as a `Nullable`. """ function get_attr(self :: SymbolicNode, key :: Symbol) key_s = string(key) @@ -140,7 +141,8 @@ end list_attr(self :: SymbolicNode) Get all attributes from a symbol. -:return: Dictionary of attributes. + +Returns a dictionary of attributes. """ function list_attr(self :: SymbolicNode) ref_sz = Ref{MX_uint}(0) @@ -162,7 +164,8 @@ end list_all_attr(self :: SymbolicNode) Get all attributes from the symbol graph. -:return: Dictionary of attributes. + +Returns a dictionary of attributes. """ function list_all_attr(self :: SymbolicNode) ref_sz = Ref{MX_uint}(0) @@ -183,12 +186,12 @@ end """ set_attr(self:: SymbolicNode, key :: Symbol, value :: AbstractString) -Set the attribute key to value for this :class:`SymbolicNode`. +Set the attribute key to value for this `SymbolicNode`. 
# Warning It is encouraged not to call this function directly, unless you know exactly what you are doing. The -recommended way of setting attributes is when creating the :class:`SymbolicNode`. Changing -the attributes of a :class:`SymbolicNode` that is already been used somewhere else might +recommended way of setting attributes is when creating the `SymbolicNode`. Changing +the attributes of a `SymbolicNode` that is already been used somewhere else might cause unexpected behavior and inconsistency. """ function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) @@ -205,7 +208,7 @@ Create a symbolic variable with the given name. This is typically used as a plac For example, the data node, acting as the starting point of a network architecture. # Arguments -* Dict{Symbol, AbstractString} attrs: The attributes associated with this :class:`Variable`. +* Dict{Symbol, AbstractString} attrs: The attributes associated with this `Variable`. """ function Variable(name :: Union{Symbol, AbstractString}; attrs = Dict()) attrs = convert(Dict{Symbol, AbstractString}, attrs) @@ -221,7 +224,7 @@ end """ Group(nodes :: SymbolicNode...) -Create a :class:`SymbolicNode` by grouping nodes together. +Create a `SymbolicNode` by grouping nodes together. """ function Group(nodes :: SymbolicNode...) handles = MX_handle[nodes...] @@ -283,9 +286,9 @@ as a list of shapes, which should specify the shapes of inputs in the same order the arguments returned by :func:`list_arguments`. Alternatively, the shape information could be specified via keyword arguments. -:return: A 3-tuple containing shapes of all the arguments, shapes of all the outputs and - shapes of all the auxiliary variables. If shape inference failed due to incomplete - or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. +Returns a 3-tuple containing shapes of all the arguments, shapes of all the outputs and +shapes of all the auxiliary variables. 
If shape inference failed due to incomplete +or incompatible inputs, the return value will be `(nothing, nothing, nothing)`. """ function infer_shape(self :: SymbolicNode; kwargs...) sdata = MX_uint[] @@ -351,9 +354,9 @@ as a list of types, which should specify the types of inputs in the same order a the arguments returned by :func:`list_arguments`. Alternatively, the type information could be specified via keyword arguments. -:return: A 3-tuple containing types of all the arguments, types of all the outputs and - types of all the auxiliary variables. If type inference failed due to incomplete - or incompatible inputs, the return value will be ``(nothing, nothing, nothing)``. +Returns a 3-tuple containing types of all the arguments, types of all the outputs and +types of all the auxiliary variables. If type inference failed due to incomplete +or incompatible inputs, the return value will be `(nothing, nothing, nothing)`. """ function infer_type(self :: SymbolicNode; kwargs...) types = Cint[toTypeFlag(x[2]) for x in kwargs] @@ -524,7 +527,7 @@ end """ to_json(self :: SymbolicNode) -Convert a :class:`SymbolicNode` into a JSON string. +Convert a `SymbolicNode` into a JSON string. """ function to_json(self :: SymbolicNode) ref_json = Ref{char_p}(0) @@ -535,7 +538,7 @@ end """ from_json(repr :: AbstractString, ::Type{SymbolicNode}) -Load a :class:`SymbolicNode` from a JSON string representation. +Load a `SymbolicNode` from a JSON string representation. """ function from_json(repr :: AbstractString, ::Type{SymbolicNode}) ref_hdr = Ref{MX_handle}(0) @@ -546,7 +549,7 @@ end """ load(filename :: AbstractString, ::Type{SymbolicNode}) -Load a :class:`SymbolicNode` from a JSON file. +Load a `SymbolicNode` from a JSON file. """ function load(filename :: AbstractString, ::Type{SymbolicNode}) ref_hdr = Ref{MX_handle}(0) @@ -557,7 +560,7 @@ end """ save(filename :: AbstractString, node :: SymbolicNode) -Save a :class:`SymbolicNode` to a JSON file. 
+Save a `SymbolicNode` to a JSON file. """ function save(filename :: AbstractString, node :: SymbolicNode) @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), node, filename) diff --git a/src/visualize.jl b/src/visualize.jl index 15f23c6bfffc..c60868430a9c 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -6,10 +6,10 @@ import JSON * SymbolicNode network: the network to visualize. * AbstractString title: keyword argument, default "Network Visualization", the title of the GraphViz graph. -* input_shapes: keyword argument, default ``nothing``. If provided, +* input_shapes: keyword argument, default `nothing`. If provided, will run shape inference and plot with the shape information. Should be either a dictionary of name-shape mapping or an array of shapes. - :return: the graph description in GraphViz ``dot`` language. + :return: the graph description in GraphViz `dot` language. """ function to_graphviz(network :: SymbolicNode; title="Network Visualization", input_shapes=nothing) if !isa(input_shapes, Void) From 221611ae19cbf5a4bec4c1832853a02b4cc62176 Mon Sep 17 00:00:00 2001 From: Ranjan Anantharaman Date: Sun, 21 Aug 2016 18:10:11 +0530 Subject: [PATCH 375/630] Get rid of sub depwarn --- src/metric.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index aea8a0af628e..dc00bdafb267 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -69,7 +69,7 @@ function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDA for j in 1:size(labels, 2) for i in 1:size(labels, 1) label = labels[i, j, 1, sample] - klasses = sub(pred, i, j, :, sample) + klasses = view(pred, i, j, :, sample) klass = indmax(klasses) - 1 # Classes start at 0...k-1 metric.acc_sum += klass == label @@ -79,7 +79,7 @@ function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDA end elseif ndims(pred) == 2 # 1-dimensional case for sample in 1:size(label, 1) - klass = indmax(sub(pred, :, sample)) - 1 + klass = 
indmax(view(pred, :, sample)) - 1 metric.acc_sum += klass == label[sample] metric.n_sample += 1 end From e672e427b6a696b594cf17fafa280e60d56f6aee Mon Sep 17 00:00:00 2001 From: Ranjan Anantharaman Date: Sun, 21 Aug 2016 18:11:06 +0530 Subject: [PATCH 376/630] import Compat.view --- src/MXNet.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/MXNet.jl b/src/MXNet.jl index c447617e7fbf..b9de52a58bf8 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -9,6 +9,7 @@ module mx using Compat import Compat.String +import Compat.view using Formatting From 08ba2d13d6cd45bb77be9149c6fb730a4a3a19b4 Mon Sep 17 00:00:00 2001 From: Ranjan Anantharaman Date: Sun, 21 Aug 2016 18:11:48 +0530 Subject: [PATCH 377/630] Update REQUIRE with min version of Compat --- REQUIRE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/REQUIRE b/REQUIRE index d97c567e15db..6fbfb73bd328 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,5 +1,5 @@ julia 0.4+ -Compat +Compat 0.8.4 Formatting BinDeps JSON From e3b793544d1d48486aa9a3d67d4c1d6c76335894 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 23 Aug 2016 04:32:37 +0900 Subject: [PATCH 378/630] reorder docs for io.jl and bring them into the markdown age --- docs/src/api/io.md | 111 ++++++++++++ src/base.jl | 2 +- src/io.jl | 413 ++++++++++++++++++++++----------------------- src/ndarray.jl | 2 +- 4 files changed, 312 insertions(+), 216 deletions(-) diff --git a/docs/src/api/io.md b/docs/src/api/io.md index 32a7d14f9144..fe5fd2f46e2b 100644 --- a/docs/src/api/io.md +++ b/docs/src/api/io.md @@ -3,6 +3,117 @@ Data providers are wrappers that load external data, be it images, text, or general tensors, and split it into mini-batches so that the model can consume the data in a uniformed way. 
+## AbstractDataProvider interface + +```@docs +mx.AbstractDataProvider +``` + +The difference between *data* and *label* is that during training stage, +both *data* and *label* will be feeded into the model, while during +prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and +of any shapes. The provided data and label names here should match the input names in a target +`SymbolicNode`. + +A data provider should also implement the Julia iteration interface, in order to allow iterating +through the data set. The provider will be called in the following way: + +```julia +for batch in eachbatch(provider) + data = get_data(provider, batch) +end +``` + +which will be translated by Julia compiler into + +```julia +state = Base.start(eachbatch(provider)) +while !Base.done(provider, state) + (batch, state) = Base.next(provider, state) + data = get_data(provider, batch) +end +``` + +By default, `eachbatch` simply returns the provider itself, so the iterator interface +is implemented on the provider type itself. But the extra layer of abstraction allows us to +implement a data provider easily via a Julia `Task` coroutine. See the +data provider defined in [the char-lstm example](tutorial/char-lstm) for an example of using coroutine to define data +providers. + +The detailed interface functions for the iterator API is listed below: + + Base.eltype(provider) -> AbstractDataBatch + +Returns the specific subtype representing a data batch. See `AbstractDataBatch`. +* `provider::AbstractDataProvider`: the data provider. + + Base.start(provider) -> AbstractDataProviderState + +This function is always called before iterating into the dataset. It should initialize +the iterator, reset the index, and do data shuffling if needed. +* `provider::AbstractDataProvider`: the data provider. + + Base.done(provider, state) -> Bool + +True if there is no more data to iterate in this dataset. +* `provider::AbstractDataProvider`: the data provider. 
+* `state::AbstractDataProviderState`: the state returned by `Base.start` and `Base.next`. + + Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) + +Returns the current data batch, and the state for the next iteration. +* `provider::AbstractDataProvider`: the data provider. + +Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that +is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this +case, you can safely assume that + +* `Base.start` will always be called, and called only once before the iteration starts. +* `Base.done` will always be called at the beginning of every iteration and always be called once. +* If `Base.done` return true, the iteration will stop, until the next round, again, starting with + a call to `Base.start`. +* `Base.next` will always be called only once in each iteration. It will always be called after + one and only one call to `Base.done`; but if `Base.done` returns true, `Base.next` will + not be called. + +With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation +of the built-in `MXDataProvider` for example. + +### Note: +Please do not use the one data provider simultaneously in two different places, either in parallel, +or in a nested loop. For example, the behavior for the following code is undefined + +```julia +for batch in data + # updating the parameters + + # now let's test the performance on the training set + for b2 in data + # ... + end +end +``` + +```@docs +mx.get_batch_size +mx.provide_data +mx.provide_label +``` + +## AbstractDataBatch interface + +```@docs +mx.AbstractDataProviderState +mx.count_samples +mx.get_data +mx.get_label +mx.get +mx.load_data! +mx.load_label! 
+``` + +## Implemented providers and other methods + ```@autodocs Modules = [MXNet.mx] Pages = ["io.jl"] diff --git a/src/base.jl b/src/base.jl index d35a1bec3818..cca45c273b96 100644 --- a/src/base.jl +++ b/src/base.jl @@ -30,7 +30,7 @@ end function __init__() _populate_symbol_creator_cache!() - _import_io_iterators() + _populate_iter_creator_cache!() atexit() do # notify libmxnet we are shutting down diff --git a/src/io.jl b/src/io.jl index 56b463a1a6c3..1878125f15fd 100644 --- a/src/io.jl +++ b/src/io.jl @@ -3,113 +3,50 @@ The root type for all data provider. A data provider should implement the following interfaces: - get_batch_size(provider) -> Int +* [`get_batch_size`](@ref) +* [`provide_data`](@ref) +* [`provide_label`](@ref) - :param AbstractDataProvider provider: the data provider. - :return: the mini-batch size of the provided data. All the provided data should have the - same mini-batch size (i.e. the last dimension). +As well as the Julia iterator interface (see [the Julia manual](http://docs.julialang.org/en/stable/manual/interfaces/)). +Normally this involves defining: - provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} - - :param AbstractDataProvider provider: the data provider. - :return: a vector of (name, shape) pairs describing the names of the data it provides, and - the corresponding shapes. - - provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} - - :param AbstractDataProvider provider: the data provider. - :return: a vector of (name, shape) pairs describing the names of the labels it provides, and - the corresponding shapes. - - The difference between *data* and *label* is that during - training stage, both *data* and *label* will be feeded into the model, while during - prediction stage, only *data* is loaded. Otherwise, they could be anything, with any names, and - of any shapes. The provided data and label names here should match the input names in a target - `SymbolicNode`. 
- - A data provider should also implement the Julia iteration interface, in order to allow iterating - through the data set. The provider will be called in the following way: - - .. code-block:: julia - - for batch in eachbatch(provider) - data = get_data(provider, batch) - end - - which will be translated by Julia compiler into - - .. code-block:: julia - - state = Base.start(eachbatch(provider)) - while !Base.done(provider, state) - (batch, state) = Base.next(provider, state) - data = get_data(provider, batch) - end - - By default, :func:`eachbatch` simply returns the provider itself, so the iterator interface - is implemented on the provider type itself. But the extra layer of abstraction allows us to - implement a data provider easily via a Julia `Task` coroutine. See the - data provider defined in :doc:`the char-lstm example - ` for an example of using coroutine to define data - providers. - -The detailed interface functions for the iterator API is listed below: - - Base.eltype(provider) -> AbstractDataBatch - - :param AbstractDataProvider provider: the data provider. - :return: the specific subtype representing a data batch. See `AbstractDataBatch`. - - Base.start(provider) -> AbstractDataProviderState - - :param AbstractDataProvider provider: the data provider. - - This function is always called before iterating into the dataset. It should initialize - the iterator, reset the index, and do data shuffling if needed. - - Base.done(provider, state) -> Bool - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataProviderState state: the state returned by :func:`Base.start` :func:`Base.next`. - :return: true if there is no more data to iterate in this dataset. 
+* `Base.eltype(provider) -> AbstractDataBatch` +* `Base.start(provider) -> AbstractDataProviderState` +* `Base.done(provider, state) -> Bool` +* `Base.next(provider, state) -> (AbstractDataBatch, AbstractDataProvider)` +""" +abstract AbstractDataProvider - Base.next(provider) -> (AbstractDataBatch, AbstractDataProviderState) +""" + get_batch_size(provider) -> Int - :param AbstractDataProvider provider: the data provider. - :return: the current data batch, and the state for the next iteration. +# Arguments: +* `provider::AbstractDataProvider`: the data provider. -Note sometimes you are wrapping an existing data iterator (e.g. the built-in libmxnet data iterator) that -is built with a different convention. It might be difficult to adapt to the interfaces stated here. In this -case, you can safely assume that +Returns the mini-batch size of the provided data. All the provided data should have the same mini-batch size (i.e. the last dimension). +""" +function get_batch_size end -* :func:`Base.start` will always be called, and called only once before the iteration starts. -* :func:`Base.done` will always be called at the beginning of every iteration and always be called once. -* If :func:`Base.done` return true, the iteration will stop, until the next round, again, starting with - a call to :func:`Base.start`. -* :func:`Base.next` will always be called only once in each iteration. It will always be called after - one and only one call to :func:`Base.done`; but if :func:`Base.done` returns true, :func:`Base.next` will - not be called. +""" + provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} -With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation -of the built-in `MXDataProvider` for example. +# Arguments: +* `provider::AbstractDataProvider`: the data provider. -.. caution:: +Returns a vector of (name, shape) pairs describing the names of the data it provides, and the corresponding shapes. 
- Please do not use the one data provider simultaneously in two different places, either in parallel, - or in a nested loop. For example, the behavior for the following code is undefined +""" +function provide_data end - .. code-block:: julia +""" + provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} - for batch in data - # updating the parameters +# Arguments: +* `provider::AbstractDataProvider`: the data provider. - # now let's test the performance on the training set - for b2 in data - # ... - end - end +Returns a vector of (name, shape) pairs describing the names of the labels it provides, and the corresponding shapes. """ -abstract AbstractDataProvider +function provide_label end """ AbstractDataProviderState @@ -123,74 +60,58 @@ abstract AbstractDataProviderState Base type for a data mini-batch. It should implement the following interfaces: - count_samples(provider, batch) -> Int +* [`count_samples`](@ref) +* [`get_data`](@ref) +* [`get_label`](@ref) - :param AbstractDataBatch batch: the data batch object. - :return: the number of samples in this batch. This number should be greater than 0, but - less than or equal to the batch size. This is used to indicate at the end of - the data set, there might not be enough samples for a whole mini-batch. +The following utility functions will be automatically defined: - get_data(provider, batch) -> Vector{NDArray} - - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :return: a vector of data in this batch, should be in the same order as declared in - :func:`provide_data() `. - - The last dimension of each `NDArray` should always match the batch_size, even when - :func:`count_samples` returns a value less than the batch size. In this case, - the data provider is free to pad the remaining contents with any value. - - get_label(provider, batch) -> Vector{NDArray} - - :param AbstractDataProvider provider: the data provider. 
- :param AbstractDataBatch batch: the data batch object. - :return: a vector of labels in this batch. Similar to :func:`get_data`. +* [`get`](@ref) +* [`load_data!`](@ref) +* [`load_label!`](@ref) +""" +abstract AbstractDataBatch +""" + count_samples(provider, batch) -> Int - The following utility functions will be automatically defined. +# Arguments: +* `batch::AbstractDataBatch`: the data batch object. - get(provider, batch, name) -> NDArray +Returns the number of samples in this batch. This number should be greater than 0, but less than or equal to the batch size. This is used to indicate at the end of the data set, there might not be enough samples for a whole mini-batch. - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :param Base.Symbol name: the name of the data to get, should be one of the names - provided in either :func:`provide_data() ` - or :func:`provide_label() `. - :return: the corresponding data array corresponding to that name. +""" +function count_samples end - load_data!(provider, batch, targets) +""" + get_data(provider, batch) -> Vector{NDArray} - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :param targets: the targets to load data into. - :type targets: Vector{Vector{SlicedNDArray}} +# Arguments: +* `provider::AbstractDataProvider`: the data provider. +* `batch::AbstractDataBatch`: the data batch object. - The targets is a list of the same length as number of data provided by this provider. - Each element in the list is a list of `SlicedNDArray`. This list described a - spliting scheme of this data batch into different slices, each slice is specified by - a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch - that should be loaded into the corresponding *ndarray*. +Returns a vector of data in this batch, should be in the same order as declared in `provide_data() `. 
- This utility function is used in data parallelization, where a mini-batch is splited - and computed on several different devices. +The last dimension of each `NDArray` should always match the batch_size, even when `count_samples` returns a value less than the batch size. In this case, the data provider is free to pad the remaining contents with any value. +""" +function get_data end - load_label!(provider, batch, targets) +""" + get_label(provider, batch) -> Vector{NDArray} - :param AbstractDataProvider provider: the data provider. - :param AbstractDataBatch batch: the data batch object. - :param targets: the targets to load label into. - :type targets: Vector{Vector{SlicedNDArray}} +# Arguments: +* `provider::AbstractDataProvider`: the data provider. +* `batch::AbstractDataBatch`: the data batch object. - The same as :func:`load_data!`, except that this is for loading labels. +Returns a vector of labels in this batch. Similar to [`get_data`](@ref). """ -abstract AbstractDataBatch +function get_label end """ DataBatch - A basic subclass of `AbstractDataBatch`, that implement the interface by - accessing member fields. +A basic subclass of `AbstractDataBatch`, that implement the interface by +accessing member fields. """ type DataBatch <: AbstractDataBatch data :: Vector{NDArray} @@ -204,7 +125,7 @@ get_label{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batc """ SlicedNDArray - A alias type of `Tuple{UnitRange{Int},NDArray}`. +A alias type of `Tuple{UnitRange{Int},NDArray}`. """ typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} @@ -217,10 +138,38 @@ function _load_general!(provider :: AbstractDataProvider, batch :: AbstractDataB end end end + +""" + load_data!(provider, batch, targets) + +# Arguments: +* `provider::AbstractDataProvider`: the data provider. +* `batch::AbstractDataBatch`: the data batch object. +* `targets::Vector{Vector{SlicedNDArray}}`: the targets to load data into. 
+ +The targets is a list of the same length as number of data provided by this provider. +Each element in the list is a list of `SlicedNDArray`. This list described a +spliting scheme of this data batch into different slices, each slice is specified by +a slice-ndarray pair, where *slice* specify the range of samples in the mini-batch +that should be loaded into the corresponding *ndarray*. + +This utility function is used in data parallelization, where a mini-batch is splited +and computed on several different devices. +""" function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) _load_general!(provider, batch, targets, get_data) end + +""" + load_label!(provider, batch, targets) + +* `provider::AbstractDataProvider provider`: the data provider. +* `batch::AbstractDataBatch batch`: the data batch object. +* `targets::Vector{Vector{SlicedNDArray}}`: the targets to load label into. + +The same as [`load_data!`](@ref), except that this is for loading labels. +""" function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}) _load_general!(provider, batch, targets, get_label) @@ -238,6 +187,17 @@ function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatc end import Base.get +""" + get(provider, batch, name) -> NDArray + +* `provider::AbstractDataProvider`: the data provider. +* `batch::AbstractDataBatch`: the data batch object. +* `name::Symbol`: the name of the data to get, should be one of the names + provided in either `provide_data() ` + or `provide_label() `. + +Returns the corresponding data array corresponding to that name. 
+""" function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name :: Base.Symbol) for (idx, (k, s)) in enumerate(provide_data(provider)) if name == k @@ -257,7 +217,29 @@ eachbatch(provider :: AbstractDataProvider) = provider """ ArrayDataProvider - A convenient tool to iterate `NDArray` or Julia `Array`. +A convenient tool to iterate `NDArray` or Julia `Array`. + + ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) + +Construct a data provider from `NDArray` or Julia Arrays. + +# Arguments: +* `data`: the data, could be + * a `NDArray`, or a Julia Array. This is equivalent to `:data => data`. + * a name-data pair, like `:mydata => array`, where `:mydata` is the name of the data + * and `array` is an `NDArray` or a Julia Array. + * a list of name-data pairs. + +* `label`: the same as the `data` parameter. When this argument is omitted, the constructed provider will provide no labels. +* `batch_size::Int`: the batch size, default is 0, which means treating the whole array as a single mini-batch. +* `shuffle::Bool`: turn on if the data should be shuffled at every epoch. +* `data_padding::Real`: when the mini-batch goes beyond the dataset boundary, there might + be less samples to include than a mini-batch. This value specify a scalar to pad the + contents of all the missing data points. +* `label_padding::Real`: the same as `data_padding`, except for the labels. + +TODO: remove `data_padding` and `label_padding`, and implement rollover that copies +the last or first several training samples to feed the padding. """ type ArrayDataProvider <: AbstractDataProvider data_arrays :: Vector{Array{MX_float}} @@ -274,31 +256,6 @@ type ArrayDataProvider <: AbstractDataProvider label_batch :: Vector{NDArray} end -""" - ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) - - Construct a data provider from `NDArray` or Julia Arrays. 
- - :param data: the data, could be - - - a `NDArray`, or a Julia Array. This is equivalent to `:data => data`. - - a name-data pair, like `:mydata => array`, where `:mydata` is the name of the data - and `array` is an `NDArray` or a Julia Array. - - a list of name-data pairs. - - :param label: the same as the `data` parameter. When this argument is omitted, the constructed - provider will provide no labels. - :param Int batch_size: the batch size, default is 0, which means treating the whole array as a - single mini-batch. - :param Bool shuffle: turn on if the data should be shuffled at every epoch. - :param Real data_padding: when the mini-batch goes beyond the dataset boundary, there might - be less samples to include than a mini-batch. This value specify a scalar to pad the - contents of all the missing data points. - :param Real label_padding: the same as `data_padding`, except for the labels. - - TODO: remove `data_padding` and `label_padding`, and implement rollover that copies - the last or first several training samples to feed the padding. -""" # Julia's type system is sometimes very frustrating. 
You cannot specify a function # with argument Vector{Pair} to expect to be matched when calling with the parameter # [:foo => zeros(2,3), :bar => zeros(3)] because the type inference gives very specific @@ -387,9 +344,11 @@ end function provide_data(provider::ArrayDataProvider) return collect(zip(provider.data_names, map(size, provider.data_batch))) end + function provide_label(provider::ArrayDataProvider) return collect(zip(provider.label_names, map(size, provider.label_batch))) end + get_batch_size(provider::ArrayDataProvider) = provider.batch_size immutable ArrayDataProviderState <: AbstractDataProviderState @@ -547,7 +506,42 @@ function count_samples(provider :: MXDataProvider, batch :: MXDataBatch) return provider.batch_size - Int(ref_pad[]) end -function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) +function _get_iter_creators() + n_ref = Ref{MX_uint}(0) + h_ref = Ref{Ptr{MX_handle}}(0) + @mxcall(:MXListDataIters, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) + + return unsafe_wrap(Array, h_ref[], n_ref[]) +end + +function _get_iter_name(hdr :: MX_handle) + ref_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_narg = Ref{MX_uint}(0) + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) + + @mxcall(:MXDataIterGetIterInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), + hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) + + return Symbol(unsafe_wrap(String, ref_name[])) +end + +const _iter_creator_cache = Dict{Symbol, MX_handle}() +function _populate_iter_creator_cache!() + empty!(_iter_creator_cache) + h_creators = _get_iter_creators() + for handle in h_creators + name = _get_iter_name(handle) + _iter_creator_cache[name] = handle + end +end + +_get_iter_creator(name :: Symbol) = _iter_creator_cache[name] + +function _define_data_iter_creator(hdr :: MX_handle) ref_name = Ref{char_p}(0) ref_desc = 
Ref{char_p}(0) ref_narg = Ref{MX_uint}(0) @@ -561,19 +555,24 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) iter_name = Symbol(unsafe_wrap(String, ref_name[])) - if gen_docs - if endswith(string(iter_name), "Iter") - f_desc = "Can also be called with the alias `$(string(iter_name)[1:end-4] * "Provider")`.\n" - else - f_desc = "" - end - f_desc *= unsafe_string(ref_desc[]) * "\n\n" - f_desc *= ":param Base.Symbol data_name: keyword argument, default `:data`. The name of the data.\n" - f_desc *= ":param Base.Symbol label_name: keyword argument, default `:softmax_label`. " * - "The name of the label. Could be `nothing` if no label is presented in this dataset.\n\n" - f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) - f_desc *= ":return: the constructed `MXDataProvider`." - return (iter_name, f_desc) + isprovider = endswith(string(iter_name), "Iter") + signature = _format_signature(Int(ref_narg[]), ref_arg_names) + f_desc = " " * string(iter_name) * "(" *signature * ")\n\n" + if isprovider + f_desc *= "Can also be called with the alias `$(string(iter_name)[1:end-4] * "Provider")`.\n" + end + f_desc *= unsafe_string(ref_desc[]) * "\n\n" + f_desc *= "# Arguments:\n" + f_desc *= "* `data_name::Symbol`: keyword argument, default `:data`. The name of the data.\n" + f_desc *= "* `label_name::Symbol`: keyword argument, default `:softmax_label`. " * + "The name of the label. Could be `nothing` if no label is presented in this dataset.\n\n" + f_desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) * "\n" + f_desc *= "Returns the constructed `MXDataProvider`." 
+ + if isprovider + alias_name = Symbol(string(iter_name)[1:end-4] * "Provider") + else + alias_name = nothing end defun = quote @@ -582,42 +581,28 @@ function _define_data_iter_creator(hdr :: MX_handle; gen_docs::Bool=false) arg_vals = String[dump_mx_param(v) for (k,v) in kwargs] ref_hdr = Ref{MX_handle}(0) + local hdr = _get_iter_creator($(QuoteNode(iter_name))) @mxcall(:MXDataIterCreateIter, (MX_handle, MX_uint, char_pp, char_pp, Ref{MX_handle}), - $hdr, length(arg_keys), arg_keys, arg_vals, ref_hdr) + hdr, length(arg_keys), arg_keys, arg_vals, ref_hdr) return MXDataProvider(MX_DataIterHandle(ref_hdr[]); kwargs...) end - end - eval(defun) + $(isprovider ? :(const $alias_name = $iter_name) : :()) - # add an alias XXXProvider => XXXIter - if endswith(string(iter_name), "Iter") - alias_name = Symbol(string(iter_name)[1:end-4] * "Provider") - eval(:($alias_name = $iter_name)) + @doc $f_desc $iter_name end + defun end -function _import_io_iterators(;gen_docs::Bool=false) - n_ref = Ref{MX_uint}(0) - h_ref = Ref{Ptr{MX_handle}}(0) - @mxcall(:MXListDataIters, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) - - n_creators = n_ref[] - h_creators = unsafe_wrap(Array, h_ref[], n_creators) - - if gen_docs - docs = Dict{Base.Symbol, AbstractString}() - end - - for i = 1:n_creators - creator_hdr = h_creators[i] - ret = _define_data_iter_creator(creator_hdr; gen_docs=gen_docs) - if gen_docs - docs[ret[1]] = ret[2] - end - end - - if gen_docs - return docs +macro _import_io_iterators() + creators = _get_iter_creators() + defs = Expr[] + for handle in creators + push!(defs, _define_data_iter_creator(handle)) end + esc(quote + $(defs...) + end) end + +@_import_io_iterators() diff --git a/src/ndarray.jl b/src/ndarray.jl index 14fdee01564f..65f1a15bd7b1 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -241,7 +241,7 @@ import Base: size, length, ndims, eltype size(arr :: NDArray, dim :: Int) Get the shape of an `NDArray`. The shape is in Julia's column-major convention. 
See -also the notes on NDArray shapes [`NDArrat`](@ref). +also the notes on NDArray shapes [`NDArray`](@ref). """ function size(arr :: NDArray) ref_ndim = Ref{MX_uint}(0) From c1268953bf95d45e314b790251b20a442f387cf7 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 23 Aug 2016 05:16:38 +0900 Subject: [PATCH 379/630] use `!!! note` syntax (this breaks documentation creation on v0.4) --- .travis.yml | 1 + docs/src/api/io.md | 26 +++++++++++++------------- 2 files changed, 14 insertions(+), 13 deletions(-) diff --git a/.travis.yml b/.travis.yml index 08c7db10e0ab..284ca4b422ae 100644 --- a/.travis.yml +++ b/.travis.yml @@ -38,6 +38,7 @@ script: after_success: - source $TRAVIS/run_coverage.sh + - echo $TRAVIS_JULIA_VERSION - julia -e 'Pkg.add("Documenter")' - julia -e 'cd(Pkg.dir("MXNet")); include(joinpath("docs", "make.jl"))' diff --git a/docs/src/api/io.md b/docs/src/api/io.md index fe5fd2f46e2b..7312259dbf3c 100644 --- a/docs/src/api/io.md +++ b/docs/src/api/io.md @@ -79,20 +79,20 @@ case, you can safely assume that With those assumptions, it will be relatively easy to adapt any existing iterator. See the implementation of the built-in `MXDataProvider` for example. -### Note: -Please do not use the one data provider simultaneously in two different places, either in parallel, -or in a nested loop. For example, the behavior for the following code is undefined - -```julia -for batch in data - # updating the parameters - - # now let's test the performance on the training set - for b2 in data - # ... +!!! note + Please do not use the one data provider simultaneously in two different places, either in parallel, + or in a nested loop. For example, the behavior for the following code is undefined + + ```julia + for batch in data + # updating the parameters + + # now let's test the performance on the training set + for b2 in data + # ... 
+ end end -end -``` + ``` ```@docs mx.get_batch_size From 3bbd8663fb34f6f400cdaef73a539c6504eded4b Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 23 Aug 2016 06:19:00 +0900 Subject: [PATCH 380/630] build documentation on 0.5 --- docs/make.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/make.jl b/docs/make.jl index 06bbef0466cd..a84f680711a5 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -8,4 +8,5 @@ makedocs( deploydocs( deps = Deps.pip("pygments", "mkdocs", "mkdocs-material", "python-markdown-math"), repo = "github.com/dmlc/MXNet.jl.git", + julia = "0.5", ) From 84866f46890b61213e0326c06415fed890291d3b Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 23 Aug 2016 07:24:00 +0900 Subject: [PATCH 381/630] fix documentation being attached to the wrong binding: --- src/symbolic-node.jl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index b71733ace71b..40fc84b3cb42 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -33,13 +33,14 @@ function Base.copy(self :: SymbolicNode) end # TODO(vchuravy) How to add documentation to the v0.5 style call overloading -""" +@doc """ call(self :: SymbolicNode, args :: SymbolicNode...) call(self :: SymbolicNode; kwargs...) Make a new node by composing `self` with `args`. Or the arguments can be specified using keyword arguments. -""" +""" SymbolicNode + @compat function (self::SymbolicNode)(args :: SymbolicNode...) s = deepcopy(self) _compose!(s, args...) 
From 053ab4258456251f6989bb9902a48fb7071c5f8b Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 24 Aug 2016 06:58:04 +0900 Subject: [PATCH 382/630] fix overseen typo --- src/model.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/model.jl b/src/model.jl index d6752e24ebd3..580896e051e6 100644 --- a/src/model.jl +++ b/src/model.jl @@ -113,7 +113,7 @@ function init_model(self :: FeedForward, initializer :: AbstractInitializer; ove for (name, shape) in zip(aux_names, aux_shapes) if haskey(self.aux_params, name) - if shape == size(self.auxg_params[name]) + if shape == size(self.aux_params[name]) aux_params[name] = self.aux_params[name] continue else From e6979600f8781567a20a29241db7f1c29d8cddec Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 27 Aug 2016 16:57:52 +0900 Subject: [PATCH 383/630] add grad function to do autodiff --- src/symbolic-node.jl | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 40fc84b3cb42..498de4ba42bc 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -202,6 +202,26 @@ function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) @mxcall(:MXSymbolSetAttr, (MX_handle, Cstring, Cstring), self, key_s, value_s) end +""" + grad(self :: SymbolicNode, wrt :: Vector{SymbolicNode}) + +Get the autodiff gradient of the current `SymbolicNode`. This function can +only be used if the current symbol is a loss function. + +# Arguments: +* `self::SymbolicNode`: current node. +* `wrt::Vector{Symbol}`: the names of the arguments to the gradient. + +Returns a gradient symbol of the corresponding gradient. 
+""" +function grad(self :: SymbolicNode, wrt :: Vector{Symbol}) + hdr_ref = Ref{MX_handle}(0) + keys = String[string(key) for key in wrt] + + @mxcall(:MXSymbolGrad, (MX_handle, MX_uint, char_pp, Ptr{MX_handle}), self, length(keys), keys, hdr_ref) + return SymbolicNode(MX_SymbolHandle(hdr_ref[])) +end + """ Variable(name :: Union{Symbol, AbstractString}) From 4403ef3c810e960d6b619de01354b66bfdf2315e Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 25 Aug 2016 05:03:17 +0900 Subject: [PATCH 384/630] add MultiMetric and reorder metric docs --- src/metric.jl | 88 ++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 66 insertions(+), 22 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index dc00bdafb267..1e11098321ff 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -2,32 +2,24 @@ AbstractEvalMetric The base class for all evaluation metrics. The sub-types should implement the following -interfaces. +interfaces: - .. function:: update!(metric, labels, preds) - - Update and accumulate metrics. - - :param AbstractEvalMetric metric: the metric object. - :param labels: the labels from the data provider. - :type labels: Vector{NDArray} - :param preds: the outputs (predictions) of the network. - :type preds: Vector{NDArray} - - .. function:: reset!(metric) - - Reset the accumulation counter. +* [`update!`](@ref) +* [`reset!`](@ref) +* [`get`](@ref) +""" +abstract AbstractEvalMetric - .. function:: get(metric) +""" + update!(metric, labels, preds) - Get the accumulated metrics. +Update and accumulate metrics. - :return: `Vector{Tuple{Base.Symbol, Real}}`, a list of name-value pairs. For - example, `[(:accuracy, 0.9)]`. +# Arguments: +* `metric::AbstractEvalMetric`: the metric object. +* `labels::Vector{NDArray}`: the labels from the data provider. +* `preds::Vector{NDArray}`: the outputs (predictions) of the network. """ -abstract AbstractEvalMetric - -# Generic update! 
version function update!{T <: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}) if length(labels) != length(preds) Base.warn_once( @@ -39,6 +31,59 @@ function update!{T <: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray} end end +""" + reset!(metric) + +Reset the accumulation counter. +""" +function reset!(metric :: AbstractEvalMetric) + throw(MethodError(reset!, (typeof(metric),))) +end + + +import Base: get +""" + get(metric) + +Get the accumulated metrics. + +Returns `Vector{Tuple{Base.Symbol, Real}}`, a list of name-value pairs. +For example, `[(:accuracy, 0.9)]`. +""" +function get(metric :: AbstractEvalMetric) + throw(MethodError(get, (typeof(metric),))) +end + +""" + MultiMetric(metrics::Vector{AbstractEvalMetric}) + +Combine multiple metrics in one and get a result for all of them. + +# Usage +To calculate both mean-squared error [`Accuracy`](@ref) and log-loss [`ACE`](@ref): +```julia + mx.fit(..., eval_metric = mx.MultiMetric([mx.Accuracy(), mx.ACE()])) +``` +""" +type MultiMetric <: mx.AbstractEvalMetric + metrics :: Vector{mx.AbstractEvalMetric} +end + +function update!(metric :: MultiMetric, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + for m in metric.metrics + update!(m, labels, preds) + end + return nothing +end + +function reset!(metric :: MultiMetric) + map(reset!, metric.metrics) + return nothing +end + +function get(metric :: MultiMetric) + mapreduce(get, append!, metric.metrics) +end """ Accuracy @@ -89,7 +134,6 @@ function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDA end end -import Base: get function get(metric :: Accuracy) return [(:accuracy, metric.acc_sum / metric.n_sample)] end From 12a87e540f2c97b1bb5ef0870c043f440d93788c Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 24 Aug 2016 21:33:39 +0900 Subject: [PATCH 385/630] add one-dimensional ACE --- src/metric.jl | 69 +++++++++++++++++++++++++++++++-------------------- 1 file changed, 42 
insertions(+), 27 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 1e11098321ff..14f56697c4cf 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -206,23 +206,30 @@ function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) @nd_as_jl ro=(label,pred) begin # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) - @assert ndims(pred) == 4 - - labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) - for sample in 1:size(labels, 4) - for j in 1:size(labels, 2) - for i in 1:size(labels, 1) - label = labels[i, j, 1, sample] - - # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification - # Since we can only target labels right now this is the only thing we can do. - target = Int(label) + 1 # klasses are 0...k-1 => julia indexing - p_k = pred[i, j, target, sample] + if ndims(pred) == 4 + labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) + for sample in 1:size(labels, 4) + for j in 1:size(labels, 2) + for i in 1:size(labels, 1) + # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification + # Since we can only target labels right now this is the only thing we can do. 
+ target = Int(labels[i, j, 1, sample]) + 1 # klasses are 0...k-1 => julia indexing + p_k = pred[i, j, target, sample] - metric.ace_sum += log(p_k) - metric.n_sample += 1 + metric.ace_sum += log(p_k) + metric.n_sample += 1 + end end end + elseif ndims(pred) == 2 # 1-dimensional case + for sample in 1:size(labels, 1) + target = Int(labels[sample]) + 1 + p_k = pred[target, sample] + metric.ace_sum += log(p_k) + metric.n_sample += 1 + end + else + error("Can't handle prediction with dimensions $(ndims(pred)).") end end end @@ -257,23 +264,31 @@ function _update_single_output(metric :: MultiACE, label :: NDArray, pred :: NDA @nd_as_jl ro=(label,pred) begin # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) - @assert ndims(pred) == 4 - - labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) - for sample in 1:size(labels, 4) - for j in 1:size(labels, 2) - for i in 1:size(labels, 1) - label = labels[i, j, 1, sample] - # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification - # Since we can only target labels right now this is the only thing we can do. - target = Int(label) + 1 # klasses are 0...k-1 => julia indexing - p_k = pred[i, j, target, sample] + if ndims(pred) == 4 + labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) + for sample in 1:size(labels, 4) + for j in 1:size(labels, 2) + for i in 1:size(labels, 1) + # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification + # Since we can only target labels right now this is the only thing we can do. 
+ target = Int(labels[i, j, 1, sample]) + 1 # klasses are 0...k-1 => julia indexing + p_k = pred[i, j, target, sample] - metric.aces[target] += log(p_k) - metric.counts[target] += 1 + metric.aces[target] += log(p_k) + metric.counts[target] += 1 + end end end + elseif ndims(pred) == 2 + for sample in 1:size(label, 1) + target = Int(label[sample]) + 1 + p_k = pred[target, sample] + metric.aces[target] += log(p_k) + metric.counts[target] += 1 + end + else + error("Can't handle prediction with dimensions $(ndims(pred)).") end end end From ab19db8b76084fc678a95aaba141b0a9c1b833f4 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Mon, 29 Aug 2016 03:22:14 +0900 Subject: [PATCH 386/630] update docs for ACE --- src/metric.jl | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 14f56697c4cf..4ae66acf25b7 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -182,9 +182,7 @@ end """ ACE -Averaged cross-entropy for classification. This also know als logloss. - -Calculated the averaged cross entropy for multi-dimentions output. +Calculates the averaged cross-entropy (logloss) for classification. """ type ACE <: AbstractEvalMetric ace_sum :: Float64 @@ -237,10 +235,8 @@ end """ MultiACE -Averaged cross-entropy for classification. This also know als logloss. -This variant keeps track of the different losses per class. - -Calculated the averaged cross entropy for multi-dimentions output. +Calculates the averaged cross-entropy per class and overall (see [`ACE`](@ref)). +This can be used to quantify the influence of different classes on the overall loss. 
""" type MultiACE <: AbstractEvalMetric aces :: Vector{Float64} From 637e3785d97b76daf4ba9d9ba7b46a26fd148a39 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 1 Sep 2016 02:08:10 +0900 Subject: [PATCH 387/630] cleanup docs a bit more --- docs/src/api/optimizer.md | 15 +++- docs/src/api/optimizers/adam.md | 6 -- docs/src/api/optimizers/sgd.md | 6 -- src/callback.jl | 36 ++++----- src/executor.jl | 6 +- src/model.jl | 131 ++++++++++++++++---------------- src/ndarray.jl | 49 +++++------- src/nn-factory.jl | 2 +- src/optimizer.jl | 84 ++++++++++---------- src/optimizers/adam.jl | 34 ++++----- src/optimizers/sgd.jl | 35 ++++----- src/symbolic-node.jl | 16 ++-- src/util.jl | 1 - src/visualize.jl | 9 ++- 14 files changed, 207 insertions(+), 223 deletions(-) delete mode 100644 docs/src/api/optimizers/adam.md delete mode 100644 docs/src/api/optimizers/sgd.md diff --git a/docs/src/api/optimizer.md b/docs/src/api/optimizer.md index 043529a670c1..81fad7cb827e 100644 --- a/docs/src/api/optimizer.md +++ b/docs/src/api/optimizer.md @@ -1,12 +1,21 @@ # Optimizers ```@autodocs -Modules = [MXNet.mx] +Modules = [MXNet.mx, MXNet.mx.LearningRate, MXNet.mx.Momentum] Pages = ["optimizer.jl"] ``` ## Built-in optimizers -```@contents -Pages = ["optimizers/adam.md", "optimizers/sgd.md"] +### Stochastic Gradient Descent +```@autodocs +Modules = [MXNet.mx] +Pages = ["optimizers/sgd.jl"] +``` + +### ADAM +```@autodocs +Modules = [MXNet.mx] +Pages = ["optimizers/adam.jl"] ``` + diff --git a/docs/src/api/optimizers/adam.md b/docs/src/api/optimizers/adam.md deleted file mode 100644 index 269d82a9c80e..000000000000 --- a/docs/src/api/optimizers/adam.md +++ /dev/null @@ -1,6 +0,0 @@ -# ADAM - -```@autodocs -Modules = [MXNet.mx] -Pages = ["adam.jl"] -``` diff --git a/docs/src/api/optimizers/sgd.md b/docs/src/api/optimizers/sgd.md deleted file mode 100644 index 506c7b5c266d..000000000000 --- a/docs/src/api/optimizers/sgd.md +++ /dev/null @@ -1,6 +0,0 @@ -# Stochastic Gradient Descent - 
-```@autodocs -Modules = [MXNet.mx] -Pages = ["sgd.jl"] -``` diff --git a/src/callback.jl b/src/callback.jl index 6203f9f51f87..7736bbf08a44 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -32,21 +32,21 @@ A convenient function to construct a callback that runs every `n` mini-batches. # Arguments * `call_on_0::Bool`: keyword argument, default false. Unless set, the callback - will **not** be run on batch 0. + will *not* be run on batch 0. -For example, the :func:`speedometer` callback is defined as +For example, the [`speedometer`](@ref) callback is defined as - .. code-block:: julia - - every_n_iter(frequency, call_on_0=true) do state :: OptimizationState - if state.curr_batch == 0 - # reset timer - else - # compute and print speed - end - end +```julia +every_n_iter(frequency, call_on_0=true) do state :: OptimizationState + if state.curr_batch == 0 + # reset timer + else + # compute and print speed + end +end +``` - :seealso: :func:`every_n_epoch`, :func:`speedometer`. +See also [`every_n_epoch`](@ref) and [`speedometer`](@ref). """ function every_n_batch(callback :: Function, n :: Int; call_on_0 :: Bool = false) BatchCallback(n, call_on_0, callback) @@ -68,7 +68,7 @@ Create an `AbstractBatchCallback` that measure the training speed (number of samples processed per second) every k mini-batches. # Arguments -* Int frequency: keyword argument, default 50. The frequency (number of +* `frequency::Int`: keyword argument, default 50. The frequency (number of min-batches) to measure and report the speed. """ function speedometer(;frequency::Int=50) @@ -97,12 +97,12 @@ end A convenient function to construct a callback that runs every `n` full data-passes. -* Int call_on_0: keyword argument, default false. Unless set, the callback - will **not** be run on epoch 0. Epoch 0 means no training has been performed +* `call_on_0::Int`: keyword argument, default false. Unless set, the callback + will *not* be run on epoch 0. Epoch 0 means no training has been performed yet. 
This is useful if you want to inspect the randomly initialized model that has not seen any data yet. - :seealso: :func:`every_n_iter`. +See also [`every_n_iter`](@ref). """ function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) @@ -127,9 +127,9 @@ The checkpoints can be loaded back later on. * `prefix::AbstractString`: the prefix of the filenames to save the model. The model architecture will be saved to prefix-symbol.json, while the weights will be saved to prefix-0012.params, for example, for the 12-th epoch. -* Int frequency: keyword argument, default 1. The frequency (measured in epochs) to +* `frequency::Int`: keyword argument, default 1. The frequency (measured in epochs) to save checkpoints. -* Bool save_epoch_0: keyword argument, default false. Whether we should save a +* `save_epoch_0::Bool`: keyword argument, default false. Whether we should save a checkpoint for epoch 0 (model initialized but not seen any data yet). """ function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) diff --git a/src/executor.jl b/src/executor.jl index 8e8bdd3bec93..3ae5301a6c92 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -75,8 +75,8 @@ Create an `Executor` by binding a `SymbolicNode` to concrete `NDArray`. * `ctx::Context`: the context on which the computation should run. * `args`: either a list of `NDArray` or a dictionary of name-array pairs. Concrete arrays for all the inputs in the network architecture. The inputs typically include - network parameters (weights, bias, filters, etc.), data and labels. See :func:`list_arguments` - and :func:`infer_shape`. + network parameters (weights, bias, filters, etc.), data and labels. See [`list_arguments`](@ref) + and [`infer_shape`](@ref). * `args_grad`: * `aux_states`: * `grad_req`: @@ -211,7 +211,7 @@ Can be used to get an estimated about the memory cost. dProvider = ... 
# DataProvider exec = mx.simple_bind(net, mx.cpu(), data=size(dProvider.data_batch[1])) dbg_str = mx.debug_str(exec) - println(split(ref, ['\n'])[end-2]) + println(split(ref, ['\\n'])[end-2]) ``` """ function debug_str(self :: Executor) diff --git a/src/model.jl b/src/model.jl index 580896e051e6..a892dc886ca3 100644 --- a/src/model.jl +++ b/src/model.jl @@ -11,8 +11,8 @@ abstract AbstractModel The feedforward model provides convenient interface to train and predict on feedforward architectures like multi-layer MLP, ConvNets, etc. There is no explicitly handling of *time index*, but it is relatively easy to implement -unrolled RNN / LSTM under this framework (**TODO**: add example). For models -that handles sequential data explicitly, please use **TODO**... +unrolled RNN / LSTM under this framework (*TODO*: add example). For models +that handles sequential data explicitly, please use *TODO*... """ type FeedForward <: AbstractModel arch :: SymbolicNode @@ -47,10 +47,11 @@ end """ FeedForward(arch :: SymbolicNode, ctx) -* arch: the architecture of the network constructed using the symbolic API. -* ctx: the devices on which this model should do computation. It could be a single `Context` - or a list of `Context` objects. In the latter case, data parallelization will be used - for training. If no context is provided, the default context `cpu()` will be used. +# Arguments: +* `arch`: the architecture of the network constructed using the symbolic API. +* `ctx`: the devices on which this model should do computation. It could be a single `Context` + or a list of `Context` objects. In the latter case, data parallelization will be used + for training. If no context is provided, the default context `cpu()` will be used. """ function FeedForward(arch :: SymbolicNode; context :: Union{Context, Vector{Context}, Void} = nothing) if isa(context, Void) @@ -64,17 +65,18 @@ end """ init_model(self, initializer; overwrite=false, input_shapes...) - Initialize the weights in the model. 
+Initialize the weights in the model. - This method will be called automatically when training a model. So there is usually no - need to call this method unless one needs to inspect a model with only randomly initialized - weights. +This method will be called automatically when training a model. So there is usually no +need to call this method unless one needs to inspect a model with only randomly initialized +weights. -* FeedForward self: the model to be initialized. -* AbstractInitializer initializer: an initializer describing how the weights should be initialized. -* Bool overwrite: keyword argument, force initialization even when weights already exists. -* input_shapes: the shape of all data and label inputs to this model, given as keyword arguments. - For example, `data=(28,28,1,100), label=(100,)`. +# Arguments: +* `self::FeedForward`: the model to be initialized. +* `initializer::AbstractInitializer`: an initializer describing how the weights should be initialized. +* `overwrite::Bool`: keyword argument, force initialization even when weights already exists. +* `input_shapes`: the shape of all data and label inputs to this model, given as keyword arguments. + For example, `data=(28,28,1,100), label=(100,)`. """ function init_model(self :: FeedForward, initializer :: AbstractInitializer; overwrite::Bool=false, input_shapes...) # all arg names, including data, label, and parameters @@ -162,46 +164,44 @@ function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_sha end """ -.. function:: - predict(self, data; overwrite=false, callback=nothing) + predict(self, data; overwrite=false, callback=nothing) - Predict using an existing model. The model should be already initialized, or trained or loaded from - a checkpoint. There is an overloaded function that allows to pass the callback as the first argument, - so it is possible to do +Predict using an existing model. The model should be already initialized, or trained or loaded from +a checkpoint. 
There is an overloaded function that allows to pass the callback as the first argument, +so it is possible to do - .. code-block:: julia - - predict(model, data) do batch_output - # consume or write batch_output to file - end - -* FeedForward self: the model. -* AbstractDataProvider data: the data to perform prediction on. -* Bool overwrite: an `Executor` is initialized the first time predict is called. The memory - allocation of the `Executor` depends on the mini-batch size of the test - data provider. If you call predict twice with data provider of the same batch-size, - then the executor can be potentially be re-used. So, if `overwrite` is false, - we will try to re-use, and raise an error if batch-size changed. If `overwrite` - is true (the default), a new `Executor` will be created to replace the old one. - - .. note:: - - Prediction is computationally much less costly than training, so the bottleneck sometimes becomes the IO - for copying mini-batches of data. Since there is no concern about convergence in prediction, it is better - to set the mini-batch size as large as possible (limited by your device memory) if prediction speed is a - concern. - - For the same reason, currently prediction will only use the first device even if multiple devices are - provided to construct the model. - - .. note:: - - If you perform further after prediction. The weights are not automatically synchronized if `overwrite` - is set to false and the old predictor is re-used. In this case - setting `overwrite` to true (the default) will re-initialize the predictor the next time you call - predict and synchronize the weights again. - - :seealso: :func:`train`, :func:`fit`, :func:`init_model`, :func:`load_checkpoint` +```julia +predict(model, data) do batch_output + # consume or write batch_output to file +end +``` + +# Arguments: +* `self::FeedForward`: the model. +* `data::AbstractDataProvider`: the data to perform prediction on. 
+* `overwrite::Bool`: an `Executor` is initialized the first time predict is called. The memory + allocation of the `Executor` depends on the mini-batch size of the test + data provider. If you call predict twice with data provider of the same batch-size, + then the executor can be potentially be re-used. So, if `overwrite` is false, + we will try to re-use, and raise an error if batch-size changed. If `overwrite` + is true (the default), a new `Executor` will be created to replace the old one. + +!!! note + Prediction is computationally much less costly than training, so the bottleneck sometimes becomes the IO + for copying mini-batches of data. Since there is no concern about convergence in prediction, it is better + to set the mini-batch size as large as possible (limited by your device memory) if prediction speed is a + concern. + + For the same reason, currently prediction will only use the first device even if multiple devices are + provided to construct the model. + +!!! note + If you perform further after prediction. The weights are not automatically synchronized if `overwrite` + is set to false and the old predictor is re-used. In this case + setting `overwrite` to true (the default) will re-initialize the predictor the next time you call + predict and synchronize the weights again. + +See also [`train`](@ref), [`fit`](@ref), [`init_model`](@ref), and [`load_checkpoint`](@ref) """ function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = true) predict(self, data; overwrite = overwrite, callback=callback) @@ -310,7 +310,7 @@ end """ train(model :: FeedForward, ...) -Alias to :func:`fit`. +Alias to [`fit`](@ref). """ function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) fit(self, optimizer, data; kwargs...) @@ -321,26 +321,25 @@ end Train the `model` on `data` with the `optimizer`. -* FeedForward model: the model to be trained. 
-* AbstractOptimizer optimizer: the optimization algorithm to use. -* AbstractDataProvider data: the training data provider. -* Int n_epoch: default 10, the number of full data-passes to run. -* AbstractDataProvider eval_data: keyword argument, default `nothing`. The data provider for +* `model::FeedForward`: the model to be trained. +* `optimizer::AbstractOptimizer`: the optimization algorithm to use. +* `data::AbstractDataProvider`: the training data provider. +* `n_epoch::Int`: default 10, the number of full data-passes to run. +* `eval_data::AbstractDataProvider`: keyword argument, default `nothing`. The data provider for the validation set. -* AbstractEvalMetric eval_metric: keyword argument, default `Accuracy()`. The metric used +* `eval_metric::AbstractEvalMetric`: keyword argument, default [`Accuracy()`](@ref). The metric used to evaluate the training performance. If `eval_data` is provided, the same metric is also calculated on the validation set. -* kvstore: keyword argument, default `:local`. The key-value store used to synchronize gradients +* `kvstore`: keyword argument, default `:local`. The key-value store used to synchronize gradients and parameters when multiple devices are used for training. :type kvstore: `KVStore` or `Base.Symbol` -* AbstractInitializer initializer: keyword argument, default `UniformInitializer(0.01)`. -* Bool force_init: keyword argument, default false. By default, the random initialization using the +* `initializer::AbstractInitializer`: keyword argument, default `UniformInitializer(0.01)`. +* `force_init::Bool`: keyword argument, default false. By default, the random initialization using the provided `initializer` will be skipped if the model weights already exists, maybe from a previous - call to :func:`train` or an explicit call to :func:`init_model` or :func:`load_checkpoint`. When + call to [`train`](@ref) or an explicit call to [`init_model`](@ref) or [`load_checkpoint`](@ref). 
When this option is set, it will always do random initialization at the begining of training. -* callbacks: keyword argument, default `[]`. Callbacks to be invoked at each epoch or mini-batch, +* `callbacks::Vector{AbstractCallback}`: keyword argument, default `[]`. Callbacks to be invoked at each epoch or mini-batch, see `AbstractCallback`. - :type callbacks: `Vector{AbstractCallback}` """ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) opts = TrainingOptions(; kwargs...) diff --git a/src/ndarray.jl b/src/ndarray.jl index 65f1a15bd7b1..fb06ca2a9573 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -122,10 +122,9 @@ end """ -.. function:: - empty(DType, shape :: Tuple, ctx :: Context) - empty(DType, shape :: Tuple) - empty(DType, dim1, dim2, ...) + empty(DType, shape :: Tuple, ctx :: Context) + empty(DType, shape :: Tuple) + empty(DType, dim1, dim2, ...) Allocate memory for an uninitialized `NDArray` with a specified type. """ @@ -140,10 +139,9 @@ function empty{T<:DType}(:: Type{T}, shape :: Int...) end """ -.. function:: - empty(shape :: Tuple, ctx :: Context) - empty(shape :: Tuple) - empty(dim1, dim2, ...) + empty(shape :: Tuple, ctx :: Context) + empty(shape :: Tuple) + empty(dim1, dim2, ...) Allocate memory for an uninitialized `NDArray` with specific shape of type Float32. """ @@ -236,9 +234,8 @@ end import Base: size, length, ndims, eltype """ -.. function:: - size(arr :: NDArray) - size(arr :: NDArray, dim :: Int) + size(arr :: NDArray) + size(arr :: NDArray, dim :: Int) Get the shape of an `NDArray`. The shape is in Julia's column-major convention. See also the notes on NDArray shapes [`NDArray`](@ref). @@ -390,8 +387,7 @@ end import Base: copy!, copy, convert """ -.. function:: - copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) + copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) Copy contents of `src` into `dst`. 
""" @@ -467,7 +463,6 @@ end Convert an `NDArray` into a Julia `Array` of specific type. Data will be copied. """ -# Convert copy: NDArray -> Julia Array function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) convert(t, copy(arr)) end @@ -811,10 +806,9 @@ Try to create a Julia array by sharing the data with the underlying `NDArray`. # Arguments: * `arr::NDArray`: the array to be shared. - .. warning:: - - The returned array does not guarantee to share data with the underlying `NDArray`. - In particular, data sharing is possible only when the `NDArray` lives on CPU. +!!! note + The returned array does not guarantee to share data with the underlying `NDArray`. + In particular, data sharing is possible only when the `NDArray` lives on CPU. """ function try_get_shared(arr :: NDArray) if context(arr).device_type == CPU @@ -930,22 +924,21 @@ The libxmnet APIs are automatically imported from `libmxnet.so`. The functions l here operate on `NDArray` objects. The arguments to the functions are typically ordered as -.. code-block:: julia - - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) +```julia + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ..., arg_out1, arg_out2, ...) +``` unless `NDARRAY_ARG_BEFORE_SCALAR` is not set. In this case, the scalars are put before the input arguments: -.. code-block:: julia - - func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) - +```julia + func_name(scalar1, scalar2, ..., arg_in1, arg_in2, ..., arg_out1, arg_out2, ...) +``` If `ACCEPT_EMPTY_MUTATE_TARGET` is set. An overloaded function without the output arguments will also be defined: -.. code-block:: julia - - func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) +```julia + func_name(arg_in1, arg_in2, ..., scalar1, scalar2, ...) +``` Upon calling, the output arguments will be automatically initialized with empty NDArrays. 
diff --git a/src/nn-factory.jl b/src/nn-factory.jl index a60a4716bfef..76babffbe035 100644 --- a/src/nn-factory.jl +++ b/src/nn-factory.jl @@ -14,7 +14,7 @@ fully connected layers. * `hidden_activation::Symbol`: keyword argument, default `:relu`, indicating the default activation for hidden layers. The specification here could be overwritten by layer-wise specification in the `spec` argument. Also activation is not - applied to the last, i.e. the prediction layer. See :func:`Activation` for a + applied to the last, i.e. the prediction layer. See [`Activation`](@ref) for a list of supported activation types. * `prefix`: keyword argument, default `gensym()`, used as the prefix to name the constructed layers. diff --git a/src/optimizer.jl b/src/optimizer.jl index c5d4b29aa308..c672c2fe998a 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -24,27 +24,20 @@ abstract AbstractMomentumScheduler """ OptimizationState - .. attribute:: batch_size - - The size of the mini-batch used in stochastic training. - - .. attribute:: curr_epoch - - The current epoch count. Epoch 0 means no training yet, during the first - pass through the data, the epoch will be 1; during the second pass, the - epoch count will be 1, and so on. - - .. attribute:: curr_batch - - The current mini-batch count. The batch count is reset during every epoch. - The batch count 0 means the beginning of each epoch, with no mini-batch - seen yet. During the first mini-batch, the mini-batch count will be 1. - - .. attribute:: curr_iter - - The current iteration count. One iteration corresponds to one mini-batch, - but unlike the mini-batch count, the iteration count does **not** reset - in each epoch. So it track the *total* number of mini-batches seen so far. +# Attributes: +* `batch_size`: The size of the mini-batch used in stochastic training. +* `curr_epoch`: + The current epoch count. 
Epoch 0 means no training yet, during the first + pass through the data, the epoch will be 1; during the second pass, the + epoch count will be 1, and so on. +* `curr_batch`: + The current mini-batch count. The batch count is reset during every epoch. + The batch count 0 means the beginning of each epoch, with no mini-batch + seen yet. During the first mini-batch, the mini-batch count will be 1. +* `curr_iter`: + The current iteration count. One iteration corresponds to one mini-batch, + but unlike the mini-batch count, the iteration count does **not** reset + in each epoch. So it track the *total* number of mini-batches seen so far. """ type OptimizationState batch_size :: Int @@ -59,12 +52,12 @@ OptimizationState(batch_size::Int) = OptimizationState(batch_size, 0, 0, 0) get_learning_rate(scheduler, state) # Arguments -* AbstractLearningRateScheduler scheduler: a learning rate scheduler. -* OptimizationState state: the current state about epoch, mini-batch and iteration count. - :return: the current learning rate. +* `scheduler::AbstractLearningRateScheduler`: a learning rate scheduler. +* `state::OptimizationState`: the current state about epoch, mini-batch and iteration count. + +Returns the current learning rate. """ -function get_learning_rate -end +function get_learning_rate end ################################################################################ # The learning rate module @@ -74,7 +67,7 @@ import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate """ LearningRate.Fixed - Fixed learning rate scheduler always return the same learning rate. +Fixed learning rate scheduler always return the same learning rate. """ type Fixed <: AbstractLearningRateScheduler learning_rate :: Float64 @@ -84,8 +77,8 @@ get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rat """ LearningRate.Exp - :math:`\eta_t = \eta_0\gamma^t`. 
Here :math:`t` is the epoch count, or the iteration - count if `decay_on_iteration` is set to true. +``\eta_t = \eta_0\gamma^t``. Here ``t`` is the epoch count, or the iteration +count if `decay_on_iteration` is set to true. """ type Exp <: AbstractLearningRateScheduler learning_rate :: Float64 @@ -101,9 +94,9 @@ get_learning_rate(self :: Exp, state :: OptimizationState) = """ LearningRate.Inv - :math:`\eta_t = \eta_0 * (1 + \gamma * t)^(-power)`. - Here :math:`t` is the epoch count, or the iteration count if `decay_on_iteration` - is set to true. +``\eta_t = \eta_0 * (1 + \gamma * t)^(-power)``. +Here ``t`` is the epoch count, or the iteration count if `decay_on_iteration` +is set to true. """ type Inv <: AbstractLearningRateScheduler learning_rate :: Float64 @@ -132,9 +125,10 @@ end """ get_momentum(scheduler, state) -* AbstractMomentumScheduler scheduler: the momentum scheduler. -* OptimizationState state: the state about current epoch, mini-batch and iteration count. - :return: the current momentum. +* `scheduler::AbstractMomentumScheduler`: the momentum scheduler. +* `state::OptimizationState`: the state about current epoch, mini-batch and iteration count. + +Returns the current momentum. """ function get_momentum end @@ -148,8 +142,8 @@ import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum """ Momentum.Null - The null momentum scheduler always returns 0 for momentum. It is also used to - explicitly indicate momentum should not be used. +The null momentum scheduler always returns 0 for momentum. It is also used to +explicitly indicate momentum should not be used. """ type Null <: AbstractMomentumScheduler end @@ -158,7 +152,7 @@ get_momentum(self :: Null, state :: OptimizationState) = 0.0 """ Momentum.Fixed - Fixed momentum scheduler always returns the same value. +Fixed momentum scheduler always returns the same value. 
""" type Fixed <: AbstractMomentumScheduler momentum :: Float64 @@ -180,10 +174,10 @@ end """ get_updater(optimizer) -* AbstractOptimizer optimizer: the underlying optimizer. +A utility function to create an updater function, that uses its closure to +store all the states needed for each weights. - A utility function to create an updater function, that uses its closure to - store all the states needed for each weights. +* `optimizer::AbstractOptimizer`: the underlying optimizer. """ function get_updater(optimizer :: AbstractOptimizer) states = Dict{Int,Any}() @@ -206,11 +200,11 @@ abstract AbstractOptimizerOptions """ normalized_gradient(opts, state, grad) -* AbstractOptimizerOptions opts: options for the optimizer, should contain the field +* `opts::AbstractOptimizerOptions`: options for the optimizer, should contain the field `grad_scale`, `grad_clip` and `weight_decay`. -* OptimizationState state: the current optimization state. -* NDArray weight: the trainable weights. -* NDArray grad: the original gradient of the weights. +* `state::OptimizationState`: the current optimization state. +* `weight::NDArray`: the trainable weights. +* `grad::NDArray`: the original gradient of the weights. Get the properly normalized gradient (re-scaled and clipped if necessary). """ diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index 6b17f1b3e152..665cc52694b0 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -8,26 +8,26 @@ lr_scheduler :: Any = nothing ) -#=doc -.. class:: ADAM +""" + ADAM - The solver described in Diederik Kingma, Jimmy Ba: *Adam: A Method for - Stochastic Optimization*. arXiv:1412.6980 [cs.LG]. +The solver described in Diederik Kingma, Jimmy Ba: *Adam: A Method for +Stochastic Optimization*. arXiv:1412.6980 [cs.LG]. - .. function:: ADAM(; kwargs...) + ADAM(; kwargs...) - :param Real lr: default `0.001`, learning rate. - :param AbstractLearningRateScheduler lr_scheduler: default `nothing`, a - dynamic learning rate scheduler. 
If set, will overwrite the `lr` - parameter. - :param Real beta1: default `0.9`. - :param Real beta2: default `0.999`. - :param Real epsilon: default `1e-8`. - :param Real grad_clip: default `0`, if positive, will clip the gradient - into the range `[-grad_clip, grad_clip]`. - :param Real weight_decay: default `0.00001`, weight decay is equivalent - to adding a global l2 regularizer for all the parameters. -=# +* `lr::Real`: default `0.001`, learning rate. +* `lr_scheduler::AbstractLearningRateScheduler`: default `nothing`, a + dynamic learning rate scheduler. If set, will overwrite the `lr` + parameter. +* `beta1::Real`: default `0.9`. +* `beta2::Real`: default `0.999`. +* `epsilon::Real`: default `1e-8`. +* `grad_clip::Real`: default `0`, if positive, will clip the gradient + into the range `[-grad_clip, grad_clip]`. +* `weight_decay::Real`: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. +""" type ADAM <: AbstractOptimizer opts :: ADAMOptions state :: OptimizationState diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index fb6bf195fdb5..2eda1d0bf67c 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -7,26 +7,27 @@ momentum_scheduler :: Any = nothing ) -#=doc -.. class:: SGD +""" + SGD - Stochastic gradient descent optimizer. +Stochastic gradient descent optimizer. - .. function:: SGD(; kwargs...) + SGD(; kwargs...) - :param Real lr: default `0.01`, learning rate. - :param AbstractLearningRateScheduler lr_scheduler: default `nothing`, a - dynamic learning rate scheduler. If set, will overwrite the `lr` - parameter. - :param Real momentum: default `0.0`, the momentum. - :param AbstractMomentumScheduler momentum_scheduler: default `nothing`, - a dynamic momentum scheduler. If set, will overwrite the `momentum` - parameter. - :param Real grad_clip: default `0`, if positive, will clip the gradient - into the bounded range `[-grad_clip, grad_clip]`. 
- :param Real weight_decay: default `0.0001`, weight decay is equivalent to - adding a global l2 regularizer to the parameters. -=# +# Arguments: +* `lr::Real`: default `0.01`, learning rate. +* `lr_scheduler::AbstractLearningRateScheduler`: default `nothing`, a + dynamic learning rate scheduler. If set, will overwrite the `lr` + parameter. +* `momentum::Real`: default `0.0`, the momentum. +* `momentum_scheduler::AbstractMomentumScheduler`: default `nothing`, + a dynamic momentum scheduler. If set, will overwrite the `momentum` + parameter. +* `grad_clip::Real`: default `0`, if positive, will clip the gradient + into the bounded range `[-grad_clip, grad_clip]`. +* `weight_decay::Real`: default `0.0001`, weight decay is equivalent to + adding a global l2 regularizer to the parameters. +""" type SGD <: AbstractOptimizer opts :: SGDOptions state :: OptimizationState diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 498de4ba42bc..48ec27833ad6 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -189,11 +189,11 @@ end Set the attribute key to value for this `SymbolicNode`. -# Warning -It is encouraged not to call this function directly, unless you know exactly what you are doing. The -recommended way of setting attributes is when creating the `SymbolicNode`. Changing -the attributes of a `SymbolicNode` that is already been used somewhere else might -cause unexpected behavior and inconsistency. +!!! note + It is encouraged not to call this function directly, unless you know exactly what you are doing. The + recommended way of setting attributes is when creating the `SymbolicNode`. Changing + the attributes of a `SymbolicNode` that is already been used somewhere else might + cause unexpected behavior and inconsistency. """ function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) key_s = string(key) @@ -304,7 +304,7 @@ end Do shape inference according to the input shapes. 
The input shapes could be provided as a list of shapes, which should specify the shapes of inputs in the same order as -the arguments returned by :func:`list_arguments`. Alternatively, the shape information +the arguments returned by [`list_arguments`](@ref). Alternatively, the shape information could be specified via keyword arguments. Returns a 3-tuple containing shapes of all the arguments, shapes of all the outputs and @@ -372,7 +372,7 @@ end Do type inference according to the input types. The input types could be provided as a list of types, which should specify the types of inputs in the same order as -the arguments returned by :func:`list_arguments`. Alternatively, the type information +the arguments returned by [`list_arguments`](@ref). Alternatively, the type information could be specified via keyword arguments. Returns a 3-tuple containing types of all the arguments, types of all the outputs and @@ -401,7 +401,7 @@ end Get a node representing the specified output of this node. The index could be a symbol or string indicating the name of the output, or a 1-based integer -indicating the index, as in the list of :func:`list_outputs`. +indicating the index, as in the list of [`list_outputs`](@ref). 
""" function Base.getindex(self :: SymbolicNode, idx :: Union{Base.Symbol, AbstractString}) idx = Symbol(idx) diff --git a/src/util.jl b/src/util.jl index 5c50d20357e7..544962e4f161 100644 --- a/src/util.jl +++ b/src/util.jl @@ -62,7 +62,6 @@ end ################################################################################ # Internal Utilities ################################################################################ -const DOC_EMBED_ANCHOR = "**autogen:EMBED:{1}:EMBED:autogen**" function _format_typestring(typestr :: String) replace(typestr, r"\bSymbol\b", "SymbolicNode") end diff --git a/src/visualize.jl b/src/visualize.jl index c60868430a9c..f0dd74efdc67 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -3,13 +3,14 @@ import JSON """ to_graphviz(network) -* SymbolicNode network: the network to visualize. -* AbstractString title: keyword argument, default "Network Visualization", +* `network::SymbolicNode`: the network to visualize. +* `title::AbstractString:` keyword argument, default "Network Visualization", the title of the GraphViz graph. -* input_shapes: keyword argument, default `nothing`. If provided, +* `input_shapes`: keyword argument, default `nothing`. If provided, will run shape inference and plot with the shape information. Should be either a dictionary of name-shape mapping or an array of shapes. - :return: the graph description in GraphViz `dot` language. + +Returns the graph description in GraphViz `dot` language. 
""" function to_graphviz(network :: SymbolicNode; title="Network Visualization", input_shapes=nothing) if !isa(input_shapes, Void) From 3fac768636662e7befc95764fe32996c8e477494 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 2 Sep 2016 02:54:54 +0900 Subject: [PATCH 388/630] associate docs with method and not binding --- src/io.jl | 2 +- src/ndarray.jl | 24 +++++++++--------------- src/symbolic-node.jl | 11 +++-------- 3 files changed, 13 insertions(+), 24 deletions(-) diff --git a/src/io.jl b/src/io.jl index 1878125f15fd..0c5de69e53f8 100644 --- a/src/io.jl +++ b/src/io.jl @@ -576,6 +576,7 @@ function _define_data_iter_creator(hdr :: MX_handle) end defun = quote + @doc $f_desc -> function $iter_name(; kwargs...) arg_keys = String[string(k) for (k,v) in kwargs] arg_vals = String[dump_mx_param(v) for (k,v) in kwargs] @@ -589,7 +590,6 @@ function _define_data_iter_creator(hdr :: MX_handle) end $(isprovider ? :(const $alias_name = $iter_name) : :()) - @doc $f_desc $iter_name end defun end diff --git a/src/ndarray.jl b/src/ndarray.jl index fb06ca2a9573..6cccacd35bd8 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1047,7 +1047,6 @@ function _get_function_expressions(handle :: MX_handle, name) func_head = Expr(:call, name, args...) func_def = Expr(:function, func_head, func_body) - exprs = Expr[func_def] if accept_empty_mutate args0 = args[1:n_used_vars+n_scalars] @@ -1058,9 +1057,10 @@ function _get_function_expressions(handle :: MX_handle, name) func_head0 = Expr(:call, name, args0...) 
func_def0 = Expr(:function, func_head0, func_body0) - push!(exprs, func_def0) + return func_def, func_def0 + else + return func_def, :() end - return exprs end macro _import_ndarray_functions() @@ -1071,19 +1071,13 @@ macro _import_ndarray_functions() handle = funcs[i] name, desc = _get_function_description(handle) - exprs = _get_function_expressions(handle, name) + func_def, func_def0 = _get_function_expressions(handle, name) - # TODO(vchuravy): Fix this in a more elegant way once we only support - # v0.5 - if isdefined(Base, name) || isdefined(name) - expr = quote - $(exprs...) - end - else - expr = quote - $(exprs...) - @doc $desc $name - end + expr = quote + $(isdefined(Base, name) ? :(import Base.$name) : :()) + @doc $desc -> + $func_def + $func_def0 end push!(func_exprs, expr) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 48ec27833ad6..5c1b123b0b7e 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -703,14 +703,9 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) end func_def = Expr(:function, func_head, Expr(:block, func_body)) - # TODO(vchuravy) find a more elegant solution fro v0.5 - if isdefined(Base, func_name) || isdefined(func_name) - return func_def - else - return quote - $func_def - @doc $f_desc $func_name - end + return quote + @doc $f_desc -> + $func_def end end From f0b758b62cadf48e6d7d836b765960c6e03a8899 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 2 Sep 2016 02:57:51 +0900 Subject: [PATCH 389/630] move call docs into type docs --- src/symbolic-node.jl | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 5c1b123b0b7e..13675450232a 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -2,6 +2,12 @@ SymbolicNode SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. + + (self :: SymbolicNode)(args :: SymbolicNode...) + (self :: SymbolicNode)(; kwargs...) 
+ +Make a new node by composing `self` with `args`. Or the arguments +can be specified using keyword arguments. """ type SymbolicNode handle :: MX_SymbolHandle @@ -32,15 +38,6 @@ function Base.copy(self :: SymbolicNode) Base.deepcopy(self) end -# TODO(vchuravy) How to add documentation to the v0.5 style call overloading -@doc """ - call(self :: SymbolicNode, args :: SymbolicNode...) - call(self :: SymbolicNode; kwargs...) - -Make a new node by composing `self` with `args`. Or the arguments -can be specified using keyword arguments. -""" SymbolicNode - @compat function (self::SymbolicNode)(args :: SymbolicNode...) s = deepcopy(self) _compose!(s, args...) From 1ba38965f2780dad6659dcac74a7654bdf77cf3a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 1 Sep 2016 11:50:48 -0700 Subject: [PATCH 390/630] update default dataset URL (fix test failing of #123) --- src/util.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/util.jl b/src/util.jl index 544962e4f161..10d42b25a37c 100644 --- a/src/util.jl +++ b/src/util.jl @@ -18,7 +18,7 @@ function get_mnist_ubyte() filenames = Dict([k => joinpath(mnist_dir, v) for (k,v) in filenames]) if !all(isfile, values(filenames)) cd(mnist_dir) do - mnist_dir = download("http://webdocs.cs.ualberta.ca/~bx3/data/mnist.zip", "mnist.zip") + mnist_dir = download("http://data.dmlc.ml/mxnet/data/mnist.zip", "mnist.zip") try run(`unzip -u $mnist_dir`) catch @@ -41,7 +41,7 @@ function get_cifar10() filenames = Dict([k => joinpath(cifar10_dir, v) for (k,v) in filenames]) if !all(isfile, values(filenames)) cd(cifar10_dir) do - run(`wget http://webdocs.cs.ualberta.ca/~bx3/data/cifar10.zip`) + run(`http://data.dmlc.ml/mxnet/data/cifar10.zip`) try run(`unzip -u cifar10.zip`) catch From 6840119fb6440c4e6ab3bfbecc131795c8a82557 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 25 Aug 2016 04:28:49 +0900 Subject: [PATCH 391/630] handle kwargs for ndarray functions --- src/ndarray.jl | 40 
++++++++++++++++++++++------------------ test/unittest/ndarray.jl | 11 +++++++++++ 2 files changed, 33 insertions(+), 18 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 6cccacd35bd8..fb203cd91635 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -885,17 +885,17 @@ with corresponding support (see `load`). * `filename::String`: path to the binary file to write to. * `data`: data to save to file. Data can be a`NDArray`, a `Vector{NDArray}`, or a `Dict{Base.Symbol, NDArray}`. """ -function save(filename::AbstractString, data::NDArray) +function save(filename::String, data::NDArray) save(filename, [data]) end -function save(filename::AbstractString, data::Vector{NDArray}) +function save(filename::String, data::Vector{NDArray}) @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), filename, length(data), MX_handle[data...], char_pp(0)) end -function save(filename::AbstractString, data::Dict{Base.Symbol,NDArray}) +function save(filename::String, data::Dict{Base.Symbol,NDArray}) names = [k for k in keys(data)] arrays = MX_handle[data[k] for k in names] - names = AbstractString[string(k) for k in names] + names = String[string(k) for k in names] @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), filename, length(names), arrays, names) @@ -904,10 +904,12 @@ end ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ -function _invoke_mxfunction(func_handle::MX_handle, use_vars, scalars, mut_vars) - @mxcall(:MXFuncInvoke, - (MX_handle, Ptr{MX_handle}, Ptr{MX_float}, Ptr{MX_handle}), - func_handle, use_vars, scalars, mut_vars) +function _invoke_mxfunction(func_handle::MX_handle, use_vars, scalars, mut_vars; kwargs...) 
+ names = String[string(entry[1]) for entry in kwargs] + args = String[string(entry[2]) for entry in kwargs] + @mxcall(:MXFuncInvokeEx, + (MX_handle, Ptr{MX_handle}, Ptr{MX_float}, Ptr{MX_handle}, Cint, char_pp, char_pp), + func_handle, use_vars, scalars, mut_vars, length(names), names, args) end @enum(LIBMX_FUNC_TYPE_MASK, @@ -1035,7 +1037,7 @@ function _get_function_expressions(handle :: MX_handle, name) end stmt_call = quote local handle = _get_function($(QuoteNode(name))) - _invoke_mxfunction(handle, $_use_vars, $_scalars, $_mut_vars) + _invoke_mxfunction(handle, $_use_vars, $_scalars, $_mut_vars; kwargs...) end if n_mutate_vars == 1 stmt_ret = :(return out1) @@ -1043,20 +1045,22 @@ function _get_function_expressions(handle :: MX_handle, name) stmt_ret = Expr(:return, Expr(:tuple, [Symbol("out$i") for i=1:n_mutate_vars]...)) end - func_body = Expr(:block, stmt_call, stmt_ret) - func_head = Expr(:call, name, args...) - - func_def = Expr(:function, func_head, func_body) + func_def = quote + function $name($(args...); kwargs...) + $stmt_call + $stmt_ret + end + end if accept_empty_mutate args0 = args[1:n_used_vars+n_scalars] - func_head0 = Expr(:call, name, args0...) _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] - stmt_call0 = Expr(:call, name, args0..., _mut_vars0...) - func_body0 = Expr(:block, stmt_call0) - func_head0 = Expr(:call, name, args0...) - func_def0 = Expr(:function, func_head0, func_body0) + func_def0 = quote + function $name($(args0...); kwargs...) + $name($(args0...), $(_mut_vars0...); kwargs...) 
+ end + end return func_def, func_def0 else return func_def, :() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index f76d8cd95b59..83b0578bb91f 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -298,6 +298,16 @@ function test_eltype() end end +function test_kwargs() + info("NDArray::kwargs") + dims1 = (2,3,4) + + x = mx.empty(dims1) + tx = mx.transpose(x, axes=(1,0,2)) + # @test size(tx) == (3,2,4) + @test size(tx) == (2,4,3) +end + ################################################################################ # Run tests ################################################################################ @@ -315,5 +325,6 @@ test_sqrt() test_eltype() test_nd_as_jl() test_dot() +test_kwargs() end From e0b76682a9f5a676935d905c50bfcec75fcda9d8 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 2 Sep 2016 02:28:04 +0900 Subject: [PATCH 392/630] special case transpose --- src/ndarray.jl | 10 ++++++++++ test/unittest/ndarray.jl | 10 ++++++---- 2 files changed, 16 insertions(+), 4 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index fb203cd91635..c3288dc323eb 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1035,6 +1035,15 @@ function _get_function_expressions(handle :: MX_handle, name) if name == :dot _use_vars.args[2:end] = flipdim(_use_vars.args[2:end], 1) end + + if name == :transpose + transform = quote + kwargs = Any[key != :axes ? (key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] + end + else + transform = :() + end + stmt_call = quote local handle = _get_function($(QuoteNode(name))) _invoke_mxfunction(handle, $_use_vars, $_scalars, $_mut_vars; kwargs...) @@ -1047,6 +1056,7 @@ function _get_function_expressions(handle :: MX_handle, name) func_def = quote function $name($(args...); kwargs...) 
+ $transform $stmt_call $stmt_ret end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 83b0578bb91f..8d5a1b0d57a9 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -302,10 +302,12 @@ function test_kwargs() info("NDArray::kwargs") dims1 = (2,3,4) - x = mx.empty(dims1) - tx = mx.transpose(x, axes=(1,0,2)) - # @test size(tx) == (3,2,4) - @test size(tx) == (2,4,3) + A = rand(Float32, dims1) + x = mx.NDArray(A) + tx = mx.transpose(x, axes=(2,1,3)) + tA = permutedims(A, [2,1,3]) + @test size(tx) == size(tA) + @test all(copy(tx) .== tA) end ################################################################################ From f5e80af08039bb410b69fbbe1899586e70810510 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 2 Sep 2016 05:34:43 +0900 Subject: [PATCH 393/630] add special casing of transpose to the symbolic layer --- src/symbolic-node.jl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 13675450232a..bbaf16c47001 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -630,6 +630,11 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) name = "" end + if $func_name == :transpose + kwargs = Any[key != :axes ? 
(key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] + end + + param_keys = String[] param_vals = String[] symbol_kws = Dict{Symbol, SymbolicNode}() From ae1d45b4566114c1f271c07fdc36e5f3bd4824eb Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 6 Sep 2016 11:47:36 -0400 Subject: [PATCH 394/630] preparing for v0.0.9 --- NEWS.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/NEWS.md b/NEWS.md index f11136c8cd46..03ede8371798 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,10 @@ +# v0.0.9 (2016.09.06) + +* Migrated documentation system to `Documenter.jl` (@vchuravy) +* Simplified building by using Julia's OpenBlas (@staticfloat) +* Freezing parameters (@vchuravy) +* Support `DType` for `NDArray` (@vchuravy) + # v0.0.8 (2016.02.08) * Fix compatability with Julia v0.5. From 9e3a60a56120fa75ad0291ea0cbf81af146c7fc4 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 6 Sep 2016 19:55:05 +0200 Subject: [PATCH 395/630] Positional arguments in symbolic API need type restriction. 
(#134) * fix positional arguments can only be SymbolicNodes * fix missing signature of Symbolic function in docs --- src/symbolic-node.jl | 18 +++++++----------- test/unittest/symbolic-node.jl | 7 +++++++ 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index bbaf16c47001..15ae1d7d0e2d 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -610,7 +610,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) signature = _format_signature(Int(ref_nargs[]), ref_arg_names) f_desc = " " * func_name_s * "(" * signature * ")\n\n" - f_desc = unsafe_wrap(String, ref_desc[]) * "\n\n" + f_desc *= unsafe_wrap(String, ref_desc[]) * "\n\n" if !isempty(kv_nargs_s) f_desc *= "This function support variable length positional `SymbolicNode` inputs.\n\n" end @@ -620,9 +620,9 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) f_desc *= "* `attrs::Dict{Symbol, AbstractString}`: The attributes associated with this `SymbolicNode`.\n\n" f_desc *= "Returns `$(_format_typestring(unsafe_wrap(String, ref_ret_type[])))`." - # function $func_name(args...; kwargs...) - func_head = Expr(:call, func_name, Expr(:parameters, Expr(:..., :kwargs)), Expr(:..., :args)) - func_body = quote + func_def = quote + @doc $f_desc -> + function $func_name(args::SymbolicNode...; kwargs...) 
idx = findfirst(x -> x[1] == :name, kwargs) if idx > 0 name = kwargs[idx][2] @@ -702,13 +702,9 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) end return node - end - - func_def = Expr(:function, func_head, Expr(:block, func_body)) - return quote - @doc $f_desc -> - $func_def - end + end # function + end # quote + return func_def end function _get_atomic_symbol_creators() diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 825602723151..388a74fe644b 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -106,6 +106,12 @@ function test_attrs() @test_throws MethodError mx.Convolution(data=data2, kernel = (1,1), num_filter = 1, attrs = Dict(:test => "1.0", :test2 => 1.0)) end +function test_functions() + info("SymbolicNode::Functions") + data = mx.Variable(:data) + typeof(mx.sum(data)) == mx.SymbolicNode +end + ################################################################################ # Run tests ################################################################################ @@ -116,5 +122,6 @@ test_infer_shape() test_infer_shape_error() test_saveload() test_attrs() +test_functions() end From 8949dbb5f63fcbc2b97ffcfc6460c270db49c3f6 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 6 Sep 2016 19:59:18 +0200 Subject: [PATCH 396/630] remove mistaken + in REQUIRE (#135) --- REQUIRE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/REQUIRE b/REQUIRE index 6fbfb73bd328..38ef82563fc3 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,4 +1,4 @@ -julia 0.4+ +julia 0.4 Compat 0.8.4 Formatting BinDeps From f06d9943f51fdb77b8c2841aa70258eb4dbf7f30 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 6 Sep 2016 23:22:48 -0400 Subject: [PATCH 397/630] fix special handling of dot for SymbolicNode (#123) --- src/ndarray.jl | 3 +++ src/symbolic-node.jl | 12 ++++++++++-- test/unittest/symbolic-node.jl | 17 ++++++++++++++++- 3 files changed, 29 insertions(+), 3 deletions(-) diff --git 
a/src/ndarray.jl b/src/ndarray.jl index c3288dc323eb..2e2c806552f7 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1036,6 +1036,9 @@ function _get_function_expressions(handle :: MX_handle, name) _use_vars.args[2:end] = flipdim(_use_vars.args[2:end], 1) end + # XXX: hacky way of solving the semantic difference of the axes parameter in Julia + # and in libmxnet. + # See https://github.com/dmlc/MXNet.jl/pull/123 if name == :transpose transform = quote kwargs = Any[key != :axes ? (key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 15ae1d7d0e2d..dfc54c3c3b1c 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -629,12 +629,20 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) else name = "" end + + # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped + # See https://github.com/dmlc/MXNet.jl/issues/55 + if $func_name_s == "dot" + args = reverse(args) + end - if $func_name == :transpose + # XXX: hacky way of solving the semantic difference of the axes parameter in Julia + # and in libmxnet. + # See https://github.com/dmlc/MXNet.jl/pull/123 + if $func_name_s == "transpose" kwargs = Any[key != :axes ? 
(key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] end - param_keys = String[] param_vals = String[] symbol_kws = Dict{Symbol, SymbolicNode}() diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 388a74fe644b..d78b0775a983 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -2,7 +2,7 @@ module TestSymbolicNode using MXNet using Base.Test -using ..Main: mlp2 +using ..Main: mlp2, reldiff ################################################################################ # Test Implementations @@ -112,6 +112,20 @@ function test_functions() typeof(mx.sum(data)) == mx.SymbolicNode end +function test_dot() + info("SymbolicNode::dot") + x = mx.Variable(:x) + y = mx.Variable(:y) + z = mx.dot(x, y) + z_exec = mx.bind(z, context=mx.cpu(), + args=Dict(:x=>mx.ones((100, 2)), :y=>mx.ones((2, 200)))) + mx.forward(z_exec) + + ret = copy(z_exec.outputs[1]) + @test size(ret) == (100, 200) + @test reldiff(ret, 2*ones(100, 200)) < 1e-6 +end + ################################################################################ # Run tests ################################################################################ @@ -123,5 +137,6 @@ test_infer_shape_error() test_saveload() test_attrs() test_functions() +test_dot() end From 6e2c1aecf098ee00c9a1cfe495650c1f0d9674b2 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 6 Sep 2016 23:44:05 -0400 Subject: [PATCH 398/630] add readme for developers --- README-DEV.md | 10 ++++++++++ deps/build.jl | 2 ++ 2 files changed, 12 insertions(+) create mode 100644 README-DEV.md diff --git a/README-DEV.md b/README-DEV.md new file mode 100644 index 000000000000..e4dc3fd45c29 --- /dev/null +++ b/README-DEV.md @@ -0,0 +1,10 @@ +# Workflow for making a release + +1. Update `NEWS.md` to list important changes +2. Check out the `stable` branch, rebase with `master`. +3. Update `libmxnet_curr_ver` in `deps/build.jl` to the latest commit SHA (or any proper reference). 
Using `master` here is not good because future changes in libmxnet might break existing Julia packages. +4. Run tests. +5. Commit changes and push. +6. Run `Pkg.tag("MXNet")` in Julia. +7. Run `Pkg.publish()`, which will open a browser for making a pull request to METADATA.jl. +8. Edit the [releases page](https://github.com/dmlc/MXNet.jl/releases) to copy the release notes from `NEWS.md` to the newly created release tag. diff --git a/deps/build.jl b/deps/build.jl index 1736bf9eb0a2..22f06e7acf36 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -3,6 +3,7 @@ using Compat # First try to detect and load existing libmxnet ################################################################################ libmxnet_detected = false +libmxnet_curr_ver = "master" if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") @@ -58,6 +59,7 @@ if !libmxnet_detected ChangeDirectory(_srcdir) `rm -rf mxnet` `git clone --recursive https://github.com/dmlc/mxnet` + `git checkout $libmxnet_curr_ver` FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin ChangeDirectory("$_mxdir") `cp make/config.mk config.mk` From ebdafe814bfe7f277b256470aafdeed3044f64a5 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 6 Sep 2016 23:44:57 -0400 Subject: [PATCH 399/630] update libmxnet SHA --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 22f06e7acf36..cc7fb6e563c3 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -3,7 +3,7 @@ using Compat # First try to detect and load existing libmxnet ################################################################################ libmxnet_detected = false -libmxnet_curr_ver = "master" +libmxnet_curr_ver = "7a90e598623314b5f7adc1184e4012f6c6160ff6" if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") From 545332d3c66545653c525e3982b0f80f34a6409f Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 6 Sep 2016 
23:45:38 -0400 Subject: [PATCH 400/630] keep the master branch to track libmxnet master --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index cc7fb6e563c3..22f06e7acf36 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -3,7 +3,7 @@ using Compat # First try to detect and load existing libmxnet ################################################################################ libmxnet_detected = false -libmxnet_curr_ver = "7a90e598623314b5f7adc1184e4012f6c6160ff6" +libmxnet_curr_ver = "master" if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") From 39bc643ee5c54e243c45a5837b408f8ed4f9aa84 Mon Sep 17 00:00:00 2001 From: pluskid Date: Wed, 7 Sep 2016 00:11:48 -0400 Subject: [PATCH 401/630] make is_windows compatible with Julia v0.4 --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 22f06e7acf36..ebce44a22ec7 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -23,7 +23,7 @@ if !libmxnet_detected ################################################################################ # If not found, try to build automatically using BinDeps ################################################################################ - if is_windows() + if @compat(is_windows()) info("Please follow the libmxnet documentation on how to build manually") info("or to install pre-build packages:") info("http://mxnet.readthedocs.io/en/latest/how_to/build.html#building-on-windows") From ba2c9995c87c752fd8ebe495fff7dbeb9488a889 Mon Sep 17 00:00:00 2001 From: pluskid Date: Wed, 7 Sep 2016 00:20:29 -0400 Subject: [PATCH 402/630] fix build script --- deps/build.jl | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index ebce44a22ec7..c073bdf6b1c8 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -59,9 +59,12 @@ if !libmxnet_detected ChangeDirectory(_srcdir) `rm -rf mxnet` `git clone 
--recursive https://github.com/dmlc/mxnet` - `git checkout $libmxnet_curr_ver` + @build_steps begin + ChangeDirectory(joinpath(_srcdir, "mxnet")) + `git checkout $libmxnet_curr_ver` + end FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin - ChangeDirectory("$_mxdir") + ChangeDirectory(_mxdir) `cp make/config.mk config.mk` if is_apple() `cp make/osx.mk config.mk` From 8f4396116ac8e245f4773d50e3323fdd234c1249 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Sep 2016 00:24:32 -0400 Subject: [PATCH 403/630] prepare for v0.1.0 --- NEWS.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/NEWS.md b/NEWS.md index 03ede8371798..4423c42d6724 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,7 @@ +# v0.1.0 (2016.09.07) + +* Track specific libmxnet version for each release. + # v0.0.9 (2016.09.06) * Migrated documentation system to `Documenter.jl` (@vchuravy) From 86362f353ebebe38452dd96853816f7b0b8af79c Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Sep 2016 00:25:37 -0400 Subject: [PATCH 404/630] update workflow for creating release --- README-DEV.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README-DEV.md b/README-DEV.md index e4dc3fd45c29..b148fde3d5c7 100644 --- a/README-DEV.md +++ b/README-DEV.md @@ -1,7 +1,7 @@ # Workflow for making a release 1. Update `NEWS.md` to list important changes -2. Check out the `stable` branch, rebase with `master`. +2. Check out the `stable` branch, merge with `master`. 3. Update `libmxnet_curr_ver` in `deps/build.jl` to the latest commit SHA (or any proper reference). Using `master` here is not good because future changes in libmxnet might break existing Julia packages. 4. Run tests. 5. Commit changes and push. 
From 1a0b440bef9fe791ffa35abb6bbfe58cccba85c8 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 7 Sep 2016 18:56:11 +0200 Subject: [PATCH 405/630] Make build.jl aware of other blas vendors besides OpenBLAS (#137) * be a bit more lenient about which blas versions we support * also set dependencies to the correct version * Fail if blas_vendor == :unknown --- deps/build.jl | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index c073bdf6b1c8..3efddefa8061 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -30,7 +30,7 @@ if !libmxnet_detected error("Automatic building libxmnet on Windows is currently not supported yet.") end - openblas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) + blas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) if VERSION >= v"0.5.0-dev+4338" blas_vendor = Base.BLAS.vendor() @@ -43,6 +43,14 @@ if !libmxnet_detected ilp64 = "-DINTERFACE64" end + if blas_vendor == :unknown + error("Julia is build with an unkown blas library ($blas_path).\n Automatic building of libmxnet is not yet supported.") + elseif blas_vendor != :openblas64 || blas_vendor != :openblas + warn("Unsure if we can build against $blas_vendor.") + end + + blas_name = blas_vendor == :openblas64 ? 
"openblas" : string(blas_vendor) + #-------------------------------------------------------------------------------- # Build libmxnet mxnet = library_dependency("mxnet", aliases=["libmxnet", "libmxnet.so"]) @@ -62,6 +70,7 @@ if !libmxnet_detected @build_steps begin ChangeDirectory(joinpath(_srcdir, "mxnet")) `git checkout $libmxnet_curr_ver` + `git submodule update` end FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin ChangeDirectory(_mxdir) @@ -72,7 +81,7 @@ if !libmxnet_detected `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` `cp ../../cblas.h include/cblas.h` - `make USE_BLAS=openblas MSHADOW_LDFLAGS="$openblas_path"` + `make USE_BLAS=$blas_name MSHADOW_LDFLAGS="$blas_path"` `cp lib/libmxnet.so $_libdir` end) end From d4278ca5515c6c89d27fc40350c6adf683caf08d Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Sep 2016 13:13:03 -0400 Subject: [PATCH 406/630] remove unnecessary @compat call --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 3efddefa8061..66b08165cfa4 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -23,7 +23,7 @@ if !libmxnet_detected ################################################################################ # If not found, try to build automatically using BinDeps ################################################################################ - if @compat(is_windows()) + if is_windows() info("Please follow the libmxnet documentation on how to build manually") info("or to install pre-build packages:") info("http://mxnet.readthedocs.io/en/latest/how_to/build.html#building-on-windows") From 1ff097550557e8e560a5be58f2e36920d2f68e56 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Sep 2016 13:33:53 -0400 Subject: [PATCH 407/630] replace pipe with pipeline --- src/util.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util.jl 
b/src/util.jl index 10d42b25a37c..a53647790c06 100644 --- a/src/util.jl +++ b/src/util.jl @@ -46,7 +46,7 @@ function get_cifar10() run(`unzip -u cifar10.zip`) catch try - run(pipe(`7z x cifar10.zip`,stdout=DevNull)) + run(pipeline(`7z x cifar10.zip`, stdout=DevNull)) catch error("Extraction Failed:No extraction program found in path") end From 37e715ab14bd0a3a919277843bd8e8d33d4bb9f5 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 7 Sep 2016 14:12:49 -0400 Subject: [PATCH 408/630] fix build script --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 66b08165cfa4..b65c3d89632b 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -45,7 +45,7 @@ if !libmxnet_detected if blas_vendor == :unknown error("Julia is build with an unkown blas library ($blas_path).\n Automatic building of libmxnet is not yet supported.") - elseif blas_vendor != :openblas64 || blas_vendor != :openblas + elseif blas_vendor != :openblas64 && blas_vendor != :openblas warn("Unsure if we can build against $blas_vendor.") end From 9798c974b3365e543a4184a0b8f71174410121e4 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 8 Sep 2016 07:36:46 +0900 Subject: [PATCH 409/630] improve buildscript --- deps/build.jl | 51 ++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 36 insertions(+), 15 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index b65c3d89632b..d69ac93a2c3b 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -44,19 +44,25 @@ if !libmxnet_detected end if blas_vendor == :unknown - error("Julia is build with an unkown blas library ($blas_path).\n Automatic building of libmxnet is not yet supported.") - elseif blas_vendor != :openblas64 && blas_vendor != :openblas - warn("Unsure if we can build against $blas_vendor.") + info("Julia is build with an unkown blas library ($blas_path).") + info("Attempting build without reusing the blas library") + USE_JULIA_BLAS = false + elseif !(blas_vendor in 
(:openblas, :openblas64)) + info("Unsure if we can build against $blas_vendor.") + info("Attempting build anyway.") + USE_JULIA_BLAS = true + else + USE_JULIA_BLAS = true end blas_name = blas_vendor == :openblas64 ? "openblas" : string(blas_vendor) #-------------------------------------------------------------------------------- # Build libmxnet - mxnet = library_dependency("mxnet", aliases=["libmxnet", "libmxnet.so"]) + mxnet = library_dependency("mxnet", aliases=["mxnet", "libmxnet", "libmxnet.so"]) _prefix = joinpath(BinDeps.depsdir(mxnet), "usr") - _srcdir = joinpath(BinDeps.depsdir(mxnet),"src") + _srcdir = joinpath(BinDeps.depsdir(mxnet), "src") _mxdir = joinpath(_srcdir, "mxnet") _libdir = joinpath(_prefix, "lib") provides(BuildProcess, @@ -64,28 +70,43 @@ if !libmxnet_detected CreateDirectory(_srcdir) CreateDirectory(_libdir) @build_steps begin - ChangeDirectory(_srcdir) - `rm -rf mxnet` - `git clone --recursive https://github.com/dmlc/mxnet` + BinDeps.DirectoryRule(_mxdir, @build_steps begin + ChangeDirectory(_srcdir) + `git clone --recursive https://github.com/dmlc/mxnet` + end) @build_steps begin - ChangeDirectory(joinpath(_srcdir, "mxnet")) + ChangeDirectory(_mxdir) + `git fetch` `git checkout $libmxnet_curr_ver` `git submodule update` end - FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin + FileRule(joinpath(_mxdir, "config.mk"), @build_steps begin ChangeDirectory(_mxdir) - `cp make/config.mk config.mk` if is_apple() `cp make/osx.mk config.mk` + else + `cp make/config.mk config.mk` end `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` - `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` + end) + @build_steps begin + ChangeDirectory(_mxdir) `cp ../../cblas.h include/cblas.h` - `make USE_BLAS=$blas_name MSHADOW_LDFLAGS="$blas_path"` - `cp lib/libmxnet.so $_libdir` + if USE_JULIA_BLAS + MakeTargets("USE_BLAS=$blas_name -j$(nprocs())", env=Dict( + "MSHADOW_LDFLAGS" => blas_path, + 
"MSHADOW_CFLAGS" => ilp64, + )) + else + `make -j$(nprocs())` + end + `rm $_libdir/libmxnet.so` + end + FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin + `cp $_mxdir/lib/libmxnet.so $_libdir/` end) end - end), mxnet) + end), mxnet, installed_libpath=_libdir) @BinDeps.install Dict(:mxnet => :mxnet) end From eb4a3fd72ff743ca43e8478baa0b1b28eef52ae8 Mon Sep 17 00:00:00 2001 From: pluskid Date: Thu, 8 Sep 2016 11:21:32 -0400 Subject: [PATCH 410/630] fix build script. --- deps/build.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index d69ac93a2c3b..d6d6f2386c48 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -44,7 +44,7 @@ if !libmxnet_detected end if blas_vendor == :unknown - info("Julia is build with an unkown blas library ($blas_path).") + info("Julia is built with an unkown blas library ($blas_path).") info("Attempting build without reusing the blas library") USE_JULIA_BLAS = false elseif !(blas_vendor in (:openblas, :openblas64)) @@ -100,7 +100,7 @@ if !libmxnet_detected else `make -j$(nprocs())` end - `rm $_libdir/libmxnet.so` + `rm -f $_libdir/libmxnet.so` end FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin `cp $_mxdir/lib/libmxnet.so $_libdir/` From 44d659ab661baccd6c3eaafa15528026ab67fab4 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 9 Sep 2016 02:00:24 +0200 Subject: [PATCH 411/630] Fixes deps/build.jl on Travis (#142) The problem is that we need to specify `MSHADOW_LDFLAGS` and `MSHADOW_CFLAGS`. The way MXNet is currently setup we can't change these from the terminal. This solves the problem by using `sed` to change `mshadow/make/mshadow.mk`. But if mshadow would change git could throw an error on updating. So we remove the changes before an update and add them afterwards again. This has the drawback that users can't modifty that file. (But those users should maintain their own build and use `MXNET_HOME`.) 
This also eagerly deletes `usr/lib/libmxnet.so` So that we actually trigger a rebuild on an update of the pkg. --- deps/build.jl | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index d6d6f2386c48..b79940e8c1eb 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -56,6 +56,7 @@ if !libmxnet_detected end blas_name = blas_vendor == :openblas64 ? "openblas" : string(blas_vendor) + MSHADOW_LDFLAGS = "MSHADOW_LDFLAGS=-lm $blas_path" #-------------------------------------------------------------------------------- # Build libmxnet @@ -65,6 +66,9 @@ if !libmxnet_detected _srcdir = joinpath(BinDeps.depsdir(mxnet), "src") _mxdir = joinpath(_srcdir, "mxnet") _libdir = joinpath(_prefix, "lib") + # We have do eagerly delete the installed libmxnet.so + # Otherwise we won't rebuild on an update. + run(`rm -f $_libdir/libmxnet.so`) provides(BuildProcess, (@build_steps begin CreateDirectory(_srcdir) @@ -76,9 +80,12 @@ if !libmxnet_detected end) @build_steps begin ChangeDirectory(_mxdir) + # TODO(vchuravy). 
We have to reset mshadow/make/mshadow.mk + `git -C mshadow checkout -- make/mshadow.mk` `git fetch` `git checkout $libmxnet_curr_ver` `git submodule update` + `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` end FileRule(joinpath(_mxdir, "config.mk"), @build_steps begin ChangeDirectory(_mxdir) @@ -93,14 +100,10 @@ if !libmxnet_detected ChangeDirectory(_mxdir) `cp ../../cblas.h include/cblas.h` if USE_JULIA_BLAS - MakeTargets("USE_BLAS=$blas_name -j$(nprocs())", env=Dict( - "MSHADOW_LDFLAGS" => blas_path, - "MSHADOW_CFLAGS" => ilp64, - )) + `make -j$(nprocs()) USE_BLAS=$blas_name $MSHADOW_LDFLAGS` else `make -j$(nprocs())` end - `rm -f $_libdir/libmxnet.so` end FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin `cp $_mxdir/lib/libmxnet.so $_libdir/` From e218d85baa630ee44eeb2b047aa8d7c24870b5d7 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 8 Sep 2016 21:42:14 -0400 Subject: [PATCH 412/630] fix NEWS.md for skipped v0.0.9 --- NEWS.md | 5 +---- REQUIRE | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/NEWS.md b/NEWS.md index 4423c42d6724..2d0ec817ce39 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,9 +1,6 @@ -# v0.1.0 (2016.09.07) +# v0.1.0 (2016.09.08) * Track specific libmxnet version for each release. 
- -# v0.0.9 (2016.09.06) - * Migrated documentation system to `Documenter.jl` (@vchuravy) * Simplified building by using Julia's OpenBlas (@staticfloat) * Freezing parameters (@vchuravy) diff --git a/REQUIRE b/REQUIRE index 38ef82563fc3..d37f975fe665 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,5 +1,5 @@ julia 0.4 -Compat 0.8.4 +Compat 0.9.1 Formatting BinDeps JSON From 6bd3951d93999d1d58681ca56b2269bb4a7558a4 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 16 Sep 2016 03:42:17 +0900 Subject: [PATCH 413/630] try to automatically find CUDA --- deps/build.jl | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index b79940e8c1eb..4a13b19c105c 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -32,6 +32,16 @@ if !libmxnet_detected blas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) + # Try to find cuda + hascuda = false + if haskey(ENV, "CUDA_HOME") + hascuda = Libdl.dlopen_e(joinpath(ENV["CUDA_HOME"], "lib64", "libcuda.so")) != C_NULL + else + cudapaths = String["/opt/cuda/lib64", "/usr/local/cuda/lib64"] + cudalib = Libdl.find_library(["libcuda", "libcuda.so"], cudapaths) + hascuda = Libdl.dlopen_e(cudalib) != C_NULL + end + if VERSION >= v"0.5.0-dev+4338" blas_vendor = Base.BLAS.vendor() else @@ -80,7 +90,6 @@ if !libmxnet_detected end) @build_steps begin ChangeDirectory(_mxdir) - # TODO(vchuravy). 
We have to reset mshadow/make/mshadow.mk `git -C mshadow checkout -- make/mshadow.mk` `git fetch` `git checkout $libmxnet_curr_ver` @@ -95,6 +104,12 @@ if !libmxnet_detected `cp make/config.mk config.mk` end `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` + if hascuda + `sed -i -s 's/USE_CUDA = 0/USE_CUDA = 1/' config.mk` + if haskey(ENV, "CUDA_HOME") + `sed -i -s 's/USE_CUDA_PATH = NULL/USE_CUDA_PATH = $(ENV["CUDA_HOME"])/' config.mk` + end + end end) @build_steps begin ChangeDirectory(_mxdir) From 7a2432c0b4851189108689caf02ac43014285ad5 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Sep 2016 13:15:13 -0400 Subject: [PATCH 414/630] ignore vscode project files --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 3325a370ecaf..d6791c8491bf 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ deps/deps.jl docs/_build docs/build/ docs/site/ +.vscode From e6b050805ea81535232c52c4847b8478e54f318a Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Sep 2016 17:58:08 -0400 Subject: [PATCH 415/630] ndarray ops --- src/base.jl | 2 +- src/ndarray.jl | 197 ++++++++++++++----------------------------- src/symbolic-node.jl | 4 +- src/util.jl | 70 +++++++++++++++ 4 files changed, 135 insertions(+), 138 deletions(-) diff --git a/src/base.jl b/src/base.jl index cca45c273b96..a864125757cd 100644 --- a/src/base.jl +++ b/src/base.jl @@ -102,7 +102,7 @@ macro mx_define_handle_t(name, destructor) end @mx_define_handle_t(MX_NDArrayHandle, MXNDArrayFree) -@mx_define_handle_t(MX_FunctionHandle, nop) +@mx_define_handle_t(MX_OpHandle, nop) @mx_define_handle_t(MX_SymbolHandle, MXSymbolFree) @mx_define_handle_t(MX_ExecutorHandle, MXExecutorFree) @mx_define_handle_t(MX_DataIterHandle, MXDataIterFree) diff --git a/src/ndarray.jl b/src/ndarray.jl index 2e2c806552f7..c1cd7ccf16b2 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -333,7 +333,7 @@ scenarios are supported """ function setindex!(arr :: NDArray, val :: Real, 
::Colon) @assert(arr.writable) - _set_value(convert(eltype(arr), val), arr) + _set_value(out=arr, src=convert(eltype(arr), val)) return arr end function setindex!{T<:Real}(arr :: NDArray, val :: Array{T}, ::Colon) @@ -948,156 +948,83 @@ Those functions always return the output arguments. If there is only one output object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. """ -function _get_ndarray_functions() - n = Ref{MX_uint}(0) - handles = Ref{Ptr{MX_handle}}(0) - - @mxcall(:MXListFunctions, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n, handles) - - funcs = unsafe_wrap(Array, handles[], n[]) - return funcs -end - -const _function_cache = Dict{Symbol, MX_handle}() -function _get_function(name :: Symbol) - if !haskey(_function_cache, name) - handle = Ref{MX_handle}(0) - - @mxcall(:MXGetFunction, (Cstring, Ref{MX_handle}), name, handle) - _function_cache[name] = handle[] - return handle[] - else - return _function_cache[name] - end -end - -function _get_function_description(handle :: MX_handle) - # get function information (human readable) - ref_name = Ref{char_p}(0) - ref_desc = Ref{char_p}(0) - ref_narg = Ref{MX_uint}(0) - - ref_arg_names = Ref{char_pp}(0) - ref_arg_types = Ref{char_pp}(0) - ref_arg_descs = Ref{char_pp}(0) - - ref_ret_type = Ref{char_p}(0) - - @mxcall(:MXFuncGetInfo, - (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, - Ref{char_pp}, Ref{char_pp}, Ref{char_p}), - handle, ref_name, ref_desc, ref_narg, ref_arg_names, - ref_arg_types, ref_arg_descs, ref_ret_type) - - name = Symbol(unsafe_wrap(String, ref_name[])) - signature = _format_signature(Int(ref_narg[]), ref_arg_names) - desc = " " * string(name) * "(" * signature * ")\n\n" - desc *= unsafe_wrap(String, ref_desc[]) * "\n\n" - desc *= "# Arguments\n" - desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) - return name, desc -end - -function _get_function_expressions(handle :: MX_handle, name) - # get function 
specification - ref_n_use_vars = Ref{MX_uint}(0) - ref_n_scalars = Ref{MX_uint}(0) - ref_n_mut_vars = Ref{MX_uint}(0) - ref_type_mask = Ref{Cint}(0) - @mxcall(:MXFuncDescribe, - (MX_handle, Ref{MX_uint}, Ref{MX_uint}, Ref{MX_uint}, Ref{Cint}), - handle, ref_n_use_vars, ref_n_scalars, ref_n_mut_vars, ref_type_mask) - - n_used_vars = ref_n_use_vars[] - n_scalars = ref_n_scalars[] - n_mutate_vars = ref_n_mut_vars[] - type_mask = ref_type_mask[] - accept_empty_mutate = (type_mask & convert(Cint,ACCEPT_EMPTY_MUTATE_TARGET)) != 0 - arg_before_scalar = (type_mask & convert(Cint,NDARRAY_ARG_BEFORE_SCALAR)) != 0 - - # general ndarray function - if arg_before_scalar - args = vcat([Expr(:(::), Symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), Symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), Symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - else - args = vcat([Expr(:(::), Symbol("sca$i"), Real) for i=1:n_scalars], - [Expr(:(::), Symbol("in$i"), NDArray) for i=1:n_used_vars], - [Expr(:(::), Symbol("out$i"), NDArray) for i=1:n_mutate_vars]) - end - - _use_vars = Expr(:ref, :MX_handle, [Symbol("in$i") for i=1:n_used_vars]...) - _scalars = Expr(:ref, :MX_float, [Symbol("sca$i") for i=1:n_scalars]...) - _mut_vars = Expr(:ref, :MX_handle, [Symbol("out$i") for i=1:n_mutate_vars]...) - - # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped - # See https://github.com/dmlc/MXNet.jl/issues/55 - if name == :dot - _use_vars.args[2:end] = flipdim(_use_vars.args[2:end], 1) - end - - # XXX: hacky way of solving the semantic difference of the axes parameter in Julia - # and in libmxnet. - # See https://github.com/dmlc/MXNet.jl/pull/123 - if name == :transpose - transform = quote - kwargs = Any[key != :axes ? 
(key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] - end - else - transform = :() - end - - stmt_call = quote - local handle = _get_function($(QuoteNode(name))) - _invoke_mxfunction(handle, $_use_vars, $_scalars, $_mut_vars; kwargs...) - end - if n_mutate_vars == 1 - stmt_ret = :(return out1) - else - stmt_ret = Expr(:return, Expr(:tuple, [Symbol("out$i") for i=1:n_mutate_vars]...)) - end +function _get_ndarray_function_def(name :: String) + func_name = Symbol(name) func_def = quote - function $name($(args...); kwargs...) - $transform - $stmt_call - $stmt_ret - end - end + function $func_name(args::NDArray...; out=nothing, kwargs...) + if out != nothing + output_vars = out + if isa(output_vars, NDArray) + output_vars = NDArray[output_vars] + end + num_outputs = length(output_vars) + else + output_vars = NDArray[] + num_outputs = 0 + end + + # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped + # See https://github.com/dmlc/MXNet.jl/issues/55 + if $name == "dot" + args = flipdim(args, 1) + end - if accept_empty_mutate - args0 = args[1:n_used_vars+n_scalars] - _mut_vars0 = [:(NDArray(_ndarray_alloc())) for i=1:n_mutate_vars] + # XXX: hacky way of solving the semantic difference of the axes parameter in Julia + # and in libmxnet. + # See https://github.com/dmlc/MXNet.jl/pull/123 + if $name == "transpose" + kwargs = Any[key != :axes ? (key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] + end - func_def0 = quote - function $name($(args0...); kwargs...) - $name($(args0...), $(_mut_vars0...); kwargs...) 
+ output_handles = [Base.cconvert(MX_handle, x) for x in output_vars] + output_handles_pp = [Base.cconvert(Ptr{MX_handle}, output_handles)] + num_outputs_p = [convert(Cint, num_outputs)] + + kw_keys_str = String[string(x[1]) for x in kwargs] + kw_vals_str = String[string(x[2]) for x in kwargs] + + args = collect(args) # tuple to list + op_handle = _get_cached_libmx_op_handle($(QuoteNode(name))) + @mxcall(:MXImperativeInvoke, + (MX_handle, Cint, Ptr{MX_handle}, + Ptr{Cint}, Ptr{Ptr{MX_handle}}, + Cint, char_pp, char_pp), + op_handle, length(args), args, + num_outputs_p, output_handles_pp, + length(kwargs), kw_keys_str, kw_vals_str) + + if out == nothing + handle_array = unsafe_wrap(Array, output_handles_pp[], num_outputs_p[]) + arrays = [NDArray(hdr) for hdr in handle_array] + if mx_num_outputs == 1 + return arrays[1] + else + return arrays + end + else + return out end end - return func_def, func_def0 - else - return func_def, :() end + + return func_def end macro _import_ndarray_functions() - funcs = _get_ndarray_functions() - func_exprs = Expr[] + names = _get_libmx_op_names() + func_exprs = map(names) do name + op_handle = _get_libmx_op_handle(name) - for i = 1:length(funcs) - handle = funcs[i] - - name, desc = _get_function_description(handle) - func_def, func_def0 = _get_function_expressions(handle, name) + desc, key_narg = _get_libmx_op_description(name, op_handle) + func_def = _get_ndarray_function_def(name) + func_name = Symbol(name) expr = quote - $(isdefined(Base, name) ? :(import Base.$name) : :()) + $(isdefined(Base, func_name) ? 
:(import Base.$func_name) : :()) @doc $desc -> $func_def - $func_def0 end - - push!(func_exprs, expr) end esc(quote diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index dfc54c3c3b1c..c1e6f7d8e8c6 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -629,7 +629,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) else name = "" end - + # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped # See https://github.com/dmlc/MXNet.jl/issues/55 if $func_name_s == "dot" @@ -755,7 +755,7 @@ macro _import_atomic_symbol_creators() end) end -@_import_atomic_symbol_creators() +#@_import_atomic_symbol_creators() ################################################################################ # Utility macros to chain up symbols diff --git a/src/util.jl b/src/util.jl index a53647790c06..11d12e7f9dd2 100644 --- a/src/util.jl +++ b/src/util.jl @@ -62,6 +62,76 @@ end ################################################################################ # Internal Utilities ################################################################################ +function _get_libmx_op_names() + n = Ref{MX_uint}(0) + names = Ref{char_pp}(0) + + @mxcall(:MXListAllOpNames, (Ref{MX_uint}, Ref{char_pp}), n, names) + + names = unsafe_wrap(Array, names[], n[]) + return [unsafe_string(x) for x in names] +end +function _get_libmx_op_handle(name :: String) + handle = Ref{MX_handle}(0) + @mxcall(:NNGetOpHandle, (char_p, Ref{MX_handle}), name, handle) + return MX_OpHandle(handle[]) +end + +# We keep a cache and retrieve the address everytime +# we run Julia, instead of pre-compiling with macro, +# because the actual handle might change in different +# runs +const _libmx_op_cache = Dict{String, MX_OpHandle}() +function _get_cached_libmx_op_handle(name :: String) + if !haskey(_libmx_op_cache, name) + handle = _get_libmx_op_handle(name) + _libmx_op_cache[name] = handle + return handle + else + return _libmx_op_cache[name] + end +end + +function 
_get_libmx_op_description(name :: String, handle :: MX_OpHandle) + # get operator information (human readable) + ref_real_name = Ref{char_p}(0) + ref_desc = Ref{char_p}(0) + ref_narg = Ref{MX_uint}(0) + + ref_arg_names = Ref{char_pp}(0) + ref_arg_types = Ref{char_pp}(0) + ref_arg_descs = Ref{char_pp}(0) + + ref_key_narg = Ref{char_p}(0) + ref_ret_type = Ref{char_p}(0) + + @mxcall(:MXSymbolGetAtomicSymbolInfo, + (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, + Ref{char_pp}, Ref{char_pp}, Ref{char_p}, Ref{char_p}), + handle, ref_real_name, ref_desc, ref_narg, ref_arg_names, + ref_arg_types, ref_arg_descs, ref_key_narg, ref_ret_type) + + real_name = unsafe_string(ref_real_name[]) + signature = _format_signature(Int(ref_narg[]), ref_arg_names) + desc = " " * name * "(" * signature * ")\n\n" + if real_name != name + desc *= name * " is an alias of " * real_name * ".\n\n" + end + + key_narg = unsafe_string(ref_key_narg[]) + if key_narg != "" + desc *= "**Note**: " * name * " takes variable number of positional inputs. 
" + desc *= "So instead of calling as $name([x, y, z], $key_narg=3), " + desc *= "one should call via $name(x, y, z), and $key_narg will be " + desc *= "determined automatically.\n\n" + end + + desc *= unsafe_string(ref_desc[]) * "\n\n" + desc *= "# Arguments\n" + desc *= _format_docstring(Int(ref_narg[]), ref_arg_names, ref_arg_types, ref_arg_descs) + return desc, key_narg +end + function _format_typestring(typestr :: String) replace(typestr, r"\bSymbol\b", "SymbolicNode") end From d16b654b8dc350355bdc794383beac43de35de66 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Sep 2016 20:16:51 -0400 Subject: [PATCH 416/630] ndarray op test --- src/ndarray.jl | 43 ++++++++++++++++++++++++++++------------ test/unittest/ndarray.jl | 19 +++++++++--------- 2 files changed, 40 insertions(+), 22 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index c1cd7ccf16b2..6cd6762488d4 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -155,6 +155,17 @@ function empty(shape :: Int...) empty(shape) end +import Base.similar + +""" + similar(arr :: NDArray) + +Create an `NDArray` with similar shape, data type, and context with the given one. +""" +function similar(arr :: NDArray) + empty(eltype(arr), size(arr), context(arr)) +end + """ zeros(DType, shape :: Tuple, ctx :: Context) zeros(DType, shape :: Tuple) @@ -398,7 +409,7 @@ function copy!(dst :: NDArray, src :: NDArray) return end - _copyto(src, dst) + _copyto(src, out=dst) return dst end @@ -513,9 +524,9 @@ function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) @assert dst.writable for arg in args if isa(arg, Real) - _plus_scalar(dst, convert(eltype(dst), arg), dst) + _plus_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) else - _plus(dst, arg, dst) + _plus(dst, arg, out=dst) end end return dst @@ -553,9 +564,9 @@ Subtract a bunch of arguments from `dst`. Inplace updating. 
function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) - _minus_scalar(dst, convert(eltype(dst), arg), dst) + _minus_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) else - _minus(dst, arg, dst) + _minus(dst, arg, out=dst) end end @@ -586,7 +597,7 @@ function .-(arg0 :: Real, arg1 :: NDArray) end function -(arg0 :: NDArray) - _mul_scalar(arg0, -one(eltype(arg0))) + _mul_scalar(arg0, scalar=-one(eltype(arg0))) end """ @@ -598,9 +609,9 @@ Inplace updating. function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) - _mul_scalar(dst, convert(eltype(dst), arg), dst) + _mul_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) else - _mul(dst, arg, dst) + _mul(dst, arg, out=dst) end return dst end @@ -642,9 +653,9 @@ Elementwise divide a scalar or an `NDArray` of the same shape from `dst`. Inplac function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) @assert dst.writable if isa(arg, Real) - _div_scalar(dst, convert(eltype(dst), arg), dst) + _div_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) else - _div(dst, arg, dst) + _div(dst, arg, out=dst) end end @@ -964,6 +975,8 @@ function _get_ndarray_function_def(name :: String) num_outputs = 0 end + args = collect(args) # tuple to list + # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped # See https://github.com/dmlc/MXNet.jl/issues/55 if $name == "dot" @@ -978,13 +991,16 @@ function _get_ndarray_function_def(name :: String) end output_handles = [Base.cconvert(MX_handle, x) for x in output_vars] - output_handles_pp = [Base.cconvert(Ptr{MX_handle}, output_handles)] + if length(output_handles) > 0 + output_handles_pp = [Base.cconvert(Ptr{MX_handle}, output_handles)] + else + output_handles_pp = [Base.convert(Ptr{MX_handle}, 0)] + end num_outputs_p = [convert(Cint, num_outputs)] kw_keys_str = String[string(x[1]) for x in kwargs] kw_vals_str = String[string(x[2]) for x in kwargs] - 
args = collect(args) # tuple to list op_handle = _get_cached_libmx_op_handle($(QuoteNode(name))) @mxcall(:MXImperativeInvoke, (MX_handle, Cint, Ptr{MX_handle}, @@ -996,8 +1012,9 @@ function _get_ndarray_function_def(name :: String) if out == nothing handle_array = unsafe_wrap(Array, output_handles_pp[], num_outputs_p[]) + handle_array = [MX_NDArrayHandle(x) for x in handle_array] arrays = [NDArray(hdr) for hdr in handle_array] - if mx_num_outputs == 1 + if length(arrays) == 1 return arrays[1] else return arrays diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 8d5a1b0d57a9..6257a150af19 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -33,38 +33,39 @@ end function test_assign() dims = rand_dims() tensor = rand(mx.MX_float, dims) + thresh = 1e-3 info("NDArray::assign::dims = $dims") # Julia Array -> NDArray assignment array = mx.empty(size(tensor)) array[:]= tensor - @test reldiff(tensor, copy(array)) < 1e-6 + @test reldiff(tensor, copy(array)) < thresh array2 = mx.zeros(size(tensor)) - @test reldiff(zeros(size(tensor)), copy(array2)) < 1e-6 + @test reldiff(zeros(size(tensor)), copy(array2)) < thresh array3 = mx.zeros(Float16, size(tensor)) - @test reldiff(zeros(Float16, size(tensor)), copy(array2)) < 1e-6 + @test reldiff(zeros(Float16, size(tensor)), copy(array2)) < thresh # scalar -> NDArray assignment scalar = rand() array2[:] = scalar - @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < 1e-6 + @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < thresh scalar = rand(Float16) array2[:] = scalar - @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < 1e-6 + @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < thresh scalar = rand(Float64) array2[:] = scalar array3[:] = scalar - @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < 1e-6 - @test reldiff(zeros(Float16,size(tensor))+scalar, copy(array3)) < 1e-6 + @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < thresh + @test 
reldiff(zeros(Float16,size(tensor))+scalar, copy(array3)) < thresh # NDArray -> NDArray assignment array[:] = array2 - @test reldiff(zeros(size(tensor))+scalar, copy(array)) < 1e-6 + @test reldiff(zeros(size(tensor))+scalar, copy(array)) < thresh end function test_slice() @@ -235,7 +236,7 @@ function test_clip() j_array, nd_array = rand_tensors(dims) clip_up = maximum(abs(j_array)) / 2 clip_down = 0 - clipped = mx.clip(nd_array, clip_down, clip_up) + clipped = mx.clip(nd_array, a_min=clip_down, a_max=clip_up) # make sure the original array is not modified @test reldiff(copy(nd_array), j_array) < 1e-6 From 03509ae17aa86f01be9c696dd2d1b00cfba44dbd Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Wed, 21 Sep 2016 21:15:16 -0400 Subject: [PATCH 417/630] temp commit, symbolic not passing due to ambiguity --- src/base.jl | 3 +- src/ndarray.jl | 6 ++- src/symbolic-node.jl | 101 +++++++++++---------------------------- test/unittest/ndarray.jl | 2 +- 4 files changed, 34 insertions(+), 78 deletions(-) diff --git a/src/base.jl b/src/base.jl index a864125757cd..588b777177bf 100644 --- a/src/base.jl +++ b/src/base.jl @@ -29,7 +29,8 @@ else end function __init__() - _populate_symbol_creator_cache!() + # TODO: bug in nnvm, if do not call this, call get handle "_copyto" will fail + _get_libmx_op_names() _populate_iter_creator_cache!() atexit() do diff --git a/src/ndarray.jl b/src/ndarray.jl index 6cd6762488d4..e516782da3d2 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -964,6 +964,7 @@ function _get_ndarray_function_def(name :: String) func_def = quote function $func_name(args::NDArray...; out=nothing, kwargs...) 
+ println($name) if out != nothing output_vars = out if isa(output_vars, NDArray) @@ -980,7 +981,7 @@ function _get_ndarray_function_def(name :: String) # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped # See https://github.com/dmlc/MXNet.jl/issues/55 if $name == "dot" - args = flipdim(args, 1) + args = reverse(args) end # XXX: hacky way of solving the semantic difference of the axes parameter in Julia @@ -1001,7 +1002,8 @@ function _get_ndarray_function_def(name :: String) kw_keys_str = String[string(x[1]) for x in kwargs] kw_vals_str = String[string(x[2]) for x in kwargs] - op_handle = _get_cached_libmx_op_handle($(QuoteNode(name))) + #op_handle = _get_cached_libmx_op_handle($(QuoteNode(name))) + op_handle = _get_cached_libmx_op_handle($(name)) @mxcall(:MXImperativeInvoke, (MX_handle, Cint, Ptr{MX_handle}, Ptr{Cint}, Ptr{Ptr{MX_handle}}, diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index c1e6f7d8e8c6..5aedd3f6e439 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -587,39 +587,14 @@ end ################################################################################ # Atomic SymbolicNode functions dynamically imported from libmxnet ################################################################################ -function _define_atomic_symbol_creator(hdr :: MX_handle) - ref_name = Ref{char_p}(0) - ref_desc = Ref{char_p}(0) - ref_kv_nargs = Ref{char_p}(0) - ref_nargs = Ref{MX_uint}(0) - ref_arg_names = Ref{char_pp}(0) - ref_arg_types = Ref{char_pp}(0) - ref_arg_descs = Ref{char_pp}(0) - ref_ret_type = Ref{char_p}(0) - - @mxcall(:MXSymbolGetAtomicSymbolInfo, - (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, - Ref{char_pp}, Ref{char_p}, Ref{char_p}), - hdr, ref_name, ref_desc, ref_nargs, ref_arg_names, ref_arg_types, ref_arg_descs, - ref_kv_nargs, ref_ret_type) - - func_name_s = unsafe_wrap(String, ref_name[]) - func_name = Symbol(func_name_s) - kv_nargs_s = 
unsafe_wrap(String, ref_kv_nargs[]) - kv_nargs = Symbol(kv_nargs_s) - - signature = _format_signature(Int(ref_nargs[]), ref_arg_names) - f_desc = " " * func_name_s * "(" * signature * ")\n\n" - f_desc *= unsafe_wrap(String, ref_desc[]) * "\n\n" - if !isempty(kv_nargs_s) - f_desc *= "This function support variable length positional `SymbolicNode` inputs.\n\n" - end - f_desc *= "# Arguments\n" - f_desc *= _format_docstring(Int(ref_nargs[]), ref_arg_names, ref_arg_types, ref_arg_descs) +function _define_atomic_symbol_creator(name :: String) + handle = _get_libmx_op_handle(name) + f_desc, key_narg = _get_libmx_op_description(name, handle) + f_desc *= "* `name::Symbol`: The name of the `SymbolicNode`. (e.g. `:my_symbol`), optional.\n" f_desc *= "* `attrs::Dict{Symbol, AbstractString}`: The attributes associated with this `SymbolicNode`.\n\n" - f_desc *= "Returns `$(_format_typestring(unsafe_wrap(String, ref_ret_type[])))`." + func_name = Symbol(name) func_def = quote @doc $f_desc -> function $func_name(args::SymbolicNode...; kwargs...) @@ -632,14 +607,14 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped # See https://github.com/dmlc/MXNet.jl/issues/55 - if $func_name_s == "dot" + if $name == "dot" args = reverse(args) end # XXX: hacky way of solving the semantic difference of the axes parameter in Julia # and in libmxnet. # See https://github.com/dmlc/MXNet.jl/pull/123 - if $func_name_s == "transpose" + if $name == "transpose" kwargs = Any[key != :axes ? 
(key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] end @@ -648,10 +623,10 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) symbol_kws = Dict{Symbol, SymbolicNode}() attrs = Dict{Symbol, String}() - $(if kv_nargs != Symbol("") + $(if key_narg != "" quote - if !in($kv_nargs_s, param_keys) - push!(param_keys, $kv_nargs_s) + if !in(Symbol($key_narg), param_keys) + push!(param_keys, Symbol($key_narg)) push!(param_vals, string(length(args))) end end @@ -674,18 +649,18 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) end if length(args) != 0 && length(symbol_kws) != 0 - @assert(false, $func_name_s * " only accepts Symbols either as positional or keyword arguments, not both.") + @assert(false, $name * " only accepts SymbolicNode either as positional or keyword arguments, not both.") end - $(if kv_nargs != Symbol("") + $(if key_narg != "" quote if length(symbol_kws) > 0 - @assert(false, $func_name_s * " takes variable number of SymbolicNode arguments, " * + @assert(false, $name * " takes variable number of SymbolicNode arguments, " * "please pass input Symbols via positional arguments, instead of keyword arguments.") end end end) - local hdr = _get_symbol_creator($(QuoteNode(func_name))) + local hdr = _get_cached_libmx_op_handle($name) # create the SymbolicNode ref_sym_hdr = Ref{MX_handle}() @@ -695,7 +670,7 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) sym_hdr = ref_sym_hdr[] node = SymbolicNode(MX_SymbolHandle(sym_hdr)) - hint = lowercase($func_name_s) + hint = lowercase($name) name = get!(DEFAULT_NAME_MANAGER, name, hint) # set attrs @@ -715,47 +690,25 @@ function _define_atomic_symbol_creator(hdr :: MX_handle) return func_def end -function _get_atomic_symbol_creators() - n_ref = Ref{MX_uint}(0) - h_ref = Ref{Ptr{MX_handle}}(0) - @mxcall(:MXSymbolListAtomicSymbolCreators, (Ref{MX_uint}, Ref{Ptr{MX_handle}}), n_ref, h_ref) - - return unsafe_wrap(Array, h_ref[], n_ref[]) -end - -function 
_get_atomic_symbol_name(handle :: MX_handle) - name_r = Ref{char_p}(0) - @mxcall(:MXSymbolGetAtomicSymbolName, (MX_handle, Ref{char_p}), handle, name_r) - return unsafe_wrap(String, name_r[]) -end - -const _symbol_creator_cache = Dict{Symbol, MX_handle}() -function _populate_symbol_creator_cache!() - empty!(_symbol_creator_cache) - h_creators = _get_atomic_symbol_creators() - for handle in h_creators - name = Symbol(_get_atomic_symbol_name(handle)) - _symbol_creator_cache[name] = handle - end -end - -_get_symbol_creator(name :: Symbol) = _symbol_creator_cache[name] - macro _import_atomic_symbol_creators() - h_creators = _get_atomic_symbol_creators() - - exprs = Expr[] - for creator_hdr in h_creators - expr = _define_atomic_symbol_creator(creator_hdr) - push!(exprs, expr) + # XXX: those are operators defined for NDArray, we exclude them here + # because the calling convention for the type signature is not strong + # enough to disambiguate the method for NDArray and SymbolicNode + const ignored_ops = ["_set_value"] + + names = _get_libmx_op_names() + func_exprs = map(names) do name + if name ∉ ignored_ops + expr = _define_atomic_symbol_creator(name) + end end esc(quote - $(exprs...) + $(func_exprs...) 
end) end -#@_import_atomic_symbol_creators() +@_import_atomic_symbol_creators() ################################################################################ # Utility macros to chain up symbols diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 6257a150af19..abc8d646fc7a 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -314,8 +314,8 @@ end ################################################################################ # Run tests ################################################################################ -test_copy() test_assign() +test_copy() test_slice() test_plus() test_minus() From 623fbaf7e073e9bcb4791df2523715855392f337 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Thu, 22 Sep 2016 16:16:35 -0400 Subject: [PATCH 418/630] disambiguate nd op and sym op --- src/ndarray.jl | 16 +++++++++++----- src/symbolic-node.jl | 20 +++++++++++++++----- test/common.jl | 6 +++--- test/unittest/symbolic-node.jl | 21 ++++++++++----------- 4 files changed, 39 insertions(+), 24 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index e516782da3d2..f32180a95e9c 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -963,8 +963,7 @@ function _get_ndarray_function_def(name :: String) func_name = Symbol(name) func_def = quote - function $func_name(args::NDArray...; out=nothing, kwargs...) - println($name) + function $func_name(::Type{NDArray}, args::NDArray...; out=nothing, kwargs...) if out != nothing output_vars = out if isa(output_vars, NDArray) @@ -1027,7 +1026,13 @@ function _get_ndarray_function_def(name :: String) end end - return func_def + func_def2 = quote + function $func_name(args::NDArray...; out=nothing, kwargs...) + $func_name(NDArray, args...; out=out, kwargs...) 
+ end + end + + return func_def, func_def2 end macro _import_ndarray_functions() @@ -1036,13 +1041,14 @@ macro _import_ndarray_functions() op_handle = _get_libmx_op_handle(name) desc, key_narg = _get_libmx_op_description(name, op_handle) - func_def = _get_ndarray_function_def(name) + func_def, func_def2 = _get_ndarray_function_def(name) func_name = Symbol(name) expr = quote $(isdefined(Base, func_name) ? :(import Base.$func_name) : :()) - @doc $desc -> $func_def + @doc $desc -> + $func_def2 end end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 5aedd3f6e439..1612f0c57771 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -596,8 +596,7 @@ function _define_atomic_symbol_creator(name :: String) func_name = Symbol(name) func_def = quote - @doc $f_desc -> - function $func_name(args::SymbolicNode...; kwargs...) + function $func_name(::Type{SymbolicNode}, args::SymbolicNode...; kwargs...) idx = findfirst(x -> x[1] == :name, kwargs) if idx > 0 name = kwargs[idx][2] @@ -687,7 +686,18 @@ function _define_atomic_symbol_creator(name :: String) return node end # function end # quote - return func_def + + func_def2 = quote + @doc $f_desc -> + function $func_name(args::SymbolicNode...; kwargs...) + $func_name(SymbolicNode, args...; kwargs...) 
+ end # function + end # quote + + return quote + $func_def + $func_def2 + end end macro _import_atomic_symbol_creators() @@ -696,8 +706,8 @@ macro _import_atomic_symbol_creators() # enough to disambiguate the method for NDArray and SymbolicNode const ignored_ops = ["_set_value"] - names = _get_libmx_op_names() - func_exprs = map(names) do name + op_names = _get_libmx_op_names() + func_exprs = map(op_names) do name if name ∉ ignored_ops expr = _define_atomic_symbol_creator(name) end diff --git a/test/common.jl b/test/common.jl index fc4c4f63649e..a394acf95a93 100644 --- a/test/common.jl +++ b/test/common.jl @@ -13,9 +13,9 @@ end function mlp2() data = mx.Variable(:data) - out = mx.FullyConnected(data=data, name=:fc1, num_hidden=1000) - out = mx.Activation(data=out, act_type=:relu) - out = mx.FullyConnected(data=out, name=:fc2, num_hidden=10) + out = mx.FullyConnected(data, name=:fc1, num_hidden=1000) + out = mx.Activation(out, act_type=:relu) + out = mx.FullyConnected(out, name=:fc2, num_hidden=10) return out end diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index d78b0775a983..ca2986d4a377 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -20,8 +20,8 @@ function test_internal() info("SymbolicNode::internal") data = mx.Variable(:data) - oldfc = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) - net1 = mx.FullyConnected(data=oldfc, name=:fc2, num_hidden=100) + oldfc = mx.FullyConnected(data, name=:fc1, num_hidden=10) + net1 = mx.FullyConnected(oldfc, name=:fc2, num_hidden=100) @test mx.list_arguments(net1) == [:data,:fc1_weight,:fc1_bias,:fc2_weight,:fc2_bias] @@ -34,12 +34,12 @@ function test_compose() info("SymbolicNode::compose") data = mx.Variable(:data) - net1 = mx.FullyConnected(data=data, name=:fc1, num_hidden=10) - net1 = mx.FullyConnected(data=net1, name=:fc2, num_hidden=100) + net1 = mx.FullyConnected(data, name=:fc1, num_hidden=10) + net1 = mx.FullyConnected(net1, name=:fc2, 
num_hidden=100) - net2 = mx.FullyConnected(name=:fc3, num_hidden=10) - net2 = mx.Activation(data=net2, act_type=:relu) - net2 = mx.FullyConnected(data=net2, name=:fc4, num_hidden=20) + net2 = mx.FullyConnected(mx.SymbolicNode, name=:fc3, num_hidden=10) + net2 = mx.Activation(net2, act_type=:relu) + net2 = mx.FullyConnected(net2, name=:fc4, num_hidden=20) composed = net2(fc3_data=net1, name=:composed) multi_out = mx.Group(composed, net1) @@ -96,14 +96,13 @@ function test_attrs() data2 = mx.Variable(:data2, attrs = Dict(:test => "hallo!")) @test get(mx.get_attr(data2, :test)) == "hallo!" - conv = mx.Convolution(data = data2, kernel = (1,1), num_filter = 1, attrs = Dict(:a => "a", :π => "π")) + conv = mx.Convolution(data2, kernel = (1,1), num_filter = 1, attrs = Dict(:a => "a", :π => "π")) @test isnull(mx.get_attr(conv, :b)) @test get(mx.get_attr(conv, :a)) == "a" @test get(mx.get_attr(conv, :π)) == "π" - @test mx.list_attr(conv) == Dict(:a => "a", :π => "π") @test_throws MethodError mx.Variable(:data3, attrs = Dict(:test => "1.0", :test2 => 1.0)) - @test_throws MethodError mx.Convolution(data=data2, kernel = (1,1), num_filter = 1, attrs = Dict(:test => "1.0", :test2 => 1.0)) + @test_throws MethodError mx.Convolution(data2, kernel = (1,1), num_filter = 1, attrs = Dict(:test => "1.0", :test2 => 1.0)) end function test_functions() @@ -117,7 +116,7 @@ function test_dot() x = mx.Variable(:x) y = mx.Variable(:y) z = mx.dot(x, y) - z_exec = mx.bind(z, context=mx.cpu(), + z_exec = mx.bind(z, context=mx.cpu(), args=Dict(:x=>mx.ones((100, 2)), :y=>mx.ones((2, 200)))) mx.forward(z_exec) From d718cfc81b174c69cf06d6bd02d2aa18bcf0a3f6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 23 Sep 2016 10:33:46 -0400 Subject: [PATCH 419/630] fix API changes in random --- src/random.jl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/random.jl b/src/random.jl index 79a8b6e9e20b..b5b53def8f54 100644 --- a/src/random.jl +++ b/src/random.jl @@ -1,5 +1,6 @@ 
function rand!(low::Real, high::Real, out::NDArray) - _random_uniform(low, high, out) + # XXX: note we reverse shape because julia and libmx has different dim order + _sample_uniform(NDArray, low=low, high=high, shape=reverse(size(out)), out=out) end function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}) rand(low, high, shape, cpu()) @@ -10,7 +11,8 @@ function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context) end function randn!(mean::Real, stdvar::Real, out::NDArray) - _random_gaussian(mean, stdvar, out) + # XXX: note we reverse shape because julia and libmx has different dim order + _sample_normal(NDArray, loc=mean, scale=stdvar, shape=reverse(size(out)), out=out) end function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}) randn(mean, stdvar, shape, cpu()) From 590055b3f403e4d875e567594db4c0b75552a6c6 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Mon, 26 Sep 2016 15:11:33 -0400 Subject: [PATCH 420/630] change examples for new symbolic calling conventions (nnvm, #146) --- examples/char-lstm/lstm.jl | 8 ++++---- examples/cifar10/cifar10.jl | 16 ++++++++-------- examples/mnist/lenet.jl | 10 +++++----- examples/mnist/mlp.jl | 12 ++++++------ 4 files changed, 23 insertions(+), 23 deletions(-) diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index e895d8389e74..e98778b65272 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -25,9 +25,9 @@ function lstm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMPara data = mx.Dropout(data, p=dropout) end - i2h = mx.FullyConnected(data=data, weight=param.i2h_W, bias=param.i2h_b, + i2h = mx.FullyConnected(data, weight=param.i2h_W, bias=param.i2h_b, num_hidden=4num_hidden, name=symbol(name, "_i2h")) - h2h = mx.FullyConnected(data=prev_state.h, weight=param.h2h_W, bias=param.h2h_b, + h2h = mx.FullyConnected(prev_state.h, weight=param.h2h_W, bias=param.h2h_b, num_hidden=4num_hidden, name=symbol(name, "_h2h")) gates = mx.SliceChannel(i2h 
+ h2h, num_outputs=4, name=symbol(name, "_gates")) @@ -71,7 +71,7 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla for t = 1:seq_len data = mx.Variable(symbol(name, "_data_$t")) label = mx.Variable(symbol(name, "_label_$t")) - hidden = mx.FullyConnected(data=data, weight=embed_W, num_hidden=dim_embed, + hidden = mx.FullyConnected(data, weight=embed_W, num_hidden=dim_embed, no_bias=true, name=symbol(name, "_embed_$t")) # stack LSTM cells @@ -88,7 +88,7 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla if dropout > 0 hidden = mx.Dropout(hidden, p=dropout) end - pred = mx.FullyConnected(data=hidden, weight=pred_W, bias=pred_b, num_hidden=n_class, + pred = mx.FullyConnected(hidden, weight=pred_W, bias=pred_b, num_hidden=n_class, name=symbol(name, "_pred_$t")) smax = mx.SoftmaxOutput(pred, label, name=symbol(name, "_softmax_$t")) push!(outputs, smax) diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index e5ff751eca7d..165ee1934568 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -5,9 +5,9 @@ using MXNet # basic Conv + BN + ReLU factory function conv_factory(data, num_filter, kernel; stride=(1,1), pad=(0,0), act_type=:relu) - conv = mx.Convolution(data=data, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad) - bn = mx.BatchNorm(data=conv) - act = mx.Activation(data=bn, act_type=act_type) + conv = mx.Convolution(data, num_filter=num_filter, kernel=kernel, stride=stride, pad=pad) + bn = mx.BatchNorm(conv) + act = mx.Activation(bn, act_type=act_type) return act end @@ -16,7 +16,7 @@ function downsample_factory(data, ch_3x3) # conv 3x3 conv = conv_factory(data, ch_3x3, (3,3), stride=(2,2), pad=(1,1)) # pool - pool = mx.Pooling(data=data, kernel=(3,3), stride=(2,2), pool_type=:max) + pool = mx.Pooling(data, kernel=(3,3), stride=(2,2), pool_type=:max) # concat concat = mx.Concat(conv, pool) return concat @@ -48,10 +48,10 @@ in4d = 
simple_factory(in4b, 48, 96) in4e = downsample_factory(in4d, 96) in5a = simple_factory(in4e, 176, 160) in5b = simple_factory(in5a, 176, 160) -pool = mx.Pooling(data=in5b, pool_type=:avg, kernel=(7,7), name=:global_pool) -flatten = mx.Flatten(data=pool, name=:flatten1) -fc = mx.FullyConnected(data=flatten, num_hidden=10, name=:fc1) -softmax = mx.SoftmaxOutput(data=fc, name=:loss) +pool = mx.Pooling(in5b, pool_type=:avg, kernel=(7,7), name=:global_pool) +flatten = mx.Flatten(pool, name=:flatten1) +fc = mx.FullyConnected(flatten, num_hidden=10, name=:fc1) +softmax = mx.SoftmaxOutput(fc, name=:loss) #-------------------------------------------------------------------------------- diff --git a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl index ca48e6693213..af3e8c41dc71 100644 --- a/examples/mnist/lenet.jl +++ b/examples/mnist/lenet.jl @@ -7,25 +7,25 @@ using MXNet data = mx.Variable(:data) # first conv -conv1 = @mx.chain mx.Convolution(data=data, kernel=(5,5), num_filter=20) => +conv1 = @mx.chain mx.Convolution(data, kernel=(5,5), num_filter=20) => mx.Activation(act_type=:tanh) => mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) # second conv -conv2 = @mx.chain mx.Convolution(data=conv1, kernel=(5,5), num_filter=50) => +conv2 = @mx.chain mx.Convolution(conv1, kernel=(5,5), num_filter=50) => mx.Activation(act_type=:tanh) => mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) # first fully-connected -fc1 = @mx.chain mx.Flatten(data=conv2) => +fc1 = @mx.chain mx.Flatten(conv2) => mx.FullyConnected(num_hidden=500) => mx.Activation(act_type=:tanh) # second fully-connected -fc2 = mx.FullyConnected(data=fc1, num_hidden=10) +fc2 = mx.FullyConnected(fc1, num_hidden=10) # softmax loss -lenet = mx.SoftmaxOutput(data=fc2, name=:softmax) +lenet = mx.SoftmaxOutput(fc2, name=:softmax) #-------------------------------------------------------------------------------- diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index cdb0064da8e5..3f713654d5b9 100644 --- 
a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -6,12 +6,12 @@ using MXNet #-- Option 1: explicit composition # data = mx.Variable(:data) -# fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) -# act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) -# fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) -# act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) -# fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) -# mlp = mx.SoftmaxOutput(data = fc3, name=:softmax) +# fc1 = mx.FullyConnected(data, name=:fc1, num_hidden=128) +# act1 = mx.Activation(fc1, name=:relu1, act_type=:relu) +# fc2 = mx.FullyConnected(act1, name=:fc2, num_hidden=64) +# act2 = mx.Activation(fc2, name=:relu2, act_type=:relu) +# fc3 = mx.FullyConnected(act2, name=:fc3, num_hidden=10) +# mlp = mx.SoftmaxOutput(fc3, name=:softmax) #-- Option 2: using the mx.chain macro # mlp = @mx.chain mx.Variable(:data) => From 286d751eb29ccc69c581b5dcf76466a09efd1e61 Mon Sep 17 00:00:00 2001 From: Michael Creel Date: Fri, 30 Sep 2016 12:54:32 +0200 Subject: [PATCH 421/630] Add files via upload simple MLP for regression, illustrates data provision from memory, and how to obtain fits --- examples/regression-example.jl | 62 ++++++++++++++++++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 examples/regression-example.jl diff --git a/examples/regression-example.jl b/examples/regression-example.jl new file mode 100644 index 000000000000..645c6325b066 --- /dev/null +++ b/examples/regression-example.jl @@ -0,0 +1,62 @@ +#= +This script shows how a simple MLP net may be used +for regression. It shows how data in memory may be +used for training and evaluation, and how to obtain +the predictions from the trained net. 
+ +TO DO: + * specify batch size, and allow different sizes + for the training and evaluation sets + * tanh activation does not seem to work properly, + investigate +=# +using MXNet +using Distributions +using PyPlot + +# data generating process for exogenous inputs +generate_inputs(media, var, tam) = rand(MvNormal(media, var), tam) + +# function that maps inputs to outputs +f1(data) = sin(data[1,:]).*sin(data[2,:])./(data[1,:].*data[2,:]) + +# parameters for input d.g.p. +mean=[0.0;0.0] +var=[1.0 0.0;0.0 1.0] + +# create training and evaluation data sets +TrainInput = generate_inputs(mean, var, 5000) +TrainOutput = f1(TrainInput) +ValidationInput = generate_inputs(mean, var, 5000) +ValidationOutput = f1(ValidationInput) + +# how to set up data providers using data in memory +trainprovider = mx.ArrayDataProvider(:data => TrainInput, :label => TrainOutput) +evalprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput) + +# create a single hidden layer MPL +data = mx.Variable(:data) +label = mx.Variable(:label) +fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=20) +act1 = mx.Activation(data = fc1, name=:relu, act_type=:relu) +fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=1) + +# cost is squared error loss +cost = mx.LinearRegressionOutput(data=fc2, label=label, name = :loss) + +# final model definition +model = mx.FeedForward(cost, context=mx.cpu()) + +# set up the optimizer +optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) + +# train, reporting loss for training and evaluation sets +mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 1000) + +# obtain predictions +fit = mx.predict(model, evalprovider) +plot(ValidationOutput,fit',".") +xlabel("true") +ylabel("predicted") +title("outputs: true versus predicted. 
45º line is what we hope for") + From 8453c0a23681eae0ebdb1fb1bf961372963c9dde Mon Sep 17 00:00:00 2001 From: Michael Creel Date: Tue, 4 Oct 2016 19:53:42 +0200 Subject: [PATCH 422/630] Update regression-example.jl (#148) adds batch size, selection of optimizers, possibility for initialization --- examples/regression-example.jl | 65 +++++++++++++++++----------------- 1 file changed, 33 insertions(+), 32 deletions(-) diff --git a/examples/regression-example.jl b/examples/regression-example.jl index 645c6325b066..8c949f9b2fac 100644 --- a/examples/regression-example.jl +++ b/examples/regression-example.jl @@ -3,60 +3,61 @@ This script shows how a simple MLP net may be used for regression. It shows how data in memory may be used for training and evaluation, and how to obtain the predictions from the trained net. - -TO DO: - * specify batch size, and allow different sizes - for the training and evaluation sets - * tanh activation does not seem to work properly, - investigate =# using MXNet using Distributions using PyPlot -# data generating process for exogenous inputs -generate_inputs(media, var, tam) = rand(MvNormal(media, var), tam) - -# function that maps inputs to outputs -f1(data) = sin(data[1,:]).*sin(data[2,:])./(data[1,:].*data[2,:]) +# data generating process +generate_inputs(mean, var, size) = rand(MvNormal(mean, var), size) +output(data) = sin(data[1,:]).*sin(data[2,:])./(data[1,:].*data[2,:]) -# parameters for input d.g.p. 
+# create training and evaluation data sets mean=[0.0;0.0] var=[1.0 0.0;0.0 1.0] - -# create training and evaluation data sets -TrainInput = generate_inputs(mean, var, 5000) -TrainOutput = f1(TrainInput) -ValidationInput = generate_inputs(mean, var, 5000) -ValidationOutput = f1(ValidationInput) +samplesize = 5000 +TrainInput = generate_inputs(mean, var, samplesize) +TrainOutput = output(TrainInput) +ValidationInput = generate_inputs(mean, var, samplesize) +ValidationOutput = output(ValidationInput) # how to set up data providers using data in memory -trainprovider = mx.ArrayDataProvider(:data => TrainInput, :label => TrainOutput) -evalprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput) +batchsize = 100 # can adjust this later, but must be defined now for next line +trainprovider = mx.ArrayDataProvider(:data => TrainInput, batch_size=batchsize, shuffle=true, :label => TrainOutput) +evalprovider = mx.ArrayDataProvider(:data => ValidationInput, batch_size=batchsize, shuffle=true, :label => ValidationOutput) -# create a single hidden layer MPL +# create a two hidden layer MPL: try varying num_hidden, and change tanh to relu, +# or add/remove a layer data = mx.Variable(:data) label = mx.Variable(:label) -fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=20) -act1 = mx.Activation(data = fc1, name=:relu, act_type=:relu) -fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=1) +net = @mx.chain mx.FullyConnected(data = data, num_hidden=10) => + mx.Activation(act_type=:tanh) => + mx.FullyConnected(num_hidden=3) => + mx.Activation(act_type=:tanh) => + mx.FullyConnected(num_hidden=1) -# cost is squared error loss -cost = mx.LinearRegressionOutput(data=fc2, label=label, name = :loss) +# squared error loss is appropriate for regression, don't change +cost = mx.LinearRegressionOutput(data = net, label=label) -# final model definition +# final model definition, don't change, except if using gpu model = mx.FeedForward(cost, 
context=mx.cpu()) -# set up the optimizer -optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) +# set up the optimizer: select one, explore parameters, if desired +#optimizer = mx.SGD(lr=0.01, momentum=0.9, weight_decay=0.00001) +optimizer = mx.ADAM() # train, reporting loss for training and evaluation sets -mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 1000) +# initial training with small batch size, to get to a good neighborhood +batchsize = 100 +mx.fit(model, optimizer, initializer=mx.NormalInitializer(0.0,0.1), eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 500) +# more training with the full sample +batchsize = samplesize +mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 500) # obtain predictions -fit = mx.predict(model, evalprovider) +plotprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput) +fit = mx.predict(model, plotprovider) plot(ValidationOutput,fit',".") xlabel("true") ylabel("predicted") title("outputs: true versus predicted. 45º line is what we hope for") - From 4664f3b944f1c298e5e3b182c6d86ac50d22713c Mon Sep 17 00:00:00 2001 From: Ranjan Anantharaman Date: Thu, 6 Oct 2016 20:45:59 +0530 Subject: [PATCH 423/630] Add some docs to `eachbatch` (#149) --- src/io.jl | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/io.jl b/src/io.jl index 0c5de69e53f8..da3ba52cecff 100644 --- a/src/io.jl +++ b/src/io.jl @@ -212,6 +212,17 @@ function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name error("$name is not provided by this data provider") end +""" + eachbatch(provider::AbstractDataProvider) + +Allows you to perform operations on data every epoch. This is especially useful +when you need to perform real-time augmentation of the data. + +# Arguments: +* `provider`: an instance of the custom DataProvider type. 
You must return this +instance after modifying its fields. + +""" eachbatch(provider :: AbstractDataProvider) = provider """ From eee5ca73ed1042f058730a2d92ef952e3bf416bf Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Mon, 24 Oct 2016 11:15:52 +0900 Subject: [PATCH 424/630] fix missing wget #150 --- src/util.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util.jl b/src/util.jl index a53647790c06..89a2e508f993 100644 --- a/src/util.jl +++ b/src/util.jl @@ -41,7 +41,7 @@ function get_cifar10() filenames = Dict([k => joinpath(cifar10_dir, v) for (k,v) in filenames]) if !all(isfile, values(filenames)) cd(cifar10_dir) do - run(`http://data.dmlc.ml/mxnet/data/cifar10.zip`) + download("http://data.dmlc.ml/mxnet/data/cifar10.zip", "cifar10.zip") try run(`unzip -u cifar10.zip`) catch From d1eb31833c3980efb622414c37f5046f54177876 Mon Sep 17 00:00:00 2001 From: jocklawrie Date: Thu, 8 Dec 2016 12:25:14 +1100 Subject: [PATCH 425/630] Bug fix updating the ACE loss function. Included a test case. Also provided an example. --- examples/nondefault-example.jl | 130 +++++++++++++++++++++++++++++++++ src/metric.jl | 5 +- test/unittest/metric.jl | 62 ++++++++++++++++ 3 files changed, 194 insertions(+), 3 deletions(-) create mode 100644 examples/nondefault-example.jl create mode 100644 test/unittest/metric.jl diff --git a/examples/nondefault-example.jl b/examples/nondefault-example.jl new file mode 100644 index 000000000000..b93887850ca3 --- /dev/null +++ b/examples/nondefault-example.jl @@ -0,0 +1,130 @@ +#= + Contents: This file contains code for: + - Setting the initial values of the biases and weights equal to the final values of a previous run. + This is helpful for re-estimating a model on updated training data, where the original and updated training data largely overlap. + - Changing the loss function (in our example from Accuracy to ACE) + + Notes: + 1. The model is a toy example with 4 outcomes (categories). 
+ The model is a poor fit to the data, but this is unimportant. The point of the example is to demonstrate the use of some non-default settings. + 2. For categorical outcomes, use 0-based categories! Some of the loss functions assume this, such as ACE. + 3. Incomplete batches are padded with repeated instances of an artificial observation. + This is bad because the artificial data is over-represented and thus biases the results. + The ideal solution is to distribute the observations from the incomplete batch among the complete batches. + This would result in batches of variable but similar size, and thus the estimate of the gradient would not be significantly affected. + But this doesn't happen. + For simplicity we instead drop these extra observations, so that the number of observations in the data set is a multiple of the batch_size. +=# + + +using RDatasets +using MXNet + + +################################################################################ +### Data: Exam scores discretised into 4 categories (use zero-based categories!). 
+df = dataset("mlmRev", "Gcsemv"); # 1905 x 5 +complete_cases!(df) # 1523 x 5 +n = nrow(df) +df[:written] = zeros(Int, n) +df[:course] = zeros(Int, n) +for i = 1:n + # Categorise :Written + if df[i, :Written] <= 20.0 + df[i, :written] = 0 + elseif df[i, :Written] <= 40.0 + df[i, :written] = 1 + elseif df[i, :Written] <= 60.0 + df[i, :written] = 2 + else + df[i, :written] = 3 + end + + # Categorise :Course + if df[i, :Course] <= 25.0 + df[i, :course] = 0 + elseif df[i, :Course] <= 50.0 + df[i, :course] = 1 + elseif df[i, :Course] <= 75.0 + df[i, :course] = 2 + else + df[i, :course] = 3 + end +end +df = df[1:1500, :] # Ensure nrows is a multiple of batch_size (100 in our example, see below) + +x = convert(Vector{Float64}, df[:course]) +y = convert(Vector{Float64}, df[:written]) + + +################################################################################ +### Hyperparameters + +# Architecture +mlp = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name = :h1, num_hidden = 10) => + mx.Activation(name = :h1_out, act_type = :sigmoid) => + mx.FullyConnected(name = :out, num_hidden = 4) => + mx.SoftmaxOutput(name = :softmax) + +# Hyperparameters +n_epoch = 100 +batch_size = 100 +learn_rate = 0.1 +mom = 0.9 +wt_decay = 0.00001 + + +# Connect data, network architecture and hyperparameters +train_prov = mx.ArrayDataProvider(x, y; batch_size = batch_size) +eval_prov = mx.ArrayDataProvider(x, y; batch_size = batch_size) +opt = mx.SGD(lr = learn_rate, momentum = mom, weight_decay = wt_decay) # Optimizing algorithm + +################################################################################ +### Run 1: Basic run, storing initial and final state. 
+ +# Learn +mdl1 = mx.FeedForward(mlp, context = mx.cpu()) # Model targets the local CPU +cb = mx.do_checkpoint("first", frequency = n_epoch, save_epoch_0 = true) # Write initial and final states to disk +mx.fit(mdl1, opt, train_prov, n_epoch = n_epoch, eval_data = eval_prov, callbacks = [cb]) # Random initial biases and weights + + +################################################################################ +### Run 2: Load the previously trained model and run it some more, starting where Run 1 finished. + +# Load final state of 1st run from disk +arch, arg_params, aux_params = mx.load_checkpoint("first", 100) # arch is the network structure, arg_params contains the weights and biases +mdl2 = mx.FeedForward(arch, context = mx.cpu()) # Only populates the arch and ctx fields +mdl2.arg_params = arg_params # Populate the arg_params fields +cb = mx.do_checkpoint("second", frequency = n_epoch, save_epoch_0 = true) +mx.fit(mdl2, opt, train_prov, n_epoch = n_epoch, eval_data = eval_prov, callbacks = [cb]) + +# Test whether the final state of 1st run equals the initial state of 2nd run +run(`diff first-0100.params second-0000.params`) # Throws error if not true, does nothing otherwise + + +#= + # Other useful functions + arch = mx.load("first-symbol.json", mx.SymbolicNode) + arg_params = mx.load("first-0100.params", mx.NDArray) +=# + + +################################################################################ +### Run 3: Change the loss function from the default Accuracy to ACE + +mdl3 = mx.FeedForward(mlp, context = mx.cpu()) +mx.fit(mdl3, opt, train_prov, n_epoch = n_epoch, eval_data = eval_prov, eval_metric = mx.ACE()) +#mx.fit(mdl3, opt, train_prov, n_epoch = n_epoch, eval_data = eval_prov, eval_metric = mx.Accuracy()) # Default eval_metric +#mx.fit(mdl3, opt, train_prov, n_epoch = n_epoch, eval_data = eval_prov, eval_metric = mx.MultiACE(4)) + +# Test manually +probs = mx.predict(mdl3, eval_prov) +LL = 0.0 +for i = 1:size(y, 1) + LL += log(probs[Int(y[i]) 
+ 1, i]) +end +-LL / size(y, 1) # Should equal the value of ACE from the final iteration of fit(mdl3, ...) + + +# EOF diff --git a/src/metric.jl b/src/metric.jl index 4ae66acf25b7..dc3c3aef452a 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -213,15 +213,14 @@ function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) # Since we can only target labels right now this is the only thing we can do. target = Int(labels[i, j, 1, sample]) + 1 # klasses are 0...k-1 => julia indexing p_k = pred[i, j, target, sample] - metric.ace_sum += log(p_k) metric.n_sample += 1 end end end elseif ndims(pred) == 2 # 1-dimensional case - for sample in 1:size(labels, 1) - target = Int(labels[sample]) + 1 + for sample in 1:size(label, 1) + target = Int(label[sample]) + 1 # 0-based indexing => 1-based indexing p_k = pred[target, sample] metric.ace_sum += log(p_k) metric.n_sample += 1 diff --git a/test/unittest/metric.jl b/test/unittest/metric.jl new file mode 100644 index 000000000000..f0a79d6ce994 --- /dev/null +++ b/test/unittest/metric.jl @@ -0,0 +1,62 @@ +module TestMetric + +using MXNet +using Base.Test + +################################################################################ +# Supporting functions +################################################################################ + +""" +Returns a random n x m array in which each column defines a discrete probability distribution. +Each column contains numbers between 0 and 1, and each column sums to 1. 
+""" +function generate_probs(n, m) + # Init + result = rand(n, m) + + # Normalize: ensure each column sums to 1 + for j = 1:m + colsum = sum(result[:, j]) + for i = 1:n + result[i, j] /= colsum + end + end + result +end + + +function loglikelihood{T <: AbstractFloat}(labels::Vector{T}, probs::Array{T, 2}) + LL = 0.0 + for i = 1:size(labels, 1) + LL += log(probs[Int(labels[i]) + 1, i]) # labels are zero-based + end + LL / size(labels, 1) +end + + +################################################################################ +# Test Implementations +################################################################################ + +function test_ace() + info("EvalMetric::ACE") + n_categories = 4 + n_observations = 100 + labels = convert(Vector{Float32}, rand(0:(n_categories - 1), n_observations)) # MXNet uses Float32 + probs = convert(Array{Float32}, generate_probs(n_categories, n_observations)) + LL = loglikelihood(labels, probs) + metric = mx.ACE() # For categorical variables, ACE == -LL + mx._update_single_output(metric, mx.NDArray(labels), mx.NDArray(probs)) + LL_v2 = metric.ace_sum / metric.n_sample + @test_approx_eq_eps LL LL_v2 1e-12 +end + + +################################################################################ +# Run tests +################################################################################ +test_ace() + + +end From b81b26ca5a7c69afb0e160e69f15a7296d4f4d44 Mon Sep 17 00:00:00 2001 From: Arkoniak Date: Thu, 29 Dec 2016 23:55:16 +0400 Subject: [PATCH 426/630] New Optimizers (#159) * Added implementation of RMSProp, AdaGrad, AdaDelta * Added AdaMax and Nadam --- docs/src/api/optimizer.md | 29 +++++++++++ examples/mnist/mlp-test.jl | 5 ++ src/optimizer.jl | 58 +++++++++++++++++++-- src/optimizers/adadelta.jl | 91 +++++++++++++++++++++++++++++++++ src/optimizers/adagrad.jl | 66 ++++++++++++++++++++++++ src/optimizers/adam.jl | 6 +-- src/optimizers/adamax.jl | 77 ++++++++++++++++++++++++++++ src/optimizers/nadam.jl | 100 
+++++++++++++++++++++++++++++++++++++ src/optimizers/rmsprop.jl | 71 ++++++++++++++++++++++++++ 9 files changed, 497 insertions(+), 6 deletions(-) create mode 100644 src/optimizers/adadelta.jl create mode 100644 src/optimizers/adagrad.jl create mode 100644 src/optimizers/adamax.jl create mode 100644 src/optimizers/nadam.jl create mode 100644 src/optimizers/rmsprop.jl diff --git a/docs/src/api/optimizer.md b/docs/src/api/optimizer.md index 81fad7cb827e..17974a577913 100644 --- a/docs/src/api/optimizer.md +++ b/docs/src/api/optimizer.md @@ -19,3 +19,32 @@ Modules = [MXNet.mx] Pages = ["optimizers/adam.jl"] ``` +### AdaGrad +```@autodocs +Modules = [MXNet.mx] +Pages = ["optimizers/adagrad.jl"] +``` + +### AdaDelta +```@autodocs +Modules = [MXNet.mx] +Pages = ["optimizers/adadelta.jl"] +``` + +### AdaMax +```@autodocs +Modules = [MXNet.mx] +Pages = ["optimizers/adamax.jl"] +``` + +### RMSProp +```@autodocs +Modules = [MXNet.mx] +Pages = ["optimizers/rmsprop.jl"] +``` + +### Nadam +```@autodocs +Modules = [MXNet.mx] +Pages = ["optimizers/nadam.jl"] +``` diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index 4931944032a9..a88ba3772fb0 100644 --- a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -74,6 +74,11 @@ end function test_mnist_mlp() @test mnist_fit_and_predict(mx.SGD(lr=0.1, momentum=0.9), mx.UniformInitializer(0.01), 2) > 90 @test mnist_fit_and_predict(mx.ADAM(), mx.NormalInitializer(), 2) > 90 + @test mnist_fit_and_predict(mx.AdaGrad(), mx.NormalInitializer(), 2) > 90 + @test mnist_fit_and_predict(mx.AdaDelta(), mx.NormalInitializer(), 2) > 90 + @test mnist_fit_and_predict(mx.AdaMax(), mx.NormalInitializer(), 2) > 90 + @test mnist_fit_and_predict(mx.RMSProp(), mx.NormalInitializer(), 2) > 90 + @test mnist_fit_and_predict(mx.Nadam(), mx.NormalInitializer(), 2) > 90 end test_mnist_mlp() diff --git a/src/optimizer.jl b/src/optimizer.jl index c672c2fe998a..66f7d660847a 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -158,6 
+158,42 @@ type Fixed <: AbstractMomentumScheduler momentum :: Float64 end get_momentum(self :: Fixed, state :: OptimizationState) = self.momentum + +""" + Momentum.NadamScheduler + +Nesterov-accelerated adaptive momentum scheduler. + +Description in "Incorporating Nesterov Momentum into Adam." +[http://cs229.stanford.edu/proj2015/054_report.pdf] +(http://cs229.stanford.edu/proj2015/054_report.pdf) + +``\mu_t = \mu_0 * (1 - \gamma * \alpha^{t * \delta})``. +Here +* ``t`` is the iteration count +* ``\delta``: default `0.004` is scheduler decay, +* ``\gamma``: default `0.5` +* ``\alpha``: default `0.96` +* ``\mu_0``: default `0.99` +""" +type NadamScheduler <: AbstractMomentumScheduler + mu0 :: Float64 + delta :: Float64 + gamma :: Float64 + alpha :: Float64 +end +function NadamScheduler(;mu0::Real=0.99, delta::Real=0.004, + gamma::Real=0.5, alpha::Real=0.96) + @assert(0.0 <= delta) + @assert(0.0 <= alpha <= 1.0) + @assert(0.0 <= mu0 <= 1.0) + @assert(0.0 <= gamma <= 1.0) + NadamScheduler(Float64(mu0), Float64(delta), Float64(gamma), Float64(alpha)) +end +get_momentum(self :: NadamScheduler, state :: OptimizationState) = + self.mu0 * (1.0 - self.gamma*self.alpha^(state.curr_iter * self.delta)), + self.mu0 * (1.0 - self.gamma*self.alpha^((state.curr_iter + 1) * self.delta)) + end # module Momentum ################################################################################ function get_momentum_scheduler(scheduler :: Any, momentum :: Real) @@ -170,6 +206,15 @@ function get_momentum_scheduler(scheduler :: Any, momentum :: Real) end end +function get_momentum_scheduler(scheduler :: Any, + another_scheduler :: AbstractMomentumScheduler) + + if isa(scheduler, AbstractMomentumScheduler) + return scheduler + else + return another_scheduler + end +end """ get_updater(optimizer) @@ -198,10 +243,10 @@ Base class for all optimizer options. 
abstract AbstractOptimizerOptions """ - normalized_gradient(opts, state, grad) + normalized_gradient(opts, state, weight, grad) * `opts::AbstractOptimizerOptions`: options for the optimizer, should contain the field - `grad_scale`, `grad_clip` and `weight_decay`. +`grad_clip` and `weight_decay`. * `state::OptimizationState`: the current optimization state. * `weight::NDArray`: the trainable weights. * `grad::NDArray`: the original gradient of the weights. @@ -216,10 +261,17 @@ function normalized_gradient(opts::AbstractOptimizerOptions, state::Optimization if opts.grad_clip > 0 grad = clip(grad, -opts.grad_clip, opts.grad_clip) end - @inplace grad += opts.weight_decay * weight + if opts.weight_decay > 0 + @inplace grad += opts.weight_decay * weight + end return grad end include("optimizers/sgd.jl") include("optimizers/adam.jl") +include("optimizers/adagrad.jl") +include("optimizers/adadelta.jl") +include("optimizers/adamax.jl") +include("optimizers/rmsprop.jl") +include("optimizers/nadam.jl") diff --git a/src/optimizers/adadelta.jl b/src/optimizers/adadelta.jl new file mode 100644 index 000000000000..e00cc9a42abd --- /dev/null +++ b/src/optimizers/adadelta.jl @@ -0,0 +1,91 @@ +@defstruct AdaDeltaOptions <: AbstractOptimizerOptions ( + (lr :: Real = 1.0, lr > 0), + (rho :: Real = 0.95, rho > 0 && rho < 1), + (epsilon :: Real = 1e-6, epsilon > 0), + (grad_clip :: Real = 0, grad_clip >= 0), + (weight_decay :: Real = 0.00001, weight_decay >= 0), + lr_scheduler :: Any = nothing +) + +""" + AdaDelta + +Scale learning rates by the ratio of accumulated gradients to accumulated +updates, see [1] and notes for further description. + + AdaDelta(; kwargs...) 
+ +# Attributes +* `lr::Real`: default `1.0`, the learning rate controlling the + size of update steps +* `rho::Real`: default `0.9`, squared gradient moving average decay factor +* `epsilon::Real`: default `1e-6`, small value added for + numerical stability +* `grad_clip::Real`: default `0`, if positive, will clip the gradient + into the range `[-grad_clip, grad_clip]`. +* `weight_decay::Real`: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. + +# Notes +`rho` should be between 0 and 1. A value of `rho` close to 1 will decay the +moving average slowly and a value close to 0 will decay the moving average +fast. + +`rho` = 0.95 and `epsilon` = 1e-6 are suggested in the paper and reported to +work for multiple datasets (MNIST, speech). In the paper, no learning rate is +considered (so `lr` = 1.0). Probably best to keep it at this value. + +`epsilon` is important for the very first update (so the numerator does +not become 0). + +Using the step size `lr` and a decay factor `rho` the learning rate is +calculated as: +``r_t &= \rho r_{t-1} + (1-\rho)*g^2\\ +\eta_t &= \eta \frac{\sqrt{s_{t-1} + \epsilon}} {\sqrt{r_t + \epsilon}}\\ +s_t &= \rho s_{t-1} + (1-\rho)*(\eta_t*g)^2`` + +# References +* [1]: Zeiler, M. D. (2012): + ADADELTA: An Adaptive Learning Rate Method. arXiv Preprint arXiv:1212.5701. +""" + +type AdaDelta <: AbstractOptimizer + opts :: AdaDeltaOptions + state :: OptimizationState + + function AdaDelta(; kwargs...) + opts = AdaDeltaOptions(;kwargs...) 
+ opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) + + new(opts) + end +end + +type AdaDeltaState + acc :: NDArray + delta_acc :: NDArray +end + +function create_state(self :: AdaDelta, index :: Int, weight :: NDArray) + return AdaDeltaState(zeros(size(weight), context(weight)), + zeros(size(weight), context(weight))) +end + +function update(self :: AdaDelta, index :: Int, weight :: NDArray, + grad :: NDArray, state :: AdaDeltaState) + lr = get_learning_rate(self.opts.lr_scheduler, self.state) + grad = normalized_gradient(self.opts, self.state, weight, grad) + + # Update state.acc as in RMSProp + @inplace state.acc .*= self.opts.rho + @inplace state.acc .+= (1 - self.opts.rho) * grad .* grad + + # Compute update using the "old" state.delta_acc + update = grad .* sqrt(state.delta_acc + self.opts.epsilon) ./ + (sqrt(state.acc + self.opts.epsilon)) + @inplace weight .+= -lr * update + + # update state.delta_acc using update + @inplace state.delta_acc .*= self.opts.rho + @inplace state.delta_acc .+= (1 - self.opts.rho) * update .* update +end diff --git a/src/optimizers/adagrad.jl b/src/optimizers/adagrad.jl new file mode 100644 index 000000000000..196998121cce --- /dev/null +++ b/src/optimizers/adagrad.jl @@ -0,0 +1,66 @@ +@defstruct AdaGradOptions <: AbstractOptimizerOptions ( + (lr :: Real = 0.1, lr > 0), + (epsilon :: Real = 1e-6, epsilon > 0), + (grad_clip :: Real = 0, grad_clip >= 0), + (weight_decay :: Real = 0.00001, weight_decay >= 0), + lr_scheduler :: Any = nothing +) + +""" + AdaGrad + +Scale learning rates by dividing with the square root of accumulated +squared gradients. See [1] for further description. + + AdaGrad(; kwargs...) + +# Attributes +* `lr::Real`: default `0.1`, the learning rate controlling the + size of update steps +* `epsilon::Real`: default `1e-6`, small value added for + numerical stability +* `grad_clip::Real`: default `0`, if positive, will clip the gradient + into the range `[-grad_clip, grad_clip]`. 
+* `weight_decay::Real`: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. + +# Notes +Using step size lr AdaGrad calculates the learning rate for feature i at +time step t as: +``η_{t,i} = \frac{lr}{\sqrt{\sum^t_{t^\prime} g^2_{t^\prime,i} + ϵ}} g_{t,i}`` +as such the learning rate is monotonically decreasing. +Epsilon is not included in the typical formula, see [2]. + +# References +* [1]: Duchi, J., Hazan, E., & Singer, Y. (2011): + Adaptive subgradient methods for online learning and + stochastic optimization. JMLR, 12:2121-2159. +* [2]: Chris Dyer: Notes on AdaGrad. + [http://www.ark.cs.cmu.edu/cdyer/adagrad.pdf] + (http://www.ark.cs.cmu.edu/cdyer/adagrad.pdf) +""" + +type AdaGrad <: AbstractOptimizer + opts :: AdaGradOptions + state :: OptimizationState + + function AdaGrad(; kwargs...) + opts = AdaGradOptions(;kwargs...) + opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) + + new(opts) + end +end + +function create_state(self :: AdaGrad, index :: Int, weight :: NDArray) + return zeros(size(weight), context(weight)) +end + +function update(self :: AdaGrad, index :: Int, weight :: NDArray, + grad :: NDArray, state :: NDArray) + lr = get_learning_rate(self.opts.lr_scheduler, self.state) + grad = normalized_gradient(self.opts, self.state, weight, grad) + + @inplace state .+= grad .* grad + @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon)) +end diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index 665cc52694b0..3af5c3579736 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -63,11 +63,11 @@ function update(self :: ADAM, index :: Int, weight :: NDArray, grad :: NDArray, state.mt = self.opts.beta1 * state.mt + (1 - self.opts.beta1) * grad state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) * (grad .* grad) - mt = state.mt / (1 - state.beta1Power) - vt = state.vt / (1 - state.beta2Power) + at = sqrt(1.0 - state.beta2Power)/(1.0 - 
state.beta1Power) state.beta1Power *= self.opts.beta1 state.beta2Power *= self.opts.beta2 - @inplace weight .+= -lr * mt ./ (sqrt(vt) + self.opts.epsilon) + @inplace weight .+= -lr * at * state.mt ./ + (sqrt(state.vt) + self.opts.epsilon) end diff --git a/src/optimizers/adamax.jl b/src/optimizers/adamax.jl new file mode 100644 index 000000000000..838264360e1b --- /dev/null +++ b/src/optimizers/adamax.jl @@ -0,0 +1,77 @@ +@defstruct AdaMaxOptions <: AbstractOptimizerOptions ( + (lr :: Real = 0.002, lr > 0), + (beta1 :: Real = 0.9, beta1 > 0 && beta1 < 1), + (beta2 :: Real = 0.999, beta2 > 0 && beta2 < 1), + (epsilon :: Real = 1e-8, epsilon > 0), + (grad_clip :: Real = 0, grad_clip >= 0), + (weight_decay :: Real = 0.00001, weight_decay >= 0), + lr_scheduler :: Any = nothing +) + +""" + AdaMax + +This is a variant of of the Adam algorithm based on the infinity norm. +See [1] for further description. + + AdaMax(; kwargs...) + +# Attributes +* `lr::Real`: default `0.002`, the learning rate controlling the + size of update steps +* `beta1::Real`: default `0.9`, exponential decay rate + for the first moment estimates +* `beta2::Real`: default `0.999`, exponential decay rate for the + weighted infinity norm estimates +* `epsilon::Real`: default `1e-8`, small value added for + numerical stability +* `grad_clip::Real`: default `0`, if positive, will clip the gradient + into the range `[-grad_clip, grad_clip]`. +* `weight_decay::Real`: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. + +# References +* [1]: Kingma, Diederik, and Jimmy Ba (2014): + Adam: A Method for Stochastic Optimization. + [http://arxiv.org/abs/1412.6980v8] + (http://arxiv.org/abs/1412.6980v8). +""" + +type AdaMax <: AbstractOptimizer + opts :: AdaMaxOptions + state :: OptimizationState + + function AdaMax(; kwargs...) + opts = AdaMaxOptions(; kwargs...) 
+ opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) + + new(opts) + end +end + +type AdaMaxState + mt :: NDArray + ut :: NDArray + beta1Power :: Float64 +end + +function create_state(self :: AdaMax, index :: Int, weight :: NDArray) + return AdaMaxState( zeros(size(weight), context(weight)), + zeros(size(weight), context(weight)), + self.opts.beta1 ) +end + +function update(self :: AdaMax, index :: Int, weight :: NDArray, + grad :: NDArray, state :: AdaMaxState) + lr = get_learning_rate(self.opts.lr_scheduler, self.state) + grad = normalized_gradient(self.opts, self.state, weight, grad) + + @inplace state.mt .*= self.opts.beta1 + @inplace state.mt .+= (1 - self.opts.beta1) * grad + state.ut = _maximum(self.opts.beta2 * state.ut, abs(grad)) + + @inplace weight .+= - lr / (1 - state.beta1Power) * + state.mt ./ (state.ut + self.opts.epsilon) + + state.beta1Power *= self.opts.beta1 +end diff --git a/src/optimizers/nadam.jl b/src/optimizers/nadam.jl new file mode 100644 index 000000000000..65a195f674fe --- /dev/null +++ b/src/optimizers/nadam.jl @@ -0,0 +1,100 @@ +@defstruct NadamOptions <: AbstractOptimizerOptions ( + (lr :: Real = 0.001, lr > 0), + (beta1 :: Real = 0.99, beta1 > 0 && beta1 < 1), + (beta2 :: Real = 0.999, beta2 > 0 && beta2 < 1), + (epsilon :: Real = 1e-8, epsilon > 0), + (grad_clip :: Real = 0, grad_clip >= 0), + (weight_decay :: Real = 0.00001, weight_decay >= 0), + lr_scheduler :: Any = nothing, + momentum_scheduler :: Any = nothing +) + +""" + Nadam + +Nesterov Adam optimizer: Adam RMSprop with Nesterov momentum, +see [1] and notes for further description. + + Nadam(; kwargs...) + +# Attributes +* `lr::Real`: default `0.001`, learning rate. +* `beta1::Real`: default `0.99`. +* `beta2::Real`: default `0.999`. +* `epsilon::Real`: default `1e-8`, small value added for + numerical stability +* `grad_clip::Real`: default `0`, if positive, will clip the gradient + into the range `[-grad_clip, grad_clip]`. 
+* `weight_decay::Real`: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. +* `lr_scheduler::AbstractLearningRateScheduler`: default `nothing`, a + dynamic learning rate scheduler. If set, will overwrite the `lr` + parameter. +* `momentum_scheduler::AbstractMomentumScheduler` default + `NadamScheduler` of the form + ``\mu_t = beta1 * (1 - 0.5 * 0.96^{t * 0.004})`` + +# Notes +Default parameters follow those provided in the paper. +It is recommended to leave the parameters of this optimizer +at their default values. + +# References +* [1]: Incorporating Nesterov Momentum into Adam. + [http://cs229.stanford.edu/proj2015/054_report.pdf] + (http://cs229.stanford.edu/proj2015/054_report.pdf) +* [2]: On the importance of initialization and momentum in deep learning + [http://www.cs.toronto.edu/~fritz/absps/momentum.pdf] + (http://www.cs.toronto.edu/~fritz/absps/momentum.pdf) +""" +type Nadam <: AbstractOptimizer + opts :: NadamOptions + state :: OptimizationState + + function Nadam(; kwargs...) + opts = NadamOptions(; kwargs...) 
+ opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) + opts.momentum_scheduler = get_momentum_scheduler(opts.momentum_scheduler, + Momentum.NadamScheduler(mu0=opts.beta1)) + + new(opts) + end +end + +type NadamState + mt :: NDArray + nt :: NDArray + momentum :: Float64 + beta2Power :: Float64 +end + +function create_state(self :: Nadam, index :: Int, weight :: NDArray) + return NadamState( zeros(size(weight), context(weight)), + zeros(size(weight), context(weight)), + 1.0, + self.opts.beta2 ) +end + +function update(self :: Nadam, index :: Int, weight :: NDArray, + grad :: NDArray, state :: NadamState) + lr = get_learning_rate(self.opts.lr_scheduler, self.state) + grad = normalized_gradient(self.opts, self.state, weight, grad) + + mu_t, mu_t1 = + get_momentum(self.opts.momentum_scheduler, self.state) + state.momentum *= mu_t + momentum_next = state.momentum * mu_t1 + + grad_prime = grad / (1.0 - state.momentum) + @inplace state.mt .*= self.opts.beta1 + @inplace state.mt .+= (1.0 - self.opts.beta1) * grad + mt = state.mt / (1.0 - momentum_next) + + @inplace state.nt .*= self.opts.beta2 + @inplace state.nt .+= (1.0 - self.opts.beta2) * grad .* grad + nt = state.nt / (1.0 - state.beta2Power) + state.beta2Power *= self.opts.beta2 + + mt_prime = (1.0 - mu_t) * grad_prime + mu_t1 * mt + @inplace weight .+= -lr * mt_prime ./ (sqrt(nt) + self.opts.epsilon) +end diff --git a/src/optimizers/rmsprop.jl b/src/optimizers/rmsprop.jl new file mode 100644 index 000000000000..01a40651d2f6 --- /dev/null +++ b/src/optimizers/rmsprop.jl @@ -0,0 +1,71 @@ +@defstruct RMSPropOptions <: AbstractOptimizerOptions ( + (lr :: Real = 0.001, lr > 0), + (rho :: Real = 0.9, rho > 0 && rho < 1), + (epsilon :: Real = 1e-6, epsilon > 0), + (grad_clip :: Real = 0, grad_clip >= 0), + (weight_decay :: Real = 0.00001, weight_decay >= 0), + lr_scheduler :: Any = nothing +) + +""" + RMSProp + +Scale learning rates by dividing with the moving average of the root mean +squared (RMS) gradients. 
See [1] for further description. + + RMSProp(; kwargs...) + +# Attributes +* `lr::Real`: default `0.1`, the learning rate controlling the + size of update steps +* `rho::Real`: default `0.9`, gradient moving average decay factor +* `epsilon::Real`: default `1e-6`, small value added for + numerical stability +* `grad_clip::Real`: default `0`, if positive, will clip the gradient + into the range `[-grad_clip, grad_clip]`. +* `weight_decay::Real`: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. + +# Notes +`rho` should be between 0 and 1. A value of `rho` close to 1 will decay the +moving average slowly and a value close to 0 will decay the moving average +fast. + +Using the step size ``lr`` and a decay factor ``\rho`` the +learning rate ``\eta_t`` is calculated as: +``r_t &= ρ r_{t-1} + (1 - ρ)*g^2 \\ + η_t &= \frac{lr}{\sqrt{r_t + ϵ}}`` + +# References +* [1]: Tieleman, T. and Hinton, G. (2012): + Neural Networks for Machine Learning, Lecture 6.5 - rmsprop. + Coursera. [http://www.youtube.com/watch?v=O3sxAc4hxZU] + (http://www.youtube.com/watch?v=O3sxAc4hxZU) (formula @5:20) +""" + +type RMSProp <: AbstractOptimizer + opts :: RMSPropOptions + state :: OptimizationState + + function RMSProp(; kwargs...) + opts = RMSPropOptions(;kwargs...) 
+ opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) + + new(opts) + end +end + +function create_state(self :: RMSProp, index :: Int, weight :: NDArray) + return zeros(size(weight), context(weight)) +end + +function update(self :: RMSProp, index :: Int, weight :: NDArray, + grad :: NDArray, state :: NDArray) + lr = get_learning_rate(self.opts.lr_scheduler, self.state) + grad = normalized_gradient(self.opts, self.state, weight, grad) + + @inplace state .*= self.opts.rho + @inplace state .+= (1 - self.opts.rho) * grad .* grad + + @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon)) +end From 1ea44c93e77917165f5b95b0d18ba304757e11d0 Mon Sep 17 00:00:00 2001 From: Arkoniak Date: Sat, 7 Jan 2017 05:15:11 +0400 Subject: [PATCH 427/630] Fix hygiene bug (#164) --- src/symbolic-node.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index dfc54c3c3b1c..55328e5126c7 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -52,7 +52,7 @@ macro _list_symbol_info(self, func_name) ref_sz = Ref{MX_uint}(0) ref_names = Ref{char_pp}(0) @mxcall($func_name, (MX_handle, Ref{MX_uint}, Ref{char_pp}), - $self, ref_sz, ref_names) + $(esc(self)), ref_sz, ref_names) narg = ref_sz[] names = unsafe_wrap(Array, ref_names[], narg) names = [Symbol(unsafe_wrap(String, x)) for x in names] From 4d0aa87d652fddf366b2dfa2c6812ac00b2224d5 Mon Sep 17 00:00:00 2001 From: Arkoniak Date: Sat, 7 Jan 2017 05:15:51 +0400 Subject: [PATCH 428/630] Fixed visualize (#163) --- src/visualize.jl | 24 ++++++++++++++++-------- test/unittest/visualize.jl | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 50 insertions(+), 8 deletions(-) create mode 100644 test/unittest/visualize.jl diff --git a/src/visualize.jl b/src/visualize.jl index f0dd74efdc67..42d31a22d69e 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -51,6 +51,14 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp attr 
= deepcopy(node_attr) label = op + # Up to 0.8 version of mxnet additional info was stored in + # node["param"]. Staring from pre0.9 `param` was changed to `attr`. + if haskey(node, "param") + node_info = node["param"] + elseif haskey(node, "attr") + node_info = node["attr"] + end + if op == "null" if i ∈ heads # heads are output nodes @@ -62,23 +70,23 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp end elseif op == "Convolution" label = format("Convolution\nkernel={1}\nstride={2}\nn-filter={3}", - _extract_shape(node["param"]["kernel"]), - _extract_shape(node["param"]["stride"]), - node["param"]["num_filter"]) + _extract_shape(node_info["kernel"]), + _extract_shape(node_info["stride"]), + node_info["num_filter"]) colorkey = 2 elseif op == "FullyConnected" - label = format("FullyConnected\nnum-hidden={1}", node["param"]["num_hidden"]) + label = format("FullyConnected\nnum-hidden={1}", node_info["num_hidden"]) colorkey = 2 elseif op == "Activation" - label = format("Activation\nact-type={1}", node["param"]["act_type"]) + label = format("Activation\nact-type={1}", node_info["act_type"]) colorkey = 3 elseif op == "BatchNorm" colorkey = 4 elseif op == "Pooling" label = format("Pooling\ntype={1}\nkernel={2}\nstride={3}", - node["param"]["pool_type"], - _extract_shape(node["param"]["kernel"]), - _extract_shape(node["param"]["stride"])) + node_info["pool_type"], + _extract_shape(node_info["kernel"]), + _extract_shape(node_info["stride"])) colorkey = 5 elseif op ∈ ("Concat", "Flatten", "Reshape") colorkey = 6 diff --git a/test/unittest/visualize.jl b/test/unittest/visualize.jl new file mode 100644 index 000000000000..973c2b7034d0 --- /dev/null +++ b/test/unittest/visualize.jl @@ -0,0 +1,34 @@ +module TestVisualize +using MXNet +using Base.Test + +using ..Main: mlp2 + +################################################################################ +# Test Implementations 
+################################################################################ + +function test_basic() + info("Visualize::basic") + + mlp = mlp2() + + # Order of elements or default color values can change, but length of the output should be more or less stable + @test length(mx.to_graphviz(mlp)) == length( +""" +digraph "Network Visualization" { +node [fontsize=10]; +edge [fontsize=10]; +"fc1" [label="fc1\\nFullyConnected\\nnum-hidden=1000",style="rounded,filled",fixedsize=true,width=1.3,fillcolor="#fb8072",shape=box,penwidth=2,height=0.8034,color="#941305"]; +"activation0" [label="activation0\\nActivation\\nact-type=relu",style="rounded,filled",fixedsize=true,width=1.3,fillcolor="#ffffb3",shape=box,penwidth=2,height=0.8034,color="#999900"]; +"fc2" [label="fc2\\nFullyConnected\\nnum-hidden=10",style="rounded,filled",fixedsize=true,width=1.3,fillcolor="#fb8072",shape=box,penwidth=2,height=0.8034,color="#941305"]; +"activation0" -> "fc1" [arrowtail=open,color="#737373",dir=back]; +"fc2" -> "activation0" [arrowtail=open,color="#737373",dir=back]; +} +""") +end +################################################################################ +# Run tests +################################################################################ +test_basic() +end From ab80048173e833556e9f5c0efe5891107884b961 Mon Sep 17 00:00:00 2001 From: Spencer Lyon Date: Fri, 6 Jan 2017 20:19:42 -0500 Subject: [PATCH 429/630] ENH: added verbosity training option to control printouts (#162) --- src/model.jl | 43 ++++++++++++++++++++++++++++--------------- 1 file changed, 28 insertions(+), 15 deletions(-) diff --git a/src/model.jl b/src/model.jl index a892dc886ca3..c3ec17e035c0 100644 --- a/src/model.jl +++ b/src/model.jl @@ -191,7 +191,7 @@ end for copying mini-batches of data. Since there is no concern about convergence in prediction, it is better to set the mini-batch size as large as possible (limited by your device memory) if prediction speed is a concern. 
- + For the same reason, currently prediction will only use the first device even if multiple devices are provided to construct the model. @@ -290,6 +290,7 @@ end kvstore :: Union{Base.Symbol, KVStore} = :local, force_init :: Bool = false, callbacks :: Vector{AbstractCallback} = AbstractCallback[], + verbosity :: Int = 3 ) function _invoke_callbacks{T<:Real}(self::FeedForward, callbacks::Vector{AbstractCallback}, @@ -340,24 +341,30 @@ Train the `model` on `data` with the `optimizer`. this option is set, it will always do random initialization at the begining of training. * `callbacks::Vector{AbstractCallback}`: keyword argument, default `[]`. Callbacks to be invoked at each epoch or mini-batch, see `AbstractCallback`. +* `verbosity::Int`: Determines the verbosity of the print messages. Higher numbers + leads to more verbose printing. Acceptable values are + - `0`: Do not print anything during training + - `1`: Print starting and final messages + - `2`: Print one time messages and a message at the start of each epoch + - `3`: Print a summary of the training and validation accuracy for each epoch """ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) opts = TrainingOptions(; kwargs...) 
- info("Start training on $(self.ctx)") + opts.verbosity >= 1 && info("Start training on $(self.ctx)") batch_size = get_batch_size(data) num_dev = length(self.ctx) slices = _split_inputs(batch_size, num_dev) # initialize parameters - info("Initializing parameters...") + opts.verbosity >= 2 && info("Initializing parameters...") arg_names, param_names, aux_names = _init_model(self, data, opts.initializer, opts.force_init) # setup kvstore kvstore = opts.kvstore if isa(kvstore, Base.Symbol) - info("Creating KVStore...") + opts.verbosity >= 2 && info("Creating KVStore...") kvstore, update_on_kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params) end @@ -388,7 +395,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra label_shapes = Dict([k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)]) train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=grad_req, data_shapes..., label_shapes...) dbg_str = mx.debug_str(train_execs[i]) - info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[i])) + opts.verbosity >= 2 && info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[i])) copy_params_from(train_execs[i], self.arg_params, self.aux_params) end @@ -420,7 +427,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra set_optimizer(kvstore, optimizer) end - info("Initializing KVStore...") + opts.verbosity >= 2 && info("Initializing KVStore...") # init kv with gradients for idx = 1:length(param_arrays) param_on_devs = param_arrays[idx] @@ -443,7 +450,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # invoke callbacks on epoch 0 _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback) - info("Start training...") + opts.verbosity >= 2 && info("Start training...") for i_epoch = 1:opts.n_epoch time_start = time() reset!(opts.eval_metric) @@ -515,12 +522,14 @@ function fit(self :: 
FeedForward, optimizer :: AbstractOptimizer, data :: Abstra time_stop = time() metric = get(opts.eval_metric) - info(format("== Epoch {1:0>3d} ==========", i_epoch)) - info("## Training summary") - for (name, value) in metric - info(format("{1:>18s} = {2:.4f}", string(name), value)) + opts.verbosity >= 2 && info(format("== Epoch {1:0>3d}/{1:0>3d} ==========", i_epoch, opts.n_epoch)) + if opts.verbosity >= 3 + info("## Training summary") + for (name, value) in metric + info(format("{1:>18s} = {2:.4f}", string(name), value)) + end + info(format("{1:>18s} = {2:.4f} seconds", "time", time_stop-time_start)) end - info(format("{1:>18s} = {2:.4f} seconds", "time", time_stop-time_start)) # evaluation on validation set if !isa(opts.eval_data, Void) @@ -546,9 +555,11 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra update!(opts.eval_metric, cpu_label_arrays, cpu_output_arrays) end - info("## Validation summary") - for (name, value) in get(opts.eval_metric) - info(format("{1:>18s} = {2:.4f}", string(name), value)) + if opts.verbosity >= 3 + info("## Validation summary") + for (name, value) in get(opts.eval_metric) + info(format("{1:>18s} = {2:.4f}", string(name), value)) + end end end @@ -566,6 +577,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback; metric=metric) end # end of all epochs + + opts.verbosity >= 1 && info("Finish training on $(self.ctx)") end function save_checkpoint(self :: FeedForward, prefix :: AbstractString, state :: OptimizationState) From 2393d4a969877548d4b840fdd95942ca2d0e46e5 Mon Sep 17 00:00:00 2001 From: Arkoniak Date: Mon, 9 Jan 2017 05:57:01 +0400 Subject: [PATCH 430/630] Fix scalar operations in NDArray (#165) * Fixed string conversion bug * Smaller eps for Float16 * Changed rand_tensors in julian way --- src/ndarray.jl | 12 +++- test/unittest/ndarray.jl | 133 +++++++++++++++++++++++++++++++-------- 2 
files changed, 119 insertions(+), 26 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index f32180a95e9c..6d19903b73cf 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -928,6 +928,16 @@ end ACCEPT_EMPTY_MUTATE_TARGET = (1 << 2) ) +function _julia_to_mx_param(val :: Any) + string(val) +end +function _julia_to_mx_param(val :: Float16) + string(val) +end +function _julia_to_mx_param(val :: Real) + @sprintf("%e", val) +end + # Import corresponding math functions from base so the automatically defined libmxnet # functions can overload them import Base: sqrt @@ -999,7 +1009,7 @@ function _get_ndarray_function_def(name :: String) num_outputs_p = [convert(Cint, num_outputs)] kw_keys_str = String[string(x[1]) for x in kwargs] - kw_vals_str = String[string(x[2]) for x in kwargs] + kw_vals_str = String[_julia_to_mx_param(x[2]) for x in kwargs] #op_handle = _get_cached_libmx_op_handle($(QuoteNode(name))) op_handle = _get_cached_libmx_op_handle($(name)) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index abc8d646fc7a..5b555b7d1adc 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -7,8 +7,9 @@ using ..Main: rand_dims, reldiff ################################################################################ # Test Implementations ################################################################################ -function rand_tensors{N}(dims::NTuple{N, Int}) - tensor = rand(mx.MX_float, dims) +rand_tensors{N}(dims::NTuple{N, Int}) = rand_tensors(mx.MX_float, dims) +function rand_tensors{N, T}(::Type{T}, dims::NTuple{N, Int}) + tensor = rand(T, dims) array = copy(tensor, mx.cpu()) return (tensor, array) end @@ -80,53 +81,99 @@ function test_plus() t1, a1 = rand_tensors(dims) t2, a2 = rand_tensors(dims) t3, a3 = rand_tensors(dims) + thresh = 1e-6 info("NDArray::plus::dims = $dims") - @test reldiff(t1+t2, copy(a1+a2)) < 1e-6 - @test reldiff(t1.+t2, copy(a1.+a2)) < 1e-6 + @test reldiff(t1+t2, copy(a1+a2)) < thresh + @test 
reldiff(t1.+t2, copy(a1.+a2)) < thresh - @test reldiff(t1+t2+t3, copy(a1+a2+a3)) < 1e-6 + @test reldiff(t1+t2+t3, copy(a1+a2+a3)) < thresh # test inplace += operation a0 = a1 # keep a reference to a1 @mx.inplace a1 += a2 # perform inplace += @test a0 == a1 # make sure they are still the same object - @test reldiff(copy(a0), copy(a1)) < 1e-6 - @test reldiff(copy(a1), t1+t2) < 1e-6 + @test reldiff(copy(a0), copy(a1)) < thresh + @test reldiff(copy(a1), t1+t2) < thresh # test scalar scalar = rand() - @test reldiff(t3 + scalar, copy(a3 + scalar)) < 1e-6 - @test reldiff(t2+scalar+t3, copy(a2+scalar+a3)) < 1e-6 + @test reldiff(t3 + scalar, copy(a3 + scalar)) < thresh + @test reldiff(t2+scalar+t3, copy(a2+scalar+a3)) < thresh + + # test small and large scalar + t4 = zeros(Float32, dims) + a4 = copy(t4, mx.cpu()) + scalar_small = 1e-8 + scalar_large = 1e8 + @test reldiff(t4 + scalar_small, copy(a4 .+ scalar_small)) < thresh + @test reldiff(t4 + scalar_large, copy(a4 .+ scalar_large)) < thresh + + t5 = zeros(Float64, dims) + a5 = copy(t5, mx.cpu()) + scalar_small = 1e-8 + scalar_large = 1e8 + @test reldiff(t5 + scalar_small, copy(a5 .+ scalar_small)) < thresh + @test reldiff(t5 + scalar_large, copy(a5 .+ scalar_large)) < thresh + + t6 = zeros(Float16, dims) + a6 = copy(t6, mx.cpu()) + scalar_small = Float16(1e-5) + scalar_large = Float16(1e4) + @test reldiff(t6 + scalar_small, copy(a6 .+ scalar_small)) < 1e-2 + @test reldiff(t6 + scalar_large, copy(a6 .+ scalar_large)) < 1e-2 end function test_minus() dims = rand_dims() t1, a1 = rand_tensors(dims) t2, a2 = rand_tensors(dims) + thresh = 1e-6 info("NDArray::minus::dims = $dims") - @test reldiff(t1-t2, copy(a1-a2)) < 1e-6 - @test reldiff(t1.-t2, copy(a1.-a2)) < 1e-6 + @test reldiff(t1-t2, copy(a1-a2)) < thresh + @test reldiff(t1.-t2, copy(a1.-a2)) < thresh - @test reldiff(-t1, copy(-a1)) < 1e-6 + @test reldiff(-t1, copy(-a1)) < thresh # make sure the negation is not in-place, so a1 is not changed after previous # statement is 
executed - @test reldiff(t1, copy(a1)) < 1e-6 + @test reldiff(t1, copy(a1)) < thresh # test inplace -= operation a0 = a1 # keep a reference to a1 @mx.inplace a1 -= a2 # perform inplace -= @test a0 == a1 # make sure they are still the same object - @test reldiff(copy(a0), copy(a1)) < 1e-6 - @test reldiff(copy(a1), t1-t2) < 1e-6 + @test reldiff(copy(a0), copy(a1)) < thresh + @test reldiff(copy(a1), t1-t2) < thresh # test scalar scalar = rand() - @test reldiff(t2 - scalar, copy(a2 - scalar)) < 1e-6 + @test reldiff(t2 - scalar, copy(a2 - scalar)) < thresh + + # test small and large scalar + t4 = zeros(Float32, dims) + a4 = copy(t4, mx.cpu()) + scalar_small = 1e-8 + scalar_large = 1e8 + @test reldiff(t4 - scalar_small, copy(a4 .- scalar_small)) < thresh + @test reldiff(t4 - scalar_large, copy(a4 .- scalar_large)) < thresh + + t5 = zeros(Float64, dims) + a5 = copy(t5, mx.cpu()) + scalar_small = 1e-8 + scalar_large = 1e8 + @test reldiff(t5 - scalar_small, copy(a5 .- scalar_small)) < thresh + @test reldiff(t5 - scalar_large, copy(a5 .- scalar_large)) < thresh + + t6 = zeros(Float16, dims) + a6 = copy(t6, mx.cpu()) + scalar_small = Float16(1e-5) + scalar_large = Float16(1e4) + @test reldiff(t6 - scalar_small, copy(a6 .- scalar_small)) < 1e-2 + @test reldiff(t6 - scalar_large, copy(a6 .- scalar_large)) < 1e-2 end function test_mul() @@ -134,44 +181,80 @@ function test_mul() t1, a1 = rand_tensors(dims) t2, a2 = rand_tensors(dims) t3, a3 = rand_tensors(dims) + thresh = 1e-6 info("NDArray::mul::dims = $dims") - @test reldiff(t1.*t2, copy(a1.*a2)) < 1e-6 + @test reldiff(t1.*t2, copy(a1.*a2)) < thresh # test inplace .*= operation a0 = a1 # keep a reference to a1 @mx.inplace a1 .*= a2 # perform inplace .*= @test a0 == a1 # make sure they are still the same object - @test reldiff(copy(a0), copy(a1)) < 1e-6 - @test reldiff(copy(a1), t1.*t2) < 1e-6 + @test reldiff(copy(a0), copy(a1)) < thresh + @test reldiff(copy(a1), t1.*t2) < thresh # test scalar - scalar = rand() - @test 
reldiff(t3 * scalar, copy(a3 .* scalar)) < 1e-6 + scalar = mx.MX_float(rand()) + @test reldiff(t3 * scalar, copy(a3 .* scalar)) < thresh + + # test small and large scalar + t4, a4 = rand_tensors(Float32, dims) + scalar_small = 1e-8 + scalar_large = 1e8 + @test reldiff(t4 * scalar_small, copy(a4 .* scalar_small)) < thresh + @test reldiff(t4 * scalar_large, copy(a4 .* scalar_large)) < thresh + + t5, a5 = rand_tensors(Float64, dims) + scalar_small = 1e-8 + scalar_large = 1e8 + @test reldiff(t5 * scalar_small, copy(a5 .* scalar_small)) < thresh + @test reldiff(t5 * scalar_large, copy(a5 .* scalar_large)) < thresh + + t6, a6 = rand_tensors(Float16, dims) + scalar_small = Float16(1e-5) + @test reldiff(t6 * scalar_small, copy(a6 .* scalar_small)) < 1e-2 end function test_div() dims = rand_dims() t1, a1 = rand_tensors(dims) t2, a2 = rand_tensors(dims) + thresh = 1e-6 info("NDArray::div::dims = $dims") t2 .+= 2 # avoid numerical instability @mx.inplace a2 .+= 2 - @test reldiff(t1 ./ t2, copy(a1 ./ a2)) < 1e-6 + @test reldiff(t1 ./ t2, copy(a1 ./ a2)) < thresh # test inplace -= operation a0 = a1 # keep a reference to a2 @mx.inplace a1 ./= a2 # perform inplace ./= @test a0 == a1 # make sure they are still the same object - @test reldiff(copy(a0), copy(a1)) < 1e-6 - @test reldiff(copy(a1), t1 ./ t2) < 1e-6 + @test reldiff(copy(a0), copy(a1)) < thresh + @test reldiff(copy(a1), t1 ./ t2) < thresh # test scalar scalar = rand() + 2 - @test reldiff(t2./scalar, copy(a2./scalar)) < 1e-6 + @test reldiff(t2./scalar, copy(a2./scalar)) < thresh + + # test small and large scalar + t4, a4 = rand_tensors(Float32, dims) + scalar_small = 1e-8 + scalar_large = 1e8 + @test reldiff(t4 / scalar_small, copy(a4 ./ scalar_small)) < thresh + @test reldiff(t4 / scalar_large, copy(a4 ./ scalar_large)) < thresh + + t5, a5 = rand_tensors(Float64, dims) + scalar_small = 1e-8 + scalar_large = 1e8 + @test reldiff(t5 / scalar_small, copy(a5 ./ scalar_small)) < thresh + @test reldiff(t5 / scalar_large, 
copy(a5 ./ scalar_large)) < thresh + + t6, a6 = rand_tensors(Float16, dims) + scalar_large = 1e4 + @test reldiff(t6 / scalar_large, copy(a6 ./ scalar_large)) < 1e-2 end function test_gd() From 179daa5deeef7197457b2235ed295ed6f5e76b22 Mon Sep 17 00:00:00 2001 From: Arkoniak Date: Fri, 13 Jan 2017 13:27:59 +0400 Subject: [PATCH 431/630] Fix of bugs in nnvm branch (#169) * Fix build error in travis Another string conversion fix * Fixed JSON and added testsets * Fixed errors in julia 0.4 --- REQUIRE | 1 + deps/build.jl | 1 + src/model.jl | 8 ++--- src/ndarray.jl | 22 +++++++++----- src/symbolic-node.jl | 5 +++- src/util.jl | 4 +-- test/runtests.jl | 19 ++++++++---- test/travis/setup_env.sh | 1 + test/unittest/bind.jl | 11 +++++-- test/unittest/io.jl | 15 +++++++--- test/unittest/kvstore.jl | 15 +++++++--- test/unittest/name.jl | 13 +++++++-- test/unittest/ndarray.jl | 53 +++++++++++++++++++--------------- test/unittest/operator.jl | 12 ++++++-- test/unittest/random.jl | 13 +++++++-- test/unittest/symbolic-node.jl | 27 ++++++++++------- test/unittest/visualize.jl | 12 ++++++-- 17 files changed, 160 insertions(+), 72 deletions(-) diff --git a/REQUIRE b/REQUIRE index d37f975fe665..1ca6bdddd26a 100644 --- a/REQUIRE +++ b/REQUIRE @@ -3,3 +3,4 @@ Compat 0.9.1 Formatting BinDeps JSON +BaseTestNext diff --git a/deps/build.jl b/deps/build.jl index b79940e8c1eb..49ff90b22b08 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -1,4 +1,5 @@ using Compat + ################################################################################ # First try to detect and load existing libmxnet ################################################################################ diff --git a/src/model.jl b/src/model.jl index a892dc886ca3..cc81e20633ad 100644 --- a/src/model.jl +++ b/src/model.jl @@ -384,8 +384,8 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra train_execs = Array(Executor, num_dev) for i = 1:num_dev - data_shapes = Dict([k => 
tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_data(data)]) - label_shapes = Dict([k => tuple(v[1:end-1]...,length(slices[i])) for (k,v) in provide_label(data)]) + data_shapes = Dict(map((x) -> x[1] => tuple(x[2][1:end-1]...,length(slices[i])), provide_data(data))) + label_shapes = Dict(map((x) -> x[1] => tuple(x[2][1:end-1]...,length(slices[i])), provide_label(data))) train_execs[i] = simple_bind(self.arch, self.ctx[i]; grad_req=grad_req, data_shapes..., label_shapes...) dbg_str = mx.debug_str(train_execs[i]) info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[i])) @@ -574,8 +574,8 @@ end function save_checkpoint(sym :: SymbolicNode, arg_params :: Dict{Base.Symbol, NDArray}, aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, epoch :: Int) save("$prefix-symbol.json", sym) - save_dict = merge(Dict([Symbol("arg:$k") => v for (k,v) in arg_params]), - Dict([Symbol("aux:$k") => v for (k,v) in aux_params])) + save_dict = merge(Dict{Base.Symbol, NDArray}(map((x) -> Symbol("arg:$(x[1])") => x[2], arg_params)), + Dict{Base.Symbol, NDArray}(map((x) -> Symbol("aux:$(x[1])") => x[2], aux_params))) save_filename = format("{1}-{2:04d}.params", prefix, epoch) save(save_filename, save_dict) info("Saved checkpoint to '$save_filename'") diff --git a/src/ndarray.jl b/src/ndarray.jl index 6d19903b73cf..7dd0a59b9ad5 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -931,11 +931,14 @@ end function _julia_to_mx_param(val :: Any) string(val) end -function _julia_to_mx_param(val :: Float16) - string(val) +function _julia_to_mx_param(val :: Float64) + @sprintf("%.16e", val) end -function _julia_to_mx_param(val :: Real) - @sprintf("%e", val) +function _julia_to_mx_param(val :: Float32) + @sprintf("%.8e", val) +end +function _julia_to_mx_param(val :: Float16) + @sprintf("%.4e", val) end # Import corresponding math functions from base so the automatically defined libmxnet @@ -986,6 +989,9 @@ function _get_ndarray_function_def(name :: 
String) end args = collect(args) # tuple to list + if length(args) == 0 + args = MX_handle[] + end # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped # See https://github.com/dmlc/MXNet.jl/issues/55 @@ -1000,9 +1006,11 @@ function _get_ndarray_function_def(name :: String) kwargs = Any[key != :axes ? (key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] end - output_handles = [Base.cconvert(MX_handle, x) for x in output_vars] - if length(output_handles) > 0 - output_handles_pp = [Base.cconvert(Ptr{MX_handle}, output_handles)] + if length(output_vars) > 0 + output_handles = map((x) -> Base.cconvert(MX_handle, x), output_vars) + # XXX: Julia 0.4 has bug: [Array{MX_handle}] == Array{MX_handle} + output_handles_pp = Array{Array{MX_handle}}(1) + output_handles_pp[1] = Base.cconvert(Ptr{MX_handle}, output_handles) else output_handles_pp = [Base.convert(Ptr{MX_handle}, 0)] end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 1612f0c57771..ad63fe87e30b 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -52,7 +52,7 @@ macro _list_symbol_info(self, func_name) ref_sz = Ref{MX_uint}(0) ref_names = Ref{char_pp}(0) @mxcall($func_name, (MX_handle, Ref{MX_uint}, Ref{char_pp}), - $self, ref_sz, ref_names) + $(esc(self)), ref_sz, ref_names) narg = ref_sz[] names = unsafe_wrap(Array, ref_names[], narg) names = [Symbol(unsafe_wrap(String, x)) for x in names] @@ -493,6 +493,9 @@ end function /(self :: SymbolicNode, arg :: Real) ./(self, arg) end +function /(arg :: Real, self :: SymbolicNode) + _RDivScalar(self, scalar=arg) +end function ./(arg :: Real, self :: SymbolicNode) _RDivScalar(self, scalar=arg) end diff --git a/src/util.jl b/src/util.jl index 11d12e7f9dd2..8e134b05c527 100644 --- a/src/util.jl +++ b/src/util.jl @@ -15,7 +15,7 @@ function get_mnist_ubyte() :train_label => "train-labels-idx1-ubyte", :test_data => "t10k-images-idx3-ubyte", :test_label => "t10k-labels-idx1-ubyte") - filenames 
= Dict([k => joinpath(mnist_dir, v) for (k,v) in filenames]) + filenames = Dict(map((x) -> x[1] => joinpath(mnist_dir, x[2]), filenames)) if !all(isfile, values(filenames)) cd(mnist_dir) do mnist_dir = download("http://data.dmlc.ml/mxnet/data/mnist.zip", "mnist.zip") @@ -38,7 +38,7 @@ function get_cifar10() cifar10_dir = joinpath(data_dir, "cifar10") mkpath(cifar10_dir) filenames = Dict(:train => "cifar/train.rec", :test => "cifar/test.rec") - filenames = Dict([k => joinpath(cifar10_dir, v) for (k,v) in filenames]) + filenames = Dict(map((x) -> x[1] => joinpath(cifar10_dir, x[2]), filenames)) if !all(isfile, values(filenames)) cd(cifar10_dir) do run(`http://data.dmlc.ml/mxnet/data/cifar10.zip`) diff --git a/test/runtests.jl b/test/runtests.jl index cd9087b7202b..20125bc2b8cf 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,5 +1,10 @@ using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end # run test in the whole directory, latest modified files # are run first, this makes waiting time shorter when writing @@ -12,9 +17,13 @@ function test_dir(dir) end include(joinpath(dirname(@__FILE__), "common.jl")) -test_dir(joinpath(dirname(@__FILE__), "unittest")) +@testset "MXNet Test" begin + test_dir(joinpath(dirname(@__FILE__), "unittest")) -# run the basic MNIST mlp example -if haskey(ENV, "CONTINUOUS_INTEGRATION") - include(joinpath(Pkg.dir("MXNet"), "examples", "mnist", "mlp-test.jl")) + # run the basic MNIST mlp example + if haskey(ENV, "CONTINUOUS_INTEGRATION") + @testset "MNIST Test" begin + include(joinpath(Pkg.dir("MXNet"), "examples", "mnist", "mlp-test.jl")) + end + end end diff --git a/test/travis/setup_env.sh b/test/travis/setup_env.sh index 5a33a9e42955..4bae93243949 100755 --- a/test/travis/setup_env.sh +++ b/test/travis/setup_env.sh @@ -15,5 +15,6 @@ if [ ${TRAVIS_OS_NAME} == "linux" ]; then mkdir shadow_bin ln -s `which gcc-4.8` shadow_bin/gcc ln -s `which g++-4.8` 
shadow_bin/g++ + export PATH=$PWD/shadow_bin:$PATH fi diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index c764435d5a9c..592a51a9f101 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -1,6 +1,11 @@ module TestBind using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end using ..Main: rand_dims, reldiff @@ -70,7 +75,9 @@ end ################################################################################ # Run tests ################################################################################ -test_arithmetic() +@testset "Bind Test" begin + test_arithmetic() +end end diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 39e37f1bfe9a..9978ea4ccbce 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -1,6 +1,11 @@ module TestIO using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end using ..Main: rand_dims, reldiff @@ -117,8 +122,10 @@ function test_arrays_shuffle() @test reldiff(data_got, data[:,Int[label_got...]]) < 1e-6 end -test_arrays_shuffle() -test_arrays() -test_mnist() +@testset "IO Test" begin + test_arrays_shuffle() + test_arrays() + test_mnist() +end end diff --git a/test/unittest/kvstore.jl b/test/unittest/kvstore.jl index 2770edf7a259..05f5a9af5a60 100644 --- a/test/unittest/kvstore.jl +++ b/test/unittest/kvstore.jl @@ -1,6 +1,11 @@ module TestKVStore using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end using ..Main: rand_dims @@ -62,8 +67,10 @@ function test_aggregator() end end -test_kv_basic() -test_single_kv_pair() -test_aggregator() +@testset "KVStore Test" begin + test_kv_basic() + test_single_kv_pair() + test_aggregator() +end end diff --git a/test/unittest/name.jl b/test/unittest/name.jl index aca039670286..2df05f10b4f2 100644 --- a/test/unittest/name.jl 
+++ b/test/unittest/name.jl @@ -1,6 +1,11 @@ module TestNameManager using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end function test_default() info("NameManager::default") @@ -25,7 +30,9 @@ function test_prefix() @test get!(prefix_manager, "", name) == Symbol("$prefix$(name)0") end -test_default() -test_prefix() +@testset "Name Test" begin + test_default() + test_prefix() +end end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 5b555b7d1adc..42c8c0de54b6 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -1,6 +1,11 @@ module TestNDArray using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end using ..Main: rand_dims, reldiff @@ -121,8 +126,8 @@ function test_plus() a6 = copy(t6, mx.cpu()) scalar_small = Float16(1e-5) scalar_large = Float16(1e4) - @test reldiff(t6 + scalar_small, copy(a6 .+ scalar_small)) < 1e-2 - @test reldiff(t6 + scalar_large, copy(a6 .+ scalar_large)) < 1e-2 + @test reldiff(t6 + scalar_small, copy(a6 .+ scalar_small)) < 1e-1 + @test reldiff(t6 + scalar_large, copy(a6 .+ scalar_large)) < 1e-1 end function test_minus() @@ -172,8 +177,8 @@ function test_minus() a6 = copy(t6, mx.cpu()) scalar_small = Float16(1e-5) scalar_large = Float16(1e4) - @test reldiff(t6 - scalar_small, copy(a6 .- scalar_small)) < 1e-2 - @test reldiff(t6 - scalar_large, copy(a6 .- scalar_large)) < 1e-2 + @test reldiff(t6 - scalar_small, copy(a6 .- scalar_small)) < 1e-1 + @test reldiff(t6 - scalar_large, copy(a6 .- scalar_large)) < 1e-1 end function test_mul() @@ -213,7 +218,7 @@ function test_mul() t6, a6 = rand_tensors(Float16, dims) scalar_small = Float16(1e-5) - @test reldiff(t6 * scalar_small, copy(a6 .* scalar_small)) < 1e-2 + @test reldiff(t6 * scalar_small, copy(a6 .* scalar_small)) < 1e-1 end function test_div() @@ -254,7 +259,7 @@ function test_div() t6, a6 
= rand_tensors(Float16, dims) scalar_large = 1e4 - @test reldiff(t6 / scalar_large, copy(a6 ./ scalar_large)) < 1e-2 + @test reldiff(t6 / scalar_large, copy(a6 ./ scalar_large)) < 1e-1 end function test_gd() @@ -300,7 +305,7 @@ function test_saveload() # save and load dictionary of ndarrays names = [Symbol("array$i") for i = 1:n_arrays] - dict = Dict([n => v for (n,v) in zip(names, nd_arrays)]) + dict = Dict([(n, v) for (n,v) in zip(names, nd_arrays)]) mx.save(fname, dict) data = mx.load(fname, mx.NDArray) @test isa(data, Dict{Symbol, mx.NDArray}) @@ -397,20 +402,22 @@ end ################################################################################ # Run tests ################################################################################ -test_assign() -test_copy() -test_slice() -test_plus() -test_minus() -test_mul() -test_div() -test_gd() -test_saveload() -test_clip() -test_sqrt() -test_eltype() -test_nd_as_jl() -test_dot() -test_kwargs() +@testset "NDArray Test" begin + test_assign() + test_copy() + test_slice() + test_plus() + test_minus() + test_mul() + test_div() + test_gd() + test_saveload() + test_clip() + test_sqrt() + test_eltype() + test_nd_as_jl() + test_dot() + test_kwargs() +end end diff --git a/test/unittest/operator.jl b/test/unittest/operator.jl index f4332582428c..e06d87e55c8f 100644 --- a/test/unittest/operator.jl +++ b/test/unittest/operator.jl @@ -1,6 +1,11 @@ module TestOperator using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end using ..Main: rand_dims, reldiff @@ -31,6 +36,9 @@ end ################################################################################ # Run tests ################################################################################ -test_scalar_op() + +@testset "Operator Test" begin + test_scalar_op() +end end diff --git a/test/unittest/random.jl b/test/unittest/random.jl index 5328aff26906..ac023d7226c9 100644 --- 
a/test/unittest/random.jl +++ b/test/unittest/random.jl @@ -1,6 +1,11 @@ module TestRandom using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end function test_uniform() dims = (100, 100, 2) @@ -37,7 +42,9 @@ function test_gaussian() @test abs(std(copy(ret1)) - σ) < 0.1 end -test_uniform() -test_gaussian() +@testset "Random Test" begin + test_uniform() + test_gaussian() +end end diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index ca2986d4a377..423a4c06e74e 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -1,6 +1,11 @@ module TestSymbolicNode using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end using ..Main: mlp2, reldiff @@ -128,14 +133,16 @@ end ################################################################################ # Run tests ################################################################################ -test_basic() -test_internal() -test_compose() -test_infer_shape() -test_infer_shape_error() -test_saveload() -test_attrs() -test_functions() -test_dot() +@testset "SymbolicNode Test" begin + test_basic() + test_internal() + test_compose() + test_infer_shape() + test_infer_shape_error() + test_saveload() + test_attrs() + test_functions() + test_dot() +end end diff --git a/test/unittest/visualize.jl b/test/unittest/visualize.jl index 973c2b7034d0..f8b9f2164aba 100644 --- a/test/unittest/visualize.jl +++ b/test/unittest/visualize.jl @@ -1,6 +1,11 @@ module TestVisualize using MXNet -using Base.Test +if VERSION ≥ v"0.5.0-dev+7720" + using Base.Test +else + using BaseTestNext + const Test = BaseTestNext +end using ..Main: mlp2 @@ -30,5 +35,8 @@ end ################################################################################ # Run tests ################################################################################ 
-test_basic() + +@testset "Visualize Test" begin + test_basic() +end end From 030990a155fab5c1e36075800f144f72c3424901 Mon Sep 17 00:00:00 2001 From: Spencer Lyon Date: Sun, 15 Jan 2017 08:38:44 -0500 Subject: [PATCH 432/630] BUG: fix two small mistakes with verbosity control (#168) --- src/model.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/model.jl b/src/model.jl index 98a1a21a216f..3b38cf3193f2 100644 --- a/src/model.jl +++ b/src/model.jl @@ -522,7 +522,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra time_stop = time() metric = get(opts.eval_metric) - opts.verbosity >= 2 && info(format("== Epoch {1:0>3d}/{1:0>3d} ==========", i_epoch, opts.n_epoch)) + opts.verbosity >= 2 && info(format("== Epoch {1:0>3d}/{2:0>3d} ==========", i_epoch, opts.n_epoch)) if opts.verbosity >= 3 info("## Training summary") for (name, value) in metric @@ -579,6 +579,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end # end of all epochs opts.verbosity >= 1 && info("Finish training on $(self.ctx)") + nothing end function save_checkpoint(self :: FeedForward, prefix :: AbstractString, state :: OptimizationState) From 22a3c725747a4580a19b00217e869d390757d9ea Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Mon, 16 Jan 2017 14:12:05 +0800 Subject: [PATCH 433/630] Drop support for v0.4 and Deprecations for v0.6 (#171) * Fix deprecation warning on v0.6 * drop support for v0.4 --- .travis.yml | 1 - REQUIRE | 3 +-- src/base.jl | 12 ++++++------ src/io.jl | 4 ++-- src/kvstore.jl | 2 +- src/ndarray.jl | 2 +- src/symbolic-node.jl | 6 +++--- src/visualize.jl | 2 +- test/common.jl | 4 ++-- test/unittest/kvstore.jl | 4 ++-- test/unittest/metric.jl | 6 +++++- test/unittest/ndarray.jl | 4 ++-- 12 files changed, 26 insertions(+), 24 deletions(-) diff --git a/.travis.yml b/.travis.yml index 284ca4b422ae..571644e7773b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,7 +5,6 @@ os: - linux - osx 
julia: - - 0.4 - 0.5 - nightly diff --git a/REQUIRE b/REQUIRE index 1ca6bdddd26a..7812bc91c571 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,6 +1,5 @@ -julia 0.4 +julia 0.5 Compat 0.9.1 Formatting BinDeps JSON -BaseTestNext diff --git a/src/base.jl b/src/base.jl index 588b777177bf..d34639e0516d 100644 --- a/src/base.jl +++ b/src/base.jl @@ -208,11 +208,11 @@ function _defstruct_impl(is_immutable, name, fields) name = esc(name.args[1]) end - field_defs = Array(Expr, length(fields)) # :(field2 :: Int) - field_names = Array(Expr, length(fields)) # :field2 - field_defaults = Array(Expr, length(fields)) # :(field2 = 0) - field_types = Array(Expr, length(fields)) # Int - field_asserts = Array(Expr, length(fields)) # :(field2 >= 0) + field_defs = Vector{Expr}(length(fields)) # :(field2 :: Int) + field_names = Vector{Expr}(length(fields)) # :field2 + field_defaults = Vector{Expr}(length(fields)) # :(field2 = 0) + field_types = Vector{Expr}(length(fields)) # Int + field_asserts = Vector{Expr}(length(fields)) # :(field2 >= 0) required_field = Symbol[] for i = 1:length(fields) @@ -249,7 +249,7 @@ function _defstruct_impl(is_immutable, name, fields) f_name, f_type = param :($f_name = convert($f_type, $f_name)) end - asserts = map(filter(i -> isdefined(field_asserts,i), 1:length(fields))) do i + asserts = map(filter(i -> isassigned(field_asserts,i), 1:length(fields))) do i :(@assert($(field_asserts[i]))) end construct = Expr(:call, name, field_names...) 
diff --git a/src/io.jl b/src/io.jl index da3ba52cecff..f65314e67c71 100644 --- a/src/io.jl +++ b/src/io.jl @@ -537,7 +537,7 @@ function _get_iter_name(hdr :: MX_handle) (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) - return Symbol(unsafe_wrap(String, ref_name[])) + return Symbol(unsafe_string(ref_name[])) end const _iter_creator_cache = Dict{Symbol, MX_handle}() @@ -564,7 +564,7 @@ function _define_data_iter_creator(hdr :: MX_handle) (MX_handle, Ref{char_p}, Ref{char_p}, Ref{MX_uint}, Ref{char_pp}, Ref{char_pp}, Ref{char_pp}), hdr, ref_name, ref_desc, ref_narg, ref_arg_names, ref_arg_types, ref_arg_descs) - iter_name = Symbol(unsafe_wrap(String, ref_name[])) + iter_name = Symbol(unsafe_string(ref_name[])) isprovider = endswith(string(iter_name), "Iter") signature = _format_signature(Int(ref_narg[]), ref_arg_names) diff --git a/src/kvstore.jl b/src/kvstore.jl index 03da58197d70..a1d835f8a6c2 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -87,7 +87,7 @@ end function get_type(self :: KVStore) type_ref = Ref{char_p}(0) @mxcall(:MXKVStoreGetType, (MX_handle, Ref{char_p}), self, type_ref) - return Symbol(unsafe_wrap(String, type_ref[])) + return Symbol(unsafe_string(type_ref[])) end function get_num_workers(self :: KVStore) diff --git a/src/ndarray.jl b/src/ndarray.jl index 7dd0a59b9ad5..d37b321a2d8a 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -882,7 +882,7 @@ function load(filename::AbstractString, ::Type{NDArray}) return [NDArray(MX_NDArrayHandle(hdr)) for hdr in unsafe_wrap(Array, out_hdrs[], out_size)] else @assert out_size == out_name_size - return Dict([(Symbol(unsafe_wrap(String, k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in + return Dict([(Symbol(unsafe_string(k)), NDArray(MX_NDArrayHandle(hdr))) for (k,hdr) in zip(unsafe_wrap(Array, out_names[], out_size), unsafe_wrap(Array, out_hdrs[], out_size))]) end end diff 
--git a/src/symbolic-node.jl b/src/symbolic-node.jl index ad63fe87e30b..f5a518c35e56 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -55,7 +55,7 @@ macro _list_symbol_info(self, func_name) $(esc(self)), ref_sz, ref_names) narg = ref_sz[] names = unsafe_wrap(Array, ref_names[], narg) - names = [Symbol(unsafe_wrap(String, x)) for x in names] + names = [Symbol(unsafe_string(x)) for x in names] return names end end @@ -151,7 +151,7 @@ function list_attr(self :: SymbolicNode) strings = unsafe_wrap(Array, ref_strings[], narg) out = Dict{Symbol, String}() for i in 1:2:narg - key = Symbol(unsafe_wrap(String, strings[i])) + key = Symbol(unsafe_string(strings[i])) value = unsafe_string(strings[i+1]) # Creates a copy of string out[key] = value end @@ -174,7 +174,7 @@ function list_all_attr(self :: SymbolicNode) strings = unsafe_wrap(Array, ref_strings[], narg) out = Dict{Symbol, String}() for i in 1:2:narg - key = Symbol(unsafe_wrap(String, strings[i])) + key = Symbol(unsafe_string(strings[i])) value = unsafe_string(strings[i+1]) out[key] = value end diff --git a/src/visualize.jl b/src/visualize.jl index 42d31a22d69e..f7dfca859432 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -136,7 +136,7 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp end println(io, "}") - return takebuf_string(io) + return String(take!(io)) end function _format_graphviz_attr(io::IOBuffer, attrs) diff --git a/test/common.jl b/test/common.jl index a394acf95a93..7f7cd3cb78ea 100644 --- a/test/common.jl +++ b/test/common.jl @@ -2,8 +2,8 @@ # Common models used in testing ################################################################################ function reldiff(a, b) - diff = sum(abs(a - b)) - norm = sum(abs(a)) + diff = sum(abs.(a .- b)) + norm = sum(abs.(a)) return diff / (norm + 1e-10) end diff --git a/test/unittest/kvstore.jl b/test/unittest/kvstore.jl index 05f5a9af5a60..15b3f8c2c42c 100644 --- a/test/unittest/kvstore.jl +++ 
b/test/unittest/kvstore.jl @@ -37,7 +37,7 @@ function test_single_kv_pair() mx.push!(kv, 3, mx.ones(SHAPE)) val = mx.empty(SHAPE) mx.pull!(kv, 3, val) - @test maximum(abs(copy(val) - 1)) == 0 + @test maximum(abs.(copy(val) .- 1)) == 0 end function test_aggregator() @@ -52,7 +52,7 @@ function test_aggregator() mx.push!(kv, 3, vals) mx.pull!(kv, 3, vals) for v in vals - @test maximum(abs(copy(v)) - num_devs) == 0 + @test maximum(abs.(copy(v)) - num_devs) == 0 end # list diff --git a/test/unittest/metric.jl b/test/unittest/metric.jl index f0a79d6ce994..fd36d4ffc7f8 100644 --- a/test/unittest/metric.jl +++ b/test/unittest/metric.jl @@ -49,7 +49,11 @@ function test_ace() metric = mx.ACE() # For categorical variables, ACE == -LL mx._update_single_output(metric, mx.NDArray(labels), mx.NDArray(probs)) LL_v2 = metric.ace_sum / metric.n_sample - @test_approx_eq_eps LL LL_v2 1e-12 + @static if VERSION >= v"0.6.0-dev.2075" + @test LL ≈ LL_v2 atol=1e-12 + else + @test_approx_eq_eps LL LL_v2 1e-12 + end end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 42c8c0de54b6..9b96861e0006 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -322,7 +322,7 @@ function test_clip() info("NDArray::clip::dims = $dims") j_array, nd_array = rand_tensors(dims) - clip_up = maximum(abs(j_array)) / 2 + clip_up = maximum(abs.(j_array)) / 2 clip_down = 0 clipped = mx.clip(nd_array, a_min=clip_down, a_max=clip_up) @@ -338,7 +338,7 @@ function test_sqrt() j_array, nd_array = rand_tensors(dims) sqrt_ed = sqrt(nd_array) - @test reldiff(copy(sqrt_ed), sqrt(j_array)) < 1e-6 + @test reldiff(copy(sqrt_ed), sqrt.(j_array)) < 1e-6 end function test_nd_as_jl() From 2a66ea3242203c549f156f3650db208189a0418e Mon Sep 17 00:00:00 2001 From: mdpradeep Date: Fri, 20 Jan 2017 18:30:14 +0530 Subject: [PATCH 434/630] Update build.jl (#174) --- deps/build.jl | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 
37ce0c7b862d..fece3065fe4d 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -24,11 +24,18 @@ if !libmxnet_detected ################################################################################ # If not found, try to build automatically using BinDeps ################################################################################ - if is_windows() - info("Please follow the libmxnet documentation on how to build manually") - info("or to install pre-build packages:") - info("http://mxnet.readthedocs.io/en/latest/how_to/build.html#building-on-windows") - error("Automatic building libxmnet on Windows is currently not supported yet.") + if is_windows() + DOWNLOAD_URL = "https://github.com/dmlc/mxnet/releases/download/20160531/20160531_win10_x64_cpu.7z" + run(download_cmd(DOWNLOAD_URL, "mxnet.7z")) + run(`7z x mxnet.7z -y -ousr`) + run(`usr\\setupenv.cmd`) + run(`cmd /c copy "usr\\3rdparty\\openblas\\bin\\*.dll" "usr\\lib"`) + + DOWNLOAD_URL = "https://github.com/yajiedesign/mxnet/releases/download/20161125/20161125_mxnet_x64_cpu.7z" + run(download_cmd(DOWNLOAD_URL, "mxnet.7z")) + run(`7z x mxnet.7z -y -ousr`) + + return end blas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) From 3ad5819b5202d444b33e521a2430b20222b3305b Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 26 Jan 2017 17:28:43 +0900 Subject: [PATCH 435/630] update NEWS.md --- NEWS.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/NEWS.md b/NEWS.md index 2d0ec817ce39..f4fb93004db8 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,9 @@ +# v0.2.0 (2017.01.26) +* Drop support for Julia v0.4. +* Added support for NVVM. +* Updated supported version of MXNet to 0.9.3. +* New optimizers (@Arkoniak). + # v0.1.0 (2016.09.08) * Track specific libmxnet version for each release. 
From 8d044b160da4377f117860496861936677d2bcba Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 Jan 2017 14:33:02 +0900 Subject: [PATCH 436/630] refactor windows support --- deps/build.jl | 42 +++++++++++++++++++++++++++++------------- 1 file changed, 29 insertions(+), 13 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index fece3065fe4d..3ef676e8a667 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -1,10 +1,12 @@ using Compat +import JSON ################################################################################ # First try to detect and load existing libmxnet ################################################################################ libmxnet_detected = false libmxnet_curr_ver = "master" +curr_win = "20161125" if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") @@ -18,25 +20,39 @@ if haskey(ENV, "MXNET_HOME") end end + + using BinDeps @BinDeps.setup if !libmxnet_detected + if is_windows() + # TODO: Detect GPU support on Windows + info("Downloading pre-built CPU packages for Windows.") + base_url = "https://github.com/dmlc/mxnet/releases/download/20160531/20160531_win10_x64_cpu.7z" + if libmxnet_curr_ver == "master" + # download_cmd uses powershell 2, but we need powershell 3 to do this + ps_wget(url, file) = run(`powershell -NoProfile -Command "wget \"$url\" -o \"$file\""`) + ps_wget("https://api.github.com/repos/yajiedesign/mxnet/releases/latest", "mxnet.json") + curr_win = JSON.parsefile("mxnet.json")["tag_name"] + info("Can't use MXNet master on Windows, using latest binaries from $curr_win.") + end + # TODO: Get url from JSON. 
+ package_url = "https://github.com/yajiedesign/mxnet/releases/download/$(curr_win)/$(curr_win)_mxnet_x64_vc12_cpu.7z" + + run(download_cmd(base_url, "mxnet_base.7z")) + run(`7z x mxnet_base.7z -y -ousr`) + run(`usr\\setupenv.cmd`) + run(`cmd /c copy "usr\\3rdparty\\openblas\\bin\\*.dll" "usr\\lib"`) + + run(download_cmd(package_url, "mxnet.7z")) + run(`7z x mxnet.7z -y -ousr`) + + return + end + ################################################################################ # If not found, try to build automatically using BinDeps ################################################################################ - if is_windows() - DOWNLOAD_URL = "https://github.com/dmlc/mxnet/releases/download/20160531/20160531_win10_x64_cpu.7z" - run(download_cmd(DOWNLOAD_URL, "mxnet.7z")) - run(`7z x mxnet.7z -y -ousr`) - run(`usr\\setupenv.cmd`) - run(`cmd /c copy "usr\\3rdparty\\openblas\\bin\\*.dll" "usr\\lib"`) - - DOWNLOAD_URL = "https://github.com/yajiedesign/mxnet/releases/download/20161125/20161125_mxnet_x64_cpu.7z" - run(download_cmd(DOWNLOAD_URL, "mxnet.7z")) - run(`7z x mxnet.7z -y -ousr`) - - return - end blas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) From b039771b87aa6817697cc95c3f75032befe99f24 Mon Sep 17 00:00:00 2001 From: Harsha Byadarahalli Mahesh Date: Fri, 27 Jan 2017 15:47:56 +0530 Subject: [PATCH 437/630] Using WebRequest function to download the JSON file wget was failing to download mxnet.json , so replaced them with Invoke-WebRequest --- deps/build.jl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 3ef676e8a667..4fe217056a4e 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -31,8 +31,7 @@ if !libmxnet_detected base_url = "https://github.com/dmlc/mxnet/releases/download/20160531/20160531_win10_x64_cpu.7z" if libmxnet_curr_ver == "master" # download_cmd uses powershell 2, but we need powershell 3 to do this - ps_wget(url, file) = run(`powershell -NoProfile -Command "wget \"$url\" -o 
\"$file\""`) - ps_wget("https://api.github.com/repos/yajiedesign/mxnet/releases/latest", "mxnet.json") + run(`powershell -NoProfile -Command Invoke-WebRequest -Uri "https://api.github.com/repos/yajiedesign/mxnet/releases/latest" -OutFile "mxnet.json"`) curr_win = JSON.parsefile("mxnet.json")["tag_name"] info("Can't use MXNet master on Windows, using latest binaries from $curr_win.") end From 6335ecac3ece7c78d401a655ee483a7ae4abe535 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 Jan 2017 19:20:23 +0900 Subject: [PATCH 438/630] fix vectorized abs deprecation --- test/unittest/kvstore.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unittest/kvstore.jl b/test/unittest/kvstore.jl index 15b3f8c2c42c..9b7b0a4e4bb4 100644 --- a/test/unittest/kvstore.jl +++ b/test/unittest/kvstore.jl @@ -62,7 +62,7 @@ function test_aggregator() for vv in vals for v in vv - @test maximum(abs(copy(v)) - 2num_devs) == 0 + @test maximum(abs.(copy(v)) - 2 * num_devs) == 0 end end end From d9f9551754c33a42596140e07b2c891cf17d87e3 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 Jan 2017 19:24:27 +0900 Subject: [PATCH 439/630] don't include code from within a function --- examples/mnist/mlp-test.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index a88ba3772fb0..c282c1dead1d 100644 --- a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -5,6 +5,8 @@ module MNISTTest using MXNet using Base.Test +include("mnist-data.jl") + function get_mnist_mlp() mlp = @mx.chain mx.Variable(:data) => mx.FullyConnected(name=:fc1, num_hidden=128) => @@ -17,7 +19,6 @@ function get_mnist_mlp() end function get_mnist_data(batch_size=100) - include("mnist-data.jl") return get_mnist_providers(batch_size) end From a6968a6b26ab0c4a5321ae73410ff2460ff192b6 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 Jan 2017 19:31:23 +0900 Subject: [PATCH 440/630] deprecation of 
filter and Array creation --- src/MXNet.jl | 4 ++++ src/model.jl | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/MXNet.jl b/src/MXNet.jl index b9de52a58bf8..892cc4e95163 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -11,6 +11,10 @@ using Compat import Compat.String import Compat.view +if VERSION >= v"0.6.0-dev.1024" + import Base.Iterators: filter +end + using Formatting # Functions from base that we can safely extend and that are defined by libmxnet. diff --git a/src/model.jl b/src/model.jl index 3b38cf3193f2..f1f05595533d 100644 --- a/src/model.jl +++ b/src/model.jl @@ -389,7 +389,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra end end - train_execs = Array(Executor, num_dev) + train_execs = Array{Executor}(num_dev) for i = 1:num_dev data_shapes = Dict(map((x) -> x[1] => tuple(x[2][1:end-1]...,length(slices[i])), provide_data(data))) label_shapes = Dict(map((x) -> x[1] => tuple(x[2][1:end-1]...,length(slices[i])), provide_label(data))) From a79d002df3f94d4878d1e4cdfc3308e7b03dee2a Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 Jan 2017 19:33:30 +0900 Subject: [PATCH 441/630] readall deprecation --- examples/char-lstm/seq-data.jl | 2 +- examples/char-lstm/train.jl | 2 +- examples/mnist/mlp-test.jl | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index 1ea93e3b6116..cfcc5a67bb04 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -5,7 +5,7 @@ using MXNet function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; max_vocab=10000) if isfile(vocab_fn) info("Vocabulary already exists, reusing $vocab_fn...") - vocab = Dict{Char,Int}([w => i for (i,w) in enumerate(readall(vocab_fn))]) + vocab = Dict{Char,Int}([w => i for (i,w) in enumerate(readstring(vocab_fn))]) else # count symbol frequency dict = Dict{Char,Int}() diff --git a/examples/char-lstm/train.jl 
b/examples/char-lstm/train.jl index f96fdde1d674..30578603cafc 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -14,7 +14,7 @@ lstm = LSTM(LSTM_N_LAYER, SEQ_LENGTH, DIM_HIDDEN, DIM_EMBED, #--data # load data -text_all = readall(INPUT_FILE) +text_all = readstring(INPUT_FILE) len_train = round(Int, length(text_all)*DATA_TR_RATIO) text_tr = text_all[1:len_train] text_val = text_all[len_train+1:end] diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index c282c1dead1d..2acedd073dfa 100644 --- a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -41,7 +41,7 @@ function mnist_fit_and_predict(optimizer, initializer, n_epoch) end mlp_load = mx.load("$cp_prefix-symbol.json", mx.SymbolicNode) @test mx.to_json(mlp_load) == mx.to_json(mlp) - mlp_load = mx.from_json(readall("$cp_prefix-symbol.json"), mx.SymbolicNode) + mlp_load = mx.from_json(readstring("$cp_prefix-symbol.json"), mx.SymbolicNode) @test mx.to_json(mlp_load) == mx.to_json(mlp) #-------------------------------------------------------------------------------- From a476d3e448fa57ffb84f69b237fa05454af8e5ea Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 27 Jan 2017 19:53:13 +0900 Subject: [PATCH 442/630] MXNet.jl on v0.6 is currently subtlety broken due to broadcasting. 
--- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 571644e7773b..6c2f33c114a4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ os: - osx julia: - 0.5 - - nightly +# - nightly 0.6 supports depends on #170 # dependent apt packages addons: From fecfcdd146cc561a7742e11883b5c7a141c4d5c9 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 28 Jan 2017 12:17:43 +0900 Subject: [PATCH 443/630] fix use of Symbol in symbolic-node.jl, fixes #189 --- src/symbolic-node.jl | 4 ++-- test/unittest/symbolic-node.jl | 9 +++++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index f5a518c35e56..c85420939d85 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -627,8 +627,8 @@ function _define_atomic_symbol_creator(name :: String) $(if key_narg != "" quote - if !in(Symbol($key_narg), param_keys) - push!(param_keys, Symbol($key_narg)) + if !in($key_narg, param_keys) + push!(param_keys, $key_narg) push!(param_vals, string(length(args))) end end diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 423a4c06e74e..573f541fbcd6 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -130,6 +130,14 @@ function test_dot() @test reldiff(ret, 2*ones(100, 200)) < 1e-6 end +function test_misc() + info("SymbolicNode::Miscellaneous") + # Test for #189 + a = mx.Variable("a") + b = mx.Variable("b") + symb = mx.ElementWiseSum(a,b) +end + ################################################################################ # Run tests ################################################################################ @@ -143,6 +151,7 @@ end test_attrs() test_functions() test_dot() + test_misc() end end From cac5625596312a5b9cf5628f4e2c7b6f4592059c Mon Sep 17 00:00:00 2001 From: Zac Cranko Date: Wed, 25 Jan 2017 10:13:36 +1100 Subject: [PATCH 444/630] refactor sgd.jl to exploit multiple dispatch --- 
src/optimizers/sgd.jl | 25 ++++++++++++++----------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index 2eda1d0bf67c..cd0b998fbed0 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -49,18 +49,21 @@ function create_state(self :: SGD, index :: Int, weight :: NDArray) end end -function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Union{Void, NDArray}) +function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Void) lr = get_learning_rate(self.opts.lr_scheduler, self.state) grad = normalized_gradient(self.opts, self.state, weight, grad) + + @inplace weight += -lr * grad +end - if isa(state, Void) - # vanilla SGD, without momentum - @inplace weight += -lr * grad - else - mom = state :: NDArray - coef = get_momentum(self.opts.momentum_scheduler, self.state) - @inplace mom .*= coef - @inplace mom .+= -lr * grad - @inplace weight .+= mom - end +# update with momentum +function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: NDArray) + lr = get_learning_rate(self.opts.lr_scheduler, self.state) + grad = normalized_gradient(self.opts, self.state, weight, grad) + + mom = state :: NDArray + coef = get_momentum(self.opts.momentum_scheduler, self.state) + @inplace mom .*= coef + @inplace mom .+= -lr * grad + @inplace weight .+= mom end From d0d79630551693997f4e9475d0c753eae7789061 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 28 Jan 2017 15:26:24 +0900 Subject: [PATCH 445/630] remove BaseTestNext and bump Compat --- REQUIRE | 2 +- test/runtests.jl | 7 +------ test/unittest/bind.jl | 7 +------ test/unittest/io.jl | 7 +------ test/unittest/kvstore.jl | 7 +------ test/unittest/name.jl | 7 +------ test/unittest/ndarray.jl | 7 +------ test/unittest/operator.jl | 7 +------ test/unittest/random.jl | 7 +------ test/unittest/symbolic-node.jl | 7 +------ test/unittest/visualize.jl | 7 +------ 11 files 
changed, 11 insertions(+), 61 deletions(-) diff --git a/REQUIRE b/REQUIRE index 7812bc91c571..8fdfc5fcde96 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,5 +1,5 @@ julia 0.5 -Compat 0.9.1 +Compat 0.9.5 Formatting BinDeps JSON diff --git a/test/runtests.jl b/test/runtests.jl index 20125bc2b8cf..823830b54d0d 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,10 +1,5 @@ using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test # run test in the whole directory, latest modified files # are run first, this makes waiting time shorter when writing diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index 592a51a9f101..79f639518487 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -1,11 +1,6 @@ module TestBind using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test using ..Main: rand_dims, reldiff diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 9978ea4ccbce..88d77bd3c16b 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -1,11 +1,6 @@ module TestIO using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test using ..Main: rand_dims, reldiff diff --git a/test/unittest/kvstore.jl b/test/unittest/kvstore.jl index 9b7b0a4e4bb4..cf15f1a65f05 100644 --- a/test/unittest/kvstore.jl +++ b/test/unittest/kvstore.jl @@ -1,11 +1,6 @@ module TestKVStore using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test using ..Main: rand_dims diff --git a/test/unittest/name.jl b/test/unittest/name.jl index 2df05f10b4f2..9287bf087074 100644 --- a/test/unittest/name.jl +++ b/test/unittest/name.jl @@ -1,11 +1,6 @@ module TestNameManager using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const 
Test = BaseTestNext -end +using Base.Test function test_default() info("NameManager::default") diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 9b96861e0006..155f365d6ee8 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -1,11 +1,6 @@ module TestNDArray using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test using ..Main: rand_dims, reldiff diff --git a/test/unittest/operator.jl b/test/unittest/operator.jl index e06d87e55c8f..76e51f4403f5 100644 --- a/test/unittest/operator.jl +++ b/test/unittest/operator.jl @@ -1,11 +1,6 @@ module TestOperator using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test using ..Main: rand_dims, reldiff diff --git a/test/unittest/random.jl b/test/unittest/random.jl index ac023d7226c9..54d651482cd6 100644 --- a/test/unittest/random.jl +++ b/test/unittest/random.jl @@ -1,11 +1,6 @@ module TestRandom using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test function test_uniform() dims = (100, 100, 2) diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 573f541fbcd6..36f125cd728c 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -1,11 +1,6 @@ module TestSymbolicNode using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test using ..Main: mlp2, reldiff diff --git a/test/unittest/visualize.jl b/test/unittest/visualize.jl index f8b9f2164aba..3bdaa12824d2 100644 --- a/test/unittest/visualize.jl +++ b/test/unittest/visualize.jl @@ -1,11 +1,6 @@ module TestVisualize using MXNet -if VERSION ≥ v"0.5.0-dev+7720" - using Base.Test -else - using BaseTestNext - const Test = BaseTestNext -end +using Base.Test using ..Main: 
mlp2 From 0ee926ecfd76f0f066b52205eeca915bcb1c0b60 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sun, 29 Jan 2017 17:58:43 +0900 Subject: [PATCH 446/630] remove usr/setupenv.cmd because it is too invasive --- deps/build.jl | 1 - src/base.jl | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 4fe217056a4e..7b654d9a9706 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -40,7 +40,6 @@ if !libmxnet_detected run(download_cmd(base_url, "mxnet_base.7z")) run(`7z x mxnet_base.7z -y -ousr`) - run(`usr\\setupenv.cmd`) run(`cmd /c copy "usr\\3rdparty\\openblas\\bin\\*.dll" "usr\\lib"`) run(download_cmd(package_url, "mxnet.7z")) diff --git a/src/base.jl b/src/base.jl index d34639e0516d..183465341f3c 100644 --- a/src/base.jl +++ b/src/base.jl @@ -18,7 +18,7 @@ typealias char_pp Ptr{char_p} ################################################################################ const MXNET_LIB = Libdl.find_library(["libmxnet.so","libmxnet.dll"], [joinpath("$(get(ENV,"MXNET_HOME",""))","lib"), - joinpath(Pkg.dir("MXNet"),"deps/usr/lib")]) + Pkg.dir("MXNet","deps","usr","lib")]) if isempty(MXNET_LIB) # touch this file, so that after the user properly build libmxnet, the precompiled # MXNet.ji will be re-compiled to get MXNET_LIB properly. 
From 75060bc0457499d8d06c226a2a570006d564e200 Mon Sep 17 00:00:00 2001 From: Arkoniak Date: Sun, 5 Feb 2017 17:29:17 +0400 Subject: [PATCH 447/630] Fix for symbolic node (#199) * Fix for symbolic node * wrong indentation * Better condition --- examples/char-lstm/lstm.jl | 40 +++++++++++++++++----------------- examples/char-lstm/sampler.jl | 8 +++---- examples/char-lstm/seq-data.jl | 12 +++++----- src/optimizer.jl | 2 +- src/symbolic-node.jl | 8 ++++--- 5 files changed, 36 insertions(+), 34 deletions(-) diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index e98778b65272..dcd1e6ddb60c 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -26,11 +26,11 @@ function lstm_cell(data::mx.SymbolicNode, prev_state::LSTMState, param::LSTMPara end i2h = mx.FullyConnected(data, weight=param.i2h_W, bias=param.i2h_b, - num_hidden=4num_hidden, name=symbol(name, "_i2h")) + num_hidden=4num_hidden, name=Symbol(name, "_i2h")) h2h = mx.FullyConnected(prev_state.h, weight=param.h2h_W, bias=param.h2h_b, - num_hidden=4num_hidden, name=symbol(name, "_h2h")) + num_hidden=4num_hidden, name=Symbol(name, "_h2h")) - gates = mx.SliceChannel(i2h + h2h, num_outputs=4, name=symbol(name, "_gates")) + gates = mx.SliceChannel(i2h + h2h, num_outputs=4, name=Symbol(name, "_gates")) in_gate = mx.Activation(gates[1], act_type=:sigmoid) in_trans = mx.Activation(gates[2], act_type=:tanh) @@ -49,17 +49,17 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla dropout::Real=0, name::Symbol=gensym(), output_states::Bool=false) # placeholder nodes for all parameters - embed_W = mx.Variable(symbol(name, "_embed_weight")) - pred_W = mx.Variable(symbol(name, "_pred_weight")) - pred_b = mx.Variable(symbol(name, "_pred_bias")) + embed_W = mx.Variable(Symbol(name, "_embed_weight")) + pred_W = mx.Variable(Symbol(name, "_pred_weight")) + pred_b = mx.Variable(Symbol(name, "_pred_bias")) layer_param_states = map(1:n_layer) do i - param = 
LSTMParam(mx.Variable(symbol(name, "_l$(i)_i2h_weight")), - mx.Variable(symbol(name, "_l$(i)_h2h_weight")), - mx.Variable(symbol(name, "_l$(i)_i2h_bias")), - mx.Variable(symbol(name, "_l$(i)_h2h_bias"))) - state = LSTMState(mx.Variable(symbol(name, "_l$(i)_init_c")), - mx.Variable(symbol(name, "_l$(i)_init_h"))) + param = LSTMParam(mx.Variable(Symbol(name, "_l$(i)_i2h_weight")), + mx.Variable(Symbol(name, "_l$(i)_h2h_weight")), + mx.Variable(Symbol(name, "_l$(i)_i2h_bias")), + mx.Variable(Symbol(name, "_l$(i)_h2h_bias"))) + state = LSTMState(mx.Variable(Symbol(name, "_l$(i)_init_c")), + mx.Variable(Symbol(name, "_l$(i)_init_h"))) (param, state) end #... @@ -69,17 +69,17 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla # now unroll over time outputs = mx.SymbolicNode[] for t = 1:seq_len - data = mx.Variable(symbol(name, "_data_$t")) - label = mx.Variable(symbol(name, "_label_$t")) + data = mx.Variable(Symbol(name, "_data_$t")) + label = mx.Variable(Symbol(name, "_label_$t")) hidden = mx.FullyConnected(data, weight=embed_W, num_hidden=dim_embed, - no_bias=true, name=symbol(name, "_embed_$t")) + no_bias=true, name=Symbol(name, "_embed_$t")) # stack LSTM cells for i = 1:n_layer l_param, l_state = layer_param_states[i] dp = i == 1 ? 
0 : dropout # don't do dropout for data next_state = lstm_cell(hidden, l_state, l_param, num_hidden=dim_hidden, dropout=dp, - name=symbol(name, "_lstm_$t")) + name=Symbol(name, "_lstm_$t")) hidden = next_state.h layer_param_states[i] = (l_param, next_state) end @@ -89,8 +89,8 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla hidden = mx.Dropout(hidden, p=dropout) end pred = mx.FullyConnected(hidden, weight=pred_W, bias=pred_b, num_hidden=n_class, - name=symbol(name, "_pred_$t")) - smax = mx.SoftmaxOutput(pred, label, name=symbol(name, "_softmax_$t")) + name=Symbol(name, "_pred_$t")) + smax = mx.SoftmaxOutput(pred, label, name=Symbol(name, "_softmax_$t")) push!(outputs, smax) end #... @@ -100,8 +100,8 @@ function LSTM(n_layer::Int, seq_len::Int, dim_hidden::Int, dim_embed::Int, n_cla # append block-gradient nodes to the final states for i = 1:n_layer l_param, l_state = layer_param_states[i] - final_state = LSTMState(mx.BlockGrad(l_state.c, name=symbol(name, "_l$(i)_last_c")), - mx.BlockGrad(l_state.h, name=symbol(name, "_l$(i)_last_h"))) + final_state = LSTMState(mx.BlockGrad(l_state.c, name=Symbol(name, "_l$(i)_last_c")), + mx.BlockGrad(l_state.h, name=Symbol(name, "_l$(i)_last_h"))) layer_param_states[i] = (l_param, final_state) end diff --git a/examples/char-lstm/sampler.jl b/examples/char-lstm/sampler.jl index 22166b7700c9..ad34f344cbcd 100644 --- a/examples/char-lstm/sampler.jl +++ b/examples/char-lstm/sampler.jl @@ -10,11 +10,11 @@ vocab = build_vocabulary(INPUT_FILE, VOCAB_FILE) n_class = length(vocab) # prepare data provider -jl_data = Pair[(symbol(NAME, "_data_$t") => zeros(mx.MX_float, (length(vocab), BATCH_SIZE_SMP))) +jl_data = Pair[(Symbol(NAME, "_data_$t") => zeros(mx.MX_float, (length(vocab), BATCH_SIZE_SMP))) for t = 1:1] -jl_c = Pair[(symbol(NAME, "_l$(l)_init_c") => zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) +jl_c = Pair[(Symbol(NAME, "_l$(l)_init_c") => zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) for 
l = 1:LSTM_N_LAYER] -jl_h = Pair[(symbol(NAME, "_l$(l)_init_h") => zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) +jl_h = Pair[(Symbol(NAME, "_l$(l)_init_h") => zeros(mx.MX_float, (DIM_HIDDEN, BATCH_SIZE_SMP))) for l = 1:LSTM_N_LAYER] # the first input in the sequence @@ -36,7 +36,7 @@ output_samples = zeros(Char, (SAMPLE_LENGTH, BATCH_SIZE_SMP)) output_samples[1, :] = SAMPLE_START # build inverse vocabulary for convenience -inv_vocab = Dict([v => k for (k,v) in vocab]) +inv_vocab = Dict(v => k for (k,v) in vocab) # do prediction and sampling step by step for t = 2:SAMPLE_LENGTH-1 diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index cfcc5a67bb04..fb7c8378a5f1 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -5,7 +5,7 @@ using MXNet function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; max_vocab=10000) if isfile(vocab_fn) info("Vocabulary already exists, reusing $vocab_fn...") - vocab = Dict{Char,Int}([w => i for (i,w) in enumerate(readstring(vocab_fn))]) + vocab = Dict{Char,Int}(w => i for (i,w) in enumerate(readstring(vocab_fn))) else # count symbol frequency dict = Dict{Char,Int}() @@ -25,7 +25,7 @@ function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; m end end - vocab = Dict([x.first => i for (i,x) in enumerate(vocab)]) + vocab = Dict(x.first => i for (i,x) in enumerate(vocab)) end vocab[UNKNOWN_CHAR] = length(vocab) return vocab @@ -50,12 +50,12 @@ end #--provide function mx.provide_data(p :: CharSeqProvider) - [(symbol(p.prefix, "_data_$t"), (length(p.vocab), p.batch_size)) for t = 1:p.seq_len] ∪ - [(symbol(p.prefix, "_l$(l)_init_c"), (p.dim_hidden, p.batch_size)) for l=1:p.n_layer] ∪ - [(symbol(p.prefix, "_l$(l)_init_h"), (p.dim_hidden, p.batch_size)) for l=1:p.n_layer] + [(Symbol(p.prefix, "_data_$t"), (length(p.vocab), p.batch_size)) for t = 1:p.seq_len] ∪ + [(Symbol(p.prefix, "_l$(l)_init_c"), (p.dim_hidden, p.batch_size)) for 
l=1:p.n_layer] ∪ + [(Symbol(p.prefix, "_l$(l)_init_h"), (p.dim_hidden, p.batch_size)) for l=1:p.n_layer] end function mx.provide_label(p :: CharSeqProvider) - [(symbol(p.prefix, "_label_$t"), (p.batch_size,)) for t = 1:p.seq_len] + [(Symbol(p.prefix, "_label_$t"), (p.batch_size,)) for t = 1:p.seq_len] end #--/provide diff --git a/src/optimizer.jl b/src/optimizer.jl index 66f7d660847a..5e9065b64127 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -259,7 +259,7 @@ function normalized_gradient(opts::AbstractOptimizerOptions, state::Optimization grad = grad_scale * grad if opts.grad_clip > 0 - grad = clip(grad, -opts.grad_clip, opts.grad_clip) + grad = clip(grad, a_min=-opts.grad_clip, a_max=opts.grad_clip) end if opts.weight_decay > 0 @inplace grad += opts.weight_decay * weight diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index c85420939d85..6e421b66d1be 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -650,8 +650,8 @@ function _define_atomic_symbol_creator(name :: String) end end - if length(args) != 0 && length(symbol_kws) != 0 - @assert(false, $name * " only accepts SymbolicNode either as positional or keyword arguments, not both.") + if length(args) > 1 && length(symbol_kws) != 0 + @assert(false, $name * " only accepts SymbolicNode either as positional or keyword arguments with optional positional `data` argument, not both.") end $(if key_narg != "" quote @@ -680,8 +680,10 @@ function _define_atomic_symbol_creator(name :: String) set_attr(node, k, v) end - if length(args) != 0 + if length(symbol_kws) == 0 _compose!(node, name, args...) + elseif length(args) == 1 + _compose!(node; name=name, data=args[1], symbol_kws...) else _compose!(node; name=name, symbol_kws...) 
end From f33f1dc19d7f1b5919f4651ae922c2387b851347 Mon Sep 17 00:00:00 2001 From: Mike J Innes Date: Fri, 24 Feb 2017 00:27:04 +0000 Subject: [PATCH 448/630] get_name (#206) --- src/symbolic-node.jl | 21 +++++++++++++++++++++ test/unittest/symbolic-node.jl | 2 ++ 2 files changed, 23 insertions(+) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 6e421b66d1be..4c73bd33642d 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -199,6 +199,27 @@ function set_attr(self :: SymbolicNode, key :: Symbol, value :: AbstractString) @mxcall(:MXSymbolSetAttr, (MX_handle, Cstring, Cstring), self, key_s, value_s) end +""" + get_name(self :: SymbolicNode) + +Get the name of the symbol. + + julia> x = mx.Variable(:data) + julia> mx.get_name(x) + :data + + julia> y = mx.FullyConnected(x, num_hidden = 128) + julia> mx.get_name(y) + :fullyconnected0 +""" +function get_name(self :: mx.SymbolicNode) + name = Ref{mx.char_p}(0) + success = Ref(0) + @mxcall(:MXSymbolGetName, (MX_handle, Ref{char_p}, Ref{Int}), self.handle.value, name, success) + @assert success[] != -1 + return Symbol(unsafe_wrap(String, name[])) +end + """ grad(self :: SymbolicNode, wrt :: Vector{SymbolicNode}) diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 36f125cd728c..4c8cff8adccc 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -86,6 +86,7 @@ function test_attrs() data = mx.Variable(:data) + @test mx.get_name(data) == :data result = mx.get_attr(data, :test) @test isnull(result) mx.set_attr(data, :test, "1.0") @@ -100,6 +101,7 @@ function test_attrs() @test isnull(mx.get_attr(conv, :b)) @test get(mx.get_attr(conv, :a)) == "a" @test get(mx.get_attr(conv, :π)) == "π" + @test isa(mx.get_name(conv), Symbol) @test_throws MethodError mx.Variable(:data3, attrs = Dict(:test => "1.0", :test2 => 1.0)) @test_throws MethodError mx.Convolution(data2, kernel = (1,1), num_filter = 1, attrs = Dict(:test => "1.0", :test2 => 1.0)) From 
17a544205b22d7092f07690816c000a25e43b0ef Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Tue, 28 Feb 2017 17:34:11 -0500 Subject: [PATCH 449/630] fix data URL error (#204) --- src/util.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/util.jl b/src/util.jl index b41641f68309..6f7dc1ba73ac 100644 --- a/src/util.jl +++ b/src/util.jl @@ -18,7 +18,7 @@ function get_mnist_ubyte() filenames = Dict(map((x) -> x[1] => joinpath(mnist_dir, x[2]), filenames)) if !all(isfile, values(filenames)) cd(mnist_dir) do - mnist_dir = download("http://data.dmlc.ml/mxnet/data/mnist.zip", "mnist.zip") + mnist_dir = download("http://data.mxnet.io/mxnet/data/mnist.zip", "mnist.zip") try run(`unzip -u $mnist_dir`) catch @@ -41,7 +41,7 @@ function get_cifar10() filenames = Dict(map((x) -> x[1] => joinpath(cifar10_dir, x[2]), filenames)) if !all(isfile, values(filenames)) cd(cifar10_dir) do - download("http://data.dmlc.ml/mxnet/data/cifar10.zip", "cifar10.zip") + download("http://data.mxnet.io/mxnet/data/cifar10.zip", "cifar10.zip") try run(`unzip -u cifar10.zip`) catch From c58eb466be4f0794b4860d31a5ce6beb3b8e6afe Mon Sep 17 00:00:00 2001 From: Michael Creel Date: Fri, 3 Mar 2017 23:58:19 +0100 Subject: [PATCH 450/630] fix for issue #207 (#208) --- examples/regression-example.jl | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/examples/regression-example.jl b/examples/regression-example.jl index 8c949f9b2fac..bf0acb14b2f1 100644 --- a/examples/regression-example.jl +++ b/examples/regression-example.jl @@ -30,17 +30,16 @@ evalprovider = mx.ArrayDataProvider(:data => ValidationInput, batch_size=batchsi # or add/remove a layer data = mx.Variable(:data) label = mx.Variable(:label) -net = @mx.chain mx.FullyConnected(data = data, num_hidden=10) => +net = @mx.chain mx.Variable(:data) => + mx.FullyConnected(num_hidden=10) => mx.Activation(act_type=:tanh) => mx.FullyConnected(num_hidden=3) => mx.Activation(act_type=:tanh) => - 
mx.FullyConnected(num_hidden=1) - -# squared error loss is appropriate for regression, don't change -cost = mx.LinearRegressionOutput(data = net, label=label) + mx.FullyConnected(num_hidden=1) => + mx.LinearRegressionOutput(label) # final model definition, don't change, except if using gpu -model = mx.FeedForward(cost, context=mx.cpu()) +model = mx.FeedForward(net, context=mx.cpu()) # set up the optimizer: select one, explore parameters, if desired #optimizer = mx.SGD(lr=0.01, momentum=0.9, weight_decay=0.00001) @@ -48,11 +47,11 @@ optimizer = mx.ADAM() # train, reporting loss for training and evaluation sets # initial training with small batch size, to get to a good neighborhood -batchsize = 100 -mx.fit(model, optimizer, initializer=mx.NormalInitializer(0.0,0.1), eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 500) +batchsize = 200 +mx.fit(model, optimizer, initializer=mx.NormalInitializer(0.0,0.1), eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 20) # more training with the full sample batchsize = samplesize -mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 500) +mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 20) # obtain predictions plotprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput) From fa25ad6f4ad1769bd86c3ae9a0c5c3bd050cef5d Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 10 Mar 2017 00:14:39 +0800 Subject: [PATCH 451/630] metric: intro NMSE normalized mean squared error --- src/metric.jl | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/src/metric.jl b/src/metric.jl index dc3c3aef452a..7432657aaa14 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -179,6 +179,47 @@ function reset!(metric :: MSE) metric.n_sample = 0 end +doc""" + NMSE + +Normalized Mean Squared Error + +```math +\sum_i (\frac{label_i - pred_i}{label_i})^2 +``` +""" 
+type NMSE <: AbstractEvalMetric + nmse_sum :: Float64 + n_sample :: Int + + NMSE() = new(0.0, 0) +end + +function _update_single_output(metric :: NMSE, label :: NDArray, pred :: NDArray) + label = copy(label) + pred = copy(pred) + + n_sample = size(pred)[end] + metric.n_sample += n_sample + + for i = 1:n_sample + if label[i] == 0.0f0 # in case of batch padding + continue + end + + metric.nmse_sum += ((label[i] - pred[i]) / label[i])^2 + end +end + +function get(metric :: NMSE) + return [(:NMSE, metric.nmse_sum / metric.n_sample)] +end + +function reset!(metric :: NMSE) + metric.nmse_sum = 0.0 + metric.n_sample = 0 +end + """ ACE From f23b0a9dba5eed903b2f255ff9880c9e6987399c Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 10 Mar 2017 00:15:22 +0800 Subject: [PATCH 452/630] doc: update CDN link of MathJax --- docs/mkdocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index bf604818027e..ed18e479ae37 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -12,7 +12,7 @@ extra_css: - assets/Documenter.css extra_javascript: - - https://cdn.mathjax.org/mathjax/latest/MathJax.jl?config=TeX-AMS-MML_HTMLorMML + - https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_HTML - assets/mathjaxhelper.js markdown_extensions: From 96191ea24346816b36c029b1bcd1bcf9dfd25781 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 10 Mar 2017 11:00:11 +0800 Subject: [PATCH 453/630] metric: add test cases for NMSE --- test/unittest/metric.jl | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/test/unittest/metric.jl b/test/unittest/metric.jl index fd36d4ffc7f8..46b62a243dc1 100644 --- a/test/unittest/metric.jl +++ b/test/unittest/metric.jl @@ -57,10 +57,46 @@ function test_ace() end +function test_nmse() + info("EvalMetric::NMSE") + + @testset "EvalMetric::NMSE::update!" 
begin + metric = mx.NMSE() + labels = Array{mx.NDArray}( + [mx.NDArray([100.0, 0.0]), mx.NDArray([10.0, 0.0])]) + preds = Array{mx.NDArray}( + [mx.NDArray([20.0, 0.0]), mx.NDArray([2.0, 0.0])]) + + mx.update!(metric, labels, preds) + @test metric.nmse_sum ≈ 0.64 * 2 + end + + @testset "EvalMetric::NMSE::reset!" begin + metric = mx.NMSE() + metric.nmse_sum = sum(rand(10)) + metric.n_sample = 42 + + mx.reset!(metric) + + @test metric.nmse_sum == 0.0 + @test metric.n_sample == 0 + end + + @testset "EvalMetric::NMSE::get" begin + metric = mx.NMSE() + metric.nmse_sum = 100.0 + metric.n_sample = 20 + + @test mx.get(metric) == [(:NMSE, 5.0)] + end +end + + ################################################################################ # Run tests ################################################################################ test_ace() +test_nmse() end From ba97d9354af12108ed22cfcdb8a8765ad34d62ae Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 11 Mar 2017 17:43:16 +0800 Subject: [PATCH 454/630] docs: add an explanation for normalized MSE --- src/metric.jl | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/src/metric.jl b/src/metric.jl index 7432657aaa14..135fe9fa6994 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -187,6 +187,44 @@ Normalized Mean Squared Error ```math \sum_i (\frac{label_i - pred_i}{label_i})^2 ``` + +Note that there are various ways to do the *normalization*. +It depends on your own context. Please judge the problem setting you have +first. If the current implementation do not suitable for you, +feel free to file it on GitHub. + +Let me show you a use case of this kind of normalization: + +Bob is training a network for option pricing. The option pricing problem is +a regression problem (pirce predicting). There are lots of option contracts +on same target stock but different strike price. +For example, there is a stock `S`; it's market price is 1000. 
+And, there are two call option contracts with different strike price. +Assume Bob obtains the outcome as following table: + +``` ++--------+----------------+----------------+--------------+ +| | Strike Price | Market Price | Pred Price | ++--------+----------------+----------------+--------------+ +| Op 1 | 1500 | 100 | 80 | ++--------+----------------+----------------+--------------+ +| Op 2 | 500 | 10 | 8 | ++--------+----------------+----------------+--------------+ +``` + +Now, obviously, Bob will calculate the normalized MSE as: + +```math + (\frac{100 - 80}{100})^2 + \text{ vs } + (\frac{10 - 8}{10}) ^2 +``` + +Both of the pred prices got the same degree of error. + +For more discussion about normalized MSE, please see +[#211](https://github.com/dmlc/MXNet.jl/pull/211) also. + """ type NMSE <: AbstractEvalMetric nmse_sum :: Float64 From cf7f32b3d750c94f817510ec9437c537183d229e Mon Sep 17 00:00:00 2001 From: Steven Thornton Date: Thu, 9 Mar 2017 11:18:56 -0500 Subject: [PATCH 455/630] Fixed data attribute --- docs/src/tutorial/mnist.md | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/src/tutorial/mnist.md b/docs/src/tutorial/mnist.md index 096d7dd0310f..9b580cfcf38d 100644 --- a/docs/src/tutorial/mnist.md +++ b/docs/src/tutorial/mnist.md @@ -29,14 +29,14 @@ data = mx.Variable(:data) and then cascading fully-connected layers and activation functions: ```julia -fc1 = mx.FullyConnected(data = data, name=:fc1, num_hidden=128) -act1 = mx.Activation(data = fc1, name=:relu1, act_type=:relu) -fc2 = mx.FullyConnected(data = act1, name=:fc2, num_hidden=64) -act2 = mx.Activation(data = fc2, name=:relu2, act_type=:relu) -fc3 = mx.FullyConnected(data = act2, name=:fc3, num_hidden=10) +fc1 = mx.FullyConnected(data, name=:fc1, num_hidden=128) +act1 = mx.Activation(fc1, name=:relu1, act_type=:relu) +fc2 = mx.FullyConnected(act1, name=:fc2, num_hidden=64) +act2 = mx.Activation(fc2, name=:relu2, act_type=:relu) +fc3 = 
mx.FullyConnected(act2, name=:fc3, num_hidden=10) ``` -Note each composition we take the previous symbol as the data argument, +Note each composition we take the previous symbol as the first argument, forming a feedforward chain. The architecture looks like ``` @@ -49,7 +49,7 @@ where the last 10 units correspond to the 10 output classes (digits classes: ```julia -mlp = mx.SoftmaxOutput(data = fc3, name=:softmax) +mlp = mx.SoftmaxOutput(fc3, name=:softmax) ``` As we can see, the MLP is just a chain of layers. For this case, we can @@ -148,12 +148,12 @@ listed below: data = mx.Variable(:data) # first conv -conv1 = @mx.chain mx.Convolution(data=data, kernel=(5,5), num_filter=20) => +conv1 = @mx.chain mx.Convolution(data, kernel=(5,5), num_filter=20) => mx.Activation(act_type=:tanh) => mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) # second conv -conv2 = @mx.chain mx.Convolution(data=conv1, kernel=(5,5), num_filter=50) => +conv2 = @mx.chain mx.Convolution(conv1, kernel=(5,5), num_filter=50) => mx.Activation(act_type=:tanh) => mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) ``` @@ -168,17 +168,17 @@ a tensor of shape `(28,28,1,100)`. The convolution and pooling operates in the spatial axis, so `kernel=(5,5)` indicate a square region of 5-width and 5-height. The rest of the architecture follows as: -```ulia +```julia # first fully-connected -fc1 = @mx.chain mx.Flatten(data=conv2) => +fc1 = @mx.chain mx.Flatten(conv2) => mx.FullyConnected(num_hidden=500) => mx.Activation(act_type=:tanh) # second fully-connected -fc2 = mx.FullyConnected(data=fc1, num_hidden=10) +fc2 = mx.FullyConnected(fc1, num_hidden=10) # softmax loss -lenet = mx.Softmax(data=fc2, name=:softmax) +lenet = mx.Softmax(fc2, name=:softmax) ``` Note a fully-connected operator expects the input to be a matrix. 
From c9eb71bc8bbc52c56dae097405caaf9985c99834 Mon Sep 17 00:00:00 2001 From: Michael Creel Date: Sun, 19 Mar 2017 20:59:26 +0100 Subject: [PATCH 456/630] switch to Plots.jl from pyplot (#217) --- examples/regression-example.jl | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/examples/regression-example.jl b/examples/regression-example.jl index bf0acb14b2f1..508dfa79b308 100644 --- a/examples/regression-example.jl +++ b/examples/regression-example.jl @@ -6,7 +6,7 @@ the predictions from the trained net. =# using MXNet using Distributions -using PyPlot +using Plots # data generating process generate_inputs(mean, var, size) = rand(MvNormal(mean, var), size) @@ -56,7 +56,4 @@ mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprov # obtain predictions plotprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput) fit = mx.predict(model, plotprovider) -plot(ValidationOutput,fit',".") -xlabel("true") -ylabel("predicted") -title("outputs: true versus predicted. 45º line is what we hope for") +scatter(ValidationOutput,fit',w = 3, xlabel="true", ylabel="predicted", title="45º line is what we hope for", show=true) From 1781290bfb6d67876db453c652bb3d924eaba13f Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 30 Mar 2017 06:08:36 +0900 Subject: [PATCH 457/630] Update ACE metric (#220) * Takes eps parameter to prevent log(0) to blow up, * Recognises learning of probability distributions. --- src/metric.jl | 43 ++++++++++++++++++++++++++++++++--------- test/unittest/metric.jl | 11 ++++++----- 2 files changed, 40 insertions(+), 14 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 135fe9fa6994..36d82fe7ab47 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -262,12 +262,16 @@ end ACE Calculates the averaged cross-entropy (logloss) for classification. + +# Arguments: +* `eps::Float64`: Prevents returning `Inf` if `p = 0`. 
""" type ACE <: AbstractEvalMetric ace_sum :: Float64 n_sample :: Int + eps :: Float64 - ACE() = new(0.0, 0) + ACE(eps=1.0e-8) = new(0.0, 0, eps) end function get(metric :: ACE) @@ -281,9 +285,17 @@ end function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) @nd_as_jl ro=(label,pred) begin + eps = metric.eps # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) - if ndims(pred) == 4 + if size(label) == size(pred) # simply calculate the cross entropy of the probabilities + for (q, p) in zip(pred, label) + # p == true probability + # q == "unnatural" probability + metric.ace_sum += p * log(q + eps) + metric.n_sample += 1 + end + elseif ndims(pred) == 4 labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) for sample in 1:size(labels, 4) for j in 1:size(labels, 2) @@ -292,7 +304,7 @@ function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) # Since we can only target labels right now this is the only thing we can do. 
target = Int(labels[i, j, 1, sample]) + 1 # klasses are 0...k-1 => julia indexing p_k = pred[i, j, target, sample] - metric.ace_sum += log(p_k) + metric.ace_sum += log(p_k + eps) metric.n_sample += 1 end end @@ -301,7 +313,7 @@ function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) for sample in 1:size(label, 1) target = Int(label[sample]) + 1 # 0-based indexing => 1-based indexing p_k = pred[target, sample] - metric.ace_sum += log(p_k) + metric.ace_sum += log(p_k + eps) metric.n_sample += 1 end else @@ -319,8 +331,9 @@ This can be used to quantify the influence of different classes on the overall l type MultiACE <: AbstractEvalMetric aces :: Vector{Float64} counts :: Vector{Int} + eps :: Float64 - MultiACE(nclasses) = new(Base.zeros(nclasses), Base.zeros(Int, nclasses)) + MultiACE(nclasses, eps=1.0e-8) = new(Base.zeros(nclasses), Base.zeros(Int, nclasses), eps) end function get(metric :: MultiACE) @@ -336,10 +349,22 @@ end function _update_single_output(metric :: MultiACE, label :: NDArray, pred :: NDArray) @nd_as_jl ro=(label,pred) begin + eps = metric.eps # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) - - if ndims(pred) == 4 + @assert size(metric.aces) == size(metric.counts) + if size(label) == size(pred) # simply calculate the cross entropy of the probabilities + for k in 1:length(metric.aces) + kpred = view(pred, ntuple(d->:, ndims(pred) - 2)..., k, :) + klabel = view(label, ntuple(d->:, ndims(label) - 2)..., k, :) + for (q, p) in zip(kpred, klabel) + # p == true probability + # q == "unnatural" probability + metric.aces[k] += p * log(q + eps) + metric.counts[k] += 1 + end + end + elseif ndims(pred) == 4 labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) for sample in 1:size(labels, 4) for j in 1:size(labels, 2) @@ -349,7 +374,7 @@ function _update_single_output(metric :: MultiACE, label :: NDArray, pred :: NDA target = Int(labels[i, j, 1, sample]) + 1 # klasses are 
0...k-1 => julia indexing p_k = pred[i, j, target, sample] - metric.aces[target] += log(p_k) + metric.aces[target] += log(p_k + eps) metric.counts[target] += 1 end end @@ -358,7 +383,7 @@ function _update_single_output(metric :: MultiACE, label :: NDArray, pred :: NDA for sample in 1:size(label, 1) target = Int(label[sample]) + 1 p_k = pred[target, sample] - metric.aces[target] += log(p_k) + metric.aces[target] += log(p_k + eps) metric.counts[target] += 1 end else diff --git a/test/unittest/metric.jl b/test/unittest/metric.jl index 46b62a243dc1..9628f6ca28b4 100644 --- a/test/unittest/metric.jl +++ b/test/unittest/metric.jl @@ -17,10 +17,10 @@ function generate_probs(n, m) # Normalize: ensure each column sums to 1 for j = 1:m - colsum = sum(result[:, j]) - for i = 1:n - result[i, j] /= colsum - end + colsum = sum(result[:, j]) + for i = 1:n + result[i, j] /= colsum + end end result end @@ -28,8 +28,9 @@ end function loglikelihood{T <: AbstractFloat}(labels::Vector{T}, probs::Array{T, 2}) LL = 0.0 + eps = 1.0e-8 for i = 1:size(labels, 1) - LL += log(probs[Int(labels[i]) + 1, i]) # labels are zero-based + LL += log(probs[Int(labels[i]) + 1, i] + eps) # labels are zero-based end LL / size(labels, 1) end From 9474d9595785b3a25074305767c527d010f57263 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 30 Mar 2017 06:10:00 +0900 Subject: [PATCH 458/630] provide an optional context in mx.load_checkpoint (#221) --- src/model.jl | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/model.jl b/src/model.jl index f1f05595533d..46323d701f23 100644 --- a/src/model.jl +++ b/src/model.jl @@ -613,9 +613,14 @@ function load_checkpoint(prefix :: AbstractString, epoch :: Int) return (arch, arg_params, aux_params) end -function load_checkpoint(prefix :: AbstractString, epoch :: Int, ::Type{FeedForward}) +""" + load_checkpoint(prefix, epoch, ::mx.FeedForward; context) + +Load a mx.FeedForward model from the checkpoint *prefix*, *epoch* and optionally 
provide a context. +""" +function load_checkpoint(prefix :: AbstractString, epoch :: Int, ::Type{FeedForward}; context = nothing) arch, arg_params, aux_params = load_checkpoint(prefix, epoch) - model = FeedForward(arch) + model = FeedForward(arch, context = context) model.arg_params = arg_params model.aux_params = aux_params return model From 46440e3ff60cbb9c60147ba7572d8013658ead01 Mon Sep 17 00:00:00 2001 From: Pepe Mandioca Date: Thu, 30 Mar 2017 19:14:37 -0300 Subject: [PATCH 459/630] Added initializers for Spatial Transformer Networks (STN) layers (#223) --- src/initializer.jl | 39 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/src/initializer.jl b/src/initializer.jl index dacb06f349c3..dc484eb2dfe8 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -19,7 +19,13 @@ abstract AbstractInitializer function init{T<:AbstractInitializer}(self :: T, name :: Base.Symbol, array :: NDArray) strname = string(name) - if endswith(strname, "bias") + if startswith(strname,"upsampling") + _init_bilinear(self,name, array) + elseif startswith(strname,"stn_loc") && endswith(strname,"weight") + _init_zero(self,name, array) + elseif startswith(strname,"stn_loc") && endswith(strname,"bias") + _init_loc_bias(self,name, array) + elseif endswith(strname, "bias") _init_bias(self, name, array) elseif endswith(strname, "gamma") _init_gamma(self, name, array) @@ -36,6 +42,37 @@ function init{T<:AbstractInitializer}(self :: T, name :: Base.Symbol, array :: N end end +function _init_loc_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + assert(size(array) == (6,)) + array=[1.0, 0, 0, 0, 1.0, 0] +end + +function _init_bilinear(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) + # ported from python version: + #weight = np.zeros(np.prod(arr.shape), dtype='float32') + #shape = arr.shape + #f = np.ceil(shape[3] / 2.) + #c = (2 * f - 1 - f % 2) / (2. 
* f) + #for i in range(np.prod(shape)): + # x = i % shape[3] + # y = (i / shape[3]) % shape[2] + # weight[i] = (1 - abs(x / f - c)) * (1 - abs(y / f - c)) + #arr[:] = weight.reshape(shape) + + weight=zeros(array) + + h,w,channels,n=size(array) + f = ceil(w / 2.) + c = (2 * f - 1 - f % 2) / (2. * f) + + for i=1:length(weight) + x = i % w + y = (i / w) % h + weight[i] = (1 - abs(x / f - c)) * (1 - abs(y / f - c)) + end + array[:,:,:,:]=weight +end + function _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) array[:] = 0 end From 9dd329168313a248b56617897e6aa08b2b545834 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 2 Apr 2017 14:31:06 +0800 Subject: [PATCH 460/630] docs: update renamed `every_n_batch` ref: #407 [ci skip] --- src/callback.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/callback.jl b/src/callback.jl index 7736bbf08a44..e71bb1f844c8 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -37,7 +37,7 @@ A convenient function to construct a callback that runs every `n` mini-batches. For example, the [`speedometer`](@ref) callback is defined as ```julia -every_n_iter(frequency, call_on_0=true) do state :: OptimizationState +every_n_batch(frequency, call_on_0=true) do state :: OptimizationState if state.curr_batch == 0 # reset timer else @@ -102,7 +102,7 @@ A convenient function to construct a callback that runs every `n` full data-pass yet. This is useful if you want to inspect the randomly initialized model that has not seen any data yet. -See also [`every_n_iter`](@ref). +See also [`every_n_batch`](@ref). """ function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) From 5c02fcb0eb56f98de6eda8995301c3f638e7e161 Mon Sep 17 00:00:00 2001 From: Facundo Quiroga Date: Sun, 2 Apr 2017 20:34:26 -0300 Subject: [PATCH 461/630] Check if key stride exists in the node info when producing graphviz output. 
Fixes #213 --- src/visualize.jl | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/src/visualize.jl b/src/visualize.jl index f7dfca859432..ea700b704266 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -51,7 +51,7 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp attr = deepcopy(node_attr) label = op - # Up to 0.8 version of mxnet additional info was stored in + # Up to 0.8 version of mxnet additional info was stored in # node["param"]. Staring from pre0.9 `param` was changed to `attr`. if haskey(node, "param") node_info = node["param"] @@ -69,9 +69,15 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp continue end elseif op == "Convolution" + if haskey(node_info,"stride") + stride_info=_extract_shape(node_info["stride"]) + else + stride_info="1" + end + label = format("Convolution\nkernel={1}\nstride={2}\nn-filter={3}", _extract_shape(node_info["kernel"]), - _extract_shape(node_info["stride"]), + stride_info, node_info["num_filter"]) colorkey = 2 elseif op == "FullyConnected" @@ -83,10 +89,15 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp elseif op == "BatchNorm" colorkey = 4 elseif op == "Pooling" + if haskey(node_info,"stride") + stride_info=_extract_shape(node_info["stride"]) + else + stride_info="1" + end label = format("Pooling\ntype={1}\nkernel={2}\nstride={3}", node_info["pool_type"], _extract_shape(node_info["kernel"]), - _extract_shape(node_info["stride"])) + stride_info) colorkey = 5 elseif op ∈ ("Concat", "Flatten", "Reshape") colorkey = 6 From 31b95fb1e6bed98c2e7621f524dce51f6d91294f Mon Sep 17 00:00:00 2001 From: ultradian Date: Mon, 10 Apr 2017 07:22:46 -0700 Subject: [PATCH 462/630] Update mnist.md (#227) add link to Jupyter notebooks --- docs/src/tutorial/mnist.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/tutorial/mnist.md b/docs/src/tutorial/mnist.md index 
9b580cfcf38d..a12e10b37821 100644 --- a/docs/src/tutorial/mnist.md +++ b/docs/src/tutorial/mnist.md @@ -6,7 +6,7 @@ multi-layer perceptron and then a convolutional neural network (the LeNet architecture) on the [MNIST handwritten digit dataset](http://yann.lecun.com/exdb/mnist/). The code for this tutorial could be found in -[examples/mnist](https://github.com/dmlc/MXNet.jl/tree/master/examples/mnist). +[examples/mnist](https://github.com/dmlc/MXNet.jl/tree/master/examples/mnist). There are also two Jupyter notebooks that expand a little more on the [MLP](https://github.com/ultradian/julia_notebooks/blob/master/mxnet/mnistMLP.ipynb) and the [LeNet](https://github.com/ultradian/julia_notebooks/blob/master/mxnet/mnistLenet.ipynb), using the more general `ArrayDataProvider`. Simple 3-layer MLP ------------------ From 1be1b8762d535fa6fed48a51caf652ad8e000cc4 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 12 Apr 2017 15:43:34 +0900 Subject: [PATCH 463/630] fix conflict with Images --- src/io.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/io.jl b/src/io.jl index f65314e67c71..ea22a4da961f 100644 --- a/src/io.jl +++ b/src/io.jl @@ -253,9 +253,9 @@ TODO: remove `data_padding` and `label_padding`, and implement rollover that cop the last or first several training samples to feed the padding. 
""" type ArrayDataProvider <: AbstractDataProvider - data_arrays :: Vector{Array{MX_float}} + data_arrays :: Vector{Array} data_names :: Vector{Base.Symbol} - label_arrays :: Vector{Array{MX_float}} + label_arrays :: Vector{Array} label_names :: Vector{Base.Symbol} batch_size :: Int sample_count :: Int From bfa966ac3f0c4f187a438db819c2e92e56ad8d47 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 13 Apr 2017 10:02:35 +0900 Subject: [PATCH 464/630] fixes bilinear initializer following approach in #34 --- src/initializer.jl | 43 +++++++++++++++++++++---------------------- 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/src/initializer.jl b/src/initializer.jl index dc484eb2dfe8..5e34dce8d8f9 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -48,29 +48,28 @@ function _init_loc_bias(self :: AbstractInitializer, name :: Base.Symbol, array end function _init_bilinear(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) - # ported from python version: - #weight = np.zeros(np.prod(arr.shape), dtype='float32') - #shape = arr.shape - #f = np.ceil(shape[3] / 2.) - #c = (2 * f - 1 - f % 2) / (2. * f) - #for i in range(np.prod(shape)): - # x = i % shape[3] - # y = (i / shape[3]) % shape[2] - # weight[i] = (1 - abs(x / f - c)) * (1 - abs(y / f - c)) - #arr[:] = weight.reshape(shape) - - weight=zeros(array) - - h,w,channels,n=size(array) - f = ceil(w / 2.) - c = (2 * f - 1 - f % 2) / (2. 
* f) - - for i=1:length(weight) - x = i % w - y = (i / w) % h - weight[i] = (1 - abs(x / f - c)) * (1 - abs(y / f - c)) + @assert ndims(array) == 4 + + W, H, C, N = size(array) # Inverse of NCHW layout + filter = Base.zeros(eltype(array), W, H) + + @assert H == W + + f = ceil(Int, W / 2) # factor + c = (2 * f - 1 - f % 2) / (2 * f) # center + for x in 0:(W-1) + for y in 0:(H-1) + filter[x+1, y+1] = (1 - abs(x / f - c)) * (1 - abs(y / f - c)) + end + end + + @nd_as_jl rw=array begin + for i in 1:N + for j in 1:C + array[:,:, j, i] = filter + end + end end - array[:,:,:,:]=weight end function _init_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) From 8947eadb6b17f702e0e550274f69131150157f3d Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 13 Apr 2017 10:38:58 +0900 Subject: [PATCH 465/630] adds test for bilinear initializer --- test/unittest/initializer.jl | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 test/unittest/initializer.jl diff --git a/test/unittest/initializer.jl b/test/unittest/initializer.jl new file mode 100644 index 000000000000..750959f2eb20 --- /dev/null +++ b/test/unittest/initializer.jl @@ -0,0 +1,18 @@ +@testset "Initializers" begin + @testset "Bilinear initializer" begin + # Setup a filter with scale = 2 + expectedFilter = Float32[ + 0.0625 0.1875 0.1875 0.0625; + 0.1875 0.5625 0.5625 0.1875; + 0.1875 0.5625 0.5625 0.1875; + 0.0625 0.1875 0.1875 0.0625] + filter = mx.zeros(Float32, 4, 4, 1, 4) + mx.init(mx.XavierInitializer(), :upsampling0_weight, filter) + + mx.@nd_as_jl ro=filter begin + for s in 1:size(filter, 4) + @test all(filter[:, :, 1, s] .== expectedFilter) + end + end + end +end From 558adb7f92c7d2e8dc0351b2139992f2bae9bc84 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 28 Mar 2017 17:58:00 +0900 Subject: [PATCH 466/630] add SeqMetric to apply different metrics for multi-output --- src/metric.jl | 34 ++++++++++++++++++++++++++++++++++ 1 file changed, 34 
insertions(+) diff --git a/src/metric.jl b/src/metric.jl index 36d82fe7ab47..c8a42d88854a 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -85,6 +85,40 @@ function get(metric :: MultiMetric) mapreduce(get, append!, metric.metrics) end +""" + SeqMetric(metrics::Vector{AbstractEvalMetric}) + +Apply a different metric to each output. This is especially useful for `mx.Group`. + +# Usage +Calculate accuracy [`Accuracy`](@ref) for the first output +and log-loss [`ACE`](@ref) for the second output: +```julia + mx.fit(..., eval_metric = mx.SeqMetric([mx.Accuracy(), mx.ACE()])) +``` +""" +type SeqMetric <: mx.AbstractEvalMetric + metrics :: Vector{mx.AbstractEvalMetric} +end + +function update!(metric :: SeqMetric, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + @assert length(metric.metrics) == length(labels) + @assert length(metric.metrics) == length(preds) + for (m, l, p) in zip(metric.metrics, labels, preds) + update!(m, [l], [p]) + end + return nothing +end + +function reset!(metric :: SeqMetric) + map(reset!, metric.metrics) + return nothing +end + +function get(metric :: SeqMetric) + mapreduce(get, append!, metric.metrics) +end + """ Accuracy From b4ece81a9fde4e6ea8ecfe16c1d9bbb69832e752 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Wed, 29 Mar 2017 13:02:59 +0900 Subject: [PATCH 467/630] add NullMetric --- src/metric.jl | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/metric.jl b/src/metric.jl index c8a42d88854a..eb4bff3930d9 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -54,6 +54,26 @@ function get(metric :: AbstractEvalMetric) throw(MethodError(get, (typeof(metric),))) end +""" + NullMetric() + +A metric that calculates nothing. Can be used to ignore an output during training. 
+""" +type NullMetric <: mx.AbstractEvalMetric +end + +function update!(metric :: NullMetric, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + return nothing +end + +function reset!(metric :: NullMetric) + return nothing +end + +function get(metric :: NullMetric) + return Tuple{Symbol, Float64}[] +end + """ MultiMetric(metrics::Vector{AbstractEvalMetric}) From ced3f73aa8503ae8f0d34e9ef73e94417af15b8e Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 30 Mar 2017 14:41:09 +0900 Subject: [PATCH 468/630] fix pointer(NDArray) for Float64 --- src/ndarray.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index d37b321a2d8a..42ad43d7c492 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -798,8 +798,8 @@ end # pointers from CPU) leads to undefined behavior. import Base.pointer function pointer(arr :: NDArray) - pdata = Ref{Ptr{MX_float}}(0) - @mxcall(:MXNDArrayGetData, (MX_handle, Ref{Ptr{MX_float}}), arr, pdata) + pdata = Ref{Ptr{Void}}(0) + @mxcall(:MXNDArrayGetData, (MX_handle, Ref{Ptr{Void}}), arr, pdata) return convert(Ptr{eltype(arr)}, pdata[]) end function _wait_to_read(arr :: NDArray) From d0253f1ba0d4d681c3e755cc1dd4a9b63e1f79ac Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 14 Apr 2017 13:00:54 +0900 Subject: [PATCH 469/630] XCode8 supports thread_local in C++ --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 6c2f33c114a4..7c1c6b9be19d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,6 +4,7 @@ language: julia os: - linux - osx +osx_image: xcode8 julia: - 0.5 # - nightly 0.6 supports depends on #170 From 169752232f43756caab985d9dacf40ba981a498f Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Tue, 28 Mar 2017 17:58:22 +0900 Subject: [PATCH 470/630] update MSE to be agnostic to dimensions. 
--- src/metric.jl | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index eb4bff3930d9..92e304f2877a 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -200,9 +200,10 @@ end """ MSE -Mean Squared Error. TODO: add support for multi-dimensional outputs. +Mean Squared Error. -Calculates the mean squared error regression loss in one dimension. +Calculates the mean squared error regression loss. +Requires that label and prediction have the same shape. """ type MSE <: AbstractEvalMetric @@ -213,14 +214,10 @@ type MSE <: AbstractEvalMetric end function _update_single_output(metric :: MSE, label :: NDArray, pred :: NDArray) - label = copy(label) - pred = copy(pred) - - n_sample = size(pred)[end] - metric.n_sample += n_sample - - for i = 1:n_sample - metric.mse_sum += (label[i] - pred[i])^2 + @assert size(label) == size(pred) + metric.n_sample += length(label) + @nd_as_jl ro=(label, pred) begin + metric.mse_sum += sumabs2(label .- pred) end end From e12052c65027abc1141cad86c11fa7423172eb18 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Thu, 30 Mar 2017 12:44:09 +0900 Subject: [PATCH 471/630] convert single outputs to array so that inference has it easier --- src/metric.jl | 204 ++++++++++++++++++++-------------------- test/unittest/metric.jl | 2 +- 2 files changed, 101 insertions(+), 105 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 92e304f2877a..271ab719ee89 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -27,7 +27,13 @@ function update!{T <: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray} number of outputs ($(length(preds))). The calculated metric might not be accuracte.") end for (label, pred) in zip(labels, preds) - _update_single_output(metric, label, pred) + @nd_as_jl ro=(label, pred) begin + # This is a dynamic dispatch since the conversion from NDArray to + # Array is not type-stable. 
We could use a trait to decide if we should + # convert the NDArray here so that the called function will be type-stable + # or if we should forward the NDArray. + _update_single_output(metric, label, pred) + end end end @@ -154,37 +160,35 @@ type Accuracy <: AbstractEvalMetric Accuracy() = new(0.0, 0) end -function _update_single_output(metric :: Accuracy, label :: NDArray, pred :: NDArray) - @nd_as_jl ro=(label,pred) begin - # Samples are stored in the last dimension - @assert size(label, ndims(label)) == size(pred, ndims(pred)) +function _update_single_output(metric :: Accuracy, label :: Array, pred :: Array) + # Samples are stored in the last dimension + @assert size(label, ndims(label)) == size(pred, ndims(pred)) - if ndims(pred) == 4 # Multidimensional case - # Reshape label to be of the same shape as pred. - # Except for the third dimension where the predictions are stored. - labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) + if ndims(pred) == 4 # Multidimensional case + # Reshape label to be of the same shape as pred. + # Except for the third dimension where the predictions are stored. 
+ labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) - for sample in 1:size(labels, 4) - for j in 1:size(labels, 2) - for i in 1:size(labels, 1) - label = labels[i, j, 1, sample] - klasses = view(pred, i, j, :, sample) - klass = indmax(klasses) - 1 # Classes start at 0...k-1 + for sample in 1:size(labels, 4) + for j in 1:size(labels, 2) + for i in 1:size(labels, 1) + label = labels[i, j, 1, sample] + klasses = view(pred, i, j, :, sample) + klass = indmax(klasses) - 1 # Classes start at 0...k-1 - metric.acc_sum += klass == label - metric.n_sample += 1 - end + metric.acc_sum += klass == label + metric.n_sample += 1 end end - elseif ndims(pred) == 2 # 1-dimensional case - for sample in 1:size(label, 1) - klass = indmax(view(pred, :, sample)) - 1 - metric.acc_sum += klass == label[sample] - metric.n_sample += 1 - end - else - error("Can't handle prediction with dimensions $(ndims(pred)).") end + elseif ndims(pred) == 2 # 1-dimensional case + for sample in 1:size(label, 1) + klass = indmax(view(pred, :, sample)) - 1 + metric.acc_sum += klass == label[sample] + metric.n_sample += 1 + end + else + error("Can't handle prediction with dimensions $(ndims(pred)).") end end @@ -213,12 +217,11 @@ type MSE <: AbstractEvalMetric MSE() = new(0.0, 0) end -function _update_single_output(metric :: MSE, label :: NDArray, pred :: NDArray) +function _update_single_output{T}(metric :: MSE, label :: Array{T}, pred :: Array{T}) @assert size(label) == size(pred) metric.n_sample += length(label) - @nd_as_jl ro=(label, pred) begin - metric.mse_sum += sumabs2(label .- pred) - end + metric.mse_sum += sumabs2(label .- pred) + return nothing end function get(metric :: MSE) @@ -284,10 +287,7 @@ type NMSE <: AbstractEvalMetric NMSE() = new(0.0, 0) end -function _update_single_output(metric :: NMSE, label :: NDArray, pred :: NDArray) - label = copy(label) - pred = copy(pred) - +function _update_single_output(metric :: NMSE, label :: Array, pred :: Array) n_sample = size(pred)[end] 
metric.n_sample += n_sample @@ -334,42 +334,40 @@ function reset!(metric :: ACE) metric.n_sample = 0 end -function _update_single_output(metric :: ACE, label :: NDArray, pred :: NDArray) - @nd_as_jl ro=(label,pred) begin - eps = metric.eps - # Samples are stored in the last dimension - @assert size(label, ndims(label)) == size(pred, ndims(pred)) - if size(label) == size(pred) # simply calculate the cross entropy of the probabilities - for (q, p) in zip(pred, label) - # p == true probability - # q == "unnatural" probability - metric.ace_sum += p * log(q + eps) +function _update_single_output(metric :: ACE, label :: Array, pred :: Array) + eps = metric.eps + # Samples are stored in the last dimension + @assert size(label, ndims(label)) == size(pred, ndims(pred)) + if size(label) == size(pred) # simply calculate the cross entropy of the probabilities + for (q, p) in zip(pred, label) + # p == true probability + # q == "unnatural" probability + metric.ace_sum += p * log(q + eps) + metric.n_sample += 1 + end + elseif ndims(pred) == 4 + labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) + for sample in 1:size(labels, 4) + for j in 1:size(labels, 2) + for i in 1:size(labels, 1) + # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification + # Since we can only target labels right now this is the only thing we can do. + target = Int(labels[i, j, 1, sample]) + 1 # klasses are 0...k-1 => julia indexing + p_k = pred[i, j, target, sample] + metric.ace_sum += log(p_k + eps) metric.n_sample += 1 - end - elseif ndims(pred) == 4 - labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) - for sample in 1:size(labels, 4) - for j in 1:size(labels, 2) - for i in 1:size(labels, 1) - # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification - # Since we can only target labels right now this is the only thing we can do. 
- target = Int(labels[i, j, 1, sample]) + 1 # klasses are 0...k-1 => julia indexing - p_k = pred[i, j, target, sample] - metric.ace_sum += log(p_k + eps) - metric.n_sample += 1 - end end end - elseif ndims(pred) == 2 # 1-dimensional case - for sample in 1:size(label, 1) - target = Int(label[sample]) + 1 # 0-based indexing => 1-based indexing - p_k = pred[target, sample] - metric.ace_sum += log(p_k + eps) - metric.n_sample += 1 - end - else - error("Can't handle prediction with dimensions $(ndims(pred)).") end + elseif ndims(pred) == 2 # 1-dimensional case + for sample in 1:size(label, 1) + target = Int(label[sample]) + 1 # 0-based indexing => 1-based indexing + p_k = pred[target, sample] + metric.ace_sum += log(p_k +eps) + metric.n_sample += 1 + end + else + error("Can't handle prediction with dimensions $(ndims(pred)).") end end @@ -398,48 +396,46 @@ function reset!(metric :: MultiACE) metric.counts = Base.zero(metric.counts) end -function _update_single_output(metric :: MultiACE, label :: NDArray, pred :: NDArray) - @nd_as_jl ro=(label,pred) begin - eps = metric.eps - # Samples are stored in the last dimension - @assert size(label, ndims(label)) == size(pred, ndims(pred)) - @assert size(metric.aces) == size(metric.counts) - if size(label) == size(pred) # simply calculate the cross entropy of the probabilities - for k in 1:length(metric.aces) - kpred = view(pred, ntuple(d->:, ndims(pred) - 2)..., k, :) - klabel = view(label, ntuple(d->:, ndims(label) - 2)..., k, :) - for (q, p) in zip(kpred, klabel) - # p == true probability - # q == "unnatural" probability - metric.aces[k] += p * log(q + eps) - metric.counts[k] += 1 - end +function _update_single_output(metric :: MultiACE, label :: Array, pred :: Array) + eps = metric.eps + # Samples are stored in the last dimension + @assert size(label, ndims(label)) == size(pred, ndims(pred)) + @assert size(metric.aces) == size(metric.counts) + if size(label) == size(pred) # simply calculate the cross entropy of the 
probabilities + for k in 1:length(metric.aces) + kpred = view(pred, ntuple(d->:, ndims(pred) - 2)..., k, :) + klabel = view(label, ntuple(d->:, ndims(label) - 2)..., k, :) + for (q, p) in zip(kpred, klabel) + # p == true probability + # q == "unnatural" probability + metric.aces[k] += p * log(q + eps) + metric.counts[k] += 1 end - elseif ndims(pred) == 4 - labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) - for sample in 1:size(labels, 4) - for j in 1:size(labels, 2) - for i in 1:size(labels, 1) - # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification - # Since we can only target labels right now this is the only thing we can do. - target = Int(labels[i, j, 1, sample]) + 1 # klasses are 0...k-1 => julia indexing - p_k = pred[i, j, target, sample] - - metric.aces[target] += log(p_k + eps) - metric.counts[target] += 1 - end + end + elseif ndims(pred) == 4 + labels = reshape(label, size(pred, 1, 2)..., 1, size(pred, 4)) + for sample in 1:size(labels, 4) + for j in 1:size(labels, 2) + for i in 1:size(labels, 1) + # Cross-entropy reduces to -(ln(p_1)*0 + ln(p_2)*1) for classification + # Since we can only target labels right now this is the only thing we can do. 
+ target = Int(labels[i, j, 1, sample]) + 1 # klasses are 0...k-1 => julia indexing + p_k = pred[i, j, target, sample] + + metric.aces[target] += log(p_k + eps) + metric.counts[target] += 1 end end - elseif ndims(pred) == 2 - for sample in 1:size(label, 1) - target = Int(label[sample]) + 1 - p_k = pred[target, sample] - metric.aces[target] += log(p_k + eps) - metric.counts[target] += 1 - end - else - error("Can't handle prediction with dimensions $(ndims(pred)).") end + elseif ndims(pred) == 2 + for sample in 1:size(label, 1) + target = Int(label[sample]) + 1 + p_k = pred[target, sample] + metric.aces[target] += log(p_k + eps) + metric.counts[target] += 1 + end + else + error("Can't handle prediction with dimensions $(ndims(pred)).") end end diff --git a/test/unittest/metric.jl b/test/unittest/metric.jl index 9628f6ca28b4..1377c4c2bb81 100644 --- a/test/unittest/metric.jl +++ b/test/unittest/metric.jl @@ -48,7 +48,7 @@ function test_ace() probs = convert(Array{Float32}, generate_probs(n_categories, n_observations)) LL = loglikelihood(labels, probs) metric = mx.ACE() # For categorical variables, ACE == -LL - mx._update_single_output(metric, mx.NDArray(labels), mx.NDArray(probs)) + mx._update_single_output(metric, labels, probs) LL_v2 = metric.ace_sum / metric.n_sample @static if VERSION >= v"0.6.0-dev.2075" @test LL ≈ LL_v2 atol=1e-12 From def129f20a03bec6913217c6b9e685941dad66e2 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 14 Apr 2017 11:45:16 +0900 Subject: [PATCH 472/630] convert eps to eltype of array --- src/metric.jl | 8 ++++---- test/unittest/metric.jl | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 271ab719ee89..1eed64988b12 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -334,8 +334,8 @@ function reset!(metric :: ACE) metric.n_sample = 0 end -function _update_single_output(metric :: ACE, label :: Array, pred :: Array) - eps = metric.eps +function _update_single_output{T}(metric :: ACE, 
label :: Array{T}, pred :: Array{T}) + eps = convert(T, metric.eps) # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) if size(label) == size(pred) # simply calculate the cross entropy of the probabilities @@ -396,8 +396,8 @@ function reset!(metric :: MultiACE) metric.counts = Base.zero(metric.counts) end -function _update_single_output(metric :: MultiACE, label :: Array, pred :: Array) - eps = metric.eps +function _update_single_output{T}(metric :: MultiACE, label :: Array{T}, pred :: Array{T}) + eps = convert(T, metric.eps) # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) @assert size(metric.aces) == size(metric.counts) diff --git a/test/unittest/metric.jl b/test/unittest/metric.jl index 1377c4c2bb81..5b5632a87c76 100644 --- a/test/unittest/metric.jl +++ b/test/unittest/metric.jl @@ -28,7 +28,7 @@ end function loglikelihood{T <: AbstractFloat}(labels::Vector{T}, probs::Array{T, 2}) LL = 0.0 - eps = 1.0e-8 + eps = convert(T, 1.0e-8) for i = 1:size(labels, 1) LL += log(probs[Int(labels[i]) + 1, i] + eps) # labels are zero-based end From c2044d5bd7dad88721f7edd6c06f0f066a91076a Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 14 Apr 2017 16:01:01 +0900 Subject: [PATCH 473/630] adds THTT to enable metrics to support NDArray or Array --- src/metric.jl | 39 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 36 insertions(+), 3 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 1eed64988b12..5c5945e96a9f 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -10,6 +10,16 @@ interfaces: """ abstract AbstractEvalMetric +""" + hasNDArraySupport(metric) -> Val{true/false} + +Trait for `_update_single_output` should return `Val{true}() if metric can handle `NDArray` +directly and `Val{false}()i` if requires `Array`. 
Metric that work with NDArrays can be +async, while native Julia arrays require that we copy the output of the network, which is +a blocking operation. +""" +hasNDArraySupport(::AbstractEvalMetric) = Val{true}() + """ update!(metric, labels, preds) @@ -21,6 +31,21 @@ Update and accumulate metrics. * `preds::Vector{NDArray}`: the outputs (predictions) of the network. """ function update!{T <: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}) + _update!(metric, labels, preds, hasNDArraySupport(metric)) +end + +function _update!{T<: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}, :: Val{true}) + if length(labels) != length(preds) + Base.warn_once( + "The number of labels ($(length(labels))) does not correspond to the\ + number of outputs ($(length(preds))). The calculated metric might not be accuracte.") + end + for (label, pred) in zip(labels, preds) + _update_single_output(metric, label, pred) + end +end + +function _update!{T<: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}, :: Val{false}) if length(labels) != length(preds) Base.warn_once( "The number of labels ($(length(labels))) does not correspond to the\ @@ -29,9 +54,7 @@ function update!{T <: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray} for (label, pred) in zip(labels, preds) @nd_as_jl ro=(label, pred) begin # This is a dynamic dispatch since the conversion from NDArray to - # Array is not type-stable. We could use a trait to decide if we should - # convert the NDArray here so that the called function will be type-stable - # or if we should forward the NDArray. + # Array is not type-stable. 
_update_single_output(metric, label, pred) end end @@ -160,6 +183,8 @@ type Accuracy <: AbstractEvalMetric Accuracy() = new(0.0, 0) end +hasNDArraySupport(::Accuracy) = Val{false}() + function _update_single_output(metric :: Accuracy, label :: Array, pred :: Array) # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) @@ -217,6 +242,8 @@ type MSE <: AbstractEvalMetric MSE() = new(0.0, 0) end +hasNDArraySupport(::MSE) = Val{false}() + function _update_single_output{T}(metric :: MSE, label :: Array{T}, pred :: Array{T}) @assert size(label) == size(pred) metric.n_sample += length(label) @@ -287,6 +314,8 @@ type NMSE <: AbstractEvalMetric NMSE() = new(0.0, 0) end +hasNDArraySupport(::NMSE) = Val{false}() + function _update_single_output(metric :: NMSE, label :: Array, pred :: Array) n_sample = size(pred)[end] metric.n_sample += n_sample @@ -334,6 +363,8 @@ function reset!(metric :: ACE) metric.n_sample = 0 end +hasNDArraySupport(::ACE) = Val{false}() + function _update_single_output{T}(metric :: ACE, label :: Array{T}, pred :: Array{T}) eps = convert(T, metric.eps) # Samples are stored in the last dimension @@ -396,6 +427,8 @@ function reset!(metric :: MultiACE) metric.counts = Base.zero(metric.counts) end +hasNDArraySupport(::MultiACE) = Val{false}() + function _update_single_output{T}(metric :: MultiACE, label :: Array{T}, pred :: Array{T}) eps = convert(T, metric.eps) # Samples are stored in the last dimension From 349d2a781a7acc4a408e1efca915a0dc99ad874e Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Fri, 14 Apr 2017 16:09:07 +0900 Subject: [PATCH 474/630] convert MSE to use NDArray as an example for async --- src/metric.jl | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/src/metric.jl b/src/metric.jl index 5c5945e96a9f..3bff815d4f35 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -236,27 +236,30 @@ Requires that label and prediction have the same shape. 
""" type MSE <: AbstractEvalMetric - mse_sum :: Float64 + mse_sum :: Vector{NDArray} n_sample :: Int - MSE() = new(0.0, 0) + MSE() = new(Vector{NDArray}(), 0) end -hasNDArraySupport(::MSE) = Val{false}() +hasNDArraySupport(::MSE) = Val{true}() -function _update_single_output{T}(metric :: MSE, label :: Array{T}, pred :: Array{T}) +function _update_single_output(metric :: MSE, label :: NDArray, pred :: NDArray) @assert size(label) == size(pred) metric.n_sample += length(label) - metric.mse_sum += sumabs2(label .- pred) + mse_sum = mx.sum(mx._PowerScalar(label - pred,scalar=2)) + push!(metric.mse_sum, mse_sum) return nothing end function get(metric :: MSE) - return [(:MSE, metric.mse_sum / metric.n_sample)] + # Delay copy until last possible moment + mse_sum = mapreduce(nda->copy(nda)[1], +, 0.0, metric.mse_sum) + return [(:MSE, mse_sum / metric.n_sample)] end function reset!(metric :: MSE) - metric.mse_sum = 0.0 + metric.mse_sum = Vector{NDArray}() metric.n_sample = 0 end From c4a265863557fb25314e56403d77e4fb00af25e9 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 15 Apr 2017 11:51:27 +0900 Subject: [PATCH 475/630] ensure that new submodules are initialized --- deps/build.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 7b654d9a9706..03e07fccbd22 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -115,7 +115,8 @@ if !libmxnet_detected `git -C mshadow checkout -- make/mshadow.mk` `git fetch` `git checkout $libmxnet_curr_ver` - `git submodule update` + `git submodule update --init` + `make clean` `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` end FileRule(joinpath(_mxdir, "config.mk"), @build_steps begin From 9aac7e13c4828872fc96a30131980e4fe086c654 Mon Sep 17 00:00:00 2001 From: undefdev Date: Sat, 15 Apr 2017 17:50:50 +0200 Subject: [PATCH 476/630] Update callback.jl (#235) fixed typo in comment --- src/callback.jl | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/src/callback.jl b/src/callback.jl index e71bb1f844c8..ee200d2e30c9 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -97,7 +97,7 @@ end A convenient function to construct a callback that runs every `n` full data-passes. -* `call_on_0::Int`: keyword argument, default false. Unless set, the callback +* `call_on_0::Bool`: keyword argument, default false. Unless set, the callback will *not* be run on epoch 0. Epoch 0 means no training has been performed yet. This is useful if you want to inspect the randomly initialized model that has not seen any data yet. From 7a530fafec87966c71ae3ce9d9e44ad07be49e22 Mon Sep 17 00:00:00 2001 From: Pepe Mandioca Date: Wed, 26 Apr 2017 19:45:40 -0300 Subject: [PATCH 477/630] Fixed SpatialTransformerNetwork loc_bias initializer. Added lenet-stn example. (#238) --- examples/mnist/lenet-stn.jl | 64 +++++++++++++++++++++++++++++++++++++ src/initializer.jl | 2 +- 2 files changed, 65 insertions(+), 1 deletion(-) create mode 100644 examples/mnist/lenet-stn.jl diff --git a/examples/mnist/lenet-stn.jl b/examples/mnist/lenet-stn.jl new file mode 100644 index 000000000000..23ca9de3fdb3 --- /dev/null +++ b/examples/mnist/lenet-stn.jl @@ -0,0 +1,64 @@ +using MXNet + +#-------------------------------------------------------------------------------- +# define lenet with stn layer + + + +# input +data = mx.Variable(:data) + + +# the localisation network in lenet-stn +# it will increase acc about more than 1%, when num-epoch >=15 +# The localization net just takes the data as input and must output a vector in R^n +loc_net = @mx.chain mx.Convolution(data, num_filter=10, kernel=(5, 5), stride=(2,2)) => + mx.Activation(act_type=:relu) => + mx.Pooling( kernel=(2, 2), stride=(2, 2), pool_type=:max) => + mx.Convolution( num_filter=10, kernel=(3, 3), stride=(2,2), pad=(1, 1)) => + mx.Activation(act_type=:relu) => + mx.Pooling( global_pool=true, kernel=(2, 2), pool_type=:avg) => + mx.Flatten() => + mx.FullyConnected(num_hidden=6, 
name=:stn_loc) + +data=mx.SpatialTransformer(data,loc_net, target_shape = (28,28), transform_type="affine", sampler_type="bilinear") + +# first conv +conv1 = @mx.chain mx.Convolution(data, kernel=(5,5), num_filter=20) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) + +# second conv +conv2 = @mx.chain mx.Convolution(conv1, kernel=(5,5), num_filter=50) => + mx.Activation(act_type=:tanh) => + mx.Pooling(pool_type=:max, kernel=(2,2), stride=(2,2)) + +# first fully-connected +fc1 = @mx.chain mx.Flatten(conv2) => + mx.FullyConnected(num_hidden=500) => + mx.Activation(act_type=:tanh) + +# second fully-connected +fc2 = mx.FullyConnected(fc1, num_hidden=10) + +# softmax loss +lenet = mx.SoftmaxOutput(fc2, name=:softmax) + + +#-------------------------------------------------------------------------------- + +# load data +batch_size = 100 +include("mnist-data.jl") +train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) + +#-------------------------------------------------------------------------------- +# fit model +model = mx.FeedForward(lenet, context=mx.cpu()) + +# optimizer +optimizer = mx.ADAM(lr=0.01, weight_decay=0.00001) + +# fit parameters +initializer=mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1) +mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider,initializer=initializer) diff --git a/src/initializer.jl b/src/initializer.jl index 5e34dce8d8f9..4685f41101cc 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -44,7 +44,7 @@ end function _init_loc_bias(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) assert(size(array) == (6,)) - array=[1.0, 0, 0, 0, 1.0, 0] + array[:]= [1.0, 0, 0, 0, 1.0, 0] end function _init_bilinear(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) From 06010b8936e82bfc74b7e0a59324e2b2162aa745 Mon Sep 17 00:00:00 2001 From: Mus M Date: Sat, 29 Apr 2017 22:13:38 
-0400 Subject: [PATCH 478/630] explicitly specify 7z (#239) --- deps/build.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 03e07fccbd22..6be844574968 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -43,7 +43,8 @@ if !libmxnet_detected run(`cmd /c copy "usr\\3rdparty\\openblas\\bin\\*.dll" "usr\\lib"`) run(download_cmd(package_url, "mxnet.7z")) - run(`7z x mxnet.7z -y -ousr`) + exe7z = joinpath(JULIA_HOME, "7z.exe") + run(`$exe7z x mxnet.7z -y -ousr`) return end From 6cc9e99cd4fbff26c099ded296cd4984e21cf5a7 Mon Sep 17 00:00:00 2001 From: Robin Deits Date: Tue, 9 May 2017 14:13:35 -0400 Subject: [PATCH 479/630] add verbosity option to predict() --- src/model.jl | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/model.jl b/src/model.jl index 46323d701f23..afa3aa351a8a 100644 --- a/src/model.jl +++ b/src/model.jl @@ -143,7 +143,7 @@ function init_model(self :: FeedForward, initializer :: AbstractInitializer; ove return (arg_names, param_names, aux_names) end -function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_shapes...) +function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; verbosity :: Integer = 1, data_shapes...) if !isdefined(self, :pred_exec) || isa(self.pred_exec, Void) || overwrite if !isdefined(self, :arg_params) || !isdefined(self, :aux_params) @assert(false, "Model weights not defined, please init or train the model, or load from file") @@ -152,7 +152,7 @@ function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; data_sha # the predictor use only the first device self.pred_exec = simple_bind(self.arch, self.ctx[1]; grad_req=GRAD_NOP, data_shapes...) 
dbg_str = mx.debug_str(self.pred_exec) - info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[1])) + verbosity >= 1 && info(string("TempSpace: ", split(dbg_str, ['\n'])[end-2]..., " on ", self.ctx[1])) copy_params_from(self.pred_exec, self.arg_params, self.aux_params) else # make sure the new setup is compatible with the existing one @@ -185,6 +185,10 @@ end then the executor can be potentially be re-used. So, if `overwrite` is false, we will try to re-use, and raise an error if batch-size changed. If `overwrite` is true (the default), a new `Executor` will be created to replace the old one. +* `verbosity::Integer`: Determines the verbosity of the print messages. Higher numbers + leads to more verbose printing. Acceptable values are + - `0`: Do not print anything during prediction + - `1`: Print allocation information during prediction !!! note Prediction is computationally much less costly than training, so the bottleneck sometimes becomes the IO @@ -203,13 +207,15 @@ end See also [`train`](@ref), [`fit`](@ref), [`init_model`](@ref), and [`load_checkpoint`](@ref) """ -function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = true) - predict(self, data; overwrite = overwrite, callback=callback) +function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; + overwrite :: Bool = true, verbosity :: Integer = 1) + predict(self, data; overwrite = overwrite, callback=callback, verbosity = verbosity) end -function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=true, callback::Union{Function,Void}=nothing) +function predict(self :: FeedForward, data :: AbstractDataProvider; + overwrite::Bool=true, callback::Union{Function,Void}=nothing, verbosity :: Integer = 1) data_shapes = provide_data(data) data_names = [x[1] for x in data_shapes] - _setup_predictor(self, overwrite; data_shapes...) 
+ _setup_predictor(self, overwrite; verbosity = verbosity, data_shapes...) batch_size = get_batch_size(data) data_arrays = [self.pred_exec.arg_dict[name] for name in data_names] From 5d163055fd0a2b96d00877e0e15164b91f3dac11 Mon Sep 17 00:00:00 2001 From: Mus M Date: Fri, 12 May 2017 09:54:59 -0400 Subject: [PATCH 480/630] explicitly specify 7z --- deps/build.jl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 6be844574968..aebc679e304e 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -38,12 +38,13 @@ if !libmxnet_detected # TODO: Get url from JSON. package_url = "https://github.com/yajiedesign/mxnet/releases/download/$(curr_win)/$(curr_win)_mxnet_x64_vc12_cpu.7z" + exe7z = joinpath(JULIA_HOME, "7z.exe") + run(download_cmd(base_url, "mxnet_base.7z")) - run(`7z x mxnet_base.7z -y -ousr`) + run(`$exe7z x mxnet_base.7z -y -ousr`) run(`cmd /c copy "usr\\3rdparty\\openblas\\bin\\*.dll" "usr\\lib"`) run(download_cmd(package_url, "mxnet.7z")) - exe7z = joinpath(JULIA_HOME, "7z.exe") run(`$exe7z x mxnet.7z -y -ousr`) return From 8918f1b362cd20f44fa545f9a117cb09e4ec34da Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 13 May 2017 00:03:16 +0900 Subject: [PATCH 481/630] update appveyor script (#195) --- appveyor.yml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 8b7699447712..62356194de72 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,14 +1,12 @@ environment: matrix: - - JULIAVERSION: "julialang/bin/winnt/x86/0.3/julia-0.3-latest-win32.exe" - - JULIAVERSION: "julialang/bin/winnt/x64/0.3/julia-0.3-latest-win64.exe" - - JULIAVERSION: "julianightlies/bin/winnt/x86/julia-latest-win32.exe" - - JULIAVERSION: "julianightlies/bin/winnt/x64/julia-latest-win64.exe" + - JULIAVERSION: "julialang/bin/winnt/x86/0.5/julia-0.5-latest-win32.exe" + - JULIAVERSION: "julialang/bin/winnt/x64/0.5/julia-0.5-latest-win64.exe" branches: only: - master - - 
/release-.*/ + - stable notifications: - provider: Email @@ -17,6 +15,11 @@ notifications: on_build_status_changed: false install: +# If there's a newer build queued for the same PR, cancel this one + - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod ` + https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | ` + Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { ` + throw "There are newer queued builds for this pull request, failing early." } # Download most recent Julia Windows binary - ps: (new-object net.webclient).DownloadFile( $("http://s3.amazonaws.com/"+$env:JULIAVERSION), From 60f5306a31106b623365455cc35b1cf452c747de Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sun, 29 Jan 2017 14:47:56 +0900 Subject: [PATCH 482/630] improve GPU detection. --- deps/build.jl | 55 ++++++++++++++++++++++++---------- docs/src/user-guide/install.md | 9 ++++-- 2 files changed, 46 insertions(+), 18 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index aebc679e304e..ddc4dfc99ea4 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -20,23 +20,56 @@ if haskey(ENV, "MXNET_HOME") end end +# Try to find cuda +CUDAPATHS = String[] +if haskey(ENV, "CUDA_HOME") + push!(CUDAPATHS, joinpath(ENV["CUDA_HOME"], "lib64")) +elseif is_linux() + append!(CUDAPATHS, ["/opt/cuda/lib64", "/usr/local/cuda/lib64"]) +end + +if is_unix() + try + push!(CUDAPATHS, replace(strip(readstring(`which nvcc`)), "bin/nvcc", "lib64")) + end +end + +HAS_CUDA = false +let cudalib = Libdl.find_library(["libcuda", "nvcuda.dll"], CUDAPATHS) + HAS_CUDA = Libdl.dlopen_e(cudalib) != C_NULL +end + +if !HAS_CUDA && is_windows() + # TODO: this needs to be improved. 
+ try + run(`nvcc --version`) + HAS_CUDA = true + end +end +if HAS_CUDA + info("Found a CUDA installation.") +else + info("Did not find a CUDA installation, using CPU-only version of MXNet.") +end using BinDeps @BinDeps.setup if !libmxnet_detected if is_windows() - # TODO: Detect GPU support on Windows - info("Downloading pre-built CPU packages for Windows.") - base_url = "https://github.com/dmlc/mxnet/releases/download/20160531/20160531_win10_x64_cpu.7z" + info("Downloading pre-built packages for Windows.") + name = "20160531_win10_x64_$(HAS_CUDA ? "gpu" : "cpu").7z" + base_url = "https://github.com/dmlc/mxnet/releases/download/20160531/$name" + if libmxnet_curr_ver == "master" # download_cmd uses powershell 2, but we need powershell 3 to do this - run(`powershell -NoProfile -Command Invoke-WebRequest -Uri "https://api.github.com/repos/yajiedesign/mxnet/releases/latest" -OutFile "mxnet.json"`) + run(`powershell -NoProfile -Command Invoke-WebRequest -Uri "https://api.github.com/repos/yajiedesign/mxnet/releases/latest" -OutFile "mxnet.json"`) curr_win = JSON.parsefile("mxnet.json")["tag_name"] info("Can't use MXNet master on Windows, using latest binaries from $curr_win.") end # TODO: Get url from JSON. - package_url = "https://github.com/yajiedesign/mxnet/releases/download/$(curr_win)/$(curr_win)_mxnet_x64_vc12_cpu.7z" + name = "mxnet_x64_vc12_$(HAS_CUDA ? 
"gpu" : "cpu").7z" + package_url = "https://github.com/yajiedesign/mxnet/releases/download/$(curr_win)/$(curr_win)_$(name)" exe7z = joinpath(JULIA_HOME, "7z.exe") @@ -56,16 +89,6 @@ if !libmxnet_detected blas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) - # Try to find cuda - hascuda = false - if haskey(ENV, "CUDA_HOME") - hascuda = Libdl.dlopen_e(joinpath(ENV["CUDA_HOME"], "lib64", "libcuda.so")) != C_NULL - else - cudapaths = String["/opt/cuda/lib64", "/usr/local/cuda/lib64"] - cudalib = Libdl.find_library(["libcuda", "libcuda.so"], cudapaths) - hascuda = Libdl.dlopen_e(cudalib) != C_NULL - end - if VERSION >= v"0.5.0-dev+4338" blas_vendor = Base.BLAS.vendor() else @@ -129,7 +152,7 @@ if !libmxnet_detected `cp make/config.mk config.mk` end `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` - if hascuda + if HAS_CUDA `sed -i -s 's/USE_CUDA = 0/USE_CUDA = 1/' config.mk` if haskey(ENV, "CUDA_HOME") `sed -i -s 's/USE_CUDA_PATH = NULL/USE_CUDA_PATH = $(ENV["CUDA_HOME"])/' config.mk` diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md index 34c532321ef7..abc48dcf76c0 100644 --- a/docs/src/user-guide/install.md +++ b/docs/src/user-guide/install.md @@ -21,9 +21,14 @@ MXNet.jl is built on top of [libmxnet](https://github.com/dmlc/mxnet). Upon installation, Julia will try to automatically download and build libmxnet. +There are two environment variables that change this behaviour. If you +already have a pre-installed version of mxnet you can use `MXNET_HOME` +to point the build-process in the right direction. If the automatic +cuda detection fails you can also set `CUDA_HOME` to override the process. + The libmxnet source is downloaded to `Pkg.dir("MXNet")/deps/src/mxnet`. -The automatic build is using default configurations, with OpenCV, CUDA -disabled. If the compilation failed due to unresolved dependency, or if +The automatic build is using default configurations, with OpenCV disabled. 
+If the compilation failed due to unresolved dependency, or if you want to customize the build, you can compile and install libmxnet manually. Please see below for more details. From efff124a42e446098c2b7620cb0f9f20e36c8536 Mon Sep 17 00:00:00 2001 From: Chiyuan Zhang Date: Fri, 12 May 2017 11:18:11 -0400 Subject: [PATCH 483/630] Add appveyor badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 06313473c060..624b04206a48 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,7 @@ # MXNet [![Build Status](https://travis-ci.org/dmlc/MXNet.jl.svg?branch=master)](https://travis-ci.org/dmlc/MXNet.jl) +[![Windows Build](https://ci.appveyor.com/api/projects/status/re90njols2th2ide?svg=true)](https://ci.appveyor.com/project/pluskid/mxnet-jl) [![codecov.io](https://codecov.io/github/dmlc/MXNet.jl/coverage.svg?branch=master)](https://codecov.io/github/dmlc/MXNet.jl?branch=master) [![](https://img.shields.io/badge/docs-latest-blue.svg)](https://dmlc.github.io/MXNet.jl/latest) [![MXNet](http://pkg.julialang.org/badges/MXNet_0.4.svg)](http://pkg.julialang.org/?pkg=MXNet) From 07777d670a9c36564866de7a01b6e04265d4ac4c Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 13 May 2017 00:16:42 +0900 Subject: [PATCH 484/630] switch to vc14 --- deps/build.jl | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index ddc4dfc99ea4..39688bf8d183 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -6,7 +6,7 @@ import JSON ################################################################################ libmxnet_detected = false libmxnet_curr_ver = "master" -curr_win = "20161125" +curr_win = "20170502" if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") @@ -58,8 +58,7 @@ using BinDeps if !libmxnet_detected if is_windows() info("Downloading pre-built packages for Windows.") - name = "20160531_win10_x64_$(HAS_CUDA ? 
"gpu" : "cpu").7z" - base_url = "https://github.com/dmlc/mxnet/releases/download/20160531/$name" + base_url = "https://github.com/yajiedesign/mxnet/releases/download/weekly_binary_build/prebuildbase_win10_x64_vc14.7z" if libmxnet_curr_ver == "master" # download_cmd uses powershell 2, but we need powershell 3 to do this @@ -68,7 +67,7 @@ if !libmxnet_detected info("Can't use MXNet master on Windows, using latest binaries from $curr_win.") end # TODO: Get url from JSON. - name = "mxnet_x64_vc12_$(HAS_CUDA ? "gpu" : "cpu").7z" + name = "mxnet_x64_vc14_$(HAS_CUDA ? "gpu" : "cpu").7z" package_url = "https://github.com/yajiedesign/mxnet/releases/download/$(curr_win)/$(curr_win)_$(name)" exe7z = joinpath(JULIA_HOME, "7z.exe") From 28344a49d14eef467723a09393ea796a9b52ea77 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 13 May 2017 14:41:18 +0900 Subject: [PATCH 485/630] actually copy the mxnet.dll to the right place on windows --- deps/build.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/deps/build.jl b/deps/build.jl index 39688bf8d183..c2b10b459089 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -75,9 +75,11 @@ if !libmxnet_detected run(download_cmd(base_url, "mxnet_base.7z")) run(`$exe7z x mxnet_base.7z -y -ousr`) run(`cmd /c copy "usr\\3rdparty\\openblas\\bin\\*.dll" "usr\\lib"`) + run(`cmd /c copy "usr\\3rdparty\\opencv\\*.dll" "usr\\lib"`) run(download_cmd(package_url, "mxnet.7z")) run(`$exe7z x mxnet.7z -y -ousr`) + run(`cmd /c copy "usr\\build\\*.dll" "usr\\lib"`) return end From 96bdfa7056d179353f061265f569d3f8fd3c11a6 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sat, 13 May 2017 15:01:23 +0900 Subject: [PATCH 486/630] Prebuilt binaries only work on windows 64bit --- appveyor.yml | 1 - deps/build.jl | 4 ++++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/appveyor.yml b/appveyor.yml index 62356194de72..569f13ae0b57 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,6 +1,5 @@ environment: matrix: - - JULIAVERSION: 
"julialang/bin/winnt/x86/0.5/julia-0.5-latest-win32.exe" - JULIAVERSION: "julialang/bin/winnt/x64/0.5/julia-0.5-latest-win64.exe" branches: diff --git a/deps/build.jl b/deps/build.jl index c2b10b459089..754836288851 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -57,6 +57,10 @@ using BinDeps @BinDeps.setup if !libmxnet_detected if is_windows() + if Sys.ARCH != :x86_64 + info("Prebuilt windows binaries are only available on 64bit. You will have to built MXNet yourself.") + return + end info("Downloading pre-built packages for Windows.") base_url = "https://github.com/yajiedesign/mxnet/releases/download/weekly_binary_build/prebuildbase_win10_x64_vc14.7z" From 2ecf2c1666cf3edc4a32c18836cc8e86094172f7 Mon Sep 17 00:00:00 2001 From: Valentin Churavy Date: Sun, 14 May 2017 11:07:50 +0900 Subject: [PATCH 487/630] update NEWS.md --- NEWS.md | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index f4fb93004db8..551985db996c 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,7 +1,17 @@ +# v0.2.2 (2017.05.14) +* Updated supported version of MXNet to 0.9.4. +* Improved build-system with support for auto-detecting GPU support. +* Several updates to Metrics. +* CI for Windows. +* Verbosity option for `predict` (@rdeits) + +# v0.2.1 (2017.01.29) +* Bugfix release for Windows + # v0.2.0 (2017.01.26) * Drop support for Julia v0.4. * Added support for NVVM. -* Updated supported version of MXNet to 0.9.3. +* Updated supported version of MXNet to 0.9.2 * New optimizers (@Arkoniak). 
# v0.1.0 (2016.09.08) From 9c709227f90af180bd81094c53668ae6edeca024 Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Tue, 16 May 2017 17:19:53 -0700 Subject: [PATCH 488/630] Update CI URLs to point to new caching infrastructure --- appveyor.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/appveyor.yml b/appveyor.yml index 569f13ae0b57..3de92afc4c9d 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,6 +1,6 @@ environment: matrix: - - JULIAVERSION: "julialang/bin/winnt/x64/0.5/julia-0.5-latest-win64.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5-latest-win64.exe" branches: only: @@ -14,6 +14,7 @@ notifications: on_build_status_changed: false install: + - ps: "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12" # If there's a newer build queued for the same PR, cancel this one - ps: if ($env:APPVEYOR_PULL_REQUEST_NUMBER -and $env:APPVEYOR_BUILD_NUMBER -ne ((Invoke-RestMethod ` https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | ` @@ -21,7 +22,7 @@ install: throw "There are newer queued builds for this pull request, failing early." } # Download most recent Julia Windows binary - ps: (new-object net.webclient).DownloadFile( - $("http://s3.amazonaws.com/"+$env:JULIAVERSION), + $env:JULIA_URL, "C:\projects\julia-binary.exe") # Run installer silently, output to C:\projects\julia - C:\projects\julia-binary.exe /S /D=C:\projects\julia From b63fd9b7d19ef1dcd6eed610fb0089f22f829784 Mon Sep 17 00:00:00 2001 From: holl- Date: Tue, 23 May 2017 19:59:24 +0200 Subject: [PATCH 489/630] Fix #242 (#247) Previously, when providing a KVStore object to mx.fit, the method would crash with an UndefVarError. This is now fixed by moving the definition of update_on_kvstore from _create_kvstore to fit. 
--- src/model.jl | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/src/model.jl b/src/model.jl index afa3aa351a8a..4be38ededb92 100644 --- a/src/model.jl +++ b/src/model.jl @@ -207,11 +207,11 @@ end See also [`train`](@ref), [`fit`](@ref), [`init_model`](@ref), and [`load_checkpoint`](@ref) """ -function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; +function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; overwrite :: Bool = true, verbosity :: Integer = 1) predict(self, data; overwrite = overwrite, callback=callback, verbosity = verbosity) end -function predict(self :: FeedForward, data :: AbstractDataProvider; +function predict(self :: FeedForward, data :: AbstractDataProvider; overwrite::Bool=true, callback::Union{Function,Void}=nothing, verbosity :: Integer = 1) data_shapes = provide_data(data) data_names = [x[1] for x in data_shapes] @@ -264,9 +264,9 @@ function _init_model(self :: FeedForward, data :: AbstractDataProvider, initiali init_model(self, initializer; overwrite=overwrite, [provide_data(data)..., provide_label(data)...]...) 
end -function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}) +function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}, verbosity :: Int) if num_device == 1 && !ismatch(r"dist", string(kv_type)) - kv = nothing + return nothing else if kv_type == :local max_size = maximum([prod(size(param)) for (k,param) in arg_params]) @@ -275,17 +275,10 @@ function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params : else kv_type = :local_allreduce_cpu end - info("Auto-select kvstore type = $kv_type") + verbosity >= 2 && info("Auto-select kvstore type = $kv_type") end - kv = KVStore(kv_type) - end - - update_on_kvstore = true - if isa(kv, Void) || ismatch(r"local_allreduce", string(get_type(kv))) - update_on_kvstore = false + return KVStore(kv_type) end - - return (kv, update_on_kvstore) end @defstruct TrainingOptions ( @@ -371,7 +364,12 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra kvstore = opts.kvstore if isa(kvstore, Base.Symbol) opts.verbosity >= 2 && info("Creating KVStore...") - kvstore, update_on_kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params) + kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params, opts.verbosity) + end + + update_on_kvstore = true + if isa(kvstore, Void) || ismatch(r"local_allreduce", string(get_type(kvstore))) + update_on_kvstore = false end # get grad attribute to allow for freezing From 172af604bbcb0a4e0fd9c98671b5051f08d83e91 Mon Sep 17 00:00:00 2001 From: Elliot Saba Date: Tue, 30 May 2017 07:38:51 -0700 Subject: [PATCH 490/630] Use `Sys.CPU_CORES` not `nprocs()` when doing a parallel `make` (#250) --- deps/build.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 754836288851..0b728e3edfb4 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -168,9 +168,9 @@ if !libmxnet_detected 
ChangeDirectory(_mxdir) `cp ../../cblas.h include/cblas.h` if USE_JULIA_BLAS - `make -j$(nprocs()) USE_BLAS=$blas_name $MSHADOW_LDFLAGS` + `make -j$(min(Sys.CPU_CORES,8)) USE_BLAS=$blas_name $MSHADOW_LDFLAGS` else - `make -j$(nprocs())` + `make -j$(min(Sys.CPU_CORES,8))` end end FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin From 2b26bc2a3e150087c10c4f700731a16fa6fbbf28 Mon Sep 17 00:00:00 2001 From: ExpandingMan Date: Thu, 1 Jun 2017 11:17:41 -0400 Subject: [PATCH 491/630] update for Julia v0.6 (#248) * Updated for Julia v0.6 * Updated tests. * Added 0.5 compat. * Updated REQUIRE. * Removed compat macros. Should now work for both 0.5 and 0.6. * Attempting to fix dot operators for 0.5. * Still trying to fix the damn dot ops in 0.5. * Dotted operators now get imported from Base in version 0.5. * Updated appveyor and travis to use both 0.5 and 0.6. * Rewrote @chain to remove all AST dependance. * Updated appveyor yml. * Added a basic test for chain macro. * Fixed isa in chain to work on 0.5. * Fixed incorrect broadcast issue. * Updated appveyor. * Fixed broadcast multiplications for 0.5, again. 
--- .travis.yml | 1 + REQUIRE | 5 +- appveyor.yml | 3 ++ src/MXNet.jl | 4 ++ src/base.jl | 10 ++-- src/callback.jl | 10 ++-- src/compat.jl | 32 ++++++++++++ src/initializer.jl | 2 +- src/io.jl | 8 +-- src/metric.jl | 2 +- src/model.jl | 2 +- src/name.jl | 6 +-- src/ndarray.jl | 38 +++++++------- src/optimizer.jl | 12 +++-- src/optimizers/adadelta.jl | 6 +-- src/optimizers/adagrad.jl | 2 +- src/optimizers/adam.jl | 2 +- src/optimizers/nadam.jl | 2 +- src/optimizers/rmsprop.jl | 2 +- src/symbolic-node.jl | 94 ++++++++++++++++++---------------- test/common.jl | 6 +++ test/unittest/bind.jl | 8 +-- test/unittest/ndarray.jl | 2 +- test/unittest/symbolic-node.jl | 12 ++++- 24 files changed, 170 insertions(+), 101 deletions(-) create mode 100644 src/compat.jl diff --git a/.travis.yml b/.travis.yml index 7c1c6b9be19d..708b5392fd3e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,6 +7,7 @@ os: osx_image: xcode8 julia: - 0.5 + - 0.6 # - nightly 0.6 supports depends on #170 # dependent apt packages diff --git a/REQUIRE b/REQUIRE index 8fdfc5fcde96..ca30ebc84ff9 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,5 +1,6 @@ -julia 0.5 -Compat 0.9.5 +julia 0.5.2 +Compat 0.25.2 Formatting BinDeps JSON +MacroTools diff --git a/appveyor.yml b/appveyor.yml index 3de92afc4c9d..e3d1984f4732 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,6 +1,7 @@ environment: matrix: - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5-latest-win64.exe" + - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.6/julia-0.6-latest-win64.exe" branches: only: @@ -20,6 +21,7 @@ install: https://ci.appveyor.com/api/projects/$env:APPVEYOR_ACCOUNT_NAME/$env:APPVEYOR_PROJECT_SLUG/history?recordsNumber=50).builds | ` Where-Object pullRequestId -eq $env:APPVEYOR_PULL_REQUEST_NUMBER)[0].buildNumber) { ` throw "There are newer queued builds for this pull request, failing early." 
} + # Download most recent Julia Windows binary - ps: (new-object net.webclient).DownloadFile( $env:JULIA_URL, @@ -35,3 +37,4 @@ build_script: test_script: - C:\projects\julia\bin\julia --check-bounds=yes -e "Pkg.test(\"MXNet\")" + diff --git a/src/MXNet.jl b/src/MXNet.jl index 892cc4e95163..bcc6296b3fb8 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -16,12 +16,16 @@ if VERSION >= v"0.6.0-dev.1024" end using Formatting +using MacroTools # Functions from base that we can safely extend and that are defined by libmxnet. import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm, transpose include("base.jl") + +include("compat.jl") + include("context.jl") include("util.jl") diff --git a/src/base.jl b/src/base.jl index 183465341f3c..dd816dd41582 100644 --- a/src/base.jl +++ b/src/base.jl @@ -6,12 +6,12 @@ end ################################################################################ # Common types used in MXNet API ################################################################################ -typealias MX_uint Cuint -typealias MX_float Cfloat -typealias MX_handle Ptr{Void} +const MX_uint = Cuint +const MX_float = Cfloat +const MX_handle = Ptr{Void} -typealias char_p Ptr{UInt8} -typealias char_pp Ptr{char_p} +const char_p = Ptr{UInt8} +const char_pp = Ptr{char_p} ################################################################################ # Initialization and library API entrance diff --git a/src/callback.jl b/src/callback.jl index ee200d2e30c9..780869daf9f0 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -3,21 +3,21 @@ Abstract type of callback functions used in training. """ -abstract AbstractCallback +@compat abstract type AbstractCallback end """ AbstractBatchCallback Abstract type of callbacks to be called every mini-batch. """ -abstract AbstractBatchCallback <: AbstractCallback +@compat abstract type AbstractBatchCallback <: AbstractCallback end """ AbstractEpochCallback Abstract type of callbacks to be called every epoch. 
""" -abstract AbstractEpochCallback <: AbstractCallback +@compat abstract type AbstractEpochCallback <: AbstractCallback end type BatchCallback <: AbstractBatchCallback frequency :: Int @@ -51,7 +51,7 @@ See also [`every_n_epoch`](@ref) and [`speedometer`](@ref). function every_n_batch(callback :: Function, n :: Int; call_on_0 :: Bool = false) BatchCallback(n, call_on_0, callback) end -@compat function (cb :: BatchCallback)(state :: OptimizationState) +function (cb :: BatchCallback)(state :: OptimizationState) if state.curr_batch == 0 if cb.call_on_0 cb.callback(state) @@ -107,7 +107,7 @@ See also [`every_n_batch`](@ref). function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end -@compat function (cb :: EpochCallback){T<:Real}(model :: Any, state :: OptimizationState, metric :: Vector{Tuple{Base.Symbol, T}}) +function (cb :: EpochCallback){T<:Real}(model :: Any, state :: OptimizationState, metric :: Vector{Tuple{Base.Symbol, T}}) if state.curr_epoch == 0 if cb.call_on_0 cb.callback(model, state, metric) diff --git a/src/compat.jl b/src/compat.jl new file mode 100644 index 000000000000..7357632174e5 --- /dev/null +++ b/src/compat.jl @@ -0,0 +1,32 @@ +# this file contains code used for enabling backward compatibility with 0.5 + +# have to import base dotted operators if in 0.5 +if VERSION < v"0.6.0-dev" + import Base: .+, .-, .*, ./, .^ +end + + +# this is for declaring broadcasted functions in 0.5 +# TODO this macro should be removed when 0.5 support is dropped +macro compatdot(fblock) + if VERSION ≥ v"0.6.0-dev" + return esc(fblock) + end + @capture(fblock, function Base.broadcast(::typeof(op_), args__) + body_ + end) + opdot = Symbol(string('.',op)) + esc(quote + function $opdot($(args...)) + $body + end + end) +end + +macro compatmul(expr1, expr2) + if VERSION ≥ v"0.6.0-dev" + esc(:(broadcast(*, $expr1, $expr2))) + else + esc(:($expr1 .* $expr2)) + end +end diff --git a/src/initializer.jl 
b/src/initializer.jl index 4685f41101cc..80e6026d1bf4 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -15,7 +15,7 @@ Or, if full behavior customization is needed, override the following function init(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) """ -abstract AbstractInitializer +@compat abstract type AbstractInitializer end function init{T<:AbstractInitializer}(self :: T, name :: Base.Symbol, array :: NDArray) strname = string(name) diff --git a/src/io.jl b/src/io.jl index ea22a4da961f..242a9ad98d1a 100644 --- a/src/io.jl +++ b/src/io.jl @@ -15,7 +15,7 @@ Normally this involves defining: * `Base.done(provider, state) -> Bool` * `Base.next(provider, state) -> (AbstractDataBatch, AbstractDataProvider)` """ -abstract AbstractDataProvider +@compat abstract type AbstractDataProvider end """ get_batch_size(provider) -> Int @@ -53,7 +53,7 @@ function provide_label end Base type for data provider states. """ -abstract AbstractDataProviderState +@compat abstract type AbstractDataProviderState end """ AbstractDataBatch @@ -70,7 +70,7 @@ The following utility functions will be automatically defined: * [`load_data!`](@ref) * [`load_label!`](@ref) """ -abstract AbstractDataBatch +@compat abstract type AbstractDataBatch end """ count_samples(provider, batch) -> Int @@ -127,7 +127,7 @@ get_label{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batc A alias type of `Tuple{UnitRange{Int},NDArray}`. 
""" -typealias SlicedNDArray Tuple{UnitRange{Int},NDArray} +const SlicedNDArray = Tuple{UnitRange{Int},NDArray} function _load_general!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{Vector{SlicedNDArray}}, loader::Function) diff --git a/src/metric.jl b/src/metric.jl index 3bff815d4f35..b39309f6275c 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -8,7 +8,7 @@ interfaces: * [`reset!`](@ref) * [`get`](@ref) """ -abstract AbstractEvalMetric +@compat abstract type AbstractEvalMetric end """ hasNDArraySupport(metric) -> Val{true/false} diff --git a/src/model.jl b/src/model.jl index 4be38ededb92..a592b500e9cb 100644 --- a/src/model.jl +++ b/src/model.jl @@ -3,7 +3,7 @@ The abstract super type of all models in MXNet.jl. """ -abstract AbstractModel +@compat abstract type AbstractModel end """ FeedForward diff --git a/src/name.jl b/src/name.jl index 2ec531834232..8ba0f707ff1a 100644 --- a/src/name.jl +++ b/src/name.jl @@ -1,6 +1,6 @@ -abstract AbstractNameManager -typealias NameType Union{Base.Symbol, AbstractString} -typealias NameCounter Dict{Base.Symbol, Int} +@compat abstract type AbstractNameManager end +const NameType = Union{Base.Symbol, AbstractString} +const NameCounter = Dict{Base.Symbol, Int} import Base: get! diff --git a/src/ndarray.jl b/src/ndarray.jl index 42ad43d7c492..442f46d682ea 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,7 +1,7 @@ # All the types supported by mshadow. -typealias DType Union{Float32, Float64, Float16, UInt8, Int32} +const DType = Union{Float32, Float64, Float16, UInt8, Int32} @enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 -typealias DEFAULT_DTYPE Float32 +const DEFAULT_DTYPE = Float32 function toTypeFlag{T <: DType}(:: Type{T}) if T == Float32 @@ -300,7 +300,6 @@ function eltype{T <: Union{NDArray, MX_NDArrayHandle}}(arr :: T) end -import Base: slice """ slice(arr :: NDArray, start:stop) @@ -532,7 +531,8 @@ function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) 
return dst end -import Base: +, .+ +import Base.broadcast +import Base: + """ +(args...) @@ -546,14 +546,15 @@ function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) ret = copy(arg0, context(arg0)) add_to!(ret, args...) end -function .+(arg0 :: NDArray, args :: Union{Real, NDArray}...) +@compatdot function Base.broadcast(::typeof(+), arg0 :: NDArray, args :: Union{Real, NDArray}...) +(arg0, args...) end function +(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) +(arg1, arg0, args...) end -function .+(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) - .+(arg1, arg0, args...) +@compatdot function Base.broadcast(::typeof(+), arg0 :: Real, arg1 :: NDArray, + args :: Union{Real, NDArray}...) + broadcast(+, arg1, arg0, args...) end """ @@ -570,7 +571,7 @@ function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) end end -import Base: -, .- +import Base: - """ -(arg0, arg1) @@ -584,7 +585,7 @@ function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) sub_from!(ret, arg1) end -function .-(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) +@compatdot function Base.broadcast(::typeof(-), arg0 :: NDArray, arg1 :: Union{Real, NDArray}) -(arg0, arg1) end function -(arg0 :: Real, arg1 :: NDArray) @@ -592,7 +593,7 @@ function -(arg0 :: Real, arg1 :: NDArray) add_to!(ret, arg0) return ret end -function .-(arg0 :: Real, arg1 :: NDArray) +@compatdot function Base.broadcast(::typeof(-), arg0 :: Real, arg1 :: NDArray) -(arg0, arg1) end @@ -616,19 +617,19 @@ function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) return dst end -import Base: .*, * +import Base: * """ .*(arg0, arg1) Elementwise multiplication of `arg0` and `arg`, could be either scalar or `NDArray`. 
""" -function .*(arg0 :: NDArray, arg :: Union{Real, NDArray}) +@compatdot function Base.broadcast(::typeof(*), arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) mul_to!(ret, arg) end -function .*(arg0 :: Real, arg :: NDArray) - .*(arg, arg0) +@compatdot function Base.broadcast(::typeof(*), arg0 :: Real, arg :: NDArray) + arg .* arg0 end """ @@ -659,13 +660,13 @@ function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) end end -import Base: ./, / +import Base: / """ ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) Elementwise dividing an `NDArray` by a scalar or another `NDArray` of the same shape. """ -function ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) +@compatdot function Base.broadcast(::typeof(/), arg0 :: NDArray, arg :: Union{Real, NDArray}) ret = copy(arg0, context(arg0)) div_from!(ret, arg) end @@ -676,7 +677,7 @@ end Divide an `NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. """ function /(arg0 :: NDArray, arg :: Real) - ./(arg0, arg) + arg0 ./ arg end @@ -1063,7 +1064,8 @@ macro _import_ndarray_functions() func_name = Symbol(name) expr = quote - $(isdefined(Base, func_name) ? :(import Base.$func_name) : :()) + # TODO the explicit exclusion of take will no longer be necessary when it is removed from Base + $((isdefined(Base, func_name) && func_name ≠ :take)? :(import Base.$func_name) : :()) $func_def @doc $desc -> $func_def2 diff --git a/src/optimizer.jl b/src/optimizer.jl index 5e9065b64127..11508bf22035 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -3,21 +3,21 @@ Base type for all optimizers. """ -abstract AbstractOptimizer +@compat abstract type AbstractOptimizer end """ AbstractLearningRateScheduler Base type for all learning rate scheduler. """ -abstract AbstractLearningRateScheduler +@compat abstract type AbstractLearningRateScheduler end """ AbstractMomentumScheduler Base type for all momentum scheduler. 
""" -abstract AbstractMomentumScheduler +@compat abstract type AbstractMomentumScheduler end @@ -62,6 +62,7 @@ function get_learning_rate end ################################################################################ # The learning rate module module LearningRate +using Compat import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate """ @@ -137,6 +138,7 @@ end ################################################################################ # The Momentum module module Momentum +using Compat import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum """ @@ -145,7 +147,7 @@ import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum The null momentum scheduler always returns 0 for momentum. It is also used to explicitly indicate momentum should not be used. """ -type Null <: AbstractMomentumScheduler +immutable Null <: AbstractMomentumScheduler end get_momentum(self :: Null, state :: OptimizationState) = 0.0 @@ -240,7 +242,7 @@ end Base class for all optimizer options. 
""" -abstract AbstractOptimizerOptions +@compat abstract type AbstractOptimizerOptions end """ normalized_gradient(opts, state, weight, grad) diff --git a/src/optimizers/adadelta.jl b/src/optimizers/adadelta.jl index e00cc9a42abd..aec81445fbb2 100644 --- a/src/optimizers/adadelta.jl +++ b/src/optimizers/adadelta.jl @@ -78,14 +78,14 @@ function update(self :: AdaDelta, index :: Int, weight :: NDArray, # Update state.acc as in RMSProp @inplace state.acc .*= self.opts.rho - @inplace state.acc .+= (1 - self.opts.rho) * grad .* grad + @inplace state.acc .+= (1 - self.opts.rho) * @compatmul(grad, grad) # Compute update using the "old" state.delta_acc - update = grad .* sqrt(state.delta_acc + self.opts.epsilon) ./ + update = @compatmul(grad, sqrt(state.delta_acc + self.opts.epsilon)) ./ (sqrt(state.acc + self.opts.epsilon)) @inplace weight .+= -lr * update # update state.delta_acc using update @inplace state.delta_acc .*= self.opts.rho - @inplace state.delta_acc .+= (1 - self.opts.rho) * update .* update + @inplace state.delta_acc .+= (1 - self.opts.rho) * @compatmul(update, update) end diff --git a/src/optimizers/adagrad.jl b/src/optimizers/adagrad.jl index 196998121cce..2df3c07efd44 100644 --- a/src/optimizers/adagrad.jl +++ b/src/optimizers/adagrad.jl @@ -61,6 +61,6 @@ function update(self :: AdaGrad, index :: Int, weight :: NDArray, lr = get_learning_rate(self.opts.lr_scheduler, self.state) grad = normalized_gradient(self.opts, self.state, weight, grad) - @inplace state .+= grad .* grad + @inplace state .+= @compatmul(grad, grad) @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon)) end diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index 3af5c3579736..a6b5c69eefc3 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -61,7 +61,7 @@ function update(self :: ADAM, index :: Int, weight :: NDArray, grad :: NDArray, grad = normalized_gradient(self.opts, self.state, weight, grad) state.mt = self.opts.beta1 * state.mt + (1 - 
self.opts.beta1) * grad - state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) * (grad .* grad) + state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) * @compatmul(grad, grad) at = sqrt(1.0 - state.beta2Power)/(1.0 - state.beta1Power) diff --git a/src/optimizers/nadam.jl b/src/optimizers/nadam.jl index 65a195f674fe..b90b7a106984 100644 --- a/src/optimizers/nadam.jl +++ b/src/optimizers/nadam.jl @@ -91,7 +91,7 @@ function update(self :: Nadam, index :: Int, weight :: NDArray, mt = state.mt / (1.0 - momentum_next) @inplace state.nt .*= self.opts.beta2 - @inplace state.nt .+= (1.0 - self.opts.beta2) * grad .* grad + @inplace state.nt .+= (1.0 - self.opts.beta2) * @compatmul(grad, grad) nt = state.nt / (1.0 - state.beta2Power) state.beta2Power *= self.opts.beta2 diff --git a/src/optimizers/rmsprop.jl b/src/optimizers/rmsprop.jl index 01a40651d2f6..0c6c2d45a753 100644 --- a/src/optimizers/rmsprop.jl +++ b/src/optimizers/rmsprop.jl @@ -65,7 +65,7 @@ function update(self :: RMSProp, index :: Int, weight :: NDArray, grad = normalized_gradient(self.opts, self.state, weight, grad) @inplace state .*= self.opts.rho - @inplace state .+= (1 - self.opts.rho) * grad .* grad + @inplace state .+= (1 - self.opts.rho) * @compatmul(grad, grad) @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon)) end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 4c73bd33642d..8a567c2e8003 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -38,11 +38,11 @@ function Base.copy(self :: SymbolicNode) Base.deepcopy(self) end -@compat function (self::SymbolicNode)(args :: SymbolicNode...) +function (self::SymbolicNode)(args :: SymbolicNode...) s = deepcopy(self) _compose!(s, args...) end -@compat function (self::SymbolicNode)(;kwargs...) +function (self::SymbolicNode)(;kwargs...) s = deepcopy(self) _compose!(s; kwargs...) 
end @@ -217,7 +217,7 @@ function get_name(self :: mx.SymbolicNode) success = Ref(0) @mxcall(:MXSymbolGetName, (MX_handle, Ref{char_p}, Ref{Int}), self.handle.value, name, success) @assert success[] != -1 - return Symbol(unsafe_wrap(String, name[])) + return Symbol(unsafe_string(name[])) end """ @@ -435,7 +435,8 @@ function Base.getindex(self :: SymbolicNode, idx :: Int) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -import Base: +, .+ +import Base.broadcast +import Base: + function +(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) ret = self for arg in args @@ -447,34 +448,35 @@ function +(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) end ret end -function .+(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) +@compatdot function Base.broadcast(::typeof(+), self::SymbolicNode, args::Union{SymbolicNode,Real}...) +(self, args...) end function +(s1 :: Real, self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) +(self, s1, args...) end -function .+(s1 :: Real, self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) +@compatdot function Base.broadcast(::typeof(+), s1::Real, self::SymbolicNode, + args::Union{SymbolicNode,Real}...) +(self, s1, args...) 
end -import Base: -, .- +import Base: - function -(self :: SymbolicNode, arg :: SymbolicNode) _Minus(self, arg) end -function .-(self :: SymbolicNode, arg :: SymbolicNode) +@compatdot function Base.broadcast(::typeof(-), self :: SymbolicNode, arg :: SymbolicNode) -(self, arg) end function -(self :: SymbolicNode, arg :: Real) _MinusScalar(self, scalar=MX_float(arg)) end -function .-(self :: SymbolicNode, arg :: Real) +@compatdot function Base.broadcast(::typeof(-), self :: SymbolicNode, arg :: Real) -(self, arg) end function -(arg :: Real, self :: SymbolicNode) _RMinusScalar(self, scalar=arg) end -function .-(arg :: Real, self :: SymbolicNode) +@compatdot function Base.broadcast(::typeof(-), arg :: Real, self :: SymbolicNode) -(arg, self) end @@ -482,8 +484,8 @@ function -(self :: SymbolicNode) -(0, self) end -import Base: .*, * -function .*(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) +import Base: * +@compatdot function Base.broadcast(::typeof(*), self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) ret = self for arg in args if isa(arg, SymbolicNode) @@ -494,8 +496,9 @@ function .*(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) end ret end -function .*(arg :: Real, self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) - .*(self, arg, args...) +@compatdot function Base.broadcast(::typeof(*), arg :: Real, self :: SymbolicNode, + args :: Union{SymbolicNode,Real}...) + broadcast(*, self, arg, args...) 
end function *(arg :: Real, self :: SymbolicNode) _MulScalar(self, scalar=arg) @@ -504,32 +507,32 @@ function *(self :: SymbolicNode, arg :: Real) *(arg, self) end -import Base: ./, / -function ./(self :: SymbolicNode, arg :: SymbolicNode) +import Base: / +@compatdot function Base.broadcast(::typeof(/), self :: SymbolicNode, arg :: SymbolicNode) _Div(self, arg) end -function ./(self :: SymbolicNode, arg :: Real) +@compatdot function Base.broadcast(::typeof(/), self :: SymbolicNode, arg :: Real) _DivScalar(self, scalar=MX_float(arg)) end function /(self :: SymbolicNode, arg :: Real) - ./(self, arg) + self ./ arg end function /(arg :: Real, self :: SymbolicNode) _RDivScalar(self, scalar=arg) end -function ./(arg :: Real, self :: SymbolicNode) +@compatdot function Base.broadcast(::typeof(/), arg :: Real, self :: SymbolicNode) _RDivScalar(self, scalar=arg) end -import Base: .^, ^ -function .^(self :: SymbolicNode, pow :: SymbolicNode) +import Base: ^ +@compatdot function Base.broadcast(::typeof(^), self :: SymbolicNode, pow :: SymbolicNode) _Power(self, pow) end -function .^(self :: SymbolicNode, pow :: AbstractFloat) +@compatdot function Base.broadcast(::typeof(^), self :: SymbolicNode, pow :: AbstractFloat) _PowerScalar(self, scalar=pow) end function ^(self :: SymbolicNode, pow :: AbstractFloat) - .^(self, pow) + self .^ pow end function _compose!(node :: SymbolicNode; kwargs...) @@ -750,26 +753,31 @@ end # Utility macros to chain up symbols ################################################################################ macro chain(layers) - exprs = [] - last_layer = nothing - function _chain_layer(layer, last_layer) - if isa(last_layer, Void) - esc(layer) - else - @assert(isa(layer, Expr) && layer.head == :call, "Do not know how to chain up $layer") - return Expr(:call, esc(layer.args[1]), last_layer, map(esc, layer.args[2:end])...) 
+ exprs = [] + last_layer = nothing + + function _chain_layer(layer, last_layer) + if isa(last_layer, Void) + return esc(layer) + else + if @capture(layer, f_(x__)) + return :($f($last_layer, $(x...))) + else + throw(AssertionError("$layer is not a valid function call and cannot be chained.")) + end + end end - end - while true - if layers.head == :(=>) - new_layer = gensym() - push!(exprs, :($new_layer = $(_chain_layer(layers.args[1], last_layer)))) - last_layer = new_layer - layers = layers.args[2] - else - push!(exprs, _chain_layer(layers, last_layer)) - break + + while true + if @capture(layers, l1_=>l2_) + new_layer = gensym() + push!(exprs, :($new_layer = $(_chain_layer(l1, last_layer)))) + last_layer = new_layer + layers = l2 + else + push!(exprs, _chain_layer(layers, last_layer)) + break + end end - end - return Expr(:block, exprs...) + Expr(:block, exprs...) end diff --git a/test/common.jl b/test/common.jl index 7f7cd3cb78ea..035650e74f72 100644 --- a/test/common.jl +++ b/test/common.jl @@ -19,3 +19,9 @@ function mlp2() return out end +function mlpchain() + mx.@chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=1000) => + mx.Activation(act_type=:relu) => + mx.FullyConnected(name=:fc2, num_hidden=10) +end diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index 79f639518487..0e32446b9e0f 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -56,13 +56,13 @@ end function test_arithmetic() for T in [mx.fromTypeFlag(TF) for TF in instances(mx.TypeFlag)] - test_arithmetic(T, .+, (g,x,y) -> (g,g)) - test_arithmetic(T, .-, (g,x,y) -> (g,-g)) - test_arithmetic(T, .*, (g,x,y) -> (y.*g, x.*g)) + test_arithmetic(T, (x,y) -> x .+ y, (g,x,y) -> (g,g)) + test_arithmetic(T, (x,y) -> x .- y, (g,x,y) -> (g,-g)) + test_arithmetic(T, (x,y) -> x .* y, (g,x,y) -> (y.*g, x.*g)) if T <: Integer || T == Float16 warn("Not running division test for $T") else - test_arithmetic(T, ./, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) + test_arithmetic(T, 
(x,y) -> x ./ y, (g,x,y) -> (g ./ y, -x .* g ./ (y.^2))) end end end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 155f365d6ee8..69599960973f 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -73,7 +73,7 @@ function test_slice() array = mx.zeros((2,4)) array[2:3] = ones(2,2) @test copy(array) == [0 1 1 0; 0 1 1 0] - @test copy(slice(array, 2:3)) == [1 1; 1 1] + @test copy(mx.slice(array, 2:3)) == [1 1; 1 1] end function test_plus() diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 4c8cff8adccc..5d8d01b608db 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -2,7 +2,7 @@ module TestSymbolicNode using MXNet using Base.Test -using ..Main: mlp2, reldiff +using ..Main: mlp2, mlpchain, reldiff ################################################################################ # Test Implementations @@ -16,6 +16,15 @@ function test_basic() @test mx.list_auxiliary_states(model) == Symbol[] end +function test_chain() + info("SymbolicNode::chain") + + model = mlpchain() + @test mx.list_arguments(model) == [:data,:fc1_weight,:fc1_bias,:fc2_weight,:fc2_bias] + @test mx.list_outputs(model) == [:fc2_output] + @test mx.list_auxiliary_states(model) == Symbol[] +end + function test_internal() info("SymbolicNode::internal") @@ -140,6 +149,7 @@ end ################################################################################ @testset "SymbolicNode Test" begin test_basic() + test_chain() test_internal() test_compose() test_infer_shape() From 841e9ee0937df6587f5d24dfc9a8efed1e26de17 Mon Sep 17 00:00:00 2001 From: Michael Creel Date: Fri, 2 Jun 2017 14:42:53 +0200 Subject: [PATCH 492/630] update regression example for julia v0.6(#254) * update regression example for julia v0.6 Updated to work with julia 0.6.x. The plot is commented out for now. 
--- examples/regression-example.jl | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/examples/regression-example.jl b/examples/regression-example.jl index 508dfa79b308..885f031dc658 100644 --- a/examples/regression-example.jl +++ b/examples/regression-example.jl @@ -6,11 +6,11 @@ the predictions from the trained net. =# using MXNet using Distributions -using Plots +#using Plots # data generating process generate_inputs(mean, var, size) = rand(MvNormal(mean, var), size) -output(data) = sin(data[1,:]).*sin(data[2,:])./(data[1,:].*data[2,:]) +output(data) = sin.(data[1:1,:]).*sin.(data[2:2,:])./(data[1:1,:].*data[2:2,:]) # create training and evaluation data sets mean=[0.0;0.0] @@ -36,7 +36,7 @@ net = @mx.chain mx.Variable(:data) => mx.FullyConnected(num_hidden=3) => mx.Activation(act_type=:tanh) => mx.FullyConnected(num_hidden=1) => - mx.LinearRegressionOutput(label) + mx.LinearRegressionOutput(mx.Variable(:label)) # final model definition, don't change, except if using gpu model = mx.FeedForward(net, context=mx.cpu()) @@ -56,4 +56,5 @@ mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprov # obtain predictions plotprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput) fit = mx.predict(model, plotprovider) -scatter(ValidationOutput,fit',w = 3, xlabel="true", ylabel="predicted", title="45º line is what we hope for", show=true) +println("correlation between fitted values and true regression line: ", cor(vec(fit), vec(ValidationOutput))) +#scatter(ValidationOutput,fit',w = 3, xlabel="true", ylabel="predicted", title="45º line is what we hope for", show=true) From c965235765835e54204542f720c7a67b40e3e2f1 Mon Sep 17 00:00:00 2001 From: Josh Bode Date: Fri, 9 Jun 2017 17:16:45 +1000 Subject: [PATCH 493/630] Add extra check on CUDA library detection Some machines return a non-zero pointer for `Libdl.dlopen("")` causing MXNet dep build to incorrectly report presense of CUDA libraries. 
--- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 0b728e3edfb4..bc86af683b23 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -36,7 +36,7 @@ end HAS_CUDA = false let cudalib = Libdl.find_library(["libcuda", "nvcuda.dll"], CUDAPATHS) - HAS_CUDA = Libdl.dlopen_e(cudalib) != C_NULL + HAS_CUDA = !isempty(cudalib) && Libdl.dlopen_e(cudalib) != C_NULL end if !HAS_CUDA && is_windows() From e7d1b622b3ffa0d3c7cb2ed6bb7b1d8f2a70a1cd Mon Sep 17 00:00:00 2001 From: Josh Bode Date: Wed, 21 Jun 2017 01:19:36 +1000 Subject: [PATCH 494/630] Added cblas_[ds]tr[sm]m to cblas include file (#259) --- deps/cblas.h | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/deps/cblas.h b/deps/cblas.h index a99c8fc9920f..471550323882 100644 --- a/deps/cblas.h +++ b/deps/cblas.h @@ -48,6 +48,10 @@ typedef int blasint; #define cblas_dger cblas_dger64_ #define cblas_sdot cblas_sdot64_ #define cblas_ddot cblas_ddot64_ +#define cblas_strmm cblas_strmm64_ +#define cblas_dtrmm cblas_dtrmm64_ +#define cblas_strsm cblas_strsm64_ +#define cblas_dtrsm cblas_dtrsm64_ #endif @@ -72,6 +76,15 @@ void cblas_sgemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLA void cblas_dgemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double beta, double *C, OPENBLAS_CONST blasint ldc); +void cblas_strmm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, + OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST 
blasint lda, float *B, OPENBLAS_CONST blasint ldb); +void cblas_dtrmm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, + OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *B, OPENBLAS_CONST blasint ldb); +void cblas_strsm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, + OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *B, OPENBLAS_CONST blasint ldb); +void cblas_dtrsm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, + OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *B, OPENBLAS_CONST blasint ldb); + #ifdef __cplusplus } #endif /* __cplusplus */ From 10ad90bd20724bc63c480effbcab7892f28faf46 Mon Sep 17 00:00:00 2001 From: Josh Bode Date: Fri, 23 Jun 2017 23:48:24 +1000 Subject: [PATCH 495/630] Added MXNET_COMMIT env variable for upstream commit/tag (#260) Setting the `MXNET_COMMIT` environment variable to a tag, branch or hash will use that version of the upstream mxnet repo in building the libmxnet shared object. Additionally, the upstream submodules are cleared (and reinitialised) every build to prevent some issues when submodules make changes (squashes, etc). 
--- deps/build.jl | 9 +++++---- docs/src/user-guide/install.md | 5 ++++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index bc86af683b23..7ddce7ab4166 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -5,7 +5,7 @@ import JSON # First try to detect and load existing libmxnet ################################################################################ libmxnet_detected = false -libmxnet_curr_ver = "master" +libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "master") curr_win = "20170502" if haskey(ENV, "MXNET_HOME") @@ -138,14 +138,15 @@ if !libmxnet_detected @build_steps begin BinDeps.DirectoryRule(_mxdir, @build_steps begin ChangeDirectory(_srcdir) - `git clone --recursive https://github.com/dmlc/mxnet` + `git clone https://github.com/dmlc/mxnet` end) @build_steps begin ChangeDirectory(_mxdir) - `git -C mshadow checkout -- make/mshadow.mk` + `git submodule deinit --force .` `git fetch` `git checkout $libmxnet_curr_ver` - `git submodule update --init` + `git submodule update --init --recursive` + `git -C mshadow checkout -- make/mshadow.mk` `make clean` `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` end diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md index abc48dcf76c0..a585b4409c6d 100644 --- a/docs/src/user-guide/install.md +++ b/docs/src/user-guide/install.md @@ -21,10 +21,13 @@ MXNet.jl is built on top of [libmxnet](https://github.com/dmlc/mxnet). Upon installation, Julia will try to automatically download and build libmxnet. -There are two environment variables that change this behaviour. If you +There are three environment variables that change this behaviour. If you already have a pre-installed version of mxnet you can use `MXNET_HOME` to point the build-process in the right direction. If the automatic cuda detection fails you can also set `CUDA_HOME` to override the process. 
+To control which version of libmxnet will be compiled, you can use the +`MXNET_COMMIT` variable to point to either a version tag (e.g. `v0.10.0`), a +branch name (e.g. `master`) or a specific commit hash (e.g. `a0b1c2d3`). The libmxnet source is downloaded to `Pkg.dir("MXNet")/deps/src/mxnet`. The automatic build is using default configurations, with OpenCV disabled. From 0750c41dbbc6245e3c1c65b1a256f5008aababec Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 8 Sep 2017 18:59:13 +0800 Subject: [PATCH 496/630] build: bump curr_win to 20170819, MXNet 0.11.0 --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 7ddce7ab4166..c97ccaaec03a 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -6,7 +6,7 @@ import JSON ################################################################################ libmxnet_detected = false libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "master") -curr_win = "20170502" +curr_win = "20170819" if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") From 85a2d97ec058156706f93b51120b62ba52001d60 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 9 Sep 2017 21:36:51 +0800 Subject: [PATCH 497/630] ndarray: add kInt8 and kInt64 into TypeFlag enum Ref: * https://github.com/dmlc/mshadow/pull/244 * https://github.com/dmlc/mshadow/pull/258 --- src/ndarray.jl | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 442f46d682ea..952c944dfbdd 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1,7 +1,7 @@ -# All the types supported by mshadow. -const DType = Union{Float32, Float64, Float16, UInt8, Int32} -@enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 -const DEFAULT_DTYPE = Float32 +# All the types supported by mshadow. 
See `mshadow/base.h` +const DType = Union{Float32, Float64, Float16, UInt8, Int32, Int8, Int64} +@enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 kInt8 kInt64 +const DEFAULT_DTYPE = Float32 # MSHADOW_DEFAULT_DTYPE function toTypeFlag{T <: DType}(:: Type{T}) if T == Float32 @@ -14,6 +14,10 @@ function toTypeFlag{T <: DType}(:: Type{T}) return kUint8 elseif T == Int32 return kInt32 + elseif T == Int8 + return kInt8 + elseif T == Int64 + return kInt64 else throw(ArgumentError("Can't convert $T to DType.")) end @@ -30,6 +34,10 @@ function fromTypeFlag(T :: TypeFlag) return UInt8 elseif T == kInt32 return Int32 + elseif T == kInt8 + return Int8 + elseif T == kInt64 + return Int64 else throw(ArgumentError("Can't convert DType $T.")) end From 873715b7e722431c9c642bbe783f76c184151b47 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 8 Sep 2017 11:41:20 +0800 Subject: [PATCH 498/630] cblas: import all func prototype into header Also, enable ILP64 symbol if INTERFACE64 defined --- deps/cblas.h | 478 +++++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 465 insertions(+), 13 deletions(-) diff --git a/deps/cblas.h b/deps/cblas.h index 471550323882..98a02f4fa968 100644 --- a/deps/cblas.h +++ b/deps/cblas.h @@ -2,7 +2,7 @@ #define CBLAS_H /* - * This file modified from the OpenBLAS repository, + * This file modified from the OpenBLAS repository. */ #include @@ -39,21 +39,192 @@ typedef BLASLONG blasint; typedef int blasint; #endif +/* copy from openblas_config_template.h */ +/* C99 supports complex floating numbers natively, which GCC also offers as an + extension since version 3.0. If neither are available, use a compatible + structure as fallback (see Clause 6.2.5.13 of the C99 standard). 
*/ +#if ((defined(__STDC_IEC_559_COMPLEX__) || __STDC_VERSION__ >= 199901L || \ + (__GNUC__ >= 3 && !defined(__cplusplus))) && !(defined(FORCE_OPENBLAS_COMPLEX_STRUCT))) +#ifndef __cplusplus + #include +#endif + typedef float _Complex openblas_complex_float; + typedef double _Complex openblas_complex_double; +#else + typedef struct { float real, imag; } openblas_complex_float; + typedef struct { double real, imag; } openblas_complex_double; +#endif + #ifdef INTERFACE64 -#define cblas_sgemm cblas_sgemm64_ -#define cblas_sgemv cblas_sgemv64_ -#define cblas_sger cblas_sger64_ -#define cblas_dgemm cblas_dgemm64_ -#define cblas_dgemv cblas_dgemv64_ -#define cblas_dger cblas_dger64_ -#define cblas_sdot cblas_sdot64_ -#define cblas_ddot cblas_ddot64_ -#define cblas_strmm cblas_strmm64_ -#define cblas_dtrmm cblas_dtrmm64_ -#define cblas_strsm cblas_strsm64_ -#define cblas_dtrsm cblas_dtrsm64_ +# define cblas_sdsdot cblas_sdsdot64_ +# define cblas_dsdot cblas_dsdot64_ +# define cblas_sdot cblas_sdot64_ +# define cblas_ddot cblas_ddot64_ +# define cblas_cdotu cblas_cdotu64_ +# define cblas_cdotc cblas_cdotc64_ +# define cblas_zdotu cblas_zdotu64_ +# define cblas_zdotc cblas_zdotc64_ +# define cblas_cdotu_sub cblas_cdotu_sub64_ +# define cblas_cdotc_sub cblas_cdotc_sub64_ +# define cblas_zdotu_sub cblas_zdotu_sub64_ +# define cblas_zdotc_sub cblas_zdotc_sub64_ +# define cblas_sasum cblas_sasum64_ +# define cblas_dasum cblas_dasum64_ +# define cblas_scasum cblas_scasum64_ +# define cblas_dzasum cblas_dzasum64_ +# define cblas_snrm2 cblas_snrm264_ +# define cblas_dnrm2 cblas_dnrm264_ +# define cblas_scnrm2 cblas_scnrm264_ +# define cblas_dznrm2 cblas_dznrm264_ +# define cblas_isamax cblas_isamax64_ +# define cblas_idamax cblas_idamax64_ +# define cblas_icamax cblas_icamax64_ +# define cblas_izamax cblas_izamax64_ +# define cblas_saxpy cblas_saxpy64_ +# define cblas_daxpy cblas_daxpy64_ +# define cblas_caxpy cblas_caxpy64_ +# define cblas_zaxpy cblas_zaxpy64_ +# define 
cblas_scopy cblas_scopy64_ +# define cblas_dcopy cblas_dcopy64_ +# define cblas_ccopy cblas_ccopy64_ +# define cblas_zcopy cblas_zcopy64_ +# define cblas_sswap cblas_sswap64_ +# define cblas_dswap cblas_dswap64_ +# define cblas_cswap cblas_cswap64_ +# define cblas_zswap cblas_zswap64_ +# define cblas_srot cblas_srot64_ +# define cblas_drot cblas_drot64_ +# define cblas_srotg cblas_srotg64_ +# define cblas_drotg cblas_drotg64_ +# define cblas_srotm cblas_srotm64_ +# define cblas_drotm cblas_drotm64_ +# define cblas_srotmg cblas_srotmg64_ +# define cblas_drotmg cblas_drotmg64_ +# define cblas_sscal cblas_sscal64_ +# define cblas_dscal cblas_dscal64_ +# define cblas_cscal cblas_cscal64_ +# define cblas_zscal cblas_zscal64_ +# define cblas_csscal cblas_csscal64_ +# define cblas_zdscal cblas_zdscal64_ +# define cblas_sgemv cblas_sgemv64_ +# define cblas_dgemv cblas_dgemv64_ +# define cblas_cgemv cblas_cgemv64_ +# define cblas_zgemv cblas_zgemv64_ +# define cblas_sger cblas_sger64_ +# define cblas_dger cblas_dger64_ +# define cblas_cgeru cblas_cgeru64_ +# define cblas_cgerc cblas_cgerc64_ +# define cblas_zgeru cblas_zgeru64_ +# define cblas_zgerc cblas_zgerc64_ +# define cblas_strsv cblas_strsv64_ +# define cblas_dtrsv cblas_dtrsv64_ +# define cblas_ctrsv cblas_ctrsv64_ +# define cblas_ztrsv cblas_ztrsv64_ +# define cblas_strmv cblas_strmv64_ +# define cblas_dtrmv cblas_dtrmv64_ +# define cblas_ctrmv cblas_ctrmv64_ +# define cblas_ztrmv cblas_ztrmv64_ +# define cblas_ssyr cblas_ssyr64_ +# define cblas_dsyr cblas_dsyr64_ +# define cblas_cher cblas_cher64_ +# define cblas_zher cblas_zher64_ +# define cblas_ssyr2 cblas_ssyr264_ +# define cblas_dsyr2 cblas_dsyr264_ +# define cblas_cher2 cblas_cher264_ +# define cblas_zher2 cblas_zher264_ +# define cblas_sgbmv cblas_sgbmv64_ +# define cblas_dgbmv cblas_dgbmv64_ +# define cblas_cgbmv cblas_cgbmv64_ +# define cblas_zgbmv cblas_zgbmv64_ +# define cblas_ssbmv cblas_ssbmv64_ +# define cblas_dsbmv cblas_dsbmv64_ +# define 
cblas_stbmv cblas_stbmv64_ +# define cblas_dtbmv cblas_dtbmv64_ +# define cblas_ctbmv cblas_ctbmv64_ +# define cblas_ztbmv cblas_ztbmv64_ +# define cblas_stbsv cblas_stbsv64_ +# define cblas_dtbsv cblas_dtbsv64_ +# define cblas_ctbsv cblas_ctbsv64_ +# define cblas_ztbsv cblas_ztbsv64_ +# define cblas_stpmv cblas_stpmv64_ +# define cblas_dtpmv cblas_dtpmv64_ +# define cblas_ctpmv cblas_ctpmv64_ +# define cblas_ztpmv cblas_ztpmv64_ +# define cblas_stpsv cblas_stpsv64_ +# define cblas_dtpsv cblas_dtpsv64_ +# define cblas_ctpsv cblas_ctpsv64_ +# define cblas_ztpsv cblas_ztpsv64_ +# define cblas_ssymv cblas_ssymv64_ +# define cblas_dsymv cblas_dsymv64_ +# define cblas_chemv cblas_chemv64_ +# define cblas_zhemv cblas_zhemv64_ +# define cblas_sspmv cblas_sspmv64_ +# define cblas_dspmv cblas_dspmv64_ +# define cblas_sspr cblas_sspr64_ +# define cblas_dspr cblas_dspr64_ +# define cblas_chpr cblas_chpr64_ +# define cblas_zhpr cblas_zhpr64_ +# define cblas_sspr2 cblas_sspr264_ +# define cblas_dspr2 cblas_dspr264_ +# define cblas_chpr2 cblas_chpr264_ +# define cblas_zhpr2 cblas_zhpr264_ +# define cblas_chbmv cblas_chbmv64_ +# define cblas_zhbmv cblas_zhbmv64_ +# define cblas_chpmv cblas_chpmv64_ +# define cblas_zhpmv cblas_zhpmv64_ +# define cblas_sgemm cblas_sgemm64_ +# define cblas_dgemm cblas_dgemm64_ +# define cblas_cgemm cblas_cgemm64_ +# define cblas_cgemm3m cblas_cgemm3m64_ +# define cblas_zgemm cblas_zgemm64_ +# define cblas_zgemm3m cblas_zgemm3m64_ +# define cblas_ssymm cblas_ssymm64_ +# define cblas_dsymm cblas_dsymm64_ +# define cblas_csymm cblas_csymm64_ +# define cblas_zsymm cblas_zsymm64_ +# define cblas_ssyrk cblas_ssyrk64_ +# define cblas_dsyrk cblas_dsyrk64_ +# define cblas_csyrk cblas_csyrk64_ +# define cblas_zsyrk cblas_zsyrk64_ +# define cblas_ssyr2k cblas_ssyr2k64_ +# define cblas_dsyr2k cblas_dsyr2k64_ +# define cblas_csyr2k cblas_csyr2k64_ +# define cblas_zsyr2k cblas_zsyr2k64_ +# define cblas_strmm cblas_strmm64_ +# define cblas_dtrmm cblas_dtrmm64_ +# 
define cblas_ctrmm cblas_ctrmm64_ +# define cblas_ztrmm cblas_ztrmm64_ +# define cblas_strsm cblas_strsm64_ +# define cblas_dtrsm cblas_dtrsm64_ +# define cblas_ctrsm cblas_ctrsm64_ +# define cblas_ztrsm cblas_ztrsm64_ +# define cblas_chemm cblas_chemm64_ +# define cblas_zhemm cblas_zhemm64_ +# define cblas_cherk cblas_cherk64_ +# define cblas_zherk cblas_zherk64_ +# define cblas_cher2k cblas_cher2k64_ +# define cblas_zher2k cblas_zher2k64_ +# define cblas_xerbla cblas_xerbla64_ +# define cblas_saxpby cblas_saxpby64_ +# define cblas_daxpby cblas_daxpby64_ +# define cblas_caxpby cblas_caxpby64_ +# define cblas_zaxpby cblas_zaxpby64_ +# define cblas_somatcopy cblas_somatcopy64_ +# define cblas_domatcopy cblas_domatcopy64_ +# define cblas_comatcopy cblas_comatcopy64_ +# define cblas_zomatcopy cblas_zomatcopy64_ +# define cblas_simatcopy cblas_simatcopy64_ +# define cblas_dimatcopy cblas_dimatcopy64_ +# define cblas_cimatcopy cblas_cimatcopy64_ +# define cblas_zimatcopy cblas_zimatcopy64_ +# define cblas_sgeadd cblas_sgeadd64_ +# define cblas_dgeadd cblas_dgeadd64_ +# define cblas_cgeadd cblas_cgeadd64_ +# define cblas_zgeadd cblas_zgeadd64_ #endif +#define CBLAS_INDEX size_t + typedef enum CBLAS_ORDER {CblasRowMajor=101, CblasColMajor=102} CBLAS_ORDER; typedef enum CBLAS_TRANSPOSE {CblasNoTrans=111, CblasTrans=112, CblasConjTrans=113, CblasConjNoTrans=114} CBLAS_TRANSPOSE; @@ -61,29 +232,310 @@ typedef enum CBLAS_UPLO {CblasUpper=121, CblasLower=122} CBLAS_UPLO; typedef enum CBLAS_DIAG {CblasNonUnit=131, CblasUnit=132} CBLAS_DIAG; typedef enum CBLAS_SIDE {CblasLeft=141, CblasRight=142} CBLAS_SIDE; +float cblas_sdsdot(OPENBLAS_CONST blasint n, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *y, OPENBLAS_CONST blasint incy); +double cblas_dsdot (OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *y, OPENBLAS_CONST blasint incy); float cblas_sdot(OPENBLAS_CONST 
blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *y, OPENBLAS_CONST blasint incy); double cblas_ddot(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double *y, OPENBLAS_CONST blasint incy); +openblas_complex_float cblas_cdotu(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *y, OPENBLAS_CONST blasint incy); +openblas_complex_float cblas_cdotc(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *y, OPENBLAS_CONST blasint incy); +openblas_complex_double cblas_zdotu(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double *y, OPENBLAS_CONST blasint incy); +openblas_complex_double cblas_zdotc(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double *y, OPENBLAS_CONST blasint incy); + +void cblas_cdotu_sub(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *y, OPENBLAS_CONST blasint incy, openblas_complex_float *ret); +void cblas_cdotc_sub(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *y, OPENBLAS_CONST blasint incy, openblas_complex_float *ret); +void cblas_zdotu_sub(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double *y, OPENBLAS_CONST blasint incy, openblas_complex_double *ret); +void cblas_zdotc_sub(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double *y, OPENBLAS_CONST blasint incy, openblas_complex_double *ret); + +float cblas_sasum (OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx); +double cblas_dasum (OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx); +float cblas_scasum(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, 
OPENBLAS_CONST blasint incx); +double cblas_dzasum(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx); + +float cblas_snrm2 (OPENBLAS_CONST blasint N, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX); +double cblas_dnrm2 (OPENBLAS_CONST blasint N, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX); +float cblas_scnrm2(OPENBLAS_CONST blasint N, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX); +double cblas_dznrm2(OPENBLAS_CONST blasint N, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX); + +CBLAS_INDEX cblas_isamax(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx); +CBLAS_INDEX cblas_idamax(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx); +CBLAS_INDEX cblas_icamax(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx); +CBLAS_INDEX cblas_izamax(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx); + +void cblas_saxpy(OPENBLAS_CONST blasint n, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, float *y, OPENBLAS_CONST blasint incy); +void cblas_daxpy(OPENBLAS_CONST blasint n, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, double *y, OPENBLAS_CONST blasint incy); +void cblas_caxpy(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, float *y, OPENBLAS_CONST blasint incy); +void cblas_zaxpy(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, double *y, OPENBLAS_CONST blasint incy); + +void cblas_scopy(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, float *y, OPENBLAS_CONST blasint incy); +void cblas_dcopy(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, double *y, OPENBLAS_CONST blasint incy); +void cblas_ccopy(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *x, 
OPENBLAS_CONST blasint incx, float *y, OPENBLAS_CONST blasint incy); +void cblas_zcopy(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, double *y, OPENBLAS_CONST blasint incy); + +void cblas_sswap(OPENBLAS_CONST blasint n, float *x, OPENBLAS_CONST blasint incx, float *y, OPENBLAS_CONST blasint incy); +void cblas_dswap(OPENBLAS_CONST blasint n, double *x, OPENBLAS_CONST blasint incx, double *y, OPENBLAS_CONST blasint incy); +void cblas_cswap(OPENBLAS_CONST blasint n, float *x, OPENBLAS_CONST blasint incx, float *y, OPENBLAS_CONST blasint incy); +void cblas_zswap(OPENBLAS_CONST blasint n, double *x, OPENBLAS_CONST blasint incx, double *y, OPENBLAS_CONST blasint incy); + +void cblas_srot(OPENBLAS_CONST blasint N, float *X, OPENBLAS_CONST blasint incX, float *Y, OPENBLAS_CONST blasint incY, OPENBLAS_CONST float c, OPENBLAS_CONST float s); +void cblas_drot(OPENBLAS_CONST blasint N, double *X, OPENBLAS_CONST blasint incX, double *Y, OPENBLAS_CONST blasint incY, OPENBLAS_CONST double c, OPENBLAS_CONST double s); + +void cblas_srotg(float *a, float *b, float *c, float *s); +void cblas_drotg(double *a, double *b, double *c, double *s); + +void cblas_srotm(OPENBLAS_CONST blasint N, float *X, OPENBLAS_CONST blasint incX, float *Y, OPENBLAS_CONST blasint incY, OPENBLAS_CONST float *P); +void cblas_drotm(OPENBLAS_CONST blasint N, double *X, OPENBLAS_CONST blasint incX, double *Y, OPENBLAS_CONST blasint incY, OPENBLAS_CONST double *P); + +void cblas_srotmg(float *d1, float *d2, float *b1, OPENBLAS_CONST float b2, float *P); +void cblas_drotmg(double *d1, double *d2, double *b1, OPENBLAS_CONST double b2, double *P); + +void cblas_sscal(OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, float *X, OPENBLAS_CONST blasint incX); +void cblas_dscal(OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, double *X, OPENBLAS_CONST blasint incX); +void cblas_cscal(OPENBLAS_CONST blasint N, OPENBLAS_CONST float *alpha, float *X, OPENBLAS_CONST blasint 
incX); +void cblas_zscal(OPENBLAS_CONST blasint N, OPENBLAS_CONST double *alpha, double *X, OPENBLAS_CONST blasint incX); +void cblas_csscal(OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, float *X, OPENBLAS_CONST blasint incX); +void cblas_zdscal(OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, double *X, OPENBLAS_CONST blasint incX); + void cblas_sgemv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE trans, OPENBLAS_CONST blasint m, OPENBLAS_CONST blasint n, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *a, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float beta, float *y, OPENBLAS_CONST blasint incy); void cblas_dgemv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE trans, OPENBLAS_CONST blasint m, OPENBLAS_CONST blasint n, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *a, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double beta, double *y, OPENBLAS_CONST blasint incy); +void cblas_cgemv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE trans, OPENBLAS_CONST blasint m, OPENBLAS_CONST blasint n, + OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *a, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST float *beta, float *y, OPENBLAS_CONST blasint incy); +void cblas_zgemv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE trans, OPENBLAS_CONST blasint m, OPENBLAS_CONST blasint n, + OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *a, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx, OPENBLAS_CONST double *beta, double *y, OPENBLAS_CONST blasint incy); + void cblas_sger (OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float 
*Y, OPENBLAS_CONST blasint incY, float *A, OPENBLAS_CONST blasint lda); void cblas_dger (OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *Y, OPENBLAS_CONST blasint incY, double *A, OPENBLAS_CONST blasint lda); +void cblas_cgeru(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *Y, OPENBLAS_CONST blasint incY, float *A, OPENBLAS_CONST blasint lda); +void cblas_cgerc(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *Y, OPENBLAS_CONST blasint incY, float *A, OPENBLAS_CONST blasint lda); +void cblas_zgeru(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *Y, OPENBLAS_CONST blasint incY, double *A, OPENBLAS_CONST blasint lda); +void cblas_zgerc(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *Y, OPENBLAS_CONST blasint incY, double *A, OPENBLAS_CONST blasint lda); + +void cblas_strsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *X, OPENBLAS_CONST blasint incX); +void cblas_dtrsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST 
blasint N, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *X, OPENBLAS_CONST blasint incX); +void cblas_ctrsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *X, OPENBLAS_CONST blasint incX); +void cblas_ztrsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *X, OPENBLAS_CONST blasint incX); + +void cblas_strmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *X, OPENBLAS_CONST blasint incX); +void cblas_dtrmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *X, OPENBLAS_CONST blasint incX); +void cblas_ctrmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *X, OPENBLAS_CONST blasint incX); +void cblas_ztrmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *X, OPENBLAS_CONST blasint incX); + +void cblas_ssyr(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, 
OPENBLAS_CONST float alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, float *A, OPENBLAS_CONST blasint lda); +void cblas_dsyr(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, double *A, OPENBLAS_CONST blasint lda); +void cblas_cher(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, float *A, OPENBLAS_CONST blasint lda); +void cblas_zher(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, double *A, OPENBLAS_CONST blasint lda); + +void cblas_ssyr2(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo,OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *X, + OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *Y, OPENBLAS_CONST blasint incY, float *A, OPENBLAS_CONST blasint lda); +void cblas_dsyr2(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *X, + OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *Y, OPENBLAS_CONST blasint incY, double *A, OPENBLAS_CONST blasint lda); +void cblas_cher2(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, + OPENBLAS_CONST float *Y, OPENBLAS_CONST blasint incY, float *A, OPENBLAS_CONST blasint lda); +void cblas_zher2(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, + OPENBLAS_CONST double *Y, OPENBLAS_CONST blasint 
incY, double *A, OPENBLAS_CONST blasint lda); + +void cblas_sgbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST blasint KL, OPENBLAS_CONST blasint KU, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float beta, float *Y, OPENBLAS_CONST blasint incY); +void cblas_dgbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST blasint KL, OPENBLAS_CONST blasint KU, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double beta, double *Y, OPENBLAS_CONST blasint incY); +void cblas_cgbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST blasint KL, OPENBLAS_CONST blasint KU, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *beta, float *Y, OPENBLAS_CONST blasint incY); +void cblas_zgbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST blasint KL, OPENBLAS_CONST blasint KU, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *beta, double *Y, OPENBLAS_CONST blasint incY); + +void cblas_ssbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, + OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, 
OPENBLAS_CONST float beta, float *Y, OPENBLAS_CONST blasint incY); +void cblas_dsbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, + OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double beta, double *Y, OPENBLAS_CONST blasint incY); + + +void cblas_stbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *X, OPENBLAS_CONST blasint incX); +void cblas_dtbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *X, OPENBLAS_CONST blasint incX); +void cblas_ctbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *X, OPENBLAS_CONST blasint incX); +void cblas_ztbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *X, OPENBLAS_CONST blasint incX); + +void cblas_stbsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint 
lda, float *X, OPENBLAS_CONST blasint incX); +void cblas_dtbsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *X, OPENBLAS_CONST blasint incX); +void cblas_ctbsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *X, OPENBLAS_CONST blasint incX); +void cblas_ztbsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *X, OPENBLAS_CONST blasint incX); + +void cblas_stpmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST float *Ap, float *X, OPENBLAS_CONST blasint incX); +void cblas_dtpmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST double *Ap, double *X, OPENBLAS_CONST blasint incX); +void cblas_ctpmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST float *Ap, float *X, OPENBLAS_CONST blasint incX); +void cblas_ztpmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG 
Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST double *Ap, double *X, OPENBLAS_CONST blasint incX); + +void cblas_stpsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST float *Ap, float *X, OPENBLAS_CONST blasint incX); +void cblas_dtpsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST double *Ap, double *X, OPENBLAS_CONST blasint incX); +void cblas_ctpsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST float *Ap, float *X, OPENBLAS_CONST blasint incX); +void cblas_ztpsv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, + OPENBLAS_CONST blasint N, OPENBLAS_CONST double *Ap, double *X, OPENBLAS_CONST blasint incX); + +void cblas_ssymv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, + OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float beta, float *Y, OPENBLAS_CONST blasint incY); +void cblas_dsymv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, + OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double beta, double *Y, OPENBLAS_CONST blasint incY); +void cblas_chemv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *alpha, 
OPENBLAS_CONST float *A, + OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *beta, float *Y, OPENBLAS_CONST blasint incY); +void cblas_zhemv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, + OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *beta, double *Y, OPENBLAS_CONST blasint incY); + + +void cblas_sspmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *Ap, + OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float beta, float *Y, OPENBLAS_CONST blasint incY); +void cblas_dspmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *Ap, + OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double beta, double *Y, OPENBLAS_CONST blasint incY); + +void cblas_sspr(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, float *Ap); +void cblas_dspr(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, double *Ap); + +void cblas_chpr(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, float *A); +void cblas_zhpr(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *X,OPENBLAS_CONST blasint incX, double *A); + +void 
cblas_sspr2(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *Y, OPENBLAS_CONST blasint incY, float *A); +void cblas_dspr2(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *Y, OPENBLAS_CONST blasint incY, double *A); +void cblas_chpr2(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *Y, OPENBLAS_CONST blasint incY, float *Ap); +void cblas_zhpr2(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *Y, OPENBLAS_CONST blasint incY, double *Ap); + +void cblas_chbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *beta, float *Y, OPENBLAS_CONST blasint incY); +void cblas_zhbmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *beta, double *Y, OPENBLAS_CONST blasint incY); + +void cblas_chpmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, + OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *Ap, OPENBLAS_CONST float *X, 
OPENBLAS_CONST blasint incX, OPENBLAS_CONST float *beta, float *Y, OPENBLAS_CONST blasint incY); +void cblas_zhpmv(OPENBLAS_CONST enum CBLAS_ORDER order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint N, + OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *Ap, OPENBLAS_CONST double *X, OPENBLAS_CONST blasint incX, OPENBLAS_CONST double *beta, double *Y, OPENBLAS_CONST blasint incY); void cblas_sgemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float beta, float *C, OPENBLAS_CONST blasint ldc); void cblas_dgemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double beta, double *C, OPENBLAS_CONST blasint ldc); +void cblas_cgemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float *beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_cgemm3m(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, 
OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float *beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_zgemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double *beta, double *C, OPENBLAS_CONST blasint ldc); +void cblas_zgemm3m(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransB, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double *beta, double *C, OPENBLAS_CONST blasint ldc); + + +void cblas_ssymm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_dsymm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double beta, double *C, OPENBLAS_CONST blasint ldc); +void cblas_csymm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST float *alpha, OPENBLAS_CONST float 
*A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float *beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_zsymm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double *beta, double *C, OPENBLAS_CONST blasint ldc); + +void cblas_ssyrk(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_dsyrk(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double beta, double *C, OPENBLAS_CONST blasint ldc); +void cblas_csyrk(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_zsyrk(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *beta, double *C, OPENBLAS_CONST blasint ldc); + +void cblas_ssyr2k(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum 
CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_dsyr2k(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double beta, double *C, OPENBLAS_CONST blasint ldc); +void cblas_csyr2k(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float *beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_zsyr2k(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, + OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double *beta, double *C, OPENBLAS_CONST blasint ldc); void cblas_strmm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *B, OPENBLAS_CONST blasint ldb); void cblas_dtrmm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum 
CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *B, OPENBLAS_CONST blasint ldb); +void cblas_ctrmm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, + OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *B, OPENBLAS_CONST blasint ldb); +void cblas_ztrmm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, + OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *B, OPENBLAS_CONST blasint ldb); + void cblas_strsm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *B, OPENBLAS_CONST blasint ldb); void cblas_dtrsm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *B, OPENBLAS_CONST blasint ldb); +void cblas_ctrsm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, + 
OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, float *B, OPENBLAS_CONST blasint ldb); +void cblas_ztrsm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE TransA, + OPENBLAS_CONST enum CBLAS_DIAG Diag, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, double *B, OPENBLAS_CONST blasint ldb); + +void cblas_chemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float *beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_zhemm(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_SIDE Side, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST blasint M, OPENBLAS_CONST blasint N, + OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double *beta, double *C, OPENBLAS_CONST blasint ldc); + +void cblas_cherk(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST float alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_zherk(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST double alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, 
OPENBLAS_CONST double beta, double *C, OPENBLAS_CONST blasint ldc); + +void cblas_cher2k(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST float *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST float beta, float *C, OPENBLAS_CONST blasint ldc); +void cblas_zher2k(OPENBLAS_CONST enum CBLAS_ORDER Order, OPENBLAS_CONST enum CBLAS_UPLO Uplo, OPENBLAS_CONST enum CBLAS_TRANSPOSE Trans, OPENBLAS_CONST blasint N, OPENBLAS_CONST blasint K, + OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *A, OPENBLAS_CONST blasint lda, OPENBLAS_CONST double *B, OPENBLAS_CONST blasint ldb, OPENBLAS_CONST double beta, double *C, OPENBLAS_CONST blasint ldc); + +void cblas_xerbla(blasint p, char *rout, char *form, ...); + +/*** BLAS extensions ***/ + +void cblas_saxpby(OPENBLAS_CONST blasint n, OPENBLAS_CONST float alpha, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx,OPENBLAS_CONST float beta, float *y, OPENBLAS_CONST blasint incy); + +void cblas_daxpby(OPENBLAS_CONST blasint n, OPENBLAS_CONST double alpha, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx,OPENBLAS_CONST double beta, double *y, OPENBLAS_CONST blasint incy); + +void cblas_caxpby(OPENBLAS_CONST blasint n, OPENBLAS_CONST float *alpha, OPENBLAS_CONST float *x, OPENBLAS_CONST blasint incx,OPENBLAS_CONST float *beta, float *y, OPENBLAS_CONST blasint incy); + +void cblas_zaxpby(OPENBLAS_CONST blasint n, OPENBLAS_CONST double *alpha, OPENBLAS_CONST double *x, OPENBLAS_CONST blasint incx,OPENBLAS_CONST double *beta, double *y, OPENBLAS_CONST blasint incy); + +void cblas_somatcopy(OPENBLAS_CONST enum CBLAS_ORDER CORDER, OPENBLAS_CONST enum CBLAS_TRANSPOSE CTRANS, OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST float calpha, OPENBLAS_CONST float *a, + OPENBLAS_CONST blasint clda, 
float *b, OPENBLAS_CONST blasint cldb); +void cblas_domatcopy(OPENBLAS_CONST enum CBLAS_ORDER CORDER, OPENBLAS_CONST enum CBLAS_TRANSPOSE CTRANS, OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST double calpha, OPENBLAS_CONST double *a, + OPENBLAS_CONST blasint clda, double *b, OPENBLAS_CONST blasint cldb); +void cblas_comatcopy(OPENBLAS_CONST enum CBLAS_ORDER CORDER, OPENBLAS_CONST enum CBLAS_TRANSPOSE CTRANS, OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST float* calpha, OPENBLAS_CONST float* a, + OPENBLAS_CONST blasint clda, float*b, OPENBLAS_CONST blasint cldb); +void cblas_zomatcopy(OPENBLAS_CONST enum CBLAS_ORDER CORDER, OPENBLAS_CONST enum CBLAS_TRANSPOSE CTRANS, OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST double* calpha, OPENBLAS_CONST double* a, + OPENBLAS_CONST blasint clda, double *b, OPENBLAS_CONST blasint cldb); + +void cblas_simatcopy(OPENBLAS_CONST enum CBLAS_ORDER CORDER, OPENBLAS_CONST enum CBLAS_TRANSPOSE CTRANS, OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST float calpha, float *a, + OPENBLAS_CONST blasint clda, OPENBLAS_CONST blasint cldb); +void cblas_dimatcopy(OPENBLAS_CONST enum CBLAS_ORDER CORDER, OPENBLAS_CONST enum CBLAS_TRANSPOSE CTRANS, OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST double calpha, double *a, + OPENBLAS_CONST blasint clda, OPENBLAS_CONST blasint cldb); +void cblas_cimatcopy(OPENBLAS_CONST enum CBLAS_ORDER CORDER, OPENBLAS_CONST enum CBLAS_TRANSPOSE CTRANS, OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST float* calpha, float* a, + OPENBLAS_CONST blasint clda, OPENBLAS_CONST blasint cldb); +void cblas_zimatcopy(OPENBLAS_CONST enum CBLAS_ORDER CORDER, OPENBLAS_CONST enum CBLAS_TRANSPOSE CTRANS, OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST double* calpha, double* a, + OPENBLAS_CONST blasint clda, OPENBLAS_CONST blasint cldb); + +void 
cblas_sgeadd(OPENBLAS_CONST enum CBLAS_ORDER CORDER,OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST float calpha, float *a, OPENBLAS_CONST blasint clda, OPENBLAS_CONST float cbeta, + float *c, OPENBLAS_CONST blasint cldc); +void cblas_dgeadd(OPENBLAS_CONST enum CBLAS_ORDER CORDER,OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST double calpha, double *a, OPENBLAS_CONST blasint clda, OPENBLAS_CONST double cbeta, + double *c, OPENBLAS_CONST blasint cldc); +void cblas_cgeadd(OPENBLAS_CONST enum CBLAS_ORDER CORDER,OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST float *calpha, float *a, OPENBLAS_CONST blasint clda, OPENBLAS_CONST float *cbeta, + float *c, OPENBLAS_CONST blasint cldc); +void cblas_zgeadd(OPENBLAS_CONST enum CBLAS_ORDER CORDER,OPENBLAS_CONST blasint crows, OPENBLAS_CONST blasint ccols, OPENBLAS_CONST double *calpha, double *a, OPENBLAS_CONST blasint clda, OPENBLAS_CONST double *cbeta, + double *c, OPENBLAS_CONST blasint cldc); + #ifdef __cplusplus } From 4e5ab13edd3afbefd80eeca0e8c462d34fb62fe3 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 6 Sep 2017 10:55:03 +0800 Subject: [PATCH 499/630] travis : set make -j4 and travis_wait * change # of make job in order to prevent from out-of-memory issue * travis_wait is a workaround about https://github.com/JuliaLang/julia/pull/23601 --- .travis.yml | 5 ++++- deps/build.jl | 12 ++++++++++-- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 708b5392fd3e..2f7dee479a68 100644 --- a/.travis.yml +++ b/.travis.yml @@ -35,7 +35,10 @@ notifications: email: false script: - - source $TRAVIS/run_test.sh + # bump the time limit of no ouput + # the `travis_wait` wrapper can be removed once this issue fixed: + # https://github.com/JuliaLang/julia/pull/23601 + - travis_wait 60 $TRAVIS/run_test.sh after_success: - source $TRAVIS/run_coverage.sh diff --git a/deps/build.jl b/deps/build.jl index 
c97ccaaec03a..aef054a8e52f 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -53,6 +53,14 @@ else info("Did not find a CUDA installation, using CPU-only version of MXNet.") end +function get_cpucore() + if haskey(ENV, "TRAVIS") # on travis-ci + 4 + else + min(Sys.CPU_CORES, 8) + end +end + using BinDeps @BinDeps.setup if !libmxnet_detected @@ -169,9 +177,9 @@ if !libmxnet_detected ChangeDirectory(_mxdir) `cp ../../cblas.h include/cblas.h` if USE_JULIA_BLAS - `make -j$(min(Sys.CPU_CORES,8)) USE_BLAS=$blas_name $MSHADOW_LDFLAGS` + `make -j$(get_cpucore()) USE_BLAS=$blas_name $MSHADOW_LDFLAGS` else - `make -j$(min(Sys.CPU_CORES,8))` + `make -j$(get_cpucore())` end end FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin From d1bbcc4a821e9862a671817d7f75521638d6a558 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 1 Sep 2017 14:16:02 +0800 Subject: [PATCH 500/630] base/build: using `Libdl.dlext` for searching lib --- deps/build.jl | 11 +++++++---- src/base.jl | 9 +++++---- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index c97ccaaec03a..03f57bcf0094 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -11,7 +11,8 @@ curr_win = "20170819" if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") info("Trying to load existing libmxnet...") - lib = Libdl.find_library(["libmxnet", "libmxnet.so"], ["$(ENV["MXNET_HOME"])/lib"]) + lib = Libdl.find_library("libmxnet.$(Libdl.dlext)", + ["$(ENV["MXNET_HOME"])/lib"]) if !isempty(lib) info("Existing libmxnet detected at $lib, skip building...") libmxnet_detected = true @@ -130,7 +131,7 @@ if !libmxnet_detected _libdir = joinpath(_prefix, "lib") # We have do eagerly delete the installed libmxnet.so # Otherwise we won't rebuild on an update. 
- run(`rm -f $_libdir/libmxnet.so`) + run(`rm -f $_libdir/libmxnet.$(Libdl.dlext)`) provides(BuildProcess, (@build_steps begin CreateDirectory(_srcdir) @@ -174,8 +175,10 @@ if !libmxnet_detected `make -j$(min(Sys.CPU_CORES,8))` end end - FileRule(joinpath(_libdir, "libmxnet.so"), @build_steps begin - `cp $_mxdir/lib/libmxnet.so $_libdir/` + FileRule(joinpath(_libdir, "libmxnet.$(Libdl.dlext)"), @build_steps begin + # the output file on macos is still in `.so` suffix + # so we rename it + `cp $_mxdir/lib/libmxnet.so $_libdir/libmxnet.$(Libdl.dlext)` end) end end), mxnet, installed_libpath=_libdir) diff --git a/src/base.jl b/src/base.jl index dd816dd41582..4f004f452f28 100644 --- a/src/base.jl +++ b/src/base.jl @@ -16,14 +16,15 @@ const char_pp = Ptr{char_p} ################################################################################ # Initialization and library API entrance ################################################################################ -const MXNET_LIB = Libdl.find_library(["libmxnet.so","libmxnet.dll"], - [joinpath("$(get(ENV,"MXNET_HOME",""))","lib"), - Pkg.dir("MXNet","deps","usr","lib")]) +const MXNET_LIB = Libdl.find_library("libmxnet.$(Libdl.dlext)", + [joinpath(get(ENV, "MXNET_HOME", ""), "lib"), + Pkg.dir("MXNet", "deps", "usr", "lib")]) if isempty(MXNET_LIB) # touch this file, so that after the user properly build libmxnet, the precompiled # MXNet.ji will be re-compiled to get MXNET_LIB properly. touch(@__FILE__) - error("Cannot find or load libmxnet.so. Please see the document on how to build it.") + error("Cannot find or load libmxnet.$(Libdl.dlext). 
" * + "Please see the document on how to build it.") else include_dependency(MXNET_LIB) end From 0d54f4b7cfb4f744bb4a8e3a81539c6abf2f16f2 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 9 Sep 2017 15:50:30 +0800 Subject: [PATCH 501/630] build: force enable LAPACK when Julia's OpenBLAS is available --- deps/build.jl | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/deps/build.jl b/deps/build.jl index c97ccaaec03a..7bf6372acebb 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -105,6 +105,7 @@ if !libmxnet_detected ilp64 = "-DINTERFACE64" end + FORCE_LAPACK = false if blas_vendor == :unknown info("Julia is built with an unkown blas library ($blas_path).") info("Attempting build without reusing the blas library") @@ -115,6 +116,7 @@ if !libmxnet_detected USE_JULIA_BLAS = true else USE_JULIA_BLAS = true + FORCE_LAPACK = true end blas_name = blas_vendor == :openblas64 ? "openblas" : string(blas_vendor) @@ -164,6 +166,11 @@ if !libmxnet_detected `sed -i -s 's/USE_CUDA_PATH = NULL/USE_CUDA_PATH = $(ENV["CUDA_HOME"])/' config.mk` end end + # Force enable LAPACK build + # Julia's OpenBLAS has LAPACK functionality already + if FORCE_LAPACK + `sed -i -s 's/ADD_CFLAGS =\(.*\)/ADD_CFLAGS =\1 -DMXNET_USE_LAPACK/' config.mk` + end end) @build_steps begin ChangeDirectory(_mxdir) From 23866bd640ad39a2e20c596e1557701b0cac52e8 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 11 Sep 2017 22:44:52 +0800 Subject: [PATCH 502/630] build: fix macOS LAPACK build --- deps/build.jl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/deps/build.jl b/deps/build.jl index 558f4f7cbb61..69443c4987e5 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -178,6 +178,9 @@ if !libmxnet_detected # Force enable LAPACK build # Julia's OpenBLAS has LAPACK functionality already if FORCE_LAPACK + if is_apple() + MSHADOW_LDFLAGS *= " -framework Accelerate" + end `sed -i -s 's/ADD_CFLAGS =\(.*\)/ADD_CFLAGS =\1 -DMXNET_USE_LAPACK/' config.mk` end end) From 4e439ed5254bd0ad41f55a4e055477e2950ced00 
Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 11 Sep 2017 02:01:54 +0800 Subject: [PATCH 503/630] build: remove FileRule on config.mk and fix CUDA config * remove FileRule on config.mk, we always override it. Due to the following workflow is quite common: * user issue `Pkg.build("MXNet")` * then, build failed because the `CUDA_HOME` not set properly. * if the `FileRule` find `config.mk`, re-issuing `Pkg.build` will not change the `config.mk`. User need to delete/modified `config.mk` manually * fix CUDA config * the build_steps didn't being unfolded correctly. * consider following cases: ```julia @build_steps begin if true `1` `2` `3` if true `42` end end end BinDeps.SynchronousStepCollection(Any[`42`], "", "") ``` vs ```julia @build_steps begin if true @build_steps begin `1` `2` `3` if true `42` end end # inner build_steps end # if end BinDeps.SynchronousStepCollection(Any[`1`, `2`, `3`, `42`], "", "") ``` --- deps/build.jl | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index 69443c4987e5..a11758e36f3c 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -159,22 +159,30 @@ if !libmxnet_detected `git submodule update --init --recursive` `git -C mshadow checkout -- make/mshadow.mk` `make clean` + `cp ../../cblas.h include/cblas.h` + `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` - end - FileRule(joinpath(_mxdir, "config.mk"), @build_steps begin - ChangeDirectory(_mxdir) + + # Copy config.mk, always override the file if is_apple() `cp make/osx.mk config.mk` else `cp make/config.mk config.mk` end + + # Configure OpenCV `sed -i -s 's/USE_OPENCV = 1/USE_OPENCV = 0/' config.mk` + + # Configure CUDA if HAS_CUDA - `sed -i -s 's/USE_CUDA = 0/USE_CUDA = 1/' config.mk` - if haskey(ENV, "CUDA_HOME") - `sed -i -s 's/USE_CUDA_PATH = NULL/USE_CUDA_PATH = $(ENV["CUDA_HOME"])/' config.mk` + @build_steps begin + `sed -i -s 's/USE_CUDA = 0/USE_CUDA = 1/' 
config.mk` + if haskey(ENV, "CUDA_HOME") + `sed -i -s "s@USE_CUDA_PATH = NONE@USE_CUDA_PATH = $(ENV["CUDA_HOME"])@" config.mk` + end end end + # Force enable LAPACK build # Julia's OpenBLAS has LAPACK functionality already if FORCE_LAPACK @@ -183,10 +191,7 @@ if !libmxnet_detected end `sed -i -s 's/ADD_CFLAGS =\(.*\)/ADD_CFLAGS =\1 -DMXNET_USE_LAPACK/' config.mk` end - end) - @build_steps begin - ChangeDirectory(_mxdir) - `cp ../../cblas.h include/cblas.h` + if USE_JULIA_BLAS `make -j$(get_cpucore()) USE_BLAS=$blas_name $MSHADOW_LDFLAGS` else From 7cc66d1c5186b6afb73c55eebc6cd609b0c01bca Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 13 Sep 2017 00:27:08 +0800 Subject: [PATCH 504/630] executor: fix docstring of `bind` --- src/executor.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/executor.jl b/src/executor.jl index 3ae5301a6c92..e340537d7a63 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -65,6 +65,8 @@ function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDA return (args_hdr, args_vec) end +@enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 + """ bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) @@ -81,7 +83,6 @@ Create an `Executor` by binding a `SymbolicNode` to concrete `NDArray`. 
* `aux_states`: * `grad_req`: """ -@enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 function bind(self :: SymbolicNode, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), aux_states :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), From 5d739daccd6d9b2f30845a52688f20cd0ec46852 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 13 Sep 2017 11:58:04 +0800 Subject: [PATCH 505/630] ndarray: implement deepcopy via MXNDArrayGetDataNDArray --- src/ndarray.jl | 15 ++++++++++++++- test/unittest/ndarray.jl | 9 +++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 952c944dfbdd..d74ed07b0c1f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -403,7 +403,8 @@ function getindex(arr :: NDArray, idx::UnitRange{Int}) slice(arr, idx) end -import Base: copy!, copy, convert +import Base: copy!, copy, convert, deepcopy + """ copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) @@ -485,6 +486,18 @@ function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) convert(t, copy(arr)) end +""" + deepcopy(arr::NDArray) + +Get a deep copy of the data blob in the form of an NDArray of default storage +type. This function blocks. Do not use it in performance critical code. 
+""" +function deepcopy(arr::NDArray) + out_ref = Ref{MX_handle}(C_NULL) + @mxcall(:MXNDArrayGetDataNDArray, (MX_handle, Ref{MX_handle}), arr, out_ref) + NDArray(MX_NDArrayHandle(out_ref[])) +end + """ @inplace diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 69599960973f..8c058c1bf97d 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -31,6 +31,15 @@ function test_copy() @test reldiff(tensor, tensor2) < 1e-6 end +function test_deepcopy() + info("NDArray::deepcopy") + + x = mx.zeros(2, 5) + y = deepcopy(x) + x[:] = 42 + @test copy(x) != copy(y) +end + function test_assign() dims = rand_dims() tensor = rand(mx.MX_float, dims) From 16c2c5ac87459eaf8e5d459c6dcdb92a6afcefe1 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 18 Sep 2017 22:21:57 +0800 Subject: [PATCH 506/630] base: update GRAD_REQ enumeration * also, add a `grad_req_map` --- src/base.jl | 12 ++++++++++++ src/executor.jl | 2 -- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/base.jl b/src/base.jl index 4f004f452f28..2d3ec8f3d053 100644 --- a/src/base.jl +++ b/src/base.jl @@ -13,6 +13,18 @@ const MX_handle = Ptr{Void} const char_p = Ptr{UInt8} const char_pp = Ptr{char_p} +################################################################################ +# Enumeration from MXNet headers +################################################################################ +# OpReqType in include/mxnet/op_attr_types.h +@enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_INPLACE=2 GRAD_ADD=3 +const grad_req_map = Dict{Symbol, GRAD_REQ}( + :nop => GRAD_NOP, # no operation, do not write anything + :write => GRAD_WRITE, # write gradient to provided space + :inplace => GRAD_INPLACE, # perform an inplace write + :add => GRAD_ADD, # add to the provided space +) + ################################################################################ # Initialization and library API entrance ################################################################################ 
diff --git a/src/executor.jl b/src/executor.jl index e340537d7a63..8d10102f5c75 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -65,8 +65,6 @@ function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDA return (args_hdr, args_vec) end -@enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_ADD=3 - """ bind(sym, ctx, args; args_grad=Dict(), aux_states=Dict(), grad_req=GRAD_WRITE) From e3ad29dc63ba0784f6c44ac087942c5a1ea07455 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 19 Sep 2017 10:36:27 +0800 Subject: [PATCH 507/630] travis: bring coverage report back --- .travis.yml | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2f7dee479a68..68b1cb1b8b00 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,8 +28,8 @@ addons: - g++-4.8 before_install: - - export TRAVIS=test/travis - - source $TRAVIS/setup_env.sh + - export TRAVIS_DIR=test/travis + - source ${TRAVIS_DIR}/setup_env.sh notifications: email: false @@ -38,11 +38,10 @@ script: # bump the time limit of no ouput # the `travis_wait` wrapper can be removed once this issue fixed: # https://github.com/JuliaLang/julia/pull/23601 - - travis_wait 60 $TRAVIS/run_test.sh + - travis_wait 60 ${TRAVIS_DIR}/run_test.sh after_success: - - source $TRAVIS/run_coverage.sh + - source ${TRAVIS_DIR}/run_coverage.sh - echo $TRAVIS_JULIA_VERSION - julia -e 'Pkg.add("Documenter")' - julia -e 'cd(Pkg.dir("MXNet")); include(joinpath("docs", "make.jl"))' - From dcba690720b5e46abbe57918d5483f18a8890c87 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 11 Sep 2017 14:30:07 +0800 Subject: [PATCH 508/630] build: CuDNN detection --- deps/build.jl | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/deps/build.jl b/deps/build.jl index a11758e36f3c..b8ea163951c4 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -36,6 +36,8 @@ if is_unix() end HAS_CUDA = false +HAS_CUDNN = false +cudnnver = -1 let cudalib = Libdl.find_library(["libcuda", "nvcuda.dll"], 
CUDAPATHS) HAS_CUDA = !isempty(cudalib) && Libdl.dlopen_e(cudalib) != C_NULL end @@ -48,8 +50,21 @@ if !HAS_CUDA && is_windows() end end +if HAS_CUDA # then check cudnn + let cudnnlib = Libdl.find_library("libcudnn", CUDAPATHS) + HAS_CUDNN = !isempty(cudnnlib) && Libdl.dlopen_e(cudnnlib) != C_NULL + if HAS_CUDNN + # TODO: do more version check? + cudnnver = dec(ccall((:cudnnGetVersion, cudnnlib), Csize_t, ())) + end + end +end + if HAS_CUDA info("Found a CUDA installation.") + if HAS_CUDNN + info("Found a CuDNN installation (version -> $cudnnver).") + end else info("Did not find a CUDA installation, using CPU-only version of MXNet.") end @@ -180,6 +195,9 @@ if !libmxnet_detected if haskey(ENV, "CUDA_HOME") `sed -i -s "s@USE_CUDA_PATH = NONE@USE_CUDA_PATH = $(ENV["CUDA_HOME"])@" config.mk` end + if HAS_CUDNN + `sed -i -s 's/USE_CUDNN = 0/USE_CUDNN = 1/' config.mk` + end end end From 3d4adfb030ce6b78f76d5c4f4dad4cdf59c7aaf0 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 10 Sep 2017 12:04:36 +0800 Subject: [PATCH 509/630] ndarray: `reshape` share the same API with Base * fix #272 * deprecate `Reshape` as well * test cases included --- NEWS.md | 9 +++++++++ src/MXNet.jl | 2 ++ src/deprecated.jl | 3 +++ src/ndarray.jl | 32 +++++++++++++++++++++++++++++--- test/unittest/ndarray.jl | 31 +++++++++++++++++++++++++++---- 5 files changed, 70 insertions(+), 7 deletions(-) create mode 100644 src/deprecated.jl diff --git a/NEWS.md b/NEWS.md index 551985db996c..36f10ee2735d 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,12 @@ +# v0.3.0 (TBD) + +## API Changes + +* `reshape` of NDArray share the same interface with Base (#272). + * `reshape(NDArray, dim; reverse=false)` + * `reshape(NDArray, dim...; reverse=false)` + * `Reshape` deprecated. + # v0.2.2 (2017.05.14) * Updated supported version of MXNet to 0.9.4. * Improved build-system with support for auto-detecting GPU support. 
diff --git a/src/MXNet.jl b/src/MXNet.jl index bcc6296b3fb8..142f1a06d5be 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -50,6 +50,8 @@ include("visualize.jl") include("nn-factory.jl") +include("deprecated.jl") + end # mx end # module MXNet diff --git a/src/deprecated.jl b/src/deprecated.jl new file mode 100644 index 000000000000..4c9345126bde --- /dev/null +++ b/src/deprecated.jl @@ -0,0 +1,3 @@ +# reshape (#272) +@deprecate reshape(arr::NDArray; shape=()) reshape(arr, shape) +@deprecate Reshape(arr::NDArray; shape=()) reshape(arr, shape) diff --git a/src/ndarray.jl b/src/ndarray.jl index 952c944dfbdd..a5d3c97ebb67 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -921,6 +921,31 @@ function save(filename::String, data::Dict{Base.Symbol,NDArray}) filename, length(names), arrays, names) end +import Base: reshape + +""" + reshape(arr::NDArray, dim...; reverse=false) + reshape(arr::NDArray, dim; reverse=false) +""" +reshape{N}(arr::NDArray, dim::NTuple{N, Integer}; reverse::Bool=false) = + _reshape(arr, dim, reverse) +reshape{N}(arr::NDArray, dim::Vararg{Integer, N}; reverse::Bool=false) = + _reshape(arr, dim, reverse) + +@inline function _reshape{N}(arr::NDArray, dim::NTuple{N, Integer}, reverse::Bool) + op_handle = _get_cached_libmx_op_handle("reshape") + n_output = Ref(Cint(0)) + hdls_ref = Ref{Ptr{MX_handle}}(C_NULL) + @mxcall(:MXImperativeInvoke, + (MX_handle, Cint, Ptr{MX_handle}, Ref{Cint}, Ref{Ptr{MX_handle}}, + Cint, char_pp, char_pp), + op_handle, 1, [arr.handle], n_output, hdls_ref, + 2, ["shape", "reverse"], [dump_mx_param(dim), dump_mx_param(!reverse)]) + # not a typo ^^^^^^^^ + @assert n_output[] == 1 + NDArray(MX_NDArrayHandle(unsafe_load(hdls_ref[], 1))) +end + ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -980,7 +1005,6 @@ Upon calling, the output arguments will be 
automatically initialized with empty Those functions always return the output arguments. If there is only one output (the typical situation), that object (`NDArray`) is returned. Otherwise, a tuple containing all the outputs will be returned. """ - function _get_ndarray_function_def(name :: String) func_name = Symbol(name) @@ -1063,7 +1087,9 @@ function _get_ndarray_function_def(name :: String) end macro _import_ndarray_functions() - names = _get_libmx_op_names() + black_list = ["reshape"] # do not import these funcs + names = filter(n -> ∉(lowercase(n), black_list), _get_libmx_op_names()) + func_exprs = map(names) do name op_handle = _get_libmx_op_handle(name) @@ -1073,7 +1099,7 @@ macro _import_ndarray_functions() func_name = Symbol(name) expr = quote # TODO the explicit exclusion of take will no longer be necessary when it is removed from Base - $((isdefined(Base, func_name) && func_name ≠ :take)? :(import Base.$func_name) : :()) + $((isdefined(Base, func_name) && func_name ≠ :take) ? 
:(import Base.$func_name) : :()) $func_def @doc $desc -> $func_def2 diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 69599960973f..b7584104d495 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -109,7 +109,7 @@ function test_plus() scalar_large = 1e8 @test reldiff(t4 + scalar_small, copy(a4 .+ scalar_small)) < thresh @test reldiff(t4 + scalar_large, copy(a4 .+ scalar_large)) < thresh - + t5 = zeros(Float64, dims) a5 = copy(t5, mx.cpu()) scalar_small = 1e-8 @@ -160,7 +160,7 @@ function test_minus() scalar_large = 1e8 @test reldiff(t4 - scalar_small, copy(a4 .- scalar_small)) < thresh @test reldiff(t4 - scalar_large, copy(a4 .- scalar_large)) < thresh - + t5 = zeros(Float64, dims) a5 = copy(t5, mx.cpu()) scalar_small = 1e-8 @@ -204,7 +204,7 @@ function test_mul() scalar_large = 1e8 @test reldiff(t4 * scalar_small, copy(a4 .* scalar_small)) < thresh @test reldiff(t4 * scalar_large, copy(a4 .* scalar_large)) < thresh - + t5, a5 = rand_tensors(Float64, dims) scalar_small = 1e-8 scalar_large = 1e8 @@ -245,7 +245,7 @@ function test_div() scalar_large = 1e8 @test reldiff(t4 / scalar_small, copy(a4 ./ scalar_small)) < thresh @test reldiff(t4 / scalar_large, copy(a4 ./ scalar_large)) < thresh - + t5, a5 = rand_tensors(Float64, dims) scalar_small = 1e-8 scalar_large = 1e8 @@ -382,6 +382,28 @@ function test_eltype() end end +function test_reshape() + info("NDArray::reshape") + A = rand(2, 3, 4) + + B = reshape(mx.NDArray(A), 4, 3, 2) + @test size(B) == (4, 3, 2) + @test copy(B)[3, 1, 1] == A[1, 2, 1] + + C = reshape(mx.NDArray(A), (4, 3, 2)) + @test size(C) == (4, 3, 2) + @test copy(C)[3, 1, 1] == A[1, 2, 1] + + info("NDArray::reshape::reverse") + A = mx.zeros(10, 5, 4) + + B = reshape(A, -1, 0) + @test size(B) == (40, 5) + + C = reshape(A, -1, 0, reverse=true) + @test size(C) == (50, 4) +end + function test_kwargs() info("NDArray::kwargs") dims1 = (2,3,4) @@ -412,6 +434,7 @@ end test_eltype() test_nd_as_jl() test_dot() + 
test_reshape() test_kwargs() end From 0b9bf78c385236787a75f966fd9835f10d3007d7 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 11 Sep 2017 19:10:09 +0800 Subject: [PATCH 510/630] sym-node: support `print` for debugging --- src/symbolic-node.jl | 45 ++++++++++++++++++++++++++++++++++ test/unittest/symbolic-node.jl | 8 ++++++ 2 files changed, 53 insertions(+) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 8a567c2e8003..772ff66e53f7 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -220,6 +220,51 @@ function get_name(self :: mx.SymbolicNode) return Symbol(unsafe_string(name[])) end +import Base: print + +function print(io :: IO, sym :: SymbolicNode) + out = Ref{mx.char_p}(C_NULL) + @mx.mxcall(:MXSymbolPrint, (mx.MX_SymbolHandle, Ref{mx.char_p}), sym.handle, out) + print(io, unsafe_string(out[])) +end + +print(sym :: SymbolicNode) = print(STDOUT, sym) + +""" + print([io :: IO], sym :: SymbolicNode) + +Print the content of symbol, used for debug. + +```julia +julia> layer = @mx.chain mx.Variable(:data) => + mx.FullyConnected(name=:fc1, num_hidden=128) => + mx.Activation(name=:relu1, act_type=:relu) +MXNet.mx.SymbolicNode(MXNet.mx.MX_SymbolHandle(Ptr{Void} @0x000055b29b9c3520)) + +julia> print(layer) +Symbol Outputs: + output[0]=relu1(0) +Variable:data +Variable:fc1_weight +Variable:fc1_bias +-------------------- +Op:FullyConnected, Name=fc1 +Inputs: + arg[0]=data(0) version=0 + arg[1]=fc1_weight(0) version=0 + arg[2]=fc1_bias(0) version=0 +Attrs: + num_hidden=128 +-------------------- +Op:Activation, Name=relu1 +Inputs: + arg[0]=fc1(0) +Attrs: + act_type=relu +``` +""" +print + """ grad(self :: SymbolicNode, wrt :: Vector{SymbolicNode}) diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 5d8d01b608db..534a99140115 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -136,6 +136,13 @@ function test_dot() @test reldiff(ret, 2*ones(100, 200)) < 1e-6 end +function test_print() + 
info("SymbolicNode::print") + io = IOBuffer() + print(io, mx.Variable(:x)) + @test !isempty(String(take!(io))) +end + function test_misc() info("SymbolicNode::Miscellaneous") # Test for #189 @@ -158,6 +165,7 @@ end test_attrs() test_functions() test_dot() + test_print() test_misc() end From 4e89c919b47fad6d79cf9e33a6b5a6f2c168b98d Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 12 Sep 2017 18:18:20 +0800 Subject: [PATCH 511/630] sym-node: extract a until function _create_atomic_symbol * from _define_atomic_symbol_creator --- src/symbolic-node.jl | 39 ++++++++++++++++++++++++--------------- 1 file changed, 24 insertions(+), 15 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 8a567c2e8003..c81b250f497a 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -614,6 +614,26 @@ end ################################################################################ # Atomic SymbolicNode functions dynamically imported from libmxnet ################################################################################ +@inline function _create_atomic_symbol(creator::MX_handle, keys::Vector{String}, + vals::Vector{String}) + ref_sym_hdr = Ref{MX_handle}(C_NULL) + @mxcall(:MXSymbolCreateAtomicSymbol, + (MX_handle, MX_uint, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), + creator, length(keys), keys, vals, ref_sym_hdr) + SymbolicNode(MX_SymbolHandle(ref_sym_hdr[])) +end + +@inline function _create_atomic_symbol(creator::MX_handle, keys::Vector{String}, + vals::Vector{String}, + attrs::Dict{Symbol, String}) + node = _create_atomic_symbol(creator, keys, vals) + # set attrs + for (k, v) in attrs + set_attr(node, k, v) + end + node +end + function _define_atomic_symbol_creator(name :: String) handle = _get_libmx_op_handle(name) f_desc, key_narg = _get_libmx_op_description(name, handle) @@ -664,7 +684,7 @@ function _define_atomic_symbol_creator(name :: String) symbol_kws[k] = v elseif k == :attrs if isa(v, Dict) - attrs = convert(Dict{Symbol, 
AbstractString}, v) + attrs = convert(Dict{Symbol, String}, v) else throw(ArgumentError("attrs needs to be a Dictionary")) end @@ -686,24 +706,13 @@ function _define_atomic_symbol_creator(name :: String) end end) - local hdr = _get_cached_libmx_op_handle($name) - - # create the SymbolicNode - ref_sym_hdr = Ref{MX_handle}() - @mxcall(:MXSymbolCreateAtomicSymbol, - (MX_handle, MX_uint, Ptr{char_p}, Ptr{char_p}, Ref{MX_handle}), - hdr, length(param_keys), param_keys, param_vals, ref_sym_hdr) - sym_hdr = ref_sym_hdr[] + local op = _get_cached_libmx_op_handle($name) + node = _create_atomic_symbol(op.value, param_keys, param_vals, attrs) - node = SymbolicNode(MX_SymbolHandle(sym_hdr)) + # generate a new name for the new symbol if user not provided in kwargs hint = lowercase($name) name = get!(DEFAULT_NAME_MANAGER, name, hint) - # set attrs - for (k, v) in attrs - set_attr(node, k, v) - end - if length(symbol_kws) == 0 _compose!(node, name, args...) elseif length(args) == 1 From 63a34ccadaa59f329ca0ec4babd47f25bc35e481 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 12 Sep 2017 22:32:51 +0800 Subject: [PATCH 512/630] sym-node: implement reshape api as Base * copy the docstring from upstream, and convert it into markdown * test cases included --- src/symbolic-node.jl | 83 +++++++++++++++++++++++++++++++++- test/unittest/symbolic-node.jl | 27 +++++++++++ 2 files changed, 108 insertions(+), 2 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index c81b250f497a..620df2ed063d 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -611,6 +611,85 @@ function save(filename :: AbstractString, node :: SymbolicNode) @mxcall(:MXSymbolSaveToFile, (MX_handle, char_p), node, filename) end +import Base: reshape + +reshape{N}(sym::SymbolicNode, dim::NTuple{N, Integer}; kwargs...) = + _reshape(sym, dim, kwargs...) +reshape(sym::SymbolicNode, dim::Integer...; kwargs...) = + _reshape(sym, dim, kwargs...) 
+ +@inline function _reshape{N}(sym::SymbolicNode, dim::NTuple{N, Integer}; + reverse::Bool=false, name::String="") + op = _get_cached_libmx_op_handle("reshape") + node = _create_atomic_symbol(op.value, ["shape"], [dump_mx_param(dim)]) + name = get!(DEFAULT_NAME_MANAGER, name, "reshape") + _compose!(node, name=name, data=sym) +end + +""" + reshape(sym::SymbolicNode, dim; reverse=false, name) + reshape(sym::SymbolicNode, dim...; reverse=false, name) + +Reshape SymbolicNode operator + +Some dimensions of the shape can take special values from the set +{0, -1, -2, -3, -4}. +The significance of each is explained below: + +- `0` copy this dimension from the input to the output shape. + + Example: + + - input shape = (2,3,4), shape = (4,0,2), output shape = (4,3,2) + - input shape = (2,3,4), shape = (2,0,0), output shape = (2,3,4) + +- `-1` infers the dimension of the output shape by using the remainder of the + input dimensions keeping the size of the new array same as that of the input + array. At most one dimension of shape can be -1. + + Example: + + - input shape = (2,3,4), shape = (6,1,-1), output shape = (6,1,4) + - input shape = (2,3,4), shape = (3,-1,8), output shape = (3,1,8) + - input shape = (2,3,4), shape=(-1,), output shape = (24,) + +- `-2` copy all/remainder of the input dimensions to the output shape. + + Example: + + - input shape = (2,3,4), shape = (-2,), output shape = (2,3,4) + - input shape = (2,3,4), shape = (2,-2), output shape = (2,3,4) + - input shape = (2,3,4), shape = (-2,1,1), output shape = (2,3,4,1,1) + +- `-3` use the product of two consecutive dimensions of the input shape as the + output dimension. 
+ + Example: + + - input shape = (2,3,4), shape = (-3,4), output shape = (6,4) + - input shape = (2,3,4,5), shape = (-3,-3), output shape = (6,20) + - input shape = (2,3,4), shape = (0,-3), output shape = (2,12) + - input shape = (2,3,4), shape = (-3,-2), output shape = (6,4) + +- `-4` split one dimension of the input into two dimensions passed subsequent + to -4 in shape (can contain -1). + + Example: + + - input shape = (2,3,4), shape = (-4,1,2,-2), output shape =(1,2,3,4) + - input shape = (2,3,4), shape = (2,-4,-1,3,-2), output shape = (2,1,3,4) + +If the argument `reverse` is set to `1`, then the special values are inferred +from right to left. + + Example: + + - with `reverse=false`, for input shape = (10,5,4), shape = (-1,0), + output shape would be (40,5) + - with `reverse=true`, output shape will be (50,4). +""" +reshape + ################################################################################ # Atomic SymbolicNode functions dynamically imported from libmxnet ################################################################################ @@ -742,11 +821,11 @@ macro _import_atomic_symbol_creators() # XXX: those are operators defined for NDArray, we exclude them here # because the calling convention for the type signature is not strong # enough to disambiguate the method for NDArray and SymbolicNode - const ignored_ops = ["_set_value"] + const ignored_ops = ["_set_value", "reshape"] # in lowercase op_names = _get_libmx_op_names() func_exprs = map(op_names) do name - if name ∉ ignored_ops + if lowercase(name) ∉ ignored_ops expr = _define_atomic_symbol_creator(name) end end diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 5d8d01b608db..26b049e8d63c 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -122,6 +122,32 @@ function test_functions() typeof(mx.sum(data)) == mx.SymbolicNode end +function test_reshape() + info("SymbolicNode::reshape(sym, dim...)") + + A = 
mx.NDArray(collect(1:24)) + x = mx.Variable(:x) + y = mx.reshape(x, 2, 3, 4) + e = mx.bind(y, mx.cpu(), Dict(:x => A)) + mx.forward(e) + out = e.outputs[1] + + @test size(out) == (2, 3, 4) + @test copy(out) == reshape(1:24, 2, 3, 4) + + info("SymbolicNode::reshape(sym, dim)") + + A = mx.NDArray(collect(1:24)) + x = mx.Variable(:x) + y = mx.reshape(x, (2, 3, 4)) + e = mx.bind(y, mx.cpu(), Dict(:x => A)) + mx.forward(e) + out = e.outputs[1] + + @test size(out) == (2, 3, 4) + @test copy(out) == reshape(1:24, 2, 3, 4) +end + function test_dot() info("SymbolicNode::dot") x = mx.Variable(:x) @@ -157,6 +183,7 @@ end test_saveload() test_attrs() test_functions() + test_reshape() test_dot() test_misc() end From 68d6ff7b9e73df93b97238da0451e4ff6deb7bc2 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 25 Sep 2017 15:11:18 +0800 Subject: [PATCH 513/630] sym-node: update deprecation for reshape --- src/deprecated.jl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/deprecated.jl b/src/deprecated.jl index 4c9345126bde..33688d435a8e 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -1,3 +1,7 @@ -# reshape (#272) +# NDArray reshape (#272) @deprecate reshape(arr::NDArray; shape=()) reshape(arr, shape) @deprecate Reshape(arr::NDArray; shape=()) reshape(arr, shape) + +# SymbolicNode reshape (#279) +@deprecate reshape(sym::SymbolicNode; shape=()) reshape(sym, shape) +@deprecate Reshape(sym::SymbolicNode; shape=()) reshape(sym, shape) From a2e270e1fff696b8f869341ccf5d70ed87f994fa Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 25 Sep 2017 15:11:37 +0800 Subject: [PATCH 514/630] sym-node: update NEWS for reshape API --- NEWS.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/NEWS.md b/NEWS.md index 36f10ee2735d..913f92eaa719 100644 --- a/NEWS.md +++ b/NEWS.md @@ -7,6 +7,13 @@ * `reshape(NDArray, dim...; reverse=false)` * `Reshape` deprecated. 
+ * `reshape` of SymbolicNode share the same interface with Base + and additional keyword argument (#279). + + * `reshape(SymbolicNode, dim; reverse=false, name)` + * `reshape(SymbolicNode, dim...; reverse=false, name)` + * `Reshape` deprecated. + # v0.2.2 (2017.05.14) * Updated supported version of MXNet to 0.9.4. * Improved build-system with support for auto-detecting GPU support. From 9e0844a069413a267d65fa3afb309b0d85d703b7 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 25 Sep 2017 19:54:49 +0800 Subject: [PATCH 515/630] sym-node: add reverse keyword arg --- src/symbolic-node.jl | 29 ++++++++++++++--------------- 1 file changed, 14 insertions(+), 15 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 620df2ed063d..737d0dc1b610 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -613,19 +613,6 @@ end import Base: reshape -reshape{N}(sym::SymbolicNode, dim::NTuple{N, Integer}; kwargs...) = - _reshape(sym, dim, kwargs...) -reshape(sym::SymbolicNode, dim::Integer...; kwargs...) = - _reshape(sym, dim, kwargs...) - -@inline function _reshape{N}(sym::SymbolicNode, dim::NTuple{N, Integer}; - reverse::Bool=false, name::String="") - op = _get_cached_libmx_op_handle("reshape") - node = _create_atomic_symbol(op.value, ["shape"], [dump_mx_param(dim)]) - name = get!(DEFAULT_NAME_MANAGER, name, "reshape") - _compose!(node, name=name, data=sym) -end - """ reshape(sym::SymbolicNode, dim; reverse=false, name) reshape(sym::SymbolicNode, dim...; reverse=false, name) @@ -676,7 +663,7 @@ The significance of each is explained below: Example: - - input shape = (2,3,4), shape = (-4,1,2,-2), output shape =(1,2,3,4) + - input shape = (2,3,4), shape = (-4,1,2,-2), output shape = (1,2,3,4) - input shape = (2,3,4), shape = (2,-4,-1,3,-2), output shape = (2,1,3,4) If the argument `reverse` is set to `1`, then the special values are inferred @@ -688,7 +675,19 @@ from right to left. 
output shape would be (40,5) - with `reverse=true`, output shape will be (50,4). """ -reshape +reshape{N}(sym::SymbolicNode, dim::NTuple{N, Integer}; kwargs...) = + _reshape(sym, dim; kwargs...) +reshape(sym::SymbolicNode, dim::Integer...; kwargs...) = + _reshape(sym, dim; kwargs...) + +@inline function _reshape{N}(sym::SymbolicNode, dim::NTuple{N, Integer}; + reverse::Bool=false, name::String="") + op = _get_cached_libmx_op_handle("reshape") + node = _create_atomic_symbol(op.value, ["shape", "reverse"], + [dump_mx_param(dim), dump_mx_param(!reverse)]) + name = get!(DEFAULT_NAME_MANAGER, name, "reshape") + _compose!(node, name=name, data=sym) +end ################################################################################ # Atomic SymbolicNode functions dynamically imported from libmxnet From 55ccbfa2adb346ab6c2fd8a2ab63cbae07f796bf Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 25 Sep 2017 19:55:48 +0800 Subject: [PATCH 516/630] sym-node: more test cases for reshape --- test/unittest/symbolic-node.jl | 66 ++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 26b049e8d63c..44f35142de9d 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -146,6 +146,72 @@ function test_reshape() @test size(out) == (2, 3, 4) @test copy(out) == reshape(1:24, 2, 3, 4) + + info("SymbolicNode::reshape::reverse") + + A = mx.zeros(10, 5, 4) + x = mx.Variable(:x) + y = mx.reshape(x, -1, 0, reverse=true) + e = mx.bind(y, mx.cpu(), Dict(:x => A)) + mx.forward(e) + out = e.outputs[1] + + @test size(out) == (50, 4) + + info("SymbolicNode::reshape::0") + + A = mx.zeros(2, 3, 4) + x = mx.Variable(:x) + y = mx.reshape(x, 4, 0, 2) + e = mx.bind(y, mx.cpu(), Dict(:x => A)) + mx.forward(e) + out = e.outputs[1] + + @test size(out) == (4, 3, 2) + + info("SymbolicNode::reshape::-1") + + A = mx.zeros(2, 3, 4) + x = mx.Variable(:x) + y = mx.reshape(x, 6, 1, -1) + e = 
mx.bind(y, mx.cpu(), Dict(:x => A)) + mx.forward(e) + out = e.outputs[1] + + @test size(out) == (6, 1, 4) + + info("SymbolicNode::reshape::-2") + + A = mx.zeros(2, 3, 4, 2) + x = mx.Variable(:x) + y = mx.reshape(x, 3, 2, -2) + e = mx.bind(y, mx.cpu(), Dict(:x => A)) + mx.forward(e) + out = e.outputs[1] + + @test size(out) == (3, 2, 4, 2) + + info("SymbolicNode::reshape::-3") + + A = mx.zeros(2, 3, 4, 5) + x = mx.Variable(:x) + y = mx.reshape(x, -3, -3) + e = mx.bind(y, mx.cpu(), Dict(:x => A)) + mx.forward(e) + out = e.outputs[1] + + @test size(out) == (6, 20) + + info("SymbolicNode::reshape::-4") + + A = mx.zeros(2, 3, 4) + x = mx.Variable(:x) + y = mx.reshape(x, 0, 0, -4, 2, 2) + e = mx.bind(y, mx.cpu(), Dict(:x => A)) + mx.forward(e) + out = e.outputs[1] + + @test size(out) == (2, 3, 2, 2) end function test_dot() From ff0556b13d92573214f3badc99c31cace5668fe7 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 13 Sep 2017 13:11:52 +0800 Subject: [PATCH 517/630] random: rename srand! to srand make it same as Base --- src/random.jl | 2 +- test/unittest/random.jl | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/random.jl b/src/random.jl index b5b53def8f54..47801512372e 100644 --- a/src/random.jl +++ b/src/random.jl @@ -22,6 +22,6 @@ function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context) randn!(mean, stdvar, out) end -function srand!(seed_state::Int) +function srand(seed_state::Int) @mxcall(:MXRandomSeed, (Cint,), seed_state) end diff --git a/test/unittest/random.jl b/test/unittest/random.jl index 54d651482cd6..30995ecf79bf 100644 --- a/test/unittest/random.jl +++ b/test/unittest/random.jl @@ -8,10 +8,10 @@ function test_uniform() low = -10; high = 10 seed = 123 - mx.srand!(seed) + mx.srand(seed) ret1 = mx.rand(low, high, dims) - mx.srand!(seed) + mx.srand(seed) ret2 = mx.empty(dims) mx.rand!(low, high, ret2) @@ -25,10 +25,10 @@ function test_gaussian() μ = 10; σ = 2 seed = 456 - mx.srand!(seed) + mx.srand(seed) 
ret1 = mx.randn(μ, σ, dims) - mx.srand!(seed) + mx.srand(seed) ret2 = mx.empty(dims) mx.randn!(μ, σ, ret2) From 5b3c507f1a1ae11c033708d13087ec172c96e8c9 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 25 Sep 2017 23:18:39 +0800 Subject: [PATCH 518/630] random: update NEWS and deprecated.jl - update NEWS for deepcopy on NDArray as well (#273) --- NEWS.md | 10 ++++++++-- src/deprecated.jl | 3 +++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/NEWS.md b/NEWS.md index 913f92eaa719..773c2471c514 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,5 +1,9 @@ # v0.3.0 (TBD) +## New API + +* `deepcopy` for NDArray (#273) + ## API Changes * `reshape` of NDArray share the same interface with Base (#272). @@ -7,13 +11,15 @@ * `reshape(NDArray, dim...; reverse=false)` * `Reshape` deprecated. - * `reshape` of SymbolicNode share the same interface with Base - and additional keyword argument (#279). +* `reshape` of SymbolicNode share the same interface with Base + and additional keyword argument (#279). * `reshape(SymbolicNode, dim; reverse=false, name)` * `reshape(SymbolicNode, dim...; reverse=false, name)` * `Reshape` deprecated. +* `srand!` deprecated, please use `srand` (#282) + # v0.2.2 (2017.05.14) * Updated supported version of MXNet to 0.9.4. * Improved build-system with support for auto-detecting GPU support. 
diff --git a/src/deprecated.jl b/src/deprecated.jl index 33688d435a8e..b2816fad5432 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -5,3 +5,6 @@ # SymbolicNode reshape (#279) @deprecate reshape(sym::SymbolicNode; shape=()) reshape(sym, shape) @deprecate Reshape(sym::SymbolicNode; shape=()) reshape(sym, shape) + +# srand (#282) +@deprecate srand!(seed_state::Int) srand(seed_state) From 251a8b3661b5b5269788bf48d649a7fa35c2ff61 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 4 Oct 2017 15:16:58 +0800 Subject: [PATCH 519/630] random: upstream api change Ref: https://github.com/apache/incubator-mxnet/pull/7939 --- src/random.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/random.jl b/src/random.jl index b5b53def8f54..d031d9c2f2d2 100644 --- a/src/random.jl +++ b/src/random.jl @@ -1,6 +1,6 @@ function rand!(low::Real, high::Real, out::NDArray) # XXX: note we reverse shape because julia and libmx has different dim order - _sample_uniform(NDArray, low=low, high=high, shape=reverse(size(out)), out=out) + _random_uniform(NDArray, low=low, high=high, shape=reverse(size(out)), out=out) end function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}) rand(low, high, shape, cpu()) @@ -12,7 +12,7 @@ end function randn!(mean::Real, stdvar::Real, out::NDArray) # XXX: note we reverse shape because julia and libmx has different dim order - _sample_normal(NDArray, loc=mean, scale=stdvar, shape=reverse(size(out)), out=out) + _random_normal(NDArray, loc=mean, scale=stdvar, shape=reverse(size(out)), out=out) end function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}) randn(mean, stdvar, shape, cpu()) From 077a8cb2119edd6ffc63ef0eae92c2e52da32a57 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 4 Oct 2017 14:17:42 +0800 Subject: [PATCH 520/630] build: improve cuda detection - change ADD_LDFLAGS - https://github.com/apache/incubator-mxnet/pull/7856 - https://github.com/apache/incubator-mxnet/pull/7838 - guessing the CUDA_HOME if 
user not provides it. --- deps/build.jl | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index b8ea163951c4..d6591c22c910 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -37,7 +37,6 @@ end HAS_CUDA = false HAS_CUDNN = false -cudnnver = -1 let cudalib = Libdl.find_library(["libcuda", "nvcuda.dll"], CUDAPATHS) HAS_CUDA = !isempty(cudalib) && Libdl.dlopen_e(cudalib) != C_NULL end @@ -53,9 +52,8 @@ end if HAS_CUDA # then check cudnn let cudnnlib = Libdl.find_library("libcudnn", CUDAPATHS) HAS_CUDNN = !isempty(cudnnlib) && Libdl.dlopen_e(cudnnlib) != C_NULL - if HAS_CUDNN - # TODO: do more version check? - cudnnver = dec(ccall((:cudnnGetVersion, cudnnlib), Csize_t, ())) + if HAS_CUDNN && !haskey(ENV, "CUDA_HOME") # inference `CUDA_HOME` + ENV["CUDA_HOME"] = dirname(dirname(cudnnlib)) end end end @@ -63,8 +61,9 @@ end if HAS_CUDA info("Found a CUDA installation.") if HAS_CUDNN - info("Found a CuDNN installation (version -> $cudnnver).") + info("Found a CuDNN installation.") end + info("CUDA_HOME -> $(ENV["CUDA_HOME"])") else info("Did not find a CUDA installation, using CPU-only version of MXNet.") end @@ -170,7 +169,11 @@ if !libmxnet_detected ChangeDirectory(_mxdir) `git submodule deinit --force .` `git fetch` - `git checkout $libmxnet_curr_ver` + if libmxnet_curr_ver != "master" + `git checkout $libmxnet_curr_ver` + else + `git merge --ff origin/$libmxnet_curr_ver` + end `git submodule update --init --recursive` `git -C mshadow checkout -- make/mshadow.mk` `make clean` @@ -192,9 +195,16 @@ if !libmxnet_detected if HAS_CUDA @build_steps begin `sed -i -s 's/USE_CUDA = 0/USE_CUDA = 1/' config.mk` + # address https://github.com/apache/incubator-mxnet/pull/7856 + `sed -i -s "s/ADD_LDFLAGS =\(.*\)/ADD_LDFLAGS =\1 -lcublas -lcusolver -lcurand -lcudart/" config.mk` if haskey(ENV, "CUDA_HOME") `sed -i -s "s@USE_CUDA_PATH = NONE@USE_CUDA_PATH = $(ENV["CUDA_HOME"])@" config.mk` end + if haskey(ENV, 
"CUDA_HOME") + # address https://github.com/apache/incubator-mxnet/pull/7838 + flag = "-L$(ENV["CUDA_HOME"])/lib64 -L$(ENV["CUDA_HOME"])/lib" + `sed -i -s "s@ADD_LDFLAGS =\(.*\)@ADD_LDFLAGS =\1 $flag@" config.mk` + end if HAS_CUDNN `sed -i -s 's/USE_CUDNN = 0/USE_CUDNN = 1/' config.mk` end From ac7bbd009b7745b5e96cdec934bacfbbce22e56a Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 7 Oct 2017 20:49:26 +0800 Subject: [PATCH 521/630] base: support MXGetVersion - Ref: https://github.com/apache/incubator-mxnet/pull/8013 --- src/base.jl | 11 +++++++++++ test/runtests.jl | 2 ++ 2 files changed, 13 insertions(+) diff --git a/src/base.jl b/src/base.jl index 2d3ec8f3d053..c013f17976ae 100644 --- a/src/base.jl +++ b/src/base.jl @@ -46,6 +46,8 @@ function __init__() _get_libmx_op_names() _populate_iter_creator_cache!() + global const LIB_VERSION = _get_lib_version() + atexit() do # notify libmxnet we are shutting down ccall( ("MXNotifyShutdown", MXNET_LIB), Cint, () ) @@ -74,6 +76,15 @@ macro mxcall(fv, argtypes, args...) 
end end +""" +Get libmxnet version +""" +function _get_lib_version() + ver = Ref{Cint}(0) + @mxcall :MXGetVersion (Ref{Cint},) ver + ver[] +end + ################################################################################ # Handle types ################################################################################ diff --git a/test/runtests.jl b/test/runtests.jl index 823830b54d0d..eb05ff2f5ca4 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -11,6 +11,8 @@ function test_dir(dir) end end +info("libmxnet version => $(mx.LIB_VERSION)") + include(joinpath(dirname(@__FILE__), "common.jl")) @testset "MXNet Test" begin test_dir(joinpath(dirname(@__FILE__), "unittest")) From 904eb20af96ced5363cb92017e985e09d339c498 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 10 Oct 2017 17:13:05 +0800 Subject: [PATCH 522/630] rand: docstring --- src/random.jl | 59 ++++++++++++++++++++++++++++++++++++++++++++------- 1 file changed, 51 insertions(+), 8 deletions(-) diff --git a/src/random.jl b/src/random.jl index b01e2bc184d7..c021ef9a0fe7 100644 --- a/src/random.jl +++ b/src/random.jl @@ -1,27 +1,70 @@ +""" + rand!(low, high, arr::NDArray) + +Draw random samples from a uniform distribution. +Samples are uniformly distributed over the half-open interval [low, high) +(includes low, but excludes high). + +# Examples + +```julia +julia> mx.rand(0, 1, mx.zeros(2, 2)) |> copy +2×2 Array{Float32,2}: + 0.405374 0.321043 + 0.281153 0.713927 +``` +""" function rand!(low::Real, high::Real, out::NDArray) # XXX: note we reverse shape because julia and libmx has different dim order _random_uniform(NDArray, low=low, high=high, shape=reverse(size(out)), out=out) end -function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}) - rand(low, high, shape, cpu()) -end -function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context) + +""" + rand(low, high, shape, context=cpu()) + +Draw random samples from a uniform distribution. 
+Samples are uniformly distributed over the half-open interval [low, high) +(includes low, but excludes high). + +# Examples + +```julia +julia> mx.rand(0, 1, (2, 2)) |> copy +2×2 Array{Float32,2}: + 0.405374 0.321043 + 0.281153 0.713927 +``` +""" +function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context=cpu()) out = empty(shape, ctx) rand!(low, high, out) end +""" + randn!(mean, std, arr::NDArray) + +Draw random samples from a normal (Gaussian) distribution. +""" function randn!(mean::Real, stdvar::Real, out::NDArray) # XXX: note we reverse shape because julia and libmx has different dim order _random_normal(NDArray, loc=mean, scale=stdvar, shape=reverse(size(out)), out=out) end -function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}) - randn(mean, stdvar, shape, cpu()) -end -function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context) + +""" + randn(mean, std, shape, context=cpu()) + +Draw random samples from a normal (Gaussian) distribution. +""" +function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context=cpu()) out = empty(shape, ctx) randn!(mean, stdvar, out) end +""" + srand(seed::Int) + +Set the random seed of libmxnet +""" function srand(seed_state::Int) @mxcall(:MXRandomSeed, (Cint,), seed_state) end From aae016c06c346607e7e7da63ef5eea99b395bafd Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 8 Oct 2017 01:12:23 +0800 Subject: [PATCH 523/630] ndarray: pretty printing MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Leverage `Base.showarray`. e.g. 
```julia julia> mx.rand(0, 1, (5, 20)) 5x20 mx.NDArray{Float32} @ CPU0: 0.628982 0.425452 0.952792 … 0.785153 0.181631 5.53504f-5 0.798047 0.45613 0.984329 0.0639553 0.872651 0.885338 0.687488 0.28173 0.512393 0.31186 0.185636 0.483409 0.703495 0.485628 0.273542 0.679879 0.215508 0.58641 0.580447 ``` - test cases included --- src/ndarray.jl | 34 +++++++++++++++++++++++----------- test/unittest/ndarray.jl | 11 +++++++++++ 2 files changed, 34 insertions(+), 11 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index eba7e2169a7e..7e8ac40f9a03 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -99,7 +99,8 @@ type NDArray end function Base.show(io :: IO, arr :: NDArray) - print(io, "mx.NDArray{$(eltype(arr))}$(size(arr))") + println(io, "$(join(size(arr), "x")) mx.NDArray{$(eltype(arr))} @ $(context(arr)):") + Base.showarray(io, try_get_shared(arr, sync=:read), false, header=false) end function NDArray{T<:Real}(data :: Array{T}) @@ -299,7 +300,7 @@ function eltype{T <: Union{NDArray, MX_NDArrayHandle}}(arr :: T) if dtype_ref[] == -1 # arr->is_none() warn("Eltype of $arr is not defined") - Base.show_backtrace(STDOUT,backtrace()) + Base.show_backtrace(STDOUT, backtrace()) println() return Float32 else @@ -832,24 +833,35 @@ function _wait_to_write(arr :: NDArray) end """ - try_get_shared(arr) + try_get_shared(arr; sync=:nop) Try to create a Julia array by sharing the data with the underlying `NDArray`. # Arguments: + * `arr::NDArray`: the array to be shared. !!! note The returned array does not guarantee to share data with the underlying `NDArray`. In particular, data sharing is possible only when the `NDArray` lives on CPU. + +* `sync::Symbol`: `:nop`,`:write`, `:read` + On CPU, invoke `_wait_to_read` if `:read`; + invoke `_wait_to_write` if `:write`. 
""" -function try_get_shared(arr :: NDArray) +function try_get_shared(arr :: NDArray; sync::Symbol=:nop) if context(arr).device_type == CPU # try to do data sharing - return unsafe_wrap(Array, pointer(arr), size(arr)) + if sync == :read + _wait_to_read(arr) + elseif sync == :write + _wait_to_write(arr) + end + + unsafe_wrap(Array, pointer(arr), size(arr)) else # impossible to share, just copying - return copy(arr) + copy(arr) end end @@ -859,12 +871,12 @@ end Test whether `j_arr` is sharing data with `arr`. # Arguments: -* Array j_arr: the Julia Array. -* NDArray arr: the `NDArray`. + +* `j_arr::Array`: the Julia Array. +* `arr::NDArray`: the `NDArray`. """ -function is_shared(j_arr :: Array, arr :: NDArray) - false -end +is_shared(j_arr :: Array, arr :: NDArray) = false + function is_shared{T<:DType}(j_arr :: Array{T}, arr :: NDArray) if length(j_arr) != length(arr) return false diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 2185d920cc2a..67f395dbf95a 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -425,6 +425,16 @@ function test_kwargs() @test all(copy(tx) .== tA) end +function test_show() + let str = sprint(show, mx.NDArray([1 2 3 4])) + @test contains(str, "1x4") + @test contains(str, "mx.NDArray") + @test contains(str, "Int64") + @test contains(str, "CPU") + @test match(r"1\s+2\s+3\s+4", str) != nothing + end +end + ################################################################################ # Run tests ################################################################################ @@ -445,6 +455,7 @@ end test_dot() test_reshape() test_kwargs() + test_show() end end From 81b134ffa6af292d8c65b2de8d7a66d44b8eab5a Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 25 Oct 2017 05:39:06 +0800 Subject: [PATCH 524/630] ndarray: implement `fill` and `fill!` (#297) It serve as an API corresponds to Python's `mx.nd.full()` --- NEWS.md | 7 +++++++ src/ndarray.jl | 25 +++++++++++++++++++++++++ test/unittest/ndarray.jl 
| 37 +++++++++++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+) diff --git a/NEWS.md b/NEWS.md index 773c2471c514..1ffca376c399 100644 --- a/NEWS.md +++ b/NEWS.md @@ -4,6 +4,13 @@ * `deepcopy` for NDArray (#273) +* `fill` and `fill!` for NDArray (#TBD) + An API correspond to Python's `mx.nd.full()` + + * `fill(x, dims, ctx=cpu())` + * `fill(x, dims...)` + * `fill!(x, arr::NDArray)` + ## API Changes * `reshape` of NDArray share the same interface with Base (#272). diff --git a/src/ndarray.jl b/src/ndarray.jl index 7e8ac40f9a03..5974c82f1972 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -703,6 +703,31 @@ function /(arg0 :: NDArray, arg :: Real) end +""" + fill!(x, arr::NDArray) + +Create an `NDArray` filled with the value `x`, like `Base.fill`. +""" +function fill!(x, arr::NDArray) + arr[:] = x + arr +end + +""" + fill(x, dims, ctx=cpu()) + fill(x, dims...) + +Create an `NDArray` filled with the value `x`, like `Base.fill`. +""" +function fill{N}(x, dims::NTuple{N, Integer}, ctx::Context=cpu()) + arr = empty(typeof(x), dims, ctx) + arr[:] = x + arr +end + +fill(x, dims::Integer...) 
= fill(x, dims) + + """ Manipulating as Julia Arrays ---------------------------- diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 67f395dbf95a..6944e7ed0829 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -413,6 +413,42 @@ function test_reshape() @test size(C) == (50, 4) end +function test_fill() + info("NDArray::fill") + thresh = 1e8 + + let x = mx.fill(42, 2, 3, 4) + @test eltype(x) == Int + @test size(x) == (2, 3, 4) + @test copy(x) == fill(42, 2, 3, 4) + end + + let x = mx.fill(Float32(42), 2, 3, 4) + @test eltype(x) == Float32 + @test size(x) == (2, 3, 4) + @test reldiff(copy(x), fill(Float32(42), 2, 3, 4)) < thresh + end + + let x = mx.fill(42, (2, 3, 4)) + @test eltype(x) == Int + @test size(x) == (2, 3, 4) + @test copy(x) == fill(42, 2, 3, 4) + end + + let x = mx.fill(Float32(42), (2, 3, 4)) + @test eltype(x) == Float32 + @test size(x) == (2, 3, 4) + @test reldiff(copy(x), fill(Float32(42), 2, 3, 4)) < thresh + end + + info("NDArray::fill!::arr") + let x = mx.fill!(42, mx.zeros(2, 3, 4)) + @test eltype(x) == Float32 + @test size(x) == (2, 3, 4) + @test reldiff(copy(x), fill(Float32(42), 2, 3, 4)) < thresh + end +end # function test_fill + function test_kwargs() info("NDArray::kwargs") dims1 = (2,3,4) @@ -454,6 +490,7 @@ end test_nd_as_jl() test_dot() test_reshape() + test_fill() test_kwargs() test_show() end From 71f2d40590bec90bc71583606a67de532e5adc74 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 26 Oct 2017 01:43:16 +0800 Subject: [PATCH 525/630] base: merge _julia_to_mx_param into dump_mx_param (#296) --- src/base.jl | 16 +++++++++------- src/ndarray.jl | 17 ++--------------- src/random.jl | 6 ++---- 3 files changed, 13 insertions(+), 26 deletions(-) diff --git a/src/base.jl b/src/base.jl index c013f17976ae..b078793d75e1 100644 --- a/src/base.jl +++ b/src/base.jl @@ -157,14 +157,16 @@ end # # TODO: find a better solution in case this cause issues in the future. 
################################################################################ -function dump_mx_param(val :: Any) - string(val) -end -function dump_mx_param{N,T<:Integer}(shape :: NTuple{N, T}) - string(tuple(flipdim([shape...],1)...)) -end +dump_mx_param(val::Any) = string(val) +dump_mx_param(val::Float64) = @sprintf("%.16e", val) +dump_mx_param(val::Float32) = @sprintf("%.8e", val) +dump_mx_param(val::Float16) = @sprintf("%.4e", val) +dump_mx_param{N, T<:Integer}(shape::NTuple{N, T}) = + string(tuple(flipdim([shape...], 1)...)) -"""A convenient macro copied from Mocha.jl that could be used to define structs + +""" +A convenient macro copied from Mocha.jl that could be used to define structs with default values and type checks. For example ```julia @defstruct MyStruct Any ( diff --git a/src/ndarray.jl b/src/ndarray.jl index 5974c82f1972..abe5a150d9ff 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1012,19 +1012,6 @@ end ACCEPT_EMPTY_MUTATE_TARGET = (1 << 2) ) -function _julia_to_mx_param(val :: Any) - string(val) -end -function _julia_to_mx_param(val :: Float64) - @sprintf("%.16e", val) -end -function _julia_to_mx_param(val :: Float32) - @sprintf("%.8e", val) -end -function _julia_to_mx_param(val :: Float16) - @sprintf("%.4e", val) -end - # Import corresponding math functions from base so the automatically defined libmxnet # functions can overload them import Base: sqrt @@ -1086,7 +1073,7 @@ function _get_ndarray_function_def(name :: String) # and in libmxnet. # See https://github.com/dmlc/MXNet.jl/pull/123 if $name == "transpose" - kwargs = Any[key != :axes ? (key, arg) : (key, reverse(map(i->length(arg)-i, arg))) for (key, arg) in kwargs] + kwargs = Any[key != :axes ? 
(key, arg) : (key, map(i->length(arg)-i, arg)) for (key, arg) in kwargs] end if length(output_vars) > 0 @@ -1100,7 +1087,7 @@ function _get_ndarray_function_def(name :: String) num_outputs_p = [convert(Cint, num_outputs)] kw_keys_str = String[string(x[1]) for x in kwargs] - kw_vals_str = String[_julia_to_mx_param(x[2]) for x in kwargs] + kw_vals_str = String[dump_mx_param(x[2]) for x in kwargs] #op_handle = _get_cached_libmx_op_handle($(QuoteNode(name))) op_handle = _get_cached_libmx_op_handle($(name)) diff --git a/src/random.jl b/src/random.jl index c021ef9a0fe7..2d594014da1c 100644 --- a/src/random.jl +++ b/src/random.jl @@ -15,8 +15,7 @@ julia> mx.rand(0, 1, mx.zeros(2, 2)) |> copy ``` """ function rand!(low::Real, high::Real, out::NDArray) - # XXX: note we reverse shape because julia and libmx has different dim order - _random_uniform(NDArray, low=low, high=high, shape=reverse(size(out)), out=out) + _random_uniform(NDArray, low=low, high=high, shape=size(out), out=out) end """ @@ -46,8 +45,7 @@ end Draw random samples from a normal (Gaussian) distribution. """ function randn!(mean::Real, stdvar::Real, out::NDArray) - # XXX: note we reverse shape because julia and libmx has different dim order - _random_normal(NDArray, loc=mean, scale=stdvar, shape=reverse(size(out)), out=out) + _random_normal(NDArray, loc=mean, scale=stdvar, shape=size(out), out=out) end """ From 3824df0b33c0a860d1bf9205b41bee5b0b5557b5 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 27 Oct 2017 05:50:01 +0800 Subject: [PATCH 526/630] Fix mx.chain var reference in macro (#299) fix #298 --- src/nn-factory.jl | 2 +- src/symbolic-node.jl | 3 ++- test/unittest/symbolic-node.jl | 13 +++++++++++++ 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/src/nn-factory.jl b/src/nn-factory.jl index 76babffbe035..ab88d21adf29 100644 --- a/src/nn-factory.jl +++ b/src/nn-factory.jl @@ -21,7 +21,7 @@ fully connected layers. Returns the constructed MLP. 
""" -function MLP(input, spec; hidden_activation::Base.Symbol=:relu, prefix=gensym()) +function MLP(input, spec; hidden_activation::Symbol=:relu, prefix=gensym()) spec = convert(Vector{Union{Int,Tuple}}, spec) n_layer = length(spec) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 4e1ed433bb88..3ae545334953 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -893,7 +893,8 @@ macro chain(layers) return esc(layer) else if @capture(layer, f_(x__)) - return :($f($last_layer, $(x...))) + x′ = esc.(x) + return :($f($last_layer, $(x′...))) else throw(AssertionError("$layer is not a valid function call and cannot be chained.")) end diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 688a010794f7..e81ef52055b1 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -23,6 +23,19 @@ function test_chain() @test mx.list_arguments(model) == [:data,:fc1_weight,:fc1_bias,:fc2_weight,:fc2_bias] @test mx.list_outputs(model) == [:fc2_output] @test mx.list_auxiliary_states(model) == Symbol[] + + let layerconfig = [20, 10, 6] + model = @mx.chain mx.Variable(:data) => + mx.MLP(layerconfig, prefix=:magic_) => + mx.LinearRegressionOutput(mx.Variable(:label)) + + @test mx.list_arguments(model) == [ + :data, + :magic_fc1_weight, :magic_fc1_bias, + :magic_fc2_weight, :magic_fc2_bias, + :magic_fc3_weight, :magic_fc3_bias, + :label] + end end function test_internal() From 41f8f55129676b9eaf0fef76052c0369aa175b3b Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 31 Oct 2017 23:57:18 +0800 Subject: [PATCH 527/630] visualize: fix node_attr on upstream master (#301) Ref: https://github.com/dmlc/nnvm/pull/152/files#diff-8fb4ac5650c04b74642d321b992f8db0R82 --- src/visualize.jl | 9 +++++---- test/unittest/visualize.jl | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/visualize.jl b/src/visualize.jl index ea700b704266..91bbd0c48481 100644 --- a/src/visualize.jl +++ b/src/visualize.jl @@ -51,10 
+51,11 @@ function to_graphviz(network :: SymbolicNode; title="Network Visualization", inp attr = deepcopy(node_attr) label = op - # Up to 0.8 version of mxnet additional info was stored in - # node["param"]. Staring from pre0.9 `param` was changed to `attr`. - if haskey(node, "param") - node_info = node["param"] + # Up to 0.11.0 version of mxnet additional info was stored in + # node["attr"]. Staring from 0.12 `attr` was changed to `attrs`. + # See: https://github.com/dmlc/nnvm/pull/152 + if haskey(node, "attrs") + node_info = node["attrs"] elseif haskey(node, "attr") node_info = node["attr"] end diff --git a/test/unittest/visualize.jl b/test/unittest/visualize.jl index 3bdaa12824d2..f4ccbf0e5a81 100644 --- a/test/unittest/visualize.jl +++ b/test/unittest/visualize.jl @@ -12,7 +12,7 @@ function test_basic() info("Visualize::basic") mlp = mlp2() - + # Order of elements or default color values can change, but length of the output should be more or less stable @test length(mx.to_graphviz(mlp)) == length( """ From 9fcab401182f9212eea0da88b7a55ebcf113c238 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 6 Nov 2017 23:57:00 +0800 Subject: [PATCH 528/630] symbol: add Base.show (#302) --- src/symbolic-node.jl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 3ae545334953..9312ea5dd7c5 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -220,6 +220,9 @@ function get_name(self :: mx.SymbolicNode) return Symbol(unsafe_string(name[])) end +Base.show(io::IO, sym::SymbolicNode) = + print(io, "$(typeof(sym)) $(get_name(sym))") + import Base: print function print(io :: IO, sym :: SymbolicNode) From 4f182ee42cec55b9dc9aef078fd66c38c42ff266 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 7 Nov 2017 03:35:50 +0800 Subject: [PATCH 529/630] ndarray: more Base-like APIs (#303) * ndarray: make API of `sum` and `mean` be Base-like - also fix the axis value mapping - `mean(arr, axis=0)` is not Julian * ndarray: 
Base-like `maximum` and `minimum` - remove `mx.max`, `mx.min`, `mx.max_axis` and `mx.min_axis` * ndarray: simple doc while remapping with `@_remap` * ndarray: more test cases for dim as tuple * ndarray: remap dot, the elegent way * ndarray: remap `transpose` and add `permutedims` * ndarray: docs of _remap * ndarray: remap `prod` * util: add _sig_checker for discovering non-Julian APIs * travis: add _sig_checker after testing --- .travis.yml | 3 + NEWS.md | 32 ++++++++- src/ndarray.jl | 148 +++++++++++++++++++++++++++++---------- src/util.jl | 25 +++++++ test/unittest/ndarray.jl | 96 ++++++++++++++++++++++--- 5 files changed, 254 insertions(+), 50 deletions(-) diff --git a/.travis.yml b/.travis.yml index 68b1cb1b8b00..aed248bcb23d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -41,6 +41,9 @@ script: - travis_wait 60 ${TRAVIS_DIR}/run_test.sh after_success: + # See https://github.com/dmlc/MXNet.jl/pull/303#issuecomment-341171774 + - julia -e 'using MXNet; mx._sig_checker()' + - source ${TRAVIS_DIR}/run_coverage.sh - echo $TRAVIS_JULIA_VERSION - julia -e 'Pkg.add("Documenter")' diff --git a/NEWS.md b/NEWS.md index 1ffca376c399..b538b7160c3f 100644 --- a/NEWS.md +++ b/NEWS.md @@ -13,12 +13,12 @@ ## API Changes -* `reshape` of NDArray share the same interface with Base (#272). +* `reshape` of NDArray shares the same interface with Base (#272). * `reshape(NDArray, dim; reverse=false)` * `reshape(NDArray, dim...; reverse=false)` * `Reshape` deprecated. -* `reshape` of SymbolicNode share the same interface with Base +* `reshape` of SymbolicNode shares the same interface with Base and additional keyword argument (#279). * `reshape(SymbolicNode, dim; reverse=false, name)` @@ -27,6 +27,34 @@ * `srand!` deprecated, please use `srand` (#282) +* `mean` and `sum` of NDArray share the same interface with Base + and fix the `axis` indexing (#TBD). + + * This is a breaking change; no deprecated warning. 
+ * Before: `mean(arr, axis=0)` + * After: `mean(arr, 1)` + +* `max` and `min` of NDArray renamed to `maximum` and `minimum` and share the + same interface with Base. The `axis` indexing is fixed, also. (#TBD) + + * This is a breaking change; no deprecated warning. + * Before: `mx.max(arr, axis=0)` or `mx.max_axis(arr, axis=0)` + * After: `maximum(arr, 1)` + +* `mx.transpose` for high dimension NDArray has been renamed to `permutedims` + and shares the same interface with Base. (#TBD) + + * This is a breaking changes; no deprecated warning. + * Before: `mx.transpose(A, axis=[2, 1, 3])` + * After: `permutedims(A, [2, 1, 3])` + +* `prod` of `NDArray` shares the same interface with Base and fix + the `axis` indexing. (#TBD). + + * This is a breaking change; no deprecated warning. + * Before: `prod(arr, axis=-1)` + * After: `prod(arr, 1)` + # v0.2.2 (2017.05.14) * Updated supported version of MXNet to 0.9.4. * Improved build-system with support for auto-detecting GPU support. diff --git a/src/ndarray.jl b/src/ndarray.jl index abe5a150d9ff..458c67f5745c 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -99,7 +99,7 @@ type NDArray end function Base.show(io :: IO, arr :: NDArray) - println(io, "$(join(size(arr), "x")) mx.NDArray{$(eltype(arr))} @ $(context(arr)):") + println(io, "$(join(size(arr), "×")) mx.NDArray{$(eltype(arr))} @ $(context(arr)):") Base.showarray(io, try_get_shared(arr, sync=:read), false, header=false) end @@ -971,31 +971,106 @@ function save(filename::String, data::Dict{Base.Symbol,NDArray}) filename, length(names), arrays, names) end -import Base: reshape +################################################################################ +# Mapping NDArray functions to Base-like API +################################################################################ -""" - reshape(arr::NDArray, dim...; reverse=false) - reshape(arr::NDArray, dim; reverse=false) -""" -reshape{N}(arr::NDArray, dim::NTuple{N, Integer}; reverse::Bool=false) = - _reshape(arr, 
dim, reverse) -reshape{N}(arr::NDArray, dim::Vararg{Integer, N}; reverse::Bool=false) = - _reshape(arr, dim, reverse) +const _mxsig = Dict{Symbol,Expr}() + +function _autoimport(name::Symbol) + if isdefined(Base, name) + :(import Base: $name) + else + :() + end +end + +macro _remap(sig::Expr, imp::Expr) + fname = sig.args[1] + opname = string(imp.args[1]) -@inline function _reshape{N}(arr::NDArray, dim::NTuple{N, Integer}, reverse::Bool) - op_handle = _get_cached_libmx_op_handle("reshape") - n_output = Ref(Cint(0)) - hdls_ref = Ref{Ptr{MX_handle}}(C_NULL) - @mxcall(:MXImperativeInvoke, - (MX_handle, Cint, Ptr{MX_handle}, Ref{Cint}, Ref{Ptr{MX_handle}}, - Cint, char_pp, char_pp), - op_handle, 1, [arr.handle], n_output, hdls_ref, - 2, ["shape", "reverse"], [dump_mx_param(dim), dump_mx_param(!reverse)]) - # not a typo ^^^^^^^^ - @assert n_output[] == 1 - NDArray(MX_NDArrayHandle(unsafe_load(hdls_ref[], 1))) + import_expr = _autoimport(fname) + + if isa(imp.args[2], Expr) && imp.args[2].head == :parameters + ndin = imp.args[3:end] + mxargs = imp.args[2].args + else # no keyword arguments + ndin = imp.args[2:end] + mxargs = [] + end + + mxkeys = map(x -> string(x.args[1]), mxargs) + mxvals = Expr(:vect, map(x -> :(dump_mx_param($(x.args[2]))), mxargs)...) + ndhlds = Expr(:vect, map(x -> :($(x).handle), ndin)...) 
+ + func_body = quote + op_handle = _get_cached_libmx_op_handle($opname) + n_output = Ref(Cint(0)) + hdls_ref = Ref{Ptr{MX_handle}}(C_NULL) + @mxcall(:MXImperativeInvoke, + (MX_handle, + Cint, + Ptr{MX_handle}, + Ref{Cint}, + Ref{Ptr{MX_handle}}, + Cint, + char_pp, + char_pp), + op_handle, + $(length(ndin)), + $(ndhlds), + n_output, + hdls_ref, + $(length(mxargs)), + $mxkeys, + $mxvals) + NDArray(MX_NDArrayHandle(unsafe_load(hdls_ref[], 1))) + end + + docstr = " $sig" + func_def = Expr(:function, sig, func_body) + + esc(quote + $import_expr + @doc $docstr -> + $func_def + end) end +macro _remap(sig::Expr, imp::Symbol) + imp = _mxsig[imp] + + esc(quote + @_remap($sig, $imp) + end) +end + +_mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) +@_remap reshape(arr::NDArray, dim...; reverse = false) reshape +@_remap reshape(arr::NDArray, dim; reverse = false) reshape + +@_remap mean(arr::NDArray) mean(arr) +@_remap mean(arr::NDArray, region) mean(arr; axis = 0 .- region, keepdims = true) + +@_remap sum(arr::NDArray) sum(arr) +@_remap sum(arr::NDArray, dims) sum(arr; axis = 0 .- dims, keepdims = true) + +@_remap maximum(arr::NDArray) max(arr) +@_remap maximum(arr::NDArray, dims) max(arr; axis = 0 .- dims, keepdims = true) + +@_remap minimum(arr::NDArray) min(arr) +@_remap minimum(arr::NDArray, dims) min(arr; axis = 0 .- dims, keepdims = true) + +# See https://github.com/dmlc/MXNet.jl/issues/55 +@_remap dot(x::NDArray, y::NDArray) dot(y, x) + +# See https://github.com/dmlc/MXNet.jl/pull/123 +@_remap transpose(arr::NDArray) transpose(_only2d(arr)) +@_remap permutedims(arr::NDArray, axes) transpose(arr; axes = length(axes) .- tuple(axes...)) + +@_remap prod(arr::NDArray) prod(arr) +@_remap prod(arr::NDArray, dims) prod(arr; axis = 0 .- dims, keepdims = true) + ################################################################################ # NDArray functions dynamically imported from libmxnet 
################################################################################ @@ -1063,19 +1138,6 @@ function _get_ndarray_function_def(name :: String) args = MX_handle[] end - # XXX: hacky way of solving the problem that the arguments of `dot` should be swapped - # See https://github.com/dmlc/MXNet.jl/issues/55 - if $name == "dot" - args = reverse(args) - end - - # XXX: hacky way of solving the semantic difference of the axes parameter in Julia - # and in libmxnet. - # See https://github.com/dmlc/MXNet.jl/pull/123 - if $name == "transpose" - kwargs = Any[key != :axes ? (key, arg) : (key, map(i->length(arg)-i, arg)) for (key, arg) in kwargs] - end - if length(output_vars) > 0 output_handles = map((x) -> Base.cconvert(MX_handle, x), output_vars) # XXX: Julia 0.4 has bug: [Array{MX_handle}] == Array{MX_handle} @@ -1123,9 +1185,21 @@ function _get_ndarray_function_def(name :: String) return func_def, func_def2 end +const _op_import_bl = [ # import black list; do not import these funcs + "mean", + "reshape", + "sum", + "max", + "max_axis", + "min", + "min_axis", + "dot", + "transpose", + "prod", +] + macro _import_ndarray_functions() - black_list = ["reshape"] # do not import these funcs - names = filter(n -> ∉(lowercase(n), black_list), _get_libmx_op_names()) + names = filter(n -> ∉(lowercase(n), _op_import_bl), _get_libmx_op_names()) func_exprs = map(names) do name op_handle = _get_libmx_op_handle(name) diff --git a/src/util.jl b/src/util.jl index 6f7dc1ba73ac..54a633fbb73a 100644 --- a/src/util.jl +++ b/src/util.jl @@ -163,3 +163,28 @@ function _format_signature(narg::Int, arg_names::Ref{char_pp}) return join([unsafe_string(name) for name in arg_names] , ", ") end +@inline function _only2d(x) + @assert ndims(x) == 2 + x +end + +""" +libmxnet operators signature checker. 
+""" +function _sig_checker() + names = filter(n -> ∉(lowercase(n), _op_import_bl), _get_libmx_op_names()) + foreach(names) do name + op_handle = _get_libmx_op_handle(name) + + desc, key_narg = _get_libmx_op_description(name, op_handle) + _sig = desc |> s -> split(s, '\n') |> first |> strip + _m = match(r"(axis|axes|keepdims|shape)", _sig) + + if _m === nothing + return + end + + warn(_sig) + + end +end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 6944e7ed0829..61161f2c7fd8 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -413,6 +413,71 @@ function test_reshape() @test size(C) == (50, 4) end +function test_sum() + info("NDArray::sum") + + let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + @test copy(sum(X))[] == sum(A) + @test copy(sum(X, 1)) == sum(A, 1) + @test copy(sum(X, 2)) == sum(A, 2) + @test copy(sum(X, 3)) == sum(A, 3) + @test copy(sum(X, [1, 2])) == sum(A, [1, 2]) + @test copy(sum(X, (1, 2))) == sum(A, (1, 2)) + end +end + +function test_mean() + info("NDArray::mean") + + let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + @test copy(mean(X))[] == mean(A) + @test copy(mean(X, 1)) == mean(A, 1) + @test copy(mean(X, 2)) == mean(A, 2) + @test copy(mean(X, 3)) == mean(A, 3) + @test copy(mean(X, [1, 2])) == mean(A, [1, 2]) + @test copy(mean(X, (1, 2))) == mean(A, (1, 2)) + end +end + +function test_maximum() + info("NDArray::maximum") + + let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + @test copy(maximum(X))[] == maximum(A) + @test copy(maximum(X, 1)) == maximum(A, 1) + @test copy(maximum(X, 2)) == maximum(A, 2) + @test copy(maximum(X, 3)) == maximum(A, 3) + @test copy(maximum(X, [1, 2])) == maximum(A, [1, 2]) + @test copy(maximum(X, (1, 2))) == maximum(A, (1, 2)) + end +end + +function test_minimum() + info("NDArray::minimum") + + let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + @test copy(minimum(X))[] == minimum(A) + @test copy(minimum(X, 1)) == 
minimum(A, 1) + @test copy(minimum(X, 2)) == minimum(A, 2) + @test copy(minimum(X, 3)) == minimum(A, 3) + @test copy(minimum(X, [1, 2])) == minimum(A, [1, 2]) + @test copy(minimum(X, (1, 2))) == minimum(A, (1, 2)) + end +end + +function test_prod() + info("NDArray::prod") + + let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + @test copy(prod(X))[] == prod(A) + @test copy(prod(X, 1)) == prod(A, 1) + @test copy(prod(X, 2)) == prod(A, 2) + @test copy(prod(X, 3)) == prod(A, 3) + @test copy(prod(X, [1, 2])) == prod(A, [1, 2]) + @test copy(prod(X, (1, 2))) == prod(A, (1, 2)) + end +end + function test_fill() info("NDArray::fill") thresh = 1e8 @@ -449,21 +514,25 @@ function test_fill() end end # function test_fill -function test_kwargs() - info("NDArray::kwargs") - dims1 = (2,3,4) +function test_transpose() + info("NDArray::transpose") + let A = rand(Float32, 2, 3), x = mx.NDArray(A) + @test size(x) == (2, 3) + @test size(x') == (3, 2) + end - A = rand(Float32, dims1) - x = mx.NDArray(A) - tx = mx.transpose(x, axes=(2,1,3)) - tA = permutedims(A, [2,1,3]) - @test size(tx) == size(tA) - @test all(copy(tx) .== tA) + info("NDArray::permutedims") + let A = collect(Float32, reshape(1.0:24, 2, 3, 4)), x = mx.NDArray(A) + A′ = permutedims(A, [2, 1, 3]) + x′ = permutedims(x, [2, 1, 3]) + @test size(A′) == size(x′) + @test A′ == copy(x′) + end end function test_show() let str = sprint(show, mx.NDArray([1 2 3 4])) - @test contains(str, "1x4") + @test contains(str, "1×4") @test contains(str, "mx.NDArray") @test contains(str, "Int64") @test contains(str, "CPU") @@ -490,8 +559,13 @@ end test_nd_as_jl() test_dot() test_reshape() + test_sum() + test_mean() + test_maximum() + test_minimum() + test_prod() test_fill() - test_kwargs() + test_transpose() test_show() end From a3317f13dff102ffd2a113811d5ee77ea51ab72d Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 7 Nov 2017 03:39:48 +0800 Subject: [PATCH 530/630] ndarray: implement rdiv (#292) * ndarray: implement rdiv e.g. 
```julia 1 ./ mx.NDArray(Float32[1 2; 3 4]) ``` * typo --- NEWS.md | 2 +- src/ndarray.jl | 31 ++++++++++++++++++++++++------- test/unittest/ndarray.jl | 40 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 65 insertions(+), 8 deletions(-) diff --git a/NEWS.md b/NEWS.md index b538b7160c3f..9840cc52ce6c 100644 --- a/NEWS.md +++ b/NEWS.md @@ -3,7 +3,7 @@ ## New API * `deepcopy` for NDArray (#273) - +* `scalar ./ NDArray` is available now. (#292) * `fill` and `fill!` for NDArray (#TBD) An API correspond to Python's `mx.nd.full()` diff --git a/src/ndarray.jl b/src/ndarray.jl index 458c67f5745c..1a1a3b9f347c 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -682,25 +682,42 @@ function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) end end +""" +Elementwise division of NDArray +""" +div(x::NDArray, y::NDArray) = _div(x, y) +div(x::NDArray, s::Real) = _div_scalar(x, scalar=s) +div(s::Real, x::NDArray) = _rdiv_scalar(x, scalar=s) + import Base: / """ ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) Elementwise dividing an `NDArray` by a scalar or another `NDArray` of the same shape. """ -@compatdot function Base.broadcast(::typeof(/), arg0 :: NDArray, arg :: Union{Real, NDArray}) - ret = copy(arg0, context(arg0)) - div_from!(ret, arg) +@compatdot function Base.broadcast(::typeof(/), arg0 :: NDArray, + arg :: Union{Real, NDArray}) + div(arg0, arg) +end + +@compatdot function Base.broadcast(::typeof(/), arg0 :: Real, arg :: NDArray) + div(arg0, arg) end """ /(arg0 :: NDArray, arg :: Real) -Divide an `NDArray` by a scalar. Matrix division (solving linear systems) is not implemented yet. +Divide an `NDArray` by a scalar. +Matrix division (solving linear systems) is not implemented yet. """ -function /(arg0 :: NDArray, arg :: Real) - arg0 ./ arg -end +/(arg0 :: NDArray, arg :: Real) = div(arg0, arg) + +""" + /(arg0 :: Real, arg :: NDArray) + +Elementwise divide a scalar by an `NDArray`. 
+""" +/(arg0 :: Real, arg :: NDArray) = div(arg0, arg) """ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 61161f2c7fd8..2b731f6beded 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -264,8 +264,47 @@ function test_div() t6, a6 = rand_tensors(Float16, dims) scalar_large = 1e4 @test reldiff(t6 / scalar_large, copy(a6 ./ scalar_large)) < 1e-1 + + let x = mx.NDArray([1 2; 3 4]) + @test eltype(x) == Int + @test copy(x / 2) == [0 1; 1 2] + @test copy(x / 2.5) == [0 1; 1 2] + @test copy(x / 2.9) == [0 1; 1 2] + end end + +function test_rdiv() + info("NDarray::rdiv") + + info("NDarray::rdiv::Inf16") + let x = 1 ./ mx.zeros(Float16, 4) + @test copy(x) == [Inf16, Inf16, Inf16, Inf16] + end + + info("NDarray::rdiv::Inf32") + let x = 1 ./ mx.zeros(Float32, 4) + @test copy(x) == [Inf32, Inf32, Inf32, Inf32] + end + + info("NDarray::rdiv::Inf64") + let x = 1 ./ mx.zeros(Float64, 4) + @test copy(x) == [Inf64, Inf64, Inf64, Inf64] + end + + info("NDarray::rdiv::Int") + let x = 1 ./ mx.NDArray([1 2; 3 4]) + @test copy(x) == [1 0; 0 0] + end + + info("NDarray::rdiv::Float32") + let x = 1 ./ mx.NDArray(Float32[1 2; 3 4]) + y = 1 ./ Float32[1 2; 3 4] + @test reldiff(copy(x), y) < 1e8 + end +end # function test_rdiv + + function test_gd() dims = rand_dims() tw, aw = rand_tensors(dims) @@ -551,6 +590,7 @@ end test_minus() test_mul() test_div() + test_rdiv() test_gd() test_saveload() test_clip() From 8cc5c1c44864d10e9475e80aa6c01700cdbaa60a Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 7 Nov 2017 13:27:29 +0800 Subject: [PATCH 531/630] util: docstring of _sig_checker (#305) --- src/util.jl | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/util.jl b/src/util.jl index 54a633fbb73a..6877200d87b8 100644 --- a/src/util.jl +++ b/src/util.jl @@ -170,6 +170,20 @@ end """ libmxnet operators signature checker. + +C/Python have different convernsion of accessing array. 
Those languages +handle arrays in row-major and zero-indexing which differs from Julia's +colume-major and 1-indexing. + +This function scans the docstrings of NDArray's APIs, +filter out the signature which contain `axis`, `axes`, `keepdims` and `shape` +as its function argument. + +We invoks this checker in Travis CI build and pop up the warning message +if the functions does not get manually mapped +(imply it's dimension refering may looks weird). + +If you found any warning in Travis CI build, please open an issue on GitHub. """ function _sig_checker() names = filter(n -> ∉(lowercase(n), _op_import_bl), _get_libmx_op_names()) From a488d7a564418bc1eae288d664fb16cc9f512226 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 7 Nov 2017 23:25:02 +0800 Subject: [PATCH 532/630] ndarray: intro TakingBroadcastSeriously and broadcast unfusing (#300) * ndarray: TakingBroadcastSeriously and broadcast unfusing * ndarray: test cases for elementwise mul close #253 * Drop 0.5 support * fixup! ndarray: TakingBroadcastSeriously and broadcast unfusing --- .travis.yml | 3 +- NEWS.md | 15 +++++ REQUIRE | 3 +- appveyor.yml | 1 - docs/make.jl | 2 +- docs/mkdocs.yml | 2 +- src/MXNet.jl | 2 + src/ndarray.jl | 137 +++++++++++++++------------------------ test/unittest/ndarray.jl | 31 +++++---- 9 files changed, 93 insertions(+), 103 deletions(-) diff --git a/.travis.yml b/.travis.yml index aed248bcb23d..734de9e1eff9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,6 @@ os: - osx osx_image: xcode8 julia: - - 0.5 - 0.6 # - nightly 0.6 supports depends on #170 @@ -38,7 +37,7 @@ script: # bump the time limit of no ouput # the `travis_wait` wrapper can be removed once this issue fixed: # https://github.com/JuliaLang/julia/pull/23601 - - travis_wait 60 ${TRAVIS_DIR}/run_test.sh + - ${TRAVIS_DIR}/run_test.sh after_success: # See https://github.com/dmlc/MXNet.jl/pull/303#issuecomment-341171774 diff --git a/NEWS.md b/NEWS.md index 9840cc52ce6c..a848a72c9b02 100644 --- a/NEWS.md +++ b/NEWS.md 
@@ -1,5 +1,7 @@ # v0.3.0 (TBD) +* Drop 0.5 support. (#TBD) + ## New API * `deepcopy` for NDArray (#273) @@ -11,6 +13,19 @@ * `fill(x, dims...)` * `fill!(x, arr::NDArray)` +* Matrix (2D NDArray) multiplication is available now. (#TBD) + + ```julia + julia> x + 1x2 mx.NDArray{Float64} @ CPU0: + 1.0 2.0 + + julia> x' * x + 2x2 mx.NDArray{Float64} @ CPU0: + 1.0 2.0 + 2.0 4.0 + ``` + ## API Changes * `reshape` of NDArray shares the same interface with Base (#272). diff --git a/REQUIRE b/REQUIRE index ca30ebc84ff9..8c07e3f8b273 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,6 +1,7 @@ -julia 0.5.2 +julia 0.6 Compat 0.25.2 Formatting BinDeps JSON MacroTools +TakingBroadcastSeriously diff --git a/appveyor.yml b/appveyor.yml index e3d1984f4732..449465246eff 100644 --- a/appveyor.yml +++ b/appveyor.yml @@ -1,6 +1,5 @@ environment: matrix: - - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.5/julia-0.5-latest-win64.exe" - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.6/julia-0.6-latest-win64.exe" branches: diff --git a/docs/make.jl b/docs/make.jl index a84f680711a5..0a2b01d25cc8 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -8,5 +8,5 @@ makedocs( deploydocs( deps = Deps.pip("pygments", "mkdocs", "mkdocs-material", "python-markdown-math"), repo = "github.com/dmlc/MXNet.jl.git", - julia = "0.5", + julia = "0.6", ) diff --git a/docs/mkdocs.yml b/docs/mkdocs.yml index ed18e479ae37..e636a194196d 100644 --- a/docs/mkdocs.yml +++ b/docs/mkdocs.yml @@ -33,7 +33,7 @@ pages: - Installation Guide: user-guide/install.md - Overview: user-guide/overview.md - FAQ: user-guide/faq.md - - API Documentation: + - API Documentation: - Context: api/context.md - Models: api/model.md - Initializers: api/initializer.md diff --git a/src/MXNet.jl b/src/MXNet.jl index 142f1a06d5be..a527eb6ec4c2 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -17,6 +17,8 @@ end using Formatting using MacroTools +using TakingBroadcastSeriously: @unfuse +import TakingBroadcastSeriously: 
broadcast_ # Functions from base that we can safely extend and that are defined by libmxnet. import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm, diff --git a/src/ndarray.jl b/src/ndarray.jl index 1a1a3b9f347c..c3d80fb43914 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -98,6 +98,10 @@ type NDArray end end +const NDArrayOrReal = Union{NDArray, Real} + +@unfuse NDArray + function Base.show(io :: IO, arr :: NDArray) println(io, "$(join(size(arr), "×")) mx.NDArray{$(eltype(arr))} @ $(context(arr)):") Base.showarray(io, try_get_shared(arr, sync=:read), false, header=false) @@ -553,7 +557,6 @@ function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) return dst end -import Base.broadcast import Base: + """ @@ -561,23 +564,13 @@ import Base: + .+(args...) Summation. Multiple arguments of either scalar or `NDArray` could be -added together. Note at least the first or second argument needs to be an `NDArray` to -avoid ambiguity of built-in summation. +added together. Note at least the first or second argument needs to be an +`NDArray` to avoid ambiguity of built-in summation. """ -function +(arg0 :: NDArray, args :: Union{Real, NDArray}...) - ret = copy(arg0, context(arg0)) - add_to!(ret, args...) -end -@compatdot function Base.broadcast(::typeof(+), arg0 :: NDArray, args :: Union{Real, NDArray}...) - +(arg0, args...) -end -function +(arg0 :: Real, arg1 :: NDArray, args :: Union{Real, NDArray}...) - +(arg1, arg0, args...) -end -@compatdot function Base.broadcast(::typeof(+), arg0 :: Real, arg1 :: NDArray, - args :: Union{Real, NDArray}...) - broadcast(+, arg1, arg0, args...) -end ++(x::NDArray, ys::NDArrayOrReal...) = add_to!(copy(x, context(x)), ys...) + +broadcast_(::typeof(+), x::NDArray, y::NDArrayOrReal) = x + y +broadcast_(::typeof(+), x::Real, y::NDArray) = x + y """ sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) 
@@ -596,32 +589,19 @@ end import Base: - """ - -(arg0, arg1) - -(arg0) - .-(arg0, arg1) + -(x::NDArray) + -(x, y) + .-(x, y) -Subtraction `arg0 - arg1`, of scalar types or `NDArray`. Or create -the negative of `arg0`. +Subtraction `x - y`, of scalar types or `NDArray`. +Or create the negative of `x`. """ -function -(arg0 :: NDArray, arg1 :: Union{Real, NDArray}) - ret = copy(arg0, context(arg0)) - sub_from!(ret, arg1) -end -@compatdot function Base.broadcast(::typeof(-), arg0 :: NDArray, arg1 :: Union{Real, NDArray}) - -(arg0, arg1) -end -function -(arg0 :: Real, arg1 :: NDArray) - ret = -arg1 - add_to!(ret, arg0) - return ret -end -@compatdot function Base.broadcast(::typeof(-), arg0 :: Real, arg1 :: NDArray) - -(arg0, arg1) -end +-(x::NDArray) = _mul_scalar(x, scalar=-one(eltype(x))) +-(x::NDArray, y::NDArrayOrReal) = sub_from!(copy(x, context(x)), y) +-(x::Real, y::NDArray) = -y .+ x -function -(arg0 :: NDArray) - _mul_scalar(arg0, scalar=-one(eltype(arg0))) -end +broadcast_(::typeof(-), x::NDArray, y::NDArrayOrReal) = x - y +broadcast_(::typeof(-), x::Real, y::NDArray) = x - y """ mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) @@ -636,36 +616,31 @@ function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) else _mul(dst, arg, out=dst) end - return dst end import Base: * """ - .*(arg0, arg1) + .*(x, y) -Elementwise multiplication of `arg0` and `arg`, could be either scalar or `NDArray`. +Currently only multiplication a scalar with an `NDArray` is implemented. 
""" -@compatdot function Base.broadcast(::typeof(*), arg0 :: NDArray, arg :: Union{Real, NDArray}) - ret = copy(arg0, context(arg0)) - mul_to!(ret, arg) -end -@compatdot function Base.broadcast(::typeof(*), arg0 :: Real, arg :: NDArray) - arg .* arg0 -end +*(x:: NDArray, y::Real) = x .* y +*(x::Real, y::NDArray) = y .* x + +broadcast_(::typeof(*), x::NDArray, y::NDArrayOrReal) = + mul_to!(copy(x, context(x)), y) +broadcast_(::typeof(*), x::Real, y::NDArray) = y .* x """ - *(arg0, arg1) + *(A::NDArray, B::NDArray) -Currently only multiplication a scalar with an `NDArray` is implemented. Matrix multiplication -is to be added soon. +Matrix (2D NDArray) multiplication. """ -function *(arg0 :: NDArray, arg :: Real) - ret = copy(arg0, context(arg0)) - mul_to!(ret, arg) -end -function *(arg0 :: Real, arg :: NDArray) - *(arg, arg0) +function *(x::NDArray, y::NDArray) + @assert ndims(x) == 2 + @assert ndims(y) == 2 + dot(x, y) end """ @@ -683,42 +658,36 @@ function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) end """ -Elementwise division of NDArray -""" -div(x::NDArray, y::NDArray) = _div(x, y) -div(x::NDArray, s::Real) = _div_scalar(x, scalar=s) -div(s::Real, x::NDArray) = _rdiv_scalar(x, scalar=s) + rdiv_from!(x:: Real, y::NDArray) -import Base: / +Elementwise divide a scalar by an `NDArray`. Inplace updating. """ - ./(arg0 :: NDArray, arg :: Union{Real, NDArray}) - -Elementwise dividing an `NDArray` by a scalar or another `NDArray` of the same shape. -""" -@compatdot function Base.broadcast(::typeof(/), arg0 :: NDArray, - arg :: Union{Real, NDArray}) - div(arg0, arg) +function rdiv_from!(x::Real, y::NDArray) + @assert y.writable + _rdiv_scalar(y, scalar=convert(eltype(y), x), out=y) end -@compatdot function Base.broadcast(::typeof(/), arg0 :: Real, arg :: NDArray) - div(arg0, arg) -end +import Base: / """ - /(arg0 :: NDArray, arg :: Real) + ./(x::NDArray, y::NDArray) + ./(x::NDArray, y::Real) + ./(x:: Real, y::NDArray) -Divide an `NDArray` by a scalar. 
-Matrix division (solving linear systems) is not implemented yet. -""" -/(arg0 :: NDArray, arg :: Real) = div(arg0, arg) +* Elementwise dividing an `NDArray` by a scalar or another `NDArray` +of the same shape. -""" - /(arg0 :: Real, arg :: NDArray) +* Elementwise divide a scalar by an `NDArray`. -Elementwise divide a scalar by an `NDArray`. +* Matrix division (solving linear systems) is not implemented yet. """ -/(arg0 :: Real, arg :: NDArray) = div(arg0, arg) +/(x::NDArray, y::Real) = x ./ y + +broadcast_(::typeof(/), x::NDArray, y::NDArrayOrReal) = + div_from!(copy(x, context(x)), y) +broadcast_(::typeof(/), x::Real, y::NDArray) = + rdiv_from!(x, copy(y, context(y))) """ fill!(x, arr::NDArray) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 2b731f6beded..a15291f8b3fe 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -223,6 +223,18 @@ function test_mul() t6, a6 = rand_tensors(Float16, dims) scalar_small = Float16(1e-5) @test reldiff(t6 * scalar_small, copy(a6 .* scalar_small)) < 1e-1 + + info("NDArray::mul::matrix multiplication") + let x = mx.NDArray([1. 2]) + y = x' * x + @test copy(y) == [1. 2; 2 4] + end + + info("NDArray::mul::elementwise::issue 253") + let x = mx.NDArray([1. 2]) + y = x .* x + @test copy(y) == [1. 4.] 
+ end end function test_div() @@ -246,31 +258,24 @@ function test_div() # test scalar scalar = rand() + 2 - @test reldiff(t2./scalar, copy(a2./scalar)) < thresh + @test reldiff(t2 ./ scalar, copy(a2 ./ scalar)) < thresh # test small and large scalar t4, a4 = rand_tensors(Float32, dims) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t4 / scalar_small, copy(a4 ./ scalar_small)) < thresh - @test reldiff(t4 / scalar_large, copy(a4 ./ scalar_large)) < thresh + @test reldiff(t4 ./ scalar_small, copy(a4 ./ scalar_small)) < thresh + @test reldiff(t4 ./ scalar_large, copy(a4 ./ scalar_large)) < thresh t5, a5 = rand_tensors(Float64, dims) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t5 / scalar_small, copy(a5 ./ scalar_small)) < thresh - @test reldiff(t5 / scalar_large, copy(a5 ./ scalar_large)) < thresh + @test reldiff(t5 ./ scalar_small, copy(a5 ./ scalar_small)) < thresh + @test reldiff(t5 ./ scalar_large, copy(a5 ./ scalar_large)) < thresh t6, a6 = rand_tensors(Float16, dims) scalar_large = 1e4 - @test reldiff(t6 / scalar_large, copy(a6 ./ scalar_large)) < 1e-1 - - let x = mx.NDArray([1 2; 3 4]) - @test eltype(x) == Int - @test copy(x / 2) == [0 1; 1 2] - @test copy(x / 2.5) == [0 1; 1 2] - @test copy(x / 2.9) == [0 1; 1 2] - end + @test reldiff(t6 ./ scalar_large, copy(a6 ./ scalar_large)) < 1e-1 end From 57cc677c5403fee9f7244fae8b18e1e0d63f1e5d Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 9 Nov 2017 00:55:46 +0800 Subject: [PATCH 533/630] ndarray: elementwise power (#293) * ndarray: elementwise power ```julia x.^2 2.^x x.^y ``` * ndarray: elementwise power unfusion * Update NEWS --- NEWS.md | 6 ++++ src/ndarray.jl | 16 ++++++++- test/unittest/ndarray.jl | 72 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 93 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index a848a72c9b02..77ef0b766f16 100644 --- a/NEWS.md +++ b/NEWS.md @@ -26,6 +26,12 @@ 2.0 4.0 ``` +* Elementwise power of `NDArray`. 
(#293) + * `x.^2` + * `2.^x` + * `x.^y` + * where `x` and `y` are `NDArray`s. + ## API Changes * `reshape` of NDArray shares the same interface with Base (#272). diff --git a/src/ndarray.jl b/src/ndarray.jl index c3d80fb43914..d8b13e96cfe8 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -689,6 +689,21 @@ broadcast_(::typeof(/), x::NDArray, y::NDArrayOrReal) = broadcast_(::typeof(/), x::Real, y::NDArray) = rdiv_from!(x, copy(y, context(y))) +import Base: ^ + +""" + .^(x::NDArray, y::NDArray) + .^(x::NDArray, s::Real) + .^(s::Real, x::NDArray) + +Elementwise power of NDArray. +""" +^ + +broadcast_(::typeof(^), x::NDArray, y::NDArray) = _power(x, y) +broadcast_(::typeof(^), x::NDArray, s::Real) = _power_scalar(x, scalar=s) +broadcast_(::typeof(^), s::Real, x::NDArray) = _rpower_scalar(x, scalar=s) + """ fill!(x, arr::NDArray) @@ -713,7 +728,6 @@ end fill(x, dims::Integer...) = fill(x, dims) - """ Manipulating as Julia Arrays ---------------------------- diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index a15291f8b3fe..4552b79b6bcf 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -380,6 +380,77 @@ function test_clip() @test all(clip_down .<= copy(clipped) .<= clip_up) end +function test_power() + info("NDArray::power") + thresh = 1e8 + + info("NDArray::power::Int::x.^n") + let x = mx.NDArray([1 2; 3 4]) + @test eltype(x) == Int + @test copy(x.^-1) == [1 0; 0 0] + @test copy(x.^0) == [1 1; 1 1] + @test copy(x.^1) == [1 2; 3 4] + @test copy(x.^1.1) == [1 2; 3 4] + @test copy(x.^2) == [1 4; 9 16] + @test copy(x.^2.9) == [1 4; 9 16] + @test copy(x.^3) == [1 8; 27 64] + end + + info("NDArray::power::Int::n.^x") + let x = mx.NDArray([1 2; 3 4]) + @test eltype(x) == Int + @test copy(0.^x) == [0 0; 0 0] + @test copy(1.^x) == [1 1; 1 1] + @test copy(1.1.^x) == [1 1; 1 1] + @test copy(2.^x) == [2 4; 8 16] + @test copy(2.9.^x) == [2 4; 8 16] + @test copy(3.^x) == [3 9; 27 81] + end + + info("NDArray::power::Int::x.^y") + let x = 
mx.NDArray([1 2; 3 4]), y = mx.NDArray([2 2; 2 2]) + @test eltype(x) == Int + @test eltype(y) == Int + @test copy(x.^y) == [1 4; 9 16] + @test copy(y.^x) == [2 4; 8 16] + end + + info("NDArray::power::Float32::x.^n") + let x = mx.NDArray(Float32[1 2; 3 4]), A = Float32[1 2; 3 4] + @test eltype(x) == Float32 + @test copy(x.^0) == Float32[1 1; 1 1] + @test copy(x.^1) == Float32[1 2; 3 4] + @test copy(x.^2) == Float32[1 4; 9 16] + @test copy(x.^3) == Float32[1 8; 27 64] + + @test reldiff(copy(x.^-1), A.^-1) < thresh + @test reldiff(copy(x.^1.1), A.^1.1) < thresh + @test reldiff(copy(x.^2.9), A.^2.9) < thresh + end + + info("NDArray::power::Float32::n.^x") + let x = mx.NDArray(Float32[1 2; 3 4]), A = Float32[1 2; 3 4] + @test eltype(x) == Float32 + @test copy(0.^x) == Float32[0 0; 0 0] + @test copy(1.^x) == Float32[1 1; 1 1] + @test copy(2.^x) == Float32[2 4; 8 16] + @test copy(3.^x) == Float32[3 9; 27 81] + + @test reldiff(copy(1.1.^x), 1.1.^A) < thresh + @test reldiff(copy(2.9.^x), 2.9.^A) < thresh + end + + info("NDArray::power::Float32::x.^y") + let x = mx.NDArray(Float32[1 2; 3 4]), y = mx.NDArray(Float32[2 2; 2 2]) + @test eltype(x) == Float32 + @test eltype(y) == Float32 + @test copy(x.^y) == Float32[1 4; 9 16] + @test copy(y.^x) == Float32[2 4; 8 16] + end + + # TODO: Float64: wait for https://github.com/apache/incubator-mxnet/pull/8012 +end # function test_power + function test_sqrt() dims = rand_dims() info("NDArray::sqrt::dims = $dims") @@ -599,6 +670,7 @@ end test_gd() test_saveload() test_clip() + test_power() test_sqrt() test_eltype() test_nd_as_jl() From eea128af54b0eb82fef03bae139f5cc81af7b7af Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 9 Nov 2017 01:18:24 +0800 Subject: [PATCH 534/630] ndarray: fix `add` broadcasting and more tests (#308) --- src/ndarray.jl | 19 ++++++++++--------- test/unittest/ndarray.jl | 6 ++++++ 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index d8b13e96cfe8..56b8869b1758 
100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -541,11 +541,11 @@ macro inplace(stmt) end """ - add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) + add_to!(dst::NDArray, args::NDArrayOrReal...) Add a bunch of arguments into `dst`. Inplace updating. """ -function add_to!(dst :: NDArray, args :: Union{Real, NDArray}...) +function add_to!(dst::NDArray, args::NDArrayOrReal...) @assert dst.writable for arg in args if isa(arg, Real) @@ -567,17 +567,18 @@ Summation. Multiple arguments of either scalar or `NDArray` could be added together. Note at least the first or second argument needs to be an `NDArray` to avoid ambiguity of built-in summation. """ -+(x::NDArray, ys::NDArrayOrReal...) = add_to!(copy(x, context(x)), ys...) ++(x::NDArray, ys::NDArrayOrReal...) = add_to!(copy(x, context(x)), ys...) ++(x::Real, y::NDArray, zs::NDArrayOrReal...) = add_to!(copy(y, context(y)), x, zs...) broadcast_(::typeof(+), x::NDArray, y::NDArrayOrReal) = x + y broadcast_(::typeof(+), x::Real, y::NDArray) = x + y """ - sub_from!(dst :: NDArray, args :: Union{Real, NDArray}...) + sub_from!(dst::NDArray, args::NDArrayOrReal...) Subtract a bunch of arguments from `dst`. Inplace updating. """ -function sub_from!(dst :: NDArray, arg :: Union{Real, NDArray}) +function sub_from!(dst::NDArray, arg::NDArrayOrReal) @assert dst.writable if isa(arg, Real) _minus_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) @@ -604,12 +605,12 @@ broadcast_(::typeof(-), x::NDArray, y::NDArrayOrReal) = x - y broadcast_(::typeof(-), x::Real, y::NDArray) = x - y """ - mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) + mul_to!(dst::NDArray, arg::NDArrayOrReal) Elementwise multiplication into `dst` of either a scalar or an `NDArray` of the same shape. Inplace updating. 
""" -function mul_to!(dst :: NDArray, arg :: Union{Real, NDArray}) +function mul_to!(dst::NDArray, arg::NDArrayOrReal) @assert dst.writable if isa(arg, Real) _mul_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) @@ -644,11 +645,11 @@ function *(x::NDArray, y::NDArray) end """ - div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) + div_from!(dst::NDArray, arg::NDArrayOrReal) Elementwise divide a scalar or an `NDArray` of the same shape from `dst`. Inplace updating. """ -function div_from!(dst :: NDArray, arg :: Union{Real, NDArray}) +function div_from!(dst::NDArray, arg::NDArrayOrReal) @assert dst.writable if isa(arg, Real) _div_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 4552b79b6bcf..0b9c7cf6dd09 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -132,6 +132,12 @@ function test_plus() scalar_large = Float16(1e4) @test reldiff(t6 + scalar_small, copy(a6 .+ scalar_small)) < 1e-1 @test reldiff(t6 + scalar_large, copy(a6 .+ scalar_large)) < 1e-1 + + let x = mx.NDArray([1 2; 3 4]), y = mx.NDArray([1 1; 1 1]) + @test copy(42 .+ x) == [43 44; 45 46] + @test copy(x .+ 42) == [43 44; 45 46] + @test copy(0 .+ x .+ y .+ 41) == [43 44; 45 46] + end end function test_minus() From f839be1fecef0356cced6e6227c74bfbc69ff38e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 9 Nov 2017 01:22:30 +0800 Subject: [PATCH 535/630] Fix deprecations (#307) --- deps/build.jl | 6 +--- examples/char-lstm/lstm.jl | 6 ++-- examples/char-lstm/seq-data.jl | 2 +- plugins/io/svmlight.jl | 2 +- src/MXNet.jl | 4 +-- src/base.jl | 12 +++---- src/callback.jl | 12 +++---- src/compat.jl | 13 ++----- src/context.jl | 2 +- src/executor.jl | 2 +- src/initializer.jl | 10 +++--- src/io.jl | 26 +++++++------- src/kvstore.jl | 2 +- src/metric.jl | 28 +++++++-------- src/model.jl | 10 +++--- src/name.jl | 6 ++-- src/ndarray.jl | 62 +++++++++++++++++----------------- src/optimizer.jl | 22 ++++++------ 
src/optimizers/adadelta.jl | 4 +-- src/optimizers/adagrad.jl | 2 +- src/optimizers/adam.jl | 4 +-- src/optimizers/adamax.jl | 4 +-- src/optimizers/nadam.jl | 4 +-- src/optimizers/rmsprop.jl | 2 +- src/optimizers/sgd.jl | 2 +- src/random.jl | 4 +-- src/symbolic-node.jl | 8 ++--- test/unittest/bind.jl | 2 +- test/unittest/metric.jl | 8 ++--- test/unittest/ndarray.jl | 4 +-- 30 files changed, 128 insertions(+), 147 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index d6591c22c910..86d6a9839287 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -117,11 +117,7 @@ if !libmxnet_detected blas_path = Libdl.dlpath(Libdl.dlopen(Base.libblas_name)) - if VERSION >= v"0.5.0-dev+4338" - blas_vendor = Base.BLAS.vendor() - else - blas_vendor = Base.blas_vendor() - end + blas_vendor = Base.BLAS.vendor() ilp64 = "" if blas_vendor == :openblas64 diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index dcd1e6ddb60c..d930240ba1a2 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -2,14 +2,14 @@ using MXNet #--LSTMState -immutable LSTMState +struct LSTMState c :: mx.SymbolicNode h :: mx.SymbolicNode end #--/LSTMState #--LSTMParam -immutable LSTMParam +struct LSTMParam i2h_W :: mx.SymbolicNode h2h_W :: mx.SymbolicNode i2h_b :: mx.SymbolicNode @@ -116,7 +116,7 @@ end # Negative Log-likelihood -type NLL <: mx.AbstractEvalMetric +mutable struct NLL <: mx.AbstractEvalMetric nll_sum :: Float64 n_sample :: Int diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl index fb7c8378a5f1..0aac5609dac5 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -32,7 +32,7 @@ function build_vocabulary(corpus_fn::AbstractString, vocab_fn::AbstractString; m end #--CharSeqProvider -type CharSeqProvider <: mx.AbstractDataProvider +mutable struct CharSeqProvider <: mx.AbstractDataProvider text :: AbstractString batch_size :: Int seq_len :: Int diff --git a/plugins/io/svmlight.jl b/plugins/io/svmlight.jl index 
500c040394ee..46b79743b87f 100644 --- a/plugins/io/svmlight.jl +++ b/plugins/io/svmlight.jl @@ -5,7 +5,7 @@ datasets in this format could be found at http://www.csie.ntu.edu.tw/~cjlin/libs using MXNet using SVMLightLoader -type SVMLightProvider <: mx.AbstractDataProvider +mutable struct SVMLightProvider <: mx.AbstractDataProvider filename :: AbstractString batch_size :: Int fea_dim :: Int diff --git a/src/MXNet.jl b/src/MXNet.jl index a527eb6ec4c2..85991473d14c 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -11,9 +11,7 @@ using Compat import Compat.String import Compat.view -if VERSION >= v"0.6.0-dev.1024" - import Base.Iterators: filter -end +import Base.Iterators: filter using Formatting using MacroTools diff --git a/src/base.jl b/src/base.jl index b078793d75e1..5df0de509eec 100644 --- a/src/base.jl +++ b/src/base.jl @@ -1,5 +1,5 @@ "Exception thrown when an error occurred calling MXNet API." -immutable MXError <: Exception +struct MXError <: Exception msg :: AbstractString end @@ -91,7 +91,7 @@ end macro mx_define_handle_t(name, destructor) name = esc(name) quote - type $name + mutable struct $name value :: MX_handle function $name(value = C_NULL) @@ -161,7 +161,7 @@ dump_mx_param(val::Any) = string(val) dump_mx_param(val::Float64) = @sprintf("%.16e", val) dump_mx_param(val::Float32) = @sprintf("%.8e", val) dump_mx_param(val::Float16) = @sprintf("%.4e", val) -dump_mx_param{N, T<:Integer}(shape::NTuple{N, T}) = +dump_mx_param(shape::NTuple{N, T}) where {N, T<:Integer} = string(tuple(flipdim([shape...], 1)...)) @@ -203,7 +203,7 @@ end """Internal use only, this value is used to indicate a required value is not specified. 
""" -immutable __Undefined +struct __Undefined end function _defstruct_impl(is_immutable, name, fields) @@ -285,7 +285,7 @@ function _defstruct_impl(is_immutable, name, fields) if is_immutable quote - immutable $(name) <: $(super_name) + struct $(name) <: $(super_name) $type_body end @@ -293,7 +293,7 @@ function _defstruct_impl(is_immutable, name, fields) end else quote - type $(name) <: $(super_name) + mutable struct $(name) <: $(super_name) $type_body end diff --git a/src/callback.jl b/src/callback.jl index 780869daf9f0..289fdd066aa9 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -3,23 +3,23 @@ Abstract type of callback functions used in training. """ -@compat abstract type AbstractCallback end +abstract type AbstractCallback end """ AbstractBatchCallback Abstract type of callbacks to be called every mini-batch. """ -@compat abstract type AbstractBatchCallback <: AbstractCallback end +abstract type AbstractBatchCallback <: AbstractCallback end """ AbstractEpochCallback Abstract type of callbacks to be called every epoch. """ -@compat abstract type AbstractEpochCallback <: AbstractCallback end +abstract type AbstractEpochCallback <: AbstractCallback end -type BatchCallback <: AbstractBatchCallback +mutable struct BatchCallback <: AbstractBatchCallback frequency :: Int call_on_0 :: Bool callback :: Function @@ -86,7 +86,7 @@ function speedometer(;frequency::Int=50) end -type EpochCallback <: AbstractEpochCallback +mutable struct EpochCallback <: AbstractEpochCallback frequency :: Int call_on_0 :: Bool callback :: Function @@ -107,7 +107,7 @@ See also [`every_n_batch`](@ref). 
function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) EpochCallback(n, call_on_0, callback) end -function (cb :: EpochCallback){T<:Real}(model :: Any, state :: OptimizationState, metric :: Vector{Tuple{Base.Symbol, T}}) +function (cb :: EpochCallback)(model :: Any, state :: OptimizationState, metric :: Vector{Tuple{Base.Symbol, T}}) where T<:Real if state.curr_epoch == 0 if cb.call_on_0 cb.callback(model, state, metric) diff --git a/src/compat.jl b/src/compat.jl index 7357632174e5..257f1c2863ed 100644 --- a/src/compat.jl +++ b/src/compat.jl @@ -1,17 +1,12 @@ # this file contains code used for enabling backward compatibility with 0.5 # have to import base dotted operators if in 0.5 -if VERSION < v"0.6.0-dev" - import Base: .+, .-, .*, ./, .^ -end # this is for declaring broadcasted functions in 0.5 # TODO this macro should be removed when 0.5 support is dropped macro compatdot(fblock) - if VERSION ≥ v"0.6.0-dev" - return esc(fblock) - end + return esc(fblock) @capture(fblock, function Base.broadcast(::typeof(op_), args__) body_ end) @@ -24,9 +19,5 @@ macro compatdot(fblock) end macro compatmul(expr1, expr2) - if VERSION ≥ v"0.6.0-dev" - esc(:(broadcast(*, $expr1, $expr2))) - else - esc(:($expr1 .* $expr2)) - end + esc(:(broadcast(*, $expr1, $expr2))) end diff --git a/src/context.jl b/src/context.jl index 410a80ca8b4c..d627e7b5e7f7 100644 --- a/src/context.jl +++ b/src/context.jl @@ -5,7 +5,7 @@ A context describes the device type and id on which computation should be carried on. """ -immutable Context +struct Context device_type :: CONTEXT_TYPE device_id :: Int end diff --git a/src/executor.jl b/src/executor.jl index 8d10102f5c75..38f3925293c1 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -5,7 +5,7 @@ An executor is a realization of a symbolic architecture defined by a `SymbolicNo The actual forward and backward computation specified by the network architecture can be carried out with an executor. 
""" -type Executor +mutable struct Executor handle :: MX_ExecutorHandle symbol :: SymbolicNode arg_arrays :: Vector{NDArray} diff --git a/src/initializer.jl b/src/initializer.jl index 80e6026d1bf4..f741d3e5a279 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -15,9 +15,9 @@ Or, if full behavior customization is needed, override the following function init(self :: AbstractInitializer, name :: Base.Symbol, array :: NDArray) """ -@compat abstract type AbstractInitializer end +abstract type AbstractInitializer end -function init{T<:AbstractInitializer}(self :: T, name :: Base.Symbol, array :: NDArray) +function init(self :: T, name :: Base.Symbol, array :: NDArray) where T<:AbstractInitializer strname = string(name) if startswith(strname,"upsampling") _init_bilinear(self,name, array) @@ -94,7 +94,7 @@ end Initialize weights according to a uniform distribution within the provided scale. """ -immutable UniformInitializer <: AbstractInitializer +struct UniformInitializer <: AbstractInitializer scale :: AbstractFloat end """ @@ -113,7 +113,7 @@ end Initialize weights according to a univariate Gaussian distribution. """ -immutable NormalInitializer <: AbstractInitializer +struct NormalInitializer <: AbstractInitializer μ :: AbstractFloat σ :: AbstractFloat end @@ -150,7 +150,7 @@ used by various libraries. 
@enum XavierDistribution xv_uniform xv_normal @enum XavierRegularization xv_avg xv_in xv_out -immutable XavierInitializer <: AbstractInitializer +struct XavierInitializer <: AbstractInitializer distribution :: XavierDistribution regularization :: XavierRegularization magnitude :: Float64 diff --git a/src/io.jl b/src/io.jl index 242a9ad98d1a..2ba0bf78a584 100644 --- a/src/io.jl +++ b/src/io.jl @@ -15,7 +15,7 @@ Normally this involves defining: * `Base.done(provider, state) -> Bool` * `Base.next(provider, state) -> (AbstractDataBatch, AbstractDataProvider)` """ -@compat abstract type AbstractDataProvider end +abstract type AbstractDataProvider end """ get_batch_size(provider) -> Int @@ -53,7 +53,7 @@ function provide_label end Base type for data provider states. """ -@compat abstract type AbstractDataProviderState end +abstract type AbstractDataProviderState end """ AbstractDataBatch @@ -70,7 +70,7 @@ The following utility functions will be automatically defined: * [`load_data!`](@ref) * [`load_label!`](@ref) """ -@compat abstract type AbstractDataBatch end +abstract type AbstractDataBatch end """ count_samples(provider, batch) -> Int @@ -113,14 +113,14 @@ function get_label end A basic subclass of `AbstractDataBatch`, that implement the interface by accessing member fields. """ -type DataBatch <: AbstractDataBatch +mutable struct DataBatch <: AbstractDataBatch data :: Vector{NDArray} label :: Vector{NDArray} count :: Int end count_samples(batch :: DataBatch) = batch.count -get_data{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batch.data -get_label{Provider<:AbstractDataProvider}(::Provider, batch :: DataBatch) = batch.label +get_data(::Provider, batch :: DataBatch) where {Provider<:AbstractDataProvider} = batch.data +get_label(::Provider, batch :: DataBatch) where {Provider<:AbstractDataProvider} = batch.label """ SlicedNDArray @@ -252,7 +252,7 @@ Construct a data provider from `NDArray` or Julia Arrays. 
TODO: remove `data_padding` and `label_padding`, and implement rollover that copies the last or first several training samples to feed the padding. """ -type ArrayDataProvider <: AbstractDataProvider +mutable struct ArrayDataProvider <: AbstractDataProvider data_arrays :: Vector{Array} data_names :: Vector{Base.Symbol} label_arrays :: Vector{Array} @@ -277,7 +277,7 @@ function ArrayDataProvider(data::Any; batch_size::Int=0, shuffle::Bool=false, da ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle, data_padding=data_padding, label_padding=label_padding) end function ArrayDataProvider(data::Any, label::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) - asarr{T}(arr :: Array{T}) = convert(Array{MX_float}, arr) + asarr(arr :: Array{T}) where {T} = convert(Array{MX_float}, arr) asarr(arr :: NDArray) = copy(arr) if isa(data, Union{NDArray, Array}) && eltype(data) <: Real @@ -362,7 +362,7 @@ end get_batch_size(provider::ArrayDataProvider) = provider.batch_size -immutable ArrayDataProviderState <: AbstractDataProviderState +struct ArrayDataProviderState <: AbstractDataProviderState curr_idx :: Int end @@ -385,7 +385,7 @@ function Base.done(provider::ArrayDataProvider, state :: ArrayDataProviderState) return state.curr_idx > provider.sample_count end -immutable ArrayDataBatch <: AbstractDataBatch +struct ArrayDataBatch <: AbstractDataBatch idx :: UnitRange{Int} end function Base.next(provider :: ArrayDataProvider, state :: ArrayDataProviderState) @@ -423,7 +423,7 @@ end A data provider that wrap built-in data iterators from libmxnet. See below for a list of built-in data iterators. 
""" -type MXDataProvider <: AbstractDataProvider +mutable struct MXDataProvider <: AbstractDataProvider handle :: MX_DataIterHandle data_shape :: Vector{Tuple{Base.Symbol, Tuple}} label_shape:: Vector{Tuple{Base.Symbol, Tuple}} @@ -474,10 +474,10 @@ provide_data(provider::MXDataProvider) = provider.data_shape provide_label(provider::MXDataProvider) = provider.label_shape get_batch_size(provider::MXDataProvider) = provider.batch_size -type MXDataProviderState <: AbstractDataProviderState +mutable struct MXDataProviderState <: AbstractDataProviderState has_next :: Bool end -immutable MXDataBatch <: AbstractDataBatch +struct MXDataBatch <: AbstractDataBatch end function Base.eltype(provider :: MXDataProvider) diff --git a/src/kvstore.jl b/src/kvstore.jl index a1d835f8a6c2..1ac56260b401 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -1,4 +1,4 @@ -type KVStore +mutable struct KVStore handle :: MX_KVStoreHandle updater_c :: Ptr{Void} updater :: Function diff --git a/src/metric.jl b/src/metric.jl index b39309f6275c..db38060c933e 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -8,7 +8,7 @@ interfaces: * [`reset!`](@ref) * [`get`](@ref) """ -@compat abstract type AbstractEvalMetric end +abstract type AbstractEvalMetric end """ hasNDArraySupport(metric) -> Val{true/false} @@ -30,11 +30,11 @@ Update and accumulate metrics. * `labels::Vector{NDArray}`: the labels from the data provider. * `preds::Vector{NDArray}`: the outputs (predictions) of the network. 
""" -function update!{T <: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}) +function update!(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}) where T <: AbstractEvalMetric _update!(metric, labels, preds, hasNDArraySupport(metric)) end -function _update!{T<: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}, :: Val{true}) +function _update!(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}, :: Val{true}) where T<: AbstractEvalMetric if length(labels) != length(preds) Base.warn_once( "The number of labels ($(length(labels))) does not correspond to the\ @@ -45,7 +45,7 @@ function _update!{T<: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray} end end -function _update!{T<: AbstractEvalMetric}(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}, :: Val{false}) +function _update!(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}, :: Val{false}) where T<: AbstractEvalMetric if length(labels) != length(preds) Base.warn_once( "The number of labels ($(length(labels))) does not correspond to the\ @@ -88,7 +88,7 @@ end A metric that calculates nothing. Can be used to ignore an output during training. 
""" -type NullMetric <: mx.AbstractEvalMetric +mutable struct NullMetric <: mx.AbstractEvalMetric end function update!(metric :: NullMetric, labels :: Vector{NDArray}, preds :: Vector{NDArray}) @@ -114,7 +114,7 @@ To calculate both mean-squared error [`Accuracy`](@ref) and log-loss [`ACE`](@re mx.fit(..., eval_metric = mx.MultiMetric([mx.Accuracy(), mx.ACE()])) ``` """ -type MultiMetric <: mx.AbstractEvalMetric +mutable struct MultiMetric <: mx.AbstractEvalMetric metrics :: Vector{mx.AbstractEvalMetric} end @@ -146,7 +146,7 @@ and log-loss [`ACE`](@ref) for the second output: mx.fit(..., eval_metric = mx.SeqMetric([mx.Accuracy(), mx.ACE()])) ``` """ -type SeqMetric <: mx.AbstractEvalMetric +mutable struct SeqMetric <: mx.AbstractEvalMetric metrics :: Vector{mx.AbstractEvalMetric} end @@ -176,7 +176,7 @@ Multiclass classification accuracy. Calculates the mean accuracy per sample for softmax in one dimension. For a multi-dimensional softmax the mean accuracy over all dimensions is calculated. """ -type Accuracy <: AbstractEvalMetric +mutable struct Accuracy <: AbstractEvalMetric acc_sum :: Float64 n_sample :: Int @@ -235,7 +235,7 @@ Calculates the mean squared error regression loss. Requires that label and prediction have the same shape. """ -type MSE <: AbstractEvalMetric +mutable struct MSE <: AbstractEvalMetric mse_sum :: Vector{NDArray} n_sample :: Int @@ -310,7 +310,7 @@ For more discussion about normalized MSE, please see [#211](https://github.com/dmlc/MXNet.jl/pull/211) also. """ -type NMSE <: AbstractEvalMetric +mutable struct NMSE <: AbstractEvalMetric nmse_sum :: Float64 n_sample :: Int @@ -349,7 +349,7 @@ Calculates the averaged cross-entropy (logloss) for classification. # Arguments: * `eps::Float64`: Prevents returning `Inf` if `p = 0`. 
""" -type ACE <: AbstractEvalMetric +mutable struct ACE <: AbstractEvalMetric ace_sum :: Float64 n_sample :: Int eps :: Float64 @@ -368,7 +368,7 @@ end hasNDArraySupport(::ACE) = Val{false}() -function _update_single_output{T}(metric :: ACE, label :: Array{T}, pred :: Array{T}) +function _update_single_output(metric :: ACE, label :: Array{T}, pred :: Array{T}) where T eps = convert(T, metric.eps) # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) @@ -411,7 +411,7 @@ end Calculates the averaged cross-entropy per class and overall (see [`ACE`](@ref)). This can be used to quantify the influence of different classes on the overall loss. """ -type MultiACE <: AbstractEvalMetric +mutable struct MultiACE <: AbstractEvalMetric aces :: Vector{Float64} counts :: Vector{Int} eps :: Float64 @@ -432,7 +432,7 @@ end hasNDArraySupport(::MultiACE) = Val{false}() -function _update_single_output{T}(metric :: MultiACE, label :: Array{T}, pred :: Array{T}) +function _update_single_output(metric :: MultiACE, label :: Array{T}, pred :: Array{T}) where T eps = convert(T, metric.eps) # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) diff --git a/src/model.jl b/src/model.jl index a592b500e9cb..df15e4cace0a 100644 --- a/src/model.jl +++ b/src/model.jl @@ -3,7 +3,7 @@ The abstract super type of all models in MXNet.jl. """ -@compat abstract type AbstractModel end +abstract type AbstractModel end """ FeedForward @@ -14,7 +14,7 @@ explicitly handling of *time index*, but it is relatively easy to implement unrolled RNN / LSTM under this framework (*TODO*: add example). For models that handles sequential data explicitly, please use *TODO*... 
""" -type FeedForward <: AbstractModel +mutable struct FeedForward <: AbstractModel arch :: SymbolicNode ctx :: Vector{Context} @@ -292,9 +292,9 @@ end verbosity :: Int = 3 ) -function _invoke_callbacks{T<:Real}(self::FeedForward, callbacks::Vector{AbstractCallback}, - state::OptimizationState, type_filter::Type; - metric::Vector{Tuple{Base.Symbol, T}} = Vector{Tuple{Base.Symbol, Real}}()) +function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, + state::OptimizationState, type_filter::Type; + metric::Vector{Tuple{Base.Symbol, T}} = Vector{Tuple{Base.Symbol, Real}}()) where T<:Real map(callbacks) do cb if isa(cb, type_filter) if type_filter == AbstractEpochCallback diff --git a/src/name.jl b/src/name.jl index 8ba0f707ff1a..d281770eb357 100644 --- a/src/name.jl +++ b/src/name.jl @@ -1,4 +1,4 @@ -@compat abstract type AbstractNameManager end +abstract type AbstractNameManager end const NameType = Union{Base.Symbol, AbstractString} const NameCounter = Dict{Base.Symbol, Int} @@ -21,7 +21,7 @@ function _default_get_name!(counter :: NameCounter, name :: NameType, hint :: Na return name end -type BasicNameManager <: AbstractNameManager +mutable struct BasicNameManager <: AbstractNameManager counter :: NameCounter end BasicNameManager() = BasicNameManager(NameCounter()) @@ -30,7 +30,7 @@ function get!(manager :: BasicNameManager, name :: NameType, hint :: NameType) _default_get_name!(manager.counter, name, hint) end -type PrefixNameManager <: AbstractNameManager +mutable struct PrefixNameManager <: AbstractNameManager prefix :: Base.Symbol counter :: NameCounter end diff --git a/src/ndarray.jl b/src/ndarray.jl index 56b8869b1758..3e7625e3fdf6 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -3,7 +3,7 @@ const DType = Union{Float32, Float64, Float16, UInt8, Int32, Int8, Int64} @enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 kInt8 kInt64 const DEFAULT_DTYPE = Float32 # MSHADOW_DEFAULT_DTYPE -function toTypeFlag{T <: DType}(:: Type{T}) 
+function toTypeFlag(:: Type{T}) where T <: DType if T == Float32 return kFloat32 elseif T == Float64 @@ -44,7 +44,7 @@ function fromTypeFlag(T :: TypeFlag) end # create a NDArray handle of specific shape -function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) +function _ndarray_alloc(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) where N h_ref = Ref{MX_handle}(0) shape = flipdim(MX_uint[shape...],1) @mxcall(:MXNDArrayCreate, (Ptr{MX_uint}, MX_uint, Cint, Cint, Cint, Ref{MX_handle}), @@ -54,7 +54,7 @@ function _ndarray_alloc{N}(shape :: NTuple{N, Int}, ctx :: Context, delay_alloc end # create a NDArray handle of specific shape type -function _ndarray_alloc{T <: DType,N}(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) +function _ndarray_alloc(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context, delay_alloc :: Bool) where {T <: DType,N} h_ref = Ref{MX_handle}(0) shape = flipdim(MX_uint[shape...],1) dtype = toTypeFlag(T) @@ -89,7 +89,7 @@ of tensor-based computation. C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory have shape (28,28,1,100). """ -type NDArray +mutable struct NDArray handle :: MX_NDArrayHandle writable :: Bool @@ -107,7 +107,7 @@ function Base.show(io :: IO, arr :: NDArray) Base.showarray(io, try_get_shared(arr, sync=:read), false, header=false) end -function NDArray{T<:Real}(data :: Array{T}) +function NDArray(data :: Array{T}) where T<:Real copy(data, cpu()) end @@ -141,13 +141,13 @@ end Allocate memory for an uninitialized `NDArray` with a specified type. 
""" -function empty{N,T<:DType}(::Type{T}, shape :: NTuple{N, Int}) +function empty(::Type{T}, shape :: NTuple{N, Int}) where {N,T<:DType} empty(T, shape, cpu()) end -function empty{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) +function empty(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) where {N,T<:DType} NDArray(_ndarray_alloc(T, shape, ctx, false)) end -function empty{T<:DType}(:: Type{T}, shape :: Int...) +function empty(:: Type{T}, shape :: Int...) where T<:DType empty(T, shape) end @@ -158,10 +158,10 @@ end Allocate memory for an uninitialized `NDArray` with specific shape of type Float32. """ -function empty{N}(shape :: NTuple{N, Int}) +function empty(shape :: NTuple{N, Int}) where N empty(shape, cpu()) end -function empty{N}(shape :: NTuple{N, Int}, ctx :: Context) +function empty(shape :: NTuple{N, Int}, ctx :: Context) where N NDArray(_ndarray_alloc(shape, ctx, false)) end function empty(shape :: Int...) @@ -186,15 +186,15 @@ end Create zero-ed `NDArray` with specific shape and type """ -function zeros{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}) +function zeros(:: Type{T}, shape :: NTuple{N, Int}) where {N,T<:DType} zeros(T, shape, cpu()) end -function zeros{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) +function zeros(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) where {N,T<:DType} arr = empty(T, shape, ctx) arr[:] = zero(T) return arr end -function zeros{T<:DType}(:: Type{T}, shape :: Int...) +function zeros(:: Type{T}, shape :: Int...) where T<:DType zeros(T, shape) end @@ -205,10 +205,10 @@ end Create zero-ed `NDArray` with specific shape. 
""" -function zeros{N}(shape :: NTuple{N, Int}) +function zeros(shape :: NTuple{N, Int}) where N zeros(shape, cpu()) end -function zeros{N}(shape :: NTuple{N, Int}, ctx :: Context) +function zeros(shape :: NTuple{N, Int}, ctx :: Context) where N arr = empty(shape, ctx) arr[:] = 0 return arr @@ -224,15 +224,15 @@ end Create an `NDArray` with specific shape & type, and initialize with 1. """ -function ones{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}) +function ones(:: Type{T}, shape :: NTuple{N, Int}) where {N,T<:DType} ones(T, shape, cpu()) end -function ones{N,T<:DType}(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) +function ones(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) where {N,T<:DType} arr = empty(T, shape, ctx) arr[:] = one(T) return arr end -function ones{T<:DType}(:: Type{T}, shape :: Int...) +function ones(:: Type{T}, shape :: Int...) where T<:DType ones(T, shape) end @@ -243,10 +243,10 @@ end Create an `NDArray` with specific shape and initialize with 1. """ -function ones{N}(shape :: NTuple{N, Int}) +function ones(shape :: NTuple{N, Int}) where N ones(shape, cpu()) end -function ones{N}(shape :: NTuple{N, Int}, ctx :: Context) +function ones(shape :: NTuple{N, Int}, ctx :: Context) where N arr = empty(shape, ctx) arr[:] = 1 return arr @@ -298,7 +298,7 @@ end Get the element type of an `NDArray`. 
""" -function eltype{T <: Union{NDArray, MX_NDArrayHandle}}(arr :: T) +function eltype(arr :: T) where T <: Union{NDArray, MX_NDArrayHandle} dtype_ref = Ref{Cint}(0) @mxcall(:MXNDArrayGetDType, (MX_handle, Ptr{Cint}), arr, dtype_ref) @@ -359,13 +359,13 @@ function setindex!(arr :: NDArray, val :: Real, ::Colon) _set_value(out=arr, src=convert(eltype(arr), val)) return arr end -function setindex!{T<:Real}(arr :: NDArray, val :: Array{T}, ::Colon) +function setindex!(arr :: NDArray, val :: Array{T}, ::Colon) where T<:Real copy!(arr, val) end function setindex!(arr :: NDArray, val :: NDArray, ::Colon) copy!(arr, val) end -function setindex!{T<:Real}(arr :: NDArray, val :: Union{T,Array{T},NDArray}, idx::UnitRange{Int}) +function setindex!(arr :: NDArray, val :: Union{T,Array{T},NDArray}, idx::UnitRange{Int}) where T<:Real setindex!(slice(arr, idx), val, Colon()) end @@ -426,18 +426,18 @@ function copy!(dst :: NDArray, src :: NDArray) return dst end -function copy!{T<:DType}(dst :: Array{T}, src :: NDArray) +function copy!(dst :: Array{T}, src :: NDArray) where T<:DType @assert T == eltype(src) @assert size(dst) == size(src) @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{Void}, Csize_t), src, pointer(dst), length(dst)) return dst end -function copy!{T<:Real}(dst :: Array{T}, src :: NDArray) +function copy!(dst :: Array{T}, src :: NDArray) where T<:Real copy!(dst, copy(src)) end -function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) +function copy!(dst :: NDArray, src :: Array{T}) where T<:Real @assert dst.writable @assert size(dst) == size(src) src = convert(Array{eltype(dst)}, src) # this might involve copying @@ -446,7 +446,7 @@ function copy!{T<:Real}(dst :: NDArray, src :: Array{T}) return dst end -function copy_ignore_shape!{T<:Real}(dst :: NDArray, src :: Array{T}) +function copy_ignore_shape!(dst :: NDArray, src :: Array{T}) where T<:Real @assert dst.writable @assert length(dst) == length(src) src = convert(Array{eltype(dst)}, src) # this might involve 
copying @@ -477,7 +477,7 @@ function copy(arr :: NDArray, ctx :: Context) end # Create copy: Julia Array -> NDArray in a given context -function copy{T<:DType}(arr :: Array{T}, ctx :: Context) +function copy(arr :: Array{T}, ctx :: Context) where T<:DType dst = empty(T, size(arr), ctx) copy!(dst, arr) end @@ -487,7 +487,7 @@ end Convert an `NDArray` into a Julia `Array` of specific type. Data will be copied. """ -function convert{T<:Real}(t::Type{Array{T}}, arr :: NDArray) +function convert(t::Type{Array{T}}, arr :: NDArray) where T<:Real convert(t, copy(arr)) end @@ -721,7 +721,7 @@ end Create an `NDArray` filled with the value `x`, like `Base.fill`. """ -function fill{N}(x, dims::NTuple{N, Integer}, ctx::Context=cpu()) +function fill(x, dims::NTuple{N, Integer}, ctx::Context=cpu()) where N arr = empty(typeof(x), dims, ctx) arr[:] = x arr @@ -903,7 +903,7 @@ Test whether `j_arr` is sharing data with `arr`. """ is_shared(j_arr :: Array, arr :: NDArray) = false -function is_shared{T<:DType}(j_arr :: Array{T}, arr :: NDArray) +function is_shared(j_arr :: Array{T}, arr :: NDArray) where T<:DType if length(j_arr) != length(arr) return false end diff --git a/src/optimizer.jl b/src/optimizer.jl index 11508bf22035..89df56ba7050 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -3,21 +3,21 @@ Base type for all optimizers. """ -@compat abstract type AbstractOptimizer end +abstract type AbstractOptimizer end """ AbstractLearningRateScheduler Base type for all learning rate scheduler. """ -@compat abstract type AbstractLearningRateScheduler end +abstract type AbstractLearningRateScheduler end """ AbstractMomentumScheduler Base type for all momentum scheduler. """ -@compat abstract type AbstractMomentumScheduler end +abstract type AbstractMomentumScheduler end @@ -39,7 +39,7 @@ Base type for all momentum scheduler. but unlike the mini-batch count, the iteration count does **not** reset in each epoch. So it track the *total* number of mini-batches seen so far. 
""" -type OptimizationState +mutable struct OptimizationState batch_size :: Int curr_epoch :: Int curr_batch :: Int @@ -70,7 +70,7 @@ import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate Fixed learning rate scheduler always return the same learning rate. """ -type Fixed <: AbstractLearningRateScheduler +mutable struct Fixed <: AbstractLearningRateScheduler learning_rate :: Float64 end get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rate @@ -81,7 +81,7 @@ get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rat ``\eta_t = \eta_0\gamma^t``. Here ``t`` is the epoch count, or the iteration count if `decay_on_iteration` is set to true. """ -type Exp <: AbstractLearningRateScheduler +mutable struct Exp <: AbstractLearningRateScheduler learning_rate :: Float64 gamma :: Float64 on_iteration :: Bool @@ -99,7 +99,7 @@ get_learning_rate(self :: Exp, state :: OptimizationState) = Here ``t`` is the epoch count, or the iteration count if `decay_on_iteration` is set to true. """ -type Inv <: AbstractLearningRateScheduler +mutable struct Inv <: AbstractLearningRateScheduler learning_rate :: Float64 gamma :: Float64 power :: Float64 @@ -147,7 +147,7 @@ import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum The null momentum scheduler always returns 0 for momentum. It is also used to explicitly indicate momentum should not be used. """ -immutable Null <: AbstractMomentumScheduler +struct Null <: AbstractMomentumScheduler end get_momentum(self :: Null, state :: OptimizationState) = 0.0 @@ -156,7 +156,7 @@ get_momentum(self :: Null, state :: OptimizationState) = 0.0 Fixed momentum scheduler always returns the same value. 
""" -type Fixed <: AbstractMomentumScheduler +mutable struct Fixed <: AbstractMomentumScheduler momentum :: Float64 end get_momentum(self :: Fixed, state :: OptimizationState) = self.momentum @@ -178,7 +178,7 @@ Here * ``\alpha``: default `0.96` * ``\mu_0``: default `0.99` """ -type NadamScheduler <: AbstractMomentumScheduler +mutable struct NadamScheduler <: AbstractMomentumScheduler mu0 :: Float64 delta :: Float64 gamma :: Float64 @@ -242,7 +242,7 @@ end Base class for all optimizer options. """ -@compat abstract type AbstractOptimizerOptions end +abstract type AbstractOptimizerOptions end """ normalized_gradient(opts, state, weight, grad) diff --git a/src/optimizers/adadelta.jl b/src/optimizers/adadelta.jl index aec81445fbb2..f15ff93c7beb 100644 --- a/src/optimizers/adadelta.jl +++ b/src/optimizers/adadelta.jl @@ -49,7 +49,7 @@ s_t &= \rho s_{t-1} + (1-\rho)*(\eta_t*g)^2`` ADADELTA: An Adaptive Learning Rate Method. arXiv Preprint arXiv:1212.5701. """ -type AdaDelta <: AbstractOptimizer +mutable struct AdaDelta <: AbstractOptimizer opts :: AdaDeltaOptions state :: OptimizationState @@ -61,7 +61,7 @@ type AdaDelta <: AbstractOptimizer end end -type AdaDeltaState +mutable struct AdaDeltaState acc :: NDArray delta_acc :: NDArray end diff --git a/src/optimizers/adagrad.jl b/src/optimizers/adagrad.jl index 2df3c07efd44..d12ca43d15b2 100644 --- a/src/optimizers/adagrad.jl +++ b/src/optimizers/adagrad.jl @@ -40,7 +40,7 @@ Epsilon is not included in the typical formula, see [2]. (http://www.ark.cs.cmu.edu/cdyer/adagrad.pdf) """ -type AdaGrad <: AbstractOptimizer +mutable struct AdaGrad <: AbstractOptimizer opts :: AdaGradOptions state :: OptimizationState diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index a6b5c69eefc3..b8a6e9d385c0 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -28,7 +28,7 @@ Stochastic Optimization*. arXiv:1412.6980 [cs.LG]. 
* `weight_decay::Real`: default `0.00001`, weight decay is equivalent to adding a global l2 regularizer for all the parameters. """ -type ADAM <: AbstractOptimizer +mutable struct ADAM <: AbstractOptimizer opts :: ADAMOptions state :: OptimizationState @@ -40,7 +40,7 @@ type ADAM <: AbstractOptimizer end end -type ADAMState +mutable struct ADAMState current_lr :: Float64 # current learning rate mt :: NDArray vt :: NDArray diff --git a/src/optimizers/adamax.jl b/src/optimizers/adamax.jl index 838264360e1b..adcdb78bf255 100644 --- a/src/optimizers/adamax.jl +++ b/src/optimizers/adamax.jl @@ -37,7 +37,7 @@ See [1] for further description. (http://arxiv.org/abs/1412.6980v8). """ -type AdaMax <: AbstractOptimizer +mutable struct AdaMax <: AbstractOptimizer opts :: AdaMaxOptions state :: OptimizationState @@ -49,7 +49,7 @@ type AdaMax <: AbstractOptimizer end end -type AdaMaxState +mutable struct AdaMaxState mt :: NDArray ut :: NDArray beta1Power :: Float64 diff --git a/src/optimizers/nadam.jl b/src/optimizers/nadam.jl index b90b7a106984..3b39c0aaefbe 100644 --- a/src/optimizers/nadam.jl +++ b/src/optimizers/nadam.jl @@ -47,7 +47,7 @@ at their default values. 
[http://www.cs.toronto.edu/~fritz/absps/momentum.pdf] (http://www.cs.toronto.edu/~fritz/absps/momentum.pdf) """ -type Nadam <: AbstractOptimizer +mutable struct Nadam <: AbstractOptimizer opts :: NadamOptions state :: OptimizationState @@ -61,7 +61,7 @@ type Nadam <: AbstractOptimizer end end -type NadamState +mutable struct NadamState mt :: NDArray nt :: NDArray momentum :: Float64 diff --git a/src/optimizers/rmsprop.jl b/src/optimizers/rmsprop.jl index 0c6c2d45a753..274e510f6cfe 100644 --- a/src/optimizers/rmsprop.jl +++ b/src/optimizers/rmsprop.jl @@ -43,7 +43,7 @@ learning rate ``\eta_t`` is calculated as: (http://www.youtube.com/watch?v=O3sxAc4hxZU) (formula @5:20) """ -type RMSProp <: AbstractOptimizer +mutable struct RMSProp <: AbstractOptimizer opts :: RMSPropOptions state :: OptimizationState diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index cd0b998fbed0..6e8ab34f1409 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -28,7 +28,7 @@ Stochastic gradient descent optimizer. * `weight_decay::Real`: default `0.0001`, weight decay is equivalent to adding a global l2 regularizer to the parameters. """ -type SGD <: AbstractOptimizer +mutable struct SGD <: AbstractOptimizer opts :: SGDOptions state :: OptimizationState diff --git a/src/random.jl b/src/random.jl index 2d594014da1c..3c5f1dcb564b 100644 --- a/src/random.jl +++ b/src/random.jl @@ -34,7 +34,7 @@ julia> mx.rand(0, 1, (2, 2)) |> copy 0.281153 0.713927 ``` """ -function rand{N}(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context=cpu()) +function rand(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context=cpu()) where N out = empty(shape, ctx) rand!(low, high, out) end @@ -53,7 +53,7 @@ end Draw random samples from a normal (Gaussian) distribution. 
""" -function randn{N}(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context=cpu()) +function randn(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context=cpu()) where N out = empty(shape, ctx) randn!(mean, stdvar, out) end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 9312ea5dd7c5..80daae1f53de 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -9,7 +9,7 @@ SymbolicNode is the basic building block of the symbolic graph in MXNet.jl. Make a new node by composing `self` with `args`. Or the arguments can be specified using keyword arguments. """ -type SymbolicNode +mutable struct SymbolicNode handle :: MX_SymbolHandle end function Base.unsafe_convert(::Type{MX_handle}, obj::SymbolicNode) @@ -723,13 +723,13 @@ from right to left. output shape would be (40,5) - with `reverse=true`, output shape will be (50,4). """ -reshape{N}(sym::SymbolicNode, dim::NTuple{N, Integer}; kwargs...) = +reshape(sym::SymbolicNode, dim::NTuple{N, Integer}; kwargs...) where {N} = _reshape(sym, dim; kwargs...) reshape(sym::SymbolicNode, dim::Integer...; kwargs...) = _reshape(sym, dim; kwargs...) 
-@inline function _reshape{N}(sym::SymbolicNode, dim::NTuple{N, Integer}; - reverse::Bool=false, name::String="") +@inline function _reshape(sym::SymbolicNode, dim::NTuple{N, Integer}; + reverse::Bool=false, name::String="") where N op = _get_cached_libmx_op_handle("reshape") node = _create_atomic_symbol(op.value, ["shape", "reverse"], [dump_mx_param(dim), dump_mx_param(!reverse)]) diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index 0e32446b9e0f..77be97745926 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -7,7 +7,7 @@ using ..Main: rand_dims, reldiff ################################################################################ # Test Implementations ################################################################################ -function test_arithmetic{T <: mx.DType}(::Type{T}, uf, gf) +function test_arithmetic(::Type{T}, uf, gf) where T <: mx.DType shape = rand_dims() info("Bind::arithmetic::$T::$uf::dims = $shape") diff --git a/test/unittest/metric.jl b/test/unittest/metric.jl index 5b5632a87c76..ca51f83671d9 100644 --- a/test/unittest/metric.jl +++ b/test/unittest/metric.jl @@ -26,7 +26,7 @@ function generate_probs(n, m) end -function loglikelihood{T <: AbstractFloat}(labels::Vector{T}, probs::Array{T, 2}) +function loglikelihood(labels::Vector{T}, probs::Array{T, 2}) where T <: AbstractFloat LL = 0.0 eps = convert(T, 1.0e-8) for i = 1:size(labels, 1) @@ -50,11 +50,7 @@ function test_ace() metric = mx.ACE() # For categorical variables, ACE == -LL mx._update_single_output(metric, labels, probs) LL_v2 = metric.ace_sum / metric.n_sample - @static if VERSION >= v"0.6.0-dev.2075" - @test LL ≈ LL_v2 atol=1e-12 - else - @test_approx_eq_eps LL LL_v2 1e-12 - end + @test LL ≈ LL_v2 atol=1e-12 end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 0b9c7cf6dd09..2299e04836bd 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -7,8 +7,8 @@ using ..Main: rand_dims, reldiff 
################################################################################ # Test Implementations ################################################################################ -rand_tensors{N}(dims::NTuple{N, Int}) = rand_tensors(mx.MX_float, dims) -function rand_tensors{N, T}(::Type{T}, dims::NTuple{N, Int}) +rand_tensors(dims::NTuple{N, Int}) where {N} = rand_tensors(mx.MX_float, dims) +function rand_tensors(::Type{T}, dims::NTuple{N, Int}) where {N, T} tensor = rand(T, dims) array = copy(tensor, mx.cpu()) return (tensor, array) From 4919273ed2e4ed8704b0e5c1d70aa26c86d69d37 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 9 Nov 2017 23:01:41 +0800 Subject: [PATCH 536/630] ndarray: getindex/setindex! linear indexing (#294) * ndarray: getindex/setindex! linear indexing ```julia x = mx.zeros(2, 5) x[5] = 42 ``` * ndarray: implement first --- NEWS.md | 19 ++++++++++ src/ndarray.jl | 77 ++++++++++++++++++++++++++++++++-------- test/unittest/ndarray.jl | 64 +++++++++++++++++++++++++++++++++ 3 files changed, 146 insertions(+), 14 deletions(-) diff --git a/NEWS.md b/NEWS.md index 77ef0b766f16..fc998538f580 100644 --- a/NEWS.md +++ b/NEWS.md @@ -26,6 +26,25 @@ 2.0 4.0 ``` +* `NDArray` `getindex`/`setindex!` linear indexing support and `first` for extracting scalar value. (#TBD) + + ```julia + julia> x = mx.zeros(2, 5) + + julia> x[5] = 42 # do synchronization and set the value + ``` + + ```julia + julia> y = x[5] # actually, getindex won't do synchronization, but REPL's showing did it for you + 1 mx.NDArray{Float32} @ CPU0: + 42.0 + + julia> first(y) # do sync and get the value + 42.0f0 + + julia> y[] # this is available, also + 42.0f0 + ``` * Elementwise power of `NDArray`. 
(#293) * `x.^2` * `2.^x` diff --git a/src/ndarray.jl b/src/ndarray.jl index 3e7625e3fdf6..fb495af34ff1 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -312,6 +312,9 @@ function eltype(arr :: T) where T <: Union{NDArray, MX_NDArrayHandle} end end +@inline _first(arr::NDArray) = try_get_shared(arr, sync = :read) |> first + +Base.first(arr::NDArray) = _first(arr) """ slice(arr :: NDArray, start:stop) @@ -341,37 +344,58 @@ function slice(arr :: NDArray, slice::UnitRange{Int}) return NDArray(MX_NDArrayHandle(hdr_ref[]), arr.writable) end +function _at(handle::Union{MX_NDArrayHandle, MX_handle}, idx::Integer) + h_ref = Ref{MX_handle}(C_NULL) + @mxcall(:MXNDArrayAt, (MX_handle, MX_uint, Ref{MX_handle}), + handle, idx, h_ref) + h_ref[] +end + import Base: setindex! """ - setindex!(arr :: NDArray, val, idx) + setindex!(arr::NDArray, val, idx) -Assign values to an `NDArray`. Elementwise assignment is not implemented, only the following -scenarios are supported +Assign values to an `NDArray`. +The following scenarios are supported + +* single value assignment via linear indexing: `arr[42] = 24` * `arr[:] = val`: whole array assignment, `val` could be a scalar or an array (Julia `Array` or `NDArray`) of the same shape. * `arr[start:stop] = val`: assignment to a *slice*, `val` could be a scalar or an array of the same shape to the slice. See also [`slice`](@ref). 
""" -function setindex!(arr :: NDArray, val :: Real, ::Colon) - @assert(arr.writable) +function setindex!(arr::NDArray, val::Real, idx::Integer) + # linear indexing + @assert arr.writable + _set_value(out=arr[idx], src=val) +end + +function setindex!(arr::NDArray, val::Real, ::Colon) + @assert arr.writable _set_value(out=arr, src=convert(eltype(arr), val)) - return arr end -function setindex!(arr :: NDArray, val :: Array{T}, ::Colon) where T<:Real + +function setindex!(arr::NDArray, val::Array{T}, ::Colon) where T<:Real + @assert arr.writable copy!(arr, val) end -function setindex!(arr :: NDArray, val :: NDArray, ::Colon) + +function setindex!(arr::NDArray, val::NDArray, ::Colon) + @assert arr.writable copy!(arr, val) end -function setindex!(arr :: NDArray, val :: Union{T,Array{T},NDArray}, idx::UnitRange{Int}) where T<:Real + +function setindex!(arr::NDArray, val::Union{T,Array{T},NDArray}, + idx::UnitRange{Int}) where T<:Real + @assert arr.writable setindex!(slice(arr, idx), val, Colon()) end import Base: getindex """ - getindex(arr :: NDArray, idx) + getindex(arr::NDArray, idx) Shortcut for [`slice`](@ref). A typical use is to write @@ -396,18 +420,43 @@ which furthur translates into create a **copy** of the sub-array for Julia `Array`, while for `NDArray`, this is a *slice* that shares the memory. """ -function getindex(arr :: NDArray, ::Colon) +function getindex(arr::NDArray, ::Colon) return arr end """ -Shortcut for [`slice`](@ref). **NOTE** the behavior for Julia's built-in index slicing is to create a -copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. +Shortcut for [`slice`](@ref). +**NOTE** the behavior for Julia's built-in index slicing is to create a +copy of the sub-array, while here we simply call `slice`, +which shares the underlying memory. 
""" -function getindex(arr :: NDArray, idx::UnitRange{Int}) +function getindex(arr::NDArray, idx::UnitRange{Int}) slice(arr, idx) end +getindex(arr::NDArray) = _first(arr) + +function getindex(arr::NDArray, idx::Integer) + # linear indexing + len = length(arr) + size_ = size(arr) + + if idx <= 0 || idx > len + throw(BoundsError( + "attempt to access $(join(size_, 'x')) NDArray at index $(idx)")) + end + + idx -= 1 + offsets = size_[1:end-1] |> reverse ∘ cumprod ∘ collect + handle = arr.handle + for offset ∈ offsets + handle = _at(handle, idx ÷ offset) + idx %= offset + end + + _at(handle, idx) |> MX_NDArrayHandle |> x -> NDArray(x, arr.writable) +end + import Base: copy!, copy, convert, deepcopy """ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 2299e04836bd..ac9090e3ca33 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -85,6 +85,68 @@ function test_slice() @test copy(mx.slice(array, 2:3)) == [1 1; 1 1] end +function test_linear_idx() + info("NDArray::getindex::linear indexing") + let A = reshape(collect(1:30), 3, 10) + x = mx.NDArray(A) + + @test copy(x) == A + @test copy(x[1]) == [1] + @test copy(x[2]) == [2] + @test copy(x[3]) == [3] + @test copy(x[12]) == [12] + @test copy(x[13]) == [13] + @test copy(x[14]) == [14] + + @test_throws BoundsError x[-1] + @test_throws BoundsError x[0] + @test_throws BoundsError x[31] + @test_throws BoundsError x[42] + end + + let A = reshape(collect(1:24), 3, 2, 4) + x = mx.NDArray(A) + + @test copy(x) == A + @test copy(x[1]) == [1] + @test copy(x[2]) == [2] + @test copy(x[3]) == [3] + @test copy(x[11]) == [11] + @test copy(x[12]) == [12] + @test copy(x[13]) == [13] + @test copy(x[14]) == [14] + end + + info("NDArray::setindex!::linear indexing") + let A = reshape(collect(1:24), 3, 2, 4) + x = mx.NDArray(A) + + @test copy(x) == A + + x[4] = -4 + @test copy(x[4]) == [-4] + + x[11] = -11 + @test copy(x[11]) == [-11] + + x[24] = 42 + @test copy(x[24]) == [42] + end +end # function 
test_linear_idx + +function test_first() + info("NDArray::first") + let A = reshape(collect(1:30), 3, 10) + x = mx.NDArray(A) + + @test x[] == 1 + @test x[5][] == 5 + + @test first(x) == 1 + @test first(x[5]) == 5 + end +end # function test_first + function test_plus() dims = rand_dims() t1, a1 = rand_tensors(dims) @@ -668,6 +730,8 @@ end test_assign() test_copy() test_slice() + test_linear_idx() + test_first() test_plus() test_minus() test_mul() From 1fc03f2721f7407f5375f12cb97bcfe79a337543 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 9 Nov 2017 23:03:31 +0800 Subject: [PATCH 537/630] ndarray: elementwise power for irrational (#310) --- NEWS.md | 5 +++++ src/base.jl | 11 ++++++----- src/ndarray.jl | 4 ++++ test/unittest/ndarray.jl | 16 ++++++++++++++++ 4 files changed, 31 insertions(+), 5 deletions(-) diff --git a/NEWS.md b/NEWS.md index fc998538f580..d5eed516db95 100644 --- a/NEWS.md +++ b/NEWS.md @@ -51,6 +51,11 @@ * `x.^y` * where `x` and `y` are `NDArray`s. +* Elementwise power of irrational and `NDArray` (#TBD) + * `e.^x` + * `x.^e` + * `π.^x` + ## API Changes * `reshape` of NDArray shares the same interface with Base (#272). diff --git a/src/base.jl b/src/base.jl index 5df0de509eec..e75dc3b6ac65 100644 --- a/src/base.jl +++ b/src/base.jl @@ -157,11 +157,12 @@ end # # TODO: find a better solution in case this cause issues in the future. 
################################################################################ -dump_mx_param(val::Any) = string(val) -dump_mx_param(val::Float64) = @sprintf("%.16e", val) -dump_mx_param(val::Float32) = @sprintf("%.8e", val) -dump_mx_param(val::Float16) = @sprintf("%.4e", val) -dump_mx_param(shape::NTuple{N, T}) where {N, T<:Integer} = +dump_mx_param(val::Any) = string(val) +dump_mx_param(val::Float64) = @sprintf("%.16e", val) +dump_mx_param(val::Float32) = @sprintf("%.8e", val) +dump_mx_param(val::Float16) = @sprintf("%.4e", val) +dump_mx_param(val::Irrational) = @sprintf("%.16e", val) +dump_mx_param(shape::NTuple{N, <:Integer}) where N = string(tuple(flipdim([shape...], 1)...)) diff --git a/src/ndarray.jl b/src/ndarray.jl index fb495af34ff1..98f7dddc6fa1 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -754,6 +754,10 @@ broadcast_(::typeof(^), x::NDArray, y::NDArray) = _power(x, y) broadcast_(::typeof(^), x::NDArray, s::Real) = _power_scalar(x, scalar=s) broadcast_(::typeof(^), s::Real, x::NDArray) = _rpower_scalar(x, scalar=s) +broadcast_(::typeof(^), ::Irrational{:e}, x::NDArray) = exp(x) +broadcast_(::typeof(^), x::NDArray, s::Irrational) = _power_scalar(x, scalar=s) +broadcast_(::typeof(^), s::Irrational, x::NDArray) = _rpower_scalar(x, scalar=s) + """ fill!(x, arr::NDArray) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index ac9090e3ca33..26d40463adb4 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -516,6 +516,22 @@ function test_power() @test copy(y.^x) == Float32[2 4; 8 16] end + info("NDArray::power::e.^x::x.^e") + let x = mx.zeros(2, 3), A = [1 1 1; 1 1 1] + @test copy(e.^x) ≈ A + end + + let A = Float32[1 2; 3 4], x = mx.NDArray(A) + @test copy(e.^x) ≈ e.^A + @test copy(x.^e) ≈ A.^e + end + + info("NDArray::power::π.^x::x.^π") + let A = Float32[1 2; 3 4], x = mx.NDArray(A) + @test copy(π.^x) ≈ π.^A + @test copy(x.^π) ≈ A.^π + end + # TODO: Float64: wait for 
https://github.com/apache/incubator-mxnet/pull/8012 end # function test_power From 00d61d237319277b042e1235c8524ef40029a141 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 9 Nov 2017 23:07:23 +0800 Subject: [PATCH 538/630] executor: return `outputs` on forward (#312) --- src/executor.jl | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/executor.jl b/src/executor.jl index 38f3925293c1..036c5743b7c0 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -156,7 +156,7 @@ function simple_bind(self :: SymbolicNode, ctx :: Context; end -function forward(self :: Executor; is_train::Bool=false, kwargs...) +function forward(self::Executor; is_train::Bool = false, kwargs...) for (k,v) in kwargs @assert(k ∈ self.arg_dict, "Unknown argument $k") @assert(isa(v, NDArray), "Keyword argument $k must be an NDArray") @@ -164,6 +164,8 @@ function forward(self :: Executor; is_train::Bool=false, kwargs...) end @mxcall(:MXExecutorForward, (MX_handle, Cint), self, is_train) + + self.outputs end function backward(self :: Executor) From 3f93ffc42cf159381b8b5e1459d8f0523bd60ba5 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 9 Nov 2017 23:08:40 +0800 Subject: [PATCH 539/630] git: union merge policy for NEWS.md (#309) --- .gitattributes | 1 + 1 file changed, 1 insertion(+) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 000000000000..4b76ca8606cb --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +NEWS.md merge=union From 678b49e433f6c8f69255461301858cf12101114a Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 9 Nov 2017 23:41:05 +0800 Subject: [PATCH 540/630] ndarray: regularize fill! 
API (#311) In Base, it's fill!(A, x) --- NEWS.md | 2 +- src/ndarray.jl | 6 +++--- test/unittest/ndarray.jl | 4 ++-- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/NEWS.md b/NEWS.md index d5eed516db95..fcd48dc3f81e 100644 --- a/NEWS.md +++ b/NEWS.md @@ -11,7 +11,7 @@ * `fill(x, dims, ctx=cpu())` * `fill(x, dims...)` - * `fill!(x, arr::NDArray)` + * `fill!(arr::NDArray, x)` * Matrix (2D NDArray) multiplication is available now. (#TBD) diff --git a/src/ndarray.jl b/src/ndarray.jl index 98f7dddc6fa1..18db4f38b9ea 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -759,11 +759,11 @@ broadcast_(::typeof(^), x::NDArray, s::Irrational) = _power_scalar(x, scalar=s) broadcast_(::typeof(^), s::Irrational, x::NDArray) = _rpower_scalar(x, scalar=s) """ - fill!(x, arr::NDArray) + fill!(arr::NDArray, x) -Create an `NDArray` filled with the value `x`, like `Base.fill`. +Create an `NDArray` filled with the value `x`, like `Base.fill!`. """ -function fill!(x, arr::NDArray) +function Base.fill!(arr::NDArray, x) arr[:] = x arr end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 26d40463adb4..81093376ad2d 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -706,10 +706,10 @@ function test_fill() end info("NDArray::fill!::arr") - let x = mx.fill!(42, mx.zeros(2, 3, 4)) + let x = fill!(mx.zeros(2, 3, 4), 42) @test eltype(x) == Float32 @test size(x) == (2, 3, 4) - @test reldiff(copy(x), fill(Float32(42), 2, 3, 4)) < thresh + @test copy(x) ≈ fill(Float32(42), 2, 3, 4) end end # function test_fill From e083e5f295d35daa4820a21bd50d296ba230942f Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 11 Nov 2017 00:19:42 +0800 Subject: [PATCH 541/630] doc: cleanup useless enc file (#315) --- docs/.documenter.enc | Bin 1680 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 docs/.documenter.enc diff --git a/docs/.documenter.enc b/docs/.documenter.enc deleted file mode 100644 index 
48bd92bc4742234f2364e62fe899efdf64a1f5d8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1680 zcmV;B25rWS<*O0{K{-TA^4}R}MY#~pBnC~nXf)$xi`VgJ=(}CLo z99B@ySk*kZH{=OD%kZt*$@gI0bE~V&8IFr)?w5O6@f*MXO1rRL!8xs5b~i-YSaAz8 z?Oa!ONEJ)LuTIWymjXYs=Oi>jHonzG$sc;Z*bK0{asSaCl)K8Dt9hwefjeLh>CozO zqh;;QUGT7%|V}q;lmM@=0X9JJ!pB?v8#q`2cOu+ zU@YKi4jzn?`JhZiTmUfQFk4;S}Pw~t=F}U z$%Rz&t@nw^pFcHC2L-yI$Y*cB3#7Rxc^W0K%)Ks79+Uzgm`KXYOBtz%nbUK)6W{GD z6vGFcsUm(_XRIWEX3zin!FjO}Sz?&zh2<}q#6!7T#mDR*Upz5b8j3x4wws>0y8d2U zgr4|K1?&yV3gZfVwYp8Mk)r^>44lPXYH~*@OrgnM4=fOdZBZK!f2-)hDy5ti9w|to z3b(Q=|AHjoLA#w>fJ(S+W#2{r*t{_IpAvCG#Dzda4Y6A710Z2cBun5P^+T7Tm*oQd z1GpiOufpgUcuJQvxFw|HHB`nA_*$k*K{RRaP4)YfMu!gzCr!upY4?3CSKuLt4nsHk z7GV%hH^^e+!1|r&n~=xskC>gi03^36$z|~i_qZ2JUcTV%POTl65g}~L$K<+;J@wUSt;2vLL4Ojd?W zBFdaPoa|QQmU>G641cN1B)OdmcaScG-0Zu4 zafkd|kBaQySh0;{(bK&+u)17jYVSCTLTJ3Yud0x;nOIIGq{LUyoxoez_PJcAA=DK< znsoy=qR64tOErR%#{xT9UVX&!%X)SR<|qo_MO@5o2!lje^^fxFuyLo!EM`7@P9Z(`Iqdg=4#^%Q8z=!DfcOz&iz zHt+S7=xTE<>f>P?R*4NHpCz5FiKsVp^Ld zd5$9(3lZ3}iDb`iIty37map_wc;xQPUfE59I#tt(Kp2#wv?ea~%%2gdJ4%Mt(%~9d zRHCY}3szlG0?5myNXNw9CBbPl_uoPVNqr%~LyW*k;fnaIS6ZXp9i@{Y}u8$kaBHuAgSBe z29Hcga%%BupHsJhkLpmccg5yF!GveSFS*-xYBpnpNO-TiIzIo+%#)oUKoVD`UI~HO z$TkJN-PR>vZr&d{r9ca1?@0Y3U{V?O`WSuiW!9;0Uh2#KbU_q{C38*|0zxhApkC&X zeUx~}r7VL8qdBHi48~Lh&rPXZviPvjgScMatjU&#pKvk^C8E`Wu{OUd>%v70GDn2S aF;ziPu#v|oq3fM$n|LCvE&l?FaMg<7X*&S` From 09b9718952582bea5d9e149025024568461c9bb3 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 13 Nov 2017 10:47:58 +0800 Subject: [PATCH 542/630] build: prevent CUDA_HOME KeyError (#316) address: https://github.com/dmlc/MXNet.jl/issues/316#issuecomment-343660751 --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 86d6a9839287..c1463da5c04f 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -63,7 +63,7 @@ if HAS_CUDA if HAS_CUDNN info("Found a CuDNN installation.") end - info("CUDA_HOME -> 
$(ENV["CUDA_HOME"])") + info("CUDA_HOME -> $(get(ENV, "CUDA_HOME", nothing))") else info("Did not find a CUDA installation, using CPU-only version of MXNet.") end From 9304e6e36a36488209ecc5a37f20ddaa7a8c4ebb Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 13 Nov 2017 11:51:23 +0800 Subject: [PATCH 543/630] sym: broadcast unfusion (#314) * sym: broadcast unfusion for `add` * sym: broadcast unfusion for `minus` * sym: broadcast unfusion for `multiplication` * sym: broadcast unfusion for `div` * sym: broadcast unfusion for `power` * sym: broadcast unfusion for `power` with irrational --- src/ndarray.jl | 4 +- src/symbolic-node.jl | 173 ++++++++++++------------ test/common.jl | 12 ++ test/unittest/symbolic-node.jl | 233 ++++++++++++++++++++++++++++++++- 4 files changed, 338 insertions(+), 84 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 18db4f38b9ea..3e4cff5591d5 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -722,7 +722,7 @@ import Base: / """ ./(x::NDArray, y::NDArray) ./(x::NDArray, y::Real) - ./(x:: Real, y::NDArray) + ./(x::Real, y::NDArray) * Elementwise dividing an `NDArray` by a scalar or another `NDArray` of the same shape. @@ -746,7 +746,7 @@ import Base: ^ .^(x::NDArray, s::Real) .^(s::Real, x::NDArray) -Elementwise power of NDArray. +Elementwise power of `NDArray`. """ ^ diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 80daae1f53de..72eb60b3a8c0 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -10,11 +10,15 @@ Make a new node by composing `self` with `args`. Or the arguments can be specified using keyword arguments. 
""" mutable struct SymbolicNode - handle :: MX_SymbolHandle + handle::MX_SymbolHandle end -function Base.unsafe_convert(::Type{MX_handle}, obj::SymbolicNode) + +const SymbolicNodeOrReal = Union{SymbolicNode, Real} + +@unfuse SymbolicNode # for broadcasting + +Base.unsafe_convert(::Type{MX_handle}, obj::SymbolicNode) = Base.unsafe_convert(MX_handle, obj.handle) -end Base.convert(t::Type{MX_handle}, obj::SymbolicNode) = Base.unsafe_convert(t, obj) Base.cconvert(t::Type{MX_handle}, obj::SymbolicNode) = Base.unsafe_convert(t, obj) @@ -483,105 +487,112 @@ function Base.getindex(self :: SymbolicNode, idx :: Int) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end -import Base.broadcast import Base: + -function +(self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) - ret = self - for arg in args - if isa(arg, SymbolicNode) - ret = _Plus(ret, arg) + +""" + +(args...) + .+(args...) + +Elementwise summation of `SymbolicNode`. +""" +function +(x::SymbolicNode, ys::SymbolicNodeOrReal...) + ret = x + for y ∈ ys + if y isa SymbolicNode + ret = _plus(ret, y) else - ret = _PlusScalar(ret, scalar=MX_float(arg)) + ret = _plus_scalar(ret, scalar=MX_float(y)) end end ret end -@compatdot function Base.broadcast(::typeof(+), self::SymbolicNode, args::Union{SymbolicNode,Real}...) - +(self, args...) -end -function +(s1 :: Real, self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) - +(self, s1, args...) -end -@compatdot function Base.broadcast(::typeof(+), s1::Real, self::SymbolicNode, - args::Union{SymbolicNode,Real}...) - +(self, s1, args...) -end + ++(s::Real, x::SymbolicNode, ys::SymbolicNodeOrReal...) = +(x + s, ys...) + +broadcast_(::typeof(+), x::SymbolicNode, ys::SymbolicNodeOrReal...) = +(x, ys...) +broadcast_(::typeof(+), s::Real, x::SymbolicNode, ys::SymbolicNodeOrReal...) = +(x + s, ys...) 
import Base: - -function -(self :: SymbolicNode, arg :: SymbolicNode) - _Minus(self, arg) -end -@compatdot function Base.broadcast(::typeof(-), self :: SymbolicNode, arg :: SymbolicNode) - -(self, arg) -end -function -(self :: SymbolicNode, arg :: Real) - _MinusScalar(self, scalar=MX_float(arg)) -end -@compatdot function Base.broadcast(::typeof(-), self :: SymbolicNode, arg :: Real) - -(self, arg) -end -function -(arg :: Real, self :: SymbolicNode) - _RMinusScalar(self, scalar=arg) -end -@compatdot function Base.broadcast(::typeof(-), arg :: Real, self :: SymbolicNode) - -(arg, self) -end +""" + -(x, y) + .-(x, y) -function -(self :: SymbolicNode) - -(0, self) -end +Elementwise substraction of `SymbolicNode`. +Operating with `Real` is available. +""" +x::SymbolicNode - y::SymbolicNode = _minus(x, y) +x::SymbolicNode - s::Real = _minus_scalar(x, scalar=MX_float(s)) +s::Real - x::SymbolicNode = _rminus_scalar(x, scalar=MX_float(s)) + +-(x::SymbolicNode) = 0 - x + +broadcast_(::typeof(-), x::SymbolicNode, y::SymbolicNodeOrReal) = x - y +broadcast_(::typeof(-), s::Real, x::SymbolicNode) = s - x import Base: * -@compatdot function Base.broadcast(::typeof(*), self :: SymbolicNode, args :: Union{SymbolicNode,Real}...) - ret = self - for arg in args - if isa(arg, SymbolicNode) - ret = _Mul(ret, arg) + +""" + .*(x, y) + +Elementwise multiplication of `SymbolicNode`. +""" +x::SymbolicNode * s::Real = _mul_scalar(x, scalar=MX_float(s)) +s::Real * x::SymbolicNode = _mul_scalar(x, scalar=MX_float(s)) + +function broadcast_(::typeof(*), x::SymbolicNode, ys::SymbolicNodeOrReal...) + ret = x + for y in ys + if y isa SymbolicNode + ret = _mul(ret, y) else - ret = _MulScalar(ret, scalar=MX_float(arg)) + ret = _mul_scalar(ret, scalar=MX_float(y)) end end ret end -@compatdot function Base.broadcast(::typeof(*), arg :: Real, self :: SymbolicNode, - args :: Union{SymbolicNode,Real}...) - broadcast(*, self, arg, args...) 
-end -function *(arg :: Real, self :: SymbolicNode) - _MulScalar(self, scalar=arg) -end -function *(self :: SymbolicNode, arg :: Real) - *(arg, self) -end + +broadcast_(::typeof(*), s::Real, x::SymbolicNode, ys::SymbolicNodeOrReal...) = + broadcast_(*, x * s, ys...) import Base: / -@compatdot function Base.broadcast(::typeof(/), self :: SymbolicNode, arg :: SymbolicNode) - _Div(self, arg) -end -@compatdot function Base.broadcast(::typeof(/), self :: SymbolicNode, arg :: Real) - _DivScalar(self, scalar=MX_float(arg)) -end -function /(self :: SymbolicNode, arg :: Real) - self ./ arg -end -function /(arg :: Real, self :: SymbolicNode) - _RDivScalar(self, scalar=arg) -end -@compatdot function Base.broadcast(::typeof(/), arg :: Real, self :: SymbolicNode) - _RDivScalar(self, scalar=arg) -end + +""" + ./(x, y) + +* Elementwise dividing a `SymbolicNode` by a scalar or another `SymbolicNode` +of the same shape. + +* Elementwise divide a scalar by an `SymbolicNode`. + +* Matrix division (solving linear systems) is not implemented yet. +""" +x::SymbolicNode / s::Real = _DivScalar(x, scalar=MX_float(s)) + +broadcast_(::typeof(/), x::SymbolicNode, y::SymbolicNode) = _div(x, y) +broadcast_(::typeof(/), x::SymbolicNode, s::Real) = _div_scalar(x, scalar=MX_float(s)) +broadcast_(::typeof(/), s::Real, x::SymbolicNode) = _rdiv_scalar(x, scalar=MX_float(s)) + import Base: ^ -@compatdot function Base.broadcast(::typeof(^), self :: SymbolicNode, pow :: SymbolicNode) - _Power(self, pow) -end -@compatdot function Base.broadcast(::typeof(^), self :: SymbolicNode, pow :: AbstractFloat) - _PowerScalar(self, scalar=pow) -end -function ^(self :: SymbolicNode, pow :: AbstractFloat) - self .^ pow -end + +""" + .^(x, y) + +Elementwise power of `SymbolicNode`. +Operating with `Real` is available. 
+""" +^ + +broadcast_(::typeof(^), x::SymbolicNode, y::SymbolicNode) = _power(x, y) +broadcast_(::typeof(^), x::SymbolicNode, s::Real) = _power_scalar(x, scalar=MX_float(s)) +broadcast_(::typeof(^), s::Real, x::SymbolicNode) = _rpower_scalar(x, scalar=MX_float(s)) + +broadcast_(::typeof(^), ::Irrational{:e}, x::SymbolicNode) = exp(x) +broadcast_(::typeof(^), x::SymbolicNode, s::Irrational) = + _power_scalar(x, scalar=MX_float(s)) +broadcast_(::typeof(^), s::Irrational, x::SymbolicNode) = + _rpower_scalar(x, scalar=MX_float(s)) function _compose!(node :: SymbolicNode; kwargs...) name = char_p(0) diff --git a/test/common.jl b/test/common.jl index 035650e74f72..53c9c251ef06 100644 --- a/test/common.jl +++ b/test/common.jl @@ -25,3 +25,15 @@ function mlpchain() mx.Activation(act_type=:relu) => mx.FullyConnected(name=:fc2, num_hidden=10) end + +""" +execution helper of SymbolicNode +""" +function exec(x::mx.SymbolicNode; feed...) + ks, vs = zip(feed...) + vs′ = mx.NDArray.(vs) + + e = mx.bind(x, context = mx.cpu(), args = Dict(zip(ks, vs′))) + mx.forward(e) + e.outputs +end diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index e81ef52055b1..39eda86b1bb2 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -2,7 +2,7 @@ module TestSymbolicNode using MXNet using Base.Test -using ..Main: mlp2, mlpchain, reldiff +using ..Main: mlp2, mlpchain, reldiff, exec ################################################################################ # Test Implementations @@ -256,6 +256,232 @@ function test_misc() symb = mx.ElementWiseSum(a,b) end +function test_add() + info("SymbolicNode::elementwise add") + let x = mx.Variable(:x), A = Float32[1 2; 3 4] + let y = exec(x .+ 42; :x => A)[] + @test size(y) == size(A) + @test copy(y) == A .+ 42 + end + + let y = exec(42 .+ x; :x => A)[] + @test size(y) == size(A) + @test copy(y) == 42 .+ A + end + + let y = exec(-1 .+ x .+ 42; :x => A)[] + @test size(y) == size(A) + @test copy(y) 
== -1 .+ A .+ 42 + end + end + + let A = Float32[1 2; 3 4], B = Float32[2 4; 6 8] + x = mx.Variable(:x) + y = mx.Variable(:y) + + let z = x .+ y + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) == A .+ B + end + + let z = y .+ x + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) == B .+ A + end + end +end # function test_add + +function test_minus() + info("SymbolicNode::elementwise minus") + let x = mx.Variable(:x), A = Float32[1 2; 3 4] + let y = exec(x .- 42; :x => A)[] + @test size(y) == size(A) + @test copy(y) == A .- 42 + end + + let y = exec(42 .- x; :x => A)[] + @test size(y) == size(A) + @test copy(y) == 42 .- A + end + + let y = exec(-1 .- x .- 42; :x => A)[] + @test size(y) == size(A) + @test copy(y) == -1 .- A .- 42 + end + + let y = exec(-x; :x => A)[] + @test size(y) == size(A) + @test copy(y) == -A + end + end + + let A = Float32[1 2; 3 4], B = Float32[2 4; 6 8] + x = mx.Variable(:x) + y = mx.Variable(:y) + + let z = x .- y + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) == A .- B + end + + let z = y .- x + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) == B .- A + end + end +end # function test_minus + +function test_mul() + info("SymoblicNode::elementwise mul") + let x = mx.Variable(:x), A = Float32[1 2; 3 4] + let y = exec(x .* 42; :x => A)[] + @test size(y) == size(A) + @test copy(y) == A .* 42 + end + + let y = exec(42 .* x; :x => A)[] + @test size(y) == size(A) + @test copy(y) == 42 .* A + end + + let y = exec(-1 .* x .* 42; :x => A)[] + @test size(y) == size(A) + @test copy(y) == -1 .* A .* 42 + end + end + + let A = Float32[1 2; 3 4], B = Float32[2 4; 6 8] + x = mx.Variable(:x) + y = mx.Variable(:y) + + let z = x .* y + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) == A .* B + end + + let z = y .* x + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) == B .* A + end + 
end +end # function test_mul + +function test_div() + info("SymoblicNode::elementwise div") + let x = mx.Variable(:x), A = Float32[1 2; 3 4] + let y = exec(x ./ 42; :x => A)[] + @test size(y) == size(A) + @test copy(y) ≈ A ./ 42 + end + + let y = exec(42 ./ x; :x => A)[] + @test size(y) == size(A) + @test copy(y) ≈ 42 ./ A + end + + let y = exec(-1 ./ x ./ 42; :x => A)[] + @test size(y) == size(A) + @test copy(y) ≈ -1 ./ A ./ 42 + end + end + + let A = Float32[1 2; 3 4], B = Float32[2 4; 6 8] + x = mx.Variable(:x) + y = mx.Variable(:y) + + let z = x ./ y + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) ≈ A ./ B + end + + let z = y ./ x + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) ≈ B ./ A + end + end +end # function test_div + +function test_power() + info("SymoblicNode::elementwise power") + let x = mx.Variable(:x), A = Float32[1 2; 3 4] + let y = exec(x.^42; :x => A)[] + @test size(y) == size(A) + @test copy(y) ≈ A.^42 + end + + let y = exec(42.^x; :x => A)[] + @test size(y) == size(A) + @test copy(y) ≈ 42.^A + end + end + + let A = Float32[1 2; 3 4], B = Float32[2 4; 6 8] + x = mx.Variable(:x) + y = mx.Variable(:y) + + let z = x.^y + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) ≈ A.^B + end + + let z = y.^x + z = exec(z; :x => A, :y => B)[] + + @test size(z) == size(A) + @test copy(z) ≈ B.^A + end + end + + info("NDArray::power::e.^x::x.^e") + let x = mx.Variable(:x), A = [0 0 0; 0 0 0] + y = exec(e.^x; :x => A)[] + @test copy(y) ≈ ones(A) + end + + let x = mx.Variable(:x), A = Float32[1 2; 3 4] + let y = e.^x + z = exec(y; :x => A)[] + @test copy(z) ≈ e.^A + end + + let y = x.^e + z = exec(y; :x => A)[] + @test copy(z) ≈ A.^e + end + end + + info("NDArray::power::π.^x::x.^π") + let x = mx.Variable(:x), A = Float32[1 2; 3 4] + let y = π.^x + z = exec(y; :x => A)[] + @test copy(z) ≈ π.^A + end + + let y = x.^π + z = exec(y; :x => A)[] + @test copy(z) ≈ A.^π + end + end 
+end # function test_power + ################################################################################ # Run tests ################################################################################ @@ -273,6 +499,11 @@ end test_dot() test_print() test_misc() + test_add() + test_minus() + test_mul() + test_div() + test_power() end end From f8e1938e5096c9522e84f523f4bd2e43db11446e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 13 Nov 2017 13:34:29 +0800 Subject: [PATCH 544/630] Remove Compat and src/compat.jl (#318) --- REQUIRE | 1 - src/MXNet.jl | 2 -- src/compat.jl | 23 ----------------------- src/optimizers/adadelta.jl | 6 +++--- src/optimizers/adagrad.jl | 2 +- src/optimizers/adam.jl | 4 ++-- src/optimizers/nadam.jl | 2 +- src/optimizers/rmsprop.jl | 2 +- 8 files changed, 8 insertions(+), 34 deletions(-) delete mode 100644 src/compat.jl diff --git a/REQUIRE b/REQUIRE index 8c07e3f8b273..22caea9072b7 100644 --- a/REQUIRE +++ b/REQUIRE @@ -1,5 +1,4 @@ julia 0.6 -Compat 0.25.2 Formatting BinDeps JSON diff --git a/src/MXNet.jl b/src/MXNet.jl index 85991473d14c..d80bbd3d3cdd 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -24,8 +24,6 @@ import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm, include("base.jl") -include("compat.jl") - include("context.jl") include("util.jl") diff --git a/src/compat.jl b/src/compat.jl deleted file mode 100644 index 257f1c2863ed..000000000000 --- a/src/compat.jl +++ /dev/null @@ -1,23 +0,0 @@ -# this file contains code used for enabling backward compatibility with 0.5 - -# have to import base dotted operators if in 0.5 - - -# this is for declaring broadcasted functions in 0.5 -# TODO this macro should be removed when 0.5 support is dropped -macro compatdot(fblock) - return esc(fblock) - @capture(fblock, function Base.broadcast(::typeof(op_), args__) - body_ - end) - opdot = Symbol(string('.',op)) - esc(quote - function $opdot($(args...)) - $body - end - end) -end - -macro compatmul(expr1, expr2) - 
esc(:(broadcast(*, $expr1, $expr2))) -end diff --git a/src/optimizers/adadelta.jl b/src/optimizers/adadelta.jl index f15ff93c7beb..3915d036496a 100644 --- a/src/optimizers/adadelta.jl +++ b/src/optimizers/adadelta.jl @@ -78,14 +78,14 @@ function update(self :: AdaDelta, index :: Int, weight :: NDArray, # Update state.acc as in RMSProp @inplace state.acc .*= self.opts.rho - @inplace state.acc .+= (1 - self.opts.rho) * @compatmul(grad, grad) + @inplace state.acc .+= (1 - self.opts.rho) * grad .* grad # Compute update using the "old" state.delta_acc - update = @compatmul(grad, sqrt(state.delta_acc + self.opts.epsilon)) ./ + update = grad .* sqrt(state.delta_acc + self.opts.epsilon) ./ (sqrt(state.acc + self.opts.epsilon)) @inplace weight .+= -lr * update # update state.delta_acc using update @inplace state.delta_acc .*= self.opts.rho - @inplace state.delta_acc .+= (1 - self.opts.rho) * @compatmul(update, update) + @inplace state.delta_acc .+= (1 - self.opts.rho) * update .* update end diff --git a/src/optimizers/adagrad.jl b/src/optimizers/adagrad.jl index d12ca43d15b2..a5eee0bbd102 100644 --- a/src/optimizers/adagrad.jl +++ b/src/optimizers/adagrad.jl @@ -61,6 +61,6 @@ function update(self :: AdaGrad, index :: Int, weight :: NDArray, lr = get_learning_rate(self.opts.lr_scheduler, self.state) grad = normalized_gradient(self.opts, self.state, weight, grad) - @inplace state .+= @compatmul(grad, grad) + @inplace state .+= grad .* grad @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon)) end diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index b8a6e9d385c0..aa1bc90f9f78 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -60,8 +60,8 @@ function update(self :: ADAM, index :: Int, weight :: NDArray, grad :: NDArray, lr = state.current_lr grad = normalized_gradient(self.opts, self.state, weight, grad) - state.mt = self.opts.beta1 * state.mt + (1 - self.opts.beta1) * grad - state.vt = self.opts.beta2 * state.vt + (1 - 
self.opts.beta2) * @compatmul(grad, grad) + state.mt = self.opts.beta1 * state.mt + (1 - self.opts.beta1) .* grad + state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) .* grad .* grad at = sqrt(1.0 - state.beta2Power)/(1.0 - state.beta1Power) diff --git a/src/optimizers/nadam.jl b/src/optimizers/nadam.jl index 3b39c0aaefbe..c3ac18dcc11a 100644 --- a/src/optimizers/nadam.jl +++ b/src/optimizers/nadam.jl @@ -91,7 +91,7 @@ function update(self :: Nadam, index :: Int, weight :: NDArray, mt = state.mt / (1.0 - momentum_next) @inplace state.nt .*= self.opts.beta2 - @inplace state.nt .+= (1.0 - self.opts.beta2) * @compatmul(grad, grad) + @inplace state.nt .+= (1.0 - self.opts.beta2) .* grad .* grad nt = state.nt / (1.0 - state.beta2Power) state.beta2Power *= self.opts.beta2 diff --git a/src/optimizers/rmsprop.jl b/src/optimizers/rmsprop.jl index 274e510f6cfe..8afed0adc81d 100644 --- a/src/optimizers/rmsprop.jl +++ b/src/optimizers/rmsprop.jl @@ -65,7 +65,7 @@ function update(self :: RMSProp, index :: Int, weight :: NDArray, grad = normalized_gradient(self.opts, self.state, weight, grad) @inplace state .*= self.opts.rho - @inplace state .+= (1 - self.opts.rho) * @compatmul(grad, grad) + @inplace state .+= (1 - self.opts.rho) * grad .* grad @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon)) end From a19fc93859b17882615e45b7d23d02fc579cd688 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 14 Nov 2017 01:15:16 +0800 Subject: [PATCH 545/630] test: replace `reldiff` with `isapprox` (#321) * test/ndarray: replace `reldiff` with `isapprox` * test/symbolic-node: replace `reldiff` with `isapprox` * test/operator: replace `reldiff` with `isapprox` * test/io: replace `reldiff` with `isapprox` * test: remove `reldiff` --- test/common.jl | 6 -- test/unittest/bind.jl | 2 +- test/unittest/io.jl | 14 +-- test/unittest/ndarray.jl | 169 ++++++++++++++++----------------- test/unittest/operator.jl | 7 +- test/unittest/symbolic-node.jl | 15 +-- 6 files changed, 
104 insertions(+), 109 deletions(-) diff --git a/test/common.jl b/test/common.jl index 53c9c251ef06..5854fc6659d0 100644 --- a/test/common.jl +++ b/test/common.jl @@ -1,12 +1,6 @@ ################################################################################ # Common models used in testing ################################################################################ -function reldiff(a, b) - diff = sum(abs.(a .- b)) - norm = sum(abs.(a)) - return diff / (norm + 1e-10) -end - function rand_dims(max_ndim=6) tuple(rand(1:10, rand(1:max_ndim))...) end diff --git a/test/unittest/bind.jl b/test/unittest/bind.jl index 77be97745926..3bac43f2345c 100644 --- a/test/unittest/bind.jl +++ b/test/unittest/bind.jl @@ -2,7 +2,7 @@ module TestBind using MXNet using Base.Test -using ..Main: rand_dims, reldiff +using ..Main: rand_dims ################################################################################ # Test Implementations diff --git a/test/unittest/io.jl b/test/unittest/io.jl index 88d77bd3c16b..f8e558613c99 100644 --- a/test/unittest/io.jl +++ b/test/unittest/io.jl @@ -1,8 +1,9 @@ module TestIO + using MXNet using Base.Test -using ..Main: rand_dims, reldiff +using ..Main: rand_dims function test_mnist() info("IO::MNIST") @@ -64,7 +65,7 @@ function test_arrays_impl(data::Vector, label::Vector, provider::mx.ArrayDataPro data_get = mx.get_data(provider, batch) for (d_real, d_get) in zip(data_batch, data_get) - @test reldiff(d_real, copy(d_get)[[1:n for n in size(d_real)]...]) < 1e-6 + @test d_real ≈ copy(d_get)[[1:n for n in size(d_real)]...] 
@test mx.count_samples(provider, batch) == size(d_real)[end] end end @@ -97,7 +98,7 @@ function test_arrays_shuffle() sample_count = 15 batch_size = 4 - data = rand(1, sample_count) + data = rand(mx.MX_float, 1, sample_count) label = collect(1:sample_count) provider = mx.ArrayDataProvider(data, :index => label, batch_size=batch_size, shuffle=true) @@ -107,14 +108,15 @@ function test_arrays_shuffle() for (idx, batch) in zip(idx_all, provider) data_batch = mx.get(provider, batch, :data) label_batch = mx.get(provider, batch, :index) - ns_batch = mx.count_samples(provider, batch) - data_got[idx:idx+ns_batch-1] = copy(data_batch)[1:ns_batch] + ns_batch = mx.count_samples(provider, batch) + data_got[idx:idx+ns_batch-1] = copy(data_batch)[1:ns_batch] label_got[idx:idx+ns_batch-1] = copy(label_batch)[1:ns_batch] end @test label_got != label @test sort(label_got) == label - @test reldiff(data_got, data[:,Int[label_got...]]) < 1e-6 + @test size(data_got) == size(data[:, Int[label_got...]]) + @test data_got ≈ data[:, Int[label_got...]] end @testset "IO Test" begin diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 81093376ad2d..0e1837bdd5a3 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -1,8 +1,9 @@ module TestNDArray + using MXNet using Base.Test -using ..Main: rand_dims, reldiff +using ..Main: rand_dims ################################################################################ # Test Implementations @@ -23,12 +24,12 @@ function test_copy() # copy to NDArray and back array = copy(tensor, mx.cpu()) tensor2 = copy(array) - @test reldiff(tensor, tensor2) < 1e-6 + @test tensor ≈ tensor2 # copy between NDArray array2 = copy(array, mx.cpu()) tensor2 = copy(array2) - @test reldiff(tensor, tensor2) < 1e-6 + @test tensor ≈ tensor2 end function test_deepcopy() @@ -43,44 +44,43 @@ end function test_assign() dims = rand_dims() tensor = rand(mx.MX_float, dims) - thresh = 1e-3 info("NDArray::assign::dims = $dims") # Julia Array -> NDArray 
assignment array = mx.empty(size(tensor)) array[:]= tensor - @test reldiff(tensor, copy(array)) < thresh + @test tensor ≈ copy(array) array2 = mx.zeros(size(tensor)) - @test reldiff(zeros(size(tensor)), copy(array2)) < thresh + @test zeros(size(tensor)) ≈ copy(array2) array3 = mx.zeros(Float16, size(tensor)) - @test reldiff(zeros(Float16, size(tensor)), copy(array2)) < thresh + @test zeros(Float16, size(tensor)) ≈ copy(array2) # scalar -> NDArray assignment scalar = rand() array2[:] = scalar - @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < thresh + @test zeros(size(tensor)) + scalar ≈ copy(array2) scalar = rand(Float16) array2[:] = scalar - @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < thresh + @test zeros(size(tensor)) + scalar ≈ copy(array2) scalar = rand(Float64) array2[:] = scalar array3[:] = scalar - @test reldiff(zeros(size(tensor))+scalar, copy(array2)) < thresh - @test reldiff(zeros(Float16,size(tensor))+scalar, copy(array3)) < thresh + @test zeros(size(tensor)) + scalar ≈ copy(array2) + @test zeros(Float16, size(tensor)) + scalar ≈ copy(array3) # NDArray -> NDArray assignment array[:] = array2 - @test reldiff(zeros(size(tensor))+scalar, copy(array)) < thresh + @test zeros(size(tensor)) + scalar ≈ copy(array) end function test_slice() - array = mx.zeros((2,4)) - array[2:3] = ones(2,2) + array = mx.zeros((2, 4)) + array[2:3] = ones(2, 2) @test copy(array) == [0 1 1 0; 0 1 1 0] @test copy(mx.slice(array, 2:3)) == [1 1; 1 1] end @@ -152,48 +152,47 @@ function test_plus() t1, a1 = rand_tensors(dims) t2, a2 = rand_tensors(dims) t3, a3 = rand_tensors(dims) - thresh = 1e-6 info("NDArray::plus::dims = $dims") - @test reldiff(t1+t2, copy(a1+a2)) < thresh - @test reldiff(t1.+t2, copy(a1.+a2)) < thresh + @test t1 + t2 ≈ copy(a1 + a2) + @test t1 .+ t2 ≈ copy(a1 .+ a2) - @test reldiff(t1+t2+t3, copy(a1+a2+a3)) < thresh + @test t1 + t2 + t3 ≈ copy(a1 + a2 + a3) # test inplace += operation a0 = a1 # keep a reference to a1 @mx.inplace a1 += a2 # 
perform inplace += @test a0 == a1 # make sure they are still the same object - @test reldiff(copy(a0), copy(a1)) < thresh - @test reldiff(copy(a1), t1+t2) < thresh + @test copy(a0) ≈ copy(a1) + @test copy(a1) ≈ t1 + t2 # test scalar scalar = rand() - @test reldiff(t3 + scalar, copy(a3 + scalar)) < thresh - @test reldiff(t2+scalar+t3, copy(a2+scalar+a3)) < thresh + @test t3 + scalar ≈ copy(a3 + scalar) + @test t2 + scalar + t3 ≈ copy(a2 + scalar + a3) # test small and large scalar t4 = zeros(Float32, dims) a4 = copy(t4, mx.cpu()) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t4 + scalar_small, copy(a4 .+ scalar_small)) < thresh - @test reldiff(t4 + scalar_large, copy(a4 .+ scalar_large)) < thresh + @test t4 + scalar_small ≈ copy(a4 .+ scalar_small) + @test t4 + scalar_large ≈ copy(a4 .+ scalar_large) t5 = zeros(Float64, dims) a5 = copy(t5, mx.cpu()) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t5 + scalar_small, copy(a5 .+ scalar_small)) < thresh - @test reldiff(t5 + scalar_large, copy(a5 .+ scalar_large)) < thresh + @test t5 + scalar_small ≈ copy(a5 .+ scalar_small) + @test t5 + scalar_large ≈ copy(a5 .+ scalar_large) t6 = zeros(Float16, dims) a6 = copy(t6, mx.cpu()) scalar_small = Float16(1e-5) scalar_large = Float16(1e4) - @test reldiff(t6 + scalar_small, copy(a6 .+ scalar_small)) < 1e-1 - @test reldiff(t6 + scalar_large, copy(a6 .+ scalar_large)) < 1e-1 + @test t6 + scalar_small ≈ copy(a6 .+ scalar_small) + @test t6 + scalar_large ≈ copy(a6 .+ scalar_large) let x = mx.NDArray([1 2; 3 4]), y = mx.NDArray([1 1; 1 1]) @test copy(42 .+ x) == [43 44; 45 46] @@ -206,51 +205,51 @@ function test_minus() dims = rand_dims() t1, a1 = rand_tensors(dims) t2, a2 = rand_tensors(dims) - thresh = 1e-6 info("NDArray::minus::dims = $dims") - @test reldiff(t1-t2, copy(a1-a2)) < thresh - @test reldiff(t1.-t2, copy(a1.-a2)) < thresh + @test t1 - t2 ≈ copy(a1 - a2) + @test t1 .- t2 ≈ copy(a1 .- a2) - @test reldiff(-t1, copy(-a1)) < thresh + @test -t1 ≈ copy(-a1) # 
make sure the negation is not in-place, so a1 is not changed after previous # statement is executed - @test reldiff(t1, copy(a1)) < thresh + @test t1 ≈ copy(a1) # test inplace -= operation a0 = a1 # keep a reference to a1 @mx.inplace a1 -= a2 # perform inplace -= @test a0 == a1 # make sure they are still the same object - @test reldiff(copy(a0), copy(a1)) < thresh - @test reldiff(copy(a1), t1-t2) < thresh + @test a0.handle == a1.handle + @test copy(a0) ≈ copy(a1) + @test copy(a1) ≈ t1 - t2 # test scalar scalar = rand() - @test reldiff(t2 - scalar, copy(a2 - scalar)) < thresh + @test t2 - scalar ≈ copy(a2 - scalar) # test small and large scalar t4 = zeros(Float32, dims) a4 = copy(t4, mx.cpu()) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t4 - scalar_small, copy(a4 .- scalar_small)) < thresh - @test reldiff(t4 - scalar_large, copy(a4 .- scalar_large)) < thresh + @test t4 - scalar_small ≈ copy(a4 .- scalar_small) + @test t4 - scalar_large ≈ copy(a4 .- scalar_large) t5 = zeros(Float64, dims) a5 = copy(t5, mx.cpu()) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t5 - scalar_small, copy(a5 .- scalar_small)) < thresh - @test reldiff(t5 - scalar_large, copy(a5 .- scalar_large)) < thresh + @test t5 - scalar_small ≈ copy(a5 .- scalar_small) + @test t5 - scalar_large ≈ copy(a5 .- scalar_large) t6 = zeros(Float16, dims) a6 = copy(t6, mx.cpu()) scalar_small = Float16(1e-5) scalar_large = Float16(1e4) - @test reldiff(t6 - scalar_small, copy(a6 .- scalar_small)) < 1e-1 - @test reldiff(t6 - scalar_large, copy(a6 .- scalar_large)) < 1e-1 + @test t6 - scalar_small ≈ copy(a6 .- scalar_small) + @test t6 - scalar_large ≈ copy(a6 .- scalar_large) end function test_mul() @@ -258,39 +257,39 @@ function test_mul() t1, a1 = rand_tensors(dims) t2, a2 = rand_tensors(dims) t3, a3 = rand_tensors(dims) - thresh = 1e-6 info("NDArray::mul::dims = $dims") - @test reldiff(t1.*t2, copy(a1.*a2)) < thresh + @test t1 .* t2 ≈ copy(a1.*a2) # test inplace .*= operation a0 = a1 # keep a 
reference to a1 @mx.inplace a1 .*= a2 # perform inplace .*= @test a0 == a1 # make sure they are still the same object - @test reldiff(copy(a0), copy(a1)) < thresh - @test reldiff(copy(a1), t1.*t2) < thresh + @test a0.handle == a1.handle + @test copy(a0) ≈ copy(a1) + @test copy(a1) ≈ t1 .* t2 # test scalar scalar = mx.MX_float(rand()) - @test reldiff(t3 * scalar, copy(a3 .* scalar)) < thresh + @test t3 * scalar ≈ copy(a3 .* scalar) # test small and large scalar t4, a4 = rand_tensors(Float32, dims) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t4 * scalar_small, copy(a4 .* scalar_small)) < thresh - @test reldiff(t4 * scalar_large, copy(a4 .* scalar_large)) < thresh + @test t4 * scalar_small ≈ copy(a4 .* scalar_small) + @test t4 * scalar_large ≈ copy(a4 .* scalar_large) t5, a5 = rand_tensors(Float64, dims) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t5 * scalar_small, copy(a5 .* scalar_small)) < thresh - @test reldiff(t5 * scalar_large, copy(a5 .* scalar_large)) < thresh + @test t5 * scalar_small ≈ copy(a5 .* scalar_small) + @test t5 * scalar_large ≈ copy(a5 .* scalar_large) t6, a6 = rand_tensors(Float16, dims) scalar_small = Float16(1e-5) - @test reldiff(t6 * scalar_small, copy(a6 .* scalar_small)) < 1e-1 + @test t6 * scalar_small ≈ copy(a6 .* scalar_small) info("NDArray::mul::matrix multiplication") let x = mx.NDArray([1. 
2]) @@ -309,41 +308,41 @@ function test_div() dims = rand_dims() t1, a1 = rand_tensors(dims) t2, a2 = rand_tensors(dims) - thresh = 1e-6 info("NDArray::div::dims = $dims") t2 .+= 2 # avoid numerical instability @mx.inplace a2 .+= 2 - @test reldiff(t1 ./ t2, copy(a1 ./ a2)) < thresh + @test t1 ./ t2 ≈ copy(a1 ./ a2) # test inplace -= operation a0 = a1 # keep a reference to a2 @mx.inplace a1 ./= a2 # perform inplace ./= @test a0 == a1 # make sure they are still the same object - @test reldiff(copy(a0), copy(a1)) < thresh - @test reldiff(copy(a1), t1 ./ t2) < thresh + @test a0.handle == a1.handle + @test copy(a0) ≈ copy(a1) + @test copy(a1) ≈ t1 ./ t2 # test scalar scalar = rand() + 2 - @test reldiff(t2 ./ scalar, copy(a2 ./ scalar)) < thresh + @test t2 ./ scalar ≈ copy(a2 ./ scalar) # test small and large scalar t4, a4 = rand_tensors(Float32, dims) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t4 ./ scalar_small, copy(a4 ./ scalar_small)) < thresh - @test reldiff(t4 ./ scalar_large, copy(a4 ./ scalar_large)) < thresh + @test t4 ./ scalar_small ≈ copy(a4 ./ scalar_small) + @test t4 ./ scalar_large ≈ copy(a4 ./ scalar_large) t5, a5 = rand_tensors(Float64, dims) scalar_small = 1e-8 scalar_large = 1e8 - @test reldiff(t5 ./ scalar_small, copy(a5 ./ scalar_small)) < thresh - @test reldiff(t5 ./ scalar_large, copy(a5 ./ scalar_large)) < thresh + @test t5 ./ scalar_small ≈ copy(a5 ./ scalar_small) + @test t5 ./ scalar_large ≈ copy(a5 ./ scalar_large) t6, a6 = rand_tensors(Float16, dims) scalar_large = 1e4 - @test reldiff(t6 ./ scalar_large, copy(a6 ./ scalar_large)) < 1e-1 + @test t6 ./ scalar_large ≈ copy(a6 ./ scalar_large) end @@ -373,7 +372,7 @@ function test_rdiv() info("NDarray::rdiv::Float32") let x = 1 ./ mx.NDArray(Float32[1 2; 3 4]) y = 1 ./ Float32[1 2; 3 4] - @test reldiff(copy(x), y) < 1e8 + @test copy(x) ≈ y end end # function test_rdiv @@ -390,7 +389,7 @@ function test_gd() @mx.inplace aw += -lr * (ag + wd * aw) tw += -lr * (tg + wd * tw) - @test 
reldiff(copy(aw), tw) < 1e-6 + @test copy(aw) ≈ tw end @@ -404,9 +403,9 @@ function test_saveload() j_array, nd_array = rand_tensors(dims) mx.save(fname, nd_array) data = mx.load(fname, mx.NDArray) - @test isa(data, Vector{mx.NDArray}) + @test data isa Vector{mx.NDArray} @test length(data) == 1 - @test reldiff(copy(data[1]), j_array) < 1e-6 + @test copy(data[1]) ≈ j_array # save and load N arrays of different shape arrays = [rand_tensors(rand_dims()) for i = 1:n_arrays] @@ -416,7 +415,7 @@ function test_saveload() @test isa(data, Vector{mx.NDArray}) @test length(data) == n_arrays for i = 1:n_arrays - @test reldiff(copy(data[i]), arrays[i][1]) < 1e-6 + @test copy(data[i]) ≈ arrays[i][1] end # save and load dictionary of ndarrays @@ -424,10 +423,10 @@ function test_saveload() dict = Dict([(n, v) for (n,v) in zip(names, nd_arrays)]) mx.save(fname, dict) data = mx.load(fname, mx.NDArray) - @test isa(data, Dict{Symbol, mx.NDArray}) + @test data isa Dict{Symbol, mx.NDArray} @test length(data) == n_arrays for i = 1:n_arrays - @test reldiff(copy(data[names[i]]), arrays[i][1]) < 1e-6 + @test copy(data[names[i]]) ≈ arrays[i][1] end rm(fname) @@ -443,14 +442,13 @@ function test_clip() clipped = mx.clip(nd_array, a_min=clip_down, a_max=clip_up) # make sure the original array is not modified - @test reldiff(copy(nd_array), j_array) < 1e-6 + @test copy(nd_array) ≈ j_array @test all(clip_down .<= copy(clipped) .<= clip_up) end function test_power() info("NDArray::power") - thresh = 1e8 info("NDArray::power::Int::x.^n") let x = mx.NDArray([1 2; 3 4]) @@ -491,9 +489,9 @@ function test_power() @test copy(x.^2) == Float32[1 4; 9 16] @test copy(x.^3) == Float32[1 8; 27 64] - @test reldiff(copy(x.^-1), A.^-1) < thresh - @test reldiff(copy(x.^1.1), A.^1.1) < thresh - @test reldiff(copy(x.^2.9), A.^2.9) < thresh + @test copy(x.^-1) ≈ A.^-1 + @test copy(x.^1.1) ≈ A.^1.1 + @test copy(x.^2.9) ≈ A.^2.9 end info("NDArray::power::Float32::n.^x") @@ -504,8 +502,8 @@ function test_power() @test 
copy(2.^x) == Float32[2 4; 8 16] @test copy(3.^x) == Float32[3 9; 27 81] - @test reldiff(copy(1.1.^x), 1.1.^A) < thresh - @test reldiff(copy(2.9.^x), 2.9.^A) < thresh + @test copy(1.1.^x) ≈ 1.1.^A + @test copy(2.9.^x) ≈ 2.9.^A end info("NDArray::power::Float32::x.^y") @@ -541,28 +539,28 @@ function test_sqrt() j_array, nd_array = rand_tensors(dims) sqrt_ed = sqrt(nd_array) - @test reldiff(copy(sqrt_ed), sqrt.(j_array)) < 1e-6 + @test copy(sqrt_ed) ≈ sqrt.(j_array) end function test_nd_as_jl() - dims = (2,3) + dims = (2, 3) info("NDArray::nd_as_jl::dims = $dims") x = mx.zeros(dims) + 5 y = mx.ones(dims) z = mx.zeros(dims) - @mx.nd_as_jl ro=x rw=(y,z) begin + @mx.nd_as_jl ro=x rw=(y, z) begin for i = 1:length(z) z[i] = x[i] end - z[:,1] = y[:,1] + z[:, 1] = y[:, 1] y[:] = 0 end - @test reldiff(copy(y), 0) < 1e-6 - @test reldiff(copy(z)[:,1], 1) < 1e-6 - @test reldiff(copy(z)[:,2:end], copy(x)[:,2:end]) < 1e-6 + @test sum(copy(y)) == 0 + @test sum(copy(z)[:, 1]) == 2 + @test copy(z)[:, 2:end] ≈ copy(x)[:, 2:end] end function test_dot() @@ -679,7 +677,6 @@ end function test_fill() info("NDArray::fill") - thresh = 1e8 let x = mx.fill(42, 2, 3, 4) @test eltype(x) == Int @@ -690,7 +687,7 @@ function test_fill() let x = mx.fill(Float32(42), 2, 3, 4) @test eltype(x) == Float32 @test size(x) == (2, 3, 4) - @test reldiff(copy(x), fill(Float32(42), 2, 3, 4)) < thresh + @test copy(x) ≈ fill(Float32(42), 2, 3, 4) end let x = mx.fill(42, (2, 3, 4)) @@ -702,7 +699,7 @@ function test_fill() let x = mx.fill(Float32(42), (2, 3, 4)) @test eltype(x) == Float32 @test size(x) == (2, 3, 4) - @test reldiff(copy(x), fill(Float32(42), 2, 3, 4)) < thresh + @test copy(x) ≈ fill(Float32(42), 2, 3, 4) end info("NDArray::fill!::arr") diff --git a/test/unittest/operator.jl b/test/unittest/operator.jl index 76e51f4403f5..85fc7c6238e9 100644 --- a/test/unittest/operator.jl +++ b/test/unittest/operator.jl @@ -1,8 +1,9 @@ module TestOperator + using MXNet using Base.Test -using ..Main: rand_dims, 
reldiff +using ..Main: rand_dims function test_scalar_op() data = mx.Variable(:data) @@ -19,13 +20,13 @@ function test_scalar_op() out = copy(exec_test.outputs[1]) jl_out1 = (4 - ((1+data_jl+1)*2/5) - 0.2) jl_out = 2 ./ jl_out1 - @test reldiff(copy(out), jl_out) < 1e-6 + @test copy(out) ≈ jl_out out_grad = 2mx.ones(shape) jl_grad = 2copy(out_grad) / 5 jl_grad = 2jl_grad ./ (jl_out1 .^ 2) mx.backward(exec_test, out_grad) - @test reldiff(copy(arr_grad), jl_grad) < 1e-6 + @test copy(arr_grad) ≈ jl_grad end ################################################################################ diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 39eda86b1bb2..baeb92a68e23 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -1,8 +1,9 @@ module TestSymbolicNode + using MXNet using Base.Test -using ..Main: mlp2, mlpchain, reldiff, exec +using ..Main: mlp2, mlpchain, exec ################################################################################ # Test Implementations @@ -233,12 +234,12 @@ function test_dot() y = mx.Variable(:y) z = mx.dot(x, y) z_exec = mx.bind(z, context=mx.cpu(), - args=Dict(:x=>mx.ones((100, 2)), :y=>mx.ones((2, 200)))) + args=Dict(:x => mx.ones((100, 2)), :y => mx.ones((2, 200)))) mx.forward(z_exec) ret = copy(z_exec.outputs[1]) @test size(ret) == (100, 200) - @test reldiff(ret, 2*ones(100, 200)) < 1e-6 + @test ret ≈ 2*ones(100, 200) end function test_print() @@ -253,7 +254,7 @@ function test_misc() # Test for #189 a = mx.Variable("a") b = mx.Variable("b") - symb = mx.ElementWiseSum(a,b) + symb = mx.ElementWiseSum(a, b) end function test_add() @@ -340,7 +341,7 @@ function test_minus() end # function test_minus function test_mul() - info("SymoblicNode::elementwise mul") + info("SymbolicNode::elementwise mul") let x = mx.Variable(:x), A = Float32[1 2; 3 4] let y = exec(x .* 42; :x => A)[] @test size(y) == size(A) @@ -379,7 +380,7 @@ function test_mul() end # function test_mul function 
test_div() - info("SymoblicNode::elementwise div") + info("SymbolicNode::elementwise div") let x = mx.Variable(:x), A = Float32[1 2; 3 4] let y = exec(x ./ 42; :x => A)[] @test size(y) == size(A) @@ -418,7 +419,7 @@ function test_div() end # function test_div function test_power() - info("SymoblicNode::elementwise power") + info("SymbolicNode::elementwise power") let x = mx.Variable(:x), A = Float32[1 2; 3 4] let y = exec(x.^42; :x => A)[] @test size(y) == size(A) From 526857e952963618bc8b6927871cecf1cf24a118 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 14 Nov 2017 02:09:57 +0800 Subject: [PATCH 546/630] docs: address docstring override warning (#320) Address this: ``` julia> using MXNet INFO: Recompiling stale cache file ~/.julia/lib/v0.6/MXNet.ji for module MXNet. WARNING: replacing docs for 'Base.:^ :: Union{}' in module 'MXNet.mx'. ``` --- src/ndarray.jl | 9 +-------- src/symbolic-node.jl | 2 +- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 3e4cff5591d5..0b0e01a40e64 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -741,14 +741,7 @@ broadcast_(::typeof(/), x::Real, y::NDArray) = import Base: ^ -""" - .^(x::NDArray, y::NDArray) - .^(x::NDArray, s::Real) - .^(s::Real, x::NDArray) - -Elementwise power of `NDArray`. -""" -^ +# document of `.^` is merged into SymbolicNode's broadcast_(::typeof(^), x::NDArray, y::NDArray) = _power(x, y) broadcast_(::typeof(^), x::NDArray, s::Real) = _power_scalar(x, scalar=s) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 72eb60b3a8c0..bbe2d1874846 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -579,7 +579,7 @@ import Base: ^ """ .^(x, y) -Elementwise power of `SymbolicNode`. +Elementwise power of `SymbolicNode` and `NDArray`. Operating with `Real` is available. 
""" ^ From 80711865a2d721c9fe8d571a5cbc0a393eeb94d5 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 14 Nov 2017 02:14:10 +0800 Subject: [PATCH 547/630] travis: enable 'build branch updates' on master/stable only (#319) --- .travis.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.travis.yml b/.travis.yml index 734de9e1eff9..a91994db2fc1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,6 +9,12 @@ julia: - 0.6 # - nightly 0.6 supports depends on #170 +branches: + only: + - master + - stable + - /^v\d+\.\d+(\.\d+)?(-\S*)?$/ # for tagging + # dependent apt packages addons: apt: From e4b6c0ebc202789afc62826d224a5cc9a5d42db1 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 14 Nov 2017 02:17:25 +0800 Subject: [PATCH 548/630] docs: update examples in overview (#317) * docs: update examples in overview close #286 * docs: add Makefile --- docs/Makefile | 3 + docs/src/index.md | 32 +++++- docs/src/user-guide/install.md | 16 +-- docs/src/user-guide/overview.md | 192 +++++++++++++++----------------- 4 files changed, 128 insertions(+), 115 deletions(-) create mode 100644 docs/Makefile diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 000000000000..d1cadf2e487b --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,3 @@ +all: + julia --color=yes ./make.jl + mkdocs build diff --git a/docs/src/index.md b/docs/src/index.md index a41b77478631..b6a51fc162ad 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -6,8 +6,10 @@ computing and state-of-art deep learning to Julia. Some highlight of features include: -* Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. -* Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. +* Efficient tensor/matrix computation across multiple devices, + including multiple CPUs, GPUs and distributed server nodes. +* Flexible symbolic manipulation to composite and construct + state-of-the-art deep learning models. 
For more details, see documentation below. Please also checkout the [examples](https://github.com/dmlc/MXNet.jl/tree/master/examples) directory. @@ -15,19 +17,39 @@ For more details, see documentation below. Please also checkout the ## Tutorials ```@contents -Pages = ["tutorial/mnist.md", "tutorial/char-lstm.md"] +Pages = [ + "tutorial/mnist.md", + "tutorial/char-lstm.md", +] Depth = 2 ``` ## User's Guide ```@contents -Pages = ["user-guide/install.md", "user-guide/overview.md", "user-guide/faq.md"] +Pages = [ + "user-guide/install.md", + "user-guide/overview.md", + "user-guide/faq.md", +] Depth = 2 ``` ## API Documentation ```@contents -Pages = ["api/context.md", "api/model.md", "api/initializers.md", "api/optimizers.md", "api/callbacks.md", "api/metric.md", "api/io.md", "api/ndarray.md", "api/symbolic-node.md", "api/nn-factory.md", "api/executor.md", "api/visualize.md"] +Pages = [ + "api/context.md", + "api/ndarray.md", + "api/symbolic-node.md", + "api/model.md", + "api/initializers.md", + "api/optimizers.md", + "api/callbacks.md", + "api/metric.md", + "api/io.md", + "api/nn-factory.md", + "api/executor.md", + "api/visualize.md", +] ``` diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md index a585b4409c6d..30ed65d48511 100644 --- a/docs/src/user-guide/install.md +++ b/docs/src/user-guide/install.md @@ -10,7 +10,7 @@ To install MXNet.jl, simply type Pkg.add("MXNet") ``` -in the Julia REPL. Or to use the latest git version of MXNet.jl, use the +In the Julia REPL. Or to use the latest git version of MXNet.jl, use the following command instead ```julia @@ -27,9 +27,9 @@ to point the build-process in the right direction. If the automatic cuda detection fails you can also set `CUDA_HOME` to override the process. To control which version of libmxnet will be compiled, you can use the `MXNET_COMMIT` variable to point to either a version tag (e.g. `v0.10.0`), a -branch name (e.g. `master`) or a specific commit hash (e.g. `a0b1c2d3`). 
+branch name (e.g. `master`) or a specific commit hash (e.g. `a0b1c2d3`). -The libmxnet source is downloaded to `Pkg.dir("MXNet")/deps/src/mxnet`. +The libmxnet source is downloaded to `Pkg.dir("MXNet", "deps", "src", "mxnet")`. The automatic build is using default configurations, with OpenCV disabled. If the compilation failed due to unresolved dependency, or if you want to customize the build, you can compile and @@ -39,12 +39,12 @@ Manual Compilation ------------------ It is possible to compile libmxnet separately and point MXNet.jl to a -the existing library in case automatic compilation fails due to -unresolved dependencies in an un-standard environment; Or when one want -to work with a seperate, maybe customized libmxnet. +existing library in case automatic compilation fails due to +unresolved dependencies in an non-standard environment; Or when one want +to work with a separate, maybe customized libmxnet. To build libmxnet, please refer to [the installation guide of -libmxnet](http://mxnet.readthedocs.org/en/latest/build.html). After +libmxnet](https://mxnet.incubator.apache.org/install/index.html). After successfully installing libmxnet, set the `MXNET_HOME` *environment variable* to the location of libmxnet. In other words, the compiled `libmxnet.so` should be found in `$MXNET_HOME/lib`. 
@@ -65,7 +65,7 @@ Basically, MXNet.jl will search `libmxnet.so` or `libmxnet.dll` in the following paths (and in that order): - `$MXNET_HOME/lib`: customized libmxnet builds -- `Pkg.dir("MXNet")/deps/usr/lib`: automatic builds +- `Pkg.dir("MXNet", "deps", "usr", "lib")`: automatic builds - Any system wide library search path Note that MXNet.jl can not load `libmxnet.so` even if it is on one of diff --git a/docs/src/user-guide/overview.md b/docs/src/user-guide/overview.md index 9a7d8e514894..d1948220a3cf 100644 --- a/docs/src/user-guide/overview.md +++ b/docs/src/user-guide/overview.md @@ -1,8 +1,6 @@ -Overview -======== +# Overview -MXNet.jl Namespace ------------------- +## MXNet.jl Namespace Most the functions and types in MXNet.jl are organized in a flat namespace. Because many some functions are conflicting with existing @@ -11,13 +9,32 @@ convention of accessing the MXNet.jl interface is the to use the `mx.` prefix explicitly: ```julia -using MXNet - -x = mx.zeros(2,3) # MXNet NDArray -y = zeros(eltype(x), size(x)) # Julia Array -copy!(y, x) # Overloaded function in Julia Base -z = mx.ones(size(x), mx.gpu()) # MXNet NDArray on GPU -mx.copy!(z, y) # Same as copy!(z, y) +julia> using MXNet + +julia> x = mx.zeros(2,3) # MXNet NDArray +2×3 mx.NDArray{Float32} @ CPU0: + 0.0 0.0 0.0 + 0.0 0.0 0.0 + +julia> y = zeros(eltype(x), size(x)) # Julia Array +2×3 Array{Float32,2}: + 0.0 0.0 0.0 + 0.0 0.0 0.0 + +julia> copy!(y, x) # Overloaded function in Julia Base +2×3 Array{Float32,2}: + 0.0 0.0 0.0 + 0.0 0.0 0.0 + +julia> z = mx.ones(size(x), mx.gpu()) # MXNet NDArray on GPU +2×3 mx.NDArray{Float32} @ GPU0: + 1.0 1.0 1.0 + 1.0 1.0 1.0 + +julia> mx.copy!(z, y) # Same as copy!(z, y) +2×3 mx.NDArray{Float32} @ GPU0: + 0.0 0.0 0.0 + 0.0 0.0 0.0 ``` Note functions like `size`, `copy!` that is extensively overloaded for @@ -26,24 +43,23 @@ various types works out of the box. 
But functions like `zeros` and prefer, the `mx.` prefix can be used explicitly for all MXNet.jl functions, including `size` and `copy!` as shown in the last line. -Low Level Interface -------------------- +## Low Level Interface -### NDArrays +### `NDArray` -NDArray is the basic building blocks of the actual computations in +`NDArray` is the basic building blocks of the actual computations in MXNet. It is like a Julia `Array` object, with some important differences listed here: - The actual data could live on different `Context` (e.g. GPUs). For some contexts, iterating into the elements one by one is very slow, - thus indexing into NDArray is not supported in general. The easiest + thus indexing into NDArray is not recommanded in general. The easiest way to inspect the contents of an NDArray is to use the `copy` function to copy the contents as a Julia `Array`. -- Operations on NDArray (including basic arithmetics and neural +- Operations on `NDArray` (including basic arithmetics and neural network related operators) are executed in parallel with automatic dependency tracking to ensure correctness. -- There is no generics in NDArray, the `eltype` is always +- There is no generics in `NDArray`, the `eltype` is always `mx.MX_float`. Because for applications in machine learning, single precision floating point numbers are typical a best choice balancing between precision, speed and portability. Also since libmxnet is @@ -52,14 +68,14 @@ differences listed here: While most of the computation is hidden in libmxnet by operators corresponding to various neural network layers. Getting familiar with -the NDArray API is useful for implementing `Optimizer` or customized +the `NDArray` API is useful for implementing `Optimizer` or customized operators in Julia directly. 
-The followings are common ways to create NDArray objects: +The followings are common ways to create `NDArray` objects: - `mx.empty(shape[, context])`: create on uninitialized array of a given shape on a specific device. For example, - ` mx.empty(2,3)`, `mx.((2,3), mx.gpu(2)) `. + `mx.empty(2,3)`, `mx.((2,3), mx.gpu(2))`. - `mx.zeros(shape[, context])` and `mx.ones(shape[, context])`: similar to the Julia's built-in `zeros` and `ones`. - `mx.copy(jl_arr, context)`: copy the contents of a Julia `Array` to @@ -80,11 +96,13 @@ println(copy(a)) ``` A slice is a sub-region sharing the same memory with the original -NDArray object. A slice is always a contiguous piece of memory, so only +`NDArray` object. A slice is always a contiguous piece of memory, so only slicing on the *last* dimension is supported. The example above also -shows a way to set the contents of an NDArray. +shows a way to set the contents of an `NDArray`. -```julia +```@repl +using MXNet +mx.srand(42) a = mx.empty(2,3) a[:] = 0.5 # set all elements to a scalar a[:] = rand(size(a)) # set contents with a Julia Array @@ -101,39 +119,37 @@ a = b ``` does **not** mean copying the contents of `b` to `a`. Instead, it just -make the variable `a` pointing to a new object, which is `b`. Similarly, -inplace arithmetics does not work as expected: +make the variable `a` pointing to a new object, which is `b`. +Similarly, inplace arithmetics does not work as expected: -```julia +```@repl inplace-macro +using MXNet a = mx.ones(2) r = a # keep a reference to a b = mx.ones(2) a += b # translates to a = a + b -println(copy(a)) -# => Float32[2.0f0,2.0f0] -println(copy(r)) -# => Float32[1.0f0,1.0f0] +a +r ``` As we can see, `a` has expected value, but instead of inplace updating, -a new NDArray is created and `a` is set to point to this new object. If +a new `NDArray` is created and `a` is set to point to this new object. If we look at `r`, which still reference to the old `a`, its content has not changed. 
There is currently no way in Julia to overload the operators like `+=` to get customized behavior. -Instead, you will need to write `a[:] = a+b`, or if you want *real* +Instead, you will need to write `a[:] = a + b`, or if you want *real* inplace `+=` operation, MXNet.jl provides a simple macro `@mx.inplace`: -```julia +```@repl inplace-macro @mx.inplace a += b macroexpand(:(@mx.inplace a += b)) -# => :(MXNet.mx.add_to!(a,b)) ``` As we can see, it translate the `+=` operator to an explicit `add_to!` function call, which invokes into libmxnet to add the contents of `b` into `a` directly. For example, the following is the update rule in the -SGD `Optimizer` (both `grad` and `weight` are NDArray objects): +`SGD Optimizer` (both `grad` and `weight` are `NDArray` objects): ```julia @inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) @@ -142,10 +158,10 @@ SGD `Optimizer` (both `grad` and `weight` are NDArray objects): Note there is no much magic in `mx.inplace`: it only does a shallow translation. In the SGD update rule example above, the computation like scaling the gradient by `grad_scale` and adding the weight decay all -create temporary NDArray objects. To mitigate this issue, libmxnet has a +create temporary `NDArray` objects. To mitigate this issue, libmxnet has a customized memory allocator designed specifically to handle this kind of situations. The following snippet does a simple benchmark on allocating -temp NDArray vs. pre-allocating: +temp `NDArray` vs. pre-allocating: ```julia using Benchmark @@ -224,24 +240,24 @@ println(copy(a)) # 2.0 2.0 2.0] ``` -Intermediate Level Interface ----------------------------- +## Intermediate Level Interface ### Symbols and Composition The way we build deep learning models in MXNet.jl is to use the powerful symbolic composition system. 
It is like [Theano](http://deeplearning.net/software/theano/), except that we -avoided long expression compiliation time by providing *larger* neural +avoided long expression compilation time by providing *larger* neural network related building blocks to guarantee computation performance. See also [this note](http://mxnet.readthedocs.org/en/latest/program_model.html) for the design and trade-off of the MXNet symbolic composition system. -The basic type is `mx.Symbol`. The following is a trivial example of +The basic type is `mx.SymbolicNode`. The following is a trivial example of composing two symbols with the `+` operation. -```julia +```@repl +using MXNet A = mx.Variable(:A) B = mx.Variable(:B) C = A + B @@ -253,12 +269,13 @@ be realized by recursive composition. For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of 128 units and a ReLU activation function. -```julia +```@repl fcnet +using MXNet net = mx.Variable(:data) -net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) -net = mx.Activation(data=net, name=:relu1, act_type=:relu) -net = mx.FullyConnected(data=net, name=:fc2, num_hidden=64) -net = mx.Softmax(data=net, name=:out) +net = mx.FullyConnected(net, name=:fc1, num_hidden=128) +net = mx.Activation(net, name=:relu1, act_type=:relu) +net = mx.FullyConnected(net, name=:fc2, num_hidden=64) +net = mx.SoftmaxOutput(net, name=:out) ``` Each time we take the previous symbol, and compose with an operation. @@ -284,55 +301,33 @@ the networks, while *parameters* are typically trainable *weights*, When composing symbols, their arguments accumulates. 
We can list all the arguments by -```julia -julia> mx.list_arguments(net) -6-element Array{Symbol,1}: - :data # Input data, name from the first data variable - :fc1_weight # Weights of the fully connected layer named :fc1 - :fc1_bias # Bias of the layer :fc1 - :fc2_weight # Weights of the layer :fc2 - :fc2_bias # Bias of the layer :fc2 - :out_label # Input label, required by the softmax layer named :out +```@repl fcnet +mx.list_arguments(net) ``` Note the names of the arguments are generated according to the provided name for each layer. We can also specify those names explicitly: -```julia +```@repl +using MXNet net = mx.Variable(:data) w = mx.Variable(:myweight) -net = mx.FullyConnected(data=data, weight=w, name=:fc1, num_hidden=128) +net = mx.FullyConnected(data, weight=w, name=:fc1, num_hidden=128) mx.list_arguments(net) -# => -# 3-element Array{Symbol,1}: -# :data -# :myweight -# :fc1_bias ``` -The simple fact is that a `Variable` is just a placeholder `mx.Symbol`. +The simple fact is that a `Variable` is just a placeholder `mx.SymbolicNode`. In composition, we can use arbitrary symbols for arguments. For example: -```julia +```@repl +using MXNet net = mx.Variable(:data) -net = mx.FullyConnected(data=net, name=:fc1, num_hidden=128) +net = mx.FullyConnected(net, name=:fc1, num_hidden=128) net2 = mx.Variable(:data2) -net2 = mx.FullyConnected(data=net2, name=:net2, num_hidden=128) +net2 = mx.FullyConnected(net2, name=:net2, num_hidden=128) mx.list_arguments(net2) -# => -# 3-element Array{Symbol,1}: -# :data2 -# :net2_weight -# :net2_bias composed_net = net2(data2=net, name=:composed) mx.list_arguments(composed_net) -# => -# 5-element Array{Symbol,1}: -# :data -# :fc1_weight -# :fc1_bias -# :net2_weight -# :net2_bias ``` Note we use a composed symbol, `net` as the argument `data2` for `net2` @@ -347,9 +342,10 @@ symbol could be inferred automatically. 
For example, given the input shape, and some hyper-parameters like `num_hidden`, the shapes for the weights and bias in a neural network could be inferred. -```julia +```@repl infer-shape +using MXNet net = mx.Variable(:data) -net = mx.FullyConnected(data=net, name=:fc1, num_hidden=10) +net = mx.FullyConnected(net, name=:fc1, num_hidden=10) arg_shapes, out_shapes, aux_shapes = mx.infer_shape(net, data=(10, 64)) ``` @@ -357,19 +353,15 @@ The returned shapes corresponds to arguments with the same order as returned by `mx.list_arguments`. The `out_shapes` are shapes for outputs, and `aux_shapes` can be safely ignored for now. -```julia -for (n,s) in zip(mx.list_arguments(net), arg_shapes) - println("$n => $s") +```@repl infer-shape +for (n, s) in zip(mx.list_arguments(net), arg_shapes) + println("$n\t=> $s") end -# => -# data => (10,64) -# fc1_weight => (10,10) -# fc1_bias => (10,) -for (n,s) in zip(mx.list_outputs(net), out_shapes) - println("$n => $s") +``` +```@repl infer-shape +for (n, s) in zip(mx.list_outputs(net), out_shapes) + println("$n\t=> $s") end -# => -# fc1_output => (10,64) ``` ### Binding and Executing @@ -381,21 +373,18 @@ A context describes the computation devices (CPUs, GPUs, etc.) and an executor will carry out the computation (forward/backward) specified in the corresponding symbolic composition. -```julia +```@repl +using MXNet A = mx.Variable(:A) B = mx.Variable(:B) C = A .* B a = mx.ones(3) * 4 b = mx.ones(3) * 2 -c_exec = mx.bind(C, context=mx.cpu(), args=Dict(:A => a, :B => b)) +c_exec = mx.bind(C, context=mx.cpu(), args=Dict(:A => a, :B => b)); mx.forward(c_exec) +c_exec.outputs[1] copy(c_exec.outputs[1]) # copy turns NDArray into Julia Array -# => -# 3-element Array{Float32,1}: -# 8.0 -# 8.0 -# 8.0 ``` For neural networks, it is easier to use `simple_bind`. By providing the @@ -406,7 +395,6 @@ the binding and executing steps are hidden under the `Model` interface. 
**TODO** Provide pointers to model tutorial and further details about binding and symbolic API. -High Level Interface --------------------- +## High Level Interface The high level interface include model training and prediction API, etc. From a51d9b5ebadaf57d3f54df908e02f663e4935808 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 15 Nov 2017 00:09:41 +0800 Subject: [PATCH 549/630] docs: fix more example rendering in overview (#322) --- docs/src/user-guide/overview.md | 43 +++++++++++++++++++-------------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/docs/src/user-guide/overview.md b/docs/src/user-guide/overview.md index d1948220a3cf..a767643a3735 100644 --- a/docs/src/user-guide/overview.md +++ b/docs/src/user-guide/overview.md @@ -85,14 +85,12 @@ Most of the convenient functions like `size`, `length`, `ndims`, `eltype` on array objects should work out-of-the-box. Although indexing is not supported, it is possible to take *slices*: -```julia +```@repl +using MXNet a = mx.ones(2,3) b = mx.slice(a, 1:2) b[:] = 2 -println(copy(a)) -# => -# Float32[2.0 2.0 1.0 -# 2.0 2.0 1.0] +a ``` A slice is a sub-region sharing the same memory with the original @@ -226,18 +224,19 @@ push. The following example shows how to create a local `KVStore`, initialize a value and then pull it back. -```julia +```@setup kv +using MXNet +``` + +```@example kv kv = mx.KVStore(:local) -shape = (2,3) +shape = (2, 3) key = 3 -mx.init!(kv, key, mx.ones(shape)*2) +mx.init!(kv, key, mx.ones(shape) * 2) a = mx.empty(shape) mx.pull!(kv, key, a) # pull value into a -println(copy(a)) -# => -# Float32[2.0 2.0 2.0 -# 2.0 2.0 2.0] +a ``` ## Intermediate Level Interface @@ -256,26 +255,34 @@ design and trade-off of the MXNet symbolic composition system. The basic type is `mx.SymbolicNode`. The following is a trivial example of composing two symbols with the `+` operation. 
-```@repl +```@setup sym1 using MXNet +``` + +```@example sym1 A = mx.Variable(:A) B = mx.Variable(:B) C = A + B +print(C) # debug printing ``` -We get a new *symbol* by composing existing *symbols* by some +We get a new `SymbolicNode` by composing existing `SymbolicNode`s by some *operations*. A hierarchical architecture of a deep neural network could be realized by recursive composition. For example, the following code snippet shows a simple 2-layer MLP construction, using a hidden layer of -128 units and a ReLU activation function. +128 units and a `ReLU` activation function. -```@repl fcnet +```@setup fcnet using MXNet +``` + +```@example fcnet net = mx.Variable(:data) net = mx.FullyConnected(net, name=:fc1, num_hidden=128) net = mx.Activation(net, name=:relu1, act_type=:relu) net = mx.FullyConnected(net, name=:fc2, num_hidden=64) net = mx.SoftmaxOutput(net, name=:out) +print(net) # debug printing ``` Each time we take the previous symbol, and compose with an operation. @@ -301,7 +308,7 @@ the networks, while *parameters* are typically trainable *weights*, When composing symbols, their arguments accumulates. We can list all the arguments by -```@repl fcnet +```@example fcnet mx.list_arguments(net) ``` @@ -312,7 +319,7 @@ name for each layer. 
We can also specify those names explicitly: using MXNet net = mx.Variable(:data) w = mx.Variable(:myweight) -net = mx.FullyConnected(data, weight=w, name=:fc1, num_hidden=128) +net = mx.FullyConnected(net, weight=w, name=:fc1, num_hidden=128) mx.list_arguments(net) ``` From 73b856bcb17f83a189aaee65f22ae0be001cc34c Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 16 Nov 2017 02:53:00 +0800 Subject: [PATCH 550/630] Update README badge (#324) [ci skip] --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 624b04206a48..c6600cdc9fd5 100644 --- a/README.md +++ b/README.md @@ -4,9 +4,10 @@ [![Windows Build](https://ci.appveyor.com/api/projects/status/re90njols2th2ide?svg=true)](https://ci.appveyor.com/project/pluskid/mxnet-jl) [![codecov.io](https://codecov.io/github/dmlc/MXNet.jl/coverage.svg?branch=master)](https://codecov.io/github/dmlc/MXNet.jl?branch=master) [![](https://img.shields.io/badge/docs-latest-blue.svg)](https://dmlc.github.io/MXNet.jl/latest) -[![MXNet](http://pkg.julialang.org/badges/MXNet_0.4.svg)](http://pkg.julialang.org/?pkg=MXNet) +[![](https://img.shields.io/badge/docs-stable-blue.svg)](https://dmlc.github.io/MXNet.jl/stable) +[![MXNet](http://pkg.julialang.org/badges/MXNet_0.6.svg)](http://pkg.julialang.org/?pkg=MXNet) [![License](http://dmlc.github.io/img/apache2.svg)](LICENSE.md) -[![Join the chat at https://gitter.im/dmlc/mxnet](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/dmlc/mxnet?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![Join the chat at https://gitter.im/dmlc/mxnet](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/dmlc/mxnet) MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. 
Some highlight of its features include: From 1dfb5b3693a51cccf3f78c5a037abf64eba67ffc Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 16 Nov 2017 08:48:17 +0800 Subject: [PATCH 551/630] Cut a release for mxnet v0.12.0 (#304) * prepare for releasing * build: fix windows build * update NEWS for #314 * update NEWS for #312 * update NEWS for #276 --- NEWS.md | 99 ++++++++++++++++++++++++++++++++++++++------------- deps/build.jl | 20 ++++++----- 2 files changed, 86 insertions(+), 33 deletions(-) diff --git a/NEWS.md b/NEWS.md index fcd48dc3f81e..c481c0598820 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,19 +1,31 @@ # v0.3.0 (TBD) -* Drop 0.5 support. (#TBD) +* Update `libmxnet` to v0.12.0. + (See https://github.com/apache/incubator-mxnet/releases/tag/0.12.0) + +* Drop 0.5 support. ([#300][300]) ## New API -* `deepcopy` for NDArray (#273) -* `scalar ./ NDArray` is available now. (#292) -* `fill` and `fill!` for NDArray (#TBD) +### `SymbolicNode` + +* Debugging print support. ([#276][276]) + +### `NDArray` + +* `deepcopy` for `NDArray` ([#273][273]) + +* `scalar ./ NDArray` is available now. ([#292][292]) + +* `fill` and `fill!` for `NDArray`. ([#297][297], [#311][311]) + An API correspond to Python's `mx.nd.full()` * `fill(x, dims, ctx=cpu())` * `fill(x, dims...)` * `fill!(arr::NDArray, x)` -* Matrix (2D NDArray) multiplication is available now. (#TBD) +* Matrix (2D `NDArray`) multiplication is available now. ([#300][300]) ```julia julia> x @@ -26,7 +38,8 @@ 2.0 4.0 ``` -* `NDArray` `getindex`/`setindex!` linear indexing support and `first` for extracting scalar value. (#TBD) +* `NDArray` `getindex`/`setindex!` linear indexing support and `first` for + extracting scalar value. ([#294][294]) ```julia julia> x = mx.zeros(2, 5) @@ -45,61 +58,99 @@ julia> y[] # this is available, also 42.0f0 ``` -* Elementwise power of `NDArray`. (#293) +* Elementwise power of `NDArray`. ([#293][293]) + * `x.^2` * `2.^x` * `x.^y` * where `x` and `y` are `NDArray`s. 
-* Elementwise power of irrational and `NDArray` (#TBD) +* Elementwise power of irrational and `NDArray`. ([#310][310]) + * `e.^x` * `x.^e` * `π.^x` ## API Changes -* `reshape` of NDArray shares the same interface with Base (#272). - * `reshape(NDArray, dim; reverse=false)` - * `reshape(NDArray, dim...; reverse=false)` - * `Reshape` deprecated. +### `SymbolicNode` -* `reshape` of SymbolicNode shares the same interface with Base - and additional keyword argument (#279). +* `reshape` of `SymbolicNode` shares the same interface with Base + and additional keyword argument. ([#279][279]) * `reshape(SymbolicNode, dim; reverse=false, name)` * `reshape(SymbolicNode, dim...; reverse=false, name)` - * `Reshape` deprecated. + * `Reshape` is deprecated. + +* `mx.forward(x)` will return `x.outputs` now. ([#312][312]) -* `srand!` deprecated, please use `srand` (#282) +### `NDArray` -* `mean` and `sum` of NDArray share the same interface with Base - and fix the `axis` indexing (#TBD). +* `reshape` of `NDArray` shares the same interface with Base. ([#272][272]) + + * `reshape(NDArray, dim; reverse=false)` + * `reshape(NDArray, dim...; reverse=false)` + * `Reshape` is deprecated. + +* `srand!` deprecated, please use `srand`. ([#282][282]) + +* `mean` and `sum` of `NDArray` share the same interface with Base + and fix the `axis` indexing. ([#303][303]) * This is a breaking change; no deprecated warning. * Before: `mean(arr, axis=0)` * After: `mean(arr, 1)` -* `max` and `min` of NDArray renamed to `maximum` and `minimum` and share the - same interface with Base. The `axis` indexing is fixed, also. (#TBD) +* `max` and `min` of `NDArray` renamed to `maximum` and `minimum` and share the + same interface with Base. The `axis` indexing is fixed, also. ([#303][303]) * This is a breaking change; no deprecated warning. 
* Before: `mx.max(arr, axis=0)` or `mx.max_axis(arr, axis=0)` * After: `maximum(arr, 1)` -* `mx.transpose` for high dimension NDArray has been renamed to `permutedims` - and shares the same interface with Base. (#TBD) +* `mx.transpose` for high dimension `NDArray` has been renamed to `permutedims` + and shares the same interface with Base. ([#303][303]) * This is a breaking changes; no deprecated warning. * Before: `mx.transpose(A, axis=[2, 1, 3])` * After: `permutedims(A, [2, 1, 3])` -* `prod` of `NDArray` shares the same interface with Base and fix - the `axis` indexing. (#TBD). +* `prod` of `NDArray` shares the same interface with Base and fix the `axis` + indexing. ([#303][303]) * This is a breaking change; no deprecated warning. * Before: `prod(arr, axis=-1)` * After: `prod(arr, 1)` +## Bugfix + +* Broadcasting operation on same variable is back. ([#300][300], [#314][314]) + ```julia + x = mx.NDArray(...) + x .* x + ``` + + ```julia + y = mx.Variable(:y) + y .* y + ``` + +[272]: https://github.com/dmlc/MXNet.jl/pull/272 +[273]: https://github.com/dmlc/MXNet.jl/pull/273 +[276]: https://github.com/dmlc/MXNet.jl/pull/276 +[279]: https://github.com/dmlc/MXNet.jl/pull/279 +[282]: https://github.com/dmlc/MXNet.jl/pull/282 +[292]: https://github.com/dmlc/MXNet.jl/pull/292 +[293]: https://github.com/dmlc/MXNet.jl/pull/293 +[294]: https://github.com/dmlc/MXNet.jl/pull/294 +[297]: https://github.com/dmlc/MXNet.jl/pull/297 +[300]: https://github.com/dmlc/MXNet.jl/pull/300 +[303]: https://github.com/dmlc/MXNet.jl/pull/303 +[310]: https://github.com/dmlc/MXNet.jl/pull/310 +[311]: https://github.com/dmlc/MXNet.jl/pull/311 +[312]: https://github.com/dmlc/MXNet.jl/pull/312 +[314]: https://github.com/dmlc/MXNet.jl/pull/314 + # v0.2.2 (2017.05.14) * Updated supported version of MXNet to 0.9.4. * Improved build-system with support for auto-detecting GPU support. 
diff --git a/deps/build.jl b/deps/build.jl index c1463da5c04f..619196628bf5 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -5,8 +5,8 @@ import JSON # First try to detect and load existing libmxnet ################################################################################ libmxnet_detected = false -libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "master") -curr_win = "20170819" +libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "0.12.0") +curr_win = "20171019" # v0.12.0 if haskey(ENV, "MXNET_HOME") info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") @@ -85,7 +85,7 @@ if !libmxnet_detected return end info("Downloading pre-built packages for Windows.") - base_url = "https://github.com/yajiedesign/mxnet/releases/download/weekly_binary_build/prebuildbase_win10_x64_vc14.7z" + base_url = "https://github.com/yajiedesign/mxnet/releases/download/weekly_binary_build_v2/prebuildbase_win10_x64_vc14_v2.7z" if libmxnet_curr_ver == "master" # download_cmd uses powershell 2, but we need powershell 3 to do this @@ -99,15 +99,17 @@ if !libmxnet_detected exe7z = joinpath(JULIA_HOME, "7z.exe") - run(download_cmd(base_url, "mxnet_base.7z")) - run(`$exe7z x mxnet_base.7z -y -ousr`) - run(`cmd /c copy "usr\\3rdparty\\openblas\\bin\\*.dll" "usr\\lib"`) - run(`cmd /c copy "usr\\3rdparty\\opencv\\*.dll" "usr\\lib"`) - run(download_cmd(package_url, "mxnet.7z")) - run(`$exe7z x mxnet.7z -y -ousr`) + # this command will create the dir "usr\\lib" + run(`$exe7z x mxnet.7z build lib -y -ousr`) run(`cmd /c copy "usr\\build\\*.dll" "usr\\lib"`) + run(download_cmd(base_url, "mxnet_base.7z")) + run(`$exe7z x mxnet_base.7z -y -ousr`) + run(`cmd /c copy "usr\\prebuildbase_win10_x64_vc14_v2\\3rdparty\\bin\\*.dll" "usr\\lib"`) + + # testing + run(`cmd /c dir "usr\\lib"`) return end From b56c44ee4564201d99f0d5a8227375d03c093d4e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 16 Nov 2017 21:22:41 +0800 Subject: [PATCH 552/630] Bump upstream to v0.12.1 (#326) --- NEWS.md | 10 +++++++--- 
deps/build.jl | 3 ++- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/NEWS.md b/NEWS.md index c481c0598820..0cea0db56359 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,7 +1,11 @@ -# v0.3.0 (TBD) +# v0.3.0 (2017.11.16) -* Update `libmxnet` to v0.12.0. - (See https://github.com/apache/incubator-mxnet/releases/tag/0.12.0) +* Update `libmxnet` to + * On Windows: v0.12.0. + (See https://github.com/apache/incubator-mxnet/releases/tag/0.12.0) + + * On Linux/macOS: v0.12.1. + (See https://github.com/apache/incubator-mxnet/releases/tag/0.12.1) * Drop 0.5 support. ([#300][300]) diff --git a/deps/build.jl b/deps/build.jl index 619196628bf5..522ceed5ff23 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -5,7 +5,7 @@ import JSON # First try to detect and load existing libmxnet ################################################################################ libmxnet_detected = false -libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "0.12.0") +libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "0.12.1") curr_win = "20171019" # v0.12.0 if haskey(ENV, "MXNET_HOME") @@ -139,6 +139,7 @@ if !libmxnet_detected USE_JULIA_BLAS = true FORCE_LAPACK = true end + info("USE_JULIA_BLAS -> $USE_JULIA_BLAS") blas_name = blas_vendor == :openblas64 ? "openblas" : string(blas_vendor) MSHADOW_LDFLAGS = "MSHADOW_LDFLAGS=-lm $blas_path" From 9efedbccc098a3b1eb1288ba6e51b61d8d3e169b Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 17 Nov 2017 00:30:11 +0800 Subject: [PATCH 553/630] Update releasing note (#327) --- README-DEV.md | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/README-DEV.md b/README-DEV.md index b148fde3d5c7..a1d6fa9012fc 100644 --- a/README-DEV.md +++ b/README-DEV.md @@ -4,7 +4,10 @@ 2. Check out the `stable` branch, merge with `master`. 3. Update `libmxnet_curr_ver` in `deps/build.jl` to the latest commit SHA (or any proper reference). Using `master` here is not good because future changes in libmxnet might break existing Julia packages. 4. Run tests. -5. 
Commit changes and push. -6. Run `Pkg.tag("MXNet")` in Julia. -7. Run `Pkg.publish()`, which will open a browser for making a pull request to METADATA.jl. -8. Edit the [releases page](https://github.com/dmlc/MXNet.jl/releases) to copy the release notes from `NEWS.md` to the newly created release tag. +5. Merge master into stable branch. +6. Tag stable branch: `git tag v1.2.3` +7. Push tag to remote: `git push origin ` +8. Edit the [releases page](https://github.com/dmlc/MXNet.jl/releases) + to copy the release notes from `NEWS.md` to the newly created release tag. +9. Goto https://github.com/JuliaLang/METADATA.jl/pulls + and check `attobot` already make a PR for the release. From a4aab013ddc9ec29a979cbde778189d7bb643a46 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 18 Nov 2017 00:32:50 +0800 Subject: [PATCH 554/630] docs: minor changes in overview (#330) --- docs/src/user-guide/overview.md | 57 ++++++++++++++++----------------- 1 file changed, 28 insertions(+), 29 deletions(-) diff --git a/docs/src/user-guide/overview.md b/docs/src/user-guide/overview.md index a767643a3735..cddeed6bc8c9 100644 --- a/docs/src/user-guide/overview.md +++ b/docs/src/user-guide/overview.md @@ -11,7 +11,7 @@ prefix explicitly: ```julia julia> using MXNet -julia> x = mx.zeros(2,3) # MXNet NDArray +julia> x = mx.zeros(2, 3) # MXNet NDArray 2×3 mx.NDArray{Float32} @ CPU0: 0.0 0.0 0.0 0.0 0.0 0.0 @@ -51,20 +51,20 @@ functions, including `size` and `copy!` as shown in the last line. MXNet. It is like a Julia `Array` object, with some important differences listed here: -- The actual data could live on different `Context` (e.g. GPUs). For - some contexts, iterating into the elements one by one is very slow, - thus indexing into NDArray is not recommanded in general. The easiest - way to inspect the contents of an NDArray is to use the `copy` - function to copy the contents as a Julia `Array`. 
-- Operations on `NDArray` (including basic arithmetics and neural - network related operators) are executed in parallel with automatic - dependency tracking to ensure correctness. -- There is no generics in `NDArray`, the `eltype` is always - `mx.MX_float`. Because for applications in machine learning, single - precision floating point numbers are typical a best choice balancing - between precision, speed and portability. Also since libmxnet is - designed to support multiple languages as front-ends, it is much - simpler to implement with a fixed data type. +- The actual data could live on different `Context` (e.g. GPUs). For + some contexts, iterating into the elements one by one is very slow, + thus indexing into NDArray is not recommanded in general. The easiest + way to inspect the contents of an NDArray is to use the `copy` + function to copy the contents as a Julia `Array`. +- Operations on `NDArray` (including basic arithmetics and neural + network related operators) are executed in parallel with automatic + dependency tracking to ensure correctness. +- There is no generics in `NDArray`, the `eltype` is always + `mx.MX_float`. Because for applications in machine learning, single + precision floating point numbers are typical a best choice balancing + between precision, speed and portability. Also since libmxnet is + designed to support multiple languages as front-ends, it is much + simpler to implement with a fixed data type. While most of the computation is hidden in libmxnet by operators corresponding to various neural network layers. Getting familiar with @@ -73,13 +73,13 @@ operators in Julia directly. The followings are common ways to create `NDArray` objects: -- `mx.empty(shape[, context])`: create on uninitialized array of a - given shape on a specific device. For example, - `mx.empty(2,3)`, `mx.((2,3), mx.gpu(2))`. -- `mx.zeros(shape[, context])` and `mx.ones(shape[, context])`: - similar to the Julia's built-in `zeros` and `ones`. 
-- `mx.copy(jl_arr, context)`: copy the contents of a Julia `Array` to - a specific device. +- `mx.empty(shape[, context])`: create on uninitialized array of a + given shape on a specific device. For example, + `mx.empty(2, 3)`, `mx.((2, 3), mx.gpu(2))`. +- `mx.zeros(shape[, context])` and `mx.ones(shape[, context])`: + similar to the Julia's built-in `zeros` and `ones`. +- `mx.copy(jl_arr, context)`: copy the contents of a Julia `Array` to + a specific device. Most of the convenient functions like `size`, `length`, `ndims`, `eltype` on array objects should work out-of-the-box. Although indexing @@ -87,7 +87,7 @@ is not supported, it is possible to take *slices*: ```@repl using MXNet -a = mx.ones(2,3) +a = mx.ones(2, 3) b = mx.slice(a, 1:2) b[:] = 2 a @@ -101,7 +101,7 @@ shows a way to set the contents of an `NDArray`. ```@repl using MXNet mx.srand(42) -a = mx.empty(2,3) +a = mx.empty(2, 3) a[:] = 0.5 # set all elements to a scalar a[:] = rand(size(a)) # set contents with a Julia Array copy!(a, rand(size(a))) # set value by copying a Julia Array @@ -248,9 +248,8 @@ symbolic composition system. It is like [Theano](http://deeplearning.net/software/theano/), except that we avoided long expression compilation time by providing *larger* neural network related building blocks to guarantee computation performance. -See also [this -note](http://mxnet.readthedocs.org/en/latest/program_model.html) for the -design and trade-off of the MXNet symbolic composition system. +See also [this note](http://mxnet.readthedocs.org/en/latest/program_model.html) +for the design and trade-off of the MXNet symbolic composition system. The basic type is `mx.SymbolicNode`. The following is a trivial example of composing two symbols with the `+` operation. @@ -305,8 +304,8 @@ categories: *inputs* and *parameters*. *inputs* are data and labels for the networks, while *parameters* are typically trainable *weights*, *bias*, *filters*. -When composing symbols, their arguments accumulates. 
We can list all the -arguments by +When composing symbols, their arguments accumulates. +We can list all the arguments by ```@example fcnet mx.list_arguments(net) From 57b50d49691174ffc8fd78bf821d807e277d8f7b Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 18 Nov 2017 02:32:19 +0800 Subject: [PATCH 555/630] build: set upstream version back to master (#328) --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index 522ceed5ff23..a59d9f84a5b3 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -5,7 +5,7 @@ import JSON # First try to detect and load existing libmxnet ################################################################################ libmxnet_detected = false -libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "0.12.1") +libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "master") curr_win = "20171019" # v0.12.0 if haskey(ENV, "MXNET_HOME") From 8564f190728ca3fa50dd2d048667b9ed08cc7a60 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 21 Nov 2017 01:36:06 +0800 Subject: [PATCH 556/630] executor: add Base.show and Base.print (#337) --- src/executor.jl | 38 ++++++++++++++++++++++++++++++-------- src/symbolic-node.jl | 6 +++--- 2 files changed, 33 insertions(+), 11 deletions(-) diff --git a/src/executor.jl b/src/executor.jl index 036c5743b7c0..26da87c69ac9 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -203,20 +203,42 @@ function copy_params_from(self::Executor, arg_params::Dict{Base.Symbol,NDArray}, end +Base.show(io::IO, x::Executor) = + print(io, "mx.", split(string(typeof(x)), '.')[end], " ", x.handle.value) + """ + print([io::IO], x::Executor) + Get a debug string about internal execution plan. Can be used to get an estimated about the memory cost. + ```julia - net = ... # Symbol - dProvider = ... 
# DataProvider - exec = mx.simple_bind(net, mx.cpu(), data=size(dProvider.data_batch[1])) - dbg_str = mx.debug_str(exec) - println(split(ref, ['\\n'])[end-2]) +julia> x = mx.Variable(:x) +MXNet.mx.SymbolicNode x + +julia> exec = mx.bind(x + 1, mx.cpu(), Dict(:x => mx.ones(2,3))) +mx.Executor Ptr{Void} @0x000055c3dee9eb30 + +julia> print(exec) +Symbol Outputs: + output[0]=_plus_scalar0(0) +Variable:x +-------------------- +Op:_plus_scalar, Name=_plus_scalar0 +Inputs: + arg[0]=x(0) version=0 +Attrs: + scalar=1.00000000e+00 +Total 0 MB allocated +Total 11 TempSpace resource requested ``` """ -function debug_str(self :: Executor) - s_ref = Ref{Cstring}() - @mxcall(:MXExecutorPrint, (MX_handle, Ptr{Cstring}), self.handle, s_ref) +Base.print(io::IO, x::Executor) = print(io, debug_str(x)) +Base.print(x::Executor) = print(STDOUT, x) + +function debug_str(x::Executor) + s_ref = Ref{Cstring}(C_NULL) + @mxcall(:MXExecutorPrint, (MX_handle, Ptr{Cstring}), x.handle, s_ref) unsafe_string(s_ref[]) end diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index bbe2d1874846..c9bf7e3b9db7 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -229,16 +229,16 @@ Base.show(io::IO, sym::SymbolicNode) = import Base: print -function print(io :: IO, sym :: SymbolicNode) +function print(io::IO, sym::SymbolicNode) out = Ref{mx.char_p}(C_NULL) @mx.mxcall(:MXSymbolPrint, (mx.MX_SymbolHandle, Ref{mx.char_p}), sym.handle, out) print(io, unsafe_string(out[])) end -print(sym :: SymbolicNode) = print(STDOUT, sym) +print(sym::SymbolicNode) = print(STDOUT, sym) """ - print([io :: IO], sym :: SymbolicNode) + print([io::IO], sym::SymbolicNode) Print the content of symbol, used for debug. 
From ceb7fbfc303ce71e4c029651a4e8909648d67172 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 21 Nov 2017 01:46:51 +0800 Subject: [PATCH 557/630] travis: enable caching for reducing build time (#329) * travis: tweak number of make job on CI * travis: enable cache for libmxnet --- .travis.yml | 10 +++++++--- deps/build.jl | 10 +++++----- deps/cpcblas.sh | 7 +++++++ test/travis/run_test.sh | 8 +++++++- 4 files changed, 26 insertions(+), 9 deletions(-) create mode 100755 deps/cpcblas.sh diff --git a/.travis.yml b/.travis.yml index a91994db2fc1..680df7af481e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,10 +1,13 @@ # Documentation: http://docs.travis-ci.com/user/languages/julia/ sudo: false + language: julia + os: - linux - osx osx_image: xcode8 + julia: - 0.6 # - nightly 0.6 supports depends on #170 @@ -15,6 +18,10 @@ branches: - stable - /^v\d+\.\d+(\.\d+)?(-\S*)?$/ # for tagging +cache: + directories: + - $TRAVIS_BUILD_DIR/deps/src + # dependent apt packages addons: apt: @@ -40,9 +47,6 @@ notifications: email: false script: - # bump the time limit of no ouput - # the `travis_wait` wrapper can be removed once this issue fixed: - # https://github.com/JuliaLang/julia/pull/23601 - ${TRAVIS_DIR}/run_test.sh after_success: diff --git a/deps/build.jl b/deps/build.jl index a59d9f84a5b3..c22af6c7bf89 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -70,7 +70,7 @@ end function get_cpucore() if haskey(ENV, "TRAVIS") # on travis-ci - 4 + 2 else min(Sys.CPU_CORES, 8) end @@ -166,17 +166,17 @@ if !libmxnet_detected end) @build_steps begin ChangeDirectory(_mxdir) - `git submodule deinit --force .` `git fetch` if libmxnet_curr_ver != "master" `git checkout $libmxnet_curr_ver` else - `git merge --ff origin/$libmxnet_curr_ver` + `git checkout origin/$libmxnet_curr_ver` end `git submodule update --init --recursive` `git -C mshadow checkout -- make/mshadow.mk` - `make clean` - `cp ../../cblas.h include/cblas.h` + + # copying on changed, make travis caching happy + 
`../../cpcblas.sh` `sed -i -s "s/MSHADOW_CFLAGS = \(.*\)/MSHADOW_CFLAGS = \1 $ilp64/" mshadow/make/mshadow.mk` diff --git a/deps/cpcblas.sh b/deps/cpcblas.sh new file mode 100755 index 000000000000..1a592186b49c --- /dev/null +++ b/deps/cpcblas.sh @@ -0,0 +1,7 @@ +#!/bin/sh + +# be invoked from build.jl + +set -e + +diff ../../cblas.h include/cblas.h || cp -v ../../cblas.h include/cblas.h diff --git a/test/travis/run_test.sh b/test/travis/run_test.sh index a576c69f6979..bf726eb49157 100755 --- a/test/travis/run_test.sh +++ b/test/travis/run_test.sh @@ -1,4 +1,10 @@ #!/bin/bash +set -e if [[ -a .git/shallow ]]; then git fetch --unshallow; fi -julia -e 'Pkg.clone(pwd()); Pkg.build("MXNet"); Pkg.test("MXNet"; coverage=true)' +julia -e 'Pkg.clone(pwd())' +( + cd `julia -e 'println(Pkg.dir("MXNet", "deps"))'` && + ln -fs $TRAVIS_BUILD_DIR/deps/src +) +julia -e 'Pkg.build("MXNet"); Pkg.test("MXNet"; coverage=true)' From 1a7887c5c1613fbbc676e4d1f5c0fa17318325a1 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 21 Nov 2017 01:48:05 +0800 Subject: [PATCH 558/630] ndarray: add outer constrcutor for AbstractArray (#334) --- src/ndarray.jl | 7 +++---- test/unittest/ndarray.jl | 33 ++++++++++++++++++++++++--------- 2 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 0b0e01a40e64..9aca2f3d9c42 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -98,6 +98,9 @@ mutable struct NDArray end end +NDArray(x::AbstractArray{T}) where {T<:DType} = copy(collect(x), cpu()) +NDArray(x::Array{T}) where {T<:DType} = copy(x, cpu()) + const NDArrayOrReal = Union{NDArray, Real} @unfuse NDArray @@ -107,10 +110,6 @@ function Base.show(io :: IO, arr :: NDArray) Base.showarray(io, try_get_shared(arr, sync=:read), false, header=false) end -function NDArray(data :: Array{T}) where T<:Real - copy(data, cpu()) -end - function Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) Base.unsafe_convert(MX_handle, obj.handle) end diff --git 
a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 0e1837bdd5a3..5217ca80feaa 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -15,6 +15,20 @@ function rand_tensors(::Type{T}, dims::NTuple{N, Int}) where {N, T} return (tensor, array) end +function test_constructor() + info("NDArray::NDArray(x::AbstractArray)") + function check_absarray(x) + y = mx.NDArray(x) + @test ndims(x) == ndims(y) + @test eltype(x) == eltype(y) + @test x[3] == y[3][] + end + + check_absarray(1:10) + check_absarray(1.0:10) +end # function test_constructor + + function test_copy() dims = rand_dims() tensor = rand(mx.MX_float, dims) @@ -87,7 +101,7 @@ end function test_linear_idx() info("NDArray::getindex::linear indexing") - let A = reshape(collect(1:30), 3, 10) + let A = reshape(1:30, 3, 10) x = mx.NDArray(A) @test copy(x) == A @@ -104,7 +118,7 @@ function test_linear_idx() @test_throws BoundsError x[42] end - let A = reshape(collect(1:24), 3, 2, 4) + let A = reshape(1:24, 3, 2, 4) x = mx.NDArray(A) @test copy(x) == A @@ -118,7 +132,7 @@ function test_linear_idx() end info("NDArray::setindex!::linear indexing") - let A = reshape(collect(1:24), 3, 2, 4) + let A = reshape(1:24, 3, 2, 4) x = mx.NDArray(A) @test copy(x) == A @@ -136,7 +150,7 @@ end # function test_linear_idx function test_first() info("NDArray::first") - let A = reshape(collect(1:30), 3, 10) + let A = reshape(1:30, 3, 10) x = mx.NDArray(A) @test x[] == 1 @@ -613,7 +627,7 @@ end function test_sum() info("NDArray::sum") - let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + let A = reshape(1.0:8, 2, 2, 2), X = mx.NDArray(A) @test copy(sum(X))[] == sum(A) @test copy(sum(X, 1)) == sum(A, 1) @test copy(sum(X, 2)) == sum(A, 2) @@ -626,7 +640,7 @@ end function test_mean() info("NDArray::mean") - let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + let A = reshape(1.0:8, 2, 2, 2), X = mx.NDArray(A) @test copy(mean(X))[] == mean(A) @test copy(mean(X, 1)) == mean(A, 1) @test copy(mean(X, 
2)) == mean(A, 2) @@ -639,7 +653,7 @@ end function test_maximum() info("NDArray::maximum") - let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + let A = reshape(1.0:8, 2, 2, 2), X = mx.NDArray(A) @test copy(maximum(X))[] == maximum(A) @test copy(maximum(X, 1)) == maximum(A, 1) @test copy(maximum(X, 2)) == maximum(A, 2) @@ -652,7 +666,7 @@ end function test_minimum() info("NDArray::minimum") - let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + let A = reshape(1.0:8, 2, 2, 2), X = mx.NDArray(A) @test copy(minimum(X))[] == minimum(A) @test copy(minimum(X, 1)) == minimum(A, 1) @test copy(minimum(X, 2)) == minimum(A, 2) @@ -665,7 +679,7 @@ end function test_prod() info("NDArray::prod") - let A = reshape(1.0:8, 2, 2, 2) |> collect, X = mx.NDArray(A) + let A = reshape(1.0:8, 2, 2, 2), X = mx.NDArray(A) @test copy(prod(X))[] == prod(A) @test copy(prod(X, 1)) == prod(A, 1) @test copy(prod(X, 2)) == prod(A, 2) @@ -740,6 +754,7 @@ end # Run tests ################################################################################ @testset "NDArray Test" begin + test_constructor() test_assign() test_copy() test_slice() From 010ea3cc8340a91aba0fc2b9c9bcdf0d49ad9f6c Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 21 Nov 2017 03:12:37 +0800 Subject: [PATCH 559/630] sym: implement get_children (#333) See https://github.com/apache/incubator-mxnet/pull/5141 --- NEWS.md | 8 ++++++++ src/symbolic-node.jl | 29 +++++++++++++++++++++++++++++ test/unittest/symbolic-node.jl | 17 +++++++++++++++++ 3 files changed, 54 insertions(+) diff --git a/NEWS.md b/NEWS.md index 0cea0db56359..0259c6803791 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,3 +1,11 @@ +# v0.4.0 (#TBD) + +## New APIs + +### `SymbolicNode` + +* `mx.get_children` for exploring the graph programmatically. 
(#TBD) + # v0.3.0 (2017.11.16) * Update `libmxnet` to diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index c9bf7e3b9db7..11c2a6fdc8ec 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -119,6 +119,35 @@ function get_internals(self :: SymbolicNode) return SymbolicNode(MX_SymbolHandle(ref_hdr[])) end +""" + get_children(x::SymbolicNode) + +Gets a new grouped `SymbolicNode` whose output contains inputs to output +nodes of the original symbol. + +```julia +julia> x = mx.Variable(:x) +MXNet.mx.SymbolicNode x + +julia> y = mx.Variable(:y) +MXNet.mx.SymbolicNode y + +julia> z = x + y +MXNet.mx.SymbolicNode _plus1 + +julia> a |> mx.get_children |> mx.list_outputs +2-element Array{Symbol,1}: + :x + :y +``` +""" +function get_children(x::SymbolicNode) + hdl = Ref{MX_handle}(C_NULL) + @mxcall(:MXSymbolGetChildren, (MX_handle, Ref{MX_handle}), x, hdl) + sym = hdl[] |> MX_SymbolHandle |> SymbolicNode + isempty(list_outputs(sym)) ? nothing : sym +end + """ get_attr(self :: SymbolicNode, key :: Symbol) diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index baeb92a68e23..766f789c413b 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -53,6 +53,23 @@ function test_internal() @test mx.list_arguments(fc1) == mx.list_arguments(oldfc) end +function test_get_children() + info("SymbolicNode::get_children") + + let x = mx.Variable(:x), y = mx.Variable(:y) + z = x + y + @test length(mx.list_outputs(z)) == 1 + @test length(mx.list_outputs(mx.get_children(z))) == 2 + @test mx.list_outputs(mx.get_children(z)) == [:x, :y] + end + + info("SymbolicNode::get_children::on leaf") + let x = mx.Variable(:x) + @test mx.get_children(x) == nothing + end +end # test_get_children + + function test_compose() info("SymbolicNode::compose") From 1f50a144c90e5034af79468b0c9a9ff234a11b98 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 21 Nov 2017 03:13:02 +0800 Subject: [PATCH 560/630] sym: fix printing of symbol generated 
via get_internals (#332) --- src/symbolic-node.jl | 8 +++++++- test/unittest/symbolic-node.jl | 11 +++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index 11c2a6fdc8ec..d8305eb57c00 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -250,7 +250,13 @@ function get_name(self :: mx.SymbolicNode) success = Ref(0) @mxcall(:MXSymbolGetName, (MX_handle, Ref{char_p}, Ref{Int}), self.handle.value, name, success) @assert success[] != -1 - return Symbol(unsafe_string(name[])) + + str = name[] + if str == C_NULL # e.g. the symbol returned via get_internals + string(self.handle.value) + else + Symbol(unsafe_string(str)) + end end Base.show(io::IO, sym::SymbolicNode) = diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index 766f789c413b..edde78c32541 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -468,7 +468,7 @@ function test_power() end end - info("NDArray::power::e.^x::x.^e") + info("SymbolicNode::power::e.^x::x.^e") let x = mx.Variable(:x), A = [0 0 0; 0 0 0] y = exec(e.^x; :x => A)[] @test copy(y) ≈ ones(A) @@ -486,7 +486,7 @@ function test_power() end end - info("NDArray::power::π.^x::x.^π") + info("SymbolicNode::power::π.^x::x.^π") let x = mx.Variable(:x), A = Float32[1 2; 3 4] let y = π.^x z = exec(y; :x => A)[] @@ -500,6 +500,12 @@ function test_power() end end # function test_power +function test_get_name() + info("SymbolicNode::get_name::with get_internals") + name = mx.get_name(mx.get_internals(mlp2())) # no error + @test contains(name, "Ptr") +end # function test_get_name + ################################################################################ # Run tests ################################################################################ @@ -522,6 +528,7 @@ end # function test_power test_mul() test_div() test_power() + test_get_name() end end From b0556e682dd3e361d4f1242152c27a6c85d144ac Mon Sep 17 00:00:00 2001 
From: Michael Creel Date: Wed, 22 Nov 2017 09:57:12 +0100 Subject: [PATCH 561/630] Update regression-example.jl (#339) Fix the call to scatter plot, in the last line. It is commented out, but if a user uncomments it to see a plot of the fit, the fix is needed. --- examples/regression-example.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/regression-example.jl b/examples/regression-example.jl index 885f031dc658..94a0d3761ba2 100644 --- a/examples/regression-example.jl +++ b/examples/regression-example.jl @@ -57,4 +57,4 @@ mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprov plotprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput) fit = mx.predict(model, plotprovider) println("correlation between fitted values and true regression line: ", cor(vec(fit), vec(ValidationOutput))) -#scatter(ValidationOutput,fit',w = 3, xlabel="true", ylabel="predicted", title="45º line is what we hope for", show=true) +#scatter(ValidationOutput',fit',w = 3, xlabel="true", ylabel="predicted", title="45º line is what we hope for", show=true) From 91a410e6a460eb9ead3955bb7462ffaf12cfead7 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 23 Nov 2017 01:17:50 +0800 Subject: [PATCH 562/630] test: minor style changes for mlp-test (#340) --- examples/mnist/mlp-test.jl | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index 2acedd073dfa..56bd00b6a122 100644 --- a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -2,32 +2,30 @@ # features of MXNet.jl in this example in order to detect regression errors. 
module MNISTTest + using MXNet using Base.Test include("mnist-data.jl") function get_mnist_mlp() - mlp = @mx.chain mx.Variable(:data) => + @mx.chain mx.Variable(:data) => mx.FullyConnected(name=:fc1, num_hidden=128) => mx.Activation(name=:relu1, act_type=:relu) => mx.FullyConnected(name=:fc2, num_hidden=64) => mx.Activation(name=:relu2, act_type=:relu) => mx.FullyConnected(name=:fc3, num_hidden=10) => mx.SoftmaxOutput(name=:softmax) - return mlp end -function get_mnist_data(batch_size=100) - return get_mnist_providers(batch_size) -end +get_mnist_data(batch_size = 100) = get_mnist_providers(batch_size) function mnist_fit_and_predict(optimizer, initializer, n_epoch) mlp = get_mnist_mlp() train_provider, eval_provider = get_mnist_data() # setup model - model = mx.FeedForward(mlp, context=mx.cpu()) + model = mx.FeedForward(mlp, context = mx.cpu()) # fit parameters cp_prefix = "mnist-test-cp" @@ -73,12 +71,25 @@ function mnist_fit_and_predict(optimizer, initializer, n_epoch) end function test_mnist_mlp() + info("MNIST::SGD") @test mnist_fit_and_predict(mx.SGD(lr=0.1, momentum=0.9), mx.UniformInitializer(0.01), 2) > 90 + + info("MNIST::ADAM") @test mnist_fit_and_predict(mx.ADAM(), mx.NormalInitializer(), 2) > 90 + + info("MNIST::AdaGrad") @test mnist_fit_and_predict(mx.AdaGrad(), mx.NormalInitializer(), 2) > 90 + + info("MNIST::AdaDelta") @test mnist_fit_and_predict(mx.AdaDelta(), mx.NormalInitializer(), 2) > 90 + + info("MNIST::AdaMax") @test mnist_fit_and_predict(mx.AdaMax(), mx.NormalInitializer(), 2) > 90 + + info("MNIST::RMSProp") @test mnist_fit_and_predict(mx.RMSProp(), mx.NormalInitializer(), 2) > 90 + + info("MNIST::Nadam") @test mnist_fit_and_predict(mx.Nadam(), mx.NormalInitializer(), 2) > 90 end From f12648245daba2926652862db958e6d8486f1332 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 23 Nov 2017 01:20:25 +0800 Subject: [PATCH 563/630] sym: handy macro `mx.var` for creating `mx.Variable` (#338) --- NEWS.md | 10 ++++++++++ src/symbolic-node.jl | 21 
+++++++++++++++++++++ test/unittest/symbolic-node.jl | 16 ++++++++++++++++ 3 files changed, 47 insertions(+) diff --git a/NEWS.md b/NEWS.md index 0259c6803791..3e5dce1c6eee 100644 --- a/NEWS.md +++ b/NEWS.md @@ -6,6 +6,16 @@ * `mx.get_children` for exploring the graph programmatically. (#TBD) +* A handy macro `@mx.var` for creating `mx.Variable`. (#TBD) + + ```julia + julia> x = @mx.var x + MXNet.mx.SymbolicNode x + + julia> x, y, z = @mx.var x y z + (MXNet.mx.SymbolicNode x, MXNet.mx.SymbolicNode y, MXNet.mx.SymbolicNode z) + ``` + # v0.3.0 (2017.11.16) * Update `libmxnet` to diff --git a/src/symbolic-node.jl b/src/symbolic-node.jl index d8305eb57c00..bb3c97773488 100644 --- a/src/symbolic-node.jl +++ b/src/symbolic-node.jl @@ -347,6 +347,27 @@ function Variable(name :: Union{Symbol, AbstractString}; attrs = Dict()) node end +""" + @var ... + +A handy macro for creating `mx.Variable`. + +```julia +julia> x = @mx.var x +MXNet.mx.SymbolicNode x + +julia> x, y, z = @mx.var x y z +(MXNet.mx.SymbolicNode x, MXNet.mx.SymbolicNode y, MXNet.mx.SymbolicNode z) +``` +""" +macro var(n::Symbol) + Expr(:call, :Variable, QuoteNode(n)) +end + +macro var(names::Symbol...) + Expr(:tuple, map(n -> Expr(:call, :Variable, QuoteNode(n)), names)...) +end + """ Group(nodes :: SymbolicNode...) 
diff --git a/test/unittest/symbolic-node.jl b/test/unittest/symbolic-node.jl index edde78c32541..e3e003d296af 100644 --- a/test/unittest/symbolic-node.jl +++ b/test/unittest/symbolic-node.jl @@ -506,6 +506,21 @@ function test_get_name() @test contains(name, "Ptr") end # function test_get_name +function test_var() + info("SymbolicNode::var") + x = @mx.var x + @test x isa mx.SymbolicNode + + x′ = @mx.var x + @test x.handle != x′.handle + + x, y, z = @mx.var x y z + @test x isa mx.SymbolicNode + @test y isa mx.SymbolicNode + @test z isa mx.SymbolicNode +end # test_var + + ################################################################################ # Run tests ################################################################################ @@ -529,6 +544,7 @@ end # function test_get_name test_div() test_power() test_get_name() + test_var() end end From cb06a9aeb59d0bb64b3f0f17e7c919b7b1f56e0e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 25 Nov 2017 09:38:39 +0800 Subject: [PATCH 564/630] build: bump max number of make jobs to 32 (#343) --- deps/build.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deps/build.jl b/deps/build.jl index c22af6c7bf89..8b4c254cbb39 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -72,7 +72,7 @@ function get_cpucore() if haskey(ENV, "TRAVIS") # on travis-ci 2 else - min(Sys.CPU_CORES, 8) + min(Sys.CPU_CORES, 32) end end From 45d6279f3fcc833308b4a9bb4d1e07a5a2ea8767 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 27 Nov 2017 10:41:31 +0800 Subject: [PATCH 565/630] ndarray: add `Base.show(io, MIME"text/plain")` (#347) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * ndarray: add `Base.show(io, MIME"text/plain")` e.g. 
make Array of NDArray show like this ```julia julia> [mx.zeros(100)] 1-element Array{MXNet.mx.NDArray,1}: NDArray Float32[0.0, 0.0, 0.0 … 0.0, 0.0, 0.0] ``` * test cases --- src/ndarray.jl | 12 +++++++++--- test/unittest/ndarray.jl | 12 +++++++++++- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 9aca2f3d9c42..9cb168ca4ff5 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -105,9 +105,15 @@ const NDArrayOrReal = Union{NDArray, Real} @unfuse NDArray -function Base.show(io :: IO, arr :: NDArray) - println(io, "$(join(size(arr), "×")) mx.NDArray{$(eltype(arr))} @ $(context(arr)):") - Base.showarray(io, try_get_shared(arr, sync=:read), false, header=false) +function Base.show(io::IO, x::NDArray) + print(io, "NDArray ") + Base.showarray(io, try_get_shared(x, sync = :read), header = false) +end + +# for REPL +function Base.show(io::IO, ::MIME{Symbol("text/plain")}, x::NDArray) + println(io, "$(join(size(x), "×")) mx.NDArray{$(eltype(x))} @ $(context(x)):") + Base.showarray(io, try_get_shared(x, sync = :read), false, header = false) end function Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 5217ca80feaa..2c278bb6e201 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -741,13 +741,23 @@ function test_transpose() end function test_show() - let str = sprint(show, mx.NDArray([1 2 3 4])) + info("NDArray::show::REPL") + let str = sprint(show, MIME"text/plain"(), mx.NDArray([1 2 3 4])) @test contains(str, "1×4") @test contains(str, "mx.NDArray") @test contains(str, "Int64") @test contains(str, "CPU") @test match(r"1\s+2\s+3\s+4", str) != nothing end + + info("NDArray::show") + let str = sprint(show, mx.NDArray([1 2 3 4])) + @test str == "NDArray [1 2 3 4]" + end + + let str = sprint(show, mx.zeros(4)) + @test str == "NDArray Float32[0.0, 0.0, 0.0, 0.0]" + end end 
################################################################################ From e0f625a49328cd98a8e11093a4ed8a0981f555ed Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 28 Nov 2017 04:15:32 +0800 Subject: [PATCH 566/630] example: fix batchsize config in regression-example (#342) --- examples/regression-example.jl | 44 ++++++++++++++++++++++++++-------- 1 file changed, 34 insertions(+), 10 deletions(-) diff --git a/examples/regression-example.jl b/examples/regression-example.jl index 94a0d3761ba2..e820d54001ec 100644 --- a/examples/regression-example.jl +++ b/examples/regression-example.jl @@ -13,8 +13,8 @@ generate_inputs(mean, var, size) = rand(MvNormal(mean, var), size) output(data) = sin.(data[1:1,:]).*sin.(data[2:2,:])./(data[1:1,:].*data[2:2,:]) # create training and evaluation data sets -mean=[0.0;0.0] -var=[1.0 0.0;0.0 1.0] +mean=[0.0; 0.0] +var=[1.0 0.0; 0.0 1.0] samplesize = 5000 TrainInput = generate_inputs(mean, var, samplesize) TrainOutput = output(TrainInput) @@ -22,9 +22,22 @@ ValidationInput = generate_inputs(mean, var, samplesize) ValidationOutput = output(ValidationInput) # how to set up data providers using data in memory -batchsize = 100 # can adjust this later, but must be defined now for next line -trainprovider = mx.ArrayDataProvider(:data => TrainInput, batch_size=batchsize, shuffle=true, :label => TrainOutput) -evalprovider = mx.ArrayDataProvider(:data => ValidationInput, batch_size=batchsize, shuffle=true, :label => ValidationOutput) +function data_source(batchsize = 100) + train = mx.ArrayDataProvider( + :data => TrainInput, + :label => TrainOutput, + batch_size = batchsize, + shuffle = true, + ) + valid = mx.ArrayDataProvider( + :data => ValidationInput, + :label => ValidationOutput, + batch_size = batchsize, + shuffle = true, + ) + + train, valid +end # create a two hidden layer MPL: try varying num_hidden, and change tanh to relu, # or add/remove a layer @@ -35,7 +48,7 @@ net = @mx.chain mx.Variable(:data) => 
mx.Activation(act_type=:tanh) => mx.FullyConnected(num_hidden=3) => mx.Activation(act_type=:tanh) => - mx.FullyConnected(num_hidden=1) => + mx.FullyConnected(num_hidden=1) => mx.LinearRegressionOutput(mx.Variable(:label)) # final model definition, don't change, except if using gpu @@ -47,11 +60,22 @@ optimizer = mx.ADAM() # train, reporting loss for training and evaluation sets # initial training with small batch size, to get to a good neighborhood -batchsize = 200 -mx.fit(model, optimizer, initializer=mx.NormalInitializer(0.0,0.1), eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 20) +trainprovider, evalprovider = data_source(#= batchsize =# 200) +mx.fit(model, optimizer, trainprovider, + initializer = mx.NormalInitializer(0.0, 0.1), + eval_metric = mx.MSE{mx.NDArray{Float32,1}}(), + eval_data = evalprovider, + n_epoch = 20, + callbacks = [mx.speedometer()]) # more training with the full sample -batchsize = samplesize -mx.fit(model, optimizer, eval_metric=mx.MSE(), trainprovider, eval_data=evalprovider, n_epoch = 20) +trainprovider, evalprovider = data_source(#= batchsize =# samplesize) +mx.fit(model, optimizer, trainprovider, + initializer = mx.NormalInitializer(0.0, 0.1), + eval_metric = mx.MSE{mx.NDArray{Float32,1}}(), + eval_data = evalprovider, + n_epoch = 500, # previous setting is batchsize = 200, epoch = 20 + # implies we did (5000 / 200) * 20 times update in previous `fit` + callbacks = [mx.speedometer()]) # obtain predictions plotprovider = mx.ArrayDataProvider(:data => ValidationInput, :label => ValidationOutput) From 935eb3594110ab628c81d49e1e1af2d07545df9b Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 28 Nov 2017 04:25:26 +0800 Subject: [PATCH 567/630] ndarray: make _minus type stable (#345) * ndarray: make _minus type stable The current importer `_import_ndarray_functions` provide `_minus(x, y; out = x)` and its return value is rely on keyword argument `out`. 
But Julia cannot (or hard to) do type inference on keyword argument at the moment, so this commit propose a new method `_minus!(x, y)` which modified the first argument, instead of provide a keyword argument. The new method can make type stable. fix #341 * add test cases --- src/ndarray.jl | 30 +++++++++++++++++++++++++----- test/unittest/ndarray.jl | 18 ++++++++++++------ 2 files changed, 37 insertions(+), 11 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 9cb168ca4ff5..9bb74f83f5d2 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -637,7 +637,7 @@ function sub_from!(dst::NDArray, arg::NDArrayOrReal) if isa(arg, Real) _minus_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) else - _minus(dst, arg, out=dst) + _minus!(dst, arg) end end @@ -1037,6 +1037,15 @@ function _autoimport(name::Symbol) end end +function _outexpr(name::Symbol, x #= the first arg of `sig` =#) + if endswith(string(name), "!") # `func!` + Ptr, 1, :([[MX_handle(x.handle)]]), :($x) + else + retexpr = :(NDArray(MX_NDArrayHandle(unsafe_load(hdls_ref[], 1)))) + Ref, 0, :(Ref{Ptr{MX_handle}}(C_NULL)), retexpr + end +end + macro _remap(sig::Expr, imp::Expr) fname = sig.args[1] opname = string(imp.args[1]) @@ -1055,16 +1064,19 @@ macro _remap(sig::Expr, imp::Expr) mxvals = Expr(:vect, map(x -> :(dump_mx_param($(x.args[2]))), mxargs)...) ndhlds = Expr(:vect, map(x -> :($(x).handle), ndin)...) + # handler for `func!` which has side effect on first argument. 
+ T, n_output, hdls_ref, retexpr = _outexpr(fname, sig.args[2].args[1]) + func_body = quote op_handle = _get_cached_libmx_op_handle($opname) - n_output = Ref(Cint(0)) - hdls_ref = Ref{Ptr{MX_handle}}(C_NULL) + n_output = Ref(Cint($n_output)) + hdls_ref = $hdls_ref @mxcall(:MXImperativeInvoke, (MX_handle, Cint, Ptr{MX_handle}, Ref{Cint}, - Ref{Ptr{MX_handle}}, + $T{Ptr{MX_handle}}, Cint, char_pp, char_pp), @@ -1076,7 +1088,7 @@ macro _remap(sig::Expr, imp::Expr) $(length(mxargs)), $mxkeys, $mxvals) - NDArray(MX_NDArrayHandle(unsafe_load(hdls_ref[], 1))) + $retexpr end docstr = " $sig" @@ -1123,6 +1135,13 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap prod(arr::NDArray) prod(arr) @_remap prod(arr::NDArray, dims) prod(arr; axis = 0 .- dims, keepdims = true) +################################################################################ +# remapping to solving type unstablility +################################################################################ + +@_remap _minus(x::NDArray, y::NDArray) _minus(x, y) +@_remap _minus!(x::NDArray, y::NDArray) _minus(x, y) + ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -1248,6 +1267,7 @@ const _op_import_bl = [ # import black list; do not import these funcs "dot", "transpose", "prod", + "_minus", ] macro _import_ndarray_functions() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 2c278bb6e201..47ce9b08957d 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -264,6 +264,12 @@ function test_minus() scalar_large = Float16(1e4) @test t6 - scalar_small ≈ copy(a6 .- scalar_small) @test t6 - scalar_large ≈ copy(a6 .- scalar_large) + + info("NDArray::minus::type stablility") + let x = mx.zeros(dims), y = mx.ones(dims) + @inferred x - y + @inferred x .- y + end end function test_mul() @@ 
-361,29 +367,29 @@ end function test_rdiv() - info("NDarray::rdiv") + info("NDArray::rdiv") - info("NDarray::rdiv::Inf16") + info("NDArray::rdiv::Inf16") let x = 1 ./ mx.zeros(Float16, 4) @test copy(x) == [Inf16, Inf16, Inf16, Inf16] end - info("NDarray::rdiv::Inf32") + info("NDArray::rdiv::Inf32") let x = 1 ./ mx.zeros(Float32, 4) @test copy(x) == [Inf32, Inf32, Inf32, Inf32] end - info("NDarray::rdiv::Inf64") + info("NDArray::rdiv::Inf64") let x = 1 ./ mx.zeros(Float64, 4) @test copy(x) == [Inf64, Inf64, Inf64, Inf64] end - info("NDarray::rdiv::Int") + info("NDArray::rdiv::Int") let x = 1 ./ mx.NDArray([1 2; 3 4]) @test copy(x) == [1 0; 0 0] end - info("NDarray::rdiv::Float32") + info("NDArray::rdiv::Float32") let x = 1 ./ mx.NDArray(Float32[1 2; 3 4]) y = 1 ./ Float32[1 2; 3 4] @test copy(x) ≈ y From 2a5a284099766516bf3b26ce05ded190cef1ef6f Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 28 Nov 2017 04:26:34 +0800 Subject: [PATCH 568/630] model: refine and test cases for FeedForward constructor (#346) --- src/model.jl | 9 ++------- test/unittest/model.jl | 34 ++++++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 7 deletions(-) create mode 100644 test/unittest/model.jl diff --git a/src/model.jl b/src/model.jl index df15e4cace0a..44a2ba755798 100644 --- a/src/model.jl +++ b/src/model.jl @@ -25,6 +25,7 @@ mutable struct FeedForward <: AbstractModel # leave the rest fields undefined FeedForward(arch :: SymbolicNode, ctx :: Vector{Context}) = new(arch, ctx) + FeedForward(arch :: SymbolicNode, ctx :: Context) = new(arch, [ctx]) end """ @@ -53,14 +54,8 @@ end or a list of `Context` objects. In the latter case, data parallelization will be used for training. If no context is provided, the default context `cpu()` will be used. 
""" -function FeedForward(arch :: SymbolicNode; context :: Union{Context, Vector{Context}, Void} = nothing) - if isa(context, Void) - context = [Context(CPU)] - elseif isa(context, Context) - context = [context] - end +FeedForward(arch::SymbolicNode; context::Union{Context,Vector{Context}} = [cpu()]) = FeedForward(arch, context) -end """ init_model(self, initializer; overwrite=false, input_shapes...) diff --git a/test/unittest/model.jl b/test/unittest/model.jl new file mode 100644 index 000000000000..fafda7968e25 --- /dev/null +++ b/test/unittest/model.jl @@ -0,0 +1,34 @@ +module TestModel + +using Base.Test +using MXNet + + +function test_feedforward() + info("Model::FeedForward::constructor") + let x = @mx.var x + m = mx.FeedForward(x) + @assert m.arch === x + @assert length(m.ctx) == 1 + end + + info("Model::FeedForward::constructor::keyword context") + let x = @mx.var x + m = mx.FeedForward(x, context = mx.cpu()) + @assert m.arch === x + @assert length(m.ctx) == 1 + end + + let x = @mx.var x + m = mx.FeedForward(x, context = [mx.cpu(), mx.cpu(1)]) + @assert m.arch === x + @assert length(m.ctx) == 2 + end +end + + +@testset "Model Test" begin + test_feedforward() +end + +end # module TestModel From cb042fd2f870529799b16552d754980e867c498e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 1 Dec 2017 21:18:39 +0800 Subject: [PATCH 569/630] parametric NDArray (#331) * ndarray: add outer constrcutor for AbstractArray * ndarray: refine copy * ndarray: refine copy! * ndarray: refine convert * ndarray: refine add_to! * ndarray: refine sub_from! * ndarray: refine mul_to! * ndarray: refine div_from! * ndarray: refine rdiv_from! 
* ndarray: refine _wait_to_read/_wait_to_write * ndarray: refine is_shared * ndarray: refine save * ndarray: refine dot * ndarray: VecOfNDArray * executor: refine backward * ndarray: refine empty * executor: refine bind --- src/callback.jl | 31 +++-- src/executor.jl | 113 ++++++++-------- src/io.jl | 95 +++++++------ src/kvstore.jl | 36 ++--- src/metric.jl | 96 ++++++------- src/model.jl | 75 ++++++----- src/ndarray.jl | 281 ++++++++++++++++++--------------------- src/util.jl | 14 ++ test/unittest/ndarray.jl | 10 +- test/unittest/util.jl | 25 ++++ 10 files changed, 390 insertions(+), 386 deletions(-) create mode 100644 test/unittest/util.jl diff --git a/src/callback.jl b/src/callback.jl index 289fdd066aa9..16e44fdc632d 100644 --- a/src/callback.jl +++ b/src/callback.jl @@ -48,7 +48,7 @@ end See also [`every_n_epoch`](@ref) and [`speedometer`](@ref). """ -function every_n_batch(callback :: Function, n :: Int; call_on_0 :: Bool = false) +function every_n_batch(callback::Function, n::Int; call_on_0::Bool = false) BatchCallback(n, call_on_0, callback) end function (cb :: BatchCallback)(state :: OptimizationState) @@ -62,7 +62,7 @@ function (cb :: BatchCallback)(state :: OptimizationState) end """ - speedometer(; frequency=50) + speedometer(;frequency=50) Create an `AbstractBatchCallback` that measure the training speed (number of samples processed per second) every k mini-batches. @@ -71,9 +71,9 @@ Create an `AbstractBatchCallback` that measure the training speed * `frequency::Int`: keyword argument, default 50. The frequency (number of min-batches) to measure and report the speed. 
""" -function speedometer(;frequency::Int=50) +function speedometer(;frequency::Int = 50) cl_tic = 0 - every_n_batch(frequency, call_on_0=true) do state :: OptimizationState + every_n_batch(frequency, call_on_0 = true) do state::OptimizationState if state.curr_batch == 0 # reset timer cl_tic = time() @@ -104,10 +104,11 @@ A convenient function to construct a callback that runs every `n` full data-pass See also [`every_n_batch`](@ref). """ -function every_n_epoch(callback :: Function, n :: Int; call_on_0 :: Bool = false) +every_n_epoch(callback::Function, n::Int; call_on_0::Bool = false) = EpochCallback(n, call_on_0, callback) -end -function (cb :: EpochCallback)(model :: Any, state :: OptimizationState, metric :: Vector{Tuple{Base.Symbol, T}}) where T<:Real + +function (cb::EpochCallback)(model::Any, state::OptimizationState, + metric::Vector{Tuple{Symbol, T}}) where T<:Real if state.curr_epoch == 0 if cb.call_on_0 cb.callback(model, state, metric) @@ -124,15 +125,17 @@ Create an `AbstractEpochCallback` that save checkpoints of the model to disk. The checkpoints can be loaded back later on. # Arguments -* `prefix::AbstractString`: the prefix of the filenames to save the model. The model - architecture will be saved to prefix-symbol.json, while the weights will be saved - to prefix-0012.params, for example, for the 12-th epoch. -* `frequency::Int`: keyword argument, default 1. The frequency (measured in epochs) to - save checkpoints. +* `prefix::AbstractString`: the prefix of the filenames to save the model. + The model architecture will be saved to prefix-symbol.json, + while the weights will be saved to prefix-0012.params, + for example, for the 12-th epoch. +* `frequency::Int`: keyword argument, default is 1. + The frequency (measured in epochs) to save checkpoints. * `save_epoch_0::Bool`: keyword argument, default false. Whether we should save a - checkpoint for epoch 0 (model initialized but not seen any data yet). 
+ checkpoint for epoch 0 (model initialized but not seen any data yet). """ -function do_checkpoint(prefix::AbstractString; frequency::Int=1, save_epoch_0=false) +function do_checkpoint(prefix::AbstractString; + frequency::Int = 1, save_epoch_0::Bool = false) mkpath(dirname(prefix)) every_n_epoch(frequency, call_on_0=save_epoch_0) do model, state, metric save_checkpoint(model, prefix, state) diff --git a/src/executor.jl b/src/executor.jl index 26da87c69ac9..cd4a9256eeca 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -8,46 +8,49 @@ be carried out with an executor. mutable struct Executor handle :: MX_ExecutorHandle symbol :: SymbolicNode - arg_arrays :: Vector{NDArray} - grad_arrays :: Vector{Union{Void,NDArray}} - aux_arrays :: Vector{NDArray} - outputs :: Vector{NDArray} - arg_dict :: Dict{Base.Symbol, NDArray} - aux_dict :: Dict{Base.Symbol, NDArray} + arg_arrays :: VecOfNDArray + grad_arrays :: Vector{Union{Void,<:NDArray}} + aux_arrays :: VecOfNDArray + outputs :: VecOfNDArray + arg_dict :: Dict{Symbol} + aux_dict :: Dict{Symbol} end -function Executor(hdr :: MX_ExecutorHandle, symbol :: SymbolicNode, - arg_arrays :: Vector{NDArray}, grad_arrays :: Vector{Union{Void,NDArray}}, - aux_arrays :: Vector{NDArray}) + +function Executor(hdl::MX_ExecutorHandle, sym::SymbolicNode, + arg_arrays::VecOfNDArray, grad_arrays::AbstractVector, + aux_arrays::VecOfNDArray) # get output arrays ref_size = Ref{MX_uint}(0) - ref_hdrs = Ref{Ptr{MX_handle}}(0) + ref_hdls = Ref{Ptr{MX_handle}}(C_NULL) @mxcall(:MXExecutorOutputs, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_handle}}), - hdr, ref_size, ref_hdrs) - out_hdrs = unsafe_wrap(Array, ref_hdrs[], ref_size[]) + hdl, ref_size, ref_hdls) + out_hdrs = unsafe_wrap(Array, ref_hdls[], ref_size[]) out_arrays = [NDArray(MX_NDArrayHandle(x)) for x in out_hdrs] - arg_names = list_arguments(symbol) + arg_names = list_arguments(sym) @assert(length(arg_names) == length(unique(arg_names)), "Duplicated names in arguments: $arg_names") - arg_dict 
= Dict{Base.Symbol,NDArray}(zip(arg_names, arg_arrays)) + arg_dict = Dict(zip(arg_names, arg_arrays)) - aux_names = list_auxiliary_states(symbol) + aux_names = list_auxiliary_states(sym) @assert(length(aux_names) == length(unique(aux_names)), "Duplicated names in auxiliary states: $aux_names") - aux_dict = Dict{Base.Symbol,NDArray}(zip(aux_names, aux_arrays)) + aux_dict = Dict(zip(aux_names, aux_arrays)) - Executor(hdr, symbol, arg_arrays, grad_arrays, aux_arrays, out_arrays, arg_dict, aux_dict) + Executor(hdl, sym, arg_arrays, grad_arrays, aux_arrays, out_arrays, arg_dict, aux_dict) end -function Base.unsafe_convert(::Type{MX_handle}, obj::Executor) +Base.unsafe_convert(::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(MX_handle, obj.handle) -end Base.convert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) Base.cconvert(t::Type{MX_handle}, obj::Executor) = Base.unsafe_convert(t, obj) -function _get_ndarray_inputs(arg_key::AbstractString, args::Vector{NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) +function _get_ndarray_inputs(arg_key::AbstractString, args::VecOfNDArray, + arg_names::Vector{Symbol}, allow_missing::Bool) @assert(length(args) == length(arg_names), "Length of $arg_key does not match number of arguments") return (MX_handle[args...], args) end -function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Base.Symbol,NDArray}, arg_names::Vector{Base.Symbol}, allow_missing::Bool) + +function _get_ndarray_inputs(arg_key::AbstractString, args::Dict{Symbol}, + arg_names::Vector{Symbol}, allow_missing::Bool) args_vec = map(arg_names) do name arr = get(args, name, nothing) if !allow_missing @@ -75,16 +78,16 @@ Create an `Executor` by binding a `SymbolicNode` to concrete `NDArray`. * `ctx::Context`: the context on which the computation should run. * `args`: either a list of `NDArray` or a dictionary of name-array pairs. Concrete arrays for all the inputs in the network architecture. 
The inputs typically include - network parameters (weights, bias, filters, etc.), data and labels. See [`list_arguments`](@ref) - and [`infer_shape`](@ref). -* `args_grad`: -* `aux_states`: -* `grad_req`: + network parameters (weights, bias, filters, etc.), data and labels. + See [`list_arguments`](@ref) and [`infer_shape`](@ref). +* `args_grad`: a `Vector` of `NDArray` or a `Dict` contains `NDArray` +* `aux_states`: a `Vector` of `NDArray` or a `Dict` contains `NDArray` +* `grad_req`: single value, a `Vector` of `GRAD_REQ` or a `Dict{Symbol,GRAD_REQ}` """ -function bind(self :: SymbolicNode, ctx :: Context, args :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}}; - args_grad :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), - aux_states :: Union{Vector{NDArray},Dict{Base.Symbol,NDArray}} = Dict{Base.Symbol,NDArray}(), - grad_req :: Union{GRAD_REQ,Vector{GRAD_REQ},Dict{Base.Symbol,GRAD_REQ}} = GRAD_WRITE) +function bind(self::SymbolicNode, ctx::Context, args; + args_grad = Dict{Symbol,NDArray}(), + aux_states = Dict{Symbol,NDArray}(), + grad_req = GRAD_WRITE) arg_names = list_arguments(self) @@ -97,7 +100,7 @@ function bind(self :: SymbolicNode, ctx :: Context, args :: Union{Vector{NDArray elseif isa(grad_req, Vector{GRAD_REQ}) @assert(length(grad_req) == length(args)) reqs = MX_uint[grad_req...] - elseif isa(grad_req, Dict{Base.Symbol, GRAD_REQ}) + elseif isa(grad_req, Dict{Symbol, GRAD_REQ}) reqs = MX_uint[get(grad_req, name, GRAD_NOP) for name in arg_names] end @@ -111,20 +114,16 @@ function bind(self :: SymbolicNode, ctx :: Context, args :: Union{Vector{NDArray executor = Executor(MX_ExecutorHandle(ref_hdr[]), self, args, args_grad, aux_states) end -function bind(self :: SymbolicNode; kwargs...) + +function bind(x::SymbolicNode; context::Context = cpu(), kwargs...) 
kwargs = Dict(kwargs) @assert(haskey(kwargs, :args), "Must specify args") args = pop!(kwargs, :args) - if haskey(kwargs, :context) - context = pop!(kwargs, :context) - else - context = cpu() - end - bind(self, context, args; kwargs...) + bind(x, context, args; kwargs...) end -function simple_bind(self :: SymbolicNode, ctx :: Context; - grad_req :: Union{GRAD_REQ, Dict{Symbol, GRAD_REQ}}=GRAD_WRITE, +function simple_bind(self::SymbolicNode, ctx::Context; + grad_req::Union{GRAD_REQ,Dict{Symbol,GRAD_REQ}} = GRAD_WRITE, kwargs...) arg_shapes, out_shapes, aux_shapes = infer_shape(self; kwargs...) @assert(!isa(arg_shapes, Void), "Information not enough to perform complete shape inference") @@ -168,21 +167,15 @@ function forward(self::Executor; is_train::Bool = false, kwargs...) self.outputs end -function backward(self :: Executor) - backward(self, NDArray[]) -end -function backward(self :: Executor, out_grad :: NDArray) - backward(self, [out_grad]) -end -function backward(self :: Executor, out_grads :: Vector{NDArray}) - out_grads = MX_handle[out_grads...] 
- @mxcall(:MXExecutorBackward, (MX_handle, MX_uint, Ptr{MX_handle}), self, length(out_grads), out_grads) -end +backward(x::Executor) = backward(x, NDArray[]) +backward(x::Executor, out_grad::NDArray) = backward(x, [out_grad]) +backward(x::Executor, out_grads::VecOfNDArray) = + @mxcall(:MXExecutorBackward, (MX_handle, MX_uint, Ptr{MX_handle}), + x, length(out_grads), MX_handle[out_grads...]) - -function copy_params_from(self::Executor, arg_params::Dict{Base.Symbol,NDArray}, - aux_params::Union{Void,Dict{Base.Symbol,NDArray}}=nothing; - allow_extra_params::Bool=false) +function copy_params_from(self::Executor, arg_params::Dict{Symbol}, + aux_params::Dict{Symbol} = Dict{Symbol,Any}(); + allow_extra_params::Bool = false) for (name, array) in arg_params if haskey(self.arg_dict, name) copy!(self.arg_dict[name], array) @@ -191,13 +184,11 @@ function copy_params_from(self::Executor, arg_params::Dict{Base.Symbol,NDArray}, end end - if !isa(aux_params, Void) - for (name, array) in aux_params - if haskey(self.aux_dict, name) - copy!(self.aux_dict[name], array) - else - @assert(allow_extra_params, "Extra auxiliary state $name not recognized") - end + for (name, array) in aux_params + if haskey(self.aux_dict, name) + copy!(self.aux_dict[name], array) + else + @assert(allow_extra_params, "Extra auxiliary state $name not recognized") end end end diff --git a/src/io.jl b/src/io.jl index 2ba0bf78a584..597ea8a90c6e 100644 --- a/src/io.jl +++ b/src/io.jl @@ -113,24 +113,29 @@ function get_label end A basic subclass of `AbstractDataBatch`, that implement the interface by accessing member fields. 
""" -mutable struct DataBatch <: AbstractDataBatch - data :: Vector{NDArray} - label :: Vector{NDArray} +mutable struct DataBatch{T,S,N,M} <: AbstractDataBatch + data :: Vector{NDArray{T,N}} + label :: Vector{NDArray{S,M}} count :: Int end -count_samples(batch :: DataBatch) = batch.count -get_data(::Provider, batch :: DataBatch) where {Provider<:AbstractDataProvider} = batch.data -get_label(::Provider, batch :: DataBatch) where {Provider<:AbstractDataProvider} = batch.label + +count_samples(batch::DataBatch) = batch.count + +get_data(::Provider, batch::DataBatch) where {Provider<:AbstractDataProvider} = + batch.data + +get_label(::Provider, batch::DataBatch) where {Provider<:AbstractDataProvider} = + batch.label """ SlicedNDArray A alias type of `Tuple{UnitRange{Int},NDArray}`. """ -const SlicedNDArray = Tuple{UnitRange{Int},NDArray} +const SlicedNDArray = Tuple{UnitRange{Int},<:NDArray} function _load_general!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, - targets :: Vector{Vector{SlicedNDArray}}, loader::Function) + targets :: Vector{<:Vector{<:SlicedNDArray}}, loader::Function) data = loader(provider, batch) for (d_src, d_targets) in zip(data, targets) for (slice_idx, d_dst) in d_targets @@ -157,7 +162,7 @@ This utility function is used in data parallelization, where a mini-batch is spl and computed on several different devices. """ function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, - targets :: Vector{Vector{SlicedNDArray}}) + targets :: Vector{<:Vector{<:SlicedNDArray}}) _load_general!(provider, batch, targets, get_data) end @@ -171,16 +176,18 @@ end The same as [`load_data!`](@ref), except that this is for loading labels. 
""" function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, - targets :: Vector{Vector{SlicedNDArray}}) + targets :: Vector{<:Vector{<:SlicedNDArray}}) _load_general!(provider, batch, targets, get_label) end -function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{NDArray}) +function load_data!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, + targets :: Vector{<:NDArray}) for (src, dst) in zip(get_data(provider, batch), targets) copy!(dst, src) end end -function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, targets :: Vector{NDArray}) +function load_label!(provider :: AbstractDataProvider, batch :: AbstractDataBatch, + targets :: Vector{<:NDArray}) for (src, dst) in zip(get_label(provider, batch), targets) copy!(dst, src) end @@ -198,7 +205,7 @@ import Base.get Returns the corresponding data array corresponding to that name. """ -function get(provider :: AbstractDataProvider, batch :: AbstractDataBatch, name :: Base.Symbol) +function get(provider::AbstractDataProvider, batch::AbstractDataBatch, name::Symbol) for (idx, (k, s)) in enumerate(provide_data(provider)) if name == k return get_data(provider, batch)[idx] @@ -216,20 +223,20 @@ end eachbatch(provider::AbstractDataProvider) Allows you to perform operations on data every epoch. This is especially useful -when you need to perform real-time augmentation of the data. +when you need to perform real-time augmentation of the data. # Arguments: * `provider`: an instance of the custom DataProvider type. You must return this instance after modifying its fields. """ -eachbatch(provider :: AbstractDataProvider) = provider +eachbatch(provider::AbstractDataProvider) = provider """ ArrayDataProvider A convenient tool to iterate `NDArray` or Julia `Array`. - + ArrayDataProvider(data[, label]; batch_size, shuffle, data_padding, label_padding) Construct a data provider from `NDArray` or Julia Arrays. 
@@ -252,19 +259,19 @@ Construct a data provider from `NDArray` or Julia Arrays. TODO: remove `data_padding` and `label_padding`, and implement rollover that copies the last or first several training samples to feed the padding. """ -mutable struct ArrayDataProvider <: AbstractDataProvider - data_arrays :: Vector{Array} - data_names :: Vector{Base.Symbol} - label_arrays :: Vector{Array} - label_names :: Vector{Base.Symbol} +mutable struct ArrayDataProvider{T,N} <: AbstractDataProvider + data_arrays :: Vector{Array{T,N}} + data_names :: Vector{Symbol} + label_arrays + label_names :: Vector{Symbol} batch_size :: Int sample_count :: Int shuffle :: Bool data_padding :: MX_float label_padding :: MX_float - data_batch :: Vector{NDArray} - label_batch :: Vector{NDArray} + data_batch + label_batch end # Julia's type system is sometimes very frustrating. You cannot specify a function @@ -273,10 +280,14 @@ end # results, about the parametric type in the Pair{T1,T2} type, thus does not match the # generic Pair type. In general, Int <: Number but Vector{Int} <: Vector{Number} is not # true. So let us just use Any here... 
-function ArrayDataProvider(data::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) - ArrayDataProvider(data, [], batch_size=batch_size, shuffle=shuffle, data_padding=data_padding, label_padding=label_padding) +function ArrayDataProvider(data; batch_size::Int = 0, shuffle::Bool = false, + data_padding::Real = 0, label_padding::Real = 0) + ArrayDataProvider(data, [], batch_size = batch_size, shuffle = shuffle, + data_padding = data_padding, label_padding = label_padding) end -function ArrayDataProvider(data::Any, label::Any; batch_size::Int=0, shuffle::Bool=false, data_padding::Real=0, label_padding::Real=0) + +function ArrayDataProvider(data, label; batch_size::Int = 0, shuffle::Bool = false, + data_padding::Real = 0, label_padding::Real = 0) asarr(arr :: Array{T}) where {T} = convert(Array{MX_float}, arr) asarr(arr :: NDArray) = copy(arr) @@ -349,16 +360,15 @@ function ArrayDataProvider(data::Any, label::Any; batch_size::Int=0, shuffle::Bo end ArrayDataProvider(data_arrays, data_names, label_arrays, label_names, batch_size, - sample_count, shuffle, data_padding, label_padding, data_batch, label_batch) + sample_count, shuffle, MX_float(data_padding), MX_float(label_padding), + data_batch, label_batch) end -function provide_data(provider::ArrayDataProvider) - return collect(zip(provider.data_names, map(size, provider.data_batch))) -end +provide_data(provider::ArrayDataProvider) = + collect(zip(provider.data_names, map(size, provider.data_batch))) -function provide_label(provider::ArrayDataProvider) - return collect(zip(provider.label_names, map(size, provider.label_batch))) -end +provide_label(provider::ArrayDataProvider) = + collect(zip(provider.label_names, map(size, provider.label_batch))) get_batch_size(provider::ArrayDataProvider) = provider.batch_size @@ -366,9 +376,7 @@ struct ArrayDataProviderState <: AbstractDataProviderState curr_idx :: Int end -function Base.eltype(provider :: ArrayDataProvider) - 
ArrayDataProviderState -end +Base.eltype(provider :: ArrayDataProvider) = ArrayDataProviderState function Base.start(provider :: ArrayDataProvider) if provider.shuffle @@ -381,9 +389,8 @@ function Base.start(provider :: ArrayDataProvider) return ArrayDataProviderState(1) end -function Base.done(provider::ArrayDataProvider, state :: ArrayDataProviderState) - return state.curr_idx > provider.sample_count -end +Base.done(provider::ArrayDataProvider, state::ArrayDataProviderState) = + state.curr_idx > provider.sample_count struct ArrayDataBatch <: AbstractDataBatch idx :: UnitRange{Int} @@ -425,8 +432,8 @@ a list of built-in data iterators. """ mutable struct MXDataProvider <: AbstractDataProvider handle :: MX_DataIterHandle - data_shape :: Vector{Tuple{Base.Symbol, Tuple}} - label_shape:: Vector{Tuple{Base.Symbol, Tuple}} + data_shape :: Vector{Tuple{Symbol,Tuple}} + label_shape:: Vector{Tuple{Symbol,Tuple}} batch_size :: Int # those two a auxiliary variables to help avoid calling reset @@ -455,8 +462,8 @@ function _get_label(handle :: MX_DataIterHandle) end function MXDataProvider(handle :: MX_DataIterHandle; - data_name :: Base.Symbol=:data, - label_name :: Union{Base.Symbol,Void}=:softmax_label, + data_name :: Symbol = :data, + label_name :: Union{Symbol,Void} = :softmax_label, kwargs...) 
# for convenience, we ignore the rest keyword arguments # init iterator, load the first batch and get shapes @assert(_iter_next(handle), "Failed to load the first batch in MXDataProvider") @@ -569,7 +576,7 @@ function _define_data_iter_creator(hdr :: MX_handle) isprovider = endswith(string(iter_name), "Iter") signature = _format_signature(Int(ref_narg[]), ref_arg_names) f_desc = " " * string(iter_name) * "(" *signature * ")\n\n" - if isprovider + if isprovider f_desc *= "Can also be called with the alias `$(string(iter_name)[1:end-4] * "Provider")`.\n" end f_desc *= unsafe_string(ref_desc[]) * "\n\n" diff --git a/src/kvstore.jl b/src/kvstore.jl index 1ac56260b401..fa4768cceaf7 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -3,10 +3,10 @@ mutable struct KVStore updater_c :: Ptr{Void} updater :: Function - KVStore(hdr :: MX_KVStoreHandle) = new(hdr, Ptr{Void}(0)) + KVStore(hdr::MX_KVStoreHandle) = new(hdr, Ptr{Void}(0)) end -function KVStore(kv_type::Base.Symbol = :local) +function KVStore(kv_type::Symbol = :local) #@assert(kv_type ∈ [:local]) # TODO: update with allowed types ref_hdr = Ref{MX_handle}(0) @@ -20,7 +20,7 @@ end Base.convert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) Base.cconvert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) -function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) +function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{<:Vector{<:NDArray}}) @assert length(keys) == length(vals) keys_flt = Int[] vals_flt = NDArray[] @@ -31,16 +31,15 @@ function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) return (keys_flt, vals_flt) end -function init!(self :: KVStore, key :: Int, val :: NDArray) - init!(self, [key], [val]) -end -function init!(self :: KVStore, key :: Int, vals :: Vector{NDArray}) - init!(self, Base.ones(Int, length(vals))*key, vals) -end -function init!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}) 
+init!(self::KVStore, key::Int, val::NDArray) = init!(self, [key], [val]) + +init!(self::KVStore, key::Int, vals::Vector{<:NDArray}) = + init!(self, Base.ones(Int, length(vals)) * key, vals) + +init!(self::KVStore, keys::Vector{Int}, vals::Vector{<:Vector{<:NDArray}}) = init!(self, _flatten_kvlist(keys, vals)...) -end -function init!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{NDArray}) + +function init!(self::KVStore, keys::Vector{Int}, vals::Vector{<:NDArray}) @assert length(keys) == length(vals) keys = Cint[keys...] vals = MX_handle[vals...] @@ -52,13 +51,14 @@ import Base.push! function push!(self :: KVStore, key :: Int, val :: NDArray; priority :: Int = 0) push!(self, [key], [val]; priority = priority) end -function push!(self :: KVStore, key :: Int, vals :: Vector{NDArray}; priority :: Int = 0) +function push!(self :: KVStore, key :: Int, vals :: Vector{<:NDArray}; priority :: Int = 0) push!(self, Base.ones(Int, length(vals))*key, vals; priority = priority) end -function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{Vector{NDArray}}; priority::Int=0) +function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{<:Vector{<:NDArray}}; + priority::Int=0) push!(self, _flatten_kvlist(keys, vals)...; priority = priority) end -function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{NDArray}; priority::Int=0) +function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{<:NDArray}; priority::Int=0) @assert length(keys) == length(vals) keys = Cint[keys...] vals = MX_handle[vals...] 
@@ -69,13 +69,13 @@ end function pull!(self :: KVStore, key :: Int, out :: NDArray; priority :: Int = 0) pull!(self, [key], [out]) end -function pull!(self :: KVStore, key :: Int, outs :: Vector{NDArray}; priority :: Int = 0) +function pull!(self :: KVStore, key :: Int, outs :: Vector{<:NDArray}; priority :: Int = 0) pull!(self, Base.ones(Int, length(outs))*key, outs; priority = priority) end -function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{Vector{NDArray}}; priority::Int=0) +function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{<:Vector{<:NDArray}}; priority::Int=0) pull!(self, _flatten_kvlist(keys, outs)...; priority = priority) end -function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{NDArray}; priority::Int=0) +function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{<:NDArray}; priority::Int=0) @assert length(keys) == length(outs) keys = Cint[keys...] outs = MX_handle[outs...] diff --git a/src/metric.jl b/src/metric.jl index db38060c933e..3998af8efb8c 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -14,7 +14,7 @@ abstract type AbstractEvalMetric end hasNDArraySupport(metric) -> Val{true/false} Trait for `_update_single_output` should return `Val{true}() if metric can handle `NDArray` -directly and `Val{false}()i` if requires `Array`. Metric that work with NDArrays can be +directly and `Val{false}()` if requires `Array`. Metric that work with NDArrays can be async, while native Julia arrays require that we copy the output of the network, which is a blocking operation. """ @@ -30,11 +30,12 @@ Update and accumulate metrics. * `labels::Vector{NDArray}`: the labels from the data provider. * `preds::Vector{NDArray}`: the outputs (predictions) of the network. 
""" -function update!(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}) where T <: AbstractEvalMetric +function update!(metric::T, labels::VecOfNDArray, preds::VecOfNDArray) where T <: AbstractEvalMetric _update!(metric, labels, preds, hasNDArraySupport(metric)) end -function _update!(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}, :: Val{true}) where T<: AbstractEvalMetric +function _update!(metric::T, labels::VecOfNDArray, preds::VecOfNDArray, + ::Val{true}) where T<: AbstractEvalMetric if length(labels) != length(preds) Base.warn_once( "The number of labels ($(length(labels))) does not correspond to the\ @@ -45,7 +46,8 @@ function _update!(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArra end end -function _update!(metric :: T, labels :: Vector{NDArray}, preds :: Vector{NDArray}, :: Val{false}) where T<: AbstractEvalMetric +function _update!(metric::T, labels::VecOfNDArray, preds::VecOfNDArray, + ::Val{false}) where T<: AbstractEvalMetric if length(labels) != length(preds) Base.warn_once( "The number of labels ($(length(labels))) does not correspond to the\ @@ -65,9 +67,7 @@ end Reset the accumulation counter. """ -function reset!(metric :: AbstractEvalMetric) - throw(MethodError(reset!, (typeof(metric),))) -end +reset!(metric::AbstractEvalMetric) = throw(MethodError(reset!, (typeof(metric),))) import Base: get @@ -79,9 +79,7 @@ Get the accumulated metrics. Returns `Vector{Tuple{Base.Symbol, Real}}`, a list of name-value pairs. For example, `[(:accuracy, 0.9)]`. """ -function get(metric :: AbstractEvalMetric) - throw(MethodError(get, (typeof(metric),))) -end +get(metric::AbstractEvalMetric) = throw(MethodError(get, (typeof(metric),))) """ NullMetric() @@ -91,17 +89,11 @@ A metric that calculates nothing. 
Can be used to ignore an output during trainin mutable struct NullMetric <: mx.AbstractEvalMetric end -function update!(metric :: NullMetric, labels :: Vector{NDArray}, preds :: Vector{NDArray}) - return nothing -end +update!(metric::NullMetric, labels::VecOfNDArray, preds::VecOfNDArray) = nothing -function reset!(metric :: NullMetric) - return nothing -end +reset!(metric::NullMetric) = nothing -function get(metric :: NullMetric) - return Tuple{Symbol, Float64}[] -end +get(metric::NullMetric) = Tuple{Symbol, Float64}[] """ MultiMetric(metrics::Vector{AbstractEvalMetric}) @@ -118,21 +110,19 @@ mutable struct MultiMetric <: mx.AbstractEvalMetric metrics :: Vector{mx.AbstractEvalMetric} end -function update!(metric :: MultiMetric, labels :: Vector{NDArray}, preds :: Vector{NDArray}) +function update!(metric :: MultiMetric, labels :: Vector{<:NDArray}, preds :: Vector{<:NDArray}) for m in metric.metrics update!(m, labels, preds) end - return nothing + nothing end function reset!(metric :: MultiMetric) map(reset!, metric.metrics) - return nothing + nothing end -function get(metric :: MultiMetric) - mapreduce(get, append!, metric.metrics) -end +get(metric :: MultiMetric) = mapreduce(get, append!, metric.metrics) """ SeqMetric(metrics::Vector{AbstractEvalMetric}) @@ -150,23 +140,21 @@ mutable struct SeqMetric <: mx.AbstractEvalMetric metrics :: Vector{mx.AbstractEvalMetric} end -function update!(metric :: SeqMetric, labels :: Vector{NDArray}, preds :: Vector{NDArray}) +function update!(metric::SeqMetric, labels::VecOfNDArray, preds::VecOfNDArray) @assert length(metric.metrics) == length(labels) @assert length(metric.metrics) == length(preds) for (m, l, p) in zip(metric.metrics, labels, preds) update!(m, [l], [p]) end - return nothing + nothing end -function reset!(metric :: SeqMetric) +function reset!(metric::SeqMetric) map(reset!, metric.metrics) - return nothing + nothing end -function get(metric :: SeqMetric) - mapreduce(get, append!, metric.metrics) -end 
+get(metric::SeqMetric) = mapreduce(get, append!, metric.metrics) """ Accuracy @@ -185,7 +173,7 @@ end hasNDArraySupport(::Accuracy) = Val{false}() -function _update_single_output(metric :: Accuracy, label :: Array, pred :: Array) +function _update_single_output(metric::Accuracy, label::Array, pred::Array) # Samples are stored in the last dimension @assert size(label, ndims(label)) == size(pred, ndims(pred)) @@ -217,9 +205,7 @@ function _update_single_output(metric :: Accuracy, label :: Array, pred :: Array end end -function get(metric :: Accuracy) - return [(:accuracy, metric.acc_sum / metric.n_sample)] -end +get(metric::Accuracy) = [(:accuracy, metric.acc_sum / metric.n_sample)] function reset!(metric :: Accuracy) metric.acc_sum = 0.0 @@ -235,31 +221,34 @@ Calculates the mean squared error regression loss. Requires that label and prediction have the same shape. """ -mutable struct MSE <: AbstractEvalMetric - mse_sum :: Vector{NDArray} +mutable struct MSE{N} <: AbstractEvalMetric + mse_sum :: Vector{NDArray{MX_float,N}} n_sample :: Int - MSE() = new(Vector{NDArray}(), 0) + MSE{N}() where {N} = new(Vector{NDArray{MX_float,N}}(), 0) end +MSE() = MSE{1}() # backward compat? 
+ hasNDArraySupport(::MSE) = Val{true}() -function _update_single_output(metric :: MSE, label :: NDArray, pred :: NDArray) +function _update_single_output(metric::MSE, label::NDArray{T,N}, + pred::NDArray{T,N}) where {T,N} @assert size(label) == size(pred) metric.n_sample += length(label) - mse_sum = mx.sum(mx._PowerScalar(label - pred,scalar=2)) + mse_sum = mx.sum((label .- pred).^2) push!(metric.mse_sum, mse_sum) - return nothing + nothing end -function get(metric :: MSE) +function get(metric::MSE) # Delay copy until last possible moment mse_sum = mapreduce(nda->copy(nda)[1], +, 0.0, metric.mse_sum) - return [(:MSE, mse_sum / metric.n_sample)] + [(:MSE, mse_sum / metric.n_sample)] end -function reset!(metric :: MSE) - metric.mse_sum = Vector{NDArray}() +function reset!(metric::MSE{N}) where N + metric.mse_sum = Vector{NDArray{Float32,N}}() metric.n_sample = 0 end @@ -319,7 +308,7 @@ end hasNDArraySupport(::NMSE) = Val{false}() -function _update_single_output(metric :: NMSE, label :: Array, pred :: Array) +function _update_single_output(metric::NMSE, label::Array, pred::Array) n_sample = size(pred)[end] metric.n_sample += n_sample @@ -332,11 +321,9 @@ function _update_single_output(metric :: NMSE, label :: Array, pred :: Array) end end -function get(metric :: NMSE) - return [(:NMSE, metric.nmse_sum / metric.n_sample)] -end +get(metric::NMSE) = [(:NMSE, metric.nmse_sum / metric.n_sample)] -function reset!(metric :: NMSE) +function reset!(metric::NMSE) metric.nmse_sum = 0.0 metric.n_sample = 0 end @@ -357,11 +344,9 @@ mutable struct ACE <: AbstractEvalMetric ACE(eps=1.0e-8) = new(0.0, 0, eps) end -function get(metric :: ACE) - return [(:ACE, - metric.ace_sum / metric.n_sample)] -end +get(metric::ACE) = [(:ACE, - metric.ace_sum / metric.n_sample)] -function reset!(metric :: ACE) +function reset!(metric::ACE) metric.ace_sum = 0.0 metric.n_sample = 0 end @@ -474,4 +459,3 @@ function _update_single_output(metric :: MultiACE, label :: Array{T}, pred :: Ar error("Can't 
handle prediction with dimensions $(ndims(pred)).") end end - diff --git a/src/model.jl b/src/model.jl index 44a2ba755798..06b7a2cf8a13 100644 --- a/src/model.jl +++ b/src/model.jl @@ -18,14 +18,14 @@ mutable struct FeedForward <: AbstractModel arch :: SymbolicNode ctx :: Vector{Context} - arg_params :: Dict{Base.Symbol, NDArray} - aux_params :: Dict{Base.Symbol, NDArray} + arg_params :: Dict{Symbol} + aux_params :: Dict{Symbol} - pred_exec :: Union{Executor, Void} + pred_exec :: Union{Executor,Void} # leave the rest fields undefined - FeedForward(arch :: SymbolicNode, ctx :: Vector{Context}) = new(arch, ctx) - FeedForward(arch :: SymbolicNode, ctx :: Context) = new(arch, [ctx]) + FeedForward(arch::SymbolicNode, ctx::Vector{Context}) = new(arch, ctx) + FeedForward(arch::SymbolicNode, ctx::Context) = new(arch, [ctx]) end """ @@ -33,7 +33,7 @@ Get a split of `batch_size` into `n_split` pieces for data parallelization. Retu of length `n_split`, with each entry a `UnitRange{Int}` indicating the slice index for that piece. """ -function _split_inputs(batch_size :: Int, n_split :: Int) +function _split_inputs(batch_size::Int, n_split::Int) @assert(batch_size >= n_split) per_split = floor(Int, batch_size / n_split) counts = Base.zeros(Int, n_split)+per_split @@ -73,7 +73,7 @@ weights. * `input_shapes`: the shape of all data and label inputs to this model, given as keyword arguments. For example, `data=(28,28,1,100), label=(100,)`. """ -function init_model(self :: FeedForward, initializer :: AbstractInitializer; overwrite::Bool=false, input_shapes...) +function init_model(self::FeedForward, initializer::AbstractInitializer; overwrite::Bool=false, input_shapes...) 
# all arg names, including data, label, and parameters arg_names = list_arguments(self.arch) @@ -92,8 +92,8 @@ function init_model(self :: FeedForward, initializer :: AbstractInitializer; ove self.aux_params = Dict{Symbol, NDArray}() end - arg_params = Dict{Symbol, NDArray}() - aux_params = Dict{Symbol, NDArray}() + arg_params = Dict{Symbol,NDArray}() + aux_params = Dict{Symbol,NDArray}() for (name, shape) in filter(x -> in(x[1],param_names), zip(arg_names, arg_shapes)) if haskey(self.arg_params, name) @@ -138,7 +138,7 @@ function init_model(self :: FeedForward, initializer :: AbstractInitializer; ove return (arg_names, param_names, aux_names) end -function _setup_predictor(self :: FeedForward, overwrite :: Bool=false; verbosity :: Integer = 1, data_shapes...) +function _setup_predictor(self::FeedForward, overwrite::Bool=false; verbosity::Integer = 1, data_shapes...) if !isdefined(self, :pred_exec) || isa(self.pred_exec, Void) || overwrite if !isdefined(self, :arg_params) || !isdefined(self, :aux_params) @assert(false, "Model weights not defined, please init or train the model, or load from file") @@ -202,12 +202,12 @@ end See also [`train`](@ref), [`fit`](@ref), [`init_model`](@ref), and [`load_checkpoint`](@ref) """ -function predict(callback :: Function, self :: FeedForward, data :: AbstractDataProvider; - overwrite :: Bool = true, verbosity :: Integer = 1) +function predict(callback::Function, self::FeedForward, data::AbstractDataProvider; + overwrite::Bool = true, verbosity::Integer = 1) predict(self, data; overwrite = overwrite, callback=callback, verbosity = verbosity) end -function predict(self :: FeedForward, data :: AbstractDataProvider; - overwrite::Bool=true, callback::Union{Function,Void}=nothing, verbosity :: Integer = 1) +function predict(self::FeedForward, data::AbstractDataProvider; + overwrite::Bool = true, callback::Union{Function,Void}=nothing, verbosity::Integer = 1) data_shapes = provide_data(data) data_names = [x[1] for x in data_shapes] 
_setup_predictor(self, overwrite; verbosity = verbosity, data_shapes...) @@ -255,11 +255,13 @@ function predict(self :: FeedForward, data :: AbstractDataProvider; return output_arrays end -function _init_model(self :: FeedForward, data :: AbstractDataProvider, initializer :: AbstractInitializer, overwrite :: Bool) - init_model(self, initializer; overwrite=overwrite, [provide_data(data)..., provide_label(data)...]...) +function _init_model(self::FeedForward, data::AbstractDataProvider, + initializer::AbstractInitializer, overwrite::Bool) + init_model(self, initializer; overwrite=overwrite, + [provide_data(data)..., provide_label(data)...]...) end -function _create_kvstore(kv_type :: Base.Symbol, num_device :: Int, arg_params :: Dict{Base.Symbol,NDArray}, verbosity :: Int) +function _create_kvstore(kv_type::Symbol, num_device::Int, arg_params::Dict{Symbol}, verbosity::Int) if num_device == 1 && !ismatch(r"dist", string(kv_type)) return nothing else @@ -281,7 +283,7 @@ end n_epoch :: Int = 10, eval_data :: Union{Void, AbstractDataProvider} = nothing, eval_metric :: AbstractEvalMetric = Accuracy(), - kvstore :: Union{Base.Symbol, KVStore} = :local, + kvstore :: Union{Symbol, KVStore} = :local, force_init :: Bool = false, callbacks :: Vector{AbstractCallback} = AbstractCallback[], verbosity :: Int = 3 @@ -289,7 +291,7 @@ end function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, state::OptimizationState, type_filter::Type; - metric::Vector{Tuple{Base.Symbol, T}} = Vector{Tuple{Base.Symbol, Real}}()) where T<:Real + metric::Vector{Tuple{Symbol,T}} = Vector{Tuple{Symbol,Real}}()) where T<:Real map(callbacks) do cb if isa(cb, type_filter) if type_filter == AbstractEpochCallback @@ -327,7 +329,7 @@ Train the `model` on `data` with the `optimizer`. calculated on the validation set. * `kvstore`: keyword argument, default `:local`. The key-value store used to synchronize gradients and parameters when multiple devices are used for training. 
- :type kvstore: `KVStore` or `Base.Symbol` + :type kvstore: `KVStore` or `Symbol` * `initializer::AbstractInitializer`: keyword argument, default `UniformInitializer(0.01)`. * `force_init::Bool`: keyword argument, default false. By default, the random initialization using the provided `initializer` will be skipped if the model weights already exists, maybe from a previous @@ -342,7 +344,8 @@ Train the `model` on `data` with the `optimizer`. - `2`: Print one time messages and a message at the start of each epoch - `3`: Print a summary of the training and validation accuracy for each epoch """ -function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) +function fit(self::FeedForward, optimizer::AbstractOptimizer, data::AbstractDataProvider; + kwargs...) opts = TrainingOptions(; kwargs...) opts.verbosity >= 1 && info("Start training on $(self.ctx)") @@ -357,7 +360,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra # setup kvstore kvstore = opts.kvstore - if isa(kvstore, Base.Symbol) + if isa(kvstore, Symbol) opts.verbosity >= 2 && info("Creating KVStore...") kvstore = _create_kvstore(kvstore, length(self.ctx), self.arg_params, opts.verbosity) end @@ -379,7 +382,7 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra freeze_idx = filter(i -> in(param_names[i], freeze_names), 1:length(param_names)) # Setup grad_req as a dictionary - grad_req = Dict{Symbol, GRAD_REQ}() + grad_req = Dict{Symbol,GRAD_REQ}() for param in param_names if in(param, freeze_names) grad_req[param] = GRAD_NOP @@ -581,24 +584,26 @@ function fit(self :: FeedForward, optimizer :: AbstractOptimizer, data :: Abstra nothing end -function save_checkpoint(self :: FeedForward, prefix :: AbstractString, state :: OptimizationState) +save_checkpoint(self::FeedForward, prefix::AbstractString, state::OptimizationState) = save_checkpoint(self.arch, self.arg_params, self.aux_params, prefix, 
state.curr_epoch) -end -function save_checkpoint(sym :: SymbolicNode, arg_params :: Dict{Base.Symbol, NDArray}, - aux_params :: Dict{Base.Symbol, NDArray}, prefix :: AbstractString, epoch :: Int) + +function save_checkpoint(sym::SymbolicNode, arg_params::Dict{Symbol}, + aux_params::Dict{Symbol}, prefix::AbstractString, epoch::Int) save("$prefix-symbol.json", sym) - save_dict = merge(Dict{Base.Symbol, NDArray}(map((x) -> Symbol("arg:$(x[1])") => x[2], arg_params)), - Dict{Base.Symbol, NDArray}(map((x) -> Symbol("aux:$(x[1])") => x[2], aux_params))) + save_dict = Dict{Symbol, NDArray}(map((x) -> Symbol("arg:$(x[1])") => x[2], arg_params)) + if !isempty(aux_params) + merge!(save_dict, Dict(map((x) -> Symbol("aux:$(x[1])") => x[2], aux_params))) + end save_filename = format("{1}-{2:04d}.params", prefix, epoch) save(save_filename, save_dict) info("Saved checkpoint to '$save_filename'") end -function load_checkpoint(prefix :: AbstractString, epoch :: Int) +function load_checkpoint(prefix::AbstractString, epoch::Int) arch = load("$prefix-symbol.json", SymbolicNode) saved_dict = load(format("{1}-{2:04d}.params", prefix, epoch), NDArray) - arg_params = Dict{Base.Symbol, NDArray}() - aux_params = Dict{Base.Symbol, NDArray}() + arg_params = Dict{Symbol,Any}() + aux_params = Dict{Symbol,Any}() for (k,v) in saved_dict tp, name = split(string(k), ':') name = Symbol(name) @@ -617,7 +622,7 @@ end Load a mx.FeedForward model from the checkpoint *prefix*, *epoch* and optionally provide a context. 
""" -function load_checkpoint(prefix :: AbstractString, epoch :: Int, ::Type{FeedForward}; context = nothing) +function load_checkpoint(prefix::AbstractString, epoch::Int, ::Type{FeedForward}; context = nothing) arch, arg_params, aux_params = load_checkpoint(prefix, epoch) model = FeedForward(arch, context = context) model.arg_params = arg_params @@ -625,8 +630,8 @@ function load_checkpoint(prefix :: AbstractString, epoch :: Int, ::Type{FeedForw return model end -function load_checkpoint(self :: FeedForward, prefix :: AbstractString, epoch :: Int; - overwrite :: Bool = true, allow_different_arch :: Bool = false) +function load_checkpoint(self::FeedForward, prefix::AbstractString, epoch::Int; + overwrite::Bool = true, allow_different_arch::Bool = false) if isdefined(self, :arg_params) && isdefined(self, :aux_params) && !overwrite info("model weights already exists, skip loading... (call with overwrite=true if needed)") return self diff --git a/src/ndarray.jl b/src/ndarray.jl index 9bb74f83f5d2..1a2ffa280d58 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -3,7 +3,7 @@ const DType = Union{Float32, Float64, Float16, UInt8, Int32, Int8, Int64} @enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 kInt8 kInt64 const DEFAULT_DTYPE = Float32 # MSHADOW_DEFAULT_DTYPE -function toTypeFlag(:: Type{T}) where T <: DType +function toTypeFlag(T::Type{<:DType}) if T == Float32 return kFloat32 elseif T == Float64 @@ -23,7 +23,7 @@ function toTypeFlag(:: Type{T}) where T <: DType end end -function fromTypeFlag(T :: TypeFlag) +function fromTypeFlag(T::TypeFlag) if T == kFloat32 return Float32 elseif T == kFloat64 @@ -89,19 +89,21 @@ of tensor-based computation. C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory have shape (28,28,1,100). 
""" -mutable struct NDArray +mutable struct NDArray{T,D} handle :: MX_NDArrayHandle writable :: Bool - function NDArray(handle, writable=true) - new(handle, writable) - end + NDArray{T,D}(handle, writable = true) where {T,D} = new(handle, writable) end NDArray(x::AbstractArray{T}) where {T<:DType} = copy(collect(x), cpu()) NDArray(x::Array{T}) where {T<:DType} = copy(x, cpu()) +NDArray(handle, writable = true) = + NDArray{eltype(handle), ndims(handle)}(handle, writable) +# type aliases const NDArrayOrReal = Union{NDArray, Real} +const VecOfNDArray = AbstractVector{<:NDArray} @unfuse NDArray @@ -112,15 +114,15 @@ end # for REPL function Base.show(io::IO, ::MIME{Symbol("text/plain")}, x::NDArray) - println(io, "$(join(size(x), "×")) mx.NDArray{$(eltype(x))} @ $(context(x)):") - Base.showarray(io, try_get_shared(x, sync = :read), false, header = false) + type_ = split(string(typeof(x)), '.', limit=2)[end] + println(io, "$(join(size(x), "×")) $(type_) @ $(context(x)):") + Base.showarray(io, try_get_shared(x, sync = :read), false, header=false) end -function Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) +Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(MX_handle, obj.handle) -end -Base.convert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) -Base.cconvert(t::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(t, obj) +Base.convert(T::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(T, obj) +Base.cconvert(T::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(T, obj) ################################################################################ # NDArray functions exported to the users @@ -140,21 +142,15 @@ end """ - empty(DType, shape :: Tuple, ctx :: Context) - empty(DType, shape :: Tuple) + empty(DType, dims[, ctx::Context = cpu()]) + empty(DType, dims) empty(DType, dim1, dim2, ...) Allocate memory for an uninitialized `NDArray` with a specified type. 
""" -function empty(::Type{T}, shape :: NTuple{N, Int}) where {N,T<:DType} - empty(T, shape, cpu()) -end -function empty(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) where {N,T<:DType} - NDArray(_ndarray_alloc(T, shape, ctx, false)) -end -function empty(:: Type{T}, shape :: Int...) where T<:DType - empty(T, shape) -end +empty(::Type{T}, dims::NTuple{N, Int}, ctx::Context = cpu()) where {N,T<:DType} = + NDArray{T, N}(_ndarray_alloc(T, dims, ctx, false)) +empty(::Type{T}, dims::Int...) where {T<:DType} = empty(T, dims) """ empty(shape :: Tuple, ctx :: Context) @@ -173,54 +169,39 @@ function empty(shape :: Int...) empty(shape) end -import Base.similar - """ - similar(arr :: NDArray) + similar(x::NDArray) -Create an `NDArray` with similar shape, data type, and context with the given one. +Create an `NDArray` with similar shape, data type, +and context with the given one. +Note that the returned `NDArray` is uninitialized. """ -function similar(arr :: NDArray) - empty(eltype(arr), size(arr), context(arr)) -end +Base.similar(x::NDArray{T}) where {T} = empty(T, size(x), context(x)) """ - zeros(DType, shape :: Tuple, ctx :: Context) - zeros(DType, shape :: Tuple) - zeros(DType, dim1, dim2, ...) + zeros(DType, dims[, ctx::Context = cpu()]) + zeros(DType, dims...) -Create zero-ed `NDArray` with specific shape and type +Create zero-ed `NDArray` with specific shape and type. """ -function zeros(:: Type{T}, shape :: NTuple{N, Int}) where {N,T<:DType} - zeros(T, shape, cpu()) -end -function zeros(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) where {N,T<:DType} - arr = empty(T, shape, ctx) +function zeros(::Type{T}, dims::NTuple{N, Int}, ctx::Context = cpu()) where {N,T<:DType} + arr = empty(T, dims, ctx) arr[:] = zero(T) - return arr -end -function zeros(:: Type{T}, shape :: Int...) where T<:DType - zeros(T, shape) + arr end +zeros(::Type{T}, dims::Int...) 
where {T<:DType} = zeros(T, dims) + """ - zeros(shape :: Tuple, ctx :: Context) - zeros(shape :: Tuple) - zeros(dim1, dim2, ...) + zeros(dims[, ctx::Context = cpu()]) + zeros(dims...) Create zero-ed `NDArray` with specific shape. """ -function zeros(shape :: NTuple{N, Int}) where N - zeros(shape, cpu()) -end -function zeros(shape :: NTuple{N, Int}, ctx :: Context) where N - arr = empty(shape, ctx) - arr[:] = 0 - return arr -end -function zeros(shape :: Int...) - zeros(shape) -end +zeros(dims::NTuple{N, Int}, ctx::Context = cpu()) where N = + zeros(MX_float, dims, ctx) + +zeros(dims::Int...) = zeros(dims) """ ones(DType, shape :: Tuple, ctx :: Context) @@ -263,11 +244,11 @@ end import Base: size, length, ndims, eltype """ - size(arr :: NDArray) - size(arr :: NDArray, dim :: Int) + size(x::NDArray) + size(x::NDArray, dim) -Get the shape of an `NDArray`. The shape is in Julia's column-major convention. See -also the notes on NDArray shapes [`NDArray`](@ref). +Get the shape of an `NDArray`. The shape is in Julia's column-major convention. +See also the notes on NDArray shapes [`NDArray`](@ref). """ function size(arr :: NDArray) ref_ndim = Ref{MX_uint}(0) @@ -281,45 +262,50 @@ function size(arr :: NDArray, dim :: Int) end """ - length(arr :: NDArray) + length(x::NDArray) Get the number of elements in an `NDArray`. """ -function length(arr :: NDArray) - prod(size(arr)) -end +length(x::NDArray) = prod(size(x)) """ - ndims(arr :: NDArray) + ndims(x::NDArray) -Get the number of dimensions of an `NDArray`. Is equivalent to `length(size(arr))`. +Get the number of dimensions of an `NDArray`. +Is equivalent to `length(size(arr))`. 
""" -function ndims(arr :: NDArray) - length(size(arr)) +ndims(x::NDArray) = ndims(x.handle) + +function ndims(x::MX_NDArrayHandle)::Int + ref_ndim = Ref{MX_uint}(0) + ref_shape = Ref{Ptr{MX_uint}}(0) + @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), + x, ref_ndim, ref_shape) + ref_ndim[] end """ - eltype(arr :: NDArray) + eltype(x::NDArray) Get the element type of an `NDArray`. """ -function eltype(arr :: T) where T <: Union{NDArray, MX_NDArrayHandle} +function eltype(x::Union{NDArray, MX_NDArrayHandle}) dtype_ref = Ref{Cint}(0) - @mxcall(:MXNDArrayGetDType, (MX_handle, Ptr{Cint}), arr, dtype_ref) + @mxcall(:MXNDArrayGetDType, (MX_handle, Ptr{Cint}), x, dtype_ref) - if dtype_ref[] == -1 # arr->is_none() - warn("Eltype of $arr is not defined") + if dtype_ref[] == -1 # x->is_none() + warn("Eltype of $x is not defined") Base.show_backtrace(STDOUT, backtrace()) println() - return Float32 + Float32 else - return fromTypeFlag(TypeFlag(dtype_ref[])) + fromTypeFlag(TypeFlag(dtype_ref[])) end end -@inline _first(arr::NDArray) = try_get_shared(arr, sync = :read) |> first +@inline _first(x::NDArray) = try_get_shared(x, sync = :read) |> first -Base.first(arr::NDArray) = _first(arr) +Base.first(x::NDArray) = _first(x) """ slice(arr :: NDArray, start:stop) @@ -469,7 +455,7 @@ import Base: copy!, copy, convert, deepcopy Copy contents of `src` into `dst`. 
""" -function copy!(dst :: NDArray, src :: NDArray) +function copy!(dst::NDArray, src::NDArray) @assert(dst.writable) if dst.handle == src.handle warn("Copying an NDArray to itself") @@ -480,33 +466,31 @@ function copy!(dst :: NDArray, src :: NDArray) return dst end -function copy!(dst :: Array{T}, src :: NDArray) where T<:DType - @assert T == eltype(src) +function copy!(dst::Array{T}, src::NDArray{T}) where T<:DType @assert size(dst) == size(src) @mxcall(:MXNDArraySyncCopyToCPU, (MX_handle, Ptr{Void}, Csize_t), src, pointer(dst), length(dst)) - return dst -end -function copy!(dst :: Array{T}, src :: NDArray) where T<:Real - copy!(dst, copy(src)) + dst end -function copy!(dst :: NDArray, src :: Array{T}) where T<:Real +copy!(dst::Array{<:Real}, src::NDArray) = copy!(dst, copy(src)) + +function copy!(dst::NDArray{T}, src::Array{<:Real}) where {T} @assert dst.writable @assert size(dst) == size(src) - src = convert(Array{eltype(dst)}, src) # this might involve copying + src = convert(Array{T}, src) # this might involve copying @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{Void}, Csize_t), dst.handle, pointer(src), length(src)) - return dst + dst end -function copy_ignore_shape!(dst :: NDArray, src :: Array{T}) where T<:Real +function copy_ignore_shape!(dst::NDArray{T}, src::Array{<:Real}) where {T} @assert dst.writable @assert length(dst) == length(src) - src = convert(Array{eltype(dst)}, src) # this might involve copying + src = convert(Array{T}, src) # this might involve copying @mxcall(:MXNDArraySyncCopyFromCPU, (MX_handle, Ptr{Void}, Csize_t), dst.handle, pointer(src), length(src)) - return dst + dst end @@ -519,31 +503,23 @@ Create a copy of an array. When no `Context` is given, create a Julia `Array`. Otherwise, create an `NDArray` on the specified context. 
""" # Create copy: NDArray -> Julia Array -function copy(arr :: NDArray) - j_arr = Array{eltype(arr)}(size(arr)) - copy!(j_arr, arr) -end +copy(x::NDArray{T,D}) where{T,D} = copy!(Array{T,D}(size(x)), x) # Create copy: NDArray -> NDArray in a given context -function copy(arr :: NDArray, ctx :: Context) - dst = NDArray(_ndarray_alloc(eltype(arr), size(arr), ctx, true)) - copy!(dst, arr) -end +copy(x::NDArray{T,D}, ctx::Context) where {T,D} = + copy!(NDArray{T,D}(_ndarray_alloc(T, size(x), ctx, true)), x) # Create copy: Julia Array -> NDArray in a given context -function copy(arr :: Array{T}, ctx :: Context) where T<:DType - dst = empty(T, size(arr), ctx) - copy!(dst, arr) -end +copy(x::Array{T}, ctx::Context) where {T<:DType} = + copy!(empty(T, size(x), ctx), x) """ - convert(::Type{Array{T}}, arr :: NDArray) + convert(::Type{Array{<:Real}}, x::NDArray) -Convert an `NDArray` into a Julia `Array` of specific type. Data will be copied. +Convert an `NDArray` into a Julia `Array` of specific type. +Data will be copied. """ -function convert(t::Type{Array{T}}, arr :: NDArray) where T<:Real - convert(t, copy(arr)) -end +convert(T::Type{Array{<:Real}}, x::NDArray) = convert(T, copy(x)) """ deepcopy(arr::NDArray) @@ -599,13 +575,13 @@ end Add a bunch of arguments into `dst`. Inplace updating. """ -function add_to!(dst::NDArray, args::NDArrayOrReal...) +function add_to!(dst::NDArray{T}, args::NDArrayOrReal...) where T @assert dst.writable for arg in args if isa(arg, Real) - _plus_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) + _plus_scalar(dst, scalar = convert(T, arg), out = dst) else - _plus(dst, arg, out=dst) + _plus(dst, arg, out = dst) end end return dst @@ -632,10 +608,10 @@ broadcast_(::typeof(+), x::Real, y::NDArray) = x + y Subtract a bunch of arguments from `dst`. Inplace updating. 
""" -function sub_from!(dst::NDArray, arg::NDArrayOrReal) +function sub_from!(dst::NDArray{T}, arg::NDArrayOrReal) where T @assert dst.writable if isa(arg, Real) - _minus_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) + _minus_scalar(dst, scalar = convert(T, arg), out = dst) else _minus!(dst, arg) end @@ -664,12 +640,12 @@ broadcast_(::typeof(-), x::Real, y::NDArray) = x - y Elementwise multiplication into `dst` of either a scalar or an `NDArray` of the same shape. Inplace updating. """ -function mul_to!(dst::NDArray, arg::NDArrayOrReal) +function mul_to!(dst::NDArray{T}, arg::NDArrayOrReal) where T @assert dst.writable if isa(arg, Real) - _mul_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) + _mul_scalar(dst, scalar = convert(T, arg), out = dst) else - _mul(dst, arg, out=dst) + _mul(dst, arg, out = dst) end end @@ -692,23 +668,19 @@ broadcast_(::typeof(*), x::Real, y::NDArray) = y .* x Matrix (2D NDArray) multiplication. """ -function *(x::NDArray, y::NDArray) - @assert ndims(x) == 2 - @assert ndims(y) == 2 - dot(x, y) -end +*(x::NDArray{T,2}, y::NDArray{S,2}) where {T,S} = dot(x, y) """ div_from!(dst::NDArray, arg::NDArrayOrReal) Elementwise divide a scalar or an `NDArray` of the same shape from `dst`. Inplace updating. """ -function div_from!(dst::NDArray, arg::NDArrayOrReal) +function div_from!(dst::NDArray{T}, arg::NDArrayOrReal) where {T} @assert dst.writable if isa(arg, Real) - _div_scalar(dst, scalar=convert(eltype(dst), arg), out=dst) + _div_scalar(dst, scalar = convert(T, arg), out = dst) else - _div(dst, arg, out=dst) + _div(dst, arg, out = dst) end end @@ -717,9 +689,9 @@ end Elementwise divide a scalar by an `NDArray`. Inplace updating. 
""" -function rdiv_from!(x::Real, y::NDArray) +function rdiv_from!(x::Real, y::NDArray{T}) where {T} @assert y.writable - _rdiv_scalar(y, scalar=convert(eltype(y), x), out=y) + _rdiv_scalar(y, scalar = convert(T, x), out = y) end import Base: / @@ -902,12 +874,11 @@ function pointer(arr :: NDArray) @mxcall(:MXNDArrayGetData, (MX_handle, Ref{Ptr{Void}}), arr, pdata) return convert(Ptr{eltype(arr)}, pdata[]) end -function _wait_to_read(arr :: NDArray) + +@inline _wait_to_read(arr :: NDArray) = @mxcall(:MXNDArrayWaitToRead, (MX_handle,), arr) -end -function _wait_to_write(arr :: NDArray) +@inline _wait_to_write(arr :: NDArray) = @mxcall(:MXNDArrayWaitToWrite, (MX_handle,), arr) -end """ try_get_shared(arr; sync=:nop) @@ -926,19 +897,19 @@ Try to create a Julia array by sharing the data with the underlying `NDArray`. On CPU, invoke `_wait_to_read` if `:read`; invoke `_wait_to_write` if `:write`. """ -function try_get_shared(arr :: NDArray; sync::Symbol=:nop) - if context(arr).device_type == CPU +function try_get_shared(x::NDArray; sync::Symbol=:nop) + if context(x).device_type == CPU # try to do data sharing if sync == :read - _wait_to_read(arr) + _wait_to_read(x) elseif sync == :write - _wait_to_write(arr) + _wait_to_write(x) end - unsafe_wrap(Array, pointer(arr), size(arr)) + unsafe_wrap(Array, pointer(x), size(x)) else # impossible to share, just copying - copy(arr) + copy(x) end end @@ -952,16 +923,16 @@ Test whether `j_arr` is sharing data with `arr`. * `j_arr::Array`: the Julia Array. * `arr::NDArray`: the `NDArray`. 
""" -is_shared(j_arr :: Array, arr :: NDArray) = false +is_shared(::Array, ::NDArray) = false -function is_shared(j_arr :: Array{T}, arr :: NDArray) where T<:DType +function is_shared(j_arr::Array{T}, arr::NDArray{T}) where {T<:DType} if length(j_arr) != length(arr) return false end if context(arr).device_type != CPU return false end - return pointer(j_arr) == pointer(arr) + pointer(j_arr) == pointer(arr) end """ @@ -980,7 +951,7 @@ corresponding components enabled. Examples: * `hdfs://my-bucket/path/my-hdfs-ndarray` * `/path-to/my-local-ndarray` """ -function load(filename::AbstractString, ::Type{NDArray}) +function load(filename::AbstractString, ::Type{<:NDArray}) out_size = Ref{MX_uint}(0) out_hdrs = Ref{Ptr{MX_handle}}(0) out_name_size = Ref{MX_uint}(0) @@ -999,25 +970,25 @@ function load(filename::AbstractString, ::Type{NDArray}) end """ - save(filename :: AbstractString, data) + save(filename::AbstractString, data) Save NDarrays to binary file. Filename could be S3 or HDFS address, if `libmxnet` is built with corresponding support (see `load`). * `filename::String`: path to the binary file to write to. -* `data`: data to save to file. Data can be a`NDArray`, a `Vector{NDArray}`, or a `Dict{Base.Symbol, NDArray}`. +* `data`: data to save to file. Data can be a`NDArray`, a `Vector` of `NDArray`, + or a `Dict{Symbol}` contains `NDArray`s. 
""" -function save(filename::String, data::NDArray) - save(filename, [data]) -end -function save(filename::String, data::Vector{NDArray}) +save(filename::String, data::NDArray) = save(filename, [data]) + +save(filename::String, data::VecOfNDArray) = @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), filename, length(data), MX_handle[data...], char_pp(0)) -end -function save(filename::String, data::Dict{Base.Symbol,NDArray}) - names = [k for k in keys(data)] - arrays = MX_handle[data[k] for k in names] - names = String[string(k) for k in names] + +function save(filename::String, data::Dict{Symbol}) + names = keys(data) + arrays = MX_handle.(collect(values(data))) + names = String.(collect(names)) @mxcall(:MXNDArraySave, (char_p, MX_uint, Ptr{MX_handle}, char_pp), filename, length(names), arrays, names) @@ -1047,7 +1018,7 @@ function _outexpr(name::Symbol, x #= the first arg of `sig` =#) end macro _remap(sig::Expr, imp::Expr) - fname = sig.args[1] + fname = (sig.head == :call) ? sig.args[1] : sig.args[1].args[1] # case of `where` opname = string(imp.args[1]) import_expr = _autoimport(fname) @@ -1065,7 +1036,7 @@ macro _remap(sig::Expr, imp::Expr) ndhlds = Expr(:vect, map(x -> :($(x).handle), ndin)...) # handler for `func!` which has side effect on first argument. 
- T, n_output, hdls_ref, retexpr = _outexpr(fname, sig.args[2].args[1]) + T, n_output, hdls_ref, retexpr = _outexpr(fname, _firstarg(sig)) func_body = quote op_handle = _get_cached_libmx_op_handle($opname) @@ -1126,7 +1097,7 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap minimum(arr::NDArray, dims) min(arr; axis = 0 .- dims, keepdims = true) # See https://github.com/dmlc/MXNet.jl/issues/55 -@_remap dot(x::NDArray, y::NDArray) dot(y, x) +@_remap dot(x::NDArray{T,N}, y::NDArray{S,N}) where {T,S,N} dot(y, x) # See https://github.com/dmlc/MXNet.jl/pull/123 @_remap transpose(arr::NDArray) transpose(_only2d(arr)) @@ -1192,7 +1163,7 @@ function _get_ndarray_function_def(name :: String) func_name = Symbol(name) func_def = quote - function $func_name(::Type{NDArray}, args::NDArray...; out=nothing, kwargs...) + function $func_name(::Type{<:NDArray}, args::NDArray...; out=nothing, kwargs...) if out != nothing output_vars = out if isa(output_vars, NDArray) diff --git a/src/util.jl b/src/util.jl index 6877200d87b8..b0f91c824566 100644 --- a/src/util.jl +++ b/src/util.jl @@ -202,3 +202,17 @@ function _sig_checker() end end + +""" +Get first position argument from function sig +""" +function _firstarg(sig::Expr) + if sig.head ∈ (:where, :(::)) + _firstarg(sig.args[1]) + elseif sig.head == :call + i = (sig.args[2] isa Expr && sig.args[2].head == :parameters) ? 
3 : 2 + _firstarg(sig.args[i]) + end +end + +_firstarg(s::Symbol) = s diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 47ce9b08957d..f6d1b1281b08 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -423,7 +423,7 @@ function test_saveload() j_array, nd_array = rand_tensors(dims) mx.save(fname, nd_array) data = mx.load(fname, mx.NDArray) - @test data isa Vector{mx.NDArray} + @test data isa Vector{<:mx.NDArray} @test length(data) == 1 @test copy(data[1]) ≈ j_array @@ -432,7 +432,7 @@ function test_saveload() nd_arrays = mx.NDArray[x[2] for x in arrays] mx.save(fname, nd_arrays) data = mx.load(fname, mx.NDArray) - @test isa(data, Vector{mx.NDArray}) + @test data isa Vector{<:mx.NDArray} @test length(data) == n_arrays for i = 1:n_arrays @test copy(data[i]) ≈ arrays[i][1] @@ -443,7 +443,7 @@ function test_saveload() dict = Dict([(n, v) for (n,v) in zip(names, nd_arrays)]) mx.save(fname, dict) data = mx.load(fname, mx.NDArray) - @test data isa Dict{Symbol, mx.NDArray} + @test data isa Dict{Symbol,<:mx.NDArray} @test length(data) == n_arrays for i = 1:n_arrays @test copy(data[names[i]]) ≈ arrays[i][1] @@ -592,6 +592,10 @@ function test_dot() y = mx.zeros(dims2) z = mx.dot(x, y) @test size(z) == (2, 8) + + x = mx.zeros(1, 2) + y = mx.zeros(1, 2, 3) + @test_throws MethodError dot(x, y) end function test_eltype() diff --git a/test/unittest/util.jl b/test/unittest/util.jl new file mode 100644 index 000000000000..d27b509bd010 --- /dev/null +++ b/test/unittest/util.jl @@ -0,0 +1,25 @@ +module TestUtil + +using Base.Test + +using MXNet + + +function test_firstarg() + info("Util::_firstarg") + @test mx._firstarg(:(f(x, y))) == :x + @test mx._firstarg(:(f(x::mx.NDArray, y))) == :x + @test mx._firstarg(:(f(x::mx.NDArray, y::mx.NDArray))) == :x + @test mx._firstarg(:(f(x::Int, y::mx.NDArray))) == :x + @test mx._firstarg(:(f(x::Int, y::mx.NDArray; other = 42))) == :x + @test mx._firstarg(:(f(x::mx.NDArray{T}, y) where {T})) == :x + @test 
mx._firstarg(:(f(x::mx.NDArray{T,N}, y) where {T,N})) == :x + @test mx._firstarg(:(f(x::mx.NDArray{T,N} where {T,N}, y))) == :x +end # function test_firstarg + + +@testset "Util Test" begin + test_firstarg() +end # @testset "Util" + +end # module TestUtil From 914fab6d3498731103c00985062e478811ded1b7 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 2 Dec 2017 13:10:14 +0800 Subject: [PATCH 570/630] ndarray: minor style changes (#349) --- src/ndarray.jl | 61 +++++++++++++++++--------------------------------- 1 file changed, 21 insertions(+), 40 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 1a2ffa280d58..f750f4fcef1b 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -128,11 +128,11 @@ Base.cconvert(T::Type{MX_handle}, obj::NDArray) = Base.unsafe_convert(T, obj) # NDArray functions exported to the users ################################################################################ """ - context(arr :: NDArray) + context(arr::NDArray) Get the context that this `NDArray` lives on. """ -function context(arr :: NDArray) +function context(arr::NDArray) ref_typeid = Ref{Cint}(0) ref_devid = Ref{Cint}(0) @mxcall(:MXNDArrayGetContext, (MX_handle, Ref{Cint}, Ref{Cint}), @@ -140,7 +140,6 @@ function context(arr :: NDArray) return Context(ref_typeid[], ref_devid[]) end - """ empty(DType, dims[, ctx::Context = cpu()]) empty(DType, dims) @@ -148,26 +147,19 @@ end Allocate memory for an uninitialized `NDArray` with a specified type. """ -empty(::Type{T}, dims::NTuple{N, Int}, ctx::Context = cpu()) where {N,T<:DType} = +empty(::Type{T}, dims::NTuple{N,Int}, ctx::Context = cpu()) where {N,T<:DType} = NDArray{T, N}(_ndarray_alloc(T, dims, ctx, false)) empty(::Type{T}, dims::Int...) where {T<:DType} = empty(T, dims) """ - empty(shape :: Tuple, ctx :: Context) - empty(shape :: Tuple) + empty(dims::Tuple[, ctx::Context = cpu()]) empty(dim1, dim2, ...) Allocate memory for an uninitialized `NDArray` with specific shape of type Float32. 
""" -function empty(shape :: NTuple{N, Int}) where N - empty(shape, cpu()) -end -function empty(shape :: NTuple{N, Int}, ctx :: Context) where N - NDArray(_ndarray_alloc(shape, ctx, false)) -end -function empty(shape :: Int...) - empty(shape) -end +empty(dims::NTuple{N,Int}, ctx::Context = cpu()) where N = + NDArray(_ndarray_alloc(dims, ctx, false)) +empty(dims::Int...) = empty(dims) """ similar(x::NDArray) @@ -184,7 +176,7 @@ Base.similar(x::NDArray{T}) where {T} = empty(T, size(x), context(x)) Create zero-ed `NDArray` with specific shape and type. """ -function zeros(::Type{T}, dims::NTuple{N, Int}, ctx::Context = cpu()) where {N,T<:DType} +function zeros(::Type{T}, dims::NTuple{N,Int}, ctx::Context = cpu()) where {N,T<:DType} arr = empty(T, dims, ctx) arr[:] = zero(T) arr @@ -200,47 +192,36 @@ Create zero-ed `NDArray` with specific shape. """ zeros(dims::NTuple{N, Int}, ctx::Context = cpu()) where N = zeros(MX_float, dims, ctx) - zeros(dims::Int...) = zeros(dims) """ - ones(DType, shape :: Tuple, ctx :: Context) - ones(DType, shape :: Tuple) - ones(DType, dim1, dim2, ...) + ones(DType, dims::Tuple[, ctx::Context = cpu()]) + ones(DType, dim1, dim2...) Create an `NDArray` with specific shape & type, and initialize with 1. """ -function ones(:: Type{T}, shape :: NTuple{N, Int}) where {N,T<:DType} - ones(T, shape, cpu()) -end -function ones(:: Type{T}, shape :: NTuple{N, Int}, ctx :: Context) where {N,T<:DType} - arr = empty(T, shape, ctx) +function ones(::Type{T}, dims::NTuple{N,Int}, ctx::Context = cpu()) where {N,T<:DType} + arr = empty(T, dims, ctx) arr[:] = one(T) - return arr -end -function ones(:: Type{T}, shape :: Int...) where T<:DType - ones(T, shape) + arr end +ones(::Type{T}, dims::Int...) where T<:DType = ones(T, dims) + """ - ones(shape :: Tuple, ctx :: Context) - ones(shape :: Tuple) + ones(dims::Tuple[, ctx::Context = cpu()]) ones(dim1, dim2, ...) Create an `NDArray` with specific shape and initialize with 1. 
""" -function ones(shape :: NTuple{N, Int}) where N - ones(shape, cpu()) -end -function ones(shape :: NTuple{N, Int}, ctx :: Context) where N - arr = empty(shape, ctx) +function ones(dims::NTuple{N,Int}, ctx::Context = cpu()) where N + arr = empty(dims, ctx) arr[:] = 1 - return arr -end -function ones(shape :: Int...) - ones(shape) + arr end +ones(dims::Int...) = ones(dims) + import Base: size, length, ndims, eltype """ From c43d0ddf26b65a4ff55bc1d2f9ce9bc4d82777d4 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 2 Dec 2017 14:26:22 +0800 Subject: [PATCH 571/630] model: fix test cases macro (#355) --- test/unittest/model.jl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/unittest/model.jl b/test/unittest/model.jl index fafda7968e25..86a3a6f131ec 100644 --- a/test/unittest/model.jl +++ b/test/unittest/model.jl @@ -8,21 +8,21 @@ function test_feedforward() info("Model::FeedForward::constructor") let x = @mx.var x m = mx.FeedForward(x) - @assert m.arch === x - @assert length(m.ctx) == 1 + @test m.arch === x + @test length(m.ctx) == 1 end info("Model::FeedForward::constructor::keyword context") let x = @mx.var x m = mx.FeedForward(x, context = mx.cpu()) - @assert m.arch === x - @assert length(m.ctx) == 1 + @test m.arch === x + @test length(m.ctx) == 1 end let x = @mx.var x m = mx.FeedForward(x, context = [mx.cpu(), mx.cpu(1)]) - @assert m.arch === x - @assert length(m.ctx) == 2 + @test m.arch === x + @test length(m.ctx) == 2 end end From 09ee1f4ad067d76d57487afba4d55439602bfc9d Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 3 Dec 2017 00:56:28 +0800 Subject: [PATCH 572/630] ndarray: implement `size(x, dims...)` (#350) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ```julia julia> x = mx.NDArray([1 2; 3 4; 5 6]) 3×2 mx.NDArray{Int64,2} @ CPU0: 1 2 3 4 5 6 julia> size(x, 1, 2, 3, 4) (3, 2, 1, 1) ``` --- NEWS.md | 16 ++++++++++++++++ src/ndarray.jl | 17 ++++++++++++----- 
test/unittest/ndarray.jl | 10 ++++++++++ 3 files changed, 38 insertions(+), 5 deletions(-) diff --git a/NEWS.md b/NEWS.md index 3e5dce1c6eee..307e8180eb71 100644 --- a/NEWS.md +++ b/NEWS.md @@ -16,6 +16,22 @@ (MXNet.mx.SymbolicNode x, MXNet.mx.SymbolicNode y, MXNet.mx.SymbolicNode z) ``` +### `NDArray` + +* `size(x, dims...)` is supported now. (#TBD) + + ```julia + julia> x = mx.NDArray([1 2; 3 4; 5 6]) + 3×2 mx.NDArray{Int64,2} @ CPU0: + 1 2 + 3 4 + 5 6 + + julia> size(x, 1, 2, 3, 4) + (3, 2, 1, 1) + + ``` + # v0.3.0 (2017.11.16) * Update `libmxnet` to diff --git a/src/ndarray.jl b/src/ndarray.jl index f750f4fcef1b..045b7c28eb1e 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -226,22 +226,29 @@ import Base: size, length, ndims, eltype """ size(x::NDArray) - size(x::NDArray, dim) + size(x::NDArray, dims...) Get the shape of an `NDArray`. The shape is in Julia's column-major convention. See also the notes on NDArray shapes [`NDArray`](@ref). """ -function size(arr :: NDArray) +function size(x::NDArray) ref_ndim = Ref{MX_uint}(0) ref_shape = Ref{Ptr{MX_uint}}(0) @mxcall(:MXNDArrayGetShape, (MX_handle, Ref{MX_uint}, Ref{Ptr{MX_uint}}), - arr, ref_ndim, ref_shape) + x, ref_ndim, ref_shape) tuple(map(Int, flipdim(unsafe_wrap(Array, ref_shape[], ref_ndim[]),1))...) end -function size(arr :: NDArray, dim :: Int) - size(arr)[dim] + +function size(x::NDArray{T,N}, dim::Int) where {T,N} + if dim > N + 1 + else + size(x)[dim] + end end +size(x::NDArray, dims::Int...) 
= map(d -> size(x, d), dims) + """ length(x::NDArray) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index f6d1b1281b08..367de8177126 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -770,6 +770,15 @@ function test_show() end end +function test_size() + info("NDArray::size") + let A = [1 2; 3 4; 5 6], x = mx.NDArray(A) + @test size(A) == size(x) + @test size(A, 1, 2, 3, 4, 5) == size(x, 1, 2, 3, 4, 5) + @inferred size(x, 1, 2, 3, 4, 5) + end +end # function test_size() + ################################################################################ # Run tests ################################################################################ @@ -802,6 +811,7 @@ end test_fill() test_transpose() test_show() + test_size() end end From 8a19651a680b671137d26f4b148c02efcf52c55c Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 3 Dec 2017 00:59:57 +0800 Subject: [PATCH 573/630] ndarray: implement endof (#351) --- src/ndarray.jl | 2 ++ test/unittest/ndarray.jl | 8 ++++++++ 2 files changed, 10 insertions(+) diff --git a/src/ndarray.jl b/src/ndarray.jl index 045b7c28eb1e..f655803beaf4 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -295,6 +295,8 @@ end Base.first(x::NDArray) = _first(x) +Base.endof(x::NDArray) = length(x) + """ slice(arr :: NDArray, start:stop) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 367de8177126..d6259b317c03 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -161,6 +161,13 @@ function test_first() end end # function test_first +function test_endof() + info("NDArray::endof") + let A = [1 2; 3 4; 5 6], x = mx.NDArray(A) + @test endof(A) == endof(x) + end +end # function test_endof + function test_plus() dims = rand_dims() t1, a1 = rand_tensors(dims) @@ -789,6 +796,7 @@ end # function test_size() test_slice() test_linear_idx() test_first() + test_endof() test_plus() test_minus() test_mul() From fd7fb79cfa27dbac515d80b829e88a13c91fc66d Mon Sep 17 00:00:00 2001 
From: Iblis Lin Date: Sun, 3 Dec 2017 01:02:50 +0800 Subject: [PATCH 574/630] ndarray: make _plus type stable (#352) --- src/ndarray.jl | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index f655803beaf4..91cb7d106ea9 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -571,10 +571,10 @@ function add_to!(dst::NDArray{T}, args::NDArrayOrReal...) where T if isa(arg, Real) _plus_scalar(dst, scalar = convert(T, arg), out = dst) else - _plus(dst, arg, out = dst) + _plus!(dst, arg) end end - return dst + dst end import Base: + @@ -1100,6 +1100,9 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) # remapping to solving type unstablility ################################################################################ +@_remap _plus(x::NDArray, y::NDArray) _plus(x, y) +@_remap _plus!(x::NDArray, y::NDArray) _plus(x, y) + @_remap _minus(x::NDArray, y::NDArray) _minus(x, y) @_remap _minus!(x::NDArray, y::NDArray) _minus(x, y) @@ -1228,6 +1231,8 @@ const _op_import_bl = [ # import black list; do not import these funcs "dot", "transpose", "prod", + + "_plus", "_minus", ] From b9855c484c66f56ec52eef8636a4c34be524a4d3 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 4 Dec 2017 11:45:20 +0800 Subject: [PATCH 575/630] ndarray: type convert of _mul_scalar (#356) Ref: #353 --- src/ndarray.jl | 5 +++-- test/unittest/ndarray.jl | 7 +++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 91cb7d106ea9..e818dca64178 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -630,13 +630,14 @@ broadcast_(::typeof(-), x::Real, y::NDArray) = x - y Elementwise multiplication into `dst` of either a scalar or an `NDArray` of the same shape. Inplace updating. 
""" -function mul_to!(dst::NDArray{T}, arg::NDArrayOrReal) where T +function mul_to!(dst::NDArray, arg::NDArrayOrReal) @assert dst.writable if isa(arg, Real) - _mul_scalar(dst, scalar = convert(T, arg), out = dst) + _mul_scalar(dst, scalar = arg, out = dst) else _mul(dst, arg, out = dst) end + dst end import Base: * diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index d6259b317c03..7ae60e43d13a 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -329,6 +329,13 @@ function test_mul() y = x .* x @test copy(y) == [1. 4.] end + + info("NDArray::mul::scalar::type convert") + let x = mx.NDArray([1, 2, 3]) + y = x .* π + @test eltype(x) == Int + @test copy(y) == [3, 6, 9] + end end function test_div() From ad57be93073b1ea11411a5bdc4a70e68db56fbbe Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 4 Dec 2017 11:48:25 +0800 Subject: [PATCH 576/630] ndarray: type convertion of _plus_scalar (#360) Ref: #353 --- src/ndarray.jl | 4 ++-- test/unittest/ndarray.jl | 9 +++++++++ 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index e818dca64178..01f99bfccc31 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -565,11 +565,11 @@ end Add a bunch of arguments into `dst`. Inplace updating. """ -function add_to!(dst::NDArray{T}, args::NDArrayOrReal...) where T +function add_to!(dst::NDArray, args::NDArrayOrReal...) 
@assert dst.writable for arg in args if isa(arg, Real) - _plus_scalar(dst, scalar = convert(T, arg), out = dst) + _plus_scalar(dst, scalar = arg, out = dst) else _plus!(dst, arg) end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 7ae60e43d13a..7136b994df75 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -220,6 +220,15 @@ function test_plus() @test copy(x .+ 42) == [43 44; 45 46] @test copy(0 .+ x .+ y .+ 41) == [43 44; 45 46] end + + info("NDArray::plus::scalar::type convert") + let x = mx.NDArray([1, 2, 3]) + y = x .+ 0.5 + @test copy(y) == copy(x) + + y = x .+ 2.9 + @test copy(y) == [3, 4, 5] + end end function test_minus() From 642b17b1c06f81ecf485588f00006b581d53ae41 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 4 Dec 2017 11:53:34 +0800 Subject: [PATCH 577/630] ndarray: type convertion of _rdiv_scalar (#358) Ref: #353 --- src/ndarray.jl | 5 +++-- test/unittest/ndarray.jl | 7 +++++++ 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 01f99bfccc31..013e6b5ec3ff 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -680,9 +680,10 @@ end Elementwise divide a scalar by an `NDArray`. Inplace updating. 
""" -function rdiv_from!(x::Real, y::NDArray{T}) where {T} +function rdiv_from!(x::Real, y::NDArray) @assert y.writable - _rdiv_scalar(y, scalar = convert(T, x), out = y) + _rdiv_scalar(y, scalar = x, out = y) + y end import Base: / diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 7136b994df75..1a66034c3801 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -417,6 +417,13 @@ function test_rdiv() y = 1 ./ Float32[1 2; 3 4] @test copy(x) ≈ y end + + info("NDArray:rdiv::type convert") + let x = mx.NDArray([1, 2, 3]) + y = 5.5 ./ x + @test eltype(y) == Int # this differs from julia + @test copy(y) == [5, 2, 1] + end end # function test_rdiv From ce0e237895bdc58051de8b489893c9823f43a798 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 6 Dec 2017 23:01:42 +0800 Subject: [PATCH 578/630] doc: sort out table for ndarray api (#362) --- docs/src/api.md | 15 ++++++++++++++- docs/src/api/ndarray.md | 27 +++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/docs/src/api.md b/docs/src/api.md index fa48c540721a..4984129863d0 100644 --- a/docs/src/api.md +++ b/docs/src/api.md @@ -1,5 +1,18 @@ # API Documentation ```@contents -Pages = ["api/context.md", "api/model.md", "api/initializers.md", "api/optimizers.md", "api/callbacks.md", "api/metric.md", "api/io.md", "api/ndarray.md", "api/symbolic-node.md", "api/nn-factory.md", "api/executor.md", "api/visualize.md"] +Pages = [ + "api/symbolic-node.md", + "api/ndarray.md", + "api/context.md", + "api/model.md", + "api/initializers.md", + "api/optimizers.md", + "api/callbacks.md", + "api/metric.md", + "api/io.md", + "api/nn-factory.md", + "api/executor.md", + "api/visualize.md", +] ``` diff --git a/docs/src/api/ndarray.md b/docs/src/api/ndarray.md index 4c6832c58969..660bd95fc070 100644 --- a/docs/src/api/ndarray.md +++ b/docs/src/api/ndarray.md @@ -1,5 +1,32 @@ # NDArray API +## Arithmetic Operations + +In the following example `y` can be a `Real` value or 
another `NDArray` + +| API | Example | | +|-----|----------|----------------------------| +| `+` | `x .+ y` | Elementwise summation | +| `-` | `x .- y` | Elementwise minus | +| `*` | `x .* y` | Elementwise multiplication | +| `/` | `x ./ y` | Elementwise division | +| `^` | `x .^ y` | Elementwise power | + + +## Trigonometric functions + +| API | Example | | +|----------------|------------|-----------------------------| +| [`sin`](@ref) | `sin.(x)` | Elementwise sine | +| [`cos`](@ref) | `cos.(x)` | Elementwise cosine | +| [`tan`](@ref) | `tan.(x)` | Elementwise tangent | +| [`asin`](@ref) | `asin.(x)` | Elementwise inverse sine | +| [`acos`](@ref) | `acos.(x)` | Elementwise inverse cosine | +| [`atan`](@ref) | `atan.(x)` | Elementwise inverse tangent | + + +## Reference + ```@autodocs Modules = [MXNet.mx] Pages = ["ndarray.jl"] From f8d4f620e2371ac1cb87a32c5d7037aa947efbb8 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 7 Dec 2017 09:32:12 +0800 Subject: [PATCH 579/630] ndarray: type convertion of _minus_scalar (#354) Ref: #353 --- src/ndarray.jl | 5 +++-- test/unittest/ndarray.jl | 5 +++++ 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 013e6b5ec3ff..0d8845f2ad5f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -598,13 +598,14 @@ broadcast_(::typeof(+), x::Real, y::NDArray) = x + y Subtract a bunch of arguments from `dst`. Inplace updating. 
""" -function sub_from!(dst::NDArray{T}, arg::NDArrayOrReal) where T +function sub_from!(dst::NDArray, arg::NDArrayOrReal) @assert dst.writable if isa(arg, Real) - _minus_scalar(dst, scalar = convert(T, arg), out = dst) + _minus_scalar(dst, scalar = arg, out = dst) else _minus!(dst, arg) end + dst end import Base: - diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 1a66034c3801..f111f9443141 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -281,6 +281,11 @@ function test_minus() @test t6 - scalar_small ≈ copy(a6 .- scalar_small) @test t6 - scalar_large ≈ copy(a6 .- scalar_large) + info("NDArray::minus::scalar::type convert") + let x = mx.NDArray([1, 2, 3]) + @test copy(x .- π) ≈ [-2, -1, 0] + end + info("NDArray::minus::type stablility") let x = mx.zeros(dims), y = mx.ones(dims) @inferred x - y From 14f974849d133de974b694b2f2caf4be0f4408b8 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 8 Dec 2017 10:31:32 +0800 Subject: [PATCH 580/630] ndarray: do not auto-import _full, _ones and _zeros (#359) --- src/ndarray.jl | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 0d8845f2ad5f..8e60ee4e792c 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -737,7 +737,7 @@ end Create an `NDArray` filled with the value `x`, like `Base.fill`. 
""" -function fill(x, dims::NTuple{N, Integer}, ctx::Context=cpu()) where N +function fill(x, dims::NTuple{N,Integer}, ctx::Context=cpu()) where N arr = empty(typeof(x), dims, ctx) arr[:] = x arr @@ -1224,6 +1224,9 @@ function _get_ndarray_function_def(name :: String) end const _op_import_bl = [ # import black list; do not import these funcs + "_full", # we already have `mx.fill` + "_ones", # we already have `mx.ones` + "_zeros", # we already have `mx.zeros` "mean", "reshape", "sum", From daf787c992dc4fa2d09b5a06cb08e509186ea646 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 9 Dec 2017 13:46:06 +0800 Subject: [PATCH 581/630] ndarray: make trigonometric functions dot-call only (#361) - Also, renamed arc* funcs to a* to keep consistent with Base - deprecated non-dot-call --- NEWS.md | 16 +++++++++++ src/MXNet.jl | 1 + src/broadcast.jl | 9 ++++++ src/deprecated.jl | 8 ++++++ src/ndarray.jl | 59 +++++++++++++++++++++++++++++++++------- src/util.jl | 10 ++++++- test/unittest/ndarray.jl | 22 +++++++++++++++ test/unittest/util.jl | 2 ++ 8 files changed, 116 insertions(+), 11 deletions(-) create mode 100644 src/broadcast.jl diff --git a/NEWS.md b/NEWS.md index 307e8180eb71..79df75eee52c 100644 --- a/NEWS.md +++ b/NEWS.md @@ -32,6 +32,22 @@ ``` +## API Changes + +### `NDArray` + +* Please use dot-call on following trigonometric functions. + Also, the `arc*` has been renamed to keep consistent with `Base`. 
+ (#TBD) + + * `sin.(x)` + * `cos.(x)` + * `tan.(x)` + * `arcsin(x)` -> `asin.(x)` + * `arccos(x)` -> `acos.(x)` + * `arctan(x)` -> `atan.(x)` + + # v0.3.0 (2017.11.16) * Update `libmxnet` to diff --git a/src/MXNet.jl b/src/MXNet.jl index d80bbd3d3cdd..631fbf0a2de0 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -26,6 +26,7 @@ include("base.jl") include("context.jl") include("util.jl") +include("broadcast.jl") include("ndarray.jl") include("random.jl") diff --git a/src/broadcast.jl b/src/broadcast.jl new file mode 100644 index 000000000000..a5fdacd829d0 --- /dev/null +++ b/src/broadcast.jl @@ -0,0 +1,9 @@ +using TakingBroadcastSeriously: Broadcasted, unwrap + +for f in :[tan, asin, acos, atan, + sinh, cosh, tanh, asinh, acosh, atanh].args + # copy from TakingBroadcastSeriously + @eval Base.$f(a::Broadcasted...) = Broadcasted(broadcast_($f, unwrap.(a)...)) + @eval Base.$f(a::Broadcasted, b) = Broadcasted(broadcast_($f, unwrap(a), b)) + @eval Base.$f(b, a::Broadcasted) = Broadcasted(broadcast_($f, b, unwrap(a))) +end diff --git a/src/deprecated.jl b/src/deprecated.jl index b2816fad5432..dc19f5663b12 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -8,3 +8,11 @@ # srand (#282) @deprecate srand!(seed_state::Int) srand(seed_state) + +# v0.4 +@deprecate sin(x::NDArray) sin.(x) +@deprecate cos(x::NDArray) cos.(x) +@deprecate tan(x::NDArray) tan.(x) +@deprecate arcsin(x::NDArray) asin.(x) +@deprecate arccos(x::NDArray) acos.(x) +@deprecate arctan(x::NDArray) atan.(x) diff --git a/src/ndarray.jl b/src/ndarray.jl index 8e60ee4e792c..37894882bc33 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -993,7 +993,11 @@ end const _mxsig = Dict{Symbol,Expr}() -function _autoimport(name::Symbol) +function _autoimport(name::Symbol, sig::Expr) + if name == :broadcast_ + name = _broadcast_target(sig) + end + if isdefined(Base, name) :(import Base: $name) else @@ -1010,11 +1014,28 @@ function _outexpr(name::Symbol, x #= the first arg of `sig` =#) end end 
+_broadcast_target(sig::Expr) = sig.args[2].args[].args[end] + +""" +Generate docstring from function signature +""" +function _docsig(fname::Symbol, sig::Expr) + if fname !== :broadcast_ + " $sig" + else + name = _broadcast_target(sig) + sig_ = Expr(:call, Symbol(name, "."), sig.args[3:end]...) + str = " $sig_" + @eval @doc $str $name + "" + end +end + macro _remap(sig::Expr, imp::Expr) fname = (sig.head == :call) ? sig.args[1] : sig.args[1].args[1] # case of `where` opname = string(imp.args[1]) - import_expr = _autoimport(fname) + import_expr = _autoimport(fname, sig) if isa(imp.args[2], Expr) && imp.args[2].head == :parameters ndin = imp.args[3:end] @@ -1055,7 +1076,7 @@ macro _remap(sig::Expr, imp::Expr) $retexpr end - docstr = " $sig" + docstr = _docsig(fname, sig) func_def = Expr(:function, sig, func_body) esc(quote @@ -1099,6 +1120,14 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap prod(arr::NDArray) prod(arr) @_remap prod(arr::NDArray, dims) prod(arr; axis = 0 .- dims, keepdims = true) +# trigonometric functions, remap to keep consistent with Base +@_remap broadcast_(::typeof(sin), x::NDArray) sin(x) +@_remap broadcast_(::typeof(cos), x::NDArray) cos(x) +@_remap broadcast_(::typeof(tan), x::NDArray) tan(x) +@_remap broadcast_(::typeof(asin), x::NDArray) arcsin(x) +@_remap broadcast_(::typeof(acos), x::NDArray) arccos(x) +@_remap broadcast_(::typeof(atan), x::NDArray) arctan(x) + ################################################################################ # remapping to solving type unstablility ################################################################################ @@ -1227,19 +1256,29 @@ const _op_import_bl = [ # import black list; do not import these funcs "_full", # we already have `mx.fill` "_ones", # we already have `mx.ones` "_zeros", # we already have `mx.zeros` - "mean", - "reshape", - "sum", + + # arithmetic + "_plus", + "_minus", + + "dot", "max", "max_axis", + "mean", "min", "min_axis", - "dot", - 
"transpose", "prod", + "reshape", + "sum", + "transpose", - "_plus", - "_minus", + # trigonometric + "sin", + "cos", + "tan", + "arcsin", + "arccos", + "arctan", ] macro _import_ndarray_functions() diff --git a/src/util.jl b/src/util.jl index b0f91c824566..c53fb9a597fa 100644 --- a/src/util.jl +++ b/src/util.jl @@ -210,7 +210,15 @@ function _firstarg(sig::Expr) if sig.head ∈ (:where, :(::)) _firstarg(sig.args[1]) elseif sig.head == :call - i = (sig.args[2] isa Expr && sig.args[2].head == :parameters) ? 3 : 2 + i = if sig.args[2] isa Expr && sig.args[2].head == :parameters + # there are some keyward arguments locate at args[2] + 3 + elseif sig.args[1] === :broadcast_ + # case of broadcasting, skip the first arg `::typeof(...)` + 3 + else + 2 + end _firstarg(sig.args[i]) end end diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index f111f9443141..32fabd3c4bc5 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -814,6 +814,27 @@ function test_size() end end # function test_size() +function check_trigonometric(f) + info("NDArray::$f") + let A = [.1 .2; .3 .4], x = mx.NDArray(A) + B = f.(A) + y = f.(x) + @test copy(y) ≈ B + end + + let A = Float32[.1 .2; .3 .4], x = mx.NDArray(A) + B = f.(A) + y = f.(x) + @test copy(y) ≈ B + end +end # function check_trigonometric + +function test_trigonometric() + for f ∈ [sin, cos, tan, asin, acos, atan] + check_trigonometric(f) + end +end # function test_trigonometric + ################################################################################ # Run tests ################################################################################ @@ -848,6 +869,7 @@ end # function test_size() test_transpose() test_show() test_size() + test_trigonometric() end end diff --git a/test/unittest/util.jl b/test/unittest/util.jl index d27b509bd010..823decffd442 100644 --- a/test/unittest/util.jl +++ b/test/unittest/util.jl @@ -15,6 +15,8 @@ function test_firstarg() @test mx._firstarg(:(f(x::mx.NDArray{T}, y) 
where {T})) == :x @test mx._firstarg(:(f(x::mx.NDArray{T,N}, y) where {T,N})) == :x @test mx._firstarg(:(f(x::mx.NDArray{T,N} where {T,N}, y))) == :x + @test mx._firstarg(:(broadcast_(::typeof(asin), x::mx.NDArray))) == :x + @test mx._firstarg(:(broadcast_(::typeof(asin), x::mx.NDArray, y::mx.NDArray))) == :x end # function test_firstarg From 233fcfc5a89d69037290964c38066bbe0bda6a87 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 9 Dec 2017 14:26:02 +0800 Subject: [PATCH 582/630] ndarray: change internal api of plus to help autograd (#364) address https://github.com/dmlc/MXNet.jl/pull/274#issuecomment-349951876 --- src/ndarray.jl | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 37894882bc33..c9b17924f998 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -587,8 +587,10 @@ Summation. Multiple arguments of either scalar or `NDArray` could be added together. Note at least the first or second argument needs to be an `NDArray` to avoid ambiguity of built-in summation. """ -+(x::NDArray, ys::NDArrayOrReal...) = add_to!(copy(x, context(x)), ys...) -+(x::Real, y::NDArray, zs::NDArrayOrReal...) = add_to!(copy(y, context(y)), x, zs...) 
++(x::NDArray) = x ++(x::NDArray, y::NDArray) = _plus(x, y) ++(x::NDArray, y::Real) = _plus_scalar(x, scalar = y) ++(y::Real, x::NDArray) = _plus_scalar(x, scalar = y) broadcast_(::typeof(+), x::NDArray, y::NDArrayOrReal) = x + y broadcast_(::typeof(+), x::Real, y::NDArray) = x + y @@ -1205,20 +1207,16 @@ function _get_ndarray_function_def(name :: String) args = MX_handle[] end - if length(output_vars) > 0 - output_handles = map((x) -> Base.cconvert(MX_handle, x), output_vars) - # XXX: Julia 0.4 has bug: [Array{MX_handle}] == Array{MX_handle} - output_handles_pp = Array{Array{MX_handle}}(1) - output_handles_pp[1] = Base.cconvert(Ptr{MX_handle}, output_handles) + output_handles_pp = if length(output_vars) > 0 + [map(x -> x.handle, output_vars)] else - output_handles_pp = [Base.convert(Ptr{MX_handle}, 0)] + [Ptr{MX_handle}(C_NULL)] end num_outputs_p = [convert(Cint, num_outputs)] kw_keys_str = String[string(x[1]) for x in kwargs] kw_vals_str = String[dump_mx_param(x[2]) for x in kwargs] - #op_handle = _get_cached_libmx_op_handle($(QuoteNode(name))) op_handle = _get_cached_libmx_op_handle($(name)) @mxcall(:MXImperativeInvoke, (MX_handle, Cint, Ptr{MX_handle}, @@ -1229,13 +1227,13 @@ function _get_ndarray_function_def(name :: String) length(kwargs), kw_keys_str, kw_vals_str) if out == nothing - handle_array = unsafe_wrap(Array, output_handles_pp[], num_outputs_p[]) - handle_array = [MX_NDArrayHandle(x) for x in handle_array] - arrays = [NDArray(hdr) for hdr in handle_array] - if length(arrays) == 1 - return arrays[1] + n = num_outputs_p[] + hdls = unsafe_wrap(Array{MX_handle}, output_handles_pp[], n) + xs = NDArray[NDArray(MX_NDArrayHandle(x)) for x in hdls] + if n == 1 + return xs[] else - return arrays + return xs end else return out From eb819b03055fb4ba984ff9cbb0a0c78ec98eb2a2 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 9 Dec 2017 14:28:40 +0800 Subject: [PATCH 583/630] ndarray: change internal api of mul/div to help autograd (#366) address 
https://github.com/dmlc/MXNet.jl/pull/274#issuecomment-349951876 --- src/ndarray.jl | 28 +++++++++++++--------------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index c9b17924f998..28374943a6c9 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -650,12 +650,12 @@ import Base: * Currently only multiplication a scalar with an `NDArray` is implemented. """ -*(x:: NDArray, y::Real) = x .* y -*(x::Real, y::NDArray) = y .* x +*(x::NDArray, y::Real) = _mul_scalar(x, scalar = y) +*(y::Real, x::NDArray) = _mul_scalar(x, scalar = y) -broadcast_(::typeof(*), x::NDArray, y::NDArrayOrReal) = - mul_to!(copy(x, context(x)), y) -broadcast_(::typeof(*), x::Real, y::NDArray) = y .* x +broadcast_(::typeof(*), x::NDArray, y::Real) = x * y +broadcast_(::typeof(*), y::Real, x::NDArray) = x * y +broadcast_(::typeof(*), x::NDArray, y::NDArray) = _mul(x, y) """ *(A::NDArray, B::NDArray) @@ -703,25 +703,23 @@ of the same shape. * Matrix division (solving linear systems) is not implemented yet. 
""" -/(x::NDArray, y::Real) = x ./ y +/(x::NDArray, y::Real) = _div_scalar(x, scalar = y) -broadcast_(::typeof(/), x::NDArray, y::NDArrayOrReal) = - div_from!(copy(x, context(x)), y) - -broadcast_(::typeof(/), x::Real, y::NDArray) = - rdiv_from!(x, copy(y, context(y))) +broadcast_(::typeof(/), x::NDArray, y::NDArray) = _div(x, y) +broadcast_(::typeof(/), x::NDArray, y::Real) = _div_scalar(x, scalar = y) +broadcast_(::typeof(/), y::Real, x::NDArray) = _rdiv_scalar(x, scalar = y) import Base: ^ # document of `.^` is merged into SymbolicNode's broadcast_(::typeof(^), x::NDArray, y::NDArray) = _power(x, y) -broadcast_(::typeof(^), x::NDArray, s::Real) = _power_scalar(x, scalar=s) -broadcast_(::typeof(^), s::Real, x::NDArray) = _rpower_scalar(x, scalar=s) +broadcast_(::typeof(^), x::NDArray, s::Real) = _power_scalar(x, scalar = s) +broadcast_(::typeof(^), s::Real, x::NDArray) = _rpower_scalar(x, scalar = s) broadcast_(::typeof(^), ::Irrational{:e}, x::NDArray) = exp(x) -broadcast_(::typeof(^), x::NDArray, s::Irrational) = _power_scalar(x, scalar=s) -broadcast_(::typeof(^), s::Irrational, x::NDArray) = _rpower_scalar(x, scalar=s) +broadcast_(::typeof(^), x::NDArray, s::Irrational) = _power_scalar(x, scalar = s) +broadcast_(::typeof(^), s::Irrational, x::NDArray) = _rpower_scalar(x, scalar = s) """ fill!(arr::NDArray, x) From 2ca5565b1b3a8a4cc219ab92b03029711731ccd1 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 9 Dec 2017 14:28:55 +0800 Subject: [PATCH 584/630] ndarray: change internal api of minus to help autograd (#365) address https://github.com/dmlc/MXNet.jl/pull/274#issuecomment-349951876 Although this patch cannot pass `@inferred`, but `code_warntype` give me this: ```julia end::MXNet.mx.NDArray{_,_} where _ where _ ``` And seems it doesn't hurt performance. 
--- src/ndarray.jl | 7 ++++--- test/unittest/ndarray.jl | 6 ------ 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 28374943a6c9..7854a6ec9a4c 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -620,9 +620,10 @@ import Base: - Subtraction `x - y`, of scalar types or `NDArray`. Or create the negative of `x`. """ --(x::NDArray) = _mul_scalar(x, scalar=-one(eltype(x))) --(x::NDArray, y::NDArrayOrReal) = sub_from!(copy(x, context(x)), y) --(x::Real, y::NDArray) = -y .+ x +-(x::NDArray) = _mul_scalar(x, scalar = -one(eltype(x))) +-(x::NDArray, y::NDArray) = _minus(x, y) +-(x::NDArray, y::Real) = _minus_scalar(x, scalar = y) +-(y::Real, x::NDArray) = _rminus_scalar(x, scalar = y) broadcast_(::typeof(-), x::NDArray, y::NDArrayOrReal) = x - y broadcast_(::typeof(-), x::Real, y::NDArray) = x - y diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 32fabd3c4bc5..9f69503f7f13 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -285,12 +285,6 @@ function test_minus() let x = mx.NDArray([1, 2, 3]) @test copy(x .- π) ≈ [-2, -1, 0] end - - info("NDArray::minus::type stablility") - let x = mx.zeros(dims), y = mx.ones(dims) - @inferred x - y - @inferred x .- y - end end function test_mul() From 12198f08e42fdcadab6ef183f5691ffe586f8641 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 11 Dec 2017 03:12:06 +0800 Subject: [PATCH 585/630] ndarray: copy(AbstractArray, context) (#367) julia> copy(1:4, mx.cpu()) 4 mx.NDArray{Int64,1} @ CPU0: 1 2 3 4 julia> copy(1.:4, mx.cpu()) 4 mx.NDArray{Float64,1} @ CPU0: 1.0 2.0 3.0 4.0 --- NEWS.md | 18 ++++++++++++++++++ src/ndarray.jl | 11 +++++------ test/unittest/ndarray.jl | 11 +++++++++++ 3 files changed, 34 insertions(+), 6 deletions(-) diff --git a/NEWS.md b/NEWS.md index 79df75eee52c..2ed915e80e56 100644 --- a/NEWS.md +++ b/NEWS.md @@ -32,6 +32,24 @@ ``` +* `copy(AbstractArray, context)` is implemented now. 
(#TBD) + + ```julia + julia> copy(1:4, mx.cpu()) + 4 mx.NDArray{Int64,1} @ CPU0: + 1 + 2 + 3 + 4 + + julia> copy(1.:4, mx.cpu()) + 4 mx.NDArray{Float64,1} @ CPU0: + 1.0 + 2.0 + 3.0 + 4.0 + ``` + ## API Changes ### `NDArray` diff --git a/src/ndarray.jl b/src/ndarray.jl index 7854a6ec9a4c..756e5de35419 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -401,9 +401,7 @@ which furthur translates into create a **copy** of the sub-array for Julia `Array`, while for `NDArray`, this is a *slice* that shares the memory. """ -function getindex(arr::NDArray, ::Colon) - return arr -end +getindex(arr::NDArray, ::Colon) = arr """ Shortcut for [`slice`](@ref). @@ -411,9 +409,7 @@ Shortcut for [`slice`](@ref). copy of the sub-array, while here we simply call `slice`, which shares the underlying memory. """ -function getindex(arr::NDArray, idx::UnitRange{Int}) - slice(arr, idx) -end +getindex(arr::NDArray, idx::UnitRange{Int}) = slice(arr, idx) getindex(arr::NDArray) = _first(arr) @@ -503,6 +499,9 @@ copy(x::NDArray{T,D}, ctx::Context) where {T,D} = copy(x::Array{T}, ctx::Context) where {T<:DType} = copy!(empty(T, size(x), ctx), x) +copy(x::AbstractArray, ctx::Context) = + copy!(empty(eltype(x), size(x), ctx), collect(x)) + """ convert(::Type{Array{<:Real}}, x::NDArray) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 9f69503f7f13..398912bc0201 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -44,6 +44,17 @@ function test_copy() array2 = copy(array, mx.cpu()) tensor2 = copy(array2) @test tensor ≈ tensor2 + + info("NDArray::copy::AbstractArray") + let x = copy(1:4, mx.cpu()) + @test eltype(x) == Int + @test copy(x) == [1, 2, 3, 4] + end + + let x = copy(1.:4, mx.cpu()) + @test eltype(x) == Float64 + @test copy(x) ≈ [1., 2, 3, 4] + end end function test_deepcopy() From 2d7cdc6a558e34ad061a57418e79b5f1551cea73 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 11 Dec 2017 03:21:30 +0800 Subject: [PATCH 586/630] Reexporting NDArray and 
SymbolicNode (#306) * Reexporting NDArray and SymbolicNode * Executor * context and empty * exporting more * context * fix test --- NEWS.md | 57 +++++++++++++++++++++++++++++++ REQUIRE | 1 + deps/build.jl | 1 - src/MXNet.jl | 85 +++++++++++++++++++++++++++++++++++++++++++--- src/context.jl | 11 ++---- src/initializer.jl | 5 +-- src/io.jl | 14 ++++---- src/metric.jl | 8 ++--- src/optimizer.jl | 6 ++-- 9 files changed, 158 insertions(+), 30 deletions(-) diff --git a/NEWS.md b/NEWS.md index 2ed915e80e56..bb08c1f9d7d2 100644 --- a/NEWS.md +++ b/NEWS.md @@ -1,5 +1,62 @@ # v0.4.0 (#TBD) +* Following material from `mx` module got exported (#TBD): + * `NDArray` + * `context()` + * `empty()` + + * `SymbolicNode` + * `Variable` + * `@var` + + * `Context` + * `cpu()` + * `gpu()` + + * `AbstractModel` + * `FeedForward` + * `predict()` + + * `MLP` + + * `Executor` + * `bind()` + * `simple_bind()` + * `forward()` + * `backward()` + + * `AbstractEvalMetric` + * `ACE` + * `Accuracy` + * `MSE` + * `MultiACE` + * `MultiMetric` + * `NMSE` + * `SeqMetric` + + * `KVStore` + + * `AbstractInitializer` + * `UniformInitializer` + * `NormalInitializer` + * `XavierInitializer` + + * `AbstractOptimizer` + * `AdaDelta` + * `AdaGrad` + * `ADAM` + * `AdaMax` + * `Nadam` + * `RMSProp` + * `SGD` + + * `AbstractDataProvider` + * `AbstractDataBatch` + * `ArrayDataProvider` + * `ArrayDataBatch` + + * `to_graphviz()` + ## New APIs ### `SymbolicNode` diff --git a/REQUIRE b/REQUIRE index 22caea9072b7..5a76dc543b25 100644 --- a/REQUIRE +++ b/REQUIRE @@ -4,3 +4,4 @@ BinDeps JSON MacroTools TakingBroadcastSeriously +Reexport diff --git a/deps/build.jl b/deps/build.jl index 8b4c254cbb39..b09e5f44242e 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -1,4 +1,3 @@ -using Compat import JSON ################################################################################ diff --git a/src/MXNet.jl b/src/MXNet.jl index 631fbf0a2de0..3583c140b64c 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -2,15 +2,13 @@ 
__precompile__() module MXNet +using Reexport + # we put everything in the namespace mx, because there are a lot of # functions with the same names as built-in utilities like "zeros", etc. export mx module mx -using Compat -import Compat.String -import Compat.view - import Base.Iterators: filter using Formatting @@ -22,6 +20,83 @@ import TakingBroadcastSeriously: broadcast_ import Base: round, ceil, floor, cos, sin, abs, sign, exp, sqrt, exp, log, norm, transpose +############################################################################### +# exports +############################################################################### + +# symbolic-node.jl +export SymbolicNode, + Variable, + @var + +# ndarray.jl +export NDArray, + context, + empty + +# executor.jl +export Executor, + bind, + simple_bind, + forward, + backward + +# context.jl +export Context, + cpu, + gpu + +# model.jl +export AbstractModel, + FeedForward, + predict + +# nn-factory.jl +export MLP + +# metric.jl +export AbstractEvalMetric, + ACE, + Accuracy, + MSE, + MultiACE, + MultiMetric, + NMSE, + SeqMetric + +# kvstore.jl +export KVStore + +# initializer.jl +export AbstractInitializer, + UniformInitializer, + NormalInitializer, + XavierInitializer + +# optimizer.jl +export AbstractOptimizer, + AdaDelta, + AdaGrad, + ADAM, + AdaMax, + Nadam, + RMSProp, + SGD + +# io.jl +export AbstractDataProvider, + AbstractDataBatch, + DataBatch, + ArrayDataProvider, + ArrayDataBatch + +# visualize.jl +export to_graphviz + +############################################################################### +# includes +############################################################################### + include("base.jl") include("context.jl") @@ -53,4 +128,6 @@ include("deprecated.jl") end # mx +@reexport using .mx + end # module MXNet diff --git a/src/context.jl b/src/context.jl index d627e7b5e7f7..2b83eebe97a7 100644 --- a/src/context.jl +++ b/src/context.jl @@ -12,9 +12,8 @@ end Context(dev_type :: 
Union{CONTEXT_TYPE, Int}, dev_id :: Int = 0) = Context(convert(CONTEXT_TYPE, dev_type), dev_id) -function Base.show(io :: IO, ctx :: Context) +Base.show(io::IO, ctx::Context) = print(io, "$(ctx.device_type)$(ctx.device_id)") -end """ cpu(dev_id) @@ -25,9 +24,7 @@ operations when no context is specified. # Arguments * `dev_id::Int = 0`: the CPU id. """ -function cpu(dev_id::Int=0) - return Context(CPU, dev_id) -end +cpu(dev_id::Int = 0) = Context(CPU, dev_id) """ gpu(dev_id) @@ -37,6 +34,4 @@ Get a GPU context with a specific id. The K GPUs on a node is typically numbered # Arguments * `dev_id :: Int = 0` the GPU device id. """ -function gpu(dev_id::Int=0) - return Context(GPU, dev_id) -end +gpu(dev_id::Int = 0) = return Context(GPU, dev_id) diff --git a/src/initializer.jl b/src/initializer.jl index f741d3e5a279..157958586642 100644 --- a/src/initializer.jl +++ b/src/initializer.jl @@ -118,7 +118,7 @@ struct NormalInitializer <: AbstractInitializer σ :: AbstractFloat end """ - NormalIninitializer(; mu=0, sigma=0.01) + NormalInitializer(; mu=0, sigma=0.01) Construct a `NormalInitializer` with mean `mu` and variance `sigma`. """ @@ -156,7 +156,8 @@ struct XavierInitializer <: AbstractInitializer magnitude :: Float64 end -XavierInitializer(; distribution = xv_uniform, regularization = xv_avg, magnitude = 3.0) = XavierInitializer(distribution, regularization, magnitude) +XavierInitializer(; distribution = xv_uniform, regularization = xv_avg, magnitude = 3.0) = + XavierInitializer(distribution, regularization, magnitude) function _init_weight(self :: XavierInitializer, name :: Base.Symbol, array :: NDArray) dims = size(array) diff --git a/src/io.jl b/src/io.jl index 597ea8a90c6e..c8dbee9b8e1a 100644 --- a/src/io.jl +++ b/src/io.jl @@ -25,7 +25,7 @@ abstract type AbstractDataProvider end Returns the mini-batch size of the provided data. All the provided data should have the same mini-batch size (i.e. the last dimension). 
""" -function get_batch_size end +get_batch_size """ provide_data(provider) -> Vector{Tuple{Base.Symbol, Tuple}} @@ -36,7 +36,7 @@ function get_batch_size end Returns a vector of (name, shape) pairs describing the names of the data it provides, and the corresponding shapes. """ -function provide_data end +provide_data """ provide_label(provider) -> Vector{Tuple{Base.Symbol, Tuple}} @@ -46,7 +46,7 @@ function provide_data end Returns a vector of (name, shape) pairs describing the names of the labels it provides, and the corresponding shapes. """ -function provide_label end +provide_label """ AbstractDataProviderState @@ -81,7 +81,7 @@ abstract type AbstractDataBatch end Returns the number of samples in this batch. This number should be greater than 0, but less than or equal to the batch size. This is used to indicate at the end of the data set, there might not be enough samples for a whole mini-batch. """ -function count_samples end +count_samples """ get_data(provider, batch) -> Vector{NDArray} @@ -94,7 +94,7 @@ Returns a vector of data in this batch, should be in the same order as declared The last dimension of each `NDArray` should always match the batch_size, even when `count_samples` returns a value less than the batch size. In this case, the data provider is free to pad the remaining contents with any value. """ -function get_data end +get_data """ get_label(provider, batch) -> Vector{NDArray} @@ -105,7 +105,7 @@ function get_data end Returns a vector of labels in this batch. Similar to [`get_data`](@ref). 
""" -function get_label end +get_label """ DataBatch @@ -547,7 +547,7 @@ function _get_iter_name(hdr :: MX_handle) return Symbol(unsafe_string(ref_name[])) end -const _iter_creator_cache = Dict{Symbol, MX_handle}() +const _iter_creator_cache = Dict{Symbol,MX_handle}() function _populate_iter_creator_cache!() empty!(_iter_creator_cache) h_creators = _get_iter_creators() diff --git a/src/metric.jl b/src/metric.jl index 3998af8efb8c..489df2ddd8b2 100644 --- a/src/metric.jl +++ b/src/metric.jl @@ -106,7 +106,7 @@ To calculate both mean-squared error [`Accuracy`](@ref) and log-loss [`ACE`](@re mx.fit(..., eval_metric = mx.MultiMetric([mx.Accuracy(), mx.ACE()])) ``` """ -mutable struct MultiMetric <: mx.AbstractEvalMetric +mutable struct MultiMetric <: AbstractEvalMetric metrics :: Vector{mx.AbstractEvalMetric} end @@ -122,7 +122,7 @@ function reset!(metric :: MultiMetric) nothing end -get(metric :: MultiMetric) = mapreduce(get, append!, metric.metrics) +get(metric::MultiMetric) = mapreduce(get, append!, metric.metrics) """ SeqMetric(metrics::Vector{AbstractEvalMetric}) @@ -136,8 +136,8 @@ and log-loss [`ACE`](@ref) for the second output: mx.fit(..., eval_metric = mx.SeqMetric([mx.Accuracy(), mx.ACE()])) ``` """ -mutable struct SeqMetric <: mx.AbstractEvalMetric - metrics :: Vector{mx.AbstractEvalMetric} +mutable struct SeqMetric <: AbstractEvalMetric + metrics :: Vector{AbstractEvalMetric} end function update!(metric::SeqMetric, labels::VecOfNDArray, preds::VecOfNDArray) diff --git a/src/optimizer.jl b/src/optimizer.jl index 89df56ba7050..8d46a9e407a1 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -62,7 +62,6 @@ function get_learning_rate end ################################################################################ # The learning rate module module LearningRate -using Compat import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate """ @@ -138,7 +137,6 @@ end 
################################################################################ # The Momentum module module Momentum -using Compat import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum """ @@ -226,9 +224,9 @@ store all the states needed for each weights. * `optimizer::AbstractOptimizer`: the underlying optimizer. """ -function get_updater(optimizer :: AbstractOptimizer) +function get_updater(optimizer::AbstractOptimizer) states = Dict{Int,Any}() - function updater(index :: Int, grad :: NDArray, weight :: NDArray) + function updater(index::Int, grad::NDArray, weight::NDArray) if !haskey(states, index) states[index] = create_state(optimizer, index, weight) end From db095288615a64b84318acd84571c938eb46af16 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 11 Dec 2017 03:23:46 +0800 Subject: [PATCH 587/630] example: fix parametric NDArray in lstm example (#370) and fix some depwarns fix #368 --- examples/char-lstm/config.jl | 43 +++++++++++++++++----------------- examples/char-lstm/lstm.jl | 4 ++-- examples/char-lstm/sampler.jl | 6 ++--- examples/char-lstm/seq-data.jl | 8 +++---- examples/char-lstm/train.jl | 8 +++---- 5 files changed, 34 insertions(+), 35 deletions(-) diff --git a/examples/char-lstm/config.jl b/examples/char-lstm/config.jl index 974989167ad1..b5961549675f 100644 --- a/examples/char-lstm/config.jl +++ b/examples/char-lstm/config.jl @@ -1,24 +1,23 @@ -const DROPOUT = 0 -const BATCH_SIZE = 32 -const SEQ_LENGTH = 32 -const DIM_HIDDEN = 256 -const DIM_EMBED = 256 -const LSTM_N_LAYER = 2 -const N_EPOCH = 21 -const BASE_LR = 0.01 -const WEIGHT_DECAY = 0.00001 -const CLIP_GRADIENT = 1 -const NAME = :ptb -const N_GPU = 4 -const USE_GPU = true -const DATA_TR_RATIO = 0.9 -const CKPOINT_PREFIX = joinpath(dirname(@__FILE__), "checkpoints/$NAME") +const DROPOUT = 0 +const BATCH_SIZE = 32 +const SEQ_LENGTH = 32 +const DIM_HIDDEN = 256 +const DIM_EMBED = 256 +const LSTM_N_LAYER = 2 +const N_EPOCH = 21 +const BASE_LR = 0.01 +const WEIGHT_DECAY = 
0.00001 +const CLIP_GRADIENT = 1 +const NAME = :ptb +const N_GPU = 1 +const USE_GPU = true +const DATA_TR_RATIO = 0.9 +const CKPOINT_PREFIX = joinpath(@__DIR__, "checkpoints/$NAME") -const BATCH_SIZE_SMP= 10 -const SAMPLE_LENGTH = 100 -const SAMPLE_START = 'a' - -const UNKNOWN_CHAR = Char(0) -const INPUT_FILE = joinpath(dirname(@__FILE__), "input.txt") -const VOCAB_FILE = joinpath(dirname(@__FILE__), "vocab.dat") +const BATCH_SIZE_SMP = 10 +const SAMPLE_LENGTH = 100 +const SAMPLE_START = 'a' +const UNKNOWN_CHAR = Char(0) +const INPUT_FILE = joinpath(@__DIR__, "input.txt") +const VOCAB_FILE = joinpath(@__DIR__, "vocab.dat") diff --git a/examples/char-lstm/lstm.jl b/examples/char-lstm/lstm.jl index d930240ba1a2..de6748df9420 100644 --- a/examples/char-lstm/lstm.jl +++ b/examples/char-lstm/lstm.jl @@ -123,12 +123,12 @@ mutable struct NLL <: mx.AbstractEvalMetric NLL() = new(0.0, 0) end -function mx.update!(metric :: NLL, labels :: Vector{mx.NDArray}, preds :: Vector{mx.NDArray}) +function mx.update!(metric::NLL, labels::Vector{<:mx.NDArray}, preds::Vector{<:mx.NDArray}) @assert length(labels) == length(preds) nll = 0.0 for (label, pred) in zip(labels, preds) @mx.nd_as_jl ro=(label, pred) begin - nll -= sum(log(max(broadcast_getindex(pred, round(Int,label+1), 1:length(label)), 1e-20))) + nll -= sum(log.(max.(broadcast_getindex(pred, round.(Int,label+1), 1:length(label)), 1e-20))) end end diff --git a/examples/char-lstm/sampler.jl b/examples/char-lstm/sampler.jl index ad34f344cbcd..df4647f4f893 100644 --- a/examples/char-lstm/sampler.jl +++ b/examples/char-lstm/sampler.jl @@ -1,6 +1,6 @@ -include(joinpath(dirname(@__FILE__), "config.jl")) -include(joinpath(dirname(@__FILE__), "lstm.jl")) -include(joinpath(dirname(@__FILE__), "seq-data.jl")) +include(joinpath(@__DIR__, "config.jl")) +include(joinpath(@__DIR__, "lstm.jl")) +include(joinpath(@__DIR__, "seq-data.jl")) using StatsBase using MXNet diff --git a/examples/char-lstm/seq-data.jl b/examples/char-lstm/seq-data.jl 
index 0aac5609dac5..1456ae94a1a3 100644 --- a/examples/char-lstm/seq-data.jl +++ b/examples/char-lstm/seq-data.jl @@ -60,7 +60,7 @@ end #--/provide #--eachbatch-part1 -function mx.eachbatch(p :: CharSeqProvider) +function mx.eachbatch(p::CharSeqProvider) data_all = [mx.zeros(shape) for (name, shape) in mx.provide_data(p)] label_all = [mx.zeros(shape) for (name, shape) in mx.provide_label(p)] @@ -73,7 +73,7 @@ function mx.eachbatch(p :: CharSeqProvider) #--eachbatch-part2 #... - function _text_iter() + function _text_iter(c::Channel) text = p.text n_batch = floor(Int, length(text) / p.batch_size / p.seq_len) @@ -100,11 +100,11 @@ function mx.eachbatch(p :: CharSeqProvider) copy!(label_all[i], label_jl[i]) end - produce(batch) + put!(c, batch) end end - return Task(_text_iter) + return Channel(_text_iter) end #--/eachbatch-part2 diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index 30578603cafc..000534f64d7b 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -1,6 +1,6 @@ -include(joinpath(dirname(@__FILE__), "config.jl")) -include(joinpath(dirname(@__FILE__), "lstm.jl")) -include(joinpath(dirname(@__FILE__), "seq-data.jl")) +include(joinpath(@__DIR__, "config.jl")) +include(joinpath(@__DIR__, "lstm.jl")) +include(joinpath(@__DIR__, "seq-data.jl")) # build vocabulary vocab = build_vocabulary(INPUT_FILE, VOCAB_FILE) @@ -29,7 +29,7 @@ data_val = CharSeqProvider(text_val, BATCH_SIZE, SEQ_LENGTH, vocab, NAME, if USE_GPU context = [mx.gpu(i) for i = 0:N_GPU-1] else - context = [mx.cpu()] + context = mx.cpu() end #--train From 27c66ecb61314d1b77810f6929a6f0a0d85061b4 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 12 Dec 2017 10:33:06 +0800 Subject: [PATCH 588/630] sym: overload `bind` (#372) fix this on REPL: WARNING: both mx and Base export "bind"; uses of it in module MXNet must be qualified --- src/executor.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/executor.jl b/src/executor.jl index 
cd4a9256eeca..c99517b6d76b 100644 --- a/src/executor.jl +++ b/src/executor.jl @@ -1,3 +1,5 @@ +import Base: bind + """ Executor From bfb1cc4fe5de6f42b3dc215680118e66e5d04803 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 15 Dec 2017 11:50:37 +0800 Subject: [PATCH 589/630] ndarray: remap hyperbolic function (#374) --- NEWS.md | 10 ++++++++++ src/deprecated.jl | 7 +++++++ src/ndarray.jl | 16 ++++++++++++++++ test/unittest/ndarray.jl | 27 +++++++++++++++++++++++++++ 4 files changed, 60 insertions(+) diff --git a/NEWS.md b/NEWS.md index bb08c1f9d7d2..703cb216a001 100644 --- a/NEWS.md +++ b/NEWS.md @@ -122,6 +122,16 @@ * `arccos(x)` -> `acos.(x)` * `arctan(x)` -> `atan.(x)` +* Please use dot-call on following hyperbolic functions. + Also, the `arc*` has been renamed to keep consistent with `Base`. + (#TBD) + + * `sinh.(x)` + * `cosh.(x)` + * `tanh.(x)` + * `arcsinh(x)` -> `asinh.(x)` + * `arccosh(x)` -> `acosh.(x)` + * `arctanh(x)` -> `atanh.(x)` # v0.3.0 (2017.11.16) diff --git a/src/deprecated.jl b/src/deprecated.jl index dc19f5663b12..8a8df0a56789 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -16,3 +16,10 @@ @deprecate arcsin(x::NDArray) asin.(x) @deprecate arccos(x::NDArray) acos.(x) @deprecate arctan(x::NDArray) atan.(x) + +@deprecate sinh(x::NDArray) sinh.(x) +@deprecate cosh(x::NDArray) cosh.(x) +@deprecate tanh(x::NDArray) tanh.(x) +@deprecate arcsinh(x::NDArray) asinh.(x) +@deprecate arccosh(x::NDArray) acosh.(x) +@deprecate arctanh(x::NDArray) atanh.(x) diff --git a/src/ndarray.jl b/src/ndarray.jl index 756e5de35419..900285ae5782 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1128,6 +1128,14 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap broadcast_(::typeof(acos), x::NDArray) arccos(x) @_remap broadcast_(::typeof(atan), x::NDArray) arctan(x) +# hyperbolic funcs, remap to keep consistent with Base +@_remap broadcast_(::typeof(sinh), x::NDArray) sinh(x) +@_remap broadcast_(::typeof(cosh), x::NDArray) cosh(x) 
+@_remap broadcast_(::typeof(tanh), x::NDArray) tanh(x) +@_remap broadcast_(::typeof(asinh), x::NDArray) arcsinh(x) +@_remap broadcast_(::typeof(acosh), x::NDArray) arccosh(x) +@_remap broadcast_(::typeof(atanh), x::NDArray) arctanh(x) + ################################################################################ # remapping to solving type unstablility ################################################################################ @@ -1275,6 +1283,14 @@ const _op_import_bl = [ # import black list; do not import these funcs "arcsin", "arccos", "arctan", + + # hyperbolic + "sinh", + "cosh", + "tanh", + "arcsinh", + "arccosh", + "arctanh", ] macro _import_ndarray_functions() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 398912bc0201..0608d4fa6c41 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -840,6 +840,32 @@ function test_trigonometric() end end # function test_trigonometric +function check_hyperbolic(f, A) + info("NDArray::$f") + let x = NDArray(A) + B = f.(A) + y = f.(x) + @test copy(y) ≈ B + end + + let A = Float32.(A), x = NDArray(A) + B = f.(A) + y = f.(x) + @test copy(y) ≈ B + end +end # function check_hyperbolic + +function test_hyperbolic() + for f ∈ [sinh, cosh, tanh, asinh, acosh, atanh] + A = if f == acosh + [1.1, 1.2, 1.3, 1.4] + else + [.1, .2, .3, .4] + end + check_hyperbolic(f, A) + end +end # function test_hyperbolic + ################################################################################ # Run tests ################################################################################ @@ -875,6 +901,7 @@ end # function test_trigonometric test_show() test_size() test_trigonometric() + test_hyperbolic() end end From 66096167c66ca788c31effbb687f456834b67dba Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 15 Dec 2017 13:28:38 +0800 Subject: [PATCH 590/630] ndarray: support transpose on 1D array (#375) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit 
Python doesn't have this functionality, so I implement it via `reshape`. ```julia julia> x = NDArray(Float32[1, 2, 3, 4]) 4 mx.NDArray{Float32,1} @ CPU0: 1.0 2.0 3.0 4.0 julia> x' 1×4 mx.NDArray{Float32,2} @ CPU0: 1.0 2.0 3.0 4.0 ``` --- NEWS.md | 15 +++++++++++++++ src/ndarray.jl | 3 ++- src/util.jl | 5 ----- test/unittest/ndarray.jl | 8 +++++++- 4 files changed, 24 insertions(+), 7 deletions(-) diff --git a/NEWS.md b/NEWS.md index 703cb216a001..30b204aef24d 100644 --- a/NEWS.md +++ b/NEWS.md @@ -107,6 +107,21 @@ 4.0 ``` +* Transposing a column `NDArray` to a row `NDArray` is supported now. (#TBD) + + ```julia + julia> x = NDArray(Float32[1, 2, 3, 4]) + 4 mx.NDArray{Float32,1} @ CPU0: + 1.0 + 2.0 + 3.0 + 4.0 + + julia> x' + 1×4 mx.NDArray{Float32,2} @ CPU0: + 1.0 2.0 3.0 4.0 + ``` + ## API Changes ### `NDArray` diff --git a/src/ndarray.jl b/src/ndarray.jl index 900285ae5782..b86158e88842 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1114,7 +1114,8 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap dot(x::NDArray{T,N}, y::NDArray{S,N}) where {T,S,N} dot(y, x) # See https://github.com/dmlc/MXNet.jl/pull/123 -@_remap transpose(arr::NDArray) transpose(_only2d(arr)) +@_remap transpose(arr::NDArray{T,1}) where T reshape(arr; shape = (1, length(arr)), reverse = true) +@_remap transpose(arr::NDArray{T,2}) where T transpose(arr) @_remap permutedims(arr::NDArray, axes) transpose(arr; axes = length(axes) .- tuple(axes...)) @_remap prod(arr::NDArray) prod(arr) diff --git a/src/util.jl b/src/util.jl index c53fb9a597fa..c729bc7cd9ae 100644 --- a/src/util.jl +++ b/src/util.jl @@ -163,11 +163,6 @@ function _format_signature(narg::Int, arg_names::Ref{char_pp}) return join([unsafe_string(name) for name in arg_names] , ", ") end -@inline function _only2d(x) - @assert ndims(x) == 2 - x -end - """ libmxnet operators signature checker. 
diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 0608d4fa6c41..a397d7899d54 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -775,7 +775,13 @@ function test_fill() end # function test_fill function test_transpose() - info("NDArray::transpose") + info("NDArray::transpose::1D") + let A = rand(Float32, 4), x = NDArray(A) + @test size(x) == (4,) + @test size(x') == (1, 4) + end + + info("NDArray::transpose::2D") let A = rand(Float32, 2, 3), x = mx.NDArray(A) @test size(x) == (2, 3) @test size(x') == (3, 2) From 881759f1bea5451d5eab9e86b88cba6f1ef42cdf Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 15 Dec 2017 13:40:01 +0800 Subject: [PATCH 591/630] example: fix MSE init (#376) --- examples/regression-example.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/regression-example.jl b/examples/regression-example.jl index e820d54001ec..38541c2b7d0c 100644 --- a/examples/regression-example.jl +++ b/examples/regression-example.jl @@ -63,7 +63,7 @@ optimizer = mx.ADAM() trainprovider, evalprovider = data_source(#= batchsize =# 200) mx.fit(model, optimizer, trainprovider, initializer = mx.NormalInitializer(0.0, 0.1), - eval_metric = mx.MSE{mx.NDArray{Float32,1}}(), + eval_metric = mx.MSE(), eval_data = evalprovider, n_epoch = 20, callbacks = [mx.speedometer()]) @@ -71,7 +71,7 @@ mx.fit(model, optimizer, trainprovider, trainprovider, evalprovider = data_source(#= batchsize =# samplesize) mx.fit(model, optimizer, trainprovider, initializer = mx.NormalInitializer(0.0, 0.1), - eval_metric = mx.MSE{mx.NDArray{Float32,1}}(), + eval_metric = mx.MSE(), eval_data = evalprovider, n_epoch = 500, # previous setting is batchsize = 200, epoch = 20 # implies we did (5000 / 200) * 20 times update in previous `fit` From 5908d97477d4b76c3a894e81981f98a6c9efbe57 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 15 Dec 2017 13:46:55 +0800 Subject: [PATCH 592/630] base: cleanup stale code in _defstruct_impl (#377) 
--- src/base.jl | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/src/base.jl b/src/base.jl index e75dc3b6ac65..0334260f886a 100644 --- a/src/base.jl +++ b/src/base.jl @@ -218,21 +218,13 @@ function _defstruct_impl(is_immutable, name, fields) if isa(name, Symbol) name = esc(name) super_name = :Any - elseif VERSION >= v"0.5-" + else @assert(isa(name, Expr) && name.head == :(<:) && length(name.args) == 2 && isa(name.args[1], Symbol) && isa(name.args[2], Symbol), "name must be of form 'Name <: SuperType'") super_name = esc(name.args[2]) name = esc(name.args[1]) - else - @assert(isa(name, Expr) && name.head == :comparison && - length(name.args) == 3 && name.args[2] == :(<:) && - isa(name.args[1], Symbol) && isa(name.args[3], Symbol), - "name must be of form 'Name <: SuperType'") - - super_name = esc(name.args[3]) - name = esc(name.args[1]) end field_defs = Vector{Expr}(length(fields)) # :(field2 :: Int) From a941f3aec70828682f44ae21f41543a971677fc3 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 15 Dec 2017 13:47:12 +0800 Subject: [PATCH 593/630] ndarray: add modulo operator (#373) * ndarray: add modulo operator * add news --- NEWS.md | 11 +++++++++++ src/broadcast.jl | 3 ++- src/ndarray.jl | 27 +++++++++++++++++++++++---- test/unittest/ndarray.jl | 32 ++++++++++++++++++++++++++++++++ 4 files changed, 68 insertions(+), 5 deletions(-) diff --git a/NEWS.md b/NEWS.md index 30b204aef24d..a7942b0ed469 100644 --- a/NEWS.md +++ b/NEWS.md @@ -107,6 +107,17 @@ 4.0 ``` +* modulo operator. (#TBD) + + ```julia + x = NDArray(...) + y = NDArray(...) + + x .% y + x .% 2 + 2 .% x + ``` + * Transposing a column `NDArray` to a row `NDArray` is supported now. 
(#TBD) ```julia diff --git a/src/broadcast.jl b/src/broadcast.jl index a5fdacd829d0..cdde7f46b2c9 100644 --- a/src/broadcast.jl +++ b/src/broadcast.jl @@ -1,6 +1,7 @@ using TakingBroadcastSeriously: Broadcasted, unwrap -for f in :[tan, asin, acos, atan, +for f in :[%, + tan, asin, acos, atan, sinh, cosh, tanh, asinh, acosh, atanh].args # copy from TakingBroadcastSeriously @eval Base.$f(a::Broadcasted...) = Broadcasted(broadcast_($f, unwrap.(a)...)) diff --git a/src/ndarray.jl b/src/ndarray.jl index b86158e88842..d62a72c39684 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -709,17 +709,32 @@ broadcast_(::typeof(/), x::NDArray, y::NDArray) = _div(x, y) broadcast_(::typeof(/), x::NDArray, y::Real) = _div_scalar(x, scalar = y) broadcast_(::typeof(/), y::Real, x::NDArray) = _rdiv_scalar(x, scalar = y) +import Base: % + +""" + .%(x::NDArray, y::NDArray) + .%(x::NDArray, y::Real) + .%(x::Real, y::NDArray) + +Elementwise modulo for `NDArray`. +""" +%(x::NDArray, y::Real) = _mod_scalar(x, scalar = y) + +broadcast_(::typeof(%), x::NDArray, y::NDArray) = _mod(x, y) +broadcast_(::typeof(%), x::NDArray, y::Real) = _mod_scalar(x, scalar = y) +broadcast_(::typeof(%), y::Real, x::NDArray) = _rmod_scalar(x, scalar = y) + import Base: ^ # document of `.^` is merged into SymbolicNode's broadcast_(::typeof(^), x::NDArray, y::NDArray) = _power(x, y) -broadcast_(::typeof(^), x::NDArray, s::Real) = _power_scalar(x, scalar = s) -broadcast_(::typeof(^), s::Real, x::NDArray) = _rpower_scalar(x, scalar = s) +broadcast_(::typeof(^), x::NDArray, s::Real) = _power_scalar(x, scalar = s) +broadcast_(::typeof(^), s::Real, x::NDArray) = _rpower_scalar(x, scalar = s) broadcast_(::typeof(^), ::Irrational{:e}, x::NDArray) = exp(x) -broadcast_(::typeof(^), x::NDArray, s::Irrational) = _power_scalar(x, scalar = s) -broadcast_(::typeof(^), s::Irrational, x::NDArray) = _rpower_scalar(x, scalar = s) +broadcast_(::typeof(^), x::NDArray, s::Irrational) = _power_scalar(x, scalar = s) 
+broadcast_(::typeof(^), s::Irrational, x::NDArray) = _rpower_scalar(x, scalar = s) """ fill!(arr::NDArray, x) @@ -1147,6 +1162,9 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap _minus(x::NDArray, y::NDArray) _minus(x, y) @_remap _minus!(x::NDArray, y::NDArray) _minus(x, y) +@_remap _mod(x::NDArray, y::NDArray) _mod(x, y) +@_remap _mod!(x::NDArray, y::NDArray) _mod(x, y) + ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -1265,6 +1283,7 @@ const _op_import_bl = [ # import black list; do not import these funcs # arithmetic "_plus", "_minus", + "_mod", "dot", "max", diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index a397d7899d54..7c74536a8a18 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -437,6 +437,37 @@ function test_rdiv() end # function test_rdiv +function test_mod() + info("NDArray::mod") + const A = [1 2; 3 4] + const B = [1 1; 3 3] + + let x = NDArray(A), y = NDArray(B) + C = A .% B + D = B .% A + + w = x .% y + z = y .% x + + @test copy(w) ≈ C + @test copy(z) ≈ D + end + + let x = NDArray(A) + C = A .% 2 + y = x .% 2 + @test copy(y) ≈ C + end + + info("NDArray::rmod") + let x = NDArray(A) + C = 11 .% A + y = 11 .% x + @test copy(y) ≈ C + end +end # function test_mod + + function test_gd() dims = rand_dims() tw, aw = rand_tensors(dims) @@ -888,6 +919,7 @@ end # function test_hyperbolic test_mul() test_div() test_rdiv() + test_mod() test_gd() test_saveload() test_clip() From 8135a635b06bf36df1511fa238bc15bfa7cc6e75 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 15 Dec 2017 13:48:03 +0800 Subject: [PATCH 594/630] ndarray: type convertion of _div_scalar (#357) Ref: #353 - and handle the case of integer NDArray divided by zero --- src/ndarray.jl | 12 ++++++++++-- test/unittest/ndarray.jl | 13 +++++++++++++ 2 files 
changed, 23 insertions(+), 2 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index d62a72c39684..33b94c05e559 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -669,13 +669,21 @@ Matrix (2D NDArray) multiplication. Elementwise divide a scalar or an `NDArray` of the same shape from `dst`. Inplace updating. """ -function div_from!(dst::NDArray{T}, arg::NDArrayOrReal) where {T} +function div_from!(dst::NDArray, arg::NDArrayOrReal) @assert dst.writable if isa(arg, Real) - _div_scalar(dst, scalar = convert(T, arg), out = dst) + _div_scalar(dst, scalar = arg, out = dst) else _div(dst, arg, out = dst) end + dst +end + +function div_from!(dst::NDArray{T}, arg::Real) where {T<:Integer} + @assert dst.writable + @assert(round(T, arg) != zero(T), "Integer divided by zero") + _div_scalar(dst, scalar = arg, out = dst) + dst end """ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 7c74536a8a18..8bd87c65ec9e 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -396,6 +396,19 @@ function test_div() t6, a6 = rand_tensors(Float16, dims) scalar_large = 1e4 @test t6 ./ scalar_large ≈ copy(a6 ./ scalar_large) + + info("NDArray::div::scalar::type convert") + let x = mx.NDArray([1, 2, 3]) + y = x ./ 1.1 + @test eltype(y) == Int + @test copy(y) == [1, 2, 3] + + y = x ./ 2 + @test eltype(y) == Int # this differs from julia + @test copy(y) == [0, 1, 1] + + @test_throws AssertionError x ./ 0.5 + end end From 1e20f50b9ce859e19a657c8820244d159ca7cca6 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 15 Dec 2017 16:14:26 +0800 Subject: [PATCH 595/630] ndarray: protect from diving zero for non-inplace op (#378) --- src/ndarray.jl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/ndarray.jl b/src/ndarray.jl index 33b94c05e559..42c69266e210 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -717,6 +717,11 @@ broadcast_(::typeof(/), x::NDArray, y::NDArray) = _div(x, y) broadcast_(::typeof(/), x::NDArray, y::Real) = _div_scalar(x, 
scalar = y) broadcast_(::typeof(/), y::Real, x::NDArray) = _rdiv_scalar(x, scalar = y) +function broadcast_(::typeof(/), x::NDArray{T}, y::Real) where {T<:Integer} + @assert(round(T, y) != zero(T), "Integer divided by zero") + _div_scalar(x, scalar = y) +end + import Base: % """ From d92122546f02703e99364e09c4448b990b42b0b3 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 16 Dec 2017 23:39:39 +0800 Subject: [PATCH 596/630] doc/ndarray: add ref table for hyperbolic function (#382) --- docs/src/api/ndarray.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/docs/src/api/ndarray.md b/docs/src/api/ndarray.md index 660bd95fc070..9582ff24a7ef 100644 --- a/docs/src/api/ndarray.md +++ b/docs/src/api/ndarray.md @@ -13,7 +13,7 @@ In the following example `y` can be a `Real` value or another `NDArray` | `^` | `x .^ y` | Elementwise power | -## Trigonometric functions +## Trigonometric Functions | API | Example | | |----------------|------------|-----------------------------| @@ -25,6 +25,18 @@ In the following example `y` can be a `Real` value or another `NDArray` | [`atan`](@ref) | `atan.(x)` | Elementwise inverse tangent | +## Hyperbolic Functions + +| API | Example | | +|-----------------|-------------|----------------------------------------| +| [`sinh`](@ref) | `sinh.(x)` | Elementwise hyperbolic sine | +| [`cosh`](@ref) | `cosh.(x)` | Elementwise hyperbolic cosine | +| [`tanh`](@ref) | `tanh.(x)` | Elementwise hyperbolic tangent | +| [`asinh`](@ref) | `asinh.(x)` | Elementwise inverse hyperbolic sine | +| [`acosh`](@ref) | `acosh.(x)` | Elementwise inverse hyperbolic cosine | +| [`atanh`](@ref) | `atanh.(x)` | Elementwise inverse hyperbolic tangent | + + ## Reference ```@autodocs From 450759868007dc2bed1024a19e5bfa17f1d305d3 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 17 Dec 2017 06:01:01 +0800 Subject: [PATCH 597/630] ndarray: `cat`, `hcat`, `vcat` (#380) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 
Content-Transfer-Encoding: 8bit e.g. `hcat` ```julia julia> x 4 mx.NDArray{Float64,1} @ CPU0: 1.0 2.0 3.0 4.0 julia> y 4 mx.NDArray{Float64,1} @ CPU0: 2.0 4.0 6.0 8.0 julia> [x y] 4×2 mx.NDArray{Float64,2} @ CPU0: 1.0 2.0 2.0 4.0 3.0 6.0 4.0 8.0 ``` --- NEWS.md | 28 +++++++++++++++++- src/ndarray.jl | 26 +++++++++++++++++ test/unittest/ndarray.jl | 62 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 115 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index a7942b0ed469..70d8626f7259 100644 --- a/NEWS.md +++ b/NEWS.md @@ -117,7 +117,33 @@ x .% 2 2 .% x ``` - + +* `cat`, `vcat`, `hcat` is implemented. (#TBD) + + E.g. `hcat` + ```julia + julia> x + 4 mx.NDArray{Float64,1} @ CPU0: + 1.0 + 2.0 + 3.0 + 4.0 + + julia> y + 4 mx.NDArray{Float64,1} @ CPU0: + 2.0 + 4.0 + 6.0 + 8.0 + + julia> [x y] + 4×2 mx.NDArray{Float64,2} @ CPU0: + 1.0 2.0 + 2.0 4.0 + 3.0 6.0 + 4.0 8.0 + ``` + * Transposing a column `NDArray` to a row `NDArray` is supported now. (#TBD) ```julia diff --git a/src/ndarray.jl b/src/ndarray.jl index 42c69266e210..139e40ef1e59 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -522,6 +522,32 @@ function deepcopy(arr::NDArray) NDArray(MX_NDArrayHandle(out_ref[])) end +""" + hcat(x::NDArray...) +""" +Base.hcat(xs::NDArray{T}...) where T = cat(2, xs...) + +""" + vcat(x::NDArray...) +""" +Base.vcat(xs::NDArray{T}...) where T = cat(1, xs...) + +""" + cat(dim, xs::NDArray...) + +Concate the `NDArray`s which have the same element type along the `dim`. +Building a diagonal matrix is not supported yet. +""" +function Base.cat(dim::Int, xs::NDArray{T}...) where T + ns = ndims.(xs) + d = Base.max(dim, maximum(ns)) + xs′ = map(zip(ns, xs)) do i + n, x = i + (d > n) ? reshape(x, -2, Base.ones(Int, d - n)...) 
: x + end + concat(xs′..., dim = d - dim) +end + """ @inplace diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 8bd87c65ec9e..a24126cf194b 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -179,6 +179,67 @@ function test_endof() end end # function test_endof +function test_cat() + function check_cat(f, A, B = 2A) + C = [A B] + D = [A; B] + x = NDArray(A) + y = NDArray(B) + z = NDArray(C) + d = NDArray(D) + + if f == :hcat + @test copy([x y]) == [A B] + @test copy([x y 3y x]) == [A B 3B A] + @test copy([z y x]) == [C B A] + elseif f == :vcat + @test copy([x; y]) == [A; B] + @test copy([x; y; 3y; x]) == [A; B; 3B; A] + @test copy([x; d]) == [A; D] + @test copy([d; x]) == [D; A] + else + @assert false + end + end + + let A = [1, 2, 3, 4] + info("NDArray::hcat::1D") + check_cat(:hcat, A) + + info("NDArray::vcat::1D") + check_cat(:vcat, A) + end + + let A = [1 2; 3 4] + info("NDArray::hcat::2D") + check_cat(:hcat, A) + + info("NDArray::vcat::2D") + check_cat(:vcat, A) + end + + let A = rand(4, 3, 2) + info("NDArray::hcat::3D") + check_cat(:hcat, A) + + info("NDArray::vcat::3D") + check_cat(:vcat, A) + end + + let A = rand(4, 3, 2, 2) + info("NDArray::hcat::4D") + check_cat(:hcat, A) + + info("NDArray::vcat::4D") + check_cat(:vcat, A) + end + + let A = [1, 2, 3, 4] + info("NDArray::cat::3D/1D") + check_cat(:vcat, reshape(A, 4, 1, 1), 2A) + end +end # function test_cat + function test_plus() dims = rand_dims() t1, a1 = rand_tensors(dims) @@ -927,6 +988,7 @@ end # function test_hyperbolic test_linear_idx() test_first() test_endof() + test_cat() test_plus() test_minus() test_mul() From 39ecb32463497d0750cdbf73ae534cc64a32181e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 17 Dec 2017 06:02:02 +0800 Subject: [PATCH 598/630] base: improve MXError display (#379) --- src/base.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/base.jl b/src/base.jl index 0334260f886a..8f14d44c6b3d 100644 --- a/src/base.jl +++ b/src/base.jl 
@@ -3,6 +3,8 @@ struct MXError <: Exception msg :: AbstractString end +Base.show(io::IO, e::MXError) = print(io, e.msg) + ################################################################################ # Common types used in MXNet API ################################################################################ From cb443b79778b42bbc4b88ff2a87e4bbf377cda07 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 18 Dec 2017 20:16:25 +0800 Subject: [PATCH 599/630] ndarray: porting Python's autograd (#274) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ref: https://github.com/apache/incubator-mxnet/blob/065adb3702c110af7b537799be3ec9c16c27a72b/python/mxnet/autograd.py * API ported * attach_grad * grad * mark_variables * get_symbol * record * pause * train_mode * predict_mode * backward * An example ```julia x = NDArray([1 2; 3 4]) mx.attach_grad!(x) y = mx.record() do mx.square(x) end mx.backward!(y) mx.getgrad(x) # 2×2 Array{Int64,2}: # 2 4 # 6 8 ``` --- NEWS.md | 2 + src/MXNet.jl | 1 + src/autograd.jl | 387 ++++++++++++++++++++++++++++++++++++++ src/base.jl | 2 +- src/ndarray.jl | 4 +- test/unittest/autograd.jl | 386 +++++++++++++++++++++++++++++++++++++ test/unittest/ndarray.jl | 2 +- 7 files changed, 780 insertions(+), 4 deletions(-) create mode 100644 src/autograd.jl create mode 100644 test/unittest/autograd.jl diff --git a/NEWS.md b/NEWS.md index 70d8626f7259..4540cba50fef 100644 --- a/NEWS.md +++ b/NEWS.md @@ -75,6 +75,8 @@ ### `NDArray` +* A port of Python's `autograd` for `NDArray` (#274) + * `size(x, dims...)` is supported now. 
(#TBD) ```julia diff --git a/src/MXNet.jl b/src/MXNet.jl index 3583c140b64c..352d20aad150 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -105,6 +105,7 @@ include("broadcast.jl") include("ndarray.jl") include("random.jl") +include("autograd.jl") include("name.jl") include("symbolic-node.jl") diff --git a/src/autograd.jl b/src/autograd.jl new file mode 100644 index 000000000000..4584decb0a52 --- /dev/null +++ b/src/autograd.jl @@ -0,0 +1,387 @@ +# Autograd for NDArray +# this is a port of Python's autograd module +# https://github.com/apache/incubator-mxnet/blob/master/python/mxnet/autograd.py + +############################################################################### +# Private util functions +############################################################################### + +""" + _set_recording(state::Bool)::Bool + +Set status to recording/not recording. When recording, graph will be constructed +for gradient computation. + +## Parameters + +* `state::Bool` + +## Returns + +Previous state before this set +""" +function _set_recording(state::Bool)::Bool + prev = Ref{Cint}(C_NULL) + @mxcall(:MXAutogradSetIsRecording, (Cint, Ref{Cint}), state, prev) + prev[] +end + +_set_recording(::Void) = nothing + +""" +Set status to training/predicting. +For example, Dropout will drop inputs randomly when +`train_mode = true` while simply passing through if `train_mode = false`. + +## Parameters +* `train_mode::Bool` + +## Returns + +Previous state before this set. +""" +function _set_training(train_mode::Bool)::Bool + prev = Ref{Cint}(C_NULL) + @mxcall(:MXAutogradSetIsTraining, (Cint, Ref{Cint}), train_mode, prev) + prev[] +end + +_set_training(::Void) = nothing + +############################################################################### +# Public API +############################################################################### + +""" + is_recording()::Bool + +Get status on recording/not recording. 
+""" +function is_recording()::Bool + state = Ref{Cint}(C_NULL) + @mxcall(:MXAutogradIsRecording, (Ref{Cint},), state) + state[] +end + +""" + is_training()::Bool + +Get status on recording/not recording. +""" +function is_training()::Bool + state = Ref{Cint}(C_NULL) + @mxcall(:MXAutogradIsTraining, (Ref{Cint},), state) + state[] +end + +@inline function _record(f, is_record::Union{Void,Bool}, train_mode::Union{Void,Bool}) + # Port from Python's `_RecordingStateScope` context manager + # __enter__ + prev_is_record = _set_recording(is_record) + prev_train_mode = _set_training(train_mode) + + try + f() + finally + # __exit__ + if is_record != nothing && prev_is_record != is_record + _set_recording(prev_is_record) + end + if train_mode != nothing && prev_train_mode != train_mode + _set_recording(prev_train_mode) + end + end +end + +""" + record(f, train_mode = true) + record(translates = true) do + ... + end + +Returns an autograd recording scope context to be used in `do` block +and captures code that needs gradients to be calculated. + +Parameter `train_mode::Bool` controls whether the forward pass is in training +or predicting mode. +This controls the behavior of some layers such as `Dropout`, `BatchNorm`. + +!!! note + When forwarding with `train_mode = false`, the corresponding backward + should also use `train_mode = false`, otherwise gradient is undefined. + +```julia +x = mx.NDArray([1 2; 3 4]) +∇ = mx.attach_grad!(x) +y = mx.record() do + 2x +end +mx.backward!(y) + +julia> ∇ +2×2 mx.NDArray{Int64,2} @ CPU0: + 2 2 + 2 2 +``` +""" +record(f, train_mode::Bool = true) = _record(f, true, train_mode) + +""" + pause(f, train_mode = false) + pause(train_mode = false) do + ... + end + +Create a scope context for codes that do not need gradients to be calculated. + +```julia +record() do + ... + pause() do + # testing, IO, gradient updates... 
+ end +end +``` +""" +pause(f, train_mode::Bool = false) = _record(f, false, train_mode) + +""" + train_mode(f) + train_mode() do + ... + end + +Create a scope context in which forward pass behavior is set to training mode, +without changing the recording states. + +```julia +y = model(x) +train_mode() do + z = mx.Dropout(y) + ... +end +``` +""" +train_mode(f) = _record(f, nothing, true) + +""" + predict_mode(f) + predict_mode() do + ... + end + +Create a scope context in which forward pass behavior is set to inference mode, +without changing the recording states. + +```julia +record() do + y = model(x) + predict_mode() do + y = sampling(y) + end +end +``` +""" +predict_mode(f) = _record(f, nothing, false) + +""" + backward!(head, head_grad; retain_graph = false, train_mode = true) + backward!(heads, head_grads; retain_graph = false, train_mode = true) + +Compute the gradients of heads w.r.t previously marked variables. + +## Parameters + +- `head::NDArray`: output NDArray + +- `head_grad::NDArray` or `Void`: gradient coefficient with respect to head. + +- `heads::Vector{NDArray}`: a list of output NDArray + +- `head_grads::Vector`: a list of gradient coefficient with respect ot heads. + the element should be `NDArray` or `Void` + +- `retain_graph::Bool`: whether to keep the graph after backward. e.g: + If you want to differentiate the same graph twice, + you need to pass `retain_graph=true`. + +- `train_mode::Bool`: whether to do backward for training or predicting. +""" +backward!(head::NDArray, head_grad::NDArray; kws...) = + backward!([head], [head_grad]; kws...) + +backward!(head::NDArray, head_grad::Void = nothing; kws...) = + backward!([head], head_grad; kws...) 
+ +function backward!(heads::VecOfNDArray, head_grad::Void; + retain_graph::Bool = false, train_mode::Bool = true) + @mxcall( + :MXAutogradBackwardEx, + (MX_uint, + Ptr{MX_handle}, + Ptr{MX_handle}, + MX_uint, + Ptr{MX_handle}, + Cint, + Cint, + Cint, + Ptr{MX_handle}, + Ptr{MX_handle}), + length(heads), + map(x -> x.handle, heads), + C_NULL, + 0, + C_NULL, + retain_graph, + false, # create_graph + train_mode, + C_NULL, + C_NULL) +end + +function backward!(heads::VecOfNDArray, head_grads::Vector; + retain_graph::Bool = false, train_mode::Bool = true) + output_handles = map(x -> x.handle, heads) + ograd_handles = map(head_grads) do x + if x isa NDArray + x.handle + elseif x isa Void + MX_handle(C_NULL) + else + throw(ArgumentError("element of head_grads should be NDArray or Void")) + end + end + @assert length(output_handles) == length(ograd_handles) + @mxcall( + :MXAutogradBackwardEx, + (MX_uint, + Ptr{MX_handle}, + Ptr{MX_handle}, + MX_uint, + Ptr{MX_handle}, + Cint, + Cint, + Cint, + Ptr{MX_handle}, + Ptr{MX_handle}), + length(output_handles), + output_handles, + ograd_handles, + 0, + C_NULL, + retain_graph, + false, # create_graph + train_mode, + C_NULL, + C_NULL) +end + +""" + getgrad(arr::NDArray) + +Returns the gradient buffer attached to this `NDArray`. +If the gradient buffer isn't attached yet, return `nothing`. +""" +function getgrad(arr::NDArray) + out = Ref{MX_handle}(C_NULL) + @mxcall(:MXNDArrayGetGrad, (MX_handle, Ref{MX_handle}), arr.handle, out) + (out[] == C_NULL) ? nothing : NDArray(MX_NDArrayHandle(out[])) +end + +""" + attach_grad!(x::NDArray, grad_req::Symbol = :write) + +Attach a gradient buffer to this `NDArray`, +so that [`backward!`](@ref) can compute gradient with respect to it. 
+ +## Parameters + +- `x::NDArray` +- `grad_req::Symbol` (default is `:write`) + +## Return + +The attached gradient buffer + +## See also + +- [`getgrad`](@ref) +""" +function attach_grad!(x::NDArray, grad_req::Symbol = :write) + # TODO: support storage type (stype in Python) + # TODO: make sure it works with gpu array + grad = zeros_like(x) + _mark_variables!([x], [grad], grad_req) + grad +end + +""" + mark_variables!(var, grad, grad_req) + mark_variables!(vars, grads, grad_reqs) + +Mark `NDArrays` as variables to compute gradient for autograd. + +## Parameters + +- `var::NDArray` +- `grad::NDArray` +- `grad_req::Symbol`: `:nop`, `:write`, `:inplace` or `:add` +- `vars::Vector{NDArray}` +- `grads::Vector{NDArray}` +- `grad_req::Vector{Symbol}` +""" +mark_variables!(var::NDArray, grad::NDArray, grad_reqs::Symbol = :write) = + _mark_variables!([var], [grad], grad_reqs) + +mark_variables!(var::VecOfNDArray, grads::VecOfNDArray, grad_reqs = :write) = + _mark_variables!(var, grads, grad_reqs) + +@inline function _getgrad_req(x::Symbol)::GRAD_REQ + val = get(grad_req_map, x, false) + if val == false + throw(ArgumentError("invalid grad_reqs $x")) + end + val +end + +@inline _getgrad_reqs(x::Symbol, n::Int) = + map((_) -> MX_uint(_getgrad_req(x)), Base.OneTo(n)) + +@inline function _getgrad_reqs(xs::Vector{Symbol}, n::Int) + if length(xs) != n + throw(ArgumentError("number of variables and grad_reqs not matched")) + end + map(MX_uint ∘ _getgrad_req, xs) +end + +@inline function _mark_variables!(vars::VecOfNDArray, grads::VecOfNDArray, + grad_reqs = :write) + n = length(vars) + if n != length(grads) + throw(ArgumentError("number of variables and gradients not matched")) + end + + var_hdls = map(x -> x.handle, vars) + grad_hdls = map(x -> x.handle, grads) + grad_reqs = _getgrad_reqs(grad_reqs, n) + + @mxcall(:MXAutogradMarkVariables, + (MX_uint, Ref{MX_handle}, Ptr{MX_uint}, Ref{MX_handle}), + length(vars), var_hdls, grad_reqs, grad_hdls) +end + +""" + symbol(x::NDArray) + 
+Retrieve recorded computation history as `SymbolicNode`, + where `x` is a `NDArray` representing the head of computation graph. + """ +function symbol(x::NDArray) + ref = Ref{MX_handle}(C_NULL) + @mxcall(:MXAutogradGetSymbol, (MX_handle, Ref{MX_handle}), x, ref) + SymbolicNode(MX_SymbolHandle(ref[])) +end + +############################################################################### +# TODO: User-defined differentiable function +############################################################################### diff --git a/src/base.jl b/src/base.jl index 8f14d44c6b3d..b8f73eb4e6bc 100644 --- a/src/base.jl +++ b/src/base.jl @@ -20,7 +20,7 @@ const char_pp = Ptr{char_p} ################################################################################ # OpReqType in include/mxnet/op_attr_types.h @enum GRAD_REQ GRAD_NOP=0 GRAD_WRITE=1 GRAD_INPLACE=2 GRAD_ADD=3 -const grad_req_map = Dict{Symbol, GRAD_REQ}( +const grad_req_map = Dict{Symbol,GRAD_REQ}( :nop => GRAD_NOP, # no operation, do not write anything :write => GRAD_WRITE, # write gradient to provided space :inplace => GRAD_INPLACE, # perform an inplace write diff --git a/src/ndarray.jl b/src/ndarray.jl index 139e40ef1e59..de5d6ba4fba4 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -306,10 +306,10 @@ dimension. For example, given an `NDArray` of shape (2,3,4), `slice(array, 2:3)` a `NDArray` of shape (2,3,2), sharing the data with the original array. This operation is used in data parallelization to split mini-batch into sub-batches for different devices. 
""" -function slice(arr :: NDArray, ::Colon) +function slice(arr::NDArray, ::Colon) arr end -function slice(arr :: NDArray, slice::UnitRange{Int}) +function slice(arr::NDArray, slice::UnitRange{Int}) dim1 = size(arr)[end] @assert(1 <= slice.start <= slice.stop <= dim1) if slice.start == 1 && slice.stop == dim1 diff --git a/test/unittest/autograd.jl b/test/unittest/autograd.jl new file mode 100644 index 000000000000..12c1022bd208 --- /dev/null +++ b/test/unittest/autograd.jl @@ -0,0 +1,386 @@ +module TestAutoGrad + +using Base.Test + +using MXNet + + +function checkgradient(f, x, y, ∇) + ∇x = mx.attach_grad!(x) + y′ = mx.record(f) + @test copy(y′) ≈ y + @test copy(∇x) |> sum == 0 + mx.backward!(y′) + @test copy(mx.getgrad(x)) ≈ ∇ +end # function checkgradient + + +function test_getgrad() + info("AutoGrad::getgrad") + + info("AutoGrad::getgrad::unattached") + @test nothing == mx.getgrad(mx.zeros(10)) + + info("AutoGrad::getgrad::attached") + x = mx.NDArray([1 2; 3 4]) + grad = mx.attach_grad!(x) + @test eltype(grad) ≡ Int + @test copy(grad) == [0 0; 0 0] + + grad[:] = 42 + @test copy(mx.getgrad(x)) == [42 42; 42 42] +end + + +function test_mark_variables!() + info("AutoGrad::mark_variables!") + x = mx.zeros(4) + ẋ = mx.zeros(4) + y = mx.zeros(4) + ẏ = mx.zeros(4) + mx.mark_variables!([x, y], [ẋ, ẏ], [:nop, :nop]) + ẋ[:] = 42 + ẏ[:] = 24 + + @test copy(mx.getgrad(x)) == [42, 42, 42, 42] + @test copy(mx.getgrad(y)) == [24, 24, 24, 24] + + info("AutoGrad::mark_variables!::invalid grad_reqs") + x = mx.zeros(4) + y = mx.zeros(4) + @test_throws ArgumentError mx.mark_variables!(x, y, :magic) + @test_throws ArgumentError mx.mark_variables!([x], [y], [:magic]) + + info("AutoGrad::mark_variables!::args length mismatch") + x = mx.zeros(4) + y = mx.zeros(4) + z = mx.zeros(4) + @test_throws ArgumentError mx.mark_variables!([x], [y, z]) + @test_throws ArgumentError mx.mark_variables!([x], [y], [:write, :nop]) +end + + +function test_record() + let x = mx.NDArray([1 2; 3 4]) + 
info("AutoGrad::record::backward!") + + y = [1 4; 9 16] + ∇ = [2 4; 6 8] # gradient is 2x + checkgradient(x, y, ∇) do + mx.square(x) + end + end + + let x = mx.NDArray([1 2; 3 4]) + info("AutoGrad::record::symbol") + + mx.attach_grad!(x) + y = mx.record() do + mx.square(x) + end + + @test copy(y) == [1 4; 9 16] + + @test isa(mx.symbol(y), mx.SymbolicNode) + end + + let x = mx.NDArray([1 2; 3 4]) + info("AutoGrad::record::backward!(retain_graph=true)") + + mx.attach_grad!(x) + y = mx.record() do + mx.square(x) + end + + @test copy(y) == [1 4; 9 16] + + mx.backward!(y, retain_graph=true) + # gradient is 2x + @test copy(mx.getgrad(x)) == [2 4; 6 8] + + @test isa(mx.symbol(y), mx.SymbolicNode) + end + + mx._record(nothing, nothing) do # no error with edage case + @test true + end +end # function test_record + + +function test_is_recording() + info("AutoGrad::is_recording") + mx.record() do + @test mx.is_recording() + end +end # function test_is_recording + + +function test_is_training() + info("AutoGrad::is_training") + mx.record() do + @test mx.is_training() + end + + mx.record(false) do + @test !mx.is_training() + end +end # function test_is_training + + +function test_pause() + info("AutoGrad::pause") + let x = mx.NDArray([1 2; 3 4]) + ∇ = mx.attach_grad!(x) + y = mx.record() do + y = mx.square(x) + mx.pause() do + z = mx.square(y) + @test copy(z) == [1 16; 81 256] + end + y + end + + @test copy(y) == [1 4; 9 16] + + mx.backward!(y) + @test copy(∇) == [2 4; 6 8] + end +end # function test_pause + + +function test_train_mode() + info("AutoGrad::train_mode") + let x = mx.NDArray(Float32[1 2; 3 4]) + y = mx.train_mode() do + mx.Dropout(x, p = 1) + end + + @test all(isnan.(copy(y))) + end +end # function test_train_mode + + +function test_predict_mode() + info("AutoGrad::predict_mode") + let x = mx.NDArray(Float32[1 2; 3 4]) + y = mx.predict_mode() do + mx.Dropout(x, p = 1) + end + + @test copy(y) ≈ Float32[1 2; 3 4] + end +end # function test_train_mode + + +function 
test_backward!() + info("AutoGrad::backward!::with head_grad") + let x = mx.NDArray(Float32[1 2; 3 4]), A = Float32[.2 .4; 0 .1] + ∇ = mx.attach_grad!(x) + y = mx.record() do + mx.square(x) + end + mx.backward!(y, mx.NDArray(A)) + @test copy(∇) ≈ [2 4; 6 8] .* A + end + + info("AutoGrad::backward!::with head_grads") + let x = mx.NDArray(Float32[1 2; 3 4]) + ∇ = mx.attach_grad!(x) + mx.record() do + x′ = mx.square(x) + y = mx.square(x) + z = mx.square(x) .+ 42 + mx.backward!([x′, y, z], [nothing, + mx.NDArray(Float32[.01 .01; 1 1]), + mx.NDArray(Float32[1 1; .1 .1])]) + end + ans = [4.02 8.04 + 12.6 16.8] + @test copy(∇) ≈ ans + end + + info("AutoGrad::backward!::ArgumentError") + let x = mx.NDArray([42]) + @test_throws ArgumentError mx.backward!([x], [24]) + end +end # function test_backward! + + +function test_symbol() + info("AutoGrad::symbol") + + let x = mx.zeros(4) + mx.attach_grad!(x) + @test isa(mx.symbol(x), mx.SymbolicNode) + end +end + + +function test_add() + info("AutoGrad::add") + + info("AutoGrad::add::x") + let x = mx.NDArray([1 2; 3 4]) + y = [1 2; 3 4] + ∇ = [1 1; 1 1] # gradient is 1 + checkgradient(x, y, ∇) do + x + end + end + + info("AutoGrad::add::+x") + let x = mx.NDArray([1 2; 3 4]) + y = [1 2; 3 4] + ∇ = [1 1; 1 1] # gradient is 1 + checkgradient(x, y, ∇) do + +x + end + end + + info("AutoGrad::add::x .+ 42") + let x = mx.NDArray([1 2; 3 4]) + y = [43 44; 45 46] + ∇ = [1 1; 1 1] # gradient is 1 + checkgradient(x, y, ∇) do + x .+ 42 + end + end + + info("AutoGrad::add::42 .+ x") + let x = mx.NDArray([1 2; 3 4]) + y = [43 44; 45 46] + ∇ = [1 1; 1 1] + checkgradient(x, y, ∇) do + 42 .+ x + end + end + + # TODO: info("AutoGrad::add::x .+ y") +end # function test_add + + +function test_sub() + info("AutoGrad::sub") + + info("AutoGrad::sub::-x") + let x = mx.NDArray([1 2; 3 4]) + y = [-1 -2; -3 -4] + ∇ = [-1 -1; -1 -1] # gradient is -1 + checkgradient(x, y, ∇) do + -x + end + end + + info("AutoGrad::sub::x .- 42") + let x = mx.NDArray([1 2; 3 4]) 
+ y = [-41 -40; -39 -38] + ∇ = [1 1; 1 1] + checkgradient(x, y, ∇) do + x .- 42 + end + end + + info("AutoGrad::sub::42 .- x") + let x = mx.NDArray([1 2; 3 4]) + y = [41 40; 39 38] + ∇ = -[1 1; 1 1] + checkgradient(x, y, ∇) do + 42 .- x + end + end + + # TODO: info("AutoGrad::add::x .- y") +end # function test_sub + + +function test_mul() + info("AutoGrad::mul") + + info("AutoGrad::mul::2x .* x") + let x = mx.NDArray([1 2; 3 4]) + y = [2 8; 18 32] + ∇ = [4 8; 12 16] # 4x + checkgradient(x, y, ∇) do + 2x .* x + end + end + + info("AutoGrad::mul::x * 2 .* x") + let x = mx.NDArray([1 2; 3 4]) + y = [2 8; 18 32] + ∇ = [4 8; 12 16] # 4x + checkgradient(x, y, ∇) do + x * 2 .* x + end + end +end + + +function test_div() + info("AutoGrad::div") + + info("AutoGrad::div::x ./ 2") + let x = mx.NDArray(Float32[1 2; 3 4]) + y = Float32[.5 1; 1.5 2] + ∇ = [.5 .5; .5 .5] + checkgradient(x, y, ∇) do + x ./ 2 + end + end + + info("AutoGrad::rdiv::2 ./ x") + let A = Float32[1 2; 3 4], x = mx.NDArray(A) + y = 2 ./ A + ∇ = @. 
-2 / A^2 # -2 / x² + checkgradient(x, y, ∇) do + 2 ./ x + end + end +end # function test_div + + +function test_power() + info("AutoGrad::power") + + info("AutoGrad::power::x.^3") + let A = Float32[1 2; 3 4] + x = mx.NDArray(A) + y = A.^3 + ∇ = 3(A.^2) + checkgradient(x, y, ∇) do + x.^3 + end + end + + info("AutoGrad::power::x.^.5") + let A = Float32[1 2; 3 4] + x = mx.NDArray(A) + y = A.^.5 + ∇ = .5(A.^-.5) + checkgradient(x, y, ∇) do + x.^.5 + end + end +end + + +@testset "AutoGrad Test" begin + test_getgrad() + test_mark_variables!() + test_record() + test_is_recording() + test_is_training() + test_pause() + test_train_mode() + test_predict_mode() + test_backward!() + test_symbol() + test_add() + test_sub() + test_mul() + test_div() + test_power() +end + + +end # model TestAutoGrad diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index a24126cf194b..ef4fb1f2373c 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -502,7 +502,7 @@ function test_rdiv() @test copy(x) ≈ y end - info("NDArray:rdiv::type convert") + info("NDArray::rdiv::type convert") let x = mx.NDArray([1, 2, 3]) y = 5.5 ./ x @test eltype(y) == Int # this differs from julia From 883cdd39dd96a884fde758ce94a4d0106b5e0307 Mon Sep 17 00:00:00 2001 From: Nicu Stiurca Date: Mon, 18 Dec 2017 17:56:30 -0600 Subject: [PATCH 600/630] build: make appending lib to MXNET_HOME optional (#386) The MXNet shared libraries are not necessarily in a lib subdirectory. 
--- deps/build.jl | 5 +++-- src/base.jl | 1 + 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index b09e5f44242e..ff4441227cac 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -8,10 +8,11 @@ libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "master") curr_win = "20171019" # v0.12.0 if haskey(ENV, "MXNET_HOME") - info("MXNET_HOME environment detected: $(ENV["MXNET_HOME"])") + MXNET_HOME = ENV["MXNET_HOME"] + info("MXNET_HOME environment detected: $MXNET_HOME") info("Trying to load existing libmxnet...") lib = Libdl.find_library("libmxnet.$(Libdl.dlext)", - ["$(ENV["MXNET_HOME"])/lib"]) + [joinpath(MXNET_HOME, "lib"), MXNET_HOME]) if !isempty(lib) info("Existing libmxnet detected at $lib, skip building...") libmxnet_detected = true diff --git a/src/base.jl b/src/base.jl index b8f73eb4e6bc..271e35607880 100644 --- a/src/base.jl +++ b/src/base.jl @@ -32,6 +32,7 @@ const grad_req_map = Dict{Symbol,GRAD_REQ}( ################################################################################ const MXNET_LIB = Libdl.find_library("libmxnet.$(Libdl.dlext)", [joinpath(get(ENV, "MXNET_HOME", ""), "lib"), + get(ENV, "MXNET_HOME", ""), Pkg.dir("MXNet", "deps", "usr", "lib")]) if isempty(MXNET_LIB) # touch this file, so that after the user properly build libmxnet, the precompiled From bfeba817bac0216e1cb19e147acc5665bfb732cd Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 21 Dec 2017 09:50:58 +0800 Subject: [PATCH 601/630] ndarray: support matrix/tensor multiplication (#384) --- NEWS.md | 20 ++++++++++++++++++++ src/ndarray.jl | 8 ++++---- test/unittest/ndarray.jl | 23 ++++++++++++++++++++++- 3 files changed, 46 insertions(+), 5 deletions(-) diff --git a/NEWS.md b/NEWS.md index 4540cba50fef..efd3299e337c 100644 --- a/NEWS.md +++ b/NEWS.md @@ -161,6 +161,26 @@ 1.0 2.0 3.0 4.0 ``` +* Matrix/tensor multiplication is supported now. 
(#TBD) + + ```julia + julia> x + 2×3 mx.NDArray{Float32,2} @ CPU0: + 1.0 2.0 3.0 + 4.0 5.0 6.0 + + julia> y + 3 mx.NDArray{Float32,1} @ CPU0: + -1.0 + -2.0 + -3.0 + + julia> x * y + 2 mx.NDArray{Float32,1} @ CPU0: + -14.0 + -32.0 + ``` + ## API Changes ### `NDArray` diff --git a/src/ndarray.jl b/src/ndarray.jl index de5d6ba4fba4..ef2293486f07 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -674,7 +674,7 @@ import Base: * """ .*(x, y) -Currently only multiplication a scalar with an `NDArray` is implemented. +Elementwise multiplication for `NDArray`. """ *(x::NDArray, y::Real) = _mul_scalar(x, scalar = y) *(y::Real, x::NDArray) = _mul_scalar(x, scalar = y) @@ -686,9 +686,9 @@ broadcast_(::typeof(*), x::NDArray, y::NDArray) = _mul(x, y) """ *(A::NDArray, B::NDArray) -Matrix (2D NDArray) multiplication. +Matrix/tensor multiplication. """ -*(x::NDArray{T,2}, y::NDArray{S,2}) where {T,S} = dot(x, y) +*(x::NDArray{T}, y::NDArray{T}) where T = x ⋅ y """ div_from!(dst::NDArray, arg::NDArrayOrReal) @@ -1165,7 +1165,7 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap minimum(arr::NDArray, dims) min(arr; axis = 0 .- dims, keepdims = true) # See https://github.com/dmlc/MXNet.jl/issues/55 -@_remap dot(x::NDArray{T,N}, y::NDArray{S,N}) where {T,S,N} dot(y, x) +@_remap dot(x::NDArray, y::NDArray) dot(y, x) # See https://github.com/dmlc/MXNet.jl/pull/123 @_remap transpose(arr::NDArray{T,1}) where T reshape(arr; shape = (1, length(arr)), reverse = true) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index ef4fb1f2373c..e7b70d2e3884 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -740,7 +740,28 @@ function test_dot() x = mx.zeros(1, 2) y = mx.zeros(1, 2, 3) - @test_throws MethodError dot(x, y) + @test_throws mx.MXError dot(x, y) # dimension mismatch + + info("NDArray::matrix mul") + let + A = [1. 
2 3; 4 5 6] + B = [-1., -2, -3] + x = NDArray(A) + y = NDArray(B) + z = x * y + @test copy(z) == A * B + @test size(z) == (2,) + end + + let + A = [1. 2 3; 4 5 6] + B = [-1. -2; -3 -4; -5 -6] + x = NDArray(A) + y = NDArray(B) + z = x * y + @test copy(z) == A * B + @test size(z) == (2, 2) + end end function test_eltype() From 378789509632e51d5ca98f64bc131d5f062efb50 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 21 Dec 2017 12:48:02 +0800 Subject: [PATCH 602/630] ndarray: remap expand_dims (#381) --- NEWS.md | 1 + src/MXNet.jl | 3 ++- src/ndarray.jl | 39 +++++++++++++++++++++++++--- test/unittest/ndarray.jl | 55 ++++++++++++++++++++++++++++++---------- 4 files changed, 80 insertions(+), 18 deletions(-) diff --git a/NEWS.md b/NEWS.md index efd3299e337c..fe8f7d613687 100644 --- a/NEWS.md +++ b/NEWS.md @@ -4,6 +4,7 @@ * `NDArray` * `context()` * `empty()` + * `expand_dims()` * `SymbolicNode` * `Variable` diff --git a/src/MXNet.jl b/src/MXNet.jl index 352d20aad150..734abfd76650 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -32,7 +32,8 @@ export SymbolicNode, # ndarray.jl export NDArray, context, - empty + empty, + expand_dims # executor.jl export Executor, diff --git a/src/ndarray.jl b/src/ndarray.jl index ef2293486f07..274c306c7441 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1045,7 +1045,8 @@ end # Mapping NDArray functions to Base-like API ################################################################################ -const _mxsig = Dict{Symbol,Expr}() +const _ndsig = Dict{Symbol,Expr}() +const _nddoc = Dict{Symbol,Any}() function _autoimport(name::Symbol, sig::Expr) if name == :broadcast_ @@ -1074,6 +1075,9 @@ _broadcast_target(sig::Expr) = sig.args[2].args[].args[end] Generate docstring from function signature """ function _docsig(fname::Symbol, sig::Expr) + s = get(_nddoc, fname, "") + !isempty(s) && return s + if fname !== :broadcast_ " $sig" else @@ -1141,14 +1145,14 @@ macro _remap(sig::Expr, imp::Expr) end macro _remap(sig::Expr, imp::Symbol) 
- imp = _mxsig[imp] + imp = _ndsig[imp] esc(quote @_remap($sig, $imp) end) end -_mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) +_ndsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap reshape(arr::NDArray, dim...; reverse = false) reshape @_remap reshape(arr::NDArray, dim; reverse = false) reshape @@ -1175,6 +1179,34 @@ _mxsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap prod(arr::NDArray) prod(arr) @_remap prod(arr::NDArray, dims) prod(arr; axis = 0 .- dims, keepdims = true) +_nddoc[:expand_dims] = +""" + expand_dims(x::NDArray, dim) + +Insert a new axis into `dim`. + +```julia +julia> x +4 mx.NDArray{Float64,1} @ CPU0: + 1.0 + 2.0 + 3.0 + 4.0 + +julia> mx.expand_dims(x, 1) +1×4 mx.NDArray{Float64,2} @ CPU0: + 1.0 2.0 3.0 4.0 + +julia> mx.expand_dims(x, 2) +4×1 mx.NDArray{Float64,2} @ CPU0: + 1.0 + 2.0 + 3.0 + 4.0 +``` +""" +@_remap expand_dims(x::NDArray, dim) expand_dims(x; axis = -dim) + # trigonometric functions, remap to keep consistent with Base @_remap broadcast_(::typeof(sin), x::NDArray) sin(x) @_remap broadcast_(::typeof(cos), x::NDArray) cos(x) @@ -1318,6 +1350,7 @@ const _op_import_bl = [ # import black list; do not import these funcs "_full", # we already have `mx.fill` "_ones", # we already have `mx.ones` "_zeros", # we already have `mx.zeros` + "expand_dims", # arithmetic "_plus", diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index e7b70d2e3884..6b258fe99646 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -779,27 +779,53 @@ function test_eltype() end function test_reshape() - info("NDArray::reshape") - A = rand(2, 3, 4) + info("NDArray::reshape") + A = rand(2, 3, 4) - B = reshape(mx.NDArray(A), 4, 3, 2) - @test size(B) == (4, 3, 2) - @test copy(B)[3, 1, 1] == A[1, 2, 1] + B = reshape(NDArray(A), 4, 3, 2) + @test size(B) == (4, 3, 2) + @test copy(B)[3, 1, 1] == A[1, 2, 1] - C = reshape(mx.NDArray(A), (4, 3, 2)) - @test size(C) == (4, 3, 2) - 
@test copy(C)[3, 1, 1] == A[1, 2, 1] + C = reshape(NDArray(A), (4, 3, 2)) + @test size(C) == (4, 3, 2) + @test copy(C)[3, 1, 1] == A[1, 2, 1] - info("NDArray::reshape::reverse") - A = mx.zeros(10, 5, 4) + info("NDArray::reshape::reverse") + A = mx.zeros(10, 5, 4) - B = reshape(A, -1, 0) - @test size(B) == (40, 5) + B = reshape(A, -1, 0) + @test size(B) == (40, 5) - C = reshape(A, -1, 0, reverse=true) - @test size(C) == (50, 4) + C = reshape(A, -1, 0, reverse=true) + @test size(C) == (50, 4) end +function test_expand_dims() + info("NDArray::expand_dims") + let A = [1, 2, 3, 4], x = NDArray(A) + @test size(x) == (4,) + + y = expand_dims(x, 1) + @test size(y) == (1, 4) + + y = expand_dims(x, 2) + @test size(y) == (4, 1) + end + + let A = [1 2; 3 4; 5 6], x = NDArray(A) + @test size(x) == (3, 2) + + y = expand_dims(x, 1) + @test size(y) == (1, 3, 2) + + y = expand_dims(x, 2) + @test size(y) == (3, 1, 2) + + y = expand_dims(x, 3) + @test size(y) == (3, 2, 1) + end +end # test_expand_dims + function test_sum() info("NDArray::sum") @@ -1025,6 +1051,7 @@ end # function test_hyperbolic test_nd_as_jl() test_dot() test_reshape() + test_expand_dims() test_sum() test_mean() test_maximum() From 813bfdcfe68cf651f875f43ea9e2aff30c010102 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 22 Dec 2017 01:42:46 +0800 Subject: [PATCH 603/630] random: Base-like APIs for rand, rand!, randn, randn! (#383) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * random: Base-like APIs for rand, rand!, randn, randn! 
and deprecate the original APIs ```julia julia> mx.rand(2, 3) 2×3 mx.NDArray{Float32,2} @ CPU0: 0.631961 0.324175 0.0762663 0.285366 0.395292 0.074995 julia> mx.rand(2, 3, low = low, high = high) 2×3 mx.NDArray{Float32,2} @ CPU0: 7.83884 7.85793 7.64791 7.68646 8.56082 8.42189 ``` ```julia julia> mx.randn(2, 3) 2×3 mx.NDArray{Float32,2} @ CPU0: 0.962853 0.424535 -0.320123 0.478113 1.72886 1.72287 julia> mx.randn(2, 3, μ = 100) 2×3 mx.NDArray{Float32,2} @ CPU0: 99.5635 100.483 99.888 99.9889 100.533 100.072 ``` * fix depwarn --- NEWS.md | 26 +++++++++++++++++ src/deprecated.jl | 26 +++++++++++++++++ src/initializer.jl | 14 ++++----- src/random.jl | 64 ++++++++++++++++++++--------------------- test/unittest/random.jl | 8 +++--- 5 files changed, 93 insertions(+), 45 deletions(-) diff --git a/NEWS.md b/NEWS.md index fe8f7d613687..c47fcacacb05 100644 --- a/NEWS.md +++ b/NEWS.md @@ -208,6 +208,32 @@ * `arccosh(x)` -> `acosh.(x)` * `arctanh(x)` -> `atanh.(x)` +* `rand`, `rand!`, `randn`, `randn!` is more Base-like now (#TBD). 
+ + ```julia + julia> mx.rand(2, 3) + 2×3 mx.NDArray{Float32,2} @ CPU0: + 0.631961 0.324175 0.0762663 + 0.285366 0.395292 0.074995 + + julia> mx.rand(2, 3; low = 1, high = 10) + 2×3 mx.NDArray{Float32,2} @ CPU0: + 7.83884 7.85793 7.64791 + 7.68646 8.56082 8.42189 + ``` + + ```julia + julia> mx.randn(2, 3) + 2×3 mx.NDArray{Float32,2} @ CPU0: + 0.962853 0.424535 -0.320123 + 0.478113 1.72886 1.72287 + + julia> mx.randn(2, 3, μ = 100) + 2×3 mx.NDArray{Float32,2} @ CPU0: + 99.5635 100.483 99.888 + 99.9889 100.533 100.072 + ``` + # v0.3.0 (2017.11.16) * Update `libmxnet` to diff --git a/src/deprecated.jl b/src/deprecated.jl index 8a8df0a56789..0167d57c4b83 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -23,3 +23,29 @@ @deprecate arcsinh(x::NDArray) asinh.(x) @deprecate arccosh(x::NDArray) acosh.(x) @deprecate arctanh(x::NDArray) atanh.(x) + +# @deprecate make `randn` exported accidentially +# so we make the depwarn manually +function randn(μ, σ, dims::NTuple{N,Int}, ctx::Context = cpu()) where N + warn("mx.randn(μ, σ, dims, ctx = cpu()) is deprecated, use " * + "mx.randn(dims...; μ = μ, σ = σ, context = ctx) instead.") + mx.randn(dims...; μ = μ, σ = σ, context = ctx) +end + +function randn!(μ, σ, x::NDArray) + warn("mx.randn!(μ, σ, x::NDArray) is deprecated, use " * + "mx.randn!(x; μ = μ, σ = σ) instead.") + randn!(x; μ = μ, σ = σ) +end + +function rand!(low::Real, high::Real, x::NDArray) + warn("rand!(low, high, x::NDArray) is deprecated, use " * + "rand!(x, low = low, high = high) instead.") + rand!(x, low = low, high = high) +end + +function rand(low::Real, high::Real, dims::NTuple{N,Int}, context::Context = cpu()) where N + warn("rand!(low, high, dims, x::NDArray, context = cpu()) is deprecated, use " * + "rand!(dims..., x; low = low, high = high, context = cpu()) instead.") + rand(dims...; low = low, high = high, context = context) +end diff --git a/src/initializer.jl b/src/initializer.jl index 157958586642..188c5deb6255 100644 --- a/src/initializer.jl 
+++ b/src/initializer.jl @@ -104,9 +104,8 @@ Construct a `UniformInitializer` with the specified scale. """ UniformInitializer() = UniformInitializer(0.07) -function _init_weight(self :: UniformInitializer, name :: Base.Symbol, array :: NDArray) - rand!(-self.scale, self.scale, array) -end +_init_weight(i::UniformInitializer, name::Symbol, x::NDArray) = + rand!(x, low = -i.scale, high = i.scale) """ NormalInitializer @@ -124,9 +123,8 @@ Construct a `NormalInitializer` with mean `mu` and variance `sigma`. """ NormalInitializer(; mu=0, sigma=0.01) = NormalInitializer(mu, sigma) -function _init_weight(self :: NormalInitializer, name :: Base.Symbol, array :: NDArray) - randn!(self.μ, self.σ, array) -end +_init_weight(i::NormalInitializer, name::Symbol, x::NDArray) = + randn!(x, μ = i.μ, σ = i.σ) """ XavierInitializer @@ -175,8 +173,8 @@ function _init_weight(self :: XavierInitializer, name :: Base.Symbol, array :: N σ = √(self.magnitude / factor) if self.distribution == xv_uniform - rand!(-σ, σ, array) + rand!(array, low = -σ, high = σ) elseif self.distribution == xv_normal - randn!(0.0, σ, array) + randn!(array; μ = 0.0, σ = σ) end end diff --git a/src/random.jl b/src/random.jl index 3c5f1dcb564b..1f612e310fe8 100644 --- a/src/random.jl +++ b/src/random.jl @@ -1,68 +1,66 @@ """ - rand!(low, high, arr::NDArray) + rand!(x::NDArray; low = 0, high = 1) Draw random samples from a uniform distribution. Samples are uniformly distributed over the half-open interval [low, high) (includes low, but excludes high). 
-# Examples - ```julia -julia> mx.rand(0, 1, mx.zeros(2, 2)) |> copy -2×2 Array{Float32,2}: - 0.405374 0.321043 - 0.281153 0.713927 +julia> mx.rand!(empty(2, 3)) +2×3 mx.NDArray{Float32,2} @ CPU0: + 0.385748 0.839275 0.444536 + 0.0879585 0.215928 0.104636 + +julia> mx.rand!(empty(2, 3), low = 1, high = 10) +2×3 mx.NDArray{Float32,2} @ CPU0: + 6.6385 4.18888 2.07505 + 8.97283 2.5636 1.95586 ``` """ -function rand!(low::Real, high::Real, out::NDArray) - _random_uniform(NDArray, low=low, high=high, shape=size(out), out=out) -end +rand!(x::NDArray; low = 0, high = 1) = + _random_uniform(NDArray, low = low, high = high, shape = size(x), out = x) """ - rand(low, high, shape, context=cpu()) + rand(dims...; low = 0, high = 1, context = cpu()) Draw random samples from a uniform distribution. Samples are uniformly distributed over the half-open interval [low, high) (includes low, but excludes high). -# Examples - ```julia -julia> mx.rand(0, 1, (2, 2)) |> copy -2×2 Array{Float32,2}: - 0.405374 0.321043 - 0.281153 0.713927 +julia> mx.rand(2, 2) +2×2 mx.NDArray{Float32,2} @ CPU0: + 0.487866 0.825691 + 0.0234245 0.794797 + +julia> mx.rand(2, 2; low = 1, high = 10) +2×2 mx.NDArray{Float32,2} @ CPU0: + 5.5944 5.74281 + 9.81258 3.58068 ``` """ -function rand(low::Real, high::Real, shape::NTuple{N, Int}, ctx::Context=cpu()) where N - out = empty(shape, ctx) - rand!(low, high, out) -end +rand(dims::Int...; low = 0, high = 1, context = cpu()) = + rand!(empty(dims, context), low = low, high = high) """ - randn!(mean, std, arr::NDArray) + randn!(x::NDArray; μ = 0, σ = 1) Draw random samples from a normal (Gaussian) distribution. 
""" -function randn!(mean::Real, stdvar::Real, out::NDArray) - _random_normal(NDArray, loc=mean, scale=stdvar, shape=size(out), out=out) -end +randn!(x::NDArray; μ = 0, σ = 1) = + _random_normal(NDArray, loc = μ, scale = σ, shape = size(x), out = x) """ - randn(mean, std, shape, context=cpu()) + randn(dims...; μ = 0, σ = 1, context = cpu()) Draw random samples from a normal (Gaussian) distribution. """ -function randn(mean::Real, stdvar::Real, shape::NTuple{N,Int}, ctx::Context=cpu()) where N - out = empty(shape, ctx) - randn!(mean, stdvar, out) -end +randn(dims::Int...; μ = 0, σ = 1, context = cpu()) = + randn!(empty(dims, context), μ = μ, σ = σ) """ srand(seed::Int) Set the random seed of libmxnet """ -function srand(seed_state::Int) - @mxcall(:MXRandomSeed, (Cint,), seed_state) -end +srand(seed_state::Int) = @mxcall(:MXRandomSeed, (Cint,), seed_state) diff --git a/test/unittest/random.jl b/test/unittest/random.jl index 30995ecf79bf..34b9d3625c0d 100644 --- a/test/unittest/random.jl +++ b/test/unittest/random.jl @@ -9,11 +9,11 @@ function test_uniform() low = -10; high = 10 seed = 123 mx.srand(seed) - ret1 = mx.rand(low, high, dims) + ret1 = mx.rand(dims..., low = low, high = high) mx.srand(seed) ret2 = mx.empty(dims) - mx.rand!(low, high, ret2) + mx.rand!(ret2, low = low, high = high) @test copy(ret1) == copy(ret2) @test abs(mean(copy(ret1)) - (high+low)/2) < 0.1 @@ -26,11 +26,11 @@ function test_gaussian() μ = 10; σ = 2 seed = 456 mx.srand(seed) - ret1 = mx.randn(μ, σ, dims) + ret1 = mx.randn(dims..., μ = μ, σ = σ) mx.srand(seed) ret2 = mx.empty(dims) - mx.randn!(μ, σ, ret2) + mx.randn!(ret2, μ = μ, σ = σ) @test copy(ret1) == copy(ret2) @test abs(mean(copy(ret1)) - μ) < 0.1 From d3cdbf6702fd8250456ffd7cbda94028948ad24e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 28 Dec 2017 10:50:13 +0800 Subject: [PATCH 604/630] ndarray: remap activation functions (#387) * sigmoid * relu * softmax * log_softmax --- NEWS.md | 15 +++++++ docs/src/api/ndarray.md | 11 
++++++ src/MXNet.jl | 8 +++- src/broadcast.jl | 7 ++++ src/deprecated.jl | 5 +++ src/ndarray.jl | 84 +++++++++++++++++++++++++++++++++++++--- test/unittest/ndarray.jl | 68 ++++++++++++++++++++++++++++++++ 7 files changed, 191 insertions(+), 7 deletions(-) diff --git a/NEWS.md b/NEWS.md index c47fcacacb05..63ca201b56b2 100644 --- a/NEWS.md +++ b/NEWS.md @@ -5,6 +5,11 @@ * `context()` * `empty()` * `expand_dims()` + * `σ()` + * `sigmoid()` + * `relu()` + * `softmax()` + * `log_softmax()` * `SymbolicNode` * `Variable` @@ -208,6 +213,16 @@ * `arccosh(x)` -> `acosh.(x)` * `arctanh(x)` -> `atanh.(x)` +* Please use dot-call on following activation functions. + And the `dim` of `softmax` and `log_softmax` has been fixed + as Julia column-based style. + (#TBD) + + * `σ.(x)` + * `relu.(x)` + * `softmax.(x, [dim = ndims(x)])` + * `log_softmax.(x, [dim = ndims(x)])` + * `rand`, `rand!`, `randn`, `randn!` is more Base-like now (#TBD). ```julia diff --git a/docs/src/api/ndarray.md b/docs/src/api/ndarray.md index 9582ff24a7ef..76fb6a39b621 100644 --- a/docs/src/api/ndarray.md +++ b/docs/src/api/ndarray.md @@ -37,6 +37,17 @@ In the following example `y` can be a `Real` value or another `NDArray` | [`atanh`](@ref) | `atanh.(x)` | Elementwise inverse hyperbolic tangent | +## Activation Functions + +| API | Example | | +|-----------------------|-------------------|-------------------------| +| [`σ`](@ref) | `σ.(x)` | Sigmoid function | +| [`sigmoid`](@ref) | `sigmoid.(x)` | Sigmoid function | +| [`relu`](@ref) | `relu.(x)` | ReLU function | +| [`softmax`](@ref) | `softmax.(x)` | Softmax function | +| [`log_softmax`](@ref) | `log_softmax.(x)` | Softmax followed by log | + + ## Reference ```@autodocs diff --git a/src/MXNet.jl b/src/MXNet.jl index 734abfd76650..8b0b8b8a0761 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -33,7 +33,13 @@ export SymbolicNode, export NDArray, context, empty, - expand_dims + expand_dims, + # activation funcs + σ, + sigmoid, + relu, + softmax, + log_softmax 
# executor.jl export Executor, diff --git a/src/broadcast.jl b/src/broadcast.jl index cdde7f46b2c9..5d15adf385cc 100644 --- a/src/broadcast.jl +++ b/src/broadcast.jl @@ -8,3 +8,10 @@ for f in :[%, @eval Base.$f(a::Broadcasted, b) = Broadcasted(broadcast_($f, unwrap(a), b)) @eval Base.$f(b, a::Broadcasted) = Broadcasted(broadcast_($f, b, unwrap(a))) end + +for f in :[σ, sigmoid, relu, softmax, log_softmax].args + # copy from TakingBroadcastSeriously + @eval $f(a::Broadcasted...) = Broadcasted(broadcast_($f, unwrap.(a)...)) + @eval $f(a::Broadcasted, b) = Broadcasted(broadcast_($f, unwrap(a), b)) + @eval $f(b, a::Broadcasted) = Broadcasted(broadcast_($f, b, unwrap(a))) +end diff --git a/src/deprecated.jl b/src/deprecated.jl index 0167d57c4b83..6b24411b4125 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -49,3 +49,8 @@ function rand(low::Real, high::Real, dims::NTuple{N,Int}, context::Context = cpu "rand!(dims..., x; low = low, high = high, context = cpu()) instead.") rand(dims...; low = low, high = high, context = context) end + +@deprecate sigmoid(x::NDArray) sigmoid.(x) +@deprecate relu(x::NDArray) relu.(x) +@deprecate softmax(x::NDArray; axis = ndims(x)) softmax.(x, axis) +@deprecate log_softmax(x::NDArray; axis = ndims(x)) log_softmax.(x, axis) diff --git a/src/ndarray.jl b/src/ndarray.jl index 274c306c7441..cb8e3eb6f813 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1075,16 +1075,21 @@ _broadcast_target(sig::Expr) = sig.args[2].args[].args[end] Generate docstring from function signature """ function _docsig(fname::Symbol, sig::Expr) - s = get(_nddoc, fname, "") - !isempty(s) && return s - if fname !== :broadcast_ + s = get(_nddoc, fname, "") + !isempty(s) && return s " $sig" else name = _broadcast_target(sig) - sig_ = Expr(:call, Symbol(name, "."), sig.args[3:end]...) 
- str = " $sig_" - @eval @doc $str $name + str = get(_nddoc, name, "") + _nddoc[name] = false # change to false, denote docstring has been set up + if isempty(str) + sig_ = Expr(:call, Symbol(name, "."), sig.args[3:end]...) + str = " $sig_" + end + if str ≠ false + @eval @doc $str $name + end "" end end @@ -1223,6 +1228,67 @@ julia> mx.expand_dims(x, 2) @_remap broadcast_(::typeof(acosh), x::NDArray) arccosh(x) @_remap broadcast_(::typeof(atanh), x::NDArray) arctanh(x) +# activation functions +_nddoc[:σ] = _nddoc[:sigmoid] = doc""" + σ.(x::NDArray) + sigmoid.(x::NDArray) + +Computes sigmoid of x element-wise. + +```math +σ(x) = \frac{1}{(1 + exp(-x))} +``` + +The storage type of `sigmoid` output is always dense. +""" +@_remap broadcast_(::typeof(σ), x::NDArray) sigmoid(x) +@_remap broadcast_(::typeof(sigmoid), x::NDArray) sigmoid(x) + +_nddoc[:relu] = doc""" + relu.(x::NDArray) + +Computes rectified linear. + +```math +\max(x, 0) +``` +""" +@_remap broadcast_(::typeof(relu), x::NDArray) relu(x) + +_nddoc[:softmax] = doc""" + softmax.(x::NDArray, [dim = ndims(x)]) + +Applies the softmax function. + +The resulting array contains elements in the range `(0, 1)` +and the elements along the given axis sum up to 1. + +```math +softmax(\mathbf{z})_j = \frac{e^{z_j}}{\sum_{k=1}^K e^{z_k}} +``` +""" +@_remap broadcast_(::typeof(softmax), x::NDArray) softmax(x; axis = -ndims(x)) +@_remap broadcast_(::typeof(softmax), x::NDArray, dim::Int) softmax(x; axis = -dim) + +_nddoc[:log_softmax] = """ + log_softmax.(x::NDArray, [dim = ndims(x)]) + +Computes the log softmax of the input. +This is equivalent to computing softmax followed by log. 
+ +julia> x +2×3 mx.NDArray{Float64,2} @ CPU0: + 1.0 2.0 0.1 + 0.1 2.0 1.0 + +julia> mx.log_softmax.(x) +2×3 mx.NDArray{Float64,2} @ CPU0: + -1.41703 -0.41703 -2.31703 + -2.31703 -0.41703 -1.41703 +""" +@_remap broadcast_(::typeof(log_softmax), x::NDArray) log_softmax(x; axis = -ndims(x)) +@_remap broadcast_(::typeof(log_softmax), x::NDArray, dim::Int) log_softmax(x; axis = -dim) + ################################################################################ # remapping to solving type unstablility ################################################################################ @@ -1383,6 +1449,12 @@ const _op_import_bl = [ # import black list; do not import these funcs "arcsinh", "arccosh", "arctanh", + + # activation + "sigmoid", + "relu", + "softmax", + "log_softmax", ] macro _import_ndarray_functions() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 6b258fe99646..58c5796eb00d 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -1024,6 +1024,73 @@ function test_hyperbolic() end end # function test_hyperbolic +function test_act_funcs() + info("NDArray::σ/sigmoid") + let + A = Float32[.1, .2, -.3, -.4] + B = @. 
1 / (1 + e^(-A)) + x = NDArray(A) + y = σ.(x) + @test copy(y) ≈ B + + z = sigmoid.(x) + @test copy(z) ≈ B + end + + info("NDArray::relu") + let + A = [1, 2, -3, -4] + B = max.(A, 0) + x = NDArray(A) + y = relu.(x) + @test copy(y) ≈ B + end + + info("NDArray::softmax::1D") + let + A = Float32[1, 2, 3, 4] + B = exp.(A) ./ sum(exp.(A)) + x = NDArray(A) + y = softmax.(x) + @test copy(y) ≈ B + end + + info("NDArray::softmax::2D") + let + A = Float32[1 2; 3 4] + B = exp.(A) ./ sum(exp.(A), 1) + x = NDArray(A) + y = softmax.(x, 1) + @test copy(y) ≈ B + + C = exp.(A) ./ sum(exp.(A), 2) + z = softmax.(x, 2) + @test copy(z) ≈ C + end + + info("NDArray::log_softmax::1D") + let + A = Float32[1, 2, 3, 4] + B = log.(exp.(A) ./ sum(exp.(A))) + x = NDArray(A) + y = log_softmax.(x) + @test copy(y) ≈ B + end + + info("NDArray::log_softmax::2D") + let + A = Float32[1 2; 3 4] + B = log.(exp.(A) ./ sum(exp.(A), 1)) + x = NDArray(A) + y = log_softmax.(x, 1) + @test copy(y) ≈ B + + C = log.(exp.(A) ./ sum(exp.(A), 2)) + z = log_softmax.(x, 2) + @test copy(z) ≈ C + end +end # function test_act_funcs + ################################################################################ # Run tests ################################################################################ @@ -1063,6 +1130,7 @@ end # function test_hyperbolic test_size() test_trigonometric() test_hyperbolic() + test_act_funcs() end end From 9d606639c882d8d2e58040a3aabc4f944615fef7 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 31 Dec 2017 00:36:21 +0800 Subject: [PATCH 605/630] ndarray: refine signature of `clip` (#391) Before: `clip(x, a_min = -4, a_max = 4)` After: `clip(x, -4, 4)` The `a_min` and `a_max` is a required but they are keyword argument. So this commit change them to positional arguments and make sure will fill them. 
--- NEWS.md | 10 ++++++++++ src/MXNet.jl | 2 ++ src/deprecated.jl | 2 ++ src/ndarray.jl | 36 ++++++++++++++++++++++++++++++++++++ src/optimizer.jl | 26 +++++++++++++------------- test/unittest/ndarray.jl | 9 ++++++++- 6 files changed, 71 insertions(+), 14 deletions(-) diff --git a/NEWS.md b/NEWS.md index 63ca201b56b2..aa08666b751f 100644 --- a/NEWS.md +++ b/NEWS.md @@ -2,6 +2,8 @@ * Following material from `mx` module got exported (#TBD): * `NDArray` + * `clip()` + * `clip!()` * `context()` * `empty()` * `expand_dims()` @@ -249,6 +251,14 @@ 99.9889 100.533 100.072 ``` +* Signature of `clip` changed, it doesn't require any keyword argument now. + (#TBD) + + Before: `clip(x, a_min = -4, a_max = 4)` + After: `clip(x, -4, 4)` + +---- + # v0.3.0 (2017.11.16) * Update `libmxnet` to diff --git a/src/MXNet.jl b/src/MXNet.jl index 8b0b8b8a0761..e4c23f299dc1 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -31,6 +31,8 @@ export SymbolicNode, # ndarray.jl export NDArray, + clip, + clip!, context, empty, expand_dims, diff --git a/src/deprecated.jl b/src/deprecated.jl index 6b24411b4125..9a24aa815148 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -54,3 +54,5 @@ end @deprecate relu(x::NDArray) relu.(x) @deprecate softmax(x::NDArray; axis = ndims(x)) softmax.(x, axis) @deprecate log_softmax(x::NDArray; axis = ndims(x)) log_softmax.(x, axis) + +@deprecate clip(x; a_min = 0, a_max = 0) clip(x, a_min, a_max) diff --git a/src/ndarray.jl b/src/ndarray.jl index cb8e3eb6f813..5583e545ae11 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1184,6 +1184,41 @@ _ndsig[:reshape] = :(reshape(arr; shape = dim, reverse = !reverse)) @_remap prod(arr::NDArray) prod(arr) @_remap prod(arr::NDArray, dims) prod(arr; axis = 0 .- dims, keepdims = true) +_nddoc[:clip] = _nddoc[:clip!] = +""" + clip(x::NDArray, min, max) + clip!(x::NDArray, min, max) + +Clips (limits) the values in `NDArray`. +Given an interval, values outside the interval are clipped to the interval edges. 
+Clipping `x` between `min` and `x` would be: + +```julia +clip(x, min_, max_) = max(min(x, max_), min_)) +``` + +```jldoctest +julia> x = NDArray(1:9); + +julia> mx.clip(x, 2, 8)' +1×9 mx.NDArray{Int64,2} @ CPU0: + 2 2 3 4 5 6 7 8 8 +``` + +The storage type of clip output depends on storage types of inputs and the +`min`, `max` parameter values: + +- clip(default) = default +- clip(row_sparse, min <= 0, max >= 0) = row_sparse +- clip(csr, min <= 0, max >= 0) = csr +- clip(row_sparse, min < 0, max < 0) = default +- clip(row_sparse, min > 0, max > 0) = default +- clip(csr, min < 0, max < 0) = csr +- clip(csr, min > 0, max > 0) = csr +""" +@_remap clip(x::NDArray, min::Real, max::Real) clip(x; a_min = min, a_max = max) +@_remap clip!(x::NDArray, min::Real, max::Real) clip(x; a_min = min, a_max = max) + _nddoc[:expand_dims] = """ expand_dims(x::NDArray, dim) @@ -1416,6 +1451,7 @@ const _op_import_bl = [ # import black list; do not import these funcs "_full", # we already have `mx.fill` "_ones", # we already have `mx.ones` "_zeros", # we already have `mx.zeros` + "clip", "expand_dims", # arithmetic diff --git a/src/optimizer.jl b/src/optimizer.jl index 8d46a9e407a1..7a647332231b 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -243,29 +243,29 @@ Base class for all optimizer options. abstract type AbstractOptimizerOptions end """ - normalized_gradient(opts, state, weight, grad) + normalized_gradient(opts, state, W, ∇) -* `opts::AbstractOptimizerOptions`: options for the optimizer, should contain the field -`grad_clip` and `weight_decay`. -* `state::OptimizationState`: the current optimization state. -* `weight::NDArray`: the trainable weights. -* `grad::NDArray`: the original gradient of the weights. +Get the properly normalized gradient (re-scaled and clipped if necessary). - Get the properly normalized gradient (re-scaled and clipped if necessary). 
+* `opts::AbstractOptimizerOptions`: options for the optimizer, + should contain the field `grad_clip` and `weight_decay`. +* `state::OptimizationState`: the current optimization state. +* `W::NDArray`: the trainable weights. +* `∇::NDArray`: the original gradient of the weights. """ function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, - weight::NDArray, grad::NDArray) - grad_scale = 1.0 / state.batch_size + W::NDArray, ∇::NDArray) + scale = 1.0 / state.batch_size + ∇ = ∇ * scale - grad = grad_scale * grad if opts.grad_clip > 0 - grad = clip(grad, a_min=-opts.grad_clip, a_max=opts.grad_clip) + ∇ = clip(∇, -opts.grad_clip, opts.grad_clip) end if opts.weight_decay > 0 - @inplace grad += opts.weight_decay * weight + @inplace ∇ += opts.weight_decay * W end - return grad + ∇ end include("optimizers/sgd.jl") diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 58c5796eb00d..df357f6c4c33 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -604,12 +604,19 @@ function test_clip() j_array, nd_array = rand_tensors(dims) clip_up = maximum(abs.(j_array)) / 2 clip_down = 0 - clipped = mx.clip(nd_array, a_min=clip_down, a_max=clip_up) + clipped = clip(nd_array, clip_down, clip_up) # make sure the original array is not modified @test copy(nd_array) ≈ j_array @test all(clip_down .<= copy(clipped) .<= clip_up) + + info("NDArray::clip!") + let + x = NDArray(1.0:20) + clip!(x, 5, 15) + @test all(5 .<= copy(x) .<= 15) + end end function test_power() From a4143e93daf550e8ef014aefcd335a63f3565694 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 1 Jan 2018 12:06:53 +0800 Subject: [PATCH 606/630] Bump :copyright: (#397) --- LICENSE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE.md b/LICENSE.md index a34c4a0f662e..5ecf95ac60bc 100644 --- a/LICENSE.md +++ b/LICENSE.md @@ -1,6 +1,6 @@ The MXNet.jl package is licensed under version 2.0 of the Apache License: -> Copyright (c) 2015: +> 
Copyright (c) 2015-2018: > * Chiyuan Zhang > > Apache License From ae425bfd8463cea26b043adc175962b314b519ce Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 1 Jan 2018 23:40:19 +0800 Subject: [PATCH 607/630] doc: append 'Defined in' into operator docstring (#393) e.g. ``` sum(arr::NDArray) Defined in src/operator/tensor/broadcast_reduce_op_value.cc:L85 ``` --- src/ndarray.jl | 18 +++++++++++++----- src/util.jl | 19 ++++++++++++++++++- test/unittest/util.jl | 7 +++++++ 3 files changed, 38 insertions(+), 6 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 5583e545ae11..1e6eeee81f94 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -1074,11 +1074,9 @@ _broadcast_target(sig::Expr) = sig.args[2].args[].args[end] """ Generate docstring from function signature """ -function _docsig(fname::Symbol, sig::Expr) +function _docsig(fname::Symbol, sig::Expr, opname::String) if fname !== :broadcast_ - s = get(_nddoc, fname, "") - !isempty(s) && return s - " $sig" + get(_nddoc, fname, " $sig") * "\n" * _getdocdefine(opname) else name = _broadcast_target(sig) str = get(_nddoc, name, "") @@ -1088,6 +1086,16 @@ function _docsig(fname::Symbol, sig::Expr) str = " $sig_" end if str ≠ false + # append "Defined in ..." 
+ def = _getdocdefine(opname) + str = if str isa Markdown.MD + str = Markdown.MD(copy(str.content), copy(str.meta)) + push!(str, Markdown.Paragraph(def)) + str + else + str * def + end + @eval @doc $str $name end "" @@ -1139,7 +1147,7 @@ macro _remap(sig::Expr, imp::Expr) $retexpr end - docstr = _docsig(fname, sig) + docstr = _docsig(fname, sig, opname) func_def = Expr(:function, sig, func_body) esc(quote diff --git a/src/util.jl b/src/util.jl index c729bc7cd9ae..c4f5e63244dd 100644 --- a/src/util.jl +++ b/src/util.jl @@ -92,7 +92,7 @@ function _get_cached_libmx_op_handle(name :: String) end end -function _get_libmx_op_description(name :: String, handle :: MX_OpHandle) +function _get_libmx_op_description(name::String, handle::MX_OpHandle) # get operator information (human readable) ref_real_name = Ref{char_p}(0) ref_desc = Ref{char_p}(0) @@ -163,6 +163,23 @@ function _format_signature(narg::Int, arg_names::Ref{char_pp}) return join([unsafe_string(name) for name in arg_names] , ", ") end +""" +Extract the line of `Defined in ...` + +julia> mx._getdocdefine("sgd_update") +"Defined in src/operator/optimizer_op.cc:L53" +``` +""" +function _getdocdefine(name::String) + op = _get_libmx_op_handle(name) + str = _get_libmx_op_description(name, op)[1] + lines = split(str, '\n') + for m ∈ match.(r"^Defined in .*$", lines) + m != nothing && return m.match + end + "" +end + """ libmxnet operators signature checker. 
diff --git a/test/unittest/util.jl b/test/unittest/util.jl index 823decffd442..0e6107ca0acc 100644 --- a/test/unittest/util.jl +++ b/test/unittest/util.jl @@ -5,6 +5,12 @@ using Base.Test using MXNet +function test_getdocdefine() + info("Util::_getdocdefine") + @test contains(mx._getdocdefine("sgd_update"), "Defined in") +end # function test_getdocdefine + + function test_firstarg() info("Util::_firstarg") @test mx._firstarg(:(f(x, y))) == :x @@ -22,6 +28,7 @@ end # function test_firstarg @testset "Util Test" begin test_firstarg() + test_getdocdefine() end # @testset "Util" end # module TestUtil From 930090b8b94c8158ffcbc210b36ab93d810798e5 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 2 Jan 2018 08:51:51 +0800 Subject: [PATCH 608/630] ndarray: fix 1D NDArray display (#399) --- src/ndarray.jl | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 1e6eeee81f94..78adf381f29c 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -113,10 +113,11 @@ function Base.show(io::IO, x::NDArray) end # for REPL -function Base.show(io::IO, ::MIME{Symbol("text/plain")}, x::NDArray) +function Base.show(io::IO, ::MIME{Symbol("text/plain")}, x::NDArray{T, N}) where {T, N} type_ = split(string(typeof(x)), '.', limit=2)[end] - println(io, "$(join(size(x), "×")) $(type_) @ $(context(x)):") - Base.showarray(io, try_get_shared(x, sync = :read), false, header=false) + size_ = N == 1 ? "$(length(x))-element" : join(size(x), "×") + println(io, "$size_ $type_ @ $(context(x)):") + Base.showarray(io, try_get_shared(x, sync = :read), false, header = false) end Base.unsafe_convert(::Type{MX_handle}, obj::NDArray) = From a1cef7f84a69fe7b8dc6cf6d10f32775121043e1 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 2 Jan 2018 10:46:39 +0800 Subject: [PATCH 609/630] ndarray: fix type convertion in setindex! 
(#398) Make this case work ```julia x = mx.NDArray([1, 2, 3]); x[:] = 1.1 ``` --- src/ndarray.jl | 2 +- test/unittest/ndarray.jl | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/ndarray.jl b/src/ndarray.jl index 78adf381f29c..14e07979e768 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -356,7 +356,7 @@ end function setindex!(arr::NDArray, val::Real, ::Colon) @assert arr.writable - _set_value(out=arr, src=convert(eltype(arr), val)) + _set_value(out = arr, src = dump_mx_param(val)) end function setindex!(arr::NDArray, val::Array{T}, ::Colon) where T<:Real diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index df357f6c4c33..62fa4f9188f0 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -157,6 +157,14 @@ function test_linear_idx() x[24] = 42 @test copy(x[24]) == [42] end + + info("NDArray::setindex!::type convert") + let + x = NDArray([1, 2, 3]) + @test eltype(x) == Int + x[:] = π + @test copy(x) == [3, 3, 3] + end end # function test_linear_idx function test_first() From 86ffb5d77693a279f254aa17bc344bba8abee2b9 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 4 Jan 2018 12:21:10 +0800 Subject: [PATCH 610/630] ndarray: inplace modulo operators (#389) ```julia mod_from!(x, y) mod_from!(x, 2) rmod_from!(2, x) ``` --- NEWS.md | 10 ++++++++- src/ndarray.jl | 40 +++++++++++++++++++++++++++++++--- test/unittest/ndarray.jl | 47 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 93 insertions(+), 4 deletions(-) diff --git a/NEWS.md b/NEWS.md index aa08666b751f..431d69a6057a 100644 --- a/NEWS.md +++ b/NEWS.md @@ -117,7 +117,7 @@ 4.0 ``` -* modulo operator. (#TBD) +* Modulo operator. (#TBD) ```julia x = NDArray(...) @@ -128,6 +128,14 @@ 2 .% x ``` +* Inplace modulo operator, `mod_from!` and `rmod_from!`. (#TBD) + + ```julia + mod_from!(x, y) + mod_from!(x, 2) + rmod_from!(2, x) + ``` + * `cat`, `vcat`, `hcat` is implemented. (#TBD) E.g. 
`hcat` diff --git a/src/ndarray.jl b/src/ndarray.jl index 14e07979e768..c75c2e1c366f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -749,6 +749,24 @@ function broadcast_(::typeof(/), x::NDArray{T}, y::Real) where {T<:Integer} _div_scalar(x, scalar = y) end +""" + mod_from!(x::NDArray, y::NDArray) + mod_from!(x::NDArray, y::Real) + +Elementwise modulo for `NDArray`. +Inplace updating. +""" +mod_from!(x::NDArray, y::NDArray) = _mod!(x, y) +mod_from!(x::NDArray, y::Real) = _mod_scalar!(x, y) + +""" + rmod_from!(y::Real, x::NDArray) + +Elementwise modulo for `NDArray`. +Inplace updating. +""" +rmod_from!(y::Real, x::NDArray) = _rmod_scalar!(x, y) + import Base: % """ @@ -761,8 +779,8 @@ Elementwise modulo for `NDArray`. %(x::NDArray, y::Real) = _mod_scalar(x, scalar = y) broadcast_(::typeof(%), x::NDArray, y::NDArray) = _mod(x, y) -broadcast_(::typeof(%), x::NDArray, y::Real) = _mod_scalar(x, scalar = y) -broadcast_(::typeof(%), y::Real, x::NDArray) = _rmod_scalar(x, scalar = y) +broadcast_(::typeof(%), x::NDArray, y::Real) = _mod_scalar(x, y) +broadcast_(::typeof(%), y::Real, x::NDArray) = _rmod_scalar(x, y) import Base: ^ @@ -1061,8 +1079,13 @@ function _autoimport(name::Symbol, sig::Expr) end end +_isinplace(name::Symbol) = endswith(string(name), "!") + +_writable(name::Symbol, x) = + _isinplace(name) ? :(@assert $x.writable "this NDArray isn't writable") : :() + function _outexpr(name::Symbol, x #= the first arg of `sig` =#) - if endswith(string(name), "!") # `func!` + if _isinplace(name) # `func!` Ptr, 1, :([[MX_handle(x.handle)]]), :($x) else retexpr = :(NDArray(MX_NDArrayHandle(unsafe_load(hdls_ref[], 1)))) @@ -1124,7 +1147,10 @@ macro _remap(sig::Expr, imp::Expr) # handler for `func!` which has side effect on first argument. 
T, n_output, hdls_ref, retexpr = _outexpr(fname, _firstarg(sig)) + assert_expr = _writable(fname, _firstarg(sig)) + func_body = quote + $assert_expr op_handle = _get_cached_libmx_op_handle($opname) n_output = Ref(Cint($n_output)) hdls_ref = $hdls_ref @@ -1346,6 +1372,12 @@ julia> mx.log_softmax.(x) @_remap _mod(x::NDArray, y::NDArray) _mod(x, y) @_remap _mod!(x::NDArray, y::NDArray) _mod(x, y) +@_remap _mod_scalar(x::NDArray, y::Real) _mod_scalar(x; scalar = y) +@_remap _mod_scalar!(x::NDArray, y::Real) _mod_scalar(x; scalar = y) + +@_remap _rmod_scalar(x::NDArray, y::Real) _rmod_scalar(x; scalar = y) +@_remap _rmod_scalar!(x::NDArray, y::Real) _rmod_scalar(x; scalar = y) + ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -1467,6 +1499,8 @@ const _op_import_bl = [ # import black list; do not import these funcs "_plus", "_minus", "_mod", + "_mod_scalar", + "_rmod_scalar", "dot", "max", diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 62fa4f9188f0..b52ecc722b97 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -535,6 +535,7 @@ function test_mod() @test copy(z) ≈ D end + info("NDArray::mod::scalar") let x = NDArray(A) C = A .% 2 y = x .% 2 @@ -547,6 +548,52 @@ function test_mod() y = 11 .% x @test copy(y) ≈ C end + + info("NDArray::mod_from!") + let + x = NDArray(A) + y = NDArray(B) + C = A .% B + mx.mod_from!(x, y) + @test copy(x) ≈ C + end + + let + x = NDArray(A) + y = NDArray(B) + C = B .% A + mx.mod_from!(y, x) + + @test copy(y) ≈ C + end + + info("NDArray::mod_from!::scalar") + let + x = NDArray(A) + C = A .% 2 + mx.mod_from!(x, 2) + @test copy(x) ≈ C + end + + info("NDArray::rmod_from!") + let + x = NDArray(A) + C = 11 .% A + mx.rmod_from!(11, x) + @test copy(x) ≈ C + end + + info("NDArray::mod_from!::writable") + let + x = NDArray(A) + y = 
NDArray(B) + x.writable = false + y.writable = false + @test_throws AssertionError mx.mod_from!(x, y) + @test_throws AssertionError mx.mod_from!(y, x) + @test_throws AssertionError mx.mod_from!(x, 2) + @test_throws AssertionError mx.rmod_from!(2, x) + end end # function test_mod From 30852dd2a60179fa7b43c0a551852a24f14732d6 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 4 Jan 2018 12:22:59 +0800 Subject: [PATCH 611/630] ndarray: support modulo operation in macro inplace (#390) * ndarray: inplace modulo operators ```julia mod_from!(x, y) mod_from!(x, 2) rmod_from!(2, x) ``` * ndarray: support modulo operation for macro inplace Blocker: #389 * update doc --- NEWS.md | 1 + docs/src/api/ndarray.md | 1 + src/MXNet.jl | 1 + src/ndarray.jl | 23 +++++++++++++---------- test/unittest/ndarray.jl | 9 +++++++++ 5 files changed, 25 insertions(+), 10 deletions(-) diff --git a/NEWS.md b/NEWS.md index 431d69a6057a..b6264f412761 100644 --- a/NEWS.md +++ b/NEWS.md @@ -7,6 +7,7 @@ * `context()` * `empty()` * `expand_dims()` + * `@inplace` * `σ()` * `sigmoid()` * `relu()` diff --git a/docs/src/api/ndarray.md b/docs/src/api/ndarray.md index 76fb6a39b621..5877d8257758 100644 --- a/docs/src/api/ndarray.md +++ b/docs/src/api/ndarray.md @@ -11,6 +11,7 @@ In the following example `y` can be a `Real` value or another `NDArray` | `*` | `x .* y` | Elementwise multiplication | | `/` | `x ./ y` | Elementwise division | | `^` | `x .^ y` | Elementwise power | +| `%` | `x .% y` | Elementwise modulo | ## Trigonometric Functions diff --git a/src/MXNet.jl b/src/MXNet.jl index e4c23f299dc1..bd27c715b61d 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -36,6 +36,7 @@ export NDArray, context, empty, expand_dims, + @inplace, # activation funcs σ, sigmoid, diff --git a/src/ndarray.jl b/src/ndarray.jl index c75c2e1c366f..c9cea4fe3597 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -572,18 +572,21 @@ will translate into which will do inplace adding of the contents of `b` into `a`. 
""" -macro inplace(stmt) - if stmt.head == :+= || stmt.head == :.+= - Expr(:call, :add_to!, esc(stmt.args[1]), esc(stmt.args[2])) - elseif stmt.head == :-= || stmt.head == :.-= - Expr(:call, :sub_from!, esc(stmt.args[1]), esc(stmt.args[2])) - elseif stmt.head == :.*= - Expr(:call, :mul_to!, esc(stmt.args[1]), esc(stmt.args[2])) - elseif stmt.head == :./= - Expr(:call, :div_from!, esc(stmt.args[1]), esc(stmt.args[2])) +macro inplace(ex) + f = if ex.head == :+= || ex.head == :.+= + :add_to! + elseif ex.head == :-= || ex.head == :.-= + :sub_from! + elseif ex.head == :.*= + :mul_to! + elseif ex.head == :./= + :div_from! + elseif ex.head == :.%= + :mod_from! else - error("unsupported inplace translation for $stmt") + error("unsupported inplace translation for $ex") end + Expr(:call, f, esc(ex.args[1]), esc(ex.args[2])) end """ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index b52ecc722b97..df7238dc73a4 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -594,6 +594,15 @@ function test_mod() @test_throws AssertionError mx.mod_from!(x, 2) @test_throws AssertionError mx.rmod_from!(2, x) end + + info("NDArray::mod::inplace") + let + x = NDArray(A) + y = NDArray(B) + C = A .% B + @inplace x .%= y + @test copy(x) ≈ C + end end # function test_mod From 0ef59668f695c777ae094fdf437cf1b9165af3a0 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 4 Jan 2018 12:38:10 +0800 Subject: [PATCH 612/630] ndarray: add Base.ones(::NDArray) and Base.zeros (#363) For creating NDArray with same type and dims --- NEWS.md | 4 ++++ src/ndarray.jl | 38 +++++++++++++++----------------------- test/unittest/ndarray.jl | 22 ++++++++++++++++++++++ 3 files changed, 41 insertions(+), 23 deletions(-) diff --git a/NEWS.md b/NEWS.md index b6264f412761..f49184dfdda7 100644 --- a/NEWS.md +++ b/NEWS.md @@ -118,6 +118,10 @@ 4.0 ``` +* `Base.ones(x::NDArray)` for creating an one-ed `NDArray`. (#TBD) + +* `Base.zeros(x::NDArray)` for creating a zero-ed `NDArray`. 
(#TBD) + * Modulo operator. (#TBD) ```julia diff --git a/src/ndarray.jl b/src/ndarray.jl index c9cea4fe3597..94f21f00e943 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -172,8 +172,9 @@ Note that the returned `NDArray` is uninitialized. Base.similar(x::NDArray{T}) where {T} = empty(T, size(x), context(x)) """ - zeros(DType, dims[, ctx::Context = cpu()]) - zeros(DType, dims...) + zeros([DType], dims, [ctx::Context = cpu()]) + zeros([DType], dims...) + zeros(x::NDArray) Create zero-ed `NDArray` with specific shape and type. """ @@ -185,19 +186,17 @@ end zeros(::Type{T}, dims::Int...) where {T<:DType} = zeros(T, dims) -""" - zeros(dims[, ctx::Context = cpu()]) - zeros(dims...) - -Create zero-ed `NDArray` with specific shape. -""" -zeros(dims::NTuple{N, Int}, ctx::Context = cpu()) where N = +zeros(dims::NTuple{N,Int}, ctx::Context = cpu()) where N = zeros(MX_float, dims, ctx) zeros(dims::Int...) = zeros(dims) +zeros(x::NDArray)::typeof(x) = zeros_like(x) +Base.zeros(x::NDArray)::typeof(x) = zeros_like(x) + """ - ones(DType, dims::Tuple[, ctx::Context = cpu()]) - ones(DType, dim1, dim2...) + ones([DType], dims, [ctx::Context = cpu()]) + ones([DType], dims...) + ones(x::NDArray) Create an `NDArray` with specific shape & type, and initialize with 1. """ @@ -209,20 +208,13 @@ end ones(::Type{T}, dims::Int...) where T<:DType = ones(T, dims) -""" - ones(dims::Tuple[, ctx::Context = cpu()]) - ones(dim1, dim2, ...) - -Create an `NDArray` with specific shape and initialize with 1. -""" -function ones(dims::NTuple{N,Int}, ctx::Context = cpu()) where N - arr = empty(dims, ctx) - arr[:] = 1 - arr -end - +ones(dims::NTuple{N,Int}, ctx::Context = cpu()) where N = + ones(MX_float, dims, ctx) ones(dims::Int...) 
= ones(dims) +ones(x::NDArray)::typeof(x) = ones_like(x) +Base.ones(x::NDArray)::typeof(x) = ones_like(x) + import Base: size, length, ndims, eltype """ diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index df7238dc73a4..ac80fc519d13 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -29,6 +29,27 @@ function test_constructor() end # function test_constructor +function test_ones_zeros_like() + info("NDArray::Base.zeros") + let x = mx.rand(1, 10, (1, 3, 2, 4)) + y = zeros(x) + @test sum(copy(y)) == 0 + + y = mx.zeros(x) + @test sum(copy(y)) == 0 + end + + info("NDArray::Base.ones") + let x = mx.rand(1, 10, (1, 3, 2, 4)) + y = ones(x) + @test sum(copy(y)) == 1 * 3 * 2 * 4 + + y = mx.ones(x) + @test sum(copy(y)) == 1 * 3 * 2 * 4 + end +end # function test_ones_zeros_like + + function test_copy() dims = rand_dims() tensor = rand(mx.MX_float, dims) @@ -1167,6 +1188,7 @@ end # function test_act_funcs ################################################################################ @testset "NDArray Test" begin test_constructor() + test_ones_zeros_like() test_assign() test_copy() test_slice() From 49399fc9d351ddcb1e08e910b0b39d67d314d714 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 7 Jan 2018 14:29:11 +0800 Subject: [PATCH 613/630] ndarray: broadcasting along dimension on arith operators (#401) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * `+` * `-` * `*` * `/` * `%` * `^` ```julia julia> x = NDArray([1 2 3; 4 5 6]) 2×3 mx.NDArray{Int64,2} @ CPU0: 1 2 3 4 5 6 julia> y = NDArray([1; 10]) 2-element mx.NDArray{Int64,1} @ CPU0: 1 10 julia> x .+ y 2×3 mx.NDArray{Int64,2} @ CPU0: 2 3 4 14 15 16 ``` --- NEWS.md | 28 +++++++++++ src/deprecated.jl | 40 +++++++++++++++ src/ndarray.jl | 72 ++++++++++++++++++++++---- test/unittest/ndarray.jl | 106 ++++++++++++++++++++++++++++++++++++++- 4 files changed, 234 insertions(+), 12 deletions(-) diff --git a/NEWS.md b/NEWS.md index 
f49184dfdda7..ee00a6260396 100644 --- a/NEWS.md +++ b/NEWS.md @@ -206,6 +206,34 @@ ### `NDArray` +* Broadcasting along dimension supported on following operators (#TBD): + + * `+` + * `-` + * `*` + * `/` + * `%` + * `^` + + ```julia + julia> x = NDArray([1 2 3; + 4 5 6]) + 2×3 mx.NDArray{Int64,2} @ CPU0: + 1 2 3 + 4 5 6 + + julia> y = NDArray([1; + 10]) + 2-element mx.NDArray{Int64,1} @ CPU0: + 1 + 10 + + julia> x .+ y + 2×3 mx.NDArray{Int64,2} @ CPU0: + 2 3 4 + 14 15 16 + ``` + * Please use dot-call on following trigonometric functions. Also, the `arc*` has been renamed to keep consistent with `Base`. (#TBD) diff --git a/src/deprecated.jl b/src/deprecated.jl index 9a24aa815148..aa12e264f729 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -56,3 +56,43 @@ end @deprecate log_softmax(x::NDArray; axis = ndims(x)) log_softmax.(x, axis) @deprecate clip(x; a_min = 0, a_max = 0) clip(x, a_min, a_max) + +function broadcast_plus(x::NDArray, y::NDArray) + warn("broadcast_plus(x, y) is deprecated, use x .+ y instead.") + x .+ y +end + +function broadcast_add(x::NDArray, y::NDArray) + warn("broadcast_add(x, y) is deprecated, use x .+ y instead.") + x .+ y +end + +function broadcast_sub(x::NDArray, y::NDArray) + warn("broadcast_sub(x, y) is deprecated, use x .- y instead.") + x .- y +end + +function broadcast_minus(x::NDArray, y::NDArray) + warn("broadcast_minus(x, y) is deprecated, use x .- y instead.") + x .- y +end + +function broadcast_mul(x::NDArray, y::NDArray) + warn("broadcast_mul(x, y) is deprecated, use x .* y instead.") + x .* y +end + +function broadcast_div(x::NDArray, y::NDArray) + warn("broadcast_div(x, y) is deprecated, use x ./ y instead.") + x ./ y +end + +function broadcast_mod(x::NDArray, y::NDArray) + warn("broadcast_mod(x, y) is deprecated, use x .% y instead.") + x .% y +end + +function broadcast_power(x::NDArray, y::NDArray) + warn("broadcast_power(x, y) is deprecated, use x.^y instead.") + x.^y +end diff --git a/src/ndarray.jl b/src/ndarray.jl 
index 94f21f00e943..9aab4ebcaf9a 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -613,8 +613,12 @@ added together. Note at least the first or second argument needs to be an +(x::NDArray, y::Real) = _plus_scalar(x, scalar = y) +(y::Real, x::NDArray) = _plus_scalar(x, scalar = y) -broadcast_(::typeof(+), x::NDArray, y::NDArrayOrReal) = x + y -broadcast_(::typeof(+), x::Real, y::NDArray) = x + y +broadcast_(::typeof(+), x::NDArray, y::Real) = x + y +broadcast_(::typeof(+), x::Real, y::NDArray) = x + y + +broadcast_(::typeof(+), x::NDArray{T,N}, y::NDArray{T,N}) where {T,N} = x + y +broadcast_(::typeof(+), x::NDArray{T,N}, y::NDArray{T,M}) where {T,N,M} = + _broadcast_add(x, y) """ sub_from!(dst::NDArray, args::NDArrayOrReal...) @@ -646,8 +650,12 @@ Or create the negative of `x`. -(x::NDArray, y::Real) = _minus_scalar(x, scalar = y) -(y::Real, x::NDArray) = _rminus_scalar(x, scalar = y) -broadcast_(::typeof(-), x::NDArray, y::NDArrayOrReal) = x - y -broadcast_(::typeof(-), x::Real, y::NDArray) = x - y +broadcast_(::typeof(-), x::NDArray, y::Real) = x - y +broadcast_(::typeof(-), x::Real, y::NDArray) = x - y + +broadcast_(::typeof(-), x::NDArray{T,N}, y::NDArray{T,N}) where {T,N} = x - y +broadcast_(::typeof(-), x::NDArray{T,N}, y::NDArray{T,M}) where {T,N,M} = + _broadcast_minus(x, y) """ mul_to!(dst::NDArray, arg::NDArrayOrReal) @@ -675,9 +683,13 @@ Elementwise multiplication for `NDArray`. 
*(x::NDArray, y::Real) = _mul_scalar(x, scalar = y) *(y::Real, x::NDArray) = _mul_scalar(x, scalar = y) -broadcast_(::typeof(*), x::NDArray, y::Real) = x * y -broadcast_(::typeof(*), y::Real, x::NDArray) = x * y -broadcast_(::typeof(*), x::NDArray, y::NDArray) = _mul(x, y) +broadcast_(::typeof(*), x::NDArray, y::Real) = x * y +broadcast_(::typeof(*), y::Real, x::NDArray) = x * y + +broadcast_(::typeof(*), x::NDArray{T,N}, y::NDArray{T,N}) where {T,N} = + _mul(x, y) +broadcast_(::typeof(*), x::NDArray{T,N}, y::NDArray{T,M}) where {T,N,M} = + _broadcast_mul(x, y) """ *(A::NDArray, B::NDArray) @@ -735,10 +747,14 @@ of the same shape. """ /(x::NDArray, y::Real) = _div_scalar(x, scalar = y) -broadcast_(::typeof(/), x::NDArray, y::NDArray) = _div(x, y) broadcast_(::typeof(/), x::NDArray, y::Real) = _div_scalar(x, scalar = y) broadcast_(::typeof(/), y::Real, x::NDArray) = _rdiv_scalar(x, scalar = y) +broadcast_(::typeof(/), x::NDArray{T,N}, y::NDArray{T,N}) where {T,N} = + _div(x, y) +broadcast_(::typeof(/), x::NDArray{T,N}, y::NDArray{T,M}) where {T,N,M} = + _broadcast_div(x, y) + function broadcast_(::typeof(/), x::NDArray{T}, y::Real) where {T<:Integer} @assert(round(T, y) != zero(T), "Integer divided by zero") _div_scalar(x, scalar = y) @@ -773,15 +789,18 @@ Elementwise modulo for `NDArray`. 
""" %(x::NDArray, y::Real) = _mod_scalar(x, scalar = y) -broadcast_(::typeof(%), x::NDArray, y::NDArray) = _mod(x, y) broadcast_(::typeof(%), x::NDArray, y::Real) = _mod_scalar(x, y) broadcast_(::typeof(%), y::Real, x::NDArray) = _rmod_scalar(x, y) +broadcast_(::typeof(%), x::NDArray{T,N}, y::NDArray{T,N}) where {T,N} = + _mod(x, y) +broadcast_(::typeof(%), x::NDArray{T,N}, y::NDArray{T,M}) where {T,N,M} = + _broadcast_mod(x, y) + import Base: ^ # document of `.^` is merged into SymbolicNode's -broadcast_(::typeof(^), x::NDArray, y::NDArray) = _power(x, y) broadcast_(::typeof(^), x::NDArray, s::Real) = _power_scalar(x, scalar = s) broadcast_(::typeof(^), s::Real, x::NDArray) = _rpower_scalar(x, scalar = s) @@ -789,6 +808,11 @@ broadcast_(::typeof(^), ::Irrational{:e}, x::NDArray) = exp(x) broadcast_(::typeof(^), x::NDArray, s::Irrational) = _power_scalar(x, scalar = s) broadcast_(::typeof(^), s::Irrational, x::NDArray) = _rpower_scalar(x, scalar = s) +broadcast_(::typeof(^), x::NDArray{T,N}, y::NDArray{T,N}) where {T,N} = + _power(x, y) +broadcast_(::typeof(^), x::NDArray{T,N}, y::NDArray{T,M}) where {T,N,M} = + _broadcast_power(x, y) + """ fill!(arr::NDArray, x) @@ -1373,6 +1397,24 @@ julia> mx.log_softmax.(x) @_remap _rmod_scalar(x::NDArray, y::Real) _rmod_scalar(x; scalar = y) @_remap _rmod_scalar!(x::NDArray, y::Real) _rmod_scalar(x; scalar = y) +@_remap _broadcast_add(x::NDArray, y::NDArray) broadcast_add(x, y) +@_remap _broadcast_add!(x::NDArray, y::NDArray) broadcast_add(x, y) + +@_remap _broadcast_minus(x::NDArray, y::NDArray) broadcast_minus(x, y) +@_remap _broadcast_minus!(x::NDArray, y::NDArray) broadcast_minus(x, y) + +@_remap _broadcast_mul(x::NDArray, y::NDArray) broadcast_mul(x, y) +@_remap _broadcast_mul!(x::NDArray, y::NDArray) broadcast_mul(x, y) + +@_remap _broadcast_div(x::NDArray, y::NDArray) broadcast_div(x, y) +@_remap _broadcast_div!(x::NDArray, y::NDArray) broadcast_div(x, y) + +@_remap _broadcast_mod(x::NDArray, y::NDArray) 
broadcast_mod(x, y) +@_remap _broadcast_mod!(x::NDArray, y::NDArray) broadcast_mod(x, y) + +@_remap _broadcast_power(x::NDArray, y::NDArray) broadcast_power(x, y) +@_remap _broadcast_power!(x::NDArray, y::NDArray) broadcast_power(x, y) + ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -1529,6 +1571,16 @@ const _op_import_bl = [ # import black list; do not import these funcs "relu", "softmax", "log_softmax", + + # broadcast + "broadcast_add", + "broadcast_plus", + "broadcast_minus", + "broadcast_sub", + "broadcast_mul", + "broadcast_div", + "broadcast_mod", + "broadcast_power", ] macro _import_ndarray_functions() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index ac80fc519d13..f18b9d20f123 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -8,8 +8,8 @@ using ..Main: rand_dims ################################################################################ # Test Implementations ################################################################################ -rand_tensors(dims::NTuple{N, Int}) where {N} = rand_tensors(mx.MX_float, dims) -function rand_tensors(::Type{T}, dims::NTuple{N, Int}) where {N, T} +rand_tensors(dims::NTuple{N,Int}) where {N} = rand_tensors(mx.MX_float, dims) +function rand_tensors(::Type{T}, dims::NTuple{N,Int}) where {N,T} tensor = rand(T, dims) array = copy(tensor, mx.cpu()) return (tensor, array) @@ -330,6 +330,23 @@ function test_plus() y = x .+ 2.9 @test copy(y) == [3, 4, 5] end + + info("NDArray::broadcast_add") + let + A = [1 2 3; + 4 5 6] + B = [1, + 2] + x = NDArray(A) + y = NDArray(B) + + z = x .+ y + @test copy(z) == A .+ B + + # TODO + # @inplace x .+= y + # @test copy(x) == A .+ B + end end function test_minus() @@ -386,6 +403,23 @@ function test_minus() let x = mx.NDArray([1, 2, 3]) @test copy(x .- π) ≈ [-2, -1, 0] end + 
+ info("NDArray::broadcast_minus") + let + A = [1 2 3; + 4 5 6] + B = [1, + 2] + x = NDArray(A) + y = NDArray(B) + + z = x .- y + @test copy(z) == A .- B + + # TODO + # @inplace x .-= y + # @test copy(x) == A .- B + end end function test_mul() @@ -445,6 +479,23 @@ function test_mul() @test eltype(x) == Int @test copy(y) == [3, 6, 9] end + + info("NDArray::broadcast_mul") + let + A = [1 2 3; + 4 5 6] + B = [1, + 2] + x = NDArray(A) + y = NDArray(B) + + z = x .* y + @test copy(z) == A .* B + + # TODO + # @inplace x .*= y + # @test copy(x) == A .* B + end end function test_div() @@ -499,6 +550,23 @@ function test_div() @test_throws AssertionError x ./ 0.5 end + + info("NDArray::broadcast_div") + let + A = Float32[1 2 3; + 4 5 6] + B = Float32[1, + 2] + x = NDArray(A) + y = NDArray(B) + + z = x ./ y + @test copy(z) == A ./ B + + # TODO + # @inplace x ./= y + # @test copy(x) == A ./ B + end end @@ -624,6 +692,23 @@ function test_mod() @inplace x .%= y @test copy(x) ≈ C end + + info("NDArray::broadcast_mod") + let + A = [1 2 3; + 4 5 6] + B = [1, + 2] + x = NDArray(A) + y = NDArray(B) + + z = x .% y + @test copy(z) == A .% B + + # TODO + # @inplace x .%= y + # @test copy(x) == A .% B + end end # function test_mod @@ -788,6 +873,23 @@ function test_power() end # TODO: Float64: wait for https://github.com/apache/incubator-mxnet/pull/8012 + + info("NDArray::broadcast_power") + let + A = [1 2 3; + 4 5 6] + B = [1, + 2] + x = NDArray(A) + y = NDArray(B) + + z = x.^y + @test copy(z) == A.^B + + # TODO + # @inplace x .^= y + # @test copy(x) == A.^B + end end # function test_power function test_sqrt() From e79ce7190d0faa2fbef5253f1b1cc6cd3ba6014a Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 10 Jan 2018 21:28:46 +0800 Subject: [PATCH 614/630] ndarray: broadcast comparison operators (#402) --- NEWS.md | 10 ++++++++- src/deprecated.jl | 30 ++++++++++++++++++++++++++ src/ndarray.jl | 46 ++++++++++++++++++++++++++++++++++++++++ test/unittest/ndarray.jl | 44 
++++++++++++++++++++++++++++++++++++-- 4 files changed, 127 insertions(+), 3 deletions(-) diff --git a/NEWS.md b/NEWS.md index ee00a6260396..12fec5fe2152 100644 --- a/NEWS.md +++ b/NEWS.md @@ -206,7 +206,9 @@ ### `NDArray` -* Broadcasting along dimension supported on following operators (#TBD): +* Broadcasting along dimension supported on following operators, + and the original `mx.broadcast_*` APIs are deprecated + (#401) (#402): * `+` * `-` @@ -214,6 +216,12 @@ * `/` * `%` * `^` + * `==` + * `!=` + * `>` + * `>=` + * `<` + * `<=` ```julia julia> x = NDArray([1 2 3; diff --git a/src/deprecated.jl b/src/deprecated.jl index aa12e264f729..a0f722805168 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -96,3 +96,33 @@ function broadcast_power(x::NDArray, y::NDArray) warn("broadcast_power(x, y) is deprecated, use x.^y instead.") x.^y end + +function broadcast_equal(x::NDArray, y::NDArray) + warn("broadcast_equal(x, y) is deprecated, use x .== y instead.") + x .== y +end + +function broadcast_not_equal(x::NDArray, y::NDArray) + warn("broadcast_not_equal(x, y) is deprecated, use x .== y instead.") + x .!= y +end + +function broadcast_greater(x::NDArray, y::NDArray) + warn("broadcast_greater(x, y) is deprecated, use x .== y instead.") + x .> y +end + +function broadcast_greater_equal(x::NDArray, y::NDArray) + warn("broadcast_greater_equal(x, y) is deprecated, use x .== y instead.") + x .>= y +end + +function broadcast_lesser(x::NDArray, y::NDArray) + warn("broadcast_lesser(x, y) is deprecated, use x .== y instead.") + x .< y +end + +function broadcast_lesser_equal(x::NDArray, y::NDArray) + warn("broadcast_lesser_equal(x, y) is deprecated, use x .== y instead.") + x .<= y +end diff --git a/src/ndarray.jl b/src/ndarray.jl index 9aab4ebcaf9a..8fa4abc6482f 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -813,6 +813,28 @@ broadcast_(::typeof(^), x::NDArray{T,N}, y::NDArray{T,N}) where {T,N} = broadcast_(::typeof(^), x::NDArray{T,N}, y::NDArray{T,M}) where {T,N,M} = 
_broadcast_power(x, y) + +############################################################################### +# comparison +############################################################################### +broadcast_(::typeof(==), x::NDArray{T}, y::NDArray{T}) where {T} = + _broadcast_equal(x, y) + +broadcast_(::typeof(!=), x::NDArray{T}, y::NDArray{T}) where {T} = + _broadcast_not_equal(x, y) + +broadcast_(::typeof(>), x::NDArray{T}, y::NDArray{T}) where {T} = + _broadcast_greater(x, y) + +broadcast_(::typeof(>=), x::NDArray{T}, y::NDArray{T}) where {T} = + _broadcast_greater_equal(x, y) + +broadcast_(::typeof(<), x::NDArray{T}, y::NDArray{T}) where {T} = + _broadcast_lesser(x, y) + +broadcast_(::typeof(<=), x::NDArray{T}, y::NDArray{T}) where {T} = + _broadcast_lesser_equal(x, y) + """ fill!(arr::NDArray, x) @@ -1415,6 +1437,24 @@ julia> mx.log_softmax.(x) @_remap _broadcast_power(x::NDArray, y::NDArray) broadcast_power(x, y) @_remap _broadcast_power!(x::NDArray, y::NDArray) broadcast_power(x, y) +@_remap _broadcast_equal(x::NDArray, y::NDArray) broadcast_equal(x, y) +@_remap _broadcast_equal!(x::NDArray, y::NDArray) broadcast_equal(x, y) + +@_remap _broadcast_not_equal(x::NDArray, y::NDArray) broadcast_not_equal(x, y) +@_remap _broadcast_not_equal!(x::NDArray, y::NDArray) broadcast_not_equal(x, y) + +@_remap _broadcast_greater(x::NDArray, y::NDArray) broadcast_greater(x, y) +@_remap _broadcast_greater!(x::NDArray, y::NDArray) broadcast_greater(x, y) + +@_remap _broadcast_greater_equal(x::NDArray, y::NDArray) broadcast_greater_equal(x, y) +@_remap _broadcast_greater_equal!(x::NDArray, y::NDArray) broadcast_greater_equal(x, y) + +@_remap _broadcast_lesser(x::NDArray, y::NDArray) broadcast_lesser(x, y) +@_remap _broadcast_lesser!(x::NDArray, y::NDArray) broadcast_lesser(x, y) + +@_remap _broadcast_lesser_equal(x::NDArray, y::NDArray) broadcast_lesser_equal(x, y) +@_remap _broadcast_lesser_equal!(x::NDArray, y::NDArray) broadcast_lesser_equal(x, y) + 
################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -1581,6 +1621,12 @@ const _op_import_bl = [ # import black list; do not import these funcs "broadcast_div", "broadcast_mod", "broadcast_power", + "broadcast_equal", + "broadcast_not_equal", + "broadcast_greater", + "broadcast_greater_equal", + "broadcast_lesser", + "broadcast_lesser_equal", ] macro _import_ndarray_functions() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index f18b9d20f123..b96e524dea93 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -31,7 +31,7 @@ end # function test_constructor function test_ones_zeros_like() info("NDArray::Base.zeros") - let x = mx.rand(1, 10, (1, 3, 2, 4)) + let x = mx.rand(1, 3, 2, 4, low = 1, high = 10) y = zeros(x) @test sum(copy(y)) == 0 @@ -40,7 +40,7 @@ function test_ones_zeros_like() end info("NDArray::Base.ones") - let x = mx.rand(1, 10, (1, 3, 2, 4)) + let x = mx.rand(1, 3, 2, 4, low = 1, high = 10) y = ones(x) @test sum(copy(y)) == 1 * 3 * 2 * 4 @@ -1285,6 +1285,45 @@ function test_act_funcs() end end # function test_act_funcs +macro check_equal(op) + quote + A = [1 2 3 + 4 5 6] + B = [1, + 6] + x = NDArray(A) + y = NDArray(B) + a = broadcast($op, x, y) + @test copy(a) == broadcast($op, A, B) + + C = [3 2 1 + 6 5 4] + z = NDArray(C) + b = broadcast($op, x, z) + @test copy(b) == broadcast($op, A, C) + end +end + +function test_equal() + info("NDArray::broadcast_equal") + @check_equal == + + info("NDArray::broadcast_not_equal") + @check_equal != + + info("NDArray::broadcast_greater") + @check_equal > + + info("NDArray::broadcast_greater_equal") + @check_equal >= + + info("NDArray::broadcast_lesser") + @check_equal < + + info("NDArray::broadcast_lesser_equal") + @check_equal <= +end # function test_equal + 
################################################################################ # Run tests ################################################################################ @@ -1326,6 +1365,7 @@ end # function test_act_funcs test_trigonometric() test_hyperbolic() test_act_funcs() + test_equal() end end From 2ab0584725468d135dc5ecc40a885ad44ea83268 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 13 Jan 2018 16:25:38 +0800 Subject: [PATCH 615/630] ndarray: copy!(NDArray, AbstractArray) (#405) ```julia julia> x = mx.zeros(3) 3-element mx.NDArray{Float32,1} @ CPU0: 0.0 0.0 0.0 julia> copy!(x, 3:5) 3-element mx.NDArray{Float32,1} @ CPU0: 3.0 4.0 5.0 ``` --- NEWS.md | 16 ++++++++++++++++ src/ndarray.jl | 3 ++- test/unittest/ndarray.jl | 9 +++++++++ 3 files changed, 27 insertions(+), 1 deletion(-) diff --git a/NEWS.md b/NEWS.md index 12fec5fe2152..401ee450380e 100644 --- a/NEWS.md +++ b/NEWS.md @@ -118,6 +118,22 @@ 4.0 ``` +* `copy!(NDArray, AbstractArray)` is implemented now. (#TBD) + + ```julia + julia> x = mx.zeros(3) + 3-element mx.NDArray{Float32,1} @ CPU0: + 0.0 + 0.0 + 0.0 + + julia> copy!(x, 3:5) + 3-element mx.NDArray{Float32,1} @ CPU0: + 3.0 + 4.0 + 5.0 + ``` + * `Base.ones(x::NDArray)` for creating an one-ed `NDArray`. (#TBD) * `Base.zeros(x::NDArray)` for creating a zero-ed `NDArray`. (#TBD) diff --git a/src/ndarray.jl b/src/ndarray.jl index 8fa4abc6482f..9532e2b7f70b 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -430,7 +430,7 @@ end import Base: copy!, copy, convert, deepcopy """ - copy!(dst :: Union{NDArray, Array}, src :: Union{NDArray, Array}) + copy!(dst::Union{NDArray, Array}, src::Union{NDArray, Array}) Copy contents of `src` into `dst`. 
""" @@ -453,6 +453,7 @@ function copy!(dst::Array{T}, src::NDArray{T}) where T<:DType end copy!(dst::Array{<:Real}, src::NDArray) = copy!(dst, copy(src)) +copy!(dst::NDArray, src::AbstractArray) = copy!(dst, collect(src)) function copy!(dst::NDArray{T}, src::Array{<:Real}) where {T} @assert dst.writable diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index b96e524dea93..1b656902480c 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -76,6 +76,15 @@ function test_copy() @test eltype(x) == Float64 @test copy(x) ≈ [1., 2, 3, 4] end + + info("NDArray::copy!::AbstractArray") + let + x = mx.zeros(4) + copy!(x, 1:4) + + @test eltype(x) == Float32 + @test copy(x) == [1, 2, 3, 4] + end end function test_deepcopy() From 1c990bc9dc387dee5f92e802d222b5af7a33f47e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 17 Jan 2018 14:45:22 +0800 Subject: [PATCH 616/630] ndarray: new constructor NDArray(Type, AbstractArray) (#404) ```julia julia> NDArray([1, 2, 3]) 3-element mx.NDArray{Int64,1} @ CPU0: 1 2 3 julia> NDArray(Float32, [1, 2, 3]) 3-element mx.NDArray{Float32,1} @ CPU0: 1.0 2.0 3.0 ``` --- NEWS.md | 17 +++++++++++++++++ src/ndarray.jl | 2 ++ test/unittest/ndarray.jl | 12 ++++++++++++ 3 files changed, 31 insertions(+) diff --git a/NEWS.md b/NEWS.md index 401ee450380e..11e853c2149e 100644 --- a/NEWS.md +++ b/NEWS.md @@ -84,6 +84,23 @@ ### `NDArray` +* A handy constructor: `NDArray(Type, AbstractArray)` is added. (#TBD) + + E.g. + ```julia + julia> NDArray([1, 2, 3]) + 3-element mx.NDArray{Int64,1} @ CPU0: + 1 + 2 + 3 + + julia> NDArray(Float32, [1, 2, 3]) + 3-element mx.NDArray{Float32,1} @ CPU0: + 1.0 + 2.0 + 3.0 + ``` + * A port of Python's `autograd` for `NDArray` (#274) * `size(x, dims...)` is supported now. 
(#TBD) diff --git a/src/ndarray.jl b/src/ndarray.jl index 9532e2b7f70b..8130679b2ae3 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -98,6 +98,8 @@ end NDArray(x::AbstractArray{T}) where {T<:DType} = copy(collect(x), cpu()) NDArray(x::Array{T}) where {T<:DType} = copy(x, cpu()) +NDArray(::Type{T}, x::AbstractArray) where {T<:DType} = + copy(convert(AbstractArray{T}, x), cpu()) NDArray(handle, writable = true) = NDArray{eltype(handle), ndims(handle)}(handle, writable) diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 1b656902480c..26fb7327a21d 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -26,6 +26,18 @@ function test_constructor() check_absarray(1:10) check_absarray(1.0:10) + + info("NDArray::NDArray(Type, AbstractArray)") + let + x = mx.NDArray(Float32, [1, 2, 3]) + @test eltype(x) == Float32 + @test copy(x) == [1, 2, 3] + end + let + x = mx.NDArray(Float32, [1.1, 2, 3]) + @test eltype(x) == Float32 + @test copy(x) ≈ [1.1, 2, 3] + end end # function test_constructor From 0fba766d7c6ba44f3d3f3beeeddfd5c3087b5f23 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Fri, 19 Jan 2018 09:13:52 +0800 Subject: [PATCH 617/630] ndarray: broadcast_min/max (#403) --- NEWS.md | 4 +++- src/broadcast.jl | 3 ++- src/deprecated.jl | 10 ++++++++++ src/ndarray.jl | 23 ++++++++++++++++++++++- test/unittest/ndarray.jl | 26 ++++++++++++++++++++++++++ 5 files changed, 63 insertions(+), 3 deletions(-) diff --git a/NEWS.md b/NEWS.md index 11e853c2149e..f8d1f1073456 100644 --- a/NEWS.md +++ b/NEWS.md @@ -241,7 +241,7 @@ * Broadcasting along dimension supported on following operators, and the original `mx.broadcast_*` APIs are deprecated - (#401) (#402): + (#401) (#402) (#403): * `+` * `-` @@ -255,6 +255,8 @@ * `>=` * `<` * `<=` + * `max` + * `min` ```julia julia> x = NDArray([1 2 3; diff --git a/src/broadcast.jl b/src/broadcast.jl index 5d15adf385cc..9686db5d6412 100644 --- a/src/broadcast.jl +++ b/src/broadcast.jl @@ -2,7 +2,8 @@ using 
TakingBroadcastSeriously: Broadcasted, unwrap for f in :[%, tan, asin, acos, atan, - sinh, cosh, tanh, asinh, acosh, atanh].args + sinh, cosh, tanh, asinh, acosh, atanh, + min, max].args # copy from TakingBroadcastSeriously @eval Base.$f(a::Broadcasted...) = Broadcasted(broadcast_($f, unwrap.(a)...)) @eval Base.$f(a::Broadcasted, b) = Broadcasted(broadcast_($f, unwrap(a), b)) diff --git a/src/deprecated.jl b/src/deprecated.jl index a0f722805168..b722faec6c70 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -126,3 +126,13 @@ function broadcast_lesser_equal(x::NDArray, y::NDArray) warn("broadcast_lesser_equal(x, y) is deprecated, use x .== y instead.") x .<= y end + +function broadcast_maximum(x::NDArray, y::NDArray) + warn("broadcast_maximum(x, y) is deprecated, use max.(x, y) instead.") + max.(x, y) +end + +function broadcast_minimum(x::NDArray, y::NDArray) + warn("broadcast_minimum(x, y) is deprecated, use min.(x, y) instead.") + min.(x, y) +end diff --git a/src/ndarray.jl b/src/ndarray.jl index 8130679b2ae3..3bfd7fccc8d2 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -816,10 +816,10 @@ broadcast_(::typeof(^), x::NDArray{T,N}, y::NDArray{T,N}) where {T,N} = broadcast_(::typeof(^), x::NDArray{T,N}, y::NDArray{T,M}) where {T,N,M} = _broadcast_power(x, y) - ############################################################################### # comparison ############################################################################### + broadcast_(::typeof(==), x::NDArray{T}, y::NDArray{T}) where {T} = _broadcast_equal(x, y) @@ -838,6 +838,19 @@ broadcast_(::typeof(<), x::NDArray{T}, y::NDArray{T}) where {T} = broadcast_(::typeof(<=), x::NDArray{T}, y::NDArray{T}) where {T} = _broadcast_lesser_equal(x, y) + +############################################################################### +# min/max +############################################################################### + +import Base: min, max + +broadcast_(::typeof(max), x::NDArray{T}, y::NDArray{T}) 
where {T} = + _broadcast_maximum(x, y) + +broadcast_(::typeof(min), x::NDArray{T}, y::NDArray{T}) where {T} = + _broadcast_minimum(x, y) + """ fill!(arr::NDArray, x) @@ -1458,6 +1471,12 @@ julia> mx.log_softmax.(x) @_remap _broadcast_lesser_equal(x::NDArray, y::NDArray) broadcast_lesser_equal(x, y) @_remap _broadcast_lesser_equal!(x::NDArray, y::NDArray) broadcast_lesser_equal(x, y) +@_remap _broadcast_maximum(x::NDArray, y::NDArray) broadcast_maximum(x, y) +@_remap _broadcast_maximum!(x::NDArray, y::NDArray) broadcast_maximum(x, y) + +@_remap _broadcast_minimum(x::NDArray, y::NDArray) broadcast_minimum(x, y) +@_remap _broadcast_minimum!(x::NDArray, y::NDArray) broadcast_minimum(x, y) + ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -1630,6 +1649,8 @@ const _op_import_bl = [ # import black list; do not import these funcs "broadcast_greater_equal", "broadcast_lesser", "broadcast_lesser_equal", + "broadcast_maximum", + "broadcast_minimum", ] macro _import_ndarray_functions() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 26fb7327a21d..4d2993defdb0 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -1078,6 +1078,19 @@ function test_maximum() @test copy(maximum(X, [1, 2])) == maximum(A, [1, 2]) @test copy(maximum(X, (1, 2))) == maximum(A, (1, 2)) end + + info("NDArray::broadcast_maximum") + let + A = [1 2 3; + 4 5 6] + B = [1, + 2] + x = NDArray(A) + y = NDArray(B) + + z = max.(x, y) + @test copy(z) == max.(A, B) + end end function test_minimum() @@ -1091,6 +1104,19 @@ function test_minimum() @test copy(minimum(X, [1, 2])) == minimum(A, [1, 2]) @test copy(minimum(X, (1, 2))) == minimum(A, (1, 2)) end + + info("NDArray::broadcast_minimum") + let + A = [1 2 3; + 4 5 6] + B = [1, + 2] + x = NDArray(A) + y = NDArray(B) + + z = min.(x, y) + @test copy(z) == 
min.(A, B) + end end function test_prod() From 39beaaf14d6c7da64224ba0c0167aeccc0aa9d9e Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 22 Jan 2018 13:59:40 +0800 Subject: [PATCH 618/630] remap broadcast_to/axis/axes (#406) - export `broadcast_to`, `broadcast_axis`, `broadcast_axes` --- NEWS.md | 3 ++ src/MXNet.jl | 6 ++- src/ndarray.jl | 80 ++++++++++++++++++++++++++++++++++++++-- test/unittest/ndarray.jl | 26 +++++++++++++ 4 files changed, 111 insertions(+), 4 deletions(-) diff --git a/NEWS.md b/NEWS.md index f8d1f1073456..826ae72f01e7 100644 --- a/NEWS.md +++ b/NEWS.md @@ -13,6 +13,9 @@ * `relu()` * `softmax()` * `log_softmax()` + * `broadcast_to()` + * `broadcast_axis()` + * `broadcast_axes()` * `SymbolicNode` * `Variable` diff --git a/src/MXNet.jl b/src/MXNet.jl index bd27c715b61d..9bae62d30b73 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -42,7 +42,11 @@ export NDArray, sigmoid, relu, softmax, - log_softmax + log_softmax, + # broadcast utils + broadcast_to, + broadcast_axis, + broadcast_axes # executor.jl export Executor, diff --git a/src/ndarray.jl b/src/ndarray.jl index 3bfd7fccc8d2..b5b95e7fbbcd 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -76,7 +76,7 @@ end # NDArray Type ################################################################################ """ - NDArray + NDArray{T,N} Wrapper of the `NDArray` type in `libmxnet`. This is the basic building block of tensor-based computation. @@ -89,11 +89,11 @@ of tensor-based computation. C/C++/Python shape (100,1,28,28), while in Julia, the same piece of memory have shape (28,28,1,100). 
""" -mutable struct NDArray{T,D} +mutable struct NDArray{T,N} handle :: MX_NDArrayHandle writable :: Bool - NDArray{T,D}(handle, writable = true) where {T,D} = new(handle, writable) + NDArray{T,N}(handle, writable = true) where {T,N} = new(handle, writable) end NDArray(x::AbstractArray{T}) where {T<:DType} = copy(collect(x), cpu()) @@ -1477,6 +1477,77 @@ julia> mx.log_softmax.(x) @_remap _broadcast_minimum(x::NDArray, y::NDArray) broadcast_minimum(x, y) @_remap _broadcast_minimum!(x::NDArray, y::NDArray) broadcast_minimum(x, y) +_nddoc[:broadcast_to] = """ + broadcast_to(x::NDArray, dims) + broadcast_to(x::NDArray, dims...) + +Broadcasts the input array to a new shape. + +In the case of broacasting doesn't work out of box, +you can expand the NDArray first. + +```jldoctest +julia> x = mx.ones(2, 3, 4); + +julia> y = mx.ones(1, 1, 4); + +julia> x .+ mx.broadcast_to(y, 2, 3, 4) +2×3×4 mx.NDArray{Float32,3} @ CPU0: +[:, :, 1] = + 2.0 2.0 2.0 + 2.0 2.0 2.0 + +[:, :, 2] = + 2.0 2.0 2.0 + 2.0 2.0 2.0 + +[:, :, 3] = + 2.0 2.0 2.0 + 2.0 2.0 2.0 + +[:, :, 4] = + 2.0 2.0 2.0 + 2.0 2.0 2.0 +``` +""" +@_remap broadcast_to(x::NDArray, dims) broadcast_to(x; shape = dims) +@_remap broadcast_to(x::NDArray, dims...) broadcast_to(x; shape = dims) + +_nddoc[:broadcast_axis] = _nddoc[:broadcast_axes] = """ + broadcast_axis(x::NDArray, dim, size) + broadcast_axes(x::NDArray, dim, size) + +Broadcasts the input array over particular axis(axes). +Parameter `dim` and `size` could be a scalar, a Tuple or an Array. + +`broadcast_axes` is just an alias. 
+ +```jldoctest +julia> x +1×2×1 mx.NDArray{Int64,3} @ CPU0: +[:, :, 1] = + 1 2 + +julia> mx.broadcast_axis(x, 1, 2) +2×2×1 mx.NDArray{Int64,3} @ CPU0: +[:, :, 1] = + 1 2 + 1 2 + +julia> mx.broadcast_axis(x, 3, 2) +1×2×2 mx.NDArray{Int64,3} @ CPU0: +[:, :, 1] = + 1 2 + +[:, :, 2] = + 1 2 +``` +""" +@_remap(broadcast_axis(x::NDArray, dim, size), + broadcast_axis(x; axis = ndims(x) .- dim, size = size)) +@_remap(broadcast_axes(x::NDArray, dim, size), + broadcast_axes(x; axis = ndims(x) .- dim, size = size)) + ################################################################################ # NDArray functions dynamically imported from libmxnet ################################################################################ @@ -1651,6 +1722,9 @@ const _op_import_bl = [ # import black list; do not import these funcs "broadcast_lesser_equal", "broadcast_maximum", "broadcast_minimum", + "broadcast_to", + "broadcast_axis", + "broadcast_axes", ] macro _import_ndarray_functions() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index 4d2993defdb0..ba5830a5810f 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -1371,6 +1371,30 @@ function test_equal() @check_equal <= end # function test_equal +function test_broadcast_to() + info("NDArray::broadcast_to") + A = [1 2 3] + x = NDArray(A) + @test mx.broadcast_to(x, (1, 3)) |> copy == A + @test mx.broadcast_to(x, (5, 3)) |> copy == repeat(A, outer = (5, 1)) + + @test mx.broadcast_to(x, 1, 3) |> copy == A + @test mx.broadcast_to(x, 5, 3) |> copy == repeat(A, outer = (5, 1)) +end # function test_broadcast_to + +function test_broadcast_axis() + info("NDArray::broadcast_axis") + A = reshape([1, 2, 3], 1, 3, 1) + x = NDArray(A) + + @test mx.broadcast_axis(x, 1, 4) |> copy == [A; A; A; A] + @test mx.broadcast_axis(x, 3, 2) |> copy == cat(3, A, A) + + info("NDArray::broadcast_axes") + @test mx.broadcast_axes(x, 1, 4) |> copy == [A; A; A; A] + @test mx.broadcast_axes(x, 3, 2) |> copy == cat(3, A, A) 
+end # function test_broadcast_axis + ################################################################################ # Run tests ################################################################################ @@ -1413,6 +1437,8 @@ end # function test_equal test_hyperbolic() test_act_funcs() test_equal() + test_broadcast_to() + test_broadcast_axis() end end From ec2b6995dc2283e52f40b8ab1f6485230d02c779 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 24 Jan 2018 10:36:39 +0800 Subject: [PATCH 619/630] ndarray: remap hypot (#407) close #400 --- src/broadcast.jl | 3 ++- src/deprecated.jl | 5 +++++ src/ndarray.jl | 9 +++++++++ test/unittest/ndarray.jl | 14 ++++++++++++++ 4 files changed, 30 insertions(+), 1 deletion(-) diff --git a/src/broadcast.jl b/src/broadcast.jl index 9686db5d6412..1b58addec321 100644 --- a/src/broadcast.jl +++ b/src/broadcast.jl @@ -3,7 +3,8 @@ using TakingBroadcastSeriously: Broadcasted, unwrap for f in :[%, tan, asin, acos, atan, sinh, cosh, tanh, asinh, acosh, atanh, - min, max].args + min, max, + hypot].args # copy from TakingBroadcastSeriously @eval Base.$f(a::Broadcasted...) = Broadcasted(broadcast_($f, unwrap.(a)...)) @eval Base.$f(a::Broadcasted, b) = Broadcasted(broadcast_($f, unwrap(a), b)) diff --git a/src/deprecated.jl b/src/deprecated.jl index b722faec6c70..67b1707cdee6 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -136,3 +136,8 @@ function broadcast_minimum(x::NDArray, y::NDArray) warn("broadcast_minimum(x, y) is deprecated, use min.(x, y) instead.") min.(x, y) end + +function broadcast_hypot(x::NDArray, y::NDArray) + warn("broadcast_hypot(x, y) is deprecated, use hypot.(x, y) instead.") + hypot.(x, y) +end diff --git a/src/ndarray.jl b/src/ndarray.jl index b5b95e7fbbcd..d2ceb8498ca0 100644 --- a/src/ndarray.jl +++ b/src/ndarray.jl @@ -875,6 +875,11 @@ end fill(x, dims::Integer...) 
= fill(x, dims) +import Base: hypot + +broadcast_(::typeof(hypot), x::NDArray{T}, y::NDArray{T}) where {T} = + _broadcast_hypot(x, y) + """ Manipulating as Julia Arrays ---------------------------- @@ -1477,6 +1482,9 @@ julia> mx.log_softmax.(x) @_remap _broadcast_minimum(x::NDArray, y::NDArray) broadcast_minimum(x, y) @_remap _broadcast_minimum!(x::NDArray, y::NDArray) broadcast_minimum(x, y) +@_remap _broadcast_hypot(x::NDArray, y::NDArray) broadcast_hypot(x, y) +@_remap _broadcast_hypot!(x::NDArray, y::NDArray) broadcast_hypot(x, y) + _nddoc[:broadcast_to] = """ broadcast_to(x::NDArray, dims) broadcast_to(x::NDArray, dims...) @@ -1725,6 +1733,7 @@ const _op_import_bl = [ # import black list; do not import these funcs "broadcast_to", "broadcast_axis", "broadcast_axes", + "broadcast_hypot", ] macro _import_ndarray_functions() diff --git a/test/unittest/ndarray.jl b/test/unittest/ndarray.jl index ba5830a5810f..891dffd0f390 100644 --- a/test/unittest/ndarray.jl +++ b/test/unittest/ndarray.jl @@ -1395,6 +1395,19 @@ function test_broadcast_axis() @test mx.broadcast_axes(x, 3, 2) |> copy == cat(3, A, A) end # function test_broadcast_axis +function test_hypot() + info("NDArray::hypot") + A = [3 3 3] + B = [4, 4] + C = hypot.(A, B) + + x = NDArray(A) + y = NDArray(B) + z = hypot.(x, y) + + @test copy(z) == C +end # function test_hypot + ################################################################################ # Run tests ################################################################################ @@ -1439,6 +1452,7 @@ end # function test_broadcast_axis test_equal() test_broadcast_to() test_broadcast_axis() + test_hypot() end end From 157b0887438c6a1bcd635b51fa0b4eae3972972f Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 28 Jan 2018 18:17:43 +0800 Subject: [PATCH 620/630] doc: update predict section of mnist tutorial (#411) --- docs/src/tutorial/mnist.md | 21 ++++++++------------- examples/mnist/mlp.jl | 21 ++++++++------------- 2 files changed, 16 
insertions(+), 26 deletions(-) diff --git a/docs/src/tutorial/mnist.md b/docs/src/tutorial/mnist.md index a12e10b37821..2d1c68e23e66 100644 --- a/docs/src/tutorial/mnist.md +++ b/docs/src/tutorial/mnist.md @@ -245,21 +245,16 @@ data provider, and compute the prediction accuracy manually: ```julia # collect all labels from eval data -labels = Array[] -for batch in eval_provider - push!(labels, copy(mx.get_label(batch))) -end -labels = cat(1, labels...) +labels = reduce( + vcat, + copy(mx.get(eval_provider, batch, :softmax_label)) for batch ∈ eval_provider) +# labels are 0...9 +labels .= labels .+ 1 # Now we use compute the accuracy -correct = 0 -for i = 1:length(labels) - # labels are 0...9 - if indmax(probs[:,i]) == labels[i]+1 - correct += 1 - end -end -println(mx.format("Accuracy on eval set: {1:.2f}%", 100correct/length(labels))) +pred = map(i -> indmax(probs[1:10, i]), 1:size(probs, 2)) +correct = sum(pred .== labels) +@printf "Accuracy on eval set: %.2f%%\n" 100correct/length(labels) ``` Alternatively, when the dataset is huge, one can provide a callback to diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index 3f713654d5b9..e5aa279c4f77 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -46,18 +46,13 @@ mx.fit(model, optimizer, train_provider, eval_data=eval_provider, n_epoch=20) probs = mx.predict(model, eval_provider) # collect all labels from eval data -labels = Array[] -for batch in eval_provider - push!(labels, copy(mx.get(eval_provider, batch, :softmax_label))) -end -labels = cat(1, labels...) 
+labels = reduce( + vcat, + copy(mx.get(eval_provider, batch, :softmax_label)) for batch ∈ eval_provider) +# labels are 0...9 +labels .= labels .+ 1 # Now we use compute the accuracy -correct = 0 -for i = 1:length(labels) - # labels are 0...9 - if indmax(probs[:,i]) == labels[i]+1 - correct += 1 - end -end -println(mx.format("Accuracy on eval set: {1:.2f}%", 100correct/length(labels))) +pred = map(i -> indmax(probs[1:10, i]), 1:size(probs, 2)) +correct = sum(pred .== labels) +@printf "Accuracy on eval set: %.2f%%\n" 100correct/length(labels) From 9f4f53338ab5f96c8ff71273e95005f038755b23 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Wed, 31 Jan 2018 11:33:23 +0800 Subject: [PATCH 621/630] Optimizer module overhaul (#396) --- NEWS.md | 100 +++++++++++ README.md | 3 +- docs/src/api/optimizer.md | 16 ++ docs/src/tutorial/mnist.md | 6 +- docs/src/user-guide/overview.md | 4 +- examples/char-lstm/train.jl | 2 +- examples/cifar10/cifar10.jl | 2 +- examples/mnist/lenet-stn.jl | 2 +- examples/mnist/lenet.jl | 2 +- examples/mnist/mlp-test.jl | 9 +- examples/mnist/mlp.jl | 2 +- examples/regression-example.jl | 2 +- src/MXNet.jl | 5 +- src/base.jl | 1 + src/kvstore.jl | 2 +- src/model.jl | 24 ++- src/optimizer.jl | 306 +++++++++++++++++--------------- src/optimizers/adadelta.jl | 130 +++++++------- src/optimizers/adagrad.jl | 94 +++++----- src/optimizers/adam.jl | 104 ++++++----- src/optimizers/adamax.jl | 108 +++++------ src/optimizers/nadam.jl | 148 ++++++++------- src/optimizers/rmsprop.jl | 102 +++++------ src/optimizers/sgd.jl | 114 ++++++------ test/unittest/optimizer.jl | 68 +++++++ 25 files changed, 772 insertions(+), 584 deletions(-) create mode 100644 test/unittest/optimizer.jl diff --git a/NEWS.md b/NEWS.md index 826ae72f01e7..d531d9ea0caf 100644 --- a/NEWS.md +++ b/NEWS.md @@ -61,6 +61,9 @@ * `Nadam` * `RMSProp` * `SGD` + * `getupdater()` + * `normgrad!()` + * `update!()` * `AbstractDataProvider` * `AbstractDataBatch` @@ -344,6 +347,103 @@ Before: `clip(x, a_min = 
-4, a_max = 4)` After: `clip(x, -4, 4)` +### Optimizer + +We overhauled the optimizer APIs, introducing breaking changes. +There are tons of renaming, and we try to increase the flexibility. +Making it decouples from some high-level, so user can use it without +understand some detail implementations of `fit!`. + +See #396. + +* All the keyword argument of optimizers have been renamed. + Now we have more elegant keyword arguments than Python's, + thanks to well Unicode support on Julia's REPL and editor plugin. + *These are breaking changes, no deprecation warning.* + + | old | new | comment | + |---------------------------|-----------|--------------------------------| + | `opts.lr` | `η` | type `\eta` in REPL | + | `opts.momentum` | `μ` | type `\mu` in REPL | + | `opts.grad_clip` | `clip` | type `\nablac` in REPL | + | `opts.weight_decay` | `λ` | type `\lambda` in REPL | + | `opts.lr_schedular` | `η_sched` | type `\eta_sched` in REPL | + | `opts.momentum_schedular` | `μ_sched` | type `\mu_sched` in REPL | + + For instance, one accessed the learning via `SGD().opts.lr`, + but now, it's `SGD().η`. + +* New keyword argument `scale` for gradient rescaling. + + Docstring: + ``` + If != 0, multiply the gradient with `∇r` before updating. + Often choose to be `1.0 / batch_size`. + If leave it default, high-level API like `fit!` will set it to + `1.0 / batch_size`, since `fit!` knows the `batch_size`. + ``` + +* Keyword arguments of `NadamScheduler` has been renamed. + *This is a breaking change, no deprecation warning.* + + * Before + + ```julia + NadamScheduler(; mu0 = 0.99, delta = 0.004, gamma = 0.5, alpha = 0.96) + ``` + + * After + + ```julia + NadamScheduler(; μ = 0.99, δ = 0.004, γ = 0.5, α = 0.96) + ``` + +* The attribute `optimizer.state` is removed. + `OptimizationState` is only used by high-level abstraction, like `fit!`. + +* `LearningRate` scheduler API changes: + + * `get_learning_rate` is removed. + Please use `Base.get` to get learning rate. 
+ + ```julia + julia> sched = mx.LearningRate.Exp(.1) + MXNet.mx.LearningRate.Exp(0.1, 0.9, 0) + + julia> get(sched) + 0.1 + + julia> update!(sched); + + julia> get(sched) + 0.09000000000000001 + ``` + + * `update!` to bump counter of `Scheduler.t` + ```julia + julia> sched.t + 1 + + julia> update!(sched); + + julia> sched.t + 2 + + julia> update!(sched); + + julia> sched.t + 3 + ``` + +* `Momentum` module API changes: + + * `get_momentum_scheduler` is removed. Please use `Base.get` instead. + + ```julia + julia> get(mx.Momentum.Fixed(.9)) + 0.9 + ``` + ---- # v0.3.0 (2017.11.16) diff --git a/README.md b/README.md index c6600cdc9fd5..74208b9ac034 100644 --- a/README.md +++ b/README.md @@ -37,7 +37,8 @@ train_provider, eval_provider = get_mnist_providers(batch_size) model = mx.FeedForward(mlp, context=mx.cpu()) # optimization algorithm -optimizer = mx.SGD(lr=0.1, momentum=0.9) +# where η is learning rate and μ is momentum +optimizer = mx.SGD(η=0.1, μ=0.9) # fit parameters mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) diff --git a/docs/src/api/optimizer.md b/docs/src/api/optimizer.md index 17974a577913..28d01cc9fd89 100644 --- a/docs/src/api/optimizer.md +++ b/docs/src/api/optimizer.md @@ -1,5 +1,21 @@ # Optimizers +Says, you have the parameter `W` inited for your model and +got its gradient stored as `∇` (perhaps from AutoGrad APIs). +Here is minimal snippet of getting your parameter `W` baked by `SGD`. + +```@repl +using MXNet + +opt = SGD(η = 10) +decend! = getupdater(opt) + +W = NDArray(Float32[1, 2, 3, 4]); +∇ = NDArray(Float32[.1, .2, .3, .4]); + +decend!(1, ∇, W) +``` + ```@autodocs Modules = [MXNet.mx, MXNet.mx.LearningRate, MXNet.mx.Momentum] Pages = ["optimizer.jl"] diff --git a/docs/src/tutorial/mnist.md b/docs/src/tutorial/mnist.md index 2d1c68e23e66..76430fd1b1d0 100644 --- a/docs/src/tutorial/mnist.md +++ b/docs/src/tutorial/mnist.md @@ -100,10 +100,10 @@ help. 
The last thing we need to specify is the optimization algorithm (a.k.a. *optimizer*) to use. We use the basic SGD with a fixed learning rate 0.1 -and momentum 0.9: +, momentum 0.9 and weight decay 0.00001: ```julia -optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) +optimizer = mx.SGD(η=0.1, μ=0.9, λ=0.00001) ``` Now we can do the training. Here the `n_epoch` parameter specifies that @@ -205,7 +205,7 @@ on GPU, and train it. model = mx.FeedForward(lenet, context=mx.gpu()) # optimizer -optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) +optimizer = mx.SGD(η=0.05, μ=0.9, λ=0.00001) # fit parameters mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) diff --git a/docs/src/user-guide/overview.md b/docs/src/user-guide/overview.md index cddeed6bc8c9..a81d7ff30e9e 100644 --- a/docs/src/user-guide/overview.md +++ b/docs/src/user-guide/overview.md @@ -147,10 +147,10 @@ macroexpand(:(@mx.inplace a += b)) As we can see, it translate the `+=` operator to an explicit `add_to!` function call, which invokes into libmxnet to add the contents of `b` into `a` directly. 
For example, the following is the update rule in the -`SGD Optimizer` (both `grad` and `weight` are `NDArray` objects): +`SGD Optimizer` (both gradient `∇` and weight `W` are `NDArray` objects): ```julia -@inplace weight += -lr * (grad_scale * grad + self.weight_decay * weight) +@inplace W .+= -η .* (∇ + λ .* W) ``` Note there is no much magic in `mx.inplace`: it only does a shallow diff --git a/examples/char-lstm/train.jl b/examples/char-lstm/train.jl index 000534f64d7b..8b7a682c9560 100644 --- a/examples/char-lstm/train.jl +++ b/examples/char-lstm/train.jl @@ -34,7 +34,7 @@ end #--train model = mx.FeedForward(lstm, context=context) -optimizer = mx.ADAM(lr=BASE_LR, weight_decay=WEIGHT_DECAY, grad_clip=CLIP_GRADIENT) +optimizer = mx.ADAM(η=BASE_LR, λ=WEIGHT_DECAY, clip=CLIP_GRADIENT) mx.fit(model, optimizer, data_tr, eval_data=data_val, n_epoch=N_EPOCH, initializer=mx.UniformInitializer(0.1), diff --git a/examples/cifar10/cifar10.jl b/examples/cifar10/cifar10.jl index 165ee1934568..5d3d2eeafe41 100644 --- a/examples/cifar10/cifar10.jl +++ b/examples/cifar10/cifar10.jl @@ -77,7 +77,7 @@ gpus = [mx.Context(mx.GPU, i) for i = 0:num_gpus-1] model = mx.FeedForward(softmax, context=gpus) # optimizer -optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.0001) +optimizer = mx.SGD(η=0.05, μ=0.9, λ=0.0001) # fit parameters mx.fit(model, optimizer, train_provider, n_epoch=num_epoch, eval_data=test_provider, diff --git a/examples/mnist/lenet-stn.jl b/examples/mnist/lenet-stn.jl index 23ca9de3fdb3..ae94bf9b16f7 100644 --- a/examples/mnist/lenet-stn.jl +++ b/examples/mnist/lenet-stn.jl @@ -57,7 +57,7 @@ train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) model = mx.FeedForward(lenet, context=mx.cpu()) # optimizer -optimizer = mx.ADAM(lr=0.01, weight_decay=0.00001) +optimizer = mx.ADAM(η=0.01, λ=0.00001) # fit parameters initializer=mx.XavierInitializer(distribution = mx.xv_uniform, regularization = mx.xv_avg, magnitude = 1) diff --git 
a/examples/mnist/lenet.jl b/examples/mnist/lenet.jl index af3e8c41dc71..64bcdea9cba9 100644 --- a/examples/mnist/lenet.jl +++ b/examples/mnist/lenet.jl @@ -39,7 +39,7 @@ train_provider, eval_provider = get_mnist_providers(batch_size; flat=false) model = mx.FeedForward(lenet, context=mx.gpu()) # optimizer -optimizer = mx.SGD(lr=0.05, momentum=0.9, weight_decay=0.00001) +optimizer = mx.SGD(η=0.05, μ=0.9, λ=0.00001) # fit parameters mx.fit(model, optimizer, train_provider, n_epoch=20, eval_data=eval_provider) diff --git a/examples/mnist/mlp-test.jl b/examples/mnist/mlp-test.jl index 56bd00b6a122..dac2c9cd84eb 100644 --- a/examples/mnist/mlp-test.jl +++ b/examples/mnist/mlp-test.jl @@ -72,7 +72,14 @@ end function test_mnist_mlp() info("MNIST::SGD") - @test mnist_fit_and_predict(mx.SGD(lr=0.1, momentum=0.9), mx.UniformInitializer(0.01), 2) > 90 + @test mnist_fit_and_predict(mx.SGD(η=.2), mx.UniformInitializer(.01), 2) > 90 + + info("MNIST::SGD::η scheduler") + @test mnist_fit_and_predict(mx.SGD(η_sched=mx.LearningRate.Inv(.25)), + mx.UniformInitializer(.01), 2) > 90 + + info("MNIST::SGD::momentum μ") + @test mnist_fit_and_predict(mx.SGD(η=.1, μ=.9), mx.UniformInitializer(.01), 2) > 90 info("MNIST::ADAM") @test mnist_fit_and_predict(mx.ADAM(), mx.NormalInitializer(), 2) > 90 diff --git a/examples/mnist/mlp.jl b/examples/mnist/mlp.jl index e5aa279c4f77..8a73d2aa7637 100644 --- a/examples/mnist/mlp.jl +++ b/examples/mnist/mlp.jl @@ -36,7 +36,7 @@ train_provider, eval_provider = get_mnist_providers(batch_size) model = mx.FeedForward(mlp, context=mx.cpu()) # optimizer -optimizer = mx.SGD(lr=0.1, momentum=0.9, weight_decay=0.00001) +optimizer = mx.SGD(η=0.1, μ=0.9, λ=0.00001) # fit parameters mx.fit(model, optimizer, train_provider, eval_data=eval_provider, n_epoch=20) diff --git a/examples/regression-example.jl b/examples/regression-example.jl index 38541c2b7d0c..7d05baf3c632 100644 --- a/examples/regression-example.jl +++ b/examples/regression-example.jl @@ -55,7 +55,7 @@ 
net = @mx.chain mx.Variable(:data) => model = mx.FeedForward(net, context=mx.cpu()) # set up the optimizer: select one, explore parameters, if desired -#optimizer = mx.SGD(lr=0.01, momentum=0.9, weight_decay=0.00001) +#optimizer = mx.SGD(η=0.01, μ=0.9, λ=0.00001) optimizer = mx.ADAM() # train, reporting loss for training and evaluation sets diff --git a/src/MXNet.jl b/src/MXNet.jl index 9bae62d30b73..b550104a4e5b 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -95,7 +95,10 @@ export AbstractOptimizer, AdaMax, Nadam, RMSProp, - SGD + SGD, + getupdater, + normgrad!, + update! # io.jl export AbstractDataProvider, diff --git a/src/base.jl b/src/base.jl index 271e35607880..a3dcbbf7fe35 100644 --- a/src/base.jl +++ b/src/base.jl @@ -159,6 +159,7 @@ end # NTuple{N, Int} passed to libmxnet. # # TODO: find a better solution in case this cause issues in the future. +# I made `@_remap` in `ndarray.jl`. (Iblis Lin) ################################################################################ dump_mx_param(val::Any) = string(val) dump_mx_param(val::Float64) = @sprintf("%.16e", val) diff --git a/src/kvstore.jl b/src/kvstore.jl index fa4768cceaf7..c4d3ad9724a7 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -127,6 +127,6 @@ function set_optimizer(self :: KVStore, optimizer :: AbstractOptimizer) if ismatch(r"dist", string(get_type(self))) && is_worker # TODO else - set_updater(self, get_updater(optimizer)) + set_updater(self, getupdater(optimizer)) end end diff --git a/src/model.jl b/src/model.jl index 06b7a2cf8a13..2f6dfef1c6d1 100644 --- a/src/model.jl +++ b/src/model.jl @@ -286,7 +286,8 @@ end kvstore :: Union{Symbol, KVStore} = :local, force_init :: Bool = false, callbacks :: Vector{AbstractCallback} = AbstractCallback[], - verbosity :: Int = 3 + verbosity :: Int = 3, + η_decay :: Symbol = :epoch, ) function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, @@ -309,12 +310,11 @@ end Alias to [`fit`](@ref). 
""" -function train(self :: FeedForward, optimizer :: AbstractOptimizer, data :: AbstractDataProvider; kwargs...) - fit(self, optimizer, data; kwargs...) -end +train(m::FeedForward, opt::AbstractOptimizer, data::AbstractDataProvider; kw...) = + fit(m, opt, data; kw...) """ - fit(model :: FeedForward, optimizer, data; kwargs...) + fit(model::FeedForward, optimizer, data; kwargs...) Train the `model` on `data` with the `optimizer`. @@ -343,6 +343,7 @@ Train the `model` on `data` with the `optimizer`. - `1`: Print starting and final messages - `2`: Print one time messages and a message at the start of each epoch - `3`: Print a summary of the training and validation accuracy for each epoch +* `η_decay::Symbol`: `:epoch` or `:batch`, decay learning rate on epoch or batch. """ function fit(self::FeedForward, optimizer::AbstractOptimizer, data::AbstractDataProvider; kwargs...) @@ -418,10 +419,11 @@ function fit(self::FeedForward, optimizer::AbstractOptimizer, data::AbstractData aux_arrays = [NDArray[exec.aux_arrays[i] for exec in train_execs] for i = 1:length(aux_names)] op_state = OptimizationState(batch_size) - optimizer.state = op_state + # set up the gradient rescaling if user not set + iszero(optimizer.scale) && (optimizer.scale = 1 / batch_size) if !update_on_kvstore - updater = get_updater(optimizer) + updater = getupdater(optimizer) end if !isa(kvstore, Void) @@ -481,7 +483,6 @@ function fit(self::FeedForward, optimizer::AbstractOptimizer, data::AbstractData op_state.curr_iter += 1 op_state.curr_batch += 1 - optimizer.state = op_state # update parameters for idx = 1:length(param_names) @@ -514,6 +515,9 @@ function fit(self::FeedForward, optimizer::AbstractOptimizer, data::AbstractData end end + # trigger learning rate decay + opts.η_decay == :batch && update!(optimizer.η_sched) + # invoke callbacks after finishing each iteration _invoke_callbacks(self, opts.callbacks, op_state, AbstractBatchCallback) @@ -577,6 +581,10 @@ function fit(self::FeedForward, 
optimizer::AbstractOptimizer, data::AbstractData copy!(self.aux_params[name], aux_avg) end end + + # trigger learning rate decay + opts.η_decay == :epoch && update!(optimizer.η_sched) + _invoke_callbacks(self, opts.callbacks, op_state, AbstractEpochCallback; metric=metric) end # end of all epochs diff --git a/src/optimizer.jl b/src/optimizer.jl index 7a647332231b..06f93a5942d2 100644 --- a/src/optimizer.jl +++ b/src/optimizer.jl @@ -1,3 +1,7 @@ +############################################################################### +# Types +############################################################################### + """ AbstractOptimizer @@ -19,12 +23,10 @@ Base type for all momentum scheduler. """ abstract type AbstractMomentumScheduler end - - """ OptimizationState -# Attributes: +### Attributes * `batch_size`: The size of the mini-batch used in stochastic training. * `curr_epoch`: The current epoch count. Epoch 0 means no training yet, during the first @@ -45,99 +47,113 @@ mutable struct OptimizationState curr_batch :: Int curr_iter :: Int end + OptimizationState(batch_size::Int) = OptimizationState(batch_size, 0, 0, 0) +############################################################################### +# LearningRate module +############################################################################### -""" - get_learning_rate(scheduler, state) +module LearningRate -# Arguments -* `scheduler::AbstractLearningRateScheduler`: a learning rate scheduler. -* `state::OptimizationState`: the current state about epoch, mini-batch and iteration count. +import Base: get +import ..mx: AbstractLearningRateScheduler, OptimizationState, update! + +export initlrsched + +initlrsched(η::Real) = LearningRate.Fixed(η) + +update!(a::AbstractLearningRateScheduler) = (isdefined(a, :t) && (a.t += 1)) -Returns the current learning rate. 
""" -function get_learning_rate end + get(sched::AbstractLearningRateScheduler) -################################################################################ -# The learning rate module -module LearningRate -import ..mx: AbstractLearningRateScheduler, OptimizationState, get_learning_rate +Returns the current learning rate. +""" +get(::AbstractLearningRateScheduler) = nothing """ - LearningRate.Fixed + LearningRate.Fixed(η) Fixed learning rate scheduler always return the same learning rate. """ -mutable struct Fixed <: AbstractLearningRateScheduler - learning_rate :: Float64 +struct Fixed <: AbstractLearningRateScheduler + η::Float64 end -get_learning_rate(self :: Fixed, state :: OptimizationState) = self.learning_rate -""" - LearningRate.Exp +get(f::Fixed) = f.η + +doc""" + LearningRate.Exp(η₀; γ = 0.9) -``\eta_t = \eta_0\gamma^t``. Here ``t`` is the epoch count, or the iteration -count if `decay_on_iteration` is set to true. +```math +\eta_t = \eta_0\gamma^t +``` + +Where `t` is the epoch count, or the iteration count. """ mutable struct Exp <: AbstractLearningRateScheduler - learning_rate :: Float64 - gamma :: Float64 - on_iteration :: Bool + η₀::Float64 + γ ::Float64 + t ::Int end -function Exp(base_lr::Real; gamma::Real=0.9, decay_on_iteration::Bool=false) - @assert(0 < gamma < 1) - Exp(Float64(base_lr), Float64(gamma), decay_on_iteration) + +function Exp(η₀; γ = 0.9, t = 0) + @assert 0 < γ < 1 + Exp(η₀, γ, t) end -get_learning_rate(self :: Exp, state :: OptimizationState) = - self.learning_rate * self.gamma ^ (self.on_iteration ? state.curr_iter : state.curr_epoch) -""" - LearningRate.Inv -``\eta_t = \eta_0 * (1 + \gamma * t)^(-power)``. -Here ``t`` is the epoch count, or the iteration count if `decay_on_iteration` -is set to true. +get(a::Exp) = a.η₀ * a.γ^a.t + +doc""" + LearningRate.Inv(η₀; γ = 0.9, p = 0.5) + +```math +\eta_t = \eta_0 (1 + \gamma t)^{-p} +``` + +Where `t` is the epoch count, or the iteration count. 
""" mutable struct Inv <: AbstractLearningRateScheduler - learning_rate :: Float64 - gamma :: Float64 - power :: Float64 - on_iteration :: Bool + η₀::Float64 + γ ::Float64 + p ::Float64 + t ::Int end -function Inv(base_lr :: Real; gamma::Real=0.9, power::Real=0.5, decay_on_iteration::Bool=false) - @assert(0 < gamma < 1) - @assert(0 <= power) - Inv(Float64(base_lr), Float64(gamma), Float64(power), decay_on_iteration) -end -get_learning_rate(self :: Inv, state :: OptimizationState) = - self.learning_rate * ( 1 + self.gamma * (self.on_iteration ? state.curr_iter : state.curr_epoch)) ^ (-self.power) -end# module LearningRate -################################################################################ -function get_lr_scheduler(scheduler :: Any, lr :: Real) - if isa(scheduler, AbstractLearningRateScheduler) - return scheduler - else - return LearningRate.Fixed(lr) - end + +function Inv(η₀; γ = 0.9, p = 0.5, t = 0) + @assert 0 < γ < 1 + @assert 0 <= p + Inv(η₀, γ, p, t) end +get(i::Inv) = i.η₀ * (1 + i.γ*i.t)^(-i.p) + +end # module LearningRate + +using .LearningRate + +############################################################################### +# Momentum module +############################################################################### + +module Momentum + +import Base: get +import ..mx: AbstractMomentumScheduler, OptimizationState + +export initmomsched """ - get_momentum(scheduler, state) + get(sched) -* `scheduler::AbstractMomentumScheduler`: the momentum scheduler. -* `state::OptimizationState`: the state about current epoch, mini-batch and iteration count. +* `sched::AbstractMomentumScheduler`: the momentum scheduler. Returns the current momentum. """ -function get_momentum -end +get - -################################################################################ -# The Momentum module -module Momentum -import ..mx: AbstractMomentumScheduler, OptimizationState, get_momentum +initmomsched(μ::Real) = iszero(μ) ? 
Momentum.Null() : Momentum.Fixed(μ) """ Momentum.Null @@ -147,7 +163,8 @@ explicitly indicate momentum should not be used. """ struct Null <: AbstractMomentumScheduler end -get_momentum(self :: Null, state :: OptimizationState) = 0.0 + +get(::Null) = 0.0 """ Momentum.Fixed @@ -155,119 +172,116 @@ get_momentum(self :: Null, state :: OptimizationState) = 0.0 Fixed momentum scheduler always returns the same value. """ mutable struct Fixed <: AbstractMomentumScheduler - momentum :: Float64 + μ::Float64 end -get_momentum(self :: Fixed, state :: OptimizationState) = self.momentum -""" - Momentum.NadamScheduler +get(f::Fixed) = f.μ + +doc""" + NadamScheduler(; μ = 0.99, δ = 0.004, γ = 0.5, α = 0.96) Nesterov-accelerated adaptive momentum scheduler. -Description in "Incorporating Nesterov Momentum into Adam." -[http://cs229.stanford.edu/proj2015/054_report.pdf] -(http://cs229.stanford.edu/proj2015/054_report.pdf) - -``\mu_t = \mu_0 * (1 - \gamma * \alpha^{t * \delta})``. -Here -* ``t`` is the iteration count -* ``\delta``: default `0.004` is scheduler decay, -* ``\gamma``: default `0.5` -* ``\alpha``: default `0.96` -* ``\mu_0``: default `0.99` +Description in [Incorporating Nesterov Momentum into Adam] +(http://cs229.stanford.edu/proj2015/054_report.pdf). + +```math +\mu_t = \mu_0 * (1 - \gamma * \alpha^{t * \delta}) +``` + +Where +* `t`: iteration count +* `μ`: default `0.99`, μ₀ +* `δ`: default `0.004` is scheduler decay. 
+* `γ`: default `0.5` +* `α`: default `0.96` """ -mutable struct NadamScheduler <: AbstractMomentumScheduler - mu0 :: Float64 - delta :: Float64 - gamma :: Float64 - alpha :: Float64 -end -function NadamScheduler(;mu0::Real=0.99, delta::Real=0.004, - gamma::Real=0.5, alpha::Real=0.96) - @assert(0.0 <= delta) - @assert(0.0 <= alpha <= 1.0) - @assert(0.0 <= mu0 <= 1.0) - @assert(0.0 <= gamma <= 1.0) - NadamScheduler(Float64(mu0), Float64(delta), Float64(gamma), Float64(alpha)) +struct NadamScheduler <: AbstractMomentumScheduler + μ::Float64 + δ::Float64 + γ::Float64 + α::Float64 end -get_momentum(self :: NadamScheduler, state :: OptimizationState) = - self.mu0 * (1.0 - self.gamma*self.alpha^(state.curr_iter * self.delta)), - self.mu0 * (1.0 - self.gamma*self.alpha^((state.curr_iter + 1) * self.delta)) - -end # module Momentum -################################################################################ -function get_momentum_scheduler(scheduler :: Any, momentum :: Real) - if isa(scheduler, AbstractMomentumScheduler) - return scheduler - elseif momentum == 0 - return Momentum.Null() - else - return Momentum.Fixed(momentum) - end + +function NadamScheduler(; μ = 0.99, δ = 0.004, γ = 0.5, α = 0.96) + @assert 0.0 <= μ < 1.0 + @assert 0.0 <= δ + @assert 0.0 <= γ <= 1.0 + @assert 0.0 <= α <= 1.0 + NadamScheduler(μ, δ, γ, α) end -function get_momentum_scheduler(scheduler :: Any, - another_scheduler :: AbstractMomentumScheduler) +""" + get(n::NadamScheduler, t) - if isa(scheduler, AbstractMomentumScheduler) - return scheduler - else - return another_scheduler - end -end +Where `t` is the iteration count. 
+""" +get(n::NadamScheduler, t) = + n.μ * (1.0 - n.γ * n.α^( t * n.δ)), + n.μ * (1.0 - n.γ * n.α^((t + 1) * n.δ)) + +end # module Momentum + +using .Momentum + +############################################################################### +# Public APIs +############################################################################### """ - get_updater(optimizer) + getupdater(optimizer) -A utility function to create an updater function, that uses its closure to -store all the states needed for each weights. +A utility function to create an updater function of `KVStore`, +that uses its closure to store all the states needed for each weights. -* `optimizer::AbstractOptimizer`: the underlying optimizer. +Ther returned function has following signature: + +```julia +decend!(index::Int, ∇::NDArray, x::NDArray) +``` + +If the optimizer is stateful and need access/store states during updating, +`index` will be the key to access/store states. """ -function get_updater(optimizer::AbstractOptimizer) +function getupdater(optimizer::AbstractOptimizer) states = Dict{Int,Any}() - function updater(index::Int, grad::NDArray, weight::NDArray) + function updater(index::Int, ∇::NDArray, x::NDArray) if !haskey(states, index) - states[index] = create_state(optimizer, index, weight) + states[index] = create_state(optimizer, index, x) end - update(optimizer, index, weight, grad, states[index]) + update!(optimizer, index, x, ∇, states[index]) end - return updater + updater end """ - AbstractOptimizerOptions - -Base class for all optimizer options. -""" -abstract type AbstractOptimizerOptions end - -""" - normalized_gradient(opts, state, W, ∇) + normgrad(optimizer, W, ∇) Get the properly normalized gradient (re-scaled and clipped if necessary). -* `opts::AbstractOptimizerOptions`: options for the optimizer, - should contain the field `grad_clip` and `weight_decay`. -* `state::OptimizationState`: the current optimization state. 
+* `optimizer`: the optimizer, + should contain the field `scale`, `clip` and `λ`. * `W::NDArray`: the trainable weights. * `∇::NDArray`: the original gradient of the weights. """ -function normalized_gradient(opts::AbstractOptimizerOptions, state::OptimizationState, - W::NDArray, ∇::NDArray) - scale = 1.0 / state.batch_size - ∇ = ∇ * scale - - if opts.grad_clip > 0 - ∇ = clip(∇, -opts.grad_clip, opts.grad_clip) - end - if opts.weight_decay > 0 - @inplace ∇ += opts.weight_decay * W - end +function normgrad!(opt::AbstractOptimizer, W::NDArray, ∇::NDArray) + # rescaling + s = opt.scale + !iszero(s) && @inplace ∇ .*= s + # gradient clipping + c = opt.clip + c > 0 && clip!(∇, -c, c) + # weight decay + λ = opt.λ + λ > 0 && @inplace ∇ += λ .* W ∇ end +############################################################################### +# Builtin Optimizers +############################################################################### + include("optimizers/sgd.jl") include("optimizers/adam.jl") include("optimizers/adagrad.jl") diff --git a/src/optimizers/adadelta.jl b/src/optimizers/adadelta.jl index 3915d036496a..9fee3517a3cb 100644 --- a/src/optimizers/adadelta.jl +++ b/src/optimizers/adadelta.jl @@ -1,91 +1,87 @@ -@defstruct AdaDeltaOptions <: AbstractOptimizerOptions ( - (lr :: Real = 1.0, lr > 0), - (rho :: Real = 0.95, rho > 0 && rho < 1), - (epsilon :: Real = 1e-6, epsilon > 0), - (grad_clip :: Real = 0, grad_clip >= 0), - (weight_decay :: Real = 0.00001, weight_decay >= 0), - lr_scheduler :: Any = nothing -) - -""" - AdaDelta +doc""" + AdaDelta(; kwargs...) Scale learning rates by the ratio of accumulated gradients to accumulated updates, see [1] and notes for further description. - AdaDelta(; kwargs...) 
- -# Attributes -* `lr::Real`: default `1.0`, the learning rate controlling the - size of update steps -* `rho::Real`: default `0.9`, squared gradient moving average decay factor -* `epsilon::Real`: default `1e-6`, small value added for - numerical stability -* `grad_clip::Real`: default `0`, if positive, will clip the gradient - into the range `[-grad_clip, grad_clip]`. -* `weight_decay::Real`: default `0.00001`, weight decay is equivalent +### Attributes +* `η`: default `1.0`, learning rate. +* `ρ`: default `0.95`, squared gradient moving average decay factor. +* `ϵ`: default `1e-6`, small value added for numerical stability. +* `clip`: default `0`, gradient clipping. + If positive, will clip the gradient into the range `[-clip, clip]`. +* `scale`: default `0`, gradient rescaling. + If != 0, multiply the gradient with `scale` before updating. + Often choose to be `1.0 / batch_size`. + If leave it default, high-level API like `fit!` will set it to + `1.0 / batch_size`, since `fit!` knows the `batch_size`. +* `λ`: default `0.00001`, weight decay is equivalent to adding a global l2 regularizer for all the parameters. -# Notes -`rho` should be between 0 and 1. A value of `rho` close to 1 will decay the +### Notes +`ρ` should be between 0 and 1. A value of `ρ` close to 1 will decay the moving average slowly and a value close to 0 will decay the moving average fast. -`rho` = 0.95 and `epsilon` = 1e-6 are suggested in the paper and reported to +`ρ = 0.95` and `ϵ = 1e-6` are suggested in the paper and reported to work for multiple datasets (MNIST, speech). In the paper, no learning rate is -considered (so `lr` = 1.0). Probably best to keep it at this value. +considered (so `η = 1.0`). Probably best to keep it at this value. -`epsilon` is important for the very first update (so the numerator does -not become 0). +`ϵ` is important for the very first update (so the numerator does not become 0). 
-Using the step size `lr` and a decay factor `rho` the learning rate is +Using the step size `η` and a decay factor `ρ` the learning rate is calculated as: -``r_t &= \rho r_{t-1} + (1-\rho)*g^2\\ -\eta_t &= \eta \frac{\sqrt{s_{t-1} + \epsilon}} {\sqrt{r_t + \epsilon}}\\ -s_t &= \rho s_{t-1} + (1-\rho)*(\eta_t*g)^2`` -# References -* [1]: Zeiler, M. D. (2012): - ADADELTA: An Adaptive Learning Rate Method. arXiv Preprint arXiv:1212.5701. +```math +\begin{align*} + r_t &= ρ r_{t-1} + (1 - ρ) g^2 \\ + η_t &= η \frac{\sqrt{s_{t-1} + ϵ}} {\sqrt{r_t + ϵ}} \\ + s_t &= ρ s_{t-1} + (1 - ρ) (η_t \times g)^2 +\end{align*} +``` + +### References +1. Zeiler, M. D. (2012): + ADADELTA: An Adaptive Learning Rate Method. arXiv Preprint arXiv:1212.5701. """ - -mutable struct AdaDelta <: AbstractOptimizer - opts :: AdaDeltaOptions - state :: OptimizationState - - function AdaDelta(; kwargs...) - opts = AdaDeltaOptions(;kwargs...) - opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) - - new(opts) - end -end +AdaDelta + +@defstruct AdaDelta <: AbstractOptimizer ( + (η :: Real = 1.0, η > 0), + (ρ :: Real = 0.95, 0 < ρ < 1 ), + (ϵ :: Real = 1e-6, ϵ > 0), + (clip :: Real = 0, clip >= 0), + scale :: Real = 0, + (λ :: Real = 1e-5, λ >= 0), + η_sched :: Any = initlrsched(η) +) mutable struct AdaDeltaState - acc :: NDArray - delta_acc :: NDArray + x :: NDArray + Δx :: NDArray end -function create_state(self :: AdaDelta, index :: Int, weight :: NDArray) - return AdaDeltaState(zeros(size(weight), context(weight)), - zeros(size(weight), context(weight))) -end +create_state(::AdaDelta, ::Int, W::NDArray) = + AdaDeltaState(zeros(size(W), context(W)), zeros(size(W), context(W))) + +function update!(ada::AdaDelta, ::Int, W::NDArray, ∇::NDArray, s::AdaDeltaState) + η = get(ada.η_sched) + x = s.x + Δx = s.Δx + ρ = ada.ρ + ϵ = ada.ϵ -function update(self :: AdaDelta, index :: Int, weight :: NDArray, - grad :: NDArray, state :: AdaDeltaState) - lr = get_learning_rate(self.opts.lr_scheduler, 
self.state) - grad = normalized_gradient(self.opts, self.state, weight, grad) + normgrad!(ada, W, ∇) - # Update state.acc as in RMSProp - @inplace state.acc .*= self.opts.rho - @inplace state.acc .+= (1 - self.opts.rho) * grad .* grad + # Update s.acc as in RMSProp + @inplace x .*= ρ + @inplace x .+= (1 - ρ) .* ∇.^2 - # Compute update using the "old" state.delta_acc - update = grad .* sqrt(state.delta_acc + self.opts.epsilon) ./ - (sqrt(state.acc + self.opts.epsilon)) - @inplace weight .+= -lr * update + # Compute update using the "old" Δx + Δxₜ = ∇ .* sqrt(Δx .+ ϵ) ./ sqrt(x .+ ϵ) # FIXME: sqrt dot-call + @inplace W .+= -η .* Δxₜ - # update state.delta_acc using update - @inplace state.delta_acc .*= self.opts.rho - @inplace state.delta_acc .+= (1 - self.opts.rho) * update .* update + # update Δx using update + @inplace Δx .*= ρ + @inplace Δx .+= (1 - ρ) .* Δxₜ.^2 end diff --git a/src/optimizers/adagrad.jl b/src/optimizers/adagrad.jl index a5eee0bbd102..c72bac1e3b7e 100644 --- a/src/optimizers/adagrad.jl +++ b/src/optimizers/adagrad.jl @@ -1,66 +1,60 @@ -@defstruct AdaGradOptions <: AbstractOptimizerOptions ( - (lr :: Real = 0.1, lr > 0), - (epsilon :: Real = 1e-6, epsilon > 0), - (grad_clip :: Real = 0, grad_clip >= 0), - (weight_decay :: Real = 0.00001, weight_decay >= 0), - lr_scheduler :: Any = nothing -) - -""" - AdaGrad +doc""" + AdaGrad(; kwargs...) Scale learning rates by dividing with the square root of accumulated squared gradients. See [1] for further description. - AdaGrad(; kwargs...) - -# Attributes -* `lr::Real`: default `0.1`, the learning rate controlling the - size of update steps -* `epsilon::Real`: default `1e-6`, small value added for - numerical stability -* `grad_clip::Real`: default `0`, if positive, will clip the gradient - into the range `[-grad_clip, grad_clip]`. -* `weight_decay::Real`: default `0.00001`, weight decay is equivalent +### Arguments +* `η`: default `0.1`, learning rate. 
+* `ϵ`: default `1e-6`, small value added for numerical stability. +* `clip`: default `0`, gradient clipping. + If positive, will clip the gradient into the range `[-clip, clip]`. +* `scale`: default `0`, gradient rescaling. + If != 0, multiply the gradient with `scale` before updating. + Often choose to be `1.0 / batch_size`. + If leave it default, high-level API like `fit!` will set it to + `1.0 / batch_size`, since `fit!` knows the `batch_size`. +* `λ`: default `0.00001`, weight decay is equivalent to adding a global l2 regularizer for all the parameters. -# Notes -Using step size lr AdaGrad calculates the learning rate for feature i at +### Notes +Using step size `η` AdaGrad calculates the learning rate for feature `i` at time step t as: -``η_{t,i} = \frac{lr}{\sqrt{\sum^t_{t^\prime} g^2_{t^\prime,i} + ϵ}} g_{t,i}`` + +```math +η_{t,i} = \frac{lr}{\sqrt{\sum^t_{t^\prime} g^2_{t^\prime,i} + ϵ}} g_{t,i} +``` + as such the learning rate is monotonically decreasing. Epsilon is not included in the typical formula, see [2]. -# References -* [1]: Duchi, J., Hazan, E., & Singer, Y. (2011): - Adaptive subgradient methods for online learning and - stochastic optimization. JMLR, 12:2121-2159. -* [2]: Chris Dyer: Notes on AdaGrad. - [http://www.ark.cs.cmu.edu/cdyer/adagrad.pdf] - (http://www.ark.cs.cmu.edu/cdyer/adagrad.pdf) +### References +1. Duchi, J., Hazan, E., & Singer, Y. (2011): + Adaptive subgradient methods for online learning and + stochastic optimization. JMLR, 12:2121-2159. +2. Chris Dyer: Notes on AdaGrad. 
+ [http://www.ark.cs.cmu.edu/cdyer/adagrad.pdf] + (http://www.ark.cs.cmu.edu/cdyer/adagrad.pdf) """ +AdaGrad + +@defstruct AdaGrad <: AbstractOptimizer ( + (η :: Real = 0.1, η > 0), + (ϵ :: Real = 1e-6, ϵ > 0), + (clip :: Real = 0, clip >= 0), + scale :: Real = 0, + (λ :: Real = 1e-5, λ >= 0), + η_sched :: Any = initlrsched(η) +) -mutable struct AdaGrad <: AbstractOptimizer - opts :: AdaGradOptions - state :: OptimizationState - - function AdaGrad(; kwargs...) - opts = AdaGradOptions(;kwargs...) - opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) - - new(opts) - end -end +create_state(::AdaGrad, ::Int, W::NDArray) = zeros(size(W), context(W)) -function create_state(self :: AdaGrad, index :: Int, weight :: NDArray) - return zeros(size(weight), context(weight)) -end +function update!(ada::AdaGrad, ::Int, W::NDArray, ∇::NDArray, x::NDArray) + η = get(ada.η_sched) + ϵ = ada.ϵ -function update(self :: AdaGrad, index :: Int, weight :: NDArray, - grad :: NDArray, state :: NDArray) - lr = get_learning_rate(self.opts.lr_scheduler, self.state) - grad = normalized_gradient(self.opts, self.state, weight, grad) + normgrad!(ada, W, ∇) - @inplace state .+= grad .* grad - @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon)) + @inplace x .+= ∇.^2 # update state + @inplace W .+= -η .* ∇ ./ sqrt(x .+ ϵ) # FIXME: sqrt dot-call end diff --git a/src/optimizers/adam.jl b/src/optimizers/adam.jl index aa1bc90f9f78..0d5c1a23850d 100644 --- a/src/optimizers/adam.jl +++ b/src/optimizers/adam.jl @@ -1,13 +1,3 @@ -@defstruct ADAMOptions <: AbstractOptimizerOptions ( - (lr :: Real = 0.001, lr > 0), - (grad_clip :: Real = 0, grad_clip >= 0), - (weight_decay :: Real = 0.00001, weight_decay >= 0), - (beta1 :: Real = 0.9, beta1 > 0), - (beta2 :: Real = 0.999, beta2 > 0), - (epsilon :: Real = 1e-8, epsilon > 0), - lr_scheduler :: Any = nothing -) - """ ADAM @@ -16,58 +6,66 @@ Stochastic Optimization*. arXiv:1412.6980 [cs.LG]. ADAM(; kwargs...) 
-* `lr::Real`: default `0.001`, learning rate. -* `lr_scheduler::AbstractLearningRateScheduler`: default `nothing`, a - dynamic learning rate scheduler. If set, will overwrite the `lr` - parameter. -* `beta1::Real`: default `0.9`. -* `beta2::Real`: default `0.999`. -* `epsilon::Real`: default `1e-8`. -* `grad_clip::Real`: default `0`, if positive, will clip the gradient - into the range `[-grad_clip, grad_clip]`. -* `weight_decay::Real`: default `0.00001`, weight decay is equivalent - to adding a global l2 regularizer for all the parameters. +### Arguments +* `η`: default `0.001`, learning rate. +* `β1`: default `0.9`. +* `β2`: default `0.999`. +* `ϵ`: default `1e-8`. +* `clip`: default `0`, gradient clipping. + If positive, will clip the gradient into the range `[-clip, clip]`. +* `scale`: default `0`, gradient rescaling. + If != 0, multiply the gradient with `scale` before updating. + Often choose to be `1.0 / batch_size`. + If leave it default, high-level API like `fit!` will set it to + `1.0 / batch_size`, since `fit!` knows the `batch_size`. +* `λ`: default `0.00001`, weight decay is equivalent + to adding a global l2 regularizer for all the parameters. +* `η_sched::AbstractLearningRateScheduler`: default `LearningRate.Fixed(η)`, a + dynamic learning rate scheduler. If set, will overwrite the `η` parameter. """ -mutable struct ADAM <: AbstractOptimizer - opts :: ADAMOptions - state :: OptimizationState +ADAM - function ADAM(; kwargs...) - opts = ADAMOptions(;kwargs...) 
- opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) - - new(opts) - end -end +@defstruct ADAM <: AbstractOptimizer ( + (η :: Real = 0.001, η > 0), + (β1 :: Real = 0.9, 0 <= β1 < 1), + (β2 :: Real = 0.999, 0 <= β2 < 1), + (ϵ :: Real = 1e-8, ϵ > 0), + (clip :: Real = 0, clip >= 0), + scale :: Real = 0, + (λ :: Real = 1e-5, λ >= 0), + η_sched :: Any = initlrsched(η) +) mutable struct ADAMState - current_lr :: Float64 # current learning rate - mt :: NDArray - vt :: NDArray - beta1Power :: Float64 - beta2Power :: Float64 + η :: Float64 # current learning rate + mₜ :: NDArray + vₜ :: NDArray + β1ᵗ :: Float64 + β2ᵗ :: Float64 end -function create_state(self :: ADAM, index :: Int, weight :: NDArray) - return ADAMState( get_learning_rate(self.opts.lr_scheduler, self.state), - zeros(size(weight), context(weight)), - zeros(size(weight), context(weight)), - self.opts.beta1, - self.opts.beta2 ) -end +create_state(adam::ADAM, ::Int, W::NDArray) = + ADAMState(get(adam.η_sched), + zeros(size(W), context(W)), + zeros(size(W), context(W)), + adam.β1, adam.β2) + +function update!(adam::ADAM, ::Int, W::NDArray, ∇:: NDArray, s::ADAMState) + η = s.η + β1 = adam.β1 + β2 = adam.β2 + ϵ = adam.ϵ -function update(self :: ADAM, index :: Int, weight :: NDArray, grad :: NDArray, state :: ADAMState) - lr = state.current_lr - grad = normalized_gradient(self.opts, self.state, weight, grad) + normgrad!(adam, W, ∇) - state.mt = self.opts.beta1 * state.mt + (1 - self.opts.beta1) .* grad - state.vt = self.opts.beta2 * state.vt + (1 - self.opts.beta2) .* grad .* grad + s.mₜ = β1 * s.mₜ + (1 - β1) .* ∇ + s.vₜ = β2 * s.vₜ + (1 - β2) .* ∇.^2 - at = sqrt(1.0 - state.beta2Power)/(1.0 - state.beta1Power) + aₜ= sqrt(1.0 - s.β2ᵗ)/(1.0 - s.β1ᵗ) - state.beta1Power *= self.opts.beta1 - state.beta2Power *= self.opts.beta2 + # update βᵗ to βᵗ⁺¹ + s.β1ᵗ *= β1 + s.β2ᵗ *= β2 - @inplace weight .+= -lr * at * state.mt ./ - (sqrt(state.vt) + self.opts.epsilon) + @inplace W .+= -η * aₜ * s.mₜ ./ (sqrt(s.vₜ) 
.+ ϵ) end diff --git a/src/optimizers/adamax.jl b/src/optimizers/adamax.jl index adcdb78bf255..87ef0aa68831 100644 --- a/src/optimizers/adamax.jl +++ b/src/optimizers/adamax.jl @@ -1,77 +1,67 @@ -@defstruct AdaMaxOptions <: AbstractOptimizerOptions ( - (lr :: Real = 0.002, lr > 0), - (beta1 :: Real = 0.9, beta1 > 0 && beta1 < 1), - (beta2 :: Real = 0.999, beta2 > 0 && beta2 < 1), - (epsilon :: Real = 1e-8, epsilon > 0), - (grad_clip :: Real = 0, grad_clip >= 0), - (weight_decay :: Real = 0.00001, weight_decay >= 0), - lr_scheduler :: Any = nothing -) - """ - AdaMax + AdaMax(; kwargs...) This is a variant of of the Adam algorithm based on the infinity norm. See [1] for further description. - AdaMax(; kwargs...) - -# Attributes -* `lr::Real`: default `0.002`, the learning rate controlling the - size of update steps -* `beta1::Real`: default `0.9`, exponential decay rate - for the first moment estimates -* `beta2::Real`: default `0.999`, exponential decay rate for the - weighted infinity norm estimates -* `epsilon::Real`: default `1e-8`, small value added for - numerical stability -* `grad_clip::Real`: default `0`, if positive, will clip the gradient - into the range `[-grad_clip, grad_clip]`. -* `weight_decay::Real`: default `0.00001`, weight decay is equivalent +### Arguments +* `η`: default `0.002`, learning rate. +* `β1`: default `0.9`, exponential decay rate for the first moment estimates. +* `β2`: default `0.999`, exponential decay rate for the weighted + infinity norm estimates. +* `ϵ`: default `1e-8`, small value added for numerical stability. +* `clip`: default `0`, gradient clipping. + If positive, will clip the gradient into the range `[-clip, clip]`. +* `scale`: default `0`, gradient rescaling. + If != 0, multiply the gradient with `scale` before updating. + Often choose to be `1.0 / batch_size`. + If leave it default, high-level API like `fit!` will set it to + `1.0 / batch_size`, since `fit!` knows the `batch_size`. 
+* `λ`: default `0.00001`, weight decay is equivalent to adding a global l2 regularizer for all the parameters. -# References -* [1]: Kingma, Diederik, and Jimmy Ba (2014): - Adam: A Method for Stochastic Optimization. - [http://arxiv.org/abs/1412.6980v8] - (http://arxiv.org/abs/1412.6980v8). +### References +1. Kingma, Diederik, and Jimmy Ba (2014): + Adam: A Method for Stochastic Optimization. Section 7. + [http://arxiv.org/abs/1412.6980] + (http://arxiv.org/abs/1412.6980). """ +AdaMax -mutable struct AdaMax <: AbstractOptimizer - opts :: AdaMaxOptions - state :: OptimizationState - - function AdaMax(; kwargs...) - opts = AdaMaxOptions(; kwargs...) - opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) - - new(opts) - end -end +@defstruct AdaMax <: AbstractOptimizer ( + (η :: Real = 0.002, η > 0), + (β1 :: Real = 0.9, 0 <= β1 < 1), + (β2 :: Real = 0.999, 0 <= β2 < 1), + (ϵ :: Real = 1e-8, ϵ > 0), + (clip :: Real = 0, clip >= 0), + scale :: Real = 0, + (λ :: Real = 1e-5, λ >= 0), + η_sched :: Any = initlrsched(η) +) mutable struct AdaMaxState - mt :: NDArray - ut :: NDArray - beta1Power :: Float64 + mₜ :: NDArray + uₜ :: NDArray + β1ᵗ :: Float64 end -function create_state(self :: AdaMax, index :: Int, weight :: NDArray) - return AdaMaxState( zeros(size(weight), context(weight)), - zeros(size(weight), context(weight)), - self.opts.beta1 ) -end +create_state(ada::AdaMax, ::Int, W::NDArray) = + AdaMaxState(zeros(size(W), context(W)), + zeros(size(W), context(W)), + ada.β1) + +function update!(ada::AdaMax, ::Int, W::NDArray, ∇::NDArray, s::AdaMaxState) + η = get(ada.η_sched) + β1 = ada.β1 + β2 = ada.β2 + ϵ = ada.ϵ -function update(self :: AdaMax, index :: Int, weight :: NDArray, - grad :: NDArray, state :: AdaMaxState) - lr = get_learning_rate(self.opts.lr_scheduler, self.state) - grad = normalized_gradient(self.opts, self.state, weight, grad) + normgrad!(ada, W, ∇) - @inplace state.mt .*= self.opts.beta1 - @inplace state.mt .+= (1 - self.opts.beta1) * 
grad - state.ut = _maximum(self.opts.beta2 * state.ut, abs(grad)) + s.mₜ = β1 * s.mₜ .+ (1 - β1) .* ∇ + s.uₜ = _maximum(β2 * s.uₜ, abs(∇)) # FIXME abs dot-call - @inplace weight .+= - lr / (1 - state.beta1Power) * - state.mt ./ (state.ut + self.opts.epsilon) + @inplace W .+= -η / (1 - s.β1ᵗ) * s.mₜ ./ (s.uₜ + ϵ) - state.beta1Power *= self.opts.beta1 + s.β1ᵗ *= ada.β1 end diff --git a/src/optimizers/nadam.jl b/src/optimizers/nadam.jl index c3ac18dcc11a..524465ecb984 100644 --- a/src/optimizers/nadam.jl +++ b/src/optimizers/nadam.jl @@ -1,100 +1,94 @@ -@defstruct NadamOptions <: AbstractOptimizerOptions ( - (lr :: Real = 0.001, lr > 0), - (beta1 :: Real = 0.99, beta1 > 0 && beta1 < 1), - (beta2 :: Real = 0.999, beta2 > 0 && beta2 < 1), - (epsilon :: Real = 1e-8, epsilon > 0), - (grad_clip :: Real = 0, grad_clip >= 0), - (weight_decay :: Real = 0.00001, weight_decay >= 0), - lr_scheduler :: Any = nothing, - momentum_scheduler :: Any = nothing -) - -""" - Nadam +doc""" + Nadam(; kwargs...) Nesterov Adam optimizer: Adam RMSprop with Nesterov momentum, see [1] and notes for further description. - Nadam(; kwargs...) -# Attributes -* `lr::Real`: default `0.001`, learning rate. -* `beta1::Real`: default `0.99`. -* `beta2::Real`: default `0.999`. -* `epsilon::Real`: default `1e-8`, small value added for - numerical stability -* `grad_clip::Real`: default `0`, if positive, will clip the gradient - into the range `[-grad_clip, grad_clip]`. -* `weight_decay::Real`: default `0.00001`, weight decay is equivalent +### Arguments +* `η`: default `0.001`, learning rate. +* `β1`: default `0.99`. +* `β2`: default `0.999`. +* `ϵ`: default `1e-8`, small value added for numerical stability. +* `clip`: default `0`, gradient clipping. + If positive, will clip the gradient into the range `[-clip, clip]`. +* `scale`: default `0`, gradient rescaling. + If != 0, multiply the gradient with `scale` before updating. + Often choose to be `1.0 / batch_size`. 
+ If leave it default, high-level API like `fit!` will set it to + `1.0 / batch_size`, since `fit!` knows the `batch_size`. +* `λ`: default `0.00001`, weight decay is equivalent to adding a global l2 regularizer for all the parameters. -* `lr_scheduler::AbstractLearningRateScheduler`: default `nothing`, a - dynamic learning rate scheduler. If set, will overwrite the `lr` +* `η_sched::AbstractLearningRateScheduler`: default `nothing`, a + dynamic learning rate scheduler. If set, will overwrite the `η` parameter. -* `momentum_scheduler::AbstractMomentumScheduler` default - `NadamScheduler` of the form - ``\mu_t = beta1 * (1 - 0.5 * 0.96^{t * 0.004})`` +* `μ_sched::NadamScheduler` default `NadamScheduler()` of the form. + + ```math + \mu_t = β_1 (1 - 0.5 \times 0.96^{t \times 0.004}) + ``` -# Notes +### Notes Default parameters follow those provided in the paper. It is recommended to leave the parameters of this optimizer at their default values. -# References -* [1]: Incorporating Nesterov Momentum into Adam. - [http://cs229.stanford.edu/proj2015/054_report.pdf] - (http://cs229.stanford.edu/proj2015/054_report.pdf) -* [2]: On the importance of initialization and momentum in deep learning - [http://www.cs.toronto.edu/~fritz/absps/momentum.pdf] - (http://www.cs.toronto.edu/~fritz/absps/momentum.pdf) +### References +1. [Incorporating Nesterov Momentum into Adam] + (http://cs229.stanford.edu/proj2015/054_report.pdf). + +2. [On the importance of initialization and momentum in deep learning] + (http://www.cs.toronto.edu/~fritz/absps/momentum.pdf). """ -mutable struct Nadam <: AbstractOptimizer - opts :: NadamOptions - state :: OptimizationState - - function Nadam(; kwargs...) - opts = NadamOptions(; kwargs...) 
- opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) - opts.momentum_scheduler = get_momentum_scheduler(opts.momentum_scheduler, - Momentum.NadamScheduler(mu0=opts.beta1)) - - new(opts) - end -end +Nadam + +@defstruct Nadam <: AbstractOptimizer ( + (η :: Real = 0.001, η > 0), + (β1 :: Real = 0.99, 0 <= β1 < 1), + (β2 :: Real = 0.999, 0 <= β2 < 1), + (ϵ :: Real = 1e-8, ϵ > 0), + (clip :: Real = 0, clip >= 0), + scale :: Real = 0, + (λ :: Real = 1e-5, λ >= 0), + η_sched :: Any = initlrsched(η), + μ_sched :: Momentum.NadamScheduler = Momentum.NadamScheduler(μ = β1) +) mutable struct NadamState - mt :: NDArray - nt :: NDArray - momentum :: Float64 - beta2Power :: Float64 + m :: NDArray + n :: NDArray + Πμ :: Float64 + β2ᵗ :: Float64 + t :: Int # use in NadamScheduler. + # we store `t` in state because state is created for each `index` end -function create_state(self :: Nadam, index :: Int, weight :: NDArray) - return NadamState( zeros(size(weight), context(weight)), - zeros(size(weight), context(weight)), - 1.0, - self.opts.beta2 ) -end +create_state(n::Nadam, ::Int, W::NDArray) = + NadamState(zeros(size(W), context(W)), zeros(size(W), context(W)), + 1.0, n.β2, 1) + +function update!(na::Nadam, ::Int, W::NDArray, ∇::NDArray, s::NadamState) + η = get(na.η_sched) + μₜ, μₜ₁= get(na.μ_sched, s.t) + β1, β2 = na.β1, na.β2 + ϵ = na.ϵ -function update(self :: Nadam, index :: Int, weight :: NDArray, - grad :: NDArray, state :: NadamState) - lr = get_learning_rate(self.opts.lr_scheduler, self.state) - grad = normalized_gradient(self.opts, self.state, weight, grad) + normgrad!(na, W, ∇) + s.t += 1 - mu_t, mu_t1 = - get_momentum(self.opts.momentum_scheduler, self.state) - state.momentum *= mu_t - momentum_next = state.momentum * mu_t1 + s.Πμ *= μₜ + Πμ′ = s.Πμ * μₜ₁ - grad_prime = grad / (1.0 - state.momentum) - @inplace state.mt .*= self.opts.beta1 - @inplace state.mt .+= (1.0 - self.opts.beta1) * grad - mt = state.mt / (1.0 - momentum_next) + ∇′ = ∇ / (1.0 - s.Πμ) 
+ @inplace s.m .*= β1 + @inplace s.m .+= (1.0 - β1) * ∇ + m̂ = s.m / (1.0 - Πμ′) - @inplace state.nt .*= self.opts.beta2 - @inplace state.nt .+= (1.0 - self.opts.beta2) .* grad .* grad - nt = state.nt / (1.0 - state.beta2Power) - state.beta2Power *= self.opts.beta2 + @inplace s.n .*= β2 + @inplace s.n .+= (1.0 - β2) .* ∇.^2 + n̂ = s.n / (1.0 - s.β2ᵗ) + s.β2ᵗ *= β2 - mt_prime = (1.0 - mu_t) * grad_prime + mu_t1 * mt - @inplace weight .+= -lr * mt_prime ./ (sqrt(nt) + self.opts.epsilon) + m̄ = (1.0 - μₜ) * ∇′+ μₜ₁ * m̂ + @inplace W .+= -η * m̄ ./ (sqrt(n̂) + ϵ) end diff --git a/src/optimizers/rmsprop.jl b/src/optimizers/rmsprop.jl index 8afed0adc81d..3c51835300c4 100644 --- a/src/optimizers/rmsprop.jl +++ b/src/optimizers/rmsprop.jl @@ -1,71 +1,67 @@ -@defstruct RMSPropOptions <: AbstractOptimizerOptions ( - (lr :: Real = 0.001, lr > 0), - (rho :: Real = 0.9, rho > 0 && rho < 1), - (epsilon :: Real = 1e-6, epsilon > 0), - (grad_clip :: Real = 0, grad_clip >= 0), - (weight_decay :: Real = 0.00001, weight_decay >= 0), - lr_scheduler :: Any = nothing -) - -""" - RMSProp +doc""" + RMSProp(; kwargs...) Scale learning rates by dividing with the moving average of the root mean squared (RMS) gradients. See [1] for further description. - RMSProp(; kwargs...) +### Arguments -# Attributes -* `lr::Real`: default `0.1`, the learning rate controlling the - size of update steps -* `rho::Real`: default `0.9`, gradient moving average decay factor -* `epsilon::Real`: default `1e-6`, small value added for - numerical stability -* `grad_clip::Real`: default `0`, if positive, will clip the gradient - into the range `[-grad_clip, grad_clip]`. -* `weight_decay::Real`: default `0.00001`, weight decay is equivalent +* `η`: default `0.1`, learning rate. +* `ρ`: default `0.9`, gradient moving average decay factor. +* `ϵ`: default `1e-8`, small value added for numerical stability. +* `clip`: default `0`, gradient clipping. + If positive, will clip the gradient into the range `[-clip, clip]`. 
+* `scale`: default `0`, gradient rescaling. + If != 0, multiply the gradient with `scale` before updating. + Often choose to be `1.0 / batch_size`. + If leave it default, high-level API like `fit!` will set it to + `1.0 / batch_size`, since `fit!` knows the `batch_size`. +* `λ`: default `0.00001`, weight decay is equivalent to adding a global l2 regularizer for all the parameters. -# Notes -`rho` should be between 0 and 1. A value of `rho` close to 1 will decay the +### Notes +`ρ` should be between 0 and 1. A value of `ρ` close to 1 will decay the moving average slowly and a value close to 0 will decay the moving average fast. -Using the step size ``lr`` and a decay factor ``\rho`` the -learning rate ``\eta_t`` is calculated as: -``r_t &= ρ r_{t-1} + (1 - ρ)*g^2 \\ - η_t &= \frac{lr}{\sqrt{r_t + ϵ}}`` +Using the step size `η` and a decay factor `ρ the +learning rate `ηₜ` is calculated as: -# References -* [1]: Tieleman, T. and Hinton, G. (2012): - Neural Networks for Machine Learning, Lecture 6.5 - rmsprop. - Coursera. [http://www.youtube.com/watch?v=O3sxAc4hxZU] - (http://www.youtube.com/watch?v=O3sxAc4hxZU) (formula @5:20) -""" +```math +\begin{align*} + r_t &= ρ r_{t-1} + (1 - ρ)g^2 \\ + η_t &= \frac{η}{\sqrt{r_t + ϵ}} +\end{align*} +``` -mutable struct RMSProp <: AbstractOptimizer - opts :: RMSPropOptions - state :: OptimizationState - - function RMSProp(; kwargs...) - opts = RMSPropOptions(;kwargs...) - opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) +### References +1. Tieleman, T. and Hinton, G. (2012): + Neural Networks for Machine Learning, Lecture 6.5 - rmsprop. + Coursera. 
[http://www.youtube.com/watch?v=O3sxAc4hxZU] + (http://www.youtube.com/watch?v=O3sxAc4hxZU) (formula @5:20) +""" +RMSProp - new(opts) - end -end +@defstruct RMSProp <: AbstractOptimizer ( + (η :: Real = 0.001, η > 0), + (ρ :: Real = 0.9, 0 < ρ < 1), + (ϵ :: Real = 1e-8, ϵ > 0), + (clip :: Real = 0, clip >= 0), + scale :: Real = 0, + (λ :: Real = 1e-5, λ >= 0), + η_sched :: Any = initlrsched(η) +) -function create_state(self :: RMSProp, index :: Int, weight :: NDArray) - return zeros(size(weight), context(weight)) -end +create_state(::RMSProp, ::Int, W::NDArray) = zeros(size(W), context(W)) -function update(self :: RMSProp, index :: Int, weight :: NDArray, - grad :: NDArray, state :: NDArray) - lr = get_learning_rate(self.opts.lr_scheduler, self.state) - grad = normalized_gradient(self.opts, self.state, weight, grad) +function update!(rms::RMSProp, ::Int, W::NDArray, ∇::NDArray, s::NDArray) + η = get(rms.η_sched) + ρ = rms.ρ + ϵ = rms.ϵ - @inplace state .*= self.opts.rho - @inplace state .+= (1 - self.opts.rho) * grad .* grad + normgrad!(rms, W, ∇) - @inplace weight .+= -lr * grad ./ (sqrt(state + self.opts.epsilon)) + @inplace s .*= ρ + @inplace s .+= (1 - ρ) .* (∇.^2) + @inplace W .+= -η .* ∇ ./ sqrt(s .+ ϵ) # FIXME: sqrt should be dot-call end diff --git a/src/optimizers/sgd.jl b/src/optimizers/sgd.jl index 6e8ab34f1409..9da9f1a0b72b 100644 --- a/src/optimizers/sgd.jl +++ b/src/optimizers/sgd.jl @@ -1,69 +1,71 @@ -@defstruct SGDOptions <: AbstractOptimizerOptions ( - (lr :: Real = 0.01, lr > 0), - (momentum :: Real = 0.0, momentum >= 0), - (grad_clip :: Real = 0, grad_clip >= 0), - (weight_decay :: Real = 0.0001, weight_decay >= 0), - lr_scheduler :: Any = nothing, - momentum_scheduler :: Any = nothing -) - -""" - SGD +doc""" + SGD(; kwargs...) Stochastic gradient descent optimizer. - SGD(; kwargs...) +Vanilla SGD: -# Arguments: -* `lr::Real`: default `0.01`, learning rate. 
-* `lr_scheduler::AbstractLearningRateScheduler`: default `nothing`, a - dynamic learning rate scheduler. If set, will overwrite the `lr` - parameter. -* `momentum::Real`: default `0.0`, the momentum. -* `momentum_scheduler::AbstractMomentumScheduler`: default `nothing`, - a dynamic momentum scheduler. If set, will overwrite the `momentum` - parameter. -* `grad_clip::Real`: default `0`, if positive, will clip the gradient - into the bounded range `[-grad_clip, grad_clip]`. -* `weight_decay::Real`: default `0.0001`, weight decay is equivalent to - adding a global l2 regularizer to the parameters. -""" -mutable struct SGD <: AbstractOptimizer - opts :: SGDOptions - state :: OptimizationState +```math +\theta \leftarrow \theta - \eta \nabla +``` - function SGD(; kwargs...) - opts = SGDOptions(;kwargs...) - opts.lr_scheduler = get_lr_scheduler(opts.lr_scheduler, opts.lr) - opts.momentum_scheduler = get_momentum_scheduler(opts.momentum_scheduler, opts.momentum) +SGD with momentum:: - new(opts) - end -end +```math +\begin{align*} + \nu & \leftarrow \mu \nu_{t-1} - \eta \nabla \\ + \theta & \leftarrow \theta + \nu_t +\end{align*} +``` -function create_state(self :: SGD, index :: Int, weight :: NDArray) - if isa(self.opts.momentum_scheduler, Momentum.Null) - return nothing - else - return zeros(size(weight), context(weight)) - end -end +### Arguments + +* `η`: default `0.01`, learning rate. +* `μ`: default `0`, the momentum, usually set to `0.9` in this implementation. +* `λ`: default `0.0001`, weight decay is equivalent to + adding a global l2 regularizer to the parameters. +* `clip`: default `0`, gradient clipping. + If positive, will clip the gradient into the bounded range `[-clip, clip]`. +* `scale`: default `0`, gradient rescaling. + If != 0, multiply the gradient with `scale` before updating. + Often choose to be `1.0 / batch_size`. + If leave it default, high-level API like `fit!` will set it to + `1.0 / batch_size`, since `fit!` knows the `batch_size`. 
+* `μ_sched::AbstractMomentumScheduler`: default `Momentum.Null()`, + a dynamic momentum scheduler. If set, will overwrite the `momentum` + parameter. +* `η_sched::AbstractLearningRateScheduler`: default `LearningRate.Fixed(η)`, a + dynamic learning rate scheduler. If set, will overwrite the `η` parameter. +""" +SGD -function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: Void) - lr = get_learning_rate(self.opts.lr_scheduler, self.state) - grad = normalized_gradient(self.opts, self.state, weight, grad) - - @inplace weight += -lr * grad +@defstruct SGD <: AbstractOptimizer ( + (η :: Real = 0.01, η > 0), + (μ :: Real = 0.0, μ >= 0), + (clip :: Real = 0, clip >= 0), + scale :: Real = 0, + (λ :: Real = 0.0001, λ >= 0), + η_sched :: Any = initlrsched(η), + μ_sched :: Any = initmomsched(μ) +) + +create_state(sgd::SGD, ::Int, W::NDArray) = + isa(sgd.μ_sched, Momentum.Null) ? nothing : zeros(size(W), context(W)) + +function update!(sgd::SGD, ::Int, W::NDArray, ∇::NDArray, ::Void) + η = get(sgd.η_sched) + normgrad!(sgd, W, ∇) + @inplace W += -η * ∇ end # update with momentum -function update(self :: SGD, index :: Int, weight :: NDArray, grad :: NDArray, state :: NDArray) - lr = get_learning_rate(self.opts.lr_scheduler, self.state) - grad = normalized_gradient(self.opts, self.state, weight, grad) +function update!(sgd::SGD, ::Int, W::NDArray, ∇::NDArray, ν::NDArray) + η = get(sgd.η_sched) + μ = get(sgd.μ_sched) + + normgrad!(sgd, W, ∇) - mom = state :: NDArray - coef = get_momentum(self.opts.momentum_scheduler, self.state) - @inplace mom .*= coef - @inplace mom .+= -lr * grad - @inplace weight .+= mom + @inplace ν .*= μ + @inplace ν .+= -η .* ∇ + @inplace W .+= ν end diff --git a/test/unittest/optimizer.jl b/test/unittest/optimizer.jl new file mode 100644 index 000000000000..664d53d6421a --- /dev/null +++ b/test/unittest/optimizer.jl @@ -0,0 +1,68 @@ +module TestOptimizer + +using Base.Test + +using MXNet +using MXNet.mx.LearningRate +using 
MXNet.mx.Momentum + + +function test_fixed_η() + info("Optimizer::LearningRate::Fixed") + x = LearningRate.Fixed(.42) + @test get(x) == .42 + update!(x) + @test get(x) == .42 +end # function test_fixed_η + + +function check_η_decay(x) + info("Optimizer::LearningRate::$x") + + η = get(x) + @test η == 1 + + for i ∈ 1:5 + update!(x) + η′ = get(x) + @test η′ < η + η = η′ + end +end # function check_η_decay + + +test_exp_η() = LearningRate.Exp(1) |> check_η_decay + + +test_inv_η() = LearningRate.Inv(1) |> check_η_decay + + +function test_μ_null() + info("Optimizer::Momentum::Null") + x = Momentum.Null() + @test iszero(get(x)) +end + + +function test_μ_fixed() + info("Optimizer::Momentum::Fixed") + x = Momentum.Fixed(42) + @test get(x) == 42 +end + + +@testset "Optimizer Test" begin + @testset "LearningRate Test" begin + test_fixed_η() + test_exp_η() + test_inv_η() + end + + @testset "Momentum Test" begin + test_μ_null() + test_μ_fixed() + end +end + + +end # module TestOptimizer From 2c09c26b1b781fc3d612eed26edc732e54f321c5 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 4 Feb 2018 16:37:11 +0800 Subject: [PATCH 622/630] Update README: `predict` section (#412) See also #411 --- README.md | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 74208b9ac034..26a20cf3a0db 100644 --- a/README.md +++ b/README.md @@ -50,22 +50,18 @@ You can also predict using the `model` in the following way: probs = mx.predict(model, eval_provider) # collect all labels from eval data -labels = Array[] -for batch in eval_provider - push!(labels, copy(mx.get(eval_provider, batch, :softmax_label))) -end -labels = cat(1, labels...) 
+labels = reduce( + vcat, + copy(mx.get(eval_provider, batch, :softmax_label)) for batch ∈ eval_provider) +# labels are 0...9 +labels .= labels .+ 1 # Now we use compute the accuracy -correct = 0 -for i = 1:length(labels) - # labels are 0...9 - if indmax(probs[:,i]) == labels[i]+1 - correct += 1 - end -end +pred = map(i -> indmax(probs[1:10, i]), 1:size(probs, 2)) +correct = sum(pred .== labels) accuracy = 100correct/length(labels) -println(mx.format("Accuracy on eval set: {1:.2f}%", accuracy)) +@printf "Accuracy on eval set: %.2f%%\n" accuracy ``` -For more details, please refer to the [documentation](https://dmlc.github.io/MXNet.jl/latest) and [examples](examples). +For more details, please refer to the +[documentation](https://dmlc.github.io/MXNet.jl/latest) and [examples](examples). From 6ac37f9ae2f8755ed349a8f9e071f55ed42516b5 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 5 Feb 2018 11:35:54 +0800 Subject: [PATCH 623/630] kvstore: refine and copy docstring from Python (#388) * kvstore: refine and copy docstring from Python add a Base.show for KVStore docstring for set_updater set_updater -> setupdater! and test cases set_optimizer -> setoptimizer! sgd * docstring of setoptimizer! --- NEWS.md | 6 + src/MXNet.jl | 7 +- src/kvstore.jl | 322 ++++++++++++++++++++++++++++++++------- src/model.jl | 22 ++- test/unittest/kvstore.jl | 28 +++- 5 files changed, 312 insertions(+), 73 deletions(-) diff --git a/NEWS.md b/NEWS.md index d531d9ea0caf..71ee86ff7da4 100644 --- a/NEWS.md +++ b/NEWS.md @@ -47,6 +47,12 @@ * `SeqMetric` * `KVStore` + * `init!()` + * `push!()` + * `pull!()` + * `barrier()` + * `set_updater()` + * `set_optimizer()` * `AbstractInitializer` * `UniformInitializer` diff --git a/src/MXNet.jl b/src/MXNet.jl index b550104a4e5b..a22e4040fece 100644 --- a/src/MXNet.jl +++ b/src/MXNet.jl @@ -79,7 +79,12 @@ export AbstractEvalMetric, SeqMetric # kvstore.jl -export KVStore +export KVStore, + init!, + pull!, + barrier, + setoptimizer!, + setupdater! 
# initializer.jl export AbstractInitializer, diff --git a/src/kvstore.jl b/src/kvstore.jl index c4d3ad9724a7..755d6f3d1b20 100644 --- a/src/kvstore.jl +++ b/src/kvstore.jl @@ -1,3 +1,31 @@ +import Base.push! + +""" + KVStore(kv_type = :local) + +For single machine training, there are two commonly used types: + +- `local`: Copies all gradients to CPU memory and updates weights there. + +- `device`: Aggregates gradients and updates weights on GPU(s). + With this setting, the `KVStore` also attempts to use GPU peer-to-peer + communication, potentially accelerating the communication. + +For distributed training, `KVStore` also supports a number of types: + +- `dist_sync`: Behaves similarly to `local` but with one major difference. + With `dist_sync`, batch-size now means the batch size used on each machine. + So if there are `n` machines and we use batch size ``b``, + then `dist_sync` behaves like `local` with batch size `n * b`. + +- `dist_device_sync`: Identical to `dist_sync` with the difference similar + to `device` vs `local`. + +- `dist_async`: Performs asynchronous updates. + The weights are updated whenever gradients are received from any machine. + No two updates happen on the same weight at the same time. + However, the order is not guaranteed. 
+""" mutable struct KVStore handle :: MX_KVStoreHandle updater_c :: Ptr{Void} @@ -7,20 +35,21 @@ mutable struct KVStore end function KVStore(kv_type::Symbol = :local) - #@assert(kv_type ∈ [:local]) # TODO: update with allowed types - + @assert kv_type ∈ (:local, :device, :dist_sync, :dist_device_sync, :dist_async) ref_hdr = Ref{MX_handle}(0) - kv_type = string(kv_type) - @mxcall(:MXKVStoreCreate, (char_p, Ref{MX_handle}), kv_type, ref_hdr) - return KVStore(MX_KVStoreHandle(ref_hdr[])) + @mxcall(:MXKVStoreCreate, (char_p, Ref{MX_handle}), dump_mx_param(kv_type), ref_hdr) + KVStore(MX_KVStoreHandle(ref_hdr[])) end -function Base.unsafe_convert(::Type{MX_handle}, obj::KVStore) + +Base.unsafe_convert(::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(MX_handle, obj.handle) -end Base.convert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) Base.cconvert(t::Type{MX_handle}, obj::KVStore) = Base.unsafe_convert(t, obj) -function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{<:Vector{<:NDArray}}) +Base.show(io::IO, kv::KVStore) = + print(io, "mx.KVStore @ $(get_type(kv))") + +function _flatten_kvlist(keys::Vector{Int}, vals::Vector{<:Vector{<:NDArray}}) @assert length(keys) == length(vals) keys_flt = Int[] vals_flt = NDArray[] @@ -31,102 +60,277 @@ function _flatten_kvlist(keys :: Vector{Int}, vals :: Vector{<:Vector{<:NDArray} return (keys_flt, vals_flt) end -init!(self::KVStore, key::Int, val::NDArray) = init!(self, [key], [val]) +""" + init!(kv::KVStore, key::Int, val::NDArray) + init!(kv::KVStore, keys, vals) + +Initializes a single or a sequence of key-value pairs into the store. -init!(self::KVStore, key::Int, vals::Vector{<:NDArray}) = - init!(self, Base.ones(Int, length(vals)) * key, vals) +For each key, one must `init!` it before calling `push!` or `pull!`. +When multiple workers invoke `init!` for the same key, only +the value supplied by worker with rank `0` is used. This function returns +after data has been initialized successfully. 
-init!(self::KVStore, keys::Vector{Int}, vals::Vector{<:Vector{<:NDArray}}) = - init!(self, _flatten_kvlist(keys, vals)...) +```jldoctest +julia> kv = KVStore(:local) +mx.KVStore @ local -function init!(self::KVStore, keys::Vector{Int}, vals::Vector{<:NDArray}) +julia> init!(kv, 42, mx.rand(2, 3)) +``` +""" +init!(kv::KVStore, key::Int, val::NDArray) = init!(kv, [key], [val]) +init!(kv::KVStore, key::Int, vals::Vector{<:NDArray}) = + init!(kv, Base.ones(Int, length(vals)) * key, vals) +init!(kv::KVStore, keys::Vector{Int}, vals::Vector{<:Vector{<:NDArray}}) = + init!(kv, _flatten_kvlist(keys, vals)...) + +function init!(kv::KVStore, keys::Vector{Int}, vals::VecOfNDArray) @assert length(keys) == length(vals) keys = Cint[keys...] vals = MX_handle[vals...] @mxcall(:MXKVStoreInit, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}), - self, length(keys), keys, vals) + kv, length(keys), keys, vals) end -import Base.push! -function push!(self :: KVStore, key :: Int, val :: NDArray; priority :: Int = 0) - push!(self, [key], [val]; priority = priority) -end -function push!(self :: KVStore, key :: Int, vals :: Vector{<:NDArray}; priority :: Int = 0) - push!(self, Base.ones(Int, length(vals))*key, vals; priority = priority) -end -function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{<:Vector{<:NDArray}}; - priority::Int=0) - push!(self, _flatten_kvlist(keys, vals)...; priority = priority) -end -function push!(self :: KVStore, keys :: Vector{Int}, vals :: Vector{<:NDArray}; priority::Int=0) +""" + push!(kv::KVStore, key, val; priority = 0) + push!(kv::KVStore, key, vals; priority = 0) + push!(kv::KVStore, keys, vals; priority = 0) + +Pushes a single or a sequence of key-value pairs into the store. + +This function returns immediately after adding an operator to the engine. +The actual operation is executed asynchronously. If there are consecutive +pushes to the same key, there is no guarantee on the serialization of pushes. 
+The execution of a push does not guarantee that all previous pushes are +finished. There is no synchronization between workers by default. +One can use ``barrier()`` to sync all workers. + +`push!` and `pull!` single `NDArray`: +```jldoctest +julia> kv = KVStore(:local) +mx.KVStore @ local + +julia> x = mx.empty(2, 3); + +julia> init!(kv, 3, x) + +julia> push!(kv, 3, mx.ones(2, 3) * 8) + +julia> pull!(kv, 3, x) + +julia> x +2×3 mx.NDArray{Float32,2} @ CPU0: + 8.0 8.0 8.0 + 8.0 8.0 8.0 +``` + +Aggregate values and `push!`: +```jldoctest +julia> vals = [mx.ones((2, 3), gpu(0)) * 3, mx.ones((2, 3), gpu(1)) * 4]; + +julia> push!(kv, 3, vals) + +julia> pull!(kv, 3, x) + +julia> x +2×3 mx.NDArray{Float32,2} @ CPU0: + 7.0 7.0 7.0 + 7.0 7.0 7.0 +``` + +`push!` a list of key to single device: + +```jldoctest +julia> keys = [4, 5]; + +julia> init!(kv, keys, [empty(2, 3), empty(2, 3)]) + +julia> push!(kv, keys, [x, x]) + +julia> y, z = empty(2, 3), empty(2, 3); + +julia> pull!(kv, keys, [y, z]) +``` +""" +push!(kv::KVStore, key::Int, val::NDArray; priority::Int = 0) = + push!(kv, [key], [val]; priority = priority) +push!(kv::KVStore, key::Int, vals::Vector{<:NDArray}; priority::Int = 0) = + push!(kv, Base.ones(Int, length(vals)) * key, vals; priority = priority) +push!(kv:: KVStore, keys::Vector{Int}, vals::Vector{<:Vector{<:NDArray}}; + priority::Int = 0) = + push!(kv, _flatten_kvlist(keys, vals)...; priority = priority) + +function push!(kv::KVStore, keys::Vector{Int}, vals::Vector{<:NDArray}; priority::Int = 0) @assert length(keys) == length(vals) keys = Cint[keys...] vals = MX_handle[vals...] 
@mxcall(:MXKVStorePush, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), - self, length(keys), keys, vals, priority) + kv, length(keys), keys, vals, priority) end -function pull!(self :: KVStore, key :: Int, out :: NDArray; priority :: Int = 0) - pull!(self, [key], [out]) -end -function pull!(self :: KVStore, key :: Int, outs :: Vector{<:NDArray}; priority :: Int = 0) - pull!(self, Base.ones(Int, length(outs))*key, outs; priority = priority) -end -function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{<:Vector{<:NDArray}}; priority::Int=0) - pull!(self, _flatten_kvlist(keys, outs)...; priority = priority) -end -function pull!(self :: KVStore, keys :: Vector{Int}, outs :: Vector{<:NDArray}; priority::Int=0) +""" Pulls a single value or a sequence of values from the store. + +This function returns immediately after adding an operator to the engine. +Subsequent attempts to read from the `out` variable will be blocked until the +pull operation completes. + +`pull` is executed asynchronously after all previous `pull` calls and only +the last `push` call for the same input key(s) are finished. + +The returned values are guaranteed to be the latest values in the store. + +See [`pull!`](@ref) for more examples. +""" +pull!(kv::KVStore, key::Int, out::NDArray; priority::Int = 0) = + pull!(kv, [key], [out], priority = priority) +pull!(kv::KVStore, key::Int, outs::Vector{<:NDArray}; priority::Int = 0) = + pull!(kv, Base.ones(Int, length(outs))*key, outs; priority = priority) +pull!(kv::KVStore, keys::Vector{Int}, outs::Vector{<:Vector{<:NDArray}}; + priority::Int = 0) = + pull!(kv, _flatten_kvlist(keys, outs)...; priority = priority) + +function pull!(kv::KVStore, keys::Vector{Int}, outs::Vector{<:NDArray}; priority::Int = 0) @assert length(keys) == length(outs) keys = Cint[keys...] outs = MX_handle[outs...] 
@mxcall(:MXKVStorePull, (MX_handle, MX_uint, Ptr{Cint}, Ptr{MX_handle}, Cint), - self, length(keys), keys, outs, priority) + kv, length(keys), keys, outs, priority) end -function get_type(self :: KVStore) +function get_type(kv::KVStore) type_ref = Ref{char_p}(0) - @mxcall(:MXKVStoreGetType, (MX_handle, Ref{char_p}), self, type_ref) + @mxcall(:MXKVStoreGetType, (MX_handle, Ref{char_p}), kv, type_ref) return Symbol(unsafe_string(type_ref[])) end -function get_num_workers(self :: KVStore) +function get_num_workers(kv::KVStore) ref_size = Ref{Cint}(0) - @mxcall(:MXKVStoreGetGroupSize, (MX_handle, Ref{Cint}), self, ref_size) + @mxcall(:MXKVStoreGetGroupSize, (MX_handle, Ref{Cint}), kv, ref_size) return Int(ref_size[]) end -function get_rank(self :: KVStore) +function get_rank(kv::KVStore) ref_rank = Ref{Cint}(0) - @mxcall(:MXKVStoreGetRank, (MX_handle, Ref{Cint}), self, ref_rank) + @mxcall(:MXKVStoreGetRank, (MX_handle, Ref{Cint}), kv, ref_rank) return Int(ref_rank[]) end +""" + barrier(kv::KVStore) + +Invokes global barrier among all worker nodes. + +For example, assume there are `n` machines. We would like machine `0` to first +`init` the values and then have all the workers `pull` the initialized value. +Before pulling, we can place invoke `barrier(kv)` to guarantee that the +initialization is finished. +""" +barrier(kv::KVStore) = @mxcall(:MXKVStoreBarrier, (MX_handle,), kv) + # TODO: Currently Julia does not support closure in c-callbacks, so we are making use of the # extra handle parameter of the API to pass the updater object around. Fix this when someday # full closure cfunction is supported in Julia. 
-function _kvstore_update_wrapper(index::Cint, nd_recv::MX_handle, nd_local::MX_handle, updater::Ptr{Void}) - updater_func = unsafe_pointer_to_objref(updater) :: Function - updater_func(Int(index), NDArray(MX_NDArrayHandle(nd_recv)), NDArray(MX_NDArrayHandle(nd_local))) - return nothing +function _kvstore_update_wrapper(key::Cint, nd_recv::MX_handle, nd_local::MX_handle, + updater::Ptr{Void}) + updater_func = unsafe_pointer_to_objref(updater) + updater_func(Int(key), NDArray(MX_NDArrayHandle(nd_recv)), + NDArray(MX_NDArrayHandle(nd_local))) + nothing end -function set_updater(self :: KVStore, updater :: Function) - self.updater = updater # keep a reference to the julia object so that updater_c is kept valid - self.updater_c = cfunction(_kvstore_update_wrapper, Void, (Cint, MX_handle, MX_handle, Ptr{Void})) +""" + setupdater!(kv, updater) + +Sets a `push!` updater into the store. + +This function only changes the local store. +When running on multiple machines one must use `set_optimizer`. 
+ +```jldoctest +julia> update(key, val, orig) = mx.@inplace orig += val .* .2 +update (generic function with 1 method) + +julia> kv = KVStore(:local) +mx.KVStore @ local + +julia> mx.setupdater!(kv, update) + +julia> init!(kv, 42, mx.ones(2, 3)) + +julia> push!(kv, 42, mx.ones(2, 3)) + +julia> x = empty(2, 3); + +julia> pull!(kv, 42, x) + +julia> x +2×3 mx.NDArray{Float32,2} @ CPU0: + 1.2 1.2 1.2 + 1.2 1.2 1.2 +``` +""" +function setupdater!(kv::KVStore, updater) + kv.updater = updater # keep a reference to the julia object so that updater_c is kept valid + kv.updater_c = cfunction(_kvstore_update_wrapper, Void, + (Cint, MX_handle, MX_handle, Ptr{Void})) @mxcall(:MXKVStoreSetUpdater, (MX_handle, Ptr{Void}, Any), - self, self.updater_c, updater) + kv, kv.updater_c, updater) end -function set_optimizer(self :: KVStore, optimizer :: AbstractOptimizer) - ref_is_worker = Ref{Cint}(0) - @mxcall(:MXKVStoreIsWorkerNode, (Ref{Cint},), ref_is_worker) - is_worker = ref_is_worker[] +""" + setoptimizer!(kv::KVStore, opt) + +Registers an optimizer with the kvstore. + +When using a single machine, this function updates the local optimizer. +If using multiple machines and this operation is invoked from a worker node, +it will serialized the optimizer with pickle and send it to all servers. +The function returns after all servers have been updated. 
+ +```jldoctest +julia> kv = KVStore() +mx.KVStore @ local + +julia> W = mx.zeros(2, 3) # 2×3 weight matrix +2×3 mx.NDArray{Float32,2} @ CPU0: + 0.0 0.0 0.0 + 0.0 0.0 0.0 - if ismatch(r"dist", string(get_type(self))) && is_worker +julia> init!(kv, 42, W) + +julia> setoptimizer!(kv, SGD(η = .2)) # SGD with .2 as learning rate + +julia> ∇W = mx.ones(2, 3) # assume it's the gradient +2×3 mx.NDArray{Float32,2} @ CPU0: + 1.0 1.0 1.0 + 1.0 1.0 1.0 + +julia> push!(kv, 42, ∇W) + +julia> pull!(kv, 42, W) # fetch weight and write back to `W` + +julia> W +2×3 mx.NDArray{Float32,2} @ CPU0: + -0.2 -0.2 -0.2 + -0.2 -0.2 -0.2 +``` +""" +function setoptimizer!(kv::KVStore, opt::AbstractOptimizer) + if ismatch(r"dist", string(get_type(kv))) && _isworker() # TODO + error("not implemented") else - set_updater(self, getupdater(optimizer)) + setupdater!(kv, getupdater(opt)) end end + +function _isworker()::Bool + ref = Ref{Cint}(0) + @mxcall(:MXKVStoreIsWorkerNode, (Ref{Cint},), ref) + ref_is_worker[] +end + +# TODO: sparse support? 
diff --git a/src/model.jl b/src/model.jl index 2f6dfef1c6d1..a9febc922ec6 100644 --- a/src/model.jl +++ b/src/model.jl @@ -281,27 +281,25 @@ end @defstruct TrainingOptions ( initializer :: AbstractInitializer = UniformInitializer(0.01), n_epoch :: Int = 10, - eval_data :: Union{Void, AbstractDataProvider} = nothing, + eval_data :: Union{Void,AbstractDataProvider} = nothing, eval_metric :: AbstractEvalMetric = Accuracy(), - kvstore :: Union{Symbol, KVStore} = :local, + kvstore :: Union{Symbol,KVStore} = :local, force_init :: Bool = false, callbacks :: Vector{AbstractCallback} = AbstractCallback[], verbosity :: Int = 3, η_decay :: Symbol = :epoch, ) -function _invoke_callbacks(self::FeedForward, callbacks::Vector{AbstractCallback}, +function _invoke_callbacks(m::FeedForward, callbacks::Vector{AbstractCallback}, state::OptimizationState, type_filter::Type; - metric::Vector{Tuple{Symbol,T}} = Vector{Tuple{Symbol,Real}}()) where T<:Real + metric = Vector{Tuple{Symbol,Real}}()) map(callbacks) do cb - if isa(cb, type_filter) - if type_filter == AbstractEpochCallback - # epoch callback have extra access to the model object - cb(self, state, metric) - else - cb(state) - end - end + !isa(cb, type_filter) && return + + # epoch callback have extra access to the model object + type_filter == AbstractEpochCallback && return cb(m, state, metric) + + cb(state) end end diff --git a/test/unittest/kvstore.jl b/test/unittest/kvstore.jl index cf15f1a65f05..89af95203c90 100644 --- a/test/unittest/kvstore.jl +++ b/test/unittest/kvstore.jl @@ -13,7 +13,7 @@ function init_kv() vals = [mx.zeros(SHAPE) for k in KEYS] mx.init!(kv, KEYS, vals) - return kv + kv end function test_kv_basic() @@ -62,10 +62,36 @@ function test_aggregator() end end +function check_setupdater!(f) + kv = KVStore(:local) + setupdater!(kv, f) + + A = Float32[1, 2, 3, 4] + B = Float32[.5, .6, .7, .8] + x = NDArray(A) + Δ = NDArray(B) + init!(kv, 42, x) + push!(kv, 42, Δ) + pull!(kv, 42, x) + + @test copy(x) ≈ A + 2B +end 
# function check_setupdater! + +function test_setupdater!() + info("KVStore::setupdater!") + + f(key, Δ, x) = @mx.inplace x += 2Δ + g(key, Δ, x) = (x[:] += 2Δ) + + check_setupdater!(f) + check_setupdater!(g) +end # test_setupdater! + @testset "KVStore Test" begin test_kv_basic() test_single_kv_pair() test_aggregator() + test_setupdater!() end end From 4038704f1f8bd733bc2aab5205068dddbbf122a2 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 15 Feb 2018 15:17:34 +0800 Subject: [PATCH 624/630] build: Propagate more build flags from ENV (#414) - `ADD_CFLAGS` - `ADD_LDFLAGS` See https://github.com/dmlc/MXNet.jl/issues/413#issuecomment-365664071 --- deps/build.jl | 12 ++++++++++++ docs/src/user-guide/install.md | 21 ++++++++++++++------- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index ff4441227cac..0312f6ed870c 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -68,6 +68,10 @@ else info("Did not find a CUDA installation, using CPU-only version of MXNet.") end +# propagate more build flags from ENV +const ADD_CFLAGS = get(ENV, "ADD_CFLAGS", nothing) +const ADD_LDFLAGS = get(ENV, "ADD_LDFLAGS", nothing) + function get_cpucore() if haskey(ENV, "TRAVIS") # on travis-ci 2 @@ -219,6 +223,14 @@ if !libmxnet_detected `sed -i -s 's/ADD_CFLAGS =\(.*\)/ADD_CFLAGS =\1 -DMXNET_USE_LAPACK/' config.mk` end + # propagate more build flags from ENV + if ADD_CFLAGS != nothing + `sed -i -s "s@ADD_CFLAGS =\(.*\)@ADD_CFLAGS =\1 $ADD_CFLAGS@" config.mk` + end + if ADD_LDFLAGS != nothing + `sed -i -s "s@ADD_LDFLAGS =\(.*\)@ADD_LDFLAGS =\1 $ADD_LDFLAGS@" config.mk` + end + if USE_JULIA_BLAS `make -j$(get_cpucore()) USE_BLAS=$blas_name $MSHADOW_LDFLAGS` else diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md index 30ed65d48511..4583f9f925d3 100644 --- a/docs/src/user-guide/install.md +++ b/docs/src/user-guide/install.md @@ -21,13 +21,20 @@ MXNet.jl is built on top of [libmxnet](https://github.com/dmlc/mxnet). 
Upon installation, Julia will try to automatically download and build libmxnet. -There are three environment variables that change this behaviour. If you -already have a pre-installed version of mxnet you can use `MXNET_HOME` -to point the build-process in the right direction. If the automatic -cuda detection fails you can also set `CUDA_HOME` to override the process. -To control which version of libmxnet will be compiled, you can use the -`MXNET_COMMIT` variable to point to either a version tag (e.g. `v0.10.0`), a -branch name (e.g. `master`) or a specific commit hash (e.g. `a0b1c2d3`). +There are several environment variables that change this behaviour. + +- `MXNET_HOME`: If you already have a pre-installed version of mxnet + you can use `MXNET_HOME` to point the build-process in the right direction. +- `CUDA_HOME`: If the automatic cuda detection fails you can also set `CUDA_HOME` + to override the process. +- `MXNET_COMMIT`: To control which version of libmxnet will be compiled, + you can use the`MXNET_COMMIT` variable to point to either a version tag + (e.g. `v0.10.0`), a branch name (e.g. `master`) or a specific commit hash + (e.g. `a0b1c2d3`). +- `ADD_CFLAGS`: Additional C flags. For instance, + if you need to point non-standard include directory, please set it as + `ENV["ADD_CFLAGS"] = "-I'/path/to/include/dir'"`. +- `ADD_LDFLAGS`: Additional linker flags. The libmxnet source is downloaded to `Pkg.dir("MXNet", "deps", "src", "mxnet")`. The automatic build is using default configurations, with OpenCV disabled. 
From 3c8e9d3abdfead7e113ca81ea87f8ae5e43ca304 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sun, 4 Mar 2018 15:42:17 +0800 Subject: [PATCH 625/630] build: propagate CC/CXX into config.mk (#419) * build: propagate CC/CXX into config.mk See: https://github.com/dmlc/MXNet.jl/issues/418#issuecomment-369638203 * update doc --- deps/build.jl | 8 ++++++++ docs/src/user-guide/install.md | 2 ++ 2 files changed, 10 insertions(+) diff --git a/deps/build.jl b/deps/build.jl index 0312f6ed870c..38c8e19c2fa2 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -69,6 +69,8 @@ else end # propagate more build flags from ENV +const CC = get(ENV, "CC", nothing) +const CXX = get(ENV, "CXX", nothing) const ADD_CFLAGS = get(ENV, "ADD_CFLAGS", nothing) const ADD_LDFLAGS = get(ENV, "ADD_LDFLAGS", nothing) @@ -224,6 +226,12 @@ if !libmxnet_detected end # propagate more build flags from ENV + if CC != nothing + `sed -i -s "s@^export CC =\(.*\)@export CC = $CC@" config.mk` + end + if CXX != nothing + `sed -i -s "s@^export CXX =\(.*\)@export CXX = $CXX@" config.mk` + end if ADD_CFLAGS != nothing `sed -i -s "s@ADD_CFLAGS =\(.*\)@ADD_CFLAGS =\1 $ADD_CFLAGS@" config.mk` end diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md index 4583f9f925d3..dd1262b47c61 100644 --- a/docs/src/user-guide/install.md +++ b/docs/src/user-guide/install.md @@ -31,6 +31,8 @@ There are several environment variables that change this behaviour. you can use the`MXNET_COMMIT` variable to point to either a version tag (e.g. `v0.10.0`), a branch name (e.g. `master`) or a specific commit hash (e.g. `a0b1c2d3`). +- `CC`: The path of C compiler. +- `CXX`: The path of C++ compiler. - `ADD_CFLAGS`: Additional C flags. For instance, if you need to point non-standard include directory, please set it as `ENV["ADD_CFLAGS"] = "-I'/path/to/include/dir'"`. 
From 3df1214e6b3e71d10d4f7156bcfb56e633b38f16 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Thu, 8 Mar 2018 20:23:06 +0800 Subject: [PATCH 626/630] build: propagate USE_JEMALLOC (#422) * build: propagate USE_JEMALLOC see https://github.com/dmlc/MXNet.jl/issues/418#issuecomment-371209108 --- deps/build.jl | 4 ++++ docs/src/user-guide/install.md | 9 +++++++++ 2 files changed, 13 insertions(+) diff --git a/deps/build.jl b/deps/build.jl index 38c8e19c2fa2..a16afe6aaac0 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -73,6 +73,7 @@ const CC = get(ENV, "CC", nothing) const CXX = get(ENV, "CXX", nothing) const ADD_CFLAGS = get(ENV, "ADD_CFLAGS", nothing) const ADD_LDFLAGS = get(ENV, "ADD_LDFLAGS", nothing) +const USE_JEMALLOC = get(ENV, "USE_JEMALLOC", nothing) # "0" or "1" function get_cpucore() if haskey(ENV, "TRAVIS") # on travis-ci @@ -238,6 +239,9 @@ if !libmxnet_detected if ADD_LDFLAGS != nothing `sed -i -s "s@ADD_LDFLAGS =\(.*\)@ADD_LDFLAGS =\1 $ADD_LDFLAGS@" config.mk` end + if USE_JEMALLOC != nothing + `sed -i -s "s@USE_JEMALLOC =\(.*\)@USE_JEMALLOC = $USE_JEMALLOC@" config.mk` + end if USE_JULIA_BLAS `make -j$(get_cpucore()) USE_BLAS=$blas_name $MSHADOW_LDFLAGS` diff --git a/docs/src/user-guide/install.md b/docs/src/user-guide/install.md index dd1262b47c61..f1d5eeefacfe 100644 --- a/docs/src/user-guide/install.md +++ b/docs/src/user-guide/install.md @@ -37,6 +37,15 @@ There are several environment variables that change this behaviour. if you need to point non-standard include directory, please set it as `ENV["ADD_CFLAGS"] = "-I'/path/to/include/dir'"`. - `ADD_LDFLAGS`: Additional linker flags. +- `USE_JEMALLOC`: Default is enabled if jemalloc available. + If you ran into segfault cause by jemalloc, + Please try to disable it. + + ```julia + # first remove whole libmxnet source: Pkg.dir("MXNet", "deps", "src") + ENV["USE_JEMALLOC"] = "0" + Pkg.build("MXNet") + ``` The libmxnet source is downloaded to `Pkg.dir("MXNet", "deps", "src", "mxnet")`. 
The automatic build is using default configurations, with OpenCV disabled. From 0ccf8b482ae1b7772314829d6dd13319d122f295 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Tue, 3 Apr 2018 13:24:02 +0800 Subject: [PATCH 627/630] build: add `libmxnet.so` into lib search list. (#424) In case of macOS, if user build libmxnet from source set MXNET_HOME, the output is still named as `libmxnet.so`. Ref: dmlc/MXNet.jl#423 --- deps/build.jl | 4 +++- src/base.jl | 2 +- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/deps/build.jl b/deps/build.jl index a16afe6aaac0..da4beee76f5f 100644 --- a/deps/build.jl +++ b/deps/build.jl @@ -11,7 +11,9 @@ if haskey(ENV, "MXNET_HOME") MXNET_HOME = ENV["MXNET_HOME"] info("MXNET_HOME environment detected: $MXNET_HOME") info("Trying to load existing libmxnet...") - lib = Libdl.find_library("libmxnet.$(Libdl.dlext)", + # In case of macOS, if user build libmxnet from source and set the MXNET_HOME, + # the output is still named as `libmxnet.so`. + lib = Libdl.find_library(["libmxnet.$(Libdl.dlext)", "libmxnet.so"], [joinpath(MXNET_HOME, "lib"), MXNET_HOME]) if !isempty(lib) info("Existing libmxnet detected at $lib, skip building...") diff --git a/src/base.jl b/src/base.jl index a3dcbbf7fe35..212f24d6fe37 100644 --- a/src/base.jl +++ b/src/base.jl @@ -30,7 +30,7 @@ const grad_req_map = Dict{Symbol,GRAD_REQ}( ################################################################################ # Initialization and library API entrance ################################################################################ -const MXNET_LIB = Libdl.find_library("libmxnet.$(Libdl.dlext)", +const MXNET_LIB = Libdl.find_library(["libmxnet.$(Libdl.dlext)", "libmxnet.so"], # see build.jl [joinpath(get(ENV, "MXNET_HOME", ""), "lib"), get(ENV, "MXNET_HOME", ""), Pkg.dir("MXNet", "deps", "usr", "lib")]) From b74a51603be51619a0b275b8ff8420b4182e1a58 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 20 Aug 2018 18:14:09 +0800 Subject: [PATCH 628/630] Update 
README.md more about julia version --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 26a20cf3a0db..0512b9a1aece 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,8 @@ MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julia * Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. * Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. +The current support julia version is 0.6. Julia 0.7/1.0 is + Here is an example of how training a simple 3-layer MLP on MNIST looks like: ```julia From 224ae962db8ae252ce1ac317c6d4a2cfe67a7d77 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Mon, 20 Aug 2018 18:14:47 +0800 Subject: [PATCH 629/630] Typo in README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0512b9a1aece..59f74bdce5f0 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,7 @@ MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julia * Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. * Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. -The current support julia version is 0.6. Julia 0.7/1.0 is +The current support julia version is 0.6. Julia 0.7/1.0 is not supported yet. 
Here is an example of how training a simple 3-layer MLP on MNIST looks like: From 9f8425b2774630f5442da257b306f4fa9afa8124 Mon Sep 17 00:00:00 2001 From: Iblis Lin Date: Sat, 29 Sep 2018 02:48:43 +0800 Subject: [PATCH 630/630] import Julia binding - enable Jenkins CI build for Julia - add license headers to Julia source code - update links for Julia README --- Jenkinsfile | 44 +++++++++++---- ci/docker/Dockerfile.build.ubuntu_cpu | 3 ++ ci/docker/install/ubuntu_julia.sh | 37 +++++++++++++ ci/docker/runtime_functions.sh | 54 +++++++++++++++++++ julia/README.md | 15 ++---- julia/appveyor.yml | 17 ++++++ julia/deps/build.jl | 21 +++++++- julia/deps/cblas.h | 19 +++++++ julia/deps/cpcblas.sh | 18 +++++++ julia/docs/Makefile | 17 ++++++ julia/docs/make.jl | 17 ++++++ julia/docs/mkdocs.yml | 17 ++++++ julia/examples/char-lstm/config.jl | 17 ++++++ julia/examples/char-lstm/lstm.jl | 17 ++++++ julia/examples/char-lstm/sampler.jl | 17 ++++++ julia/examples/char-lstm/seq-data.jl | 17 ++++++ julia/examples/char-lstm/train.jl | 17 ++++++ julia/examples/char-lstm/visualize.jl | 17 ++++++ julia/examples/cifar10/cifar10.jl | 17 ++++++ .../ijulia-pretrained-predict/imagehelper.py | 17 ++++++ julia/examples/mnist/lenet-stn.jl | 17 ++++++ julia/examples/mnist/lenet.jl | 17 ++++++ julia/examples/mnist/mlp-test.jl | 17 ++++++ julia/examples/mnist/mlp.jl | 17 ++++++ julia/examples/mnist/mnist-data.jl | 17 ++++++ julia/examples/nondefault-example.jl | 17 ++++++ julia/examples/regression-example.jl | 17 ++++++ julia/models/Inception/get.sh | 18 +++++++ julia/plugins/io/svmlight.jl | 17 ++++++ julia/src/MXNet.jl | 17 ++++++ julia/src/autograd.jl | 17 ++++++ julia/src/base.jl | 17 ++++++ julia/src/broadcast.jl | 17 ++++++ julia/src/callback.jl | 17 ++++++ julia/src/context.jl | 17 ++++++ julia/src/deprecated.jl | 17 ++++++ julia/src/executor.jl | 17 ++++++ julia/src/initializer.jl | 17 ++++++ julia/src/io.jl | 17 ++++++ julia/src/kvstore.jl | 17 ++++++ julia/src/metric.jl | 17 ++++++ 
julia/src/model.jl | 17 ++++++ julia/src/name.jl | 17 ++++++ julia/src/ndarray.jl | 17 ++++++ julia/src/nn-factory.jl | 17 ++++++ julia/src/optimizer.jl | 17 ++++++ julia/src/optimizers/adadelta.jl | 17 ++++++ julia/src/optimizers/adagrad.jl | 17 ++++++ julia/src/optimizers/adam.jl | 17 ++++++ julia/src/optimizers/adamax.jl | 17 ++++++ julia/src/optimizers/nadam.jl | 17 ++++++ julia/src/optimizers/rmsprop.jl | 17 ++++++ julia/src/optimizers/sgd.jl | 17 ++++++ julia/src/random.jl | 17 ++++++ julia/src/symbolic-node.jl | 17 ++++++ julia/src/util.jl | 17 ++++++ julia/src/visualize.jl | 17 ++++++ julia/test/common.jl | 17 ++++++ julia/test/runtests.jl | 17 ++++++ julia/test/travis/run_coverage.sh | 19 ++++++- julia/test/travis/run_test.sh | 18 +++++++ julia/test/travis/setup_env.sh | 18 +++++++ julia/test/unittest/autograd.jl | 17 ++++++ julia/test/unittest/bind.jl | 17 ++++++ julia/test/unittest/initializer.jl | 17 ++++++ julia/test/unittest/io.jl | 17 ++++++ julia/test/unittest/kvstore.jl | 17 ++++++ julia/test/unittest/metric.jl | 17 ++++++ julia/test/unittest/model.jl | 17 ++++++ julia/test/unittest/name.jl | 17 ++++++ julia/test/unittest/ndarray.jl | 17 ++++++ julia/test/unittest/operator.jl | 17 ++++++ julia/test/unittest/optimizer.jl | 17 ++++++ julia/test/unittest/random.jl | 17 ++++++ julia/test/unittest/symbolic-node.jl | 21 ++++++-- julia/test/unittest/util.jl | 17 ++++++ julia/test/unittest/visualize.jl | 17 ++++++ .../apache_rat_license_check/rat-excludes | 1 + tools/license_header.py | 6 ++- 79 files changed, 1371 insertions(+), 29 deletions(-) create mode 100755 ci/docker/install/ubuntu_julia.sh diff --git a/Jenkinsfile b/Jenkinsfile index 81a25deca27b..af059c58e830 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -89,6 +89,30 @@ def python3_gpu_ut_nocudnn(docker_container_name) { } } +def deploy_docs() { + parallel 'Docs': { + node(NODE_LINUX_CPU) { + ws('workspace/docs') { + timeout(time: max_time, unit: 'MINUTES') { + utils.init_git() + 
utils.docker_run('ubuntu_cpu', 'deploy_docs', false) + sh "ci/other/ci_deploy_doc.sh ${env.BRANCH_NAME} ${env.BUILD_NUMBER}" + } + } + } + }, + 'Julia docs': { + node(NODE_LINUX_CPU) { + ws('workspace/julia-docs') { + timeout(time: max_time, unit: 'MINUTES') { + utils.unpack_and_init('cpu', mx_lib) + utils.docker_run('ubuntu_cpu', 'deploy_jl_docs', false) + } + } + } + } +} + node('mxnetlinux-cpu') { // Loading the utilities requires a node context unfortunately checkout scm @@ -746,6 +770,16 @@ core_logic: { } } }, + 'Julia 0.6: CPU': { + node(NODE_LINUX_CPU) { + ws('workspace/ut-julia06-cpu') { + timeout(time: max_time, unit: 'MINUTES') { + utils.unpack_and_init('cpu', mx_lib) + utils.docker_run('ubuntu_cpu', 'unittest_ubuntu_cpu_julia06', false) + } + } + } + }, 'Python 2: CPU Win':{ node(NODE_WINDOWS_CPU) { @@ -911,15 +945,7 @@ core_logic: { } stage('Deploy') { - node(NODE_LINUX_CPU) { - ws('workspace/docs') { - timeout(time: max_time, unit: 'MINUTES') { - utils.init_git() - utils.docker_run('ubuntu_cpu', 'deploy_docs', false) - sh "ci/other/ci_deploy_doc.sh ${env.BRANCH_NAME} ${env.BUILD_NUMBER}" - } - } - } + deploy_docs() } } , diff --git a/ci/docker/Dockerfile.build.ubuntu_cpu b/ci/docker/Dockerfile.build.ubuntu_cpu index f45c8da4af87..7c7e2240ee61 100755 --- a/ci/docker/Dockerfile.build.ubuntu_cpu +++ b/ci/docker/Dockerfile.build.ubuntu_cpu @@ -45,6 +45,9 @@ RUN /work/ubuntu_r.sh COPY install/ubuntu_perl.sh /work/ RUN /work/ubuntu_perl.sh +COPY install/ubuntu_julia.sh /work/ +RUN /work/ubuntu_julia.sh + COPY install/ubuntu_clang.sh /work/ RUN /work/ubuntu_clang.sh diff --git a/ci/docker/install/ubuntu_julia.sh b/ci/docker/install/ubuntu_julia.sh new file mode 100755 index 000000000000..62013e36d8fd --- /dev/null +++ b/ci/docker/install/ubuntu_julia.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# build and install are separated so changes to build don't invalidate +# the whole docker cache for the image + +set -ex + +export JLBINARY='julia.tar.gz' +export JULIADIR='/work/julia' +export JULIA="${JULIADIR}/bin/julia" + +mkdir -p $JULIADIR +# The julia version in Ubuntu repo is too old +# We download the tarball from the official link: +# https://julialang.org/downloads/ +wget -O $JLBINARY https://julialang-s3.julialang.org/bin/linux/x64/0.6/julia-0.6.2-linux-x86_64.tar.gz +tar xzvf $JLBINARY -C $JULIADIR --strip 1 +rm $JLBINARY + +$JULIA -e 'versioninfo()' diff --git a/ci/docker/runtime_functions.sh b/ci/docker/runtime_functions.sh index d1fc2239a442..96b1646eff97 100755 --- a/ci/docker/runtime_functions.sh +++ b/ci/docker/runtime_functions.sh @@ -816,6 +816,35 @@ unittest_ubuntu_gpu_R() { make rpkgtest R_LIBS=/tmp/r-site-library R_GPU_ENABLE=1 } +unittest_ubuntu_cpu_julia06() { + set -ex + export PATH="/work/julia/bin:$PATH" + export MXNET_HOME='/work/mxnet' + export JULIA_PKGDIR='/work/julia-pkg' + export DEPDIR=`julia -e 'print(Pkg.dir())'` + + julia -e 'versioninfo()' + julia -e 'Pkg.init()' + + # install package + ln -sf ${MXNET_HOME}/julia ${DEPDIR}/MXNet + + # install dependencies + julia -e 'Pkg.resolve()' + + # FIXME + export 
LD_PRELOAD='/usr/lib/x86_64-linux-gnu/libjemalloc.so' + + # use the prebuilt binary from $MXNET_HOME/lib + julia -e 'Pkg.build("MXNet")' + + # run the script `julia/test/runtests.jl` + julia -e 'Pkg.test("MXNet")' + + # See https://github.com/dmlc/MXNet.jl/pull/303#issuecomment-341171774 + julia -e 'using MXNet; mx._sig_checker()' +} + unittest_centos7_cpu() { set -ex cd /work/mxnet @@ -1090,6 +1119,31 @@ deploy_docs() { popd } +deploy_jl_docs() { + set -ex + export PATH="/work/julia/bin:$PATH" + export MXNET_HOME='/work/mxnet' + export JULIA_PKGDIR='/work/julia-pkg' + export DEPDIR=`julia -e 'print(Pkg.dir())'` + + julia -e 'versioninfo()' + julia -e 'Pkg.init()' + ln -sf ${MXNET_HOME}/julia ${DEPDIR}/MXNet + julia -e 'Pkg.resolve()' + + # FIXME + export LD_PRELOAD='/usr/lib/x86_64-linux-gnu/libjemalloc.so' + + # use the prebuilt binary from $MXNET_HOME/lib + julia -e 'Pkg.build("MXNet")' + # build docs + julia -e 'Pkg.add("Documenter")' + julia -e 'cd(Pkg.dir("MXNet")); include(joinpath("docs", "make.jl"))' + + # TODO: make Jenkins worker push to MXNet.jl ph-pages branch if master build + # ... 
+} + # broken_link_checker broken_link_checker() { diff --git a/julia/README.md b/julia/README.md index 59f74bdce5f0..a4299575f95e 100644 --- a/julia/README.md +++ b/julia/README.md @@ -1,23 +1,14 @@ # MXNet -[![Build Status](https://travis-ci.org/dmlc/MXNet.jl.svg?branch=master)](https://travis-ci.org/dmlc/MXNet.jl) -[![Windows Build](https://ci.appveyor.com/api/projects/status/re90njols2th2ide?svg=true)](https://ci.appveyor.com/project/pluskid/mxnet-jl) -[![codecov.io](https://codecov.io/github/dmlc/MXNet.jl/coverage.svg?branch=master)](https://codecov.io/github/dmlc/MXNet.jl?branch=master) -[![](https://img.shields.io/badge/docs-latest-blue.svg)](https://dmlc.github.io/MXNet.jl/latest) -[![](https://img.shields.io/badge/docs-stable-blue.svg)](https://dmlc.github.io/MXNet.jl/stable) [![MXNet](http://pkg.julialang.org/badges/MXNet_0.6.svg)](http://pkg.julialang.org/?pkg=MXNet) -[![License](http://dmlc.github.io/img/apache2.svg)](LICENSE.md) -[![Join the chat at https://gitter.im/dmlc/mxnet](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/dmlc/mxnet) -MXNet.jl is the [dmlc/mxnet](https://github.com/dmlc/mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of its features include: +MXNet.jl is the [dmlc/mxnet](https://github.com/apache/incubator-mxnet) [Julia](http://julialang.org/) package. MXNet.jl brings flexible and efficient GPU computing and state-of-art deep learning to Julia. Some highlight of its features include: * Efficient tensor/matrix computation across multiple devices, including multiple CPUs, GPUs and distributed server nodes. -* Flexible symbolic manipulation to composite and construct state-of-the-art deep learning models. +* Flexible symbolic manipulation to composite and construction of state-of-the-art deep learning models. -The current support julia version is 0.6. Julia 0.7/1.0 is not supported yet. 
- -Here is an example of how training a simple 3-layer MLP on MNIST looks like: +Here is an example of how training a simple 3-layer MLP on MNIST: ```julia using MXNet diff --git a/julia/appveyor.yml b/julia/appveyor.yml index 449465246eff..50e275cfa8a8 100644 --- a/julia/appveyor.yml +++ b/julia/appveyor.yml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + environment: matrix: - JULIA_URL: "https://julialang-s3.julialang.org/bin/winnt/x64/0.6/julia-0.6-latest-win64.exe" diff --git a/julia/deps/build.jl b/julia/deps/build.jl index da4beee76f5f..bdc33be8c79b 100644 --- a/julia/deps/build.jl +++ b/julia/deps/build.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + import JSON ################################################################################ @@ -5,7 +22,7 @@ import JSON ################################################################################ libmxnet_detected = false libmxnet_curr_ver = get(ENV, "MXNET_COMMIT", "master") -curr_win = "20171019" # v0.12.0 +curr_win = "20180211" # v1.1.0 if haskey(ENV, "MXNET_HOME") MXNET_HOME = ENV["MXNET_HOME"] @@ -171,7 +188,7 @@ if !libmxnet_detected @build_steps begin BinDeps.DirectoryRule(_mxdir, @build_steps begin ChangeDirectory(_srcdir) - `git clone https://github.com/dmlc/mxnet` + `git clone https://github.com/apache/incubator-mxnet` end) @build_steps begin ChangeDirectory(_mxdir) diff --git a/julia/deps/cblas.h b/julia/deps/cblas.h index 98a02f4fa968..d9449dc8e21d 100644 --- a/julia/deps/cblas.h +++ b/julia/deps/cblas.h @@ -1,3 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + #ifndef CBLAS_H #define CBLAS_H diff --git a/julia/deps/cpcblas.sh b/julia/deps/cpcblas.sh index 1a592186b49c..99342897a58c 100755 --- a/julia/deps/cpcblas.sh +++ b/julia/deps/cpcblas.sh @@ -1,5 +1,23 @@ #!/bin/sh +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + # be invoked from build.jl set -e diff --git a/julia/docs/Makefile b/julia/docs/Makefile index d1cadf2e487b..57c623889a83 100644 --- a/julia/docs/Makefile +++ b/julia/docs/Makefile @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + all: julia --color=yes ./make.jl mkdocs build diff --git a/julia/docs/make.jl b/julia/docs/make.jl index 0a2b01d25cc8..6e3705a95fdc 100644 --- a/julia/docs/make.jl +++ b/julia/docs/make.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + using Documenter, MXNet makedocs( diff --git a/julia/docs/mkdocs.yml b/julia/docs/mkdocs.yml index e636a194196d..24281730885f 100644 --- a/julia/docs/mkdocs.yml +++ b/julia/docs/mkdocs.yml @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + site_name: MXNet.jl repo_url: https://github.com/dmlc/MXNet.jl diff --git a/julia/examples/char-lstm/config.jl b/julia/examples/char-lstm/config.jl index b5961549675f..c6ed0ff63b3c 100644 --- a/julia/examples/char-lstm/config.jl +++ b/julia/examples/char-lstm/config.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ const DROPOUT = 0 const BATCH_SIZE = 32 const SEQ_LENGTH = 32 diff --git a/julia/examples/char-lstm/lstm.jl b/julia/examples/char-lstm/lstm.jl index de6748df9420..fc4bcc4b6a91 100644 --- a/julia/examples/char-lstm/lstm.jl +++ b/julia/examples/char-lstm/lstm.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + # An explicitly unrolled LSTM with fixed sequence length. using MXNet diff --git a/julia/examples/char-lstm/sampler.jl b/julia/examples/char-lstm/sampler.jl index df4647f4f893..1a4aada22957 100644 --- a/julia/examples/char-lstm/sampler.jl +++ b/julia/examples/char-lstm/sampler.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + include(joinpath(@__DIR__, "config.jl")) include(joinpath(@__DIR__, "lstm.jl")) include(joinpath(@__DIR__, "seq-data.jl")) diff --git a/julia/examples/char-lstm/seq-data.jl b/julia/examples/char-lstm/seq-data.jl index 1456ae94a1a3..3489e5bc3c39 100644 --- a/julia/examples/char-lstm/seq-data.jl +++ b/julia/examples/char-lstm/seq-data.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + # Simple data provider that load text using Iterators using MXNet diff --git a/julia/examples/char-lstm/train.jl b/julia/examples/char-lstm/train.jl index 8b7a682c9560..57bfeb6b6e11 100644 --- a/julia/examples/char-lstm/train.jl +++ b/julia/examples/char-lstm/train.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + include(joinpath(@__DIR__, "config.jl")) include(joinpath(@__DIR__, "lstm.jl")) include(joinpath(@__DIR__, "seq-data.jl")) diff --git a/julia/examples/char-lstm/visualize.jl b/julia/examples/char-lstm/visualize.jl index acf757515005..e2a2c87c9c10 100644 --- a/julia/examples/char-lstm/visualize.jl +++ b/julia/examples/char-lstm/visualize.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ include(joinpath(dirname(@__FILE__), "config.jl")) include(joinpath(dirname(@__FILE__), "lstm.jl")) diff --git a/julia/examples/cifar10/cifar10.jl b/julia/examples/cifar10/cifar10.jl index 5d3d2eeafe41..a00664ce3a50 100644 --- a/julia/examples/cifar10/cifar10.jl +++ b/julia/examples/cifar10/cifar10.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + using MXNet #-------------------------------------------------------------------------------- diff --git a/julia/examples/imagenet/ijulia-pretrained-predict/imagehelper.py b/julia/examples/imagenet/ijulia-pretrained-predict/imagehelper.py index 7584b45d9313..dddef7415f45 100644 --- a/julia/examples/imagenet/ijulia-pretrained-predict/imagehelper.py +++ b/julia/examples/imagenet/ijulia-pretrained-predict/imagehelper.py @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + import numpy as np from skimage import io, transform diff --git a/julia/examples/mnist/lenet-stn.jl b/julia/examples/mnist/lenet-stn.jl index ae94bf9b16f7..95cd0955d402 100644 --- a/julia/examples/mnist/lenet-stn.jl +++ b/julia/examples/mnist/lenet-stn.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + using MXNet #-------------------------------------------------------------------------------- diff --git a/julia/examples/mnist/lenet.jl b/julia/examples/mnist/lenet.jl index 64bcdea9cba9..5ee15d69dd1b 100644 --- a/julia/examples/mnist/lenet.jl +++ b/julia/examples/mnist/lenet.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + using MXNet #-------------------------------------------------------------------------------- diff --git a/julia/examples/mnist/mlp-test.jl b/julia/examples/mnist/mlp-test.jl index dac2c9cd84eb..1af84ed3ba8a 100644 --- a/julia/examples/mnist/mlp-test.jl +++ b/julia/examples/mnist/mlp-test.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + # This file is primarily to be included from runtest.jl. We tried to cover various # features of MXNet.jl in this example in order to detect regression errors. 
diff --git a/julia/examples/mnist/mlp.jl b/julia/examples/mnist/mlp.jl index 8a73d2aa7637..20facc9b71b3 100644 --- a/julia/examples/mnist/mlp.jl +++ b/julia/examples/mnist/mlp.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + using MXNet #-------------------------------------------------------------------------------- diff --git a/julia/examples/mnist/mnist-data.jl b/julia/examples/mnist/mnist-data.jl index 7349152617f9..12160cf6f18e 100644 --- a/julia/examples/mnist/mnist-data.jl +++ b/julia/examples/mnist/mnist-data.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + function get_mnist_providers(batch_size::Int; data_name=:data, label_name=:softmax_label, flat=true) # download MNIST into Pkg.dir("MXNet")/data/mnist if not exist filenames = mx.get_mnist_ubyte() diff --git a/julia/examples/nondefault-example.jl b/julia/examples/nondefault-example.jl index b93887850ca3..75eff085a459 100644 --- a/julia/examples/nondefault-example.jl +++ b/julia/examples/nondefault-example.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + #= Contents: This file contains code for: - Setting the initial values of the biases and weights equal to the final values of a previous run. 
diff --git a/julia/examples/regression-example.jl b/julia/examples/regression-example.jl index 7d05baf3c632..bbbb415fe664 100644 --- a/julia/examples/regression-example.jl +++ b/julia/examples/regression-example.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + #= This script shows how a simple MLP net may be used for regression. It shows how data in memory may be diff --git a/julia/models/Inception/get.sh b/julia/models/Inception/get.sh index dfd3701bd0b4..16452a361d98 100755 --- a/julia/models/Inception/get.sh +++ b/julia/models/Inception/get.sh @@ -1,4 +1,22 @@ #!/bin/bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + wget -c http://data.dmlc.ml/mxnet/data/Inception.zip unzip Inception.zip diff --git a/julia/plugins/io/svmlight.jl b/julia/plugins/io/svmlight.jl index 46b79743b87f..f9d9b2ec83db 100644 --- a/julia/plugins/io/svmlight.jl +++ b/julia/plugins/io/svmlight.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + #=doc SVMLight / LibSVM is a popular data format for sparse features. Some preprocessed datasets in this format could be found at http://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/ diff --git a/julia/src/MXNet.jl b/julia/src/MXNet.jl index a22e4040fece..03c3cb89b530 100644 --- a/julia/src/MXNet.jl +++ b/julia/src/MXNet.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + __precompile__() module MXNet diff --git a/julia/src/autograd.jl b/julia/src/autograd.jl index 4584decb0a52..72fb82ba1bbb 100644 --- a/julia/src/autograd.jl +++ b/julia/src/autograd.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ # Autograd for NDArray # this is a port of Python's autograd module # https://github.com/apache/incubator-mxnet/blob/master/python/mxnet/autograd.py diff --git a/julia/src/base.jl b/julia/src/base.jl index 212f24d6fe37..ce1c183eafb5 100644 --- a/julia/src/base.jl +++ b/julia/src/base.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + "Exception thrown when an error occurred calling MXNet API." struct MXError <: Exception msg :: AbstractString diff --git a/julia/src/broadcast.jl b/julia/src/broadcast.jl index 1b58addec321..fee960a46271 100644 --- a/julia/src/broadcast.jl +++ b/julia/src/broadcast.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + using TakingBroadcastSeriously: Broadcasted, unwrap for f in :[%, diff --git a/julia/src/callback.jl b/julia/src/callback.jl index 16e44fdc632d..06e431de06d0 100644 --- a/julia/src/callback.jl +++ b/julia/src/callback.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + """ AbstractCallback diff --git a/julia/src/context.jl b/julia/src/context.jl index 2b83eebe97a7..c97522b3b846 100644 --- a/julia/src/context.jl +++ b/julia/src/context.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + @enum CONTEXT_TYPE CPU=1 GPU=2 CPU_PINNED=3 """ diff --git a/julia/src/deprecated.jl b/julia/src/deprecated.jl index 67b1707cdee6..12c5345aa198 100644 --- a/julia/src/deprecated.jl +++ b/julia/src/deprecated.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ # NDArray reshape (#272) @deprecate reshape(arr::NDArray; shape=()) reshape(arr, shape) @deprecate Reshape(arr::NDArray; shape=()) reshape(arr, shape) diff --git a/julia/src/executor.jl b/julia/src/executor.jl index c99517b6d76b..4bf4339d65d1 100644 --- a/julia/src/executor.jl +++ b/julia/src/executor.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + import Base: bind """ diff --git a/julia/src/initializer.jl b/julia/src/initializer.jl index 188c5deb6255..95dbeb31febd 100644 --- a/julia/src/initializer.jl +++ b/julia/src/initializer.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + """ AbstractInitializer diff --git a/julia/src/io.jl b/julia/src/io.jl index c8dbee9b8e1a..e5f43950754c 100644 --- a/julia/src/io.jl +++ b/julia/src/io.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + """ AbstractDataProvider diff --git a/julia/src/kvstore.jl b/julia/src/kvstore.jl index 755d6f3d1b20..ac0367144384 100644 --- a/julia/src/kvstore.jl +++ b/julia/src/kvstore.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + import Base.push! """ diff --git a/julia/src/metric.jl b/julia/src/metric.jl index 489df2ddd8b2..772eb3b3e680 100644 --- a/julia/src/metric.jl +++ b/julia/src/metric.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + """ AbstractEvalMetric diff --git a/julia/src/model.jl b/julia/src/model.jl index a9febc922ec6..109cb35e38a6 100644 --- a/julia/src/model.jl +++ b/julia/src/model.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + """ AbstractModel diff --git a/julia/src/name.jl b/julia/src/name.jl index d281770eb357..8180886c869c 100644 --- a/julia/src/name.jl +++ b/julia/src/name.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + abstract type AbstractNameManager end const NameType = Union{Base.Symbol, AbstractString} const NameCounter = Dict{Base.Symbol, Int} diff --git a/julia/src/ndarray.jl b/julia/src/ndarray.jl index d2ceb8498ca0..9e47150a1a00 100644 --- a/julia/src/ndarray.jl +++ b/julia/src/ndarray.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + # All the types supported by mshadow. See `mshadow/base.h` const DType = Union{Float32, Float64, Float16, UInt8, Int32, Int8, Int64} @enum TypeFlag kFloat32 kFloat64 kFloat16 kUint8 kInt32 kInt8 kInt64 diff --git a/julia/src/nn-factory.jl b/julia/src/nn-factory.jl index ab88d21adf29..b5134f9d2bd5 100644 --- a/julia/src/nn-factory.jl +++ b/julia/src/nn-factory.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ """ MLP(input, spec; hidden_activation = :relu, prefix) diff --git a/julia/src/optimizer.jl b/julia/src/optimizer.jl index 06f93a5942d2..6436878df678 100644 --- a/julia/src/optimizer.jl +++ b/julia/src/optimizer.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + ############################################################################### # Types ############################################################################### diff --git a/julia/src/optimizers/adadelta.jl b/julia/src/optimizers/adadelta.jl index 9fee3517a3cb..2b0cd0060261 100644 --- a/julia/src/optimizers/adadelta.jl +++ b/julia/src/optimizers/adadelta.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + doc""" AdaDelta(; kwargs...) diff --git a/julia/src/optimizers/adagrad.jl b/julia/src/optimizers/adagrad.jl index c72bac1e3b7e..4236cb8cda20 100644 --- a/julia/src/optimizers/adagrad.jl +++ b/julia/src/optimizers/adagrad.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + doc""" AdaGrad(; kwargs...) diff --git a/julia/src/optimizers/adam.jl b/julia/src/optimizers/adam.jl index 0d5c1a23850d..c6aa99ba71fb 100644 --- a/julia/src/optimizers/adam.jl +++ b/julia/src/optimizers/adam.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + """ ADAM diff --git a/julia/src/optimizers/adamax.jl b/julia/src/optimizers/adamax.jl index 87ef0aa68831..de6a1ab759b3 100644 --- a/julia/src/optimizers/adamax.jl +++ b/julia/src/optimizers/adamax.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + """ AdaMax(; kwargs...) diff --git a/julia/src/optimizers/nadam.jl b/julia/src/optimizers/nadam.jl index 524465ecb984..fdcd1ea7e7ab 100644 --- a/julia/src/optimizers/nadam.jl +++ b/julia/src/optimizers/nadam.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + doc""" Nadam(; kwargs...) diff --git a/julia/src/optimizers/rmsprop.jl b/julia/src/optimizers/rmsprop.jl index 3c51835300c4..8351142681b1 100644 --- a/julia/src/optimizers/rmsprop.jl +++ b/julia/src/optimizers/rmsprop.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + doc""" RMSProp(; kwargs...) 
diff --git a/julia/src/optimizers/sgd.jl b/julia/src/optimizers/sgd.jl index 9da9f1a0b72b..dfd3d954baa3 100644 --- a/julia/src/optimizers/sgd.jl +++ b/julia/src/optimizers/sgd.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + doc""" SGD(; kwargs...) diff --git a/julia/src/random.jl b/julia/src/random.jl index 1f612e310fe8..b9b32a42ecff 100644 --- a/julia/src/random.jl +++ b/julia/src/random.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ """ rand!(x::NDArray; low = 0, high = 1) diff --git a/julia/src/symbolic-node.jl b/julia/src/symbolic-node.jl index bb3c97773488..508f9d449028 100644 --- a/julia/src/symbolic-node.jl +++ b/julia/src/symbolic-node.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + """ SymbolicNode diff --git a/julia/src/util.jl b/julia/src/util.jl index c4f5e63244dd..c07c9ec910fb 100644 --- a/julia/src/util.jl +++ b/julia/src/util.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ ################################################################################ # Dataset related utilities ################################################################################ diff --git a/julia/src/visualize.jl b/julia/src/visualize.jl index 91bbd0c48481..b41db7e9e5a7 100644 --- a/julia/src/visualize.jl +++ b/julia/src/visualize.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + import JSON """ diff --git a/julia/test/common.jl b/julia/test/common.jl index 5854fc6659d0..5ac5f905143a 100644 --- a/julia/test/common.jl +++ b/julia/test/common.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + ################################################################################ # Common models used in testing ################################################################################ diff --git a/julia/test/runtests.jl b/julia/test/runtests.jl index eb05ff2f5ca4..8b46bfda6726 100644 --- a/julia/test/runtests.jl +++ b/julia/test/runtests.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ using MXNet using Base.Test diff --git a/julia/test/travis/run_coverage.sh b/julia/test/travis/run_coverage.sh index 73816fac7b98..ee22b258b549 100644 --- a/julia/test/travis/run_coverage.sh +++ b/julia/test/travis/run_coverage.sh @@ -1 +1,18 @@ -julia -e 'cd(Pkg.dir("MXNet")); Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())' \ No newline at end of file +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +julia -e 'cd(Pkg.dir("MXNet")); Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())' diff --git a/julia/test/travis/run_test.sh b/julia/test/travis/run_test.sh index bf726eb49157..414b1450b554 100755 --- a/julia/test/travis/run_test.sh +++ b/julia/test/travis/run_test.sh @@ -1,4 +1,22 @@ #!/bin/bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + set -e if [[ -a .git/shallow ]]; then git fetch --unshallow; fi diff --git a/julia/test/travis/setup_env.sh b/julia/test/travis/setup_env.sh index 4bae93243949..9d2f7341998d 100755 --- a/julia/test/travis/setup_env.sh +++ b/julia/test/travis/setup_env.sh @@ -1,5 +1,23 @@ #!/bin/bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + echo "##########################" echo $TRAVIS_OS_NAME diff --git a/julia/test/unittest/autograd.jl b/julia/test/unittest/autograd.jl index 12c1022bd208..96f275b626af 100644 --- a/julia/test/unittest/autograd.jl +++ b/julia/test/unittest/autograd.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestAutoGrad using Base.Test diff --git a/julia/test/unittest/bind.jl b/julia/test/unittest/bind.jl index 3bac43f2345c..538b556bf474 100644 --- a/julia/test/unittest/bind.jl +++ b/julia/test/unittest/bind.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestBind using MXNet using Base.Test diff --git a/julia/test/unittest/initializer.jl b/julia/test/unittest/initializer.jl index 750959f2eb20..fa528c9f8e13 100644 --- a/julia/test/unittest/initializer.jl +++ b/julia/test/unittest/initializer.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + @testset "Initializers" begin @testset "Bilinear initializer" begin # Setup a filter with scale = 2 diff --git a/julia/test/unittest/io.jl b/julia/test/unittest/io.jl index f8e558613c99..81f2ff79a83b 100644 --- a/julia/test/unittest/io.jl +++ b/julia/test/unittest/io.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ module TestIO using MXNet diff --git a/julia/test/unittest/kvstore.jl b/julia/test/unittest/kvstore.jl index 89af95203c90..96e1643d8d83 100644 --- a/julia/test/unittest/kvstore.jl +++ b/julia/test/unittest/kvstore.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestKVStore using MXNet using Base.Test diff --git a/julia/test/unittest/metric.jl b/julia/test/unittest/metric.jl index ca51f83671d9..32c4538b608c 100644 --- a/julia/test/unittest/metric.jl +++ b/julia/test/unittest/metric.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + module TestMetric using MXNet diff --git a/julia/test/unittest/model.jl b/julia/test/unittest/model.jl index 86a3a6f131ec..dbe97093dc68 100644 --- a/julia/test/unittest/model.jl +++ b/julia/test/unittest/model.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestModel using Base.Test diff --git a/julia/test/unittest/name.jl b/julia/test/unittest/name.jl index 9287bf087074..eea73efb43c9 100644 --- a/julia/test/unittest/name.jl +++ b/julia/test/unittest/name.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestNameManager using MXNet using Base.Test diff --git a/julia/test/unittest/ndarray.jl b/julia/test/unittest/ndarray.jl index 891dffd0f390..3bacbb009dfc 100644 --- a/julia/test/unittest/ndarray.jl +++ b/julia/test/unittest/ndarray.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestNDArray using MXNet diff --git a/julia/test/unittest/operator.jl b/julia/test/unittest/operator.jl index 85fc7c6238e9..ed8312d91cc9 100644 --- a/julia/test/unittest/operator.jl +++ b/julia/test/unittest/operator.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestOperator using MXNet diff --git a/julia/test/unittest/optimizer.jl b/julia/test/unittest/optimizer.jl index 664d53d6421a..b068f12fffd7 100644 --- a/julia/test/unittest/optimizer.jl +++ b/julia/test/unittest/optimizer.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestOptimizer using Base.Test diff --git a/julia/test/unittest/random.jl b/julia/test/unittest/random.jl index 34b9d3625c0d..973a4bc32faa 100644 --- a/julia/test/unittest/random.jl +++ b/julia/test/unittest/random.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestRandom using MXNet using Base.Test diff --git a/julia/test/unittest/symbolic-node.jl b/julia/test/unittest/symbolic-node.jl index e3e003d296af..507af17332f8 100644 --- a/julia/test/unittest/symbolic-node.jl +++ b/julia/test/unittest/symbolic-node.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestSymbolicNode using MXNet @@ -137,10 +154,8 @@ function test_attrs() data2 = mx.Variable(:data2, attrs = Dict(:test => "hallo!")) @test get(mx.get_attr(data2, :test)) == "hallo!" 
- conv = mx.Convolution(data2, kernel = (1,1), num_filter = 1, attrs = Dict(:a => "a", :π => "π")) + conv = mx.Convolution(data2, kernel = (1,1), num_filter = 1) @test isnull(mx.get_attr(conv, :b)) - @test get(mx.get_attr(conv, :a)) == "a" - @test get(mx.get_attr(conv, :π)) == "π" @test isa(mx.get_name(conv), Symbol) @test_throws MethodError mx.Variable(:data3, attrs = Dict(:test => "1.0", :test2 => 1.0)) diff --git a/julia/test/unittest/util.jl b/julia/test/unittest/util.jl index 0e6107ca0acc..ddd613ca48ea 100644 --- a/julia/test/unittest/util.jl +++ b/julia/test/unittest/util.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestUtil using Base.Test diff --git a/julia/test/unittest/visualize.jl b/julia/test/unittest/visualize.jl index f4ccbf0e5a81..58d111b0fe14 100644 --- a/julia/test/unittest/visualize.jl +++ b/julia/test/unittest/visualize.jl @@ -1,3 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + module TestVisualize using MXNet using Base.Test diff --git a/tests/nightly/apache_rat_license_check/rat-excludes b/tests/nightly/apache_rat_license_check/rat-excludes index 301a342f4664..0c305f498b34 100755 --- a/tests/nightly/apache_rat_license_check/rat-excludes +++ b/tests/nightly/apache_rat_license_check/rat-excludes @@ -57,3 +57,4 @@ apache-rat-tasks/* moderngpu/* deformable_im2col.cuh deformable_im2col.h +REQUIRE diff --git a/tools/license_header.py b/tools/license_header.py index 7aef33b71213..f6726891f521 100755 --- a/tools/license_header.py +++ b/tools/license_header.py @@ -76,13 +76,15 @@ 'example/rcnn/rcnn/cython/nms_kernel.cu', 'prepare_mkl.sh', 'example/image-classification/predict-cpp/image-classification-predict.cc', - 'src/operator/contrib/ctc_include/'] + 'src/operator/contrib/ctc_include/', + 'julia/REQUIRE'] # language extensions and the according commment mark _LANGS = {'.cc':'*', '.h':'*', '.cu':'*', '.cuh':'*', '.py':'#', '.pm':'#', '.scala':'*', '.cc':'*', '.sh':'#', '.cmake':'#', '.java':'*', '.sh':'#', '.cpp':'*', '.hpp':'*', '.c':'*', - '.bat':'rem', '.pl':'#', '.m':'%', '.R':'#', '.mk':'#', '.cfg':'#', '.t':'#', '.ps1': '#'} + '.bat':'rem', '.pl':'#', '.m':'%', '.R':'#', '.mk':'#', '.cfg':'#', + '.t':'#', '.ps1':'#', '.jl':'#'} # Previous license header, which will be removed _OLD_LICENSE = re.compile('.*Copyright.*by Contributors')